1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Set to non-zero once AIX common-mode calls have been defined. */
78 static int common_mode_defined;
80 /* Save information from a "cmpxx" operation until the branch or scc is
82 rtx rs6000_compare_op0, rs6000_compare_op1;
83 int rs6000_compare_fp_p;
85 /* Label number of label created for -mrelocatable, to call to so we can
86 get the address of the GOT section */
87 int rs6000_pic_labelno;
90 /* Which abi to adhere to */
91 const char *rs6000_abi_name = RS6000_ABI_NAME;
93 /* Semantics of the small data area */
94 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
96 /* Which small data model to use */
97 const char *rs6000_sdata_name = (char *)0;
99 /* Counter for labels which are to be placed in .fixup. */
100 int fixuplabelno = 0;
103 /* ABI enumeration available for subtarget to use. */
104 enum rs6000_abi rs6000_current_abi;
106 /* ABI string from -mabi= option. */
107 const char *rs6000_abi_string;
110 const char *rs6000_debug_name;
111 int rs6000_debug_stack; /* debug stack applications */
112 int rs6000_debug_arg; /* debug argument handling */
114 /* Flag to say the TOC is initialized */
116 char toc_label_name[10];
118 /* Alias set for saves and restores from the rs6000 stack. */
119 static int rs6000_sr_alias_set;
121 static void rs6000_add_gc_roots PARAMS ((void));
122 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
123 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
124 static void validate_condition_mode
125 PARAMS ((enum rtx_code, enum machine_mode));
126 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
127 static void rs6000_maybe_dead PARAMS ((rtx));
128 static void rs6000_emit_stack_tie PARAMS ((void));
129 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
130 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
131 static unsigned rs6000_hash_constant PARAMS ((rtx));
132 static unsigned toc_hash_function PARAMS ((const void *));
133 static int toc_hash_eq PARAMS ((const void *, const void *));
134 static int toc_hash_mark_entry PARAMS ((void **, void *));
135 static void toc_hash_mark_table PARAMS ((void *));
136 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
137 static void rs6000_free_machine_status PARAMS ((struct function *));
138 static void rs6000_init_machine_status PARAMS ((struct function *));
139 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
140 static int rs6000_ra_ever_killed PARAMS ((void));
141 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
142 const struct attribute_spec rs6000_attribute_table[];
143 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
144 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
145 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
146 HOST_WIDE_INT, HOST_WIDE_INT));
148 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
150 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
151 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
153 #ifdef OBJECT_FORMAT_COFF
154 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
156 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
157 static int rs6000_adjust_priority PARAMS ((rtx, int));
158 static int rs6000_issue_rate PARAMS ((void));
160 static void rs6000_init_builtins PARAMS ((void));
161 static void altivec_init_builtins PARAMS ((void));
162 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
163 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
164 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
165 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
166 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
167 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
168 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
169 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
170 static void rs6000_parse_abi_options PARAMS ((void));
171 static int first_altivec_reg_to_save PARAMS ((void));
172 static unsigned int compute_vrsave_mask PARAMS ((void));
173 static void is_altivec_return_reg PARAMS ((rtx, void *));
174 int vrsave_operation PARAMS ((rtx, enum machine_mode));
175 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
176 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
177 static int easy_vector_constant PARAMS ((rtx));
179 /* Default register names. */
180 char rs6000_reg_names[][8] =
182 "0", "1", "2", "3", "4", "5", "6", "7",
183 "8", "9", "10", "11", "12", "13", "14", "15",
184 "16", "17", "18", "19", "20", "21", "22", "23",
185 "24", "25", "26", "27", "28", "29", "30", "31",
186 "0", "1", "2", "3", "4", "5", "6", "7",
187 "8", "9", "10", "11", "12", "13", "14", "15",
188 "16", "17", "18", "19", "20", "21", "22", "23",
189 "24", "25", "26", "27", "28", "29", "30", "31",
190 "mq", "lr", "ctr","ap",
191 "0", "1", "2", "3", "4", "5", "6", "7",
193 /* AltiVec registers. */
194 "0", "1", "2", "3", "4", "5", "6", "7",
195 "8", "9", "10", "11", "12", "13", "14", "15",
196 "16", "17", "18", "19", "20", "21", "22", "23",
197 "24", "25", "26", "27", "28", "29", "30", "31",
201 #ifdef TARGET_REGNAMES
202 static const char alt_reg_names[][8] =
204 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
205 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
206 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
207 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
208 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
209 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
210 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
211 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
212 "mq", "lr", "ctr", "ap",
213 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
215 /* AltiVec registers. */
216 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
217 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
218 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
219 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
224 #ifndef MASK_STRICT_ALIGN
225 #define MASK_STRICT_ALIGN 0
228 /* Initialize the GCC target structure. */
229 #undef TARGET_ATTRIBUTE_TABLE
230 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
232 #undef TARGET_ASM_ALIGNED_DI_OP
233 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
235 /* Default unaligned ops are only provided for ELF. Find the ops needed
236 for non-ELF systems. */
237 #ifndef OBJECT_FORMAT_ELF
238 #ifdef OBJECT_FORMAT_COFF
239 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
241 #undef TARGET_ASM_UNALIGNED_HI_OP
242 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
243 #undef TARGET_ASM_UNALIGNED_SI_OP
244 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
245 #undef TARGET_ASM_UNALIGNED_DI_OP
246 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
249 #undef TARGET_ASM_UNALIGNED_HI_OP
250 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
251 #undef TARGET_ASM_UNALIGNED_SI_OP
252 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
256 /* This hook deals with fixups for relocatable code and DI-mode objects
258 #undef TARGET_ASM_INTEGER
259 #define TARGET_ASM_INTEGER rs6000_assemble_integer
261 #undef TARGET_ASM_FUNCTION_PROLOGUE
262 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
263 #undef TARGET_ASM_FUNCTION_EPILOGUE
264 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
267 #undef TARGET_SECTION_TYPE_FLAGS
268 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
271 #undef TARGET_SCHED_ISSUE_RATE
272 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
273 #undef TARGET_SCHED_ADJUST_COST
274 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
275 #undef TARGET_SCHED_ADJUST_PRIORITY
276 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
278 #undef TARGET_INIT_BUILTINS
279 #define TARGET_INIT_BUILTINS rs6000_init_builtins
281 #undef TARGET_EXPAND_BUILTIN
282 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
284 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
285 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
287 struct gcc_target targetm = TARGET_INITIALIZER;
289 /* Override command line options. Mostly we process the processor
290 type and sometimes adjust other TARGET_ options. */
293 rs6000_override_options (default_cpu)
294 const char *default_cpu;
297 struct rs6000_cpu_select *ptr;
299 /* Simplify the entries below by making a mask for any POWER
300 variant and any PowerPC variant. */
302 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
303 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
304 | MASK_PPC_GFXOPT | MASK_POWERPC64)
305 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
309 const char *const name; /* Canonical processor name. */
310 const enum processor_type processor; /* Processor type enum value. */
311 const int target_enable; /* Target flags to enable. */
312 const int target_disable; /* Target flags to disable. */
313 } const processor_target_table[]
314 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
315 POWER_MASKS | POWERPC_MASKS},
316 {"power", PROCESSOR_POWER,
317 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
318 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
319 {"power2", PROCESSOR_POWER,
320 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
321 POWERPC_MASKS | MASK_NEW_MNEMONICS},
322 {"power3", PROCESSOR_PPC630,
323 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
324 POWER_MASKS | MASK_PPC_GPOPT},
325 {"powerpc", PROCESSOR_POWERPC,
326 MASK_POWERPC | MASK_NEW_MNEMONICS,
327 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
328 {"powerpc64", PROCESSOR_POWERPC64,
329 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
330 POWER_MASKS | POWERPC_OPT_MASKS},
331 {"rios", PROCESSOR_RIOS1,
332 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
333 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
334 {"rios1", PROCESSOR_RIOS1,
335 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
336 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
337 {"rsc", PROCESSOR_PPC601,
338 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
339 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
340 {"rsc1", PROCESSOR_PPC601,
341 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
342 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
343 {"rios2", PROCESSOR_RIOS2,
344 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
345 POWERPC_MASKS | MASK_NEW_MNEMONICS},
346 {"rs64a", PROCESSOR_RS64A,
347 MASK_POWERPC | MASK_NEW_MNEMONICS,
348 POWER_MASKS | POWERPC_OPT_MASKS},
349 {"401", PROCESSOR_PPC403,
350 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
351 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
352 {"403", PROCESSOR_PPC403,
353 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
354 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
355 {"405", PROCESSOR_PPC405,
356 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
357 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
358 {"505", PROCESSOR_MPCCORE,
359 MASK_POWERPC | MASK_NEW_MNEMONICS,
360 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
361 {"601", PROCESSOR_PPC601,
362 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
363 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
364 {"602", PROCESSOR_PPC603,
365 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
366 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
367 {"603", PROCESSOR_PPC603,
368 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
369 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
370 {"603e", PROCESSOR_PPC603,
371 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
372 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
373 {"ec603e", PROCESSOR_PPC603,
374 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
375 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
376 {"604", PROCESSOR_PPC604,
377 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
378 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
379 {"604e", PROCESSOR_PPC604e,
380 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
381 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
382 {"620", PROCESSOR_PPC620,
383 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
384 POWER_MASKS | MASK_PPC_GPOPT},
385 {"630", PROCESSOR_PPC630,
386 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
387 POWER_MASKS | MASK_PPC_GPOPT},
388 {"740", PROCESSOR_PPC750,
389 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
390 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
391 {"750", PROCESSOR_PPC750,
392 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
393 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
394 {"7400", PROCESSOR_PPC7400,
395 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
396 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
397 {"7450", PROCESSOR_PPC7450,
398 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
399 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
400 {"801", PROCESSOR_MPCCORE,
401 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
402 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
403 {"821", PROCESSOR_MPCCORE,
404 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
405 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
406 {"823", PROCESSOR_MPCCORE,
407 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
408 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
409 {"860", PROCESSOR_MPCCORE,
410 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
411 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
413 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
415 /* Save current -mmultiple/-mno-multiple status. */
416 int multiple = TARGET_MULTIPLE;
417 /* Save current -mstring/-mno-string status. */
418 int string = TARGET_STRING;
420 /* Identify the processor type. */
421 rs6000_select[0].string = default_cpu;
422 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
424 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
426 ptr = &rs6000_select[i];
427 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
429 for (j = 0; j < ptt_size; j++)
430 if (! strcmp (ptr->string, processor_target_table[j].name))
433 rs6000_cpu = processor_target_table[j].processor;
437 target_flags |= processor_target_table[j].target_enable;
438 target_flags &= ~processor_target_table[j].target_disable;
444 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
448 /* If we are optimizing big endian systems for space, use the store
449 multiple instructions. */
450 if (BYTES_BIG_ENDIAN && optimize_size)
451 target_flags |= MASK_MULTIPLE;
453 /* If -mmultiple or -mno-multiple was explicitly used, don't
454 override with the processor default */
455 if (TARGET_MULTIPLE_SET)
456 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
458 /* If -mstring or -mno-string was explicitly used, don't override
459 with the processor default. */
460 if (TARGET_STRING_SET)
461 target_flags = (target_flags & ~MASK_STRING) | string;
463 /* Don't allow -mmultiple or -mstring on little endian systems
464 unless the cpu is a 750, because the hardware doesn't support the
465 instructions used in little endian mode, and causes an alignment
466 trap. The 750 does not cause an alignment trap (except when the
467 target is unaligned). */
469 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
473 target_flags &= ~MASK_MULTIPLE;
474 if (TARGET_MULTIPLE_SET)
475 warning ("-mmultiple is not supported on little endian systems");
480 target_flags &= ~MASK_STRING;
481 if (TARGET_STRING_SET)
482 warning ("-mstring is not supported on little endian systems");
486 if (flag_pic && DEFAULT_ABI == ABI_AIX)
488 warning ("-f%s ignored (all code is position independent)",
489 (flag_pic > 1) ? "PIC" : "pic");
493 #ifdef XCOFF_DEBUGGING_INFO
494 if (flag_function_sections && (write_symbols != NO_DEBUG)
495 && DEFAULT_ABI == ABI_AIX)
497 warning ("-ffunction-sections disabled on AIX when debugging");
498 flag_function_sections = 0;
501 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
503 warning ("-fdata-sections not supported on AIX");
504 flag_data_sections = 0;
508 /* Set debug flags */
509 if (rs6000_debug_name)
511 if (! strcmp (rs6000_debug_name, "all"))
512 rs6000_debug_stack = rs6000_debug_arg = 1;
513 else if (! strcmp (rs6000_debug_name, "stack"))
514 rs6000_debug_stack = 1;
515 else if (! strcmp (rs6000_debug_name, "arg"))
516 rs6000_debug_arg = 1;
518 error ("unknown -mdebug-%s switch", rs6000_debug_name);
521 /* Set size of long double */
522 rs6000_long_double_type_size = 64;
523 if (rs6000_long_double_size_string)
526 int size = strtol (rs6000_long_double_size_string, &tail, 10);
527 if (*tail != '\0' || (size != 64 && size != 128))
528 error ("Unknown switch -mlong-double-%s",
529 rs6000_long_double_size_string);
531 rs6000_long_double_type_size = size;
534 /* Handle -mabi= options. */
535 rs6000_parse_abi_options ();
537 #ifdef TARGET_REGNAMES
538 /* If the user desires alternate register names, copy in the
539 alternate names now. */
541 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
544 #ifdef SUBTARGET_OVERRIDE_OPTIONS
545 SUBTARGET_OVERRIDE_OPTIONS;
547 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
548 SUBSUBTARGET_OVERRIDE_OPTIONS;
551 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
552 If -maix-struct-return or -msvr4-struct-return was explicitly
553 used, don't override with the ABI default. */
554 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
556 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
557 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
559 target_flags |= MASK_AIX_STRUCT_RET;
562 /* Register global variables with the garbage collector. */
563 rs6000_add_gc_roots ();
565 /* Allocate an alias set for register saves & restores from stack. */
566 rs6000_sr_alias_set = new_alias_set ();
569 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
571 /* We can only guarantee the availability of DI pseudo-ops when
572 assembling for 64-bit targets. */
575 targetm.asm_out.aligned_op.di = NULL;
576 targetm.asm_out.unaligned_op.di = NULL;
579 /* Arrange to save and restore machine status around nested functions. */
580 init_machine_status = rs6000_init_machine_status;
581 free_machine_status = rs6000_free_machine_status;
584 /* Handle -mabi= options. */
586 rs6000_parse_abi_options ()
588 if (rs6000_abi_string == 0)
590 else if (! strcmp (rs6000_abi_string, "altivec"))
591 rs6000_altivec_abi = 1;
592 else if (! strcmp (rs6000_abi_string, "no-altivec"))
593 rs6000_altivec_abi = 0;
595 error ("unknown ABI specified: '%s'", rs6000_abi_string);
599 optimization_options (level, size)
600 int level ATTRIBUTE_UNUSED;
601 int size ATTRIBUTE_UNUSED;
605 /* Do anything needed at the start of the asm file. */
608 rs6000_file_start (file, default_cpu)
610 const char *default_cpu;
614 const char *start = buffer;
615 struct rs6000_cpu_select *ptr;
617 if (flag_verbose_asm)
619 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
620 rs6000_select[0].string = default_cpu;
622 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
624 ptr = &rs6000_select[i];
625 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
627 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
633 switch (rs6000_sdata)
635 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
636 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
637 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
638 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
641 if (rs6000_sdata && g_switch_value)
643 fprintf (file, "%s -G %d", start, g_switch_value);
654 /* Create a CONST_DOUBLE from a string. */
657 rs6000_float_const (string, mode)
659 enum machine_mode mode;
661 REAL_VALUE_TYPE value;
662 value = REAL_VALUE_ATOF (string, mode);
663 return immed_real_const_1 (value, mode);
666 /* Return non-zero if this function is known to have a null epilogue. */
671 if (reload_completed)
673 rs6000_stack_t *info = rs6000_stack_info ();
675 if (info->first_gp_reg_save == 32
676 && info->first_fp_reg_save == 64
677 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
680 && info->vrsave_mask == 0
688 /* Returns 1 always. */
691 any_operand (op, mode)
692 rtx op ATTRIBUTE_UNUSED;
693 enum machine_mode mode ATTRIBUTE_UNUSED;
698 /* Returns 1 if op is the count register. */
700 count_register_operand (op, mode)
702 enum machine_mode mode ATTRIBUTE_UNUSED;
704 if (GET_CODE (op) != REG)
707 if (REGNO (op) == COUNT_REGISTER_REGNUM)
710 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
716 /* Returns 1 if op is an altivec register. */
718 altivec_register_operand (op, mode)
720 enum machine_mode mode ATTRIBUTE_UNUSED;
723 return (register_operand (op, mode)
724 && (GET_CODE (op) != REG
725 || REGNO (op) > FIRST_PSEUDO_REGISTER
726 || ALTIVEC_REGNO_P (REGNO (op))));
730 xer_operand (op, mode)
732 enum machine_mode mode ATTRIBUTE_UNUSED;
734 if (GET_CODE (op) != REG)
737 if (XER_REGNO_P (REGNO (op)))
743 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
744 by such constants completes more quickly. */
747 s8bit_cint_operand (op, mode)
749 enum machine_mode mode ATTRIBUTE_UNUSED;
751 return ( GET_CODE (op) == CONST_INT
752 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
755 /* Return 1 if OP is a constant that can fit in a D field. */
758 short_cint_operand (op, mode)
760 enum machine_mode mode ATTRIBUTE_UNUSED;
762 return (GET_CODE (op) == CONST_INT
763 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
766 /* Similar for an unsigned D field. */
769 u_short_cint_operand (op, mode)
771 enum machine_mode mode ATTRIBUTE_UNUSED;
773 return (GET_CODE (op) == CONST_INT
774 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
777 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
780 non_short_cint_operand (op, mode)
782 enum machine_mode mode ATTRIBUTE_UNUSED;
784 return (GET_CODE (op) == CONST_INT
785 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
788 /* Returns 1 if OP is a CONST_INT that is a positive value
789 and an exact power of 2. */
792 exact_log2_cint_operand (op, mode)
794 enum machine_mode mode ATTRIBUTE_UNUSED;
796 return (GET_CODE (op) == CONST_INT
798 && exact_log2 (INTVAL (op)) >= 0);
801 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
805 gpc_reg_operand (op, mode)
807 enum machine_mode mode;
809 return (register_operand (op, mode)
810 && (GET_CODE (op) != REG
811 || (REGNO (op) >= ARG_POINTER_REGNUM
812 && !XER_REGNO_P (REGNO (op)))
813 || REGNO (op) < MQ_REGNO));
816 /* Returns 1 if OP is either a pseudo-register or a register denoting a
820 cc_reg_operand (op, mode)
822 enum machine_mode mode;
824 return (register_operand (op, mode)
825 && (GET_CODE (op) != REG
826 || REGNO (op) >= FIRST_PSEUDO_REGISTER
827 || CR_REGNO_P (REGNO (op))));
830 /* Returns 1 if OP is either a pseudo-register or a register denoting a
831 CR field that isn't CR0. */
834 cc_reg_not_cr0_operand (op, mode)
836 enum machine_mode mode;
838 return (register_operand (op, mode)
839 && (GET_CODE (op) != REG
840 || REGNO (op) >= FIRST_PSEUDO_REGISTER
841 || CR_REGNO_NOT_CR0_P (REGNO (op))));
844 /* Returns 1 if OP is either a constant integer valid for a D-field or
845 a non-special register. If a register, it must be in the proper
846 mode unless MODE is VOIDmode. */
849 reg_or_short_operand (op, mode)
851 enum machine_mode mode;
853 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
856 /* Similar, except check if the negation of the constant would be
857 valid for a D-field. */
860 reg_or_neg_short_operand (op, mode)
862 enum machine_mode mode;
864 if (GET_CODE (op) == CONST_INT)
865 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
867 return gpc_reg_operand (op, mode);
870 /* Returns 1 if OP is either a constant integer valid for a DS-field or
871 a non-special register. If a register, it must be in the proper
872 mode unless MODE is VOIDmode. */
875 reg_or_aligned_short_operand (op, mode)
877 enum machine_mode mode;
879 if (gpc_reg_operand (op, mode))
881 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
888 /* Return 1 if the operand is either a register or an integer whose
889 high-order 16 bits are zero. */
892 reg_or_u_short_operand (op, mode)
894 enum machine_mode mode;
896 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
899 /* Return 1 is the operand is either a non-special register or ANY
903 reg_or_cint_operand (op, mode)
905 enum machine_mode mode;
907 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
910 /* Return 1 is the operand is either a non-special register or ANY
911 32-bit signed constant integer. */
914 reg_or_arith_cint_operand (op, mode)
916 enum machine_mode mode;
918 return (gpc_reg_operand (op, mode)
919 || (GET_CODE (op) == CONST_INT
920 #if HOST_BITS_PER_WIDE_INT != 32
921 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
922 < (unsigned HOST_WIDE_INT) 0x100000000ll)
927 /* Return 1 is the operand is either a non-special register or a 32-bit
928 signed constant integer valid for 64-bit addition. */
931 reg_or_add_cint64_operand (op, mode)
933 enum machine_mode mode;
935 return (gpc_reg_operand (op, mode)
936 || (GET_CODE (op) == CONST_INT
937 #if HOST_BITS_PER_WIDE_INT == 32
938 && INTVAL (op) < 0x7fff8000
940 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
946 /* Return 1 is the operand is either a non-special register or a 32-bit
947 signed constant integer valid for 64-bit subtraction. */
950 reg_or_sub_cint64_operand (op, mode)
952 enum machine_mode mode;
954 return (gpc_reg_operand (op, mode)
955 || (GET_CODE (op) == CONST_INT
956 #if HOST_BITS_PER_WIDE_INT == 32
957 && (- INTVAL (op)) < 0x7fff8000
959 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
965 /* Return 1 is the operand is either a non-special register or ANY
966 32-bit unsigned constant integer. */
969 reg_or_logical_cint_operand (op, mode)
971 enum machine_mode mode;
973 if (GET_CODE (op) == CONST_INT)
975 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
977 if (GET_MODE_BITSIZE (mode) <= 32)
984 return ((INTVAL (op) & GET_MODE_MASK (mode)
985 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
987 else if (GET_CODE (op) == CONST_DOUBLE)
989 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
993 return CONST_DOUBLE_HIGH (op) == 0;
996 return gpc_reg_operand (op, mode);
999 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1002 got_operand (op, mode)
1004 enum machine_mode mode ATTRIBUTE_UNUSED;
1006 return (GET_CODE (op) == SYMBOL_REF
1007 || GET_CODE (op) == CONST
1008 || GET_CODE (op) == LABEL_REF);
1011 /* Return 1 if the operand is a simple references that can be loaded via
1012 the GOT (labels involving addition aren't allowed). */
1015 got_no_const_operand (op, mode)
1017 enum machine_mode mode ATTRIBUTE_UNUSED;
1019 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1022 /* Return the number of instructions it takes to form a constant in an
1023 integer register. */
1026 num_insns_constant_wide (value)
1027 HOST_WIDE_INT value;
1029 /* signed constant loadable with {cal|addi} */
1030 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1033 /* constant loadable with {cau|addis} */
1034 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1037 #if HOST_BITS_PER_WIDE_INT == 64
1038 else if (TARGET_POWERPC64)
1040 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1041 HOST_WIDE_INT high = value >> 31;
1043 if (high == 0 || high == -1)
1049 return num_insns_constant_wide (high) + 1;
1051 return (num_insns_constant_wide (high)
1052 + num_insns_constant_wide (low) + 1);
1061 num_insns_constant (op, mode)
1063 enum machine_mode mode;
1065 if (GET_CODE (op) == CONST_INT)
1067 #if HOST_BITS_PER_WIDE_INT == 64
1068 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1069 && mask64_operand (op, mode))
1073 return num_insns_constant_wide (INTVAL (op));
1076 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1081 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1082 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1083 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1086 else if (GET_CODE (op) == CONST_DOUBLE)
1092 int endian = (WORDS_BIG_ENDIAN == 0);
1094 if (mode == VOIDmode || mode == DImode)
1096 high = CONST_DOUBLE_HIGH (op);
1097 low = CONST_DOUBLE_LOW (op);
1101 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1102 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1104 low = l[1 - endian];
1108 return (num_insns_constant_wide (low)
1109 + num_insns_constant_wide (high));
1113 if (high == 0 && low >= 0)
1114 return num_insns_constant_wide (low);
1116 else if (high == -1 && low < 0)
1117 return num_insns_constant_wide (low);
1119 else if (mask64_operand (op, mode))
1123 return num_insns_constant_wide (high) + 1;
1126 return (num_insns_constant_wide (high)
1127 + num_insns_constant_wide (low) + 1);
1135 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1136 register with one instruction per word. We only do this if we can
1137 safely read CONST_DOUBLE_{LOW,HIGH}. */
1140 easy_fp_constant (op, mode)
1142 enum machine_mode mode;
1144 if (GET_CODE (op) != CONST_DOUBLE
1145 || GET_MODE (op) != mode
1146 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1149 /* Consider all constants with -msoft-float to be easy. */
1150 if (TARGET_SOFT_FLOAT && mode != DImode)
1153 /* If we are using V.4 style PIC, consider all constants to be hard. */
1154 if (flag_pic && DEFAULT_ABI == ABI_V4)
1157 #ifdef TARGET_RELOCATABLE
1158 /* Similarly if we are using -mrelocatable, consider all constants
1160 if (TARGET_RELOCATABLE)
1169 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1170 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1172 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1173 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1176 else if (mode == SFmode)
1181 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1182 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1184 return num_insns_constant_wide (l) == 1;
1187 else if (mode == DImode)
1188 return ((TARGET_POWERPC64
1189 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1190 || (num_insns_constant (op, DImode) <= 2));
1192 else if (mode == SImode)
1198 /* Return 1 if the operand is a CONST_INT and can be put into a
1199 register with one instruction. */
1202 easy_vector_constant (op)
1208 if (GET_CODE (op) != CONST_VECTOR)
1211 units = CONST_VECTOR_NUNITS (op);
1213 /* We can generate 0 easily. Look for that. */
1214 for (i = 0; i < units; ++i)
1216 elt = CONST_VECTOR_ELT (op, i);
1218 /* We could probably simplify this by just checking for equality
1219 with CONST0_RTX for the current mode, but let's be safe
1222 if (GET_CODE (elt) == CONST_INT && INTVAL (elt) != 0)
1225 if (GET_CODE (elt) == CONST_DOUBLE
1226 && (CONST_DOUBLE_LOW (elt) != 0
1227 || CONST_DOUBLE_HIGH (elt) != 0))
1231 /* We could probably generate a few other constants trivially, but
1232 gcc doesn't generate them yet. FIXME later. */
1236 /* Return 1 if the operand is the constant 0. This works for scalars
1237 as well as vectors. */
1239 zero_constant (op, mode)
1241 enum machine_mode mode;
1243 return op == CONST0_RTX (mode);
1246 /* Return 1 if the operand is 0.0. */
1248 zero_fp_constant (op, mode)
1250 enum machine_mode mode;
1252 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1255 /* Return 1 if the operand is in volatile memory. Note that during
1256 the RTL generation phase, memory_operand does not return TRUE for
1257 volatile memory references. So this function allows us to
1258 recognize volatile references where its safe. */
1261 volatile_mem_operand (op, mode)
1263 enum machine_mode mode;
1265 if (GET_CODE (op) != MEM)
1268 if (!MEM_VOLATILE_P (op))
1271 if (mode != GET_MODE (op))
1274 if (reload_completed)
1275 return memory_operand (op, mode);
1277 if (reload_in_progress)
1278 return strict_memory_address_p (mode, XEXP (op, 0));
1280 return memory_address_p (mode, XEXP (op, 0));
1283 /* Return 1 if the operand is an offsettable memory operand. */
1286 offsettable_mem_operand (op, mode)
1288 enum machine_mode mode;
1290 return ((GET_CODE (op) == MEM)
1291 && offsettable_address_p (reload_completed || reload_in_progress,
1292 mode, XEXP (op, 0)));
1295 /* Return 1 if the operand is either an easy FP constant (see above) or
1299 mem_or_easy_const_operand (op, mode)
1301 enum machine_mode mode;
1303 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1306 /* Return 1 if the operand is either a non-special register or an item
1307 that can be used as the operand of a `mode' add insn. */
1310 add_operand (op, mode)
1312 enum machine_mode mode;
1314 if (GET_CODE (op) == CONST_INT)
1315 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1316 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1318 return gpc_reg_operand (op, mode);
1321 /* Return 1 if OP is a constant but not a valid add_operand. */
1324 non_add_cint_operand (op, mode)
1326 enum machine_mode mode ATTRIBUTE_UNUSED;
1328 return (GET_CODE (op) == CONST_INT
1329 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1330 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1333 /* Return 1 if the operand is a non-special register or a constant that
1334 can be used as the operand of an OR or XOR insn on the RS/6000. */
1337 logical_operand (op, mode)
1339 enum machine_mode mode;
1341 HOST_WIDE_INT opl, oph;
1343 if (gpc_reg_operand (op, mode))
1346 if (GET_CODE (op) == CONST_INT)
1348 opl = INTVAL (op) & GET_MODE_MASK (mode);
1350 #if HOST_BITS_PER_WIDE_INT <= 32
1351 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1355 else if (GET_CODE (op) == CONST_DOUBLE)
1357 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1360 opl = CONST_DOUBLE_LOW (op);
1361 oph = CONST_DOUBLE_HIGH (op);
1368 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1369 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1372 /* Return 1 if C is a constant that is not a logical operand (as
1373 above), but could be split into one. */
1376 non_logical_cint_operand (op, mode)
1378 enum machine_mode mode;
1380 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1381 && ! logical_operand (op, mode)
1382 && reg_or_logical_cint_operand (op, mode));
1385 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1386 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1387 Reject all ones and all zeros, since these should have been optimized
1388 away and confuse the making of MB and ME. */
1391 mask_operand (op, mode)
1393 enum machine_mode mode ATTRIBUTE_UNUSED;
1395 HOST_WIDE_INT c, lsb;
1397 if (GET_CODE (op) != CONST_INT)
1402 /* Fail in 64-bit mode if the mask wraps around because the upper
1403 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1404 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1407 /* We don't change the number of transitions by inverting,
1408 so make sure we start with the LS bit zero. */
1412 /* Reject all zeros or all ones. */
1416 /* Find the first transition. */
1419 /* Invert to look for a second transition. */
1422 /* Erase first transition. */
1425 /* Find the second transition (if any). */
1428 /* Match if all the bits above are 1's (or c is zero). */
1432 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1433 It is if there are no more than one 1->0 or 0->1 transitions.
1434 Reject all ones and all zeros, since these should have been optimized
1435 away and confuse the making of MB and ME. */
1438 mask64_operand (op, mode)
1440 enum machine_mode mode;
1442 if (GET_CODE (op) == CONST_INT)
1444 HOST_WIDE_INT c, lsb;
1446 /* We don't change the number of transitions by inverting,
1447 so make sure we start with the LS bit zero. */
1452 /* Reject all zeros or all ones. */
1456 /* Find the transition, and check that all bits above are 1's. */
1460 else if (GET_CODE (op) == CONST_DOUBLE
1461 && (mode == VOIDmode || mode == DImode))
1463 HOST_WIDE_INT low, high, lsb;
1465 if (HOST_BITS_PER_WIDE_INT < 64)
1466 high = CONST_DOUBLE_HIGH (op);
1468 low = CONST_DOUBLE_LOW (op);
1471 if (HOST_BITS_PER_WIDE_INT < 64)
1478 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1482 return high == -lsb;
1486 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1492 /* Return 1 if the operand is either a non-special register or a constant
1493 that can be used as the operand of a PowerPC64 logical AND insn. */
1496 and64_operand (op, mode)
1498 enum machine_mode mode;
1500 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1501 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1503 return (logical_operand (op, mode) || mask64_operand (op, mode));
1506 /* Return 1 if the operand is either a non-special register or a
1507 constant that can be used as the operand of an RS/6000 logical AND insn. */
1510 and_operand (op, mode)
1512 enum machine_mode mode;
1514 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1515 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1517 return (logical_operand (op, mode) || mask_operand (op, mode));
1520 /* Return 1 if the operand is a general register or memory operand. */
1523 reg_or_mem_operand (op, mode)
1525 enum machine_mode mode;
1527 return (gpc_reg_operand (op, mode)
1528 || memory_operand (op, mode)
1529 || volatile_mem_operand (op, mode));
1532 /* Return 1 if the operand is a general register or memory operand without
1533 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1537 lwa_operand (op, mode)
1539 enum machine_mode mode;
1543 if (reload_completed && GET_CODE (inner) == SUBREG)
1544 inner = SUBREG_REG (inner);
1546 return gpc_reg_operand (inner, mode)
1547 || (memory_operand (inner, mode)
1548 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1549 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1550 && (GET_CODE (XEXP (inner, 0)) != PLUS
1551 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1552 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1555 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1556 to CALL. This is a SYMBOL_REF or a pseudo-register, which will be
1560 call_operand (op, mode)
1562 enum machine_mode mode;
1564 if (mode != VOIDmode && GET_MODE (op) != mode)
1567 return (GET_CODE (op) == SYMBOL_REF
1568 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1571 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1572 this file and the function is not weakly defined. */
1575 current_file_function_operand (op, mode)
1577 enum machine_mode mode ATTRIBUTE_UNUSED;
1579 return (GET_CODE (op) == SYMBOL_REF
1580 && (SYMBOL_REF_FLAG (op)
1581 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1582 && ! DECL_WEAK (current_function_decl))));
1585 /* Return 1 if this operand is a valid input for a move insn. */
1588 input_operand (op, mode)
1590 enum machine_mode mode;
1592 /* Memory is always valid. */
1593 if (memory_operand (op, mode))
1596 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1597 if (GET_CODE (op) == CONSTANT_P_RTX)
1600 /* For floating-point, easy constants are valid. */
1601 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1603 && easy_fp_constant (op, mode))
1606 /* Allow any integer constant. */
1607 if (GET_MODE_CLASS (mode) == MODE_INT
1608 && (GET_CODE (op) == CONST_INT
1609 || GET_CODE (op) == CONST_DOUBLE))
1612 /* For floating-point or multi-word mode, the only remaining valid type
1614 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1615 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1616 return register_operand (op, mode);
1618 /* The only cases left are integral modes one word or smaller (we
1619 do not get called for MODE_CC values). These can be in any
1621 if (register_operand (op, mode))
1624 /* A SYMBOL_REF referring to the TOC is valid. */
1625 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1628 /* A constant pool expression (relative to the TOC) is valid */
1629 if (TOC_RELATIVE_EXPR_P (op))
1632 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1634 if (DEFAULT_ABI == ABI_V4
1635 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1636 && small_data_operand (op, Pmode))
1642 /* Return 1 for an operand in small memory on V.4/eabi. */
1645 small_data_operand (op, mode)
1646 rtx op ATTRIBUTE_UNUSED;
1647 enum machine_mode mode ATTRIBUTE_UNUSED;
1652 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1655 if (DEFAULT_ABI != ABI_V4)
1658 if (GET_CODE (op) == SYMBOL_REF)
1661 else if (GET_CODE (op) != CONST
1662 || GET_CODE (XEXP (op, 0)) != PLUS
1663 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1664 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1669 rtx sum = XEXP (op, 0);
1670 HOST_WIDE_INT summand;
1672 /* We have to be careful here, because it is the referenced address
1673 that must be 32k from _SDA_BASE_, not just the symbol. */
1674 summand = INTVAL (XEXP (sum, 1));
1675 if (summand < 0 || summand > g_switch_value)
1678 sym_ref = XEXP (sum, 0);
1681 if (*XSTR (sym_ref, 0) != '@')
1692 constant_pool_expr_1 (op, have_sym, have_toc)
1697 switch (GET_CODE(op))
1700 if (CONSTANT_POOL_ADDRESS_P (op))
1702 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1710 else if (! strcmp (XSTR (op, 0), toc_label_name))
1719 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
1720 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
1722 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1731 constant_pool_expr_p (op)
1736 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1740 toc_relative_expr_p (op)
1745 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1748 /* Try machine-dependent ways of modifying an illegitimate address
1749 to be legitimate. If we find one, return the new, valid address.
1750 This is used from only one place: `memory_address' in explow.c.
1752 OLDX is the address as it was before break_out_memory_refs was
1753 called. In some cases it is useful to look at this to decide what
1756 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1758 It is always safe for this function to do nothing. It exists to
1759 recognize opportunities to optimize the output.
1761 On RS/6000, first check for the sum of a register with a constant
1762 integer that is out of range. If so, generate code to add the
1763 constant with the low-order 16 bits masked to the register and force
1764 this result into another register (this can be done with `cau').
1765 Then generate an address of REG+(CONST&0xffff), allowing for the
1766 possibility of bit 16 being a one.
1768 Then check for the sum of a register and something not constant, try to
1769 load the other things into a register and return the sum. */
1771 rs6000_legitimize_address (x, oldx, mode)
1773 rtx oldx ATTRIBUTE_UNUSED;
1774 enum machine_mode mode;
1776 if (GET_CODE (x) == PLUS
1777 && GET_CODE (XEXP (x, 0)) == REG
1778 && GET_CODE (XEXP (x, 1)) == CONST_INT
1779 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1781 HOST_WIDE_INT high_int, low_int;
1783 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
1784 high_int = INTVAL (XEXP (x, 1)) - low_int;
1785 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1786 GEN_INT (high_int)), 0);
1787 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1789 else if (GET_CODE (x) == PLUS
1790 && GET_CODE (XEXP (x, 0)) == REG
1791 && GET_CODE (XEXP (x, 1)) != CONST_INT
1792 && GET_MODE_NUNITS (mode) == 1
1793 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1794 && (TARGET_POWERPC64 || mode != DImode)
1797 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1798 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1800 else if (ALTIVEC_VECTOR_MODE (mode))
1804 /* Make sure both operands are registers. */
1805 if (GET_CODE (x) == PLUS)
1806 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1807 force_reg (Pmode, XEXP (x, 1)));
1809 reg = force_reg (Pmode, x);
1812 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1813 && GET_CODE (x) != CONST_INT
1814 && GET_CODE (x) != CONST_DOUBLE
1816 && GET_MODE_NUNITS (mode) == 1
1817 && (GET_MODE_BITSIZE (mode) <= 32
1818 || (TARGET_HARD_FLOAT && mode == DFmode)))
1820 rtx reg = gen_reg_rtx (Pmode);
1821 emit_insn (gen_elf_high (reg, (x)));
1822 return gen_rtx_LO_SUM (Pmode, reg, (x));
1824 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1826 && GET_CODE (x) != CONST_INT
1827 && GET_CODE (x) != CONST_DOUBLE
1829 && (TARGET_HARD_FLOAT || mode != DFmode)
1833 rtx reg = gen_reg_rtx (Pmode);
1834 emit_insn (gen_macho_high (reg, (x)));
1835 return gen_rtx_LO_SUM (Pmode, reg, (x));
1838 && CONSTANT_POOL_EXPR_P (x)
1839 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1841 return create_TOC_reference (x);
1847 /* The convention appears to be to define this wherever it is used.
1848 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1849 is now used here. */
1850 #ifndef REG_MODE_OK_FOR_BASE_P
1851 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1854 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1855 replace the input X, or the original X if no replacement is called for.
1856 The output parameter *WIN is 1 if the calling macro should goto WIN,
1859 For RS/6000, we wish to handle large displacements off a base
1860 register by splitting the addend across an addiu/addis and the mem insn.
1861 This cuts number of extra insns needed from 3 to 1.
1863 On Darwin, we use this to generate code for floating point constants.
1864 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1865 The Darwin code is inside #if TARGET_MACHO because only then is
1866 machopic_function_base_name() defined. */
1868 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1870 enum machine_mode mode;
1873 int ind_levels ATTRIBUTE_UNUSED;
1876 /* We must recognize output that we have already generated ourselves. */
1877 if (GET_CODE (x) == PLUS
1878 && GET_CODE (XEXP (x, 0)) == PLUS
1879 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1880 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1881 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1883 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1884 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1885 opnum, (enum reload_type)type);
1891 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1892 && GET_CODE (x) == LO_SUM
1893 && GET_CODE (XEXP (x, 0)) == PLUS
1894 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1895 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1896 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1897 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1898 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1899 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1900 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1902 /* Result of previous invocation of this function on Darwin
1903 floating point constant. */
1904 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1905 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1906 opnum, (enum reload_type)type);
1911 if (GET_CODE (x) == PLUS
1912 && GET_CODE (XEXP (x, 0)) == REG
1913 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1914 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1915 && GET_CODE (XEXP (x, 1)) == CONST_INT
1916 && !ALTIVEC_VECTOR_MODE (mode))
1918 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1919 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
1921 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
1923 /* Check for 32-bit overflow. */
1924 if (high + low != val)
1930 /* Reload the high part into a base reg; leave the low part
1931 in the mem directly. */
1933 x = gen_rtx_PLUS (GET_MODE (x),
1934 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
1938 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1939 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1940 opnum, (enum reload_type)type);
1945 if (GET_CODE (x) == SYMBOL_REF
1946 && DEFAULT_ABI == ABI_DARWIN
1947 && !ALTIVEC_VECTOR_MODE (mode)
1950 /* Darwin load of floating point constant. */
1951 rtx offset = gen_rtx (CONST, Pmode,
1952 gen_rtx (MINUS, Pmode, x,
1953 gen_rtx (SYMBOL_REF, Pmode,
1954 machopic_function_base_name ())));
1955 x = gen_rtx (LO_SUM, GET_MODE (x),
1956 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
1957 gen_rtx (HIGH, Pmode, offset)), offset);
1958 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1959 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1960 opnum, (enum reload_type)type);
1966 && CONSTANT_POOL_EXPR_P (x)
1967 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
1969 (x) = create_TOC_reference (x);
1977 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1978 that is a valid memory address for an instruction.
1979 The MODE argument is the machine mode for the MEM expression
1980 that wants to use this address.
1982 On the RS/6000, there are four valid address: a SYMBOL_REF that
1983 refers to a constant pool entry of an address (or the sum of it
1984 plus a constant), a short (16-bit signed) constant plus a register,
1985 the sum of two registers, or a register indirect, possibly with an
1986 auto-increment. For DFmode and DImode with an constant plus register,
1987 we must ensure that both words are addressable or PowerPC64 with offset
1990 For modes spanning multiple registers (DFmode in 32-bit GPRs,
1991 32-bit DImode, TImode), indexed addressing cannot be used because
1992 adjacent memory cells are accessed by adding word-sized offsets
1993 during assembly output. */
1995 rs6000_legitimate_address (mode, x, reg_ok_strict)
1996 enum machine_mode mode;
2000 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2002 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2004 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2006 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2008 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2010 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2012 && GET_CODE (x) == PLUS
2013 && GET_CODE (XEXP (x, 0)) == REG
2014 && XEXP (x, 0) == virtual_stack_vars_rtx
2015 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2017 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2020 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
2021 && (TARGET_POWERPC64 || mode != DImode)
2022 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2024 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2029 /* Try to output insns to set TARGET equal to the constant C if it can
2030 be done in less than N insns. Do all computations in MODE.
2031 Returns the place where the output has been placed if it can be
2032 done and the insns have been emitted. If it would take more than N
2033 insns, zero is returned and no insns and emitted. */
2036 rs6000_emit_set_const (dest, mode, source, n)
2038 enum machine_mode mode;
2039 int n ATTRIBUTE_UNUSED;
2041 HOST_WIDE_INT c0, c1;
2043 if (mode == QImode || mode == HImode || mode == SImode)
2046 dest = gen_reg_rtx (mode);
2047 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2051 if (GET_CODE (source) == CONST_INT)
2053 c0 = INTVAL (source);
2056 else if (GET_CODE (source) == CONST_DOUBLE)
2058 #if HOST_BITS_PER_WIDE_INT >= 64
2059 c0 = CONST_DOUBLE_LOW (source);
2062 c0 = CONST_DOUBLE_LOW (source);
2063 c1 = CONST_DOUBLE_HIGH (source);
2069 return rs6000_emit_set_long_const (dest, c0, c1);
2072 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2073 fall back to a straight forward decomposition. We do this to avoid
2074 exponential run times encountered when looking for longer sequences
2075 with rs6000_emit_set_const. */
2077 rs6000_emit_set_long_const (dest, c1, c2)
2079 HOST_WIDE_INT c1, c2;
2081 if (!TARGET_POWERPC64)
2083 rtx operand1, operand2;
2085 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2087 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2089 emit_move_insn (operand1, GEN_INT (c1));
2090 emit_move_insn (operand2, GEN_INT (c2));
2094 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2097 ud2 = (c1 & 0xffff0000) >> 16;
2098 #if HOST_BITS_PER_WIDE_INT >= 64
2102 ud4 = (c2 & 0xffff0000) >> 16;
2104 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2105 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2108 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2110 emit_move_insn (dest, GEN_INT (ud1));
2113 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2114 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2117 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2120 emit_move_insn (dest, GEN_INT (ud2 << 16));
2122 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2124 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2125 || (ud4 == 0 && ! (ud3 & 0x8000)))
2128 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2131 emit_move_insn (dest, GEN_INT (ud3 << 16));
2134 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2135 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2137 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2142 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2145 emit_move_insn (dest, GEN_INT (ud4 << 16));
2148 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2150 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2152 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2153 GEN_INT (ud2 << 16)));
2155 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2161 /* Emit a move from SOURCE to DEST in mode MODE. */
2163 rs6000_emit_move (dest, source, mode)
2166 enum machine_mode mode;
2170 operands[1] = source;
2172 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2173 if (GET_CODE (operands[1]) == CONST_DOUBLE
2174 && ! FLOAT_MODE_P (mode)
2175 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2177 /* FIXME. This should never happen. */
2178 /* Since it seems that it does, do the safe thing and convert
2181 GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
2183 if (GET_CODE (operands[1]) == CONST_DOUBLE
2184 && ! FLOAT_MODE_P (mode)
2185 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2186 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2187 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2188 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2191 /* Check if GCC is setting up a block move that will end up using FP
2192 registers as temporaries. We must make sure this is acceptable. */
2193 if (GET_CODE (operands[0]) == MEM
2194 && GET_CODE (operands[1]) == MEM
2196 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2197 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2198 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2199 ? 32 : MEM_ALIGN (operands[0])))
2200 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2202 : MEM_ALIGN (operands[1]))))
2203 && ! MEM_VOLATILE_P (operands [0])
2204 && ! MEM_VOLATILE_P (operands [1]))
2206 emit_move_insn (adjust_address (operands[0], SImode, 0),
2207 adjust_address (operands[1], SImode, 0));
2208 emit_move_insn (adjust_address (operands[0], SImode, 4),
2209 adjust_address (operands[1], SImode, 4));
2213 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2214 operands[1] = force_reg (mode, operands[1]);
2216 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2217 && GET_CODE (operands[0]) == MEM)
2221 if (reload_in_progress || reload_completed)
2222 regnum = true_regnum (operands[1]);
2223 else if (GET_CODE (operands[1]) == REG)
2224 regnum = REGNO (operands[1]);
2228 /* If operands[1] is a register, on POWER it may have
2229 double-precision data in it, so truncate it to single
2231 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2234 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2235 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2236 operands[1] = newreg;
2240 /* Handle the case where reload calls us with an invalid address;
2241 and the case of CONSTANT_P_RTX. */
2242 if (!VECTOR_MODE_P (mode)
2243 && (! general_operand (operands[1], mode)
2244 || ! nonimmediate_operand (operands[0], mode)
2245 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2247 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2251 /* FIXME: In the long term, this switch statement should go away
2252 and be replaced by a sequence of tests based on things like
2258 if (CONSTANT_P (operands[1])
2259 && GET_CODE (operands[1]) != CONST_INT)
2260 operands[1] = force_const_mem (mode, operands[1]);
2266 if (CONSTANT_P (operands[1])
2267 && ! easy_fp_constant (operands[1], mode))
2268 operands[1] = force_const_mem (mode, operands[1]);
2275 if (CONSTANT_P (operands[1])
2276 && !easy_vector_constant (operands[1]))
2277 operands[1] = force_const_mem (mode, operands[1]);
2282 /* Use default pattern for address of ELF small data */
2285 && DEFAULT_ABI == ABI_V4
2286 && (GET_CODE (operands[1]) == SYMBOL_REF
2287 || GET_CODE (operands[1]) == CONST)
2288 && small_data_operand (operands[1], mode))
2290 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2294 if (DEFAULT_ABI == ABI_V4
2295 && mode == Pmode && mode == SImode
2296 && flag_pic == 1 && got_operand (operands[1], mode))
2298 emit_insn (gen_movsi_got (operands[0], operands[1]));
2302 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2303 && TARGET_NO_TOC && ! flag_pic
2305 && CONSTANT_P (operands[1])
2306 && GET_CODE (operands[1]) != HIGH
2307 && GET_CODE (operands[1]) != CONST_INT)
2309 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2311 /* If this is a function address on -mcall-aixdesc,
2312 convert it to the address of the descriptor. */
2313 if (DEFAULT_ABI == ABI_AIX
2314 && GET_CODE (operands[1]) == SYMBOL_REF
2315 && XSTR (operands[1], 0)[0] == '.')
2317 const char *name = XSTR (operands[1], 0);
2319 while (*name == '.')
2321 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2322 CONSTANT_POOL_ADDRESS_P (new_ref)
2323 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2324 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2325 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2326 operands[1] = new_ref;
2329 if (DEFAULT_ABI == ABI_DARWIN)
2331 emit_insn (gen_macho_high (target, operands[1]));
2332 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2336 emit_insn (gen_elf_high (target, operands[1]));
2337 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2341 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2342 and we have put it in the TOC, we just need to make a TOC-relative
2345 && GET_CODE (operands[1]) == SYMBOL_REF
2346 && CONSTANT_POOL_EXPR_P (operands[1])
2347 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2348 get_pool_mode (operands[1])))
2350 operands[1] = create_TOC_reference (operands[1]);
2352 else if (mode == Pmode
2353 && CONSTANT_P (operands[1])
2354 && ((GET_CODE (operands[1]) != CONST_INT
2355 && ! easy_fp_constant (operands[1], mode))
2356 || (GET_CODE (operands[1]) == CONST_INT
2357 && num_insns_constant (operands[1], mode) > 2)
2358 || (GET_CODE (operands[0]) == REG
2359 && FP_REGNO_P (REGNO (operands[0]))))
2360 && GET_CODE (operands[1]) != HIGH
2361 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2362 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2364 /* Emit a USE operation so that the constant isn't deleted if
2365 expensive optimizations are turned on because nobody
2366 references it. This should only be done for operands that
2367 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2368 This should not be done for operands that contain LABEL_REFs.
2369 For now, we just handle the obvious case. */
2370 if (GET_CODE (operands[1]) != LABEL_REF)
2371 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2374 /* Darwin uses a special PIC legitimizer. */
2375 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2378 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2380 if (operands[0] != operands[1])
2381 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2386 /* If we are to limit the number of things we put in the TOC and
2387 this is a symbol plus a constant we can add in one insn,
2388 just put the symbol in the TOC and add the constant. Don't do
2389 this if reload is in progress. */
2390 if (GET_CODE (operands[1]) == CONST
2391 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2392 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2393 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2394 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2395 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2396 && ! side_effects_p (operands[0]))
2399 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2400 rtx other = XEXP (XEXP (operands[1], 0), 1);
2402 sym = force_reg (mode, sym);
2404 emit_insn (gen_addsi3 (operands[0], sym, other));
2406 emit_insn (gen_adddi3 (operands[0], sym, other));
2410 operands[1] = force_const_mem (mode, operands[1]);
2413 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2414 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2415 get_pool_constant (XEXP (operands[1], 0)),
2416 get_pool_mode (XEXP (operands[1], 0))))
2419 = gen_rtx_MEM (mode,
2420 create_TOC_reference (XEXP (operands[1], 0)));
2421 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2422 RTX_UNCHANGING_P (operands[1]) = 1;
2428 if (GET_CODE (operands[0]) == MEM
2429 && GET_CODE (XEXP (operands[0], 0)) != REG
2430 && ! reload_in_progress)
2432 = replace_equiv_address (operands[0],
2433 copy_addr_to_reg (XEXP (operands[0], 0)));
2435 if (GET_CODE (operands[1]) == MEM
2436 && GET_CODE (XEXP (operands[1], 0)) != REG
2437 && ! reload_in_progress)
2439 = replace_equiv_address (operands[1],
2440 copy_addr_to_reg (XEXP (operands[1], 0)));
2447 /* Above, we may have called force_const_mem which may have returned
2448 an invalid address. If we can, fix this up; otherwise, reload will
2449 have to deal with it. */
2450 if (GET_CODE (operands[1]) == MEM
2451 && ! memory_address_p (mode, XEXP (operands[1], 0))
2452 && ! reload_in_progress)
2453 operands[1] = adjust_address (operands[1], mode, 0);
2455 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2459 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2460 for a call to a function whose data type is FNTYPE.
2461 For a library call, FNTYPE is 0.
2463 For incoming args we set the number of arguments in the prototype large
2464 so we never return a PARALLEL. */
2467 init_cumulative_args (cum, fntype, libname, incoming)
2468 CUMULATIVE_ARGS *cum;
2470 rtx libname ATTRIBUTE_UNUSED;
2473 static CUMULATIVE_ARGS zero_cumulative;
2475 *cum = zero_cumulative;
2477 cum->fregno = FP_ARG_MIN_REG;
2478 cum->vregno = ALTIVEC_ARG_MIN_REG;
2479 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2480 cum->call_cookie = CALL_NORMAL;
2481 cum->sysv_gregno = GP_ARG_MIN_REG;
2484 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2486 else if (cum->prototype)
2487 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2488 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2489 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2492 cum->nargs_prototype = 0;
2494 cum->orig_nargs = cum->nargs_prototype;
2496 /* Check for longcall's */
2497 if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2498 cum->call_cookie = CALL_LONG;
2500 if (TARGET_DEBUG_ARG)
2502 fprintf (stderr, "\ninit_cumulative_args:");
2505 tree ret_type = TREE_TYPE (fntype);
2506 fprintf (stderr, " ret code = %s,",
2507 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2510 if (cum->call_cookie & CALL_LONG)
2511 fprintf (stderr, " longcall,");
2513 fprintf (stderr, " proto = %d, nargs = %d\n",
2514 cum->prototype, cum->nargs_prototype);
2518 /* If defined, a C expression which determines whether, and in which
2519 direction, to pad out an argument with extra space. The value
2520 should be of type `enum direction': either `upward' to pad above
2521 the argument, `downward' to pad below, or `none' to inhibit
2524 For the AIX ABI structs are always stored left shifted in their
2528 function_arg_padding (mode, type)
2529 enum machine_mode mode;
2532 if (type != 0 && AGGREGATE_TYPE_P (type))
2535 /* This is the default definition. */
2536 return (! BYTES_BIG_ENDIAN
2539 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2540 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2541 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2542 ? downward : upward));
2545 /* If defined, a C expression that gives the alignment boundary, in bits,
2546 of an argument with the specified mode and type. If it is not defined,
2547 PARM_BOUNDARY is used for all arguments.
2549 V.4 wants long longs to be double word aligned. */
2552 function_arg_boundary (mode, type)
2553 enum machine_mode mode;
2554 tree type ATTRIBUTE_UNUSED;
2556 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2558 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2561 return PARM_BOUNDARY;
2564 /* Update the data in CUM to advance over an argument
2565 of mode MODE and data type TYPE.
2566 (TYPE is null for libcalls where that information may not be available.) */
2569 function_arg_advance (cum, mode, type, named)
2570 CUMULATIVE_ARGS *cum;
2571 enum machine_mode mode;
2575 cum->nargs_prototype--;
2577 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2579 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2582 cum->words += RS6000_ARG_SIZE (mode, type);
2584 else if (DEFAULT_ABI == ABI_V4)
2586 if (TARGET_HARD_FLOAT
2587 && (mode == SFmode || mode == DFmode))
2589 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2594 cum->words += cum->words & 1;
2595 cum->words += RS6000_ARG_SIZE (mode, type);
2601 int gregno = cum->sysv_gregno;
2603 /* Aggregates and IEEE quad get passed by reference. */
2604 if ((type && AGGREGATE_TYPE_P (type))
2608 n_words = RS6000_ARG_SIZE (mode, type);
2610 /* Long long is put in odd registers. */
2611 if (n_words == 2 && (gregno & 1) == 0)
2614 /* Long long is not split between registers and stack. */
2615 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2617 /* Long long is aligned on the stack. */
2619 cum->words += cum->words & 1;
2620 cum->words += n_words;
2623 /* Note: continuing to accumulate gregno past when we've started
2624 spilling to the stack indicates the fact that we've started
2625 spilling to the stack to expand_builtin_saveregs. */
2626 cum->sysv_gregno = gregno + n_words;
2629 if (TARGET_DEBUG_ARG)
2631 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2632 cum->words, cum->fregno);
2633 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2634 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2635 fprintf (stderr, "mode = %4s, named = %d\n",
2636 GET_MODE_NAME (mode), named);
2641 int align = (TARGET_32BIT && (cum->words & 1) != 0
2642 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2644 cum->words += align + RS6000_ARG_SIZE (mode, type);
2646 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2649 if (TARGET_DEBUG_ARG)
2651 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2652 cum->words, cum->fregno);
2653 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2654 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2655 fprintf (stderr, "named = %d, align = %d\n", named, align);
2660 /* Determine where to put an argument to a function.
2661 Value is zero to push the argument on the stack,
2662 or a hard register in which to store the argument.
2664 MODE is the argument's machine mode.
2665 TYPE is the data type of the argument (as a tree).
2666 This is null for libcalls where that information may
2668 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2669 the preceding args and about the function being called.
2670 NAMED is nonzero if this argument is a named parameter
2671 (otherwise it is an extra parameter matching an ellipsis).
2673 On RS/6000 the first eight words of non-FP are normally in registers
2674 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2675 Under V.4, the first 8 FP args are in registers.
2677 If this is floating-point and no prototype is specified, we use
2678 both an FP and integer register (or possibly FP reg and stack). Library
2679 functions (when TYPE is zero) always have the proper types for args,
2680 so we can pass the FP value just in one register. emit_library_function
2681 doesn't support PARALLEL anyway. */
2684 function_arg (cum, mode, type, named)
2685 CUMULATIVE_ARGS *cum;
2686 enum machine_mode mode;
2690 enum rs6000_abi abi = DEFAULT_ABI;
2692 /* Return a marker to indicate whether CR1 needs to set or clear the
2693 bit that V.4 uses to say fp args were passed in registers.
2694 Assume that we don't need the marker for software floating point,
2695 or compiler generated library calls. */
2696 if (mode == VOIDmode)
2699 && TARGET_HARD_FLOAT
2700 && cum->nargs_prototype < 0
2701 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2703 return GEN_INT (cum->call_cookie
2704 | ((cum->fregno == FP_ARG_MIN_REG)
2705 ? CALL_V4_SET_FP_ARGS
2706 : CALL_V4_CLEAR_FP_ARGS));
2709 return GEN_INT (cum->call_cookie);
2712 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2714 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2715 return gen_rtx_REG (mode, cum->vregno);
2719 else if (abi == ABI_V4)
2721 if (TARGET_HARD_FLOAT
2722 && (mode == SFmode || mode == DFmode))
2724 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2725 return gen_rtx_REG (mode, cum->fregno);
2732 int gregno = cum->sysv_gregno;
2734 /* Aggregates and IEEE quad get passed by reference. */
2735 if ((type && AGGREGATE_TYPE_P (type))
2739 n_words = RS6000_ARG_SIZE (mode, type);
2741 /* Long long is put in odd registers. */
2742 if (n_words == 2 && (gregno & 1) == 0)
2745 /* Long long is not split between registers and stack. */
2746 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2747 return gen_rtx_REG (mode, gregno);
2754 int align = (TARGET_32BIT && (cum->words & 1) != 0
2755 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2756 int align_words = cum->words + align;
2758 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2761 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2764 || ((cum->nargs_prototype > 0)
2765 /* IBM AIX extended its linkage convention definition always
2766 to require FP args after register save area hole on the
2768 && (DEFAULT_ABI != ABI_AIX
2770 || (align_words < GP_ARG_NUM_REG))))
2771 return gen_rtx_REG (mode, cum->fregno);
2773 return gen_rtx_PARALLEL (mode,
2775 gen_rtx_EXPR_LIST (VOIDmode,
2776 ((align_words >= GP_ARG_NUM_REG)
2779 + RS6000_ARG_SIZE (mode, type)
2781 /* If this is partially on the stack, then
2782 we only include the portion actually
2783 in registers here. */
2784 ? gen_rtx_REG (SImode,
2785 GP_ARG_MIN_REG + align_words)
2786 : gen_rtx_REG (mode,
2787 GP_ARG_MIN_REG + align_words))),
2789 gen_rtx_EXPR_LIST (VOIDmode,
2790 gen_rtx_REG (mode, cum->fregno),
2793 else if (align_words < GP_ARG_NUM_REG)
2794 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2800 /* For an arg passed partly in registers and partly in memory,
2801 this is the number of registers used.
2802 For args passed entirely in registers or entirely in memory, zero. */
2805 function_arg_partial_nregs (cum, mode, type, named)
2806 CUMULATIVE_ARGS *cum;
2807 enum machine_mode mode;
2809 int named ATTRIBUTE_UNUSED;
2811 if (DEFAULT_ABI == ABI_V4)
2814 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2815 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2817 if (cum->nargs_prototype >= 0)
2821 if (cum->words < GP_ARG_NUM_REG
2822 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2824 int ret = GP_ARG_NUM_REG - cum->words;
2825 if (ret && TARGET_DEBUG_ARG)
2826 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2834 /* A C expression that indicates when an argument must be passed by
2835 reference. If nonzero for an argument, a copy of that argument is
2836 made in memory and a pointer to the argument is passed instead of
2837 the argument itself. The pointer is passed in whatever way is
2838 appropriate for passing a pointer to that type.
2840 Under V.4, structures and unions are passed by reference. */
2843 function_arg_pass_by_reference (cum, mode, type, named)
2844 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2845 enum machine_mode mode ATTRIBUTE_UNUSED;
2847 int named ATTRIBUTE_UNUSED;
2849 if (DEFAULT_ABI == ABI_V4
2850 && ((type && AGGREGATE_TYPE_P (type))
2853 if (TARGET_DEBUG_ARG)
2854 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2862 /* Perform any needed actions needed for a function that is receiving a
2863 variable number of arguments.
2867 MODE and TYPE are the mode and type of the current parameter.
2869 PRETEND_SIZE is a variable that should be set to the amount of stack
2870 that must be pushed by the prolog to pretend that our caller pushed
2873 Normally, this macro will push all remaining incoming registers on the
2874 stack and set PRETEND_SIZE to the length of the registers pushed. */
2877 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2878 CUMULATIVE_ARGS *cum;
2879 enum machine_mode mode;
2885 CUMULATIVE_ARGS next_cum;
2886 int reg_size = TARGET_32BIT ? 4 : 8;
2887 rtx save_area = NULL_RTX, mem;
2888 int first_reg_offset, set;
2892 fntype = TREE_TYPE (current_function_decl);
2893 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2894 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2895 != void_type_node));
2897 /* For varargs, we do not want to skip the dummy va_dcl argument.
2898 For stdargs, we do want to skip the last named argument. */
2901 function_arg_advance (&next_cum, mode, type, 1);
2903 if (DEFAULT_ABI == ABI_V4)
2905 /* Indicate to allocate space on the stack for varargs save area. */
2906 /* ??? Does this really have to be located at a magic spot on the
2907 stack, or can we allocate this with assign_stack_local instead. */
2908 cfun->machine->sysv_varargs_p = 1;
2910 save_area = plus_constant (virtual_stack_vars_rtx,
2911 - RS6000_VARARGS_SIZE);
2913 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2917 first_reg_offset = next_cum.words;
2918 save_area = virtual_incoming_args_rtx;
2919 cfun->machine->sysv_varargs_p = 0;
2921 if (MUST_PASS_IN_STACK (mode, type))
2922 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2925 set = get_varargs_alias_set ();
2926 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2928 mem = gen_rtx_MEM (BLKmode,
2929 plus_constant (save_area,
2930 first_reg_offset * reg_size)),
2931 set_mem_alias_set (mem, set);
2932 set_mem_align (mem, BITS_PER_WORD);
2935 (GP_ARG_MIN_REG + first_reg_offset, mem,
2936 GP_ARG_NUM_REG - first_reg_offset,
2937 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2939 /* ??? Does ABI_V4 need this at all? */
2940 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2943 /* Save FP registers if needed. */
2944 if (DEFAULT_ABI == ABI_V4
2945 && TARGET_HARD_FLOAT && ! no_rtl
2946 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2948 int fregno = next_cum.fregno;
2949 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2950 rtx lab = gen_label_rtx ();
2951 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2953 emit_jump_insn (gen_rtx_SET (VOIDmode,
2955 gen_rtx_IF_THEN_ELSE (VOIDmode,
2956 gen_rtx_NE (VOIDmode, cr1,
2958 gen_rtx_LABEL_REF (VOIDmode, lab),
2961 while (fregno <= FP_ARG_V4_MAX_REG)
2963 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2964 set_mem_alias_set (mem, set);
2965 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2974 /* Create the va_list data type. */
2977 rs6000_build_va_list ()
2979 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2981 /* For AIX, prefer 'char *' because that's what the system
2982 header files like. */
2983 if (DEFAULT_ABI != ABI_V4)
2984 return build_pointer_type (char_type_node);
2986 record = make_lang_type (RECORD_TYPE);
2987 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2989 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
2990 unsigned_char_type_node);
2991 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
2992 unsigned_char_type_node);
2993 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
2995 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
2998 DECL_FIELD_CONTEXT (f_gpr) = record;
2999 DECL_FIELD_CONTEXT (f_fpr) = record;
3000 DECL_FIELD_CONTEXT (f_ovf) = record;
3001 DECL_FIELD_CONTEXT (f_sav) = record;
3003 TREE_CHAIN (record) = type_decl;
3004 TYPE_NAME (record) = type_decl;
3005 TYPE_FIELDS (record) = f_gpr;
3006 TREE_CHAIN (f_gpr) = f_fpr;
3007 TREE_CHAIN (f_fpr) = f_ovf;
3008 TREE_CHAIN (f_ovf) = f_sav;
3010 layout_type (record);
3012 /* The correct type is an array type of one element. */
3013 return build_array_type (record, build_index_type (size_zero_node));
3016 /* Implement va_start. */
3019 rs6000_va_start (stdarg_p, valist, nextarg)
3024 HOST_WIDE_INT words, n_gpr, n_fpr;
3025 tree f_gpr, f_fpr, f_ovf, f_sav;
3026 tree gpr, fpr, ovf, sav, t;
3028 /* Only SVR4 needs something special. */
3029 if (DEFAULT_ABI != ABI_V4)
3031 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
3035 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3036 f_fpr = TREE_CHAIN (f_gpr);
3037 f_ovf = TREE_CHAIN (f_fpr);
3038 f_sav = TREE_CHAIN (f_ovf);
3040 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3041 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3042 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3043 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3044 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3046 /* Count number of gp and fp argument registers used. */
3047 words = current_function_args_info.words;
3048 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3049 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3051 if (TARGET_DEBUG_ARG)
3053 fputs ("va_start: words = ", stderr);
3054 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3055 fputs (", n_gpr = ", stderr);
3056 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3057 fputs (", n_fpr = ", stderr);
3058 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3059 putc ('\n', stderr);
3062 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3063 TREE_SIDE_EFFECTS (t) = 1;
3064 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3066 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3067 TREE_SIDE_EFFECTS (t) = 1;
3068 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3070 /* Find the overflow area. */
3071 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3073 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3074 build_int_2 (words * UNITS_PER_WORD, 0));
3075 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3076 TREE_SIDE_EFFECTS (t) = 1;
3077 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3079 /* Find the register save area. */
3080 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3081 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3082 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3083 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3084 TREE_SIDE_EFFECTS (t) = 1;
3085 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3088 /* Implement va_arg. */
3091 rs6000_va_arg (valist, type)
3094 tree f_gpr, f_fpr, f_ovf, f_sav;
3095 tree gpr, fpr, ovf, sav, reg, t, u;
3096 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3097 rtx lab_false, lab_over, addr_rtx, r;
3099 /* For AIX, the rule is that structures are passed left-aligned in
3100 their stack slot. However, GCC does not presently do this:
3101 structures which are the same size as integer types are passed
3102 right-aligned, as if they were in fact integers. This only
3103 matters for structures of size 1 or 2, or 4 when TARGET_64BIT. */
3104 if (DEFAULT_ABI != ABI_V4)
3106 HOST_WIDE_INT align, rounded_size;
3107 enum machine_mode mode;
3110 /* Compute the rounded size of the type. */
3111 align = PARM_BOUNDARY / BITS_PER_UNIT;
3112 rounded_size = (((int_size_in_bytes (type) + align - 1) / align)
3117 mode = TYPE_MODE (type);
3118 if (mode != BLKmode)
3121 adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
3122 if (rounded_size > align)
3125 addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3126 build_int_2 (rounded_size - adj, 0));
3129 addr_rtx = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3130 addr_rtx = copy_to_reg (addr_rtx);
3132 /* Compute new value for AP. */
3133 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3134 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3135 build_int_2 (rounded_size, 0)));
3136 TREE_SIDE_EFFECTS (t) = 1;
3137 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3142 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3143 f_fpr = TREE_CHAIN (f_gpr);
3144 f_ovf = TREE_CHAIN (f_fpr);
3145 f_sav = TREE_CHAIN (f_ovf);
3147 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3148 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3149 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3150 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3151 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3153 size = int_size_in_bytes (type);
3154 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3156 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3158 /* Aggregates and long doubles are passed by reference. */
3164 size = rsize = UNITS_PER_WORD;
3166 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3168 /* FP args go in FP registers, if present. */
3177 /* Otherwise into GP registers. */
3185 /* Pull the value out of the saved registers ... */
3187 lab_false = gen_label_rtx ();
3188 lab_over = gen_label_rtx ();
3189 addr_rtx = gen_reg_rtx (Pmode);
3191 /* Vectors never go in registers. */
3192 if (TREE_CODE (type) != VECTOR_TYPE)
3194 TREE_THIS_VOLATILE (reg) = 1;
3195 emit_cmp_and_jump_insns
3196 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3197 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3200 /* Long long is aligned in the registers. */
3203 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3204 build_int_2 (n_reg - 1, 0));
3205 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3206 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3207 TREE_SIDE_EFFECTS (u) = 1;
3208 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3212 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3216 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3217 build_int_2 (n_reg, 0));
3218 TREE_SIDE_EFFECTS (u) = 1;
3220 u = build1 (CONVERT_EXPR, integer_type_node, u);
3221 TREE_SIDE_EFFECTS (u) = 1;
3223 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3224 TREE_SIDE_EFFECTS (u) = 1;
3226 t = build (PLUS_EXPR, ptr_type_node, t, u);
3227 TREE_SIDE_EFFECTS (t) = 1;
3229 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3231 emit_move_insn (addr_rtx, r);
3233 emit_jump_insn (gen_jump (lab_over));
3237 emit_label (lab_false);
3239 /* ... otherwise out of the overflow area. */
3241 /* Make sure we don't find reg 7 for the next int arg.
3243 All AltiVec vectors go in the overflow area. So in the AltiVec
3244 case we need to get the vectors from the overflow area, but
3245 remember where the GPRs and FPRs are. */
3246 if (n_reg > 1 && TREE_CODE (type) != VECTOR_TYPE)
3248 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3249 TREE_SIDE_EFFECTS (t) = 1;
3250 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3253 /* Care for on-stack alignment if needed. */
3260 /* Vectors are 16 byte aligned. */
3261 if (TREE_CODE (type) == VECTOR_TYPE)
3266 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3267 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3271 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3273 emit_move_insn (addr_rtx, r);
3275 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3276 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3277 TREE_SIDE_EFFECTS (t) = 1;
3278 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3280 emit_label (lab_over);
3284 r = gen_rtx_MEM (Pmode, addr_rtx);
3285 set_mem_alias_set (r, get_varargs_alias_set ());
3286 emit_move_insn (addr_rtx, r);
3294 #define def_builtin(MASK, NAME, TYPE, CODE) \
3296 if ((MASK) & target_flags) \
3297 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3300 struct builtin_description
3302 const unsigned int mask;
3303 const enum insn_code icode;
3304 const char *const name;
3305 const enum rs6000_builtins code;
3308 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3310 static const struct builtin_description bdesc_3arg[] =
3312 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3313 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3314 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3315 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3316 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3317 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3318 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3319 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3320 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3321 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3322 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3323 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3324 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3325 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3326 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3327 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3328 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3329 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3330 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3331 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3332 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3333 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3334 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3337 /* DST operations: void foo (void *, const int, const char). */
3339 static const struct builtin_description bdesc_dst[] =
3341 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3342 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3343 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3344 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3347 /* Simple binary operations: VECc = foo (VECa, VECb). */
3349 static const struct builtin_description bdesc_2arg[] =
3351 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3352 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3353 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3354 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3356 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3357 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3358 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3359 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3360 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3361 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3362 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3363 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3364 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3365 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3366 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3367 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3368 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3369 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3370 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3371 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3372 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3373 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3374 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3375 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3376 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3377 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3378 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3379 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3380 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3381 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3382 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3383 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3384 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3385 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3386 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3387 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3388 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3389 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3390 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3391 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3392 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3393 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3394 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3395 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3396 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3397 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3398 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3399 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3400 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3401 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3402 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3403 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3404 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3405 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3406 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3407 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3408 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3409 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3410 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3411 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3412 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3413 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3415 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3416 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3417 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3418 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3419 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3420 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3421 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3422 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3423 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3424 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3425 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3426 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3427 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3428 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3429 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3430 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3431 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3432 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3433 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3434 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3435 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3436 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3437 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3438 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3439 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3440 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3441 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3442 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3443 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3444 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3445 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3446 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3447 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3448 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3449 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3450 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3451 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3452 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3453 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3454 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3455 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3456 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3457 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3458 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3459 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3460 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3461 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3462 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3463 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3466 /* AltiVec predicates. */
3468 struct builtin_description_predicates
3470 const unsigned int mask;
3471 const enum insn_code icode;
3473 const char *const name;
3474 const enum rs6000_builtins code;
3477 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3479 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3480 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3481 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3482 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3483 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3484 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3485 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3486 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3487 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3488 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3489 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3490 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3491 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3494 /* ABS* opreations. */
3496 static const struct builtin_description bdesc_abs[] =
3498 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
3499 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
3500 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
3501 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
3502 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
3503 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
3504 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
3507 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3510 static const struct builtin_description bdesc_1arg[] =
3512 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3513 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3514 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3515 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3516 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3517 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3518 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3519 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3520 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3521 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3522 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3523 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3524 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3525 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3526 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3527 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3528 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
3532 altivec_expand_unop_builtin (icode, arglist, target)
3533 enum insn_code icode;
3538 tree arg0 = TREE_VALUE (arglist);
3539 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3540 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3541 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3543 /* If we got invalid arguments bail out before generating bad rtl. */
3544 if (arg0 == error_mark_node)
3548 || GET_MODE (target) != tmode
3549 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3550 target = gen_reg_rtx (tmode);
3552 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3553 op0 = copy_to_mode_reg (mode0, op0);
3555 pat = GEN_FCN (icode) (target, op0);
3564 altivec_expand_abs_builtin (icode, arglist, target)
3565 enum insn_code icode;
3569 rtx pat, scratch1, scratch2;
3570 tree arg0 = TREE_VALUE (arglist);
3571 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3572 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3573 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3575 /* If we have invalid arguments, bail out before generating bad rtl. */
3576 if (arg0 == error_mark_node)
3580 || GET_MODE (target) != tmode
3581 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3582 target = gen_reg_rtx (tmode);
3584 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3585 op0 = copy_to_mode_reg (mode0, op0);
3587 scratch1 = gen_reg_rtx (mode0);
3588 scratch2 = gen_reg_rtx (mode0);
3590 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3599 altivec_expand_binop_builtin (icode, arglist, target)
3600 enum insn_code icode;
3605 tree arg0 = TREE_VALUE (arglist);
3606 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3607 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3608 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3609 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3610 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3611 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3613 /* If we got invalid arguments bail out before generating bad rtl. */
3614 if (arg0 == error_mark_node || arg1 == error_mark_node)
3618 || GET_MODE (target) != tmode
3619 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3620 target = gen_reg_rtx (tmode);
3622 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3623 op0 = copy_to_mode_reg (mode0, op0);
3624 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3625 op1 = copy_to_mode_reg (mode1, op1);
3627 pat = GEN_FCN (icode) (target, op0, op1);
3636 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3637 enum insn_code icode;
3643 tree cr6_form = TREE_VALUE (arglist);
3644 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3645 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3646 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3647 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3648 enum machine_mode tmode = SImode;
3649 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3650 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3653 if (TREE_CODE (cr6_form) != INTEGER_CST)
3655 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3659 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3664 /* If we have invalid arguments, bail out before generating bad rtl. */
3665 if (arg0 == error_mark_node || arg1 == error_mark_node)
3669 || GET_MODE (target) != tmode
3670 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3671 target = gen_reg_rtx (tmode);
3673 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3674 op0 = copy_to_mode_reg (mode0, op0);
3675 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3676 op1 = copy_to_mode_reg (mode1, op1);
3678 scratch = gen_reg_rtx (mode0);
3680 pat = GEN_FCN (icode) (scratch, op0, op1,
3681 gen_rtx (SYMBOL_REF, Pmode, opcode));
3686 /* The vec_any* and vec_all* predicates use the same opcodes for two
3687 different operations, but the bits in CR6 will be different
3688 depending on what information we want. So we have to play tricks
3689 with CR6 to get the right bits out.
3691 If you think this is disgusting, look at the specs for the
3692 AltiVec predicates. */
3694 switch (cr6_form_int)
3697 emit_insn (gen_cr6_test_for_zero (target));
3700 emit_insn (gen_cr6_test_for_zero_reverse (target));
3703 emit_insn (gen_cr6_test_for_lt (target));
3706 emit_insn (gen_cr6_test_for_lt_reverse (target));
3709 error ("argument 1 of __builtin_altivec_predicate is out of range");
3717 altivec_expand_stv_builtin (icode, arglist)
3718 enum insn_code icode;
3721 tree arg0 = TREE_VALUE (arglist);
3722 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3723 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3724 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3725 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3726 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3728 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3729 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3730 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3732 /* Invalid arguments. Bail before doing anything stoopid! */
3733 if (arg0 == error_mark_node
3734 || arg1 == error_mark_node
3735 || arg2 == error_mark_node)
3738 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3739 op0 = copy_to_mode_reg (mode2, op0);
3740 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3741 op1 = copy_to_mode_reg (mode0, op1);
3742 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3743 op2 = copy_to_mode_reg (mode1, op2);
3745 pat = GEN_FCN (icode) (op1, op2, op0);
3752 altivec_expand_ternop_builtin (icode, arglist, target)
3753 enum insn_code icode;
3758 tree arg0 = TREE_VALUE (arglist);
3759 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3760 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3761 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3762 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3763 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3764 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3765 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3766 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3767 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3769 /* If we got invalid arguments bail out before generating bad rtl. */
3770 if (arg0 == error_mark_node
3771 || arg1 == error_mark_node
3772 || arg2 == error_mark_node)
3776 || GET_MODE (target) != tmode
3777 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3778 target = gen_reg_rtx (tmode);
3780 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3781 op0 = copy_to_mode_reg (mode0, op0);
3782 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3783 op1 = copy_to_mode_reg (mode1, op1);
3784 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3785 op2 = copy_to_mode_reg (mode2, op2);
3787 pat = GEN_FCN (icode) (target, op0, op1, op2);
3795 altivec_expand_builtin (exp, target)
3799 struct builtin_description *d;
3800 struct builtin_description_predicates *dp;
3802 enum insn_code icode;
3803 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3804 tree arglist = TREE_OPERAND (exp, 1);
3805 tree arg0, arg1, arg2;
3806 rtx op0, op1, op2, pat;
3807 enum machine_mode tmode, mode0, mode1, mode2;
3808 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3812 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3813 icode = CODE_FOR_altivec_lvx_16qi;
3814 arg0 = TREE_VALUE (arglist);
3815 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3816 tmode = insn_data[icode].operand[0].mode;
3817 mode0 = insn_data[icode].operand[1].mode;
3820 || GET_MODE (target) != tmode
3821 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3822 target = gen_reg_rtx (tmode);
3824 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3825 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3827 pat = GEN_FCN (icode) (target, op0);
3833 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3834 icode = CODE_FOR_altivec_lvx_8hi;
3835 arg0 = TREE_VALUE (arglist);
3836 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3837 tmode = insn_data[icode].operand[0].mode;
3838 mode0 = insn_data[icode].operand[1].mode;
3841 || GET_MODE (target) != tmode
3842 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3843 target = gen_reg_rtx (tmode);
3845 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3846 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3848 pat = GEN_FCN (icode) (target, op0);
3854 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3855 icode = CODE_FOR_altivec_lvx_4si;
3856 arg0 = TREE_VALUE (arglist);
3857 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3858 tmode = insn_data[icode].operand[0].mode;
3859 mode0 = insn_data[icode].operand[1].mode;
3862 || GET_MODE (target) != tmode
3863 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3864 target = gen_reg_rtx (tmode);
3866 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3867 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3869 pat = GEN_FCN (icode) (target, op0);
3875 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3876 icode = CODE_FOR_altivec_lvx_4sf;
3877 arg0 = TREE_VALUE (arglist);
3878 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3879 tmode = insn_data[icode].operand[0].mode;
3880 mode0 = insn_data[icode].operand[1].mode;
3883 || GET_MODE (target) != tmode
3884 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3885 target = gen_reg_rtx (tmode);
3887 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3888 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3890 pat = GEN_FCN (icode) (target, op0);
3896 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3897 icode = CODE_FOR_altivec_stvx_16qi;
3898 arg0 = TREE_VALUE (arglist);
3899 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3900 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3901 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3902 mode0 = insn_data[icode].operand[0].mode;
3903 mode1 = insn_data[icode].operand[1].mode;
3905 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3906 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3907 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3908 op1 = copy_to_mode_reg (mode1, op1);
3910 pat = GEN_FCN (icode) (op0, op1);
3915 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3916 icode = CODE_FOR_altivec_stvx_8hi;
3917 arg0 = TREE_VALUE (arglist);
3918 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3919 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3920 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3921 mode0 = insn_data[icode].operand[0].mode;
3922 mode1 = insn_data[icode].operand[1].mode;
3924 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3925 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3926 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3927 op1 = copy_to_mode_reg (mode1, op1);
3929 pat = GEN_FCN (icode) (op0, op1);
3934 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3935 icode = CODE_FOR_altivec_stvx_4si;
3936 arg0 = TREE_VALUE (arglist);
3937 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3938 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3939 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3940 mode0 = insn_data[icode].operand[0].mode;
3941 mode1 = insn_data[icode].operand[1].mode;
3943 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3944 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3945 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3946 op1 = copy_to_mode_reg (mode1, op1);
3948 pat = GEN_FCN (icode) (op0, op1);
3953 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3954 icode = CODE_FOR_altivec_stvx_4sf;
3955 arg0 = TREE_VALUE (arglist);
3956 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3957 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3958 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3959 mode0 = insn_data[icode].operand[0].mode;
3960 mode1 = insn_data[icode].operand[1].mode;
3962 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3963 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3964 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3965 op1 = copy_to_mode_reg (mode1, op1);
3967 pat = GEN_FCN (icode) (op0, op1);
3972 case ALTIVEC_BUILTIN_STVX:
3973 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3974 case ALTIVEC_BUILTIN_STVEBX:
3975 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3976 case ALTIVEC_BUILTIN_STVEHX:
3977 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3978 case ALTIVEC_BUILTIN_STVEWX:
3979 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3980 case ALTIVEC_BUILTIN_STVXL:
3981 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3983 case ALTIVEC_BUILTIN_MFVSCR:
3984 icode = CODE_FOR_altivec_mfvscr;
3985 tmode = insn_data[icode].operand[0].mode;
3988 || GET_MODE (target) != tmode
3989 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3990 target = gen_reg_rtx (tmode);
3992 pat = GEN_FCN (icode) (target);
3998 case ALTIVEC_BUILTIN_MTVSCR:
3999 icode = CODE_FOR_altivec_mtvscr;
4000 arg0 = TREE_VALUE (arglist);
4001 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4002 mode0 = insn_data[icode].operand[0].mode;
4004 /* If we got invalid arguments bail out before generating bad rtl. */
4005 if (arg0 == error_mark_node)
4008 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4009 op0 = copy_to_mode_reg (mode0, op0);
4011 pat = GEN_FCN (icode) (op0);
4016 case ALTIVEC_BUILTIN_DSSALL:
4017 emit_insn (gen_altivec_dssall ());
4020 case ALTIVEC_BUILTIN_DSS:
4021 icode = CODE_FOR_altivec_dss;
4022 arg0 = TREE_VALUE (arglist);
4023 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4024 mode0 = insn_data[icode].operand[0].mode;
4026 /* If we got invalid arguments bail out before generating bad rtl. */
4027 if (arg0 == error_mark_node)
4030 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4031 op0 = copy_to_mode_reg (mode0, op0);
4033 emit_insn (gen_altivec_dss (op0));
4037 /* Handle DST variants. */
4038 d = (struct builtin_description *) bdesc_dst;
4039 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4040 if (d->code == fcode)
4042 arg0 = TREE_VALUE (arglist);
4043 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4044 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4045 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4046 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4047 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4048 mode0 = insn_data[d->icode].operand[0].mode;
4049 mode1 = insn_data[d->icode].operand[1].mode;
4050 mode2 = insn_data[d->icode].operand[2].mode;
4052 /* Invalid arguments, bail out before generating bad rtl. */
4053 if (arg0 == error_mark_node
4054 || arg1 == error_mark_node
4055 || arg2 == error_mark_node)
4058 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4059 op0 = copy_to_mode_reg (mode0, op0);
4060 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4061 op1 = copy_to_mode_reg (mode1, op1);
4063 if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
4065 error ("argument 3 of `%s' must be a 2-bit literal", d->name);
4069 pat = GEN_FCN (d->icode) (op0, op1, op2);
4076 /* Expand abs* operations. */
4077 d = (struct builtin_description *) bdesc_abs;
4078 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4079 if (d->code == fcode)
4080 return altivec_expand_abs_builtin (d->icode, arglist, target);
4082 /* Handle simple unary operations. */
4083 d = (struct builtin_description *) bdesc_1arg;
4084 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4085 if (d->code == fcode)
4086 return altivec_expand_unop_builtin (d->icode, arglist, target);
4088 /* Handle simple binary operations. */
4089 d = (struct builtin_description *) bdesc_2arg;
4090 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4091 if (d->code == fcode)
4092 return altivec_expand_binop_builtin (d->icode, arglist, target);
4094 /* Expand the AltiVec predicates. */
4095 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4096 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4097 if (dp->code == fcode)
4098 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4100 /* LV* are funky. We initialized them differently. */
4103 case ALTIVEC_BUILTIN_LVSL:
4104 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4106 case ALTIVEC_BUILTIN_LVSR:
4107 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4109 case ALTIVEC_BUILTIN_LVEBX:
4110 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4112 case ALTIVEC_BUILTIN_LVEHX:
4113 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4115 case ALTIVEC_BUILTIN_LVEWX:
4116 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4118 case ALTIVEC_BUILTIN_LVXL:
4119 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4121 case ALTIVEC_BUILTIN_LVX:
4122 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
4129 /* Handle simple ternary operations. */
4130 d = (struct builtin_description *) bdesc_3arg;
4131 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4132 if (d->code == fcode)
4133 return altivec_expand_ternop_builtin (d->icode, arglist, target);
4139 /* Expand an expression EXP that calls a built-in function,
4140 with result going to TARGET if that's convenient
4141 (and in mode MODE if that's convenient).
4142 SUBTARGET may be used as the target for computing one of EXP's operands.
4143 IGNORE is nonzero if the value is to be ignored. */
4146 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4149 rtx subtarget ATTRIBUTE_UNUSED;
4150 enum machine_mode mode ATTRIBUTE_UNUSED;
4151 int ignore ATTRIBUTE_UNUSED;
4154 return altivec_expand_builtin (exp, target);
4160 rs6000_init_builtins ()
4163 altivec_init_builtins ();
4167 altivec_init_builtins (void)
4169 struct builtin_description *d;
4170 struct builtin_description_predicates *dp;
4173 tree endlink = void_list_node;
4175 tree pint_type_node = build_pointer_type (integer_type_node);
4176 tree pvoid_type_node = build_pointer_type (void_type_node);
4177 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4178 tree pchar_type_node = build_pointer_type (char_type_node);
4179 tree pfloat_type_node = build_pointer_type (float_type_node);
4181 tree v4sf_ftype_v4sf_v4sf_v16qi
4182 = build_function_type (V4SF_type_node,
4183 tree_cons (NULL_TREE, V4SF_type_node,
4184 tree_cons (NULL_TREE, V4SF_type_node,
4185 tree_cons (NULL_TREE,
4188 tree v4si_ftype_v4si_v4si_v16qi
4189 = build_function_type (V4SI_type_node,
4190 tree_cons (NULL_TREE, V4SI_type_node,
4191 tree_cons (NULL_TREE, V4SI_type_node,
4192 tree_cons (NULL_TREE,
4195 tree v8hi_ftype_v8hi_v8hi_v16qi
4196 = build_function_type (V8HI_type_node,
4197 tree_cons (NULL_TREE, V8HI_type_node,
4198 tree_cons (NULL_TREE, V8HI_type_node,
4199 tree_cons (NULL_TREE,
4202 tree v16qi_ftype_v16qi_v16qi_v16qi
4203 = build_function_type (V16QI_type_node,
4204 tree_cons (NULL_TREE, V16QI_type_node,
4205 tree_cons (NULL_TREE, V16QI_type_node,
4206 tree_cons (NULL_TREE,
4210 /* V4SI foo (char). */
4211 tree v4si_ftype_char
4212 = build_function_type (V4SI_type_node,
4213 tree_cons (NULL_TREE, char_type_node, endlink));
4215 /* V8HI foo (char). */
4216 tree v8hi_ftype_char
4217 = build_function_type (V8HI_type_node,
4218 tree_cons (NULL_TREE, char_type_node, endlink));
4220 /* V16QI foo (char). */
4221 tree v16qi_ftype_char
4222 = build_function_type (V16QI_type_node,
4223 tree_cons (NULL_TREE, char_type_node, endlink));
4224 /* V4SF foo (V4SF). */
4225 tree v4sf_ftype_v4sf
4226 = build_function_type (V4SF_type_node,
4227 tree_cons (NULL_TREE, V4SF_type_node, endlink));
4229 /* V4SI foo (int *). */
4230 tree v4si_ftype_pint
4231 = build_function_type (V4SI_type_node,
4232 tree_cons (NULL_TREE, pint_type_node, endlink));
4233 /* V8HI foo (short *). */
4234 tree v8hi_ftype_pshort
4235 = build_function_type (V8HI_type_node,
4236 tree_cons (NULL_TREE, pshort_type_node, endlink));
4237 /* V16QI foo (char *). */
4238 tree v16qi_ftype_pchar
4239 = build_function_type (V16QI_type_node,
4240 tree_cons (NULL_TREE, pchar_type_node, endlink));
4241 /* V4SF foo (float *). */
4242 tree v4sf_ftype_pfloat
4243 = build_function_type (V4SF_type_node,
4244 tree_cons (NULL_TREE, pfloat_type_node, endlink));
4246 /* V8HI foo (V16QI). */
4247 tree v8hi_ftype_v16qi
4248 = build_function_type (V8HI_type_node,
4249 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4251 /* void foo (void *, int, char/literal). */
4252 tree void_ftype_pvoid_int_char
4253 = build_function_type (void_type_node,
4254 tree_cons (NULL_TREE, pvoid_type_node,
4255 tree_cons (NULL_TREE, integer_type_node,
4256 tree_cons (NULL_TREE,
4260 /* void foo (int *, V4SI). */
4261 tree void_ftype_pint_v4si
4262 = build_function_type (void_type_node,
4263 tree_cons (NULL_TREE, pint_type_node,
4264 tree_cons (NULL_TREE, V4SI_type_node,
4266 /* void foo (short *, V8HI). */
4267 tree void_ftype_pshort_v8hi
4268 = build_function_type (void_type_node,
4269 tree_cons (NULL_TREE, pshort_type_node,
4270 tree_cons (NULL_TREE, V8HI_type_node,
4272 /* void foo (char *, V16QI). */
4273 tree void_ftype_pchar_v16qi
4274 = build_function_type (void_type_node,
4275 tree_cons (NULL_TREE, pchar_type_node,
4276 tree_cons (NULL_TREE, V16QI_type_node,
4278 /* void foo (float *, V4SF). */
4279 tree void_ftype_pfloat_v4sf
4280 = build_function_type (void_type_node,
4281 tree_cons (NULL_TREE, pfloat_type_node,
4282 tree_cons (NULL_TREE, V4SF_type_node,
4285 /* void foo (V4SI). */
4286 tree void_ftype_v4si
4287 = build_function_type (void_type_node,
4288 tree_cons (NULL_TREE, V4SI_type_node,
4291 /* void foo (vint, int, void *). */
4292 tree void_ftype_v4si_int_pvoid
4293 = build_function_type (void_type_node,
4294 tree_cons (NULL_TREE, V4SI_type_node,
4295 tree_cons (NULL_TREE, integer_type_node,
4296 tree_cons (NULL_TREE,
4300 /* void foo (vchar, int, void *). */
4301 tree void_ftype_v16qi_int_pvoid
4302 = build_function_type (void_type_node,
4303 tree_cons (NULL_TREE, V16QI_type_node,
4304 tree_cons (NULL_TREE, integer_type_node,
4305 tree_cons (NULL_TREE,
4309 /* void foo (vshort, int, void *). */
4310 tree void_ftype_v8hi_int_pvoid
4311 = build_function_type (void_type_node,
4312 tree_cons (NULL_TREE, V8HI_type_node,
4313 tree_cons (NULL_TREE, integer_type_node,
4314 tree_cons (NULL_TREE,
4318 /* void foo (char). */
4320 = build_function_type (void_type_node,
4321 tree_cons (NULL_TREE, char_type_node,
4324 /* void foo (void). */
4325 tree void_ftype_void
4326 = build_function_type (void_type_node, void_list_node);
4328 /* vshort foo (void). */
4329 tree v8hi_ftype_void
4330 = build_function_type (V8HI_type_node, void_list_node);
4332 tree v4si_ftype_v4si_v4si
4333 = build_function_type (V4SI_type_node,
4334 tree_cons (NULL_TREE, V4SI_type_node,
4335 tree_cons (NULL_TREE, V4SI_type_node,
4338 /* These are for the unsigned 5 bit literals. */
4340 tree v4sf_ftype_v4si_char
4341 = build_function_type (V4SF_type_node,
4342 tree_cons (NULL_TREE, V4SI_type_node,
4343 tree_cons (NULL_TREE, char_type_node,
4345 tree v4si_ftype_v4sf_char
4346 = build_function_type (V4SI_type_node,
4347 tree_cons (NULL_TREE, V4SF_type_node,
4348 tree_cons (NULL_TREE, char_type_node,
4350 tree v4si_ftype_v4si_char
4351 = build_function_type (V4SI_type_node,
4352 tree_cons (NULL_TREE, V4SI_type_node,
4353 tree_cons (NULL_TREE, char_type_node,
4355 tree v8hi_ftype_v8hi_char
4356 = build_function_type (V8HI_type_node,
4357 tree_cons (NULL_TREE, V8HI_type_node,
4358 tree_cons (NULL_TREE, char_type_node,
4360 tree v16qi_ftype_v16qi_char
4361 = build_function_type (V16QI_type_node,
4362 tree_cons (NULL_TREE, V16QI_type_node,
4363 tree_cons (NULL_TREE, char_type_node,
4366 /* These are for the unsigned 4 bit literals. */
4368 tree v16qi_ftype_v16qi_v16qi_char
4369 = build_function_type (V16QI_type_node,
4370 tree_cons (NULL_TREE, V16QI_type_node,
4371 tree_cons (NULL_TREE, V16QI_type_node,
4372 tree_cons (NULL_TREE,
4376 tree v8hi_ftype_v8hi_v8hi_char
4377 = build_function_type (V8HI_type_node,
4378 tree_cons (NULL_TREE, V8HI_type_node,
4379 tree_cons (NULL_TREE, V8HI_type_node,
4380 tree_cons (NULL_TREE,
4384 tree v4si_ftype_v4si_v4si_char
4385 = build_function_type (V4SI_type_node,
4386 tree_cons (NULL_TREE, V4SI_type_node,
4387 tree_cons (NULL_TREE, V4SI_type_node,
4388 tree_cons (NULL_TREE,
4392 tree v4sf_ftype_v4sf_v4sf_char
4393 = build_function_type (V4SF_type_node,
4394 tree_cons (NULL_TREE, V4SF_type_node,
4395 tree_cons (NULL_TREE, V4SF_type_node,
4396 tree_cons (NULL_TREE,
4400 /* End of 4 bit literals. */
4402 tree v4sf_ftype_v4sf_v4sf
4403 = build_function_type (V4SF_type_node,
4404 tree_cons (NULL_TREE, V4SF_type_node,
4405 tree_cons (NULL_TREE, V4SF_type_node,
4407 tree v4sf_ftype_v4sf_v4sf_v4si
4408 = build_function_type (V4SF_type_node,
4409 tree_cons (NULL_TREE, V4SF_type_node,
4410 tree_cons (NULL_TREE, V4SF_type_node,
4411 tree_cons (NULL_TREE,
4414 tree v4sf_ftype_v4sf_v4sf_v4sf
4415 = build_function_type (V4SF_type_node,
4416 tree_cons (NULL_TREE, V4SF_type_node,
4417 tree_cons (NULL_TREE, V4SF_type_node,
4418 tree_cons (NULL_TREE,
4421 tree v4si_ftype_v4si_v4si_v4si
4422 = build_function_type (V4SI_type_node,
4423 tree_cons (NULL_TREE, V4SI_type_node,
4424 tree_cons (NULL_TREE, V4SI_type_node,
4425 tree_cons (NULL_TREE,
4429 tree v8hi_ftype_v8hi_v8hi
4430 = build_function_type (V8HI_type_node,
4431 tree_cons (NULL_TREE, V8HI_type_node,
4432 tree_cons (NULL_TREE, V8HI_type_node,
4434 tree v8hi_ftype_v8hi_v8hi_v8hi
4435 = build_function_type (V8HI_type_node,
4436 tree_cons (NULL_TREE, V8HI_type_node,
4437 tree_cons (NULL_TREE, V8HI_type_node,
4438 tree_cons (NULL_TREE,
4441 tree v4si_ftype_v8hi_v8hi_v4si
4442 = build_function_type (V4SI_type_node,
4443 tree_cons (NULL_TREE, V8HI_type_node,
4444 tree_cons (NULL_TREE, V8HI_type_node,
4445 tree_cons (NULL_TREE,
4448 tree v4si_ftype_v16qi_v16qi_v4si
4449 = build_function_type (V4SI_type_node,
4450 tree_cons (NULL_TREE, V16QI_type_node,
4451 tree_cons (NULL_TREE, V16QI_type_node,
4452 tree_cons (NULL_TREE,
4456 tree v16qi_ftype_v16qi_v16qi
4457 = build_function_type (V16QI_type_node,
4458 tree_cons (NULL_TREE, V16QI_type_node,
4459 tree_cons (NULL_TREE, V16QI_type_node,
4462 tree v4si_ftype_v4sf_v4sf
4463 = build_function_type (V4SI_type_node,
4464 tree_cons (NULL_TREE, V4SF_type_node,
4465 tree_cons (NULL_TREE, V4SF_type_node,
4468 tree v4si_ftype_v4si
4469 = build_function_type (V4SI_type_node,
4470 tree_cons (NULL_TREE, V4SI_type_node, endlink));
4472 tree v8hi_ftype_v8hi
4473 = build_function_type (V8HI_type_node,
4474 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4476 tree v16qi_ftype_v16qi
4477 = build_function_type (V16QI_type_node,
4478 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4480 tree v8hi_ftype_v16qi_v16qi
4481 = build_function_type (V8HI_type_node,
4482 tree_cons (NULL_TREE, V16QI_type_node,
4483 tree_cons (NULL_TREE, V16QI_type_node,
4486 tree v4si_ftype_v8hi_v8hi
4487 = build_function_type (V4SI_type_node,
4488 tree_cons (NULL_TREE, V8HI_type_node,
4489 tree_cons (NULL_TREE, V8HI_type_node,
4492 tree v8hi_ftype_v4si_v4si
4493 = build_function_type (V8HI_type_node,
4494 tree_cons (NULL_TREE, V4SI_type_node,
4495 tree_cons (NULL_TREE, V4SI_type_node,
4498 tree v16qi_ftype_v8hi_v8hi
4499 = build_function_type (V16QI_type_node,
4500 tree_cons (NULL_TREE, V8HI_type_node,
4501 tree_cons (NULL_TREE, V8HI_type_node,
4504 tree v4si_ftype_v16qi_v4si
4505 = build_function_type (V4SI_type_node,
4506 tree_cons (NULL_TREE, V16QI_type_node,
4507 tree_cons (NULL_TREE, V4SI_type_node,
4510 tree v4si_ftype_v16qi_v16qi
4511 = build_function_type (V4SI_type_node,
4512 tree_cons (NULL_TREE, V16QI_type_node,
4513 tree_cons (NULL_TREE, V16QI_type_node,
4516 tree v4si_ftype_v8hi_v4si
4517 = build_function_type (V4SI_type_node,
4518 tree_cons (NULL_TREE, V8HI_type_node,
4519 tree_cons (NULL_TREE, V4SI_type_node,
4522 tree v4si_ftype_v8hi
4523 = build_function_type (V4SI_type_node,
4524 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4526 tree int_ftype_v4si_v4si
4527 = build_function_type (integer_type_node,
4528 tree_cons (NULL_TREE, V4SI_type_node,
4529 tree_cons (NULL_TREE, V4SI_type_node,
4532 tree int_ftype_v4sf_v4sf
4533 = build_function_type (integer_type_node,
4534 tree_cons (NULL_TREE, V4SF_type_node,
4535 tree_cons (NULL_TREE, V4SF_type_node,
4538 tree int_ftype_v16qi_v16qi
4539 = build_function_type (integer_type_node,
4540 tree_cons (NULL_TREE, V16QI_type_node,
4541 tree_cons (NULL_TREE, V16QI_type_node,
4544 tree int_ftype_int_v4si_v4si
4545 = build_function_type
4547 tree_cons (NULL_TREE, integer_type_node,
4548 tree_cons (NULL_TREE, V4SI_type_node,
4549 tree_cons (NULL_TREE, V4SI_type_node,
4552 tree int_ftype_int_v4sf_v4sf
4553 = build_function_type
4555 tree_cons (NULL_TREE, integer_type_node,
4556 tree_cons (NULL_TREE, V4SF_type_node,
4557 tree_cons (NULL_TREE, V4SF_type_node,
4560 tree int_ftype_int_v8hi_v8hi
4561 = build_function_type
4563 tree_cons (NULL_TREE, integer_type_node,
4564 tree_cons (NULL_TREE, V8HI_type_node,
4565 tree_cons (NULL_TREE, V8HI_type_node,
4568 tree int_ftype_int_v16qi_v16qi
4569 = build_function_type
4571 tree_cons (NULL_TREE, integer_type_node,
4572 tree_cons (NULL_TREE, V16QI_type_node,
4573 tree_cons (NULL_TREE, V16QI_type_node,
4576 tree v16qi_ftype_int_pvoid
4577 = build_function_type (V16QI_type_node,
4578 tree_cons (NULL_TREE, integer_type_node,
4579 tree_cons (NULL_TREE, pvoid_type_node,
4582 tree v4si_ftype_int_pvoid
4583 = build_function_type (V4SI_type_node,
4584 tree_cons (NULL_TREE, integer_type_node,
4585 tree_cons (NULL_TREE, pvoid_type_node,
4588 tree v8hi_ftype_int_pvoid
4589 = build_function_type (V8HI_type_node,
4590 tree_cons (NULL_TREE, integer_type_node,
4591 tree_cons (NULL_TREE, pvoid_type_node,
4594 tree int_ftype_v8hi_v8hi
4595 = build_function_type (integer_type_node,
4596 tree_cons (NULL_TREE, V8HI_type_node,
4597 tree_cons (NULL_TREE, V8HI_type_node,
4600 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4601 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4602 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4603 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4604 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4605 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4606 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4607 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4608 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4609 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4610 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4611 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4612 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4613 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4614 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4615 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4616 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4617 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4618 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4619 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4620 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4621 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4622 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4623 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4625 /* Add the simple ternary operators. */
4626 d = (struct builtin_description *) bdesc_3arg;
4627 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4630 enum machine_mode mode0, mode1, mode2, mode3;
4636 mode0 = insn_data[d->icode].operand[0].mode;
4637 mode1 = insn_data[d->icode].operand[1].mode;
4638 mode2 = insn_data[d->icode].operand[2].mode;
4639 mode3 = insn_data[d->icode].operand[3].mode;
4641 /* When all four are of the same mode. */
4642 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4647 type = v4si_ftype_v4si_v4si_v4si;
4650 type = v4sf_ftype_v4sf_v4sf_v4sf;
4653 type = v8hi_ftype_v8hi_v8hi_v8hi;
4656 type = v16qi_ftype_v16qi_v16qi_v16qi;
4662 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4667 type = v4si_ftype_v4si_v4si_v16qi;
4670 type = v4sf_ftype_v4sf_v4sf_v16qi;
4673 type = v8hi_ftype_v8hi_v8hi_v16qi;
4676 type = v16qi_ftype_v16qi_v16qi_v16qi;
4682 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4683 && mode3 == V4SImode)
4684 type = v4si_ftype_v16qi_v16qi_v4si;
4685 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4686 && mode3 == V4SImode)
4687 type = v4si_ftype_v8hi_v8hi_v4si;
4688 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4689 && mode3 == V4SImode)
4690 type = v4sf_ftype_v4sf_v4sf_v4si;
4692 /* vchar, vchar, vchar, 4 bit literal. */
4693 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4695 type = v16qi_ftype_v16qi_v16qi_char;
4697 /* vshort, vshort, vshort, 4 bit literal. */
4698 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4700 type = v8hi_ftype_v8hi_v8hi_char;
4702 /* vint, vint, vint, 4 bit literal. */
4703 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4705 type = v4si_ftype_v4si_v4si_char;
4707 /* vfloat, vfloat, vfloat, 4 bit literal. */
4708 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4710 type = v4sf_ftype_v4sf_v4sf_char;
4715 def_builtin (d->mask, d->name, type, d->code);
4718 /* Add the DST variants. */
4719 d = (struct builtin_description *) bdesc_dst;
4720 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4721 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4723 /* Initialize the predicates. */
4724 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4725 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4727 enum machine_mode mode1;
4730 mode1 = insn_data[dp->icode].operand[1].mode;
4735 type = int_ftype_int_v4si_v4si;
4738 type = int_ftype_int_v8hi_v8hi;
4741 type = int_ftype_int_v16qi_v16qi;
4744 type = int_ftype_int_v4sf_v4sf;
4750 def_builtin (dp->mask, dp->name, type, dp->code);
4753 /* Add the simple binary operators. */
4754 d = (struct builtin_description *) bdesc_2arg;
4755 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4757 enum machine_mode mode0, mode1, mode2;
4763 mode0 = insn_data[d->icode].operand[0].mode;
4764 mode1 = insn_data[d->icode].operand[1].mode;
4765 mode2 = insn_data[d->icode].operand[2].mode;
4767 /* When all three operands are of the same mode. */
4768 if (mode0 == mode1 && mode1 == mode2)
4773 type = v4sf_ftype_v4sf_v4sf;
4776 type = v4si_ftype_v4si_v4si;
4779 type = v16qi_ftype_v16qi_v16qi;
4782 type = v8hi_ftype_v8hi_v8hi;
4789 /* A few other combos we really don't want to do manually. */
4791 /* vint, vfloat, vfloat. */
4792 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4793 type = v4si_ftype_v4sf_v4sf;
4795 /* vshort, vchar, vchar. */
4796 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4797 type = v8hi_ftype_v16qi_v16qi;
4799 /* vint, vshort, vshort. */
4800 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4801 type = v4si_ftype_v8hi_v8hi;
4803 /* vshort, vint, vint. */
4804 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4805 type = v8hi_ftype_v4si_v4si;
4807 /* vchar, vshort, vshort. */
4808 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4809 type = v16qi_ftype_v8hi_v8hi;
4811 /* vint, vchar, vint. */
4812 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4813 type = v4si_ftype_v16qi_v4si;
4815 /* vint, vchar, vchar. */
4816 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4817 type = v4si_ftype_v16qi_v16qi;
4819 /* vint, vshort, vint. */
4820 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4821 type = v4si_ftype_v8hi_v4si;
4823 /* vint, vint, 5 bit literal. */
4824 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4825 type = v4si_ftype_v4si_char;
4827 /* vshort, vshort, 5 bit literal. */
4828 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4829 type = v8hi_ftype_v8hi_char;
4831 /* vchar, vchar, 5 bit literal. */
4832 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4833 type = v16qi_ftype_v16qi_char;
4835 /* vfloat, vint, 5 bit literal. */
4836 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4837 type = v4sf_ftype_v4si_char;
4839 /* vint, vfloat, 5 bit literal. */
4840 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4841 type = v4si_ftype_v4sf_char;
4844 else if (mode0 == SImode)
4849 type = int_ftype_v4si_v4si;
4852 type = int_ftype_v4sf_v4sf;
4855 type = int_ftype_v16qi_v16qi;
4858 type = int_ftype_v8hi_v8hi;
4868 def_builtin (d->mask, d->name, type, d->code);
4871 /* Initialize the abs* operators. */
4872 d = (struct builtin_description *) bdesc_abs;
4873 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4875 enum machine_mode mode0;
4878 mode0 = insn_data[d->icode].operand[0].mode;
4883 type = v4si_ftype_v4si;
4886 type = v8hi_ftype_v8hi;
4889 type = v16qi_ftype_v16qi;
4892 type = v4sf_ftype_v4sf;
4898 def_builtin (d->mask, d->name, type, d->code);
4901 /* Add the simple unary operators. */
4902 d = (struct builtin_description *) bdesc_1arg;
4903 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4905 enum machine_mode mode0, mode1;
4911 mode0 = insn_data[d->icode].operand[0].mode;
4912 mode1 = insn_data[d->icode].operand[1].mode;
4914 if (mode0 == V4SImode && mode1 == QImode)
4915 type = v4si_ftype_char;
4916 else if (mode0 == V8HImode && mode1 == QImode)
4917 type = v8hi_ftype_char;
4918 else if (mode0 == V16QImode && mode1 == QImode)
4919 type = v16qi_ftype_char;
4920 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4921 type = v4sf_ftype_v4sf;
4922 else if (mode0 == V8HImode && mode1 == V16QImode)
4923 type = v8hi_ftype_v16qi;
4924 else if (mode0 == V4SImode && mode1 == V8HImode)
4925 type = v4si_ftype_v8hi;
4929 def_builtin (d->mask, d->name, type, d->code);
4934 /* Generate a memory reference for expand_block_move, copying volatile,
4935 and other bits from an original memory reference. */
4938 expand_block_move_mem (mode, addr, orig_mem)
4939 enum machine_mode mode;
4943 rtx mem = gen_rtx_MEM (mode, addr);
4945 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4949 /* Expand a block move operation, and return 1 if successful. Return 0
4950 if we should let the compiler generate normal code.
4952 operands[0] is the destination
4953 operands[1] is the source
4954 operands[2] is the length
4955 operands[3] is the alignment */
4957 #define MAX_MOVE_REG 4
4960 expand_block_move (operands)
4963 rtx orig_dest = operands[0];
4964 rtx orig_src = operands[1];
4965 rtx bytes_rtx = operands[2];
4966 rtx align_rtx = operands[3];
4967 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
4978 rtx stores[MAX_MOVE_REG];
4981 /* If this is not a fixed size move, just call memcpy */
4985 /* If this is not a fixed size alignment, abort */
4986 if (GET_CODE (align_rtx) != CONST_INT)
4988 align = INTVAL (align_rtx);
4990 /* Anything to move? */
4991 bytes = INTVAL (bytes_rtx);
4995 /* store_one_arg depends on expand_block_move to handle at least the size of
4996 reg_parm_stack_space. */
4997 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
5000 /* Move the address into scratch registers. */
5001 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
5002 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
5004 if (TARGET_STRING) /* string instructions are available */
5006 for ( ; bytes > 0; bytes -= move_bytes)
5008 if (bytes > 24 /* move up to 32 bytes at a time */
5016 && ! fixed_regs[12])
5018 move_bytes = (bytes > 32) ? 32 : bytes;
5019 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
5022 expand_block_move_mem (BLKmode,
5025 GEN_INT ((move_bytes == 32)
5029 else if (bytes > 16 /* move up to 24 bytes at a time */
5035 && ! fixed_regs[10])
5037 move_bytes = (bytes > 24) ? 24 : bytes;
5038 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
5041 expand_block_move_mem (BLKmode,
5044 GEN_INT (move_bytes),
5047 else if (bytes > 8 /* move up to 16 bytes at a time */
5053 move_bytes = (bytes > 16) ? 16 : bytes;
5054 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
5057 expand_block_move_mem (BLKmode,
5060 GEN_INT (move_bytes),
5063 else if (bytes >= 8 && TARGET_POWERPC64
5064 /* 64-bit loads and stores require word-aligned
5066 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5069 tmp_reg = gen_reg_rtx (DImode);
5070 emit_move_insn (tmp_reg,
5071 expand_block_move_mem (DImode,
5072 src_reg, orig_src));
5073 emit_move_insn (expand_block_move_mem (DImode,
5074 dest_reg, orig_dest),
5077 else if (bytes > 4 && !TARGET_POWERPC64)
5078 { /* move up to 8 bytes at a time */
5079 move_bytes = (bytes > 8) ? 8 : bytes;
5080 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
5083 expand_block_move_mem (BLKmode,
5086 GEN_INT (move_bytes),
5089 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5090 { /* move 4 bytes */
5092 tmp_reg = gen_reg_rtx (SImode);
5093 emit_move_insn (tmp_reg,
5094 expand_block_move_mem (SImode,
5095 src_reg, orig_src));
5096 emit_move_insn (expand_block_move_mem (SImode,
5097 dest_reg, orig_dest),
5100 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5101 { /* move 2 bytes */
5103 tmp_reg = gen_reg_rtx (HImode);
5104 emit_move_insn (tmp_reg,
5105 expand_block_move_mem (HImode,
5106 src_reg, orig_src));
5107 emit_move_insn (expand_block_move_mem (HImode,
5108 dest_reg, orig_dest),
5111 else if (bytes == 1) /* move 1 byte */
5114 tmp_reg = gen_reg_rtx (QImode);
5115 emit_move_insn (tmp_reg,
5116 expand_block_move_mem (QImode,
5117 src_reg, orig_src));
5118 emit_move_insn (expand_block_move_mem (QImode,
5119 dest_reg, orig_dest),
5123 { /* move up to 4 bytes at a time */
5124 move_bytes = (bytes > 4) ? 4 : bytes;
5125 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
5128 expand_block_move_mem (BLKmode,
5131 GEN_INT (move_bytes),
5135 if (bytes > move_bytes)
5137 if (! TARGET_POWERPC64)
5139 emit_insn (gen_addsi3 (src_reg, src_reg,
5140 GEN_INT (move_bytes)));
5141 emit_insn (gen_addsi3 (dest_reg, dest_reg,
5142 GEN_INT (move_bytes)));
5146 emit_insn (gen_adddi3 (src_reg, src_reg,
5147 GEN_INT (move_bytes)));
5148 emit_insn (gen_adddi3 (dest_reg, dest_reg,
5149 GEN_INT (move_bytes)));
5155 else /* string instructions not available */
5157 num_reg = offset = 0;
5158 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
5160 /* Calculate the correct offset for src/dest */
5164 dest_addr = dest_reg;
5168 src_addr = plus_constant (src_reg, offset);
5169 dest_addr = plus_constant (dest_reg, offset);
5172 /* Generate the appropriate load and store, saving the stores
5174 if (bytes >= 8 && TARGET_POWERPC64
5175 /* 64-bit loads and stores require word-aligned
5177 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5180 tmp_reg = gen_reg_rtx (DImode);
5181 emit_insn (gen_movdi (tmp_reg,
5182 expand_block_move_mem (DImode,
5185 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
5190 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5193 tmp_reg = gen_reg_rtx (SImode);
5194 emit_insn (gen_movsi (tmp_reg,
5195 expand_block_move_mem (SImode,
5198 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5203 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5206 tmp_reg = gen_reg_rtx (HImode);
5207 emit_insn (gen_movhi (tmp_reg,
5208 expand_block_move_mem (HImode,
5211 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
5219 tmp_reg = gen_reg_rtx (QImode);
5220 emit_insn (gen_movqi (tmp_reg,
5221 expand_block_move_mem (QImode,
5224 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
5230 if (num_reg >= MAX_MOVE_REG)
5232 for (i = 0; i < num_reg; i++)
5233 emit_insn (stores[i]);
5238 for (i = 0; i < num_reg; i++)
5239 emit_insn (stores[i]);
5246 /* Return 1 if OP is a load multiple operation. It is known to be a
5247 PARALLEL and the first section will be tested. */
5250 load_multiple_operation (op, mode)
5252 enum machine_mode mode ATTRIBUTE_UNUSED;
5254 int count = XVECLEN (op, 0);
5255 unsigned int dest_regno;
5259 /* Perform a quick check so we don't blow up below. */
5261 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5262 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5263 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5266 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5267 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5269 for (i = 1; i < count; i++)
5271 rtx elt = XVECEXP (op, 0, i);
5273 if (GET_CODE (elt) != SET
5274 || GET_CODE (SET_DEST (elt)) != REG
5275 || GET_MODE (SET_DEST (elt)) != SImode
5276 || REGNO (SET_DEST (elt)) != dest_regno + i
5277 || GET_CODE (SET_SRC (elt)) != MEM
5278 || GET_MODE (SET_SRC (elt)) != SImode
5279 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5280 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5281 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5282 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5289 /* Similar, but tests for store multiple. Here, the second vector element
5290 is a CLOBBER. It will be tested later. */
5293 store_multiple_operation (op, mode)
5295 enum machine_mode mode ATTRIBUTE_UNUSED;
5297 int count = XVECLEN (op, 0) - 1;
5298 unsigned int src_regno;
5302 /* Perform a quick check so we don't blow up below. */
5304 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5305 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5306 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5309 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5310 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5312 for (i = 1; i < count; i++)
5314 rtx elt = XVECEXP (op, 0, i + 1);
5316 if (GET_CODE (elt) != SET
5317 || GET_CODE (SET_SRC (elt)) != REG
5318 || GET_MODE (SET_SRC (elt)) != SImode
5319 || REGNO (SET_SRC (elt)) != src_regno + i
5320 || GET_CODE (SET_DEST (elt)) != MEM
5321 || GET_MODE (SET_DEST (elt)) != SImode
5322 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5323 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5324 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5325 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5332 /* Return 1 for a parallel vrsave operation. */
5335 vrsave_operation (op, mode)
5337 enum machine_mode mode ATTRIBUTE_UNUSED;
5339 int count = XVECLEN (op, 0);
5340 unsigned int dest_regno, src_regno;
5344 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5345 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5346 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5349 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5350 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5352 if (dest_regno != VRSAVE_REGNO
5353 && src_regno != VRSAVE_REGNO)
5356 for (i = 1; i < count; i++)
5358 rtx elt = XVECEXP (op, 0, i);
5360 if (GET_CODE (elt) != CLOBBER
5361 && GET_CODE (elt) != SET)
5368 /* Return 1 for an PARALLEL suitable for mtcrf. */
5371 mtcrf_operation (op, mode)
5373 enum machine_mode mode ATTRIBUTE_UNUSED;
5375 int count = XVECLEN (op, 0);
5379 /* Perform a quick check so we don't blow up below. */
5381 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5382 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5383 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5385 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5387 if (GET_CODE (src_reg) != REG
5388 || GET_MODE (src_reg) != SImode
5389 || ! INT_REGNO_P (REGNO (src_reg)))
5392 for (i = 0; i < count; i++)
5394 rtx exp = XVECEXP (op, 0, i);
5398 if (GET_CODE (exp) != SET
5399 || GET_CODE (SET_DEST (exp)) != REG
5400 || GET_MODE (SET_DEST (exp)) != CCmode
5401 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5403 unspec = SET_SRC (exp);
5404 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5406 if (GET_CODE (unspec) != UNSPEC
5407 || XINT (unspec, 1) != 20
5408 || XVECLEN (unspec, 0) != 2
5409 || XVECEXP (unspec, 0, 0) != src_reg
5410 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5411 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5417 /* Return 1 for an PARALLEL suitable for lmw. */
5420 lmw_operation (op, mode)
5422 enum machine_mode mode ATTRIBUTE_UNUSED;
5424 int count = XVECLEN (op, 0);
5425 unsigned int dest_regno;
5427 unsigned int base_regno;
5428 HOST_WIDE_INT offset;
5431 /* Perform a quick check so we don't blow up below. */
5433 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5434 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5435 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5438 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5439 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5442 || count != 32 - (int) dest_regno)
5445 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5448 base_regno = REGNO (src_addr);
5449 if (base_regno == 0)
5452 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5454 offset = INTVAL (XEXP (src_addr, 1));
5455 base_regno = REGNO (XEXP (src_addr, 0));
5460 for (i = 0; i < count; i++)
5462 rtx elt = XVECEXP (op, 0, i);
5465 HOST_WIDE_INT newoffset;
5467 if (GET_CODE (elt) != SET
5468 || GET_CODE (SET_DEST (elt)) != REG
5469 || GET_MODE (SET_DEST (elt)) != SImode
5470 || REGNO (SET_DEST (elt)) != dest_regno + i
5471 || GET_CODE (SET_SRC (elt)) != MEM
5472 || GET_MODE (SET_SRC (elt)) != SImode)
5474 newaddr = XEXP (SET_SRC (elt), 0);
5475 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5480 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5482 addr_reg = XEXP (newaddr, 0);
5483 newoffset = INTVAL (XEXP (newaddr, 1));
5487 if (REGNO (addr_reg) != base_regno
5488 || newoffset != offset + 4 * i)
5495 /* Return 1 for an PARALLEL suitable for stmw. */
5498 stmw_operation (op, mode)
5500 enum machine_mode mode ATTRIBUTE_UNUSED;
5502 int count = XVECLEN (op, 0);
5503 unsigned int src_regno;
5505 unsigned int base_regno;
5506 HOST_WIDE_INT offset;
5509 /* Perform a quick check so we don't blow up below. */
5511 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5512 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5513 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5516 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5517 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5520 || count != 32 - (int) src_regno)
5523 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5526 base_regno = REGNO (dest_addr);
5527 if (base_regno == 0)
5530 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5532 offset = INTVAL (XEXP (dest_addr, 1));
5533 base_regno = REGNO (XEXP (dest_addr, 0));
5538 for (i = 0; i < count; i++)
5540 rtx elt = XVECEXP (op, 0, i);
5543 HOST_WIDE_INT newoffset;
5545 if (GET_CODE (elt) != SET
5546 || GET_CODE (SET_SRC (elt)) != REG
5547 || GET_MODE (SET_SRC (elt)) != SImode
5548 || REGNO (SET_SRC (elt)) != src_regno + i
5549 || GET_CODE (SET_DEST (elt)) != MEM
5550 || GET_MODE (SET_DEST (elt)) != SImode)
5552 newaddr = XEXP (SET_DEST (elt), 0);
5553 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5558 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5560 addr_reg = XEXP (newaddr, 0);
5561 newoffset = INTVAL (XEXP (newaddr, 1));
5565 if (REGNO (addr_reg) != base_regno
5566 || newoffset != offset + 4 * i)
5573 /* A validation routine: say whether CODE, a condition code, and MODE
5574 match. The other alternatives either don't make sense or should
5575 never be generated. */
5578 validate_condition_mode (code, mode)
5580 enum machine_mode mode;
5582 if (GET_RTX_CLASS (code) != '<'
5583 || GET_MODE_CLASS (mode) != MODE_CC)
5586 /* These don't make sense. */
5587 if ((code == GT || code == LT || code == GE || code == LE)
5588 && mode == CCUNSmode)
5591 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5592 && mode != CCUNSmode)
5595 if (mode != CCFPmode
5596 && (code == ORDERED || code == UNORDERED
5597 || code == UNEQ || code == LTGT
5598 || code == UNGT || code == UNLT
5599 || code == UNGE || code == UNLE))
5602 /* These should never be generated except for
5603 flag_unsafe_math_optimizations. */
5604 if (mode == CCFPmode
5605 && ! flag_unsafe_math_optimizations
5606 && (code == LE || code == GE
5607 || code == UNEQ || code == LTGT
5608 || code == UNGT || code == UNLT))
5611 /* These are invalid; the information is not there. */
5612 if (mode == CCEQmode
5613 && code != EQ && code != NE)
5617 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5618 We only check the opcode against the mode of the CC value here. */
5621 branch_comparison_operator (op, mode)
5623 enum machine_mode mode ATTRIBUTE_UNUSED;
5625 enum rtx_code code = GET_CODE (op);
5626 enum machine_mode cc_mode;
5628 if (GET_RTX_CLASS (code) != '<')
5631 cc_mode = GET_MODE (XEXP (op, 0));
5632 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5635 validate_condition_mode (code, cc_mode);
5640 /* Return 1 if OP is a comparison operation that is valid for a branch
5641 insn and which is true if the corresponding bit in the CC register
5645 branch_positive_comparison_operator (op, mode)
5647 enum machine_mode mode;
5651 if (! branch_comparison_operator (op, mode))
5654 code = GET_CODE (op);
5655 return (code == EQ || code == LT || code == GT
5656 || code == LTU || code == GTU
5657 || code == UNORDERED);
5660 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5661 We check the opcode against the mode of the CC value and disallow EQ or
5662 NE comparisons for integers. */
5665 scc_comparison_operator (op, mode)
5667 enum machine_mode mode;
5669 enum rtx_code code = GET_CODE (op);
5670 enum machine_mode cc_mode;
5672 if (GET_MODE (op) != mode && mode != VOIDmode)
5675 if (GET_RTX_CLASS (code) != '<')
5678 cc_mode = GET_MODE (XEXP (op, 0));
5679 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5682 validate_condition_mode (code, cc_mode);
5684 if (code == NE && cc_mode != CCFPmode)
5691 trap_comparison_operator (op, mode)
5693 enum machine_mode mode;
5695 if (mode != VOIDmode && mode != GET_MODE (op))
5697 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5701 boolean_operator (op, mode)
5703 enum machine_mode mode ATTRIBUTE_UNUSED;
5705 enum rtx_code code = GET_CODE (op);
5706 return (code == AND || code == IOR || code == XOR);
5710 boolean_or_operator (op, mode)
5712 enum machine_mode mode ATTRIBUTE_UNUSED;
5714 enum rtx_code code = GET_CODE (op);
5715 return (code == IOR || code == XOR);
5719 min_max_operator (op, mode)
5721 enum machine_mode mode ATTRIBUTE_UNUSED;
5723 enum rtx_code code = GET_CODE (op);
5724 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5727 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5728 mask required to convert the result of a rotate insn into a shift
5729 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
5732 includes_lshift_p (shiftop, andop)
5736 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5738 shift_mask <<= INTVAL (shiftop);
5740 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5743 /* Similar, but for right shift. */
5746 includes_rshift_p (shiftop, andop)
5750 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5752 shift_mask >>= INTVAL (shiftop);
5754 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5757 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5758 to perform a left shift. It must have exactly SHIFTOP least
5759 signifigant 0's, then one or more 1's, then zero or more 0's. */
5762 includes_rldic_lshift_p (shiftop, andop)
5766 if (GET_CODE (andop) == CONST_INT)
5768 HOST_WIDE_INT c, lsb, shift_mask;
5771 if (c == 0 || c == ~0)
5775 shift_mask <<= INTVAL (shiftop);
5777 /* Find the least signifigant one bit. */
5780 /* It must coincide with the LSB of the shift mask. */
5781 if (-lsb != shift_mask)
5784 /* Invert to look for the next transition (if any). */
5787 /* Remove the low group of ones (originally low group of zeros). */
5790 /* Again find the lsb, and check we have all 1's above. */
5794 else if (GET_CODE (andop) == CONST_DOUBLE
5795 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5797 HOST_WIDE_INT low, high, lsb;
5798 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5800 low = CONST_DOUBLE_LOW (andop);
5801 if (HOST_BITS_PER_WIDE_INT < 64)
5802 high = CONST_DOUBLE_HIGH (andop);
5804 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5805 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
5808 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5810 shift_mask_high = ~0;
5811 if (INTVAL (shiftop) > 32)
5812 shift_mask_high <<= INTVAL (shiftop) - 32;
5816 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5823 return high == -lsb;
5826 shift_mask_low = ~0;
5827 shift_mask_low <<= INTVAL (shiftop);
5831 if (-lsb != shift_mask_low)
5834 if (HOST_BITS_PER_WIDE_INT < 64)
5839 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5842 return high == -lsb;
5846 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5852 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5853 to perform a left shift. It must have SHIFTOP or more least
5854 signifigant 0's, with the remainder of the word 1's. */
5857 includes_rldicr_lshift_p (shiftop, andop)
5861 if (GET_CODE (andop) == CONST_INT)
5863 HOST_WIDE_INT c, lsb, shift_mask;
5866 shift_mask <<= INTVAL (shiftop);
5869 /* Find the least signifigant one bit. */
5872 /* It must be covered by the shift mask.
5873 This test also rejects c == 0. */
5874 if ((lsb & shift_mask) == 0)
5877 /* Check we have all 1's above the transition, and reject all 1's. */
5878 return c == -lsb && lsb != 1;
5880 else if (GET_CODE (andop) == CONST_DOUBLE
5881 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5883 HOST_WIDE_INT low, lsb, shift_mask_low;
5885 low = CONST_DOUBLE_LOW (andop);
5887 if (HOST_BITS_PER_WIDE_INT < 64)
5889 HOST_WIDE_INT high, shift_mask_high;
5891 high = CONST_DOUBLE_HIGH (andop);
5895 shift_mask_high = ~0;
5896 if (INTVAL (shiftop) > 32)
5897 shift_mask_high <<= INTVAL (shiftop) - 32;
5901 if ((lsb & shift_mask_high) == 0)
5904 return high == -lsb;
5910 shift_mask_low = ~0;
5911 shift_mask_low <<= INTVAL (shiftop);
5915 if ((lsb & shift_mask_low) == 0)
5918 return low == -lsb && lsb != 1;
5924 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5925 for lfq and stfq insns.
5927 Note reg1 and reg2 *must* be hard registers. To be sure we will
5928 abort if we are passed pseudo registers. */
5931 registers_ok_for_quad_peep (reg1, reg2)
5934 /* We might have been passed a SUBREG. */
5935 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5938 return (REGNO (reg1) == REGNO (reg2) - 1);
5941 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5942 addr1 and addr2 must be in consecutive memory locations
5943 (addr2 == addr1 + 8). */
5946 addrs_ok_for_quad_peep (addr1, addr2)
5953 /* Extract an offset (if used) from the first addr. */
5954 if (GET_CODE (addr1) == PLUS)
5956 /* If not a REG, return zero. */
5957 if (GET_CODE (XEXP (addr1, 0)) != REG)
5961 reg1 = REGNO (XEXP (addr1, 0));
5962 /* The offset must be constant! */
5963 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5965 offset1 = INTVAL (XEXP (addr1, 1));
5968 else if (GET_CODE (addr1) != REG)
5972 reg1 = REGNO (addr1);
5973 /* This was a simple (mem (reg)) expression. Offset is 0. */
5977 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5978 if (GET_CODE (addr2) != PLUS)
5981 if (GET_CODE (XEXP (addr2, 0)) != REG
5982 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5985 if (reg1 != REGNO (XEXP (addr2, 0)))
5988 /* The offset for the second addr must be 8 more than the first addr. */
5989 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5992 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5997 /* Return the register class of a scratch register needed to copy IN into
5998 or out of a register in CLASS in MODE. If it can be done directly,
5999 NO_REGS is returned. */
6002 secondary_reload_class (class, mode, in)
6003 enum reg_class class;
6004 enum machine_mode mode ATTRIBUTE_UNUSED;
6009 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
6011 /* We cannot copy a symbolic operand directly into anything
6012 other than BASE_REGS for TARGET_ELF. So indicate that a
6013 register from BASE_REGS is needed as an intermediate
6016 On Darwin, pic addresses require a load from memory, which
6017 needs a base register. */
6018 if (class != BASE_REGS
6019 && (GET_CODE (in) == SYMBOL_REF
6020 || GET_CODE (in) == HIGH
6021 || GET_CODE (in) == LABEL_REF
6022 || GET_CODE (in) == CONST))
6026 if (GET_CODE (in) == REG)
6029 if (regno >= FIRST_PSEUDO_REGISTER)
6031 regno = true_regnum (in);
6032 if (regno >= FIRST_PSEUDO_REGISTER)
6036 else if (GET_CODE (in) == SUBREG)
6038 regno = true_regnum (in);
6039 if (regno >= FIRST_PSEUDO_REGISTER)
6045 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
6047 if (class == GENERAL_REGS || class == BASE_REGS
6048 || (regno >= 0 && INT_REGNO_P (regno)))
6051 /* Constants, memory, and FP registers can go into FP registers. */
6052 if ((regno == -1 || FP_REGNO_P (regno))
6053 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
6056 /* Memory, and AltiVec registers can go into AltiVec registers. */
6057 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
6058 && class == ALTIVEC_REGS)
6061 /* We can copy among the CR registers. */
6062 if ((class == CR_REGS || class == CR0_REGS)
6063 && regno >= 0 && CR_REGNO_P (regno))
6066 /* Otherwise, we need GENERAL_REGS. */
6067 return GENERAL_REGS;
6070 /* Given a comparison operation, return the bit number in CCR to test. We
6071 know this is a valid comparison.
6073 SCC_P is 1 if this is for an scc. That means that %D will have been
6074 used instead of %C, so the bits will be in different places.
6076 Return -1 if OP isn't a valid comparison for some reason. */
6083 enum rtx_code code = GET_CODE (op);
6084 enum machine_mode cc_mode;
6089 if (GET_RTX_CLASS (code) != '<')
6094 if (GET_CODE (reg) != REG
6095 || ! CR_REGNO_P (REGNO (reg)))
6098 cc_mode = GET_MODE (reg);
6099 cc_regnum = REGNO (reg);
6100 base_bit = 4 * (cc_regnum - CR0_REGNO);
6102 validate_condition_mode (code, cc_mode);
6107 return scc_p ? base_bit + 3 : base_bit + 2;
6109 return base_bit + 2;
6110 case GT: case GTU: case UNLE:
6111 return base_bit + 1;
6112 case LT: case LTU: case UNGE:
6114 case ORDERED: case UNORDERED:
6115 return base_bit + 3;
6118 /* If scc, we will have done a cror to put the bit in the
6119 unordered position. So test that bit. For integer, this is ! LT
6120 unless this is an scc insn. */
6121 return scc_p ? base_bit + 3 : base_bit;
6124 return scc_p ? base_bit + 3 : base_bit + 1;
6131 /* Return the GOT register. */
6134 rs6000_got_register (value)
6135 rtx value ATTRIBUTE_UNUSED;
6137 /* The second flow pass currently (June 1999) can't update
6138 regs_ever_live without disturbing other parts of the compiler, so
6139 update it here to make the prolog/epilogue code happy. */
6140 if (no_new_pseudos && ! regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6141 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
6143 current_function_uses_pic_offset_table = 1;
6145 return pic_offset_table_rtx;
6148 /* Functions to init, mark and free struct machine_function.
6149 These will be called, via pointer variables,
6150 from push_function_context and pop_function_context. */
6153 rs6000_init_machine_status (p)
6156 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
6160 rs6000_free_machine_status (p)
6163 if (p->machine == NULL)
6171 /* Print an operand. Recognize special options, documented below. */
6174 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6175 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6177 #define SMALL_DATA_RELOC "sda21"
6178 #define SMALL_DATA_REG 0
6182 print_operand (file, x, code)
6190 /* These macros test for integers and extract the low-order bits. */
6192 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6193 && GET_MODE (X) == VOIDmode)
6195 #define INT_LOWPART(X) \
6196 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6201 /* Write out an instruction after the call which may be replaced
6202 with glue code by the loader. This depends on the AIX version. */
6203 asm_fprintf (file, RS6000_CALL_GLUE);
6206 /* %a is output_address. */
6209 /* If X is a constant integer whose low-order 5 bits are zero,
6210 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6211 in the AIX assembler where "sri" with a zero shift count
6212 writes a trash instruction. */
6213 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6220 /* If constant, low-order 16 bits of constant, unsigned.
6221 Otherwise, write normally. */
6223 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6225 print_operand (file, x, 0);
6229 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6230 for 64-bit mask direction. */
6231 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6234 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6238 /* There used to be a comment for 'C' reading "This is an
6239 optional cror needed for certain floating-point
6240 comparisons. Otherwise write nothing." */
6242 /* Similar, except that this is for an scc, so we must be able to
6243 encode the test in a single bit that is one. We do the above
6244 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6245 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6246 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6248 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6250 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6252 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6255 else if (GET_CODE (x) == NE)
6257 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6259 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6260 base_bit + 2, base_bit + 2);
6265 /* X is a CR register. Print the number of the EQ bit of the CR */
6266 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6267 output_operand_lossage ("invalid %%E value");
6269 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6273 /* X is a CR register. Print the shift count needed to move it
6274 to the high-order four bits. */
6275 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6276 output_operand_lossage ("invalid %%f value");
6278 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6282 /* Similar, but print the count for the rotate in the opposite
6284 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6285 output_operand_lossage ("invalid %%F value");
6287 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6291 /* X is a constant integer. If it is negative, print "m",
6292 otherwise print "z". This is to make a aze or ame insn. */
6293 if (GET_CODE (x) != CONST_INT)
6294 output_operand_lossage ("invalid %%G value");
6295 else if (INTVAL (x) >= 0)
6302 /* If constant, output low-order five bits. Otherwise, write
6305 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6307 print_operand (file, x, 0);
6311 /* If constant, output low-order six bits. Otherwise, write
6314 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6316 print_operand (file, x, 0);
6320 /* Print `i' if this is a constant, else nothing. */
6326 /* Write the bit number in CCR for jump. */
6329 output_operand_lossage ("invalid %%j code");
6331 fprintf (file, "%d", i);
6335 /* Similar, but add one for shift count in rlinm for scc and pass
6336 scc flag to `ccr_bit'. */
6339 output_operand_lossage ("invalid %%J code");
6341 /* If we want bit 31, write a shift count of zero, not 32. */
6342 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6346 /* X must be a constant. Write the 1's complement of the
6349 output_operand_lossage ("invalid %%k value");
6351 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6355 /* X must be a symbolic constant on ELF. Write an
6356 expression suitable for an 'addi' that adds in the low 16
6358 if (GET_CODE (x) != CONST)
6360 print_operand_address (file, x);
6365 if (GET_CODE (XEXP (x, 0)) != PLUS
6366 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6367 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6368 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6369 output_operand_lossage ("invalid %%K value");
6370 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6372 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6376 /* %l is output_asm_label. */
6379 /* Write second word of DImode or DFmode reference. Works on register
6380 or non-indexed memory only. */
6381 if (GET_CODE (x) == REG)
6382 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6383 else if (GET_CODE (x) == MEM)
6385 /* Handle possible auto-increment. Since it is pre-increment and
6386 we have already done it, we can just use an offset of word. */
6387 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6388 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6389 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6392 output_address (XEXP (adjust_address_nv (x, SImode,
6396 if (small_data_operand (x, GET_MODE (x)))
6397 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6398 reg_names[SMALL_DATA_REG]);
6403 /* MB value for a mask operand. */
6404 if (! mask_operand (x, SImode))
6405 output_operand_lossage ("invalid %%m value");
6407 val = INT_LOWPART (x);
6409 /* If the high bit is set and the low bit is not, the value is zero.
6410 If the high bit is zero, the value is the first 1 bit we find from
6412 if ((val & 0x80000000) && ((val & 1) == 0))
6417 else if ((val & 0x80000000) == 0)
6419 for (i = 1; i < 32; i++)
6420 if ((val <<= 1) & 0x80000000)
6422 fprintf (file, "%d", i);
6426 /* Otherwise, look for the first 0 bit from the right. The result is its
6427 number plus 1. We know the low-order bit is one. */
6428 for (i = 0; i < 32; i++)
6429 if (((val >>= 1) & 1) == 0)
6432 /* If we ended in ...01, i would be 0. The correct value is 31, so
6434 fprintf (file, "%d", 31 - i);
6438 /* ME value for a mask operand. */
6439 if (! mask_operand (x, SImode))
6440 output_operand_lossage ("invalid %%M value");
6442 val = INT_LOWPART (x);
6444 /* If the low bit is set and the high bit is not, the value is 31.
6445 If the low bit is zero, the value is the first 1 bit we find from
6447 if ((val & 1) && ((val & 0x80000000) == 0))
6452 else if ((val & 1) == 0)
6454 for (i = 0; i < 32; i++)
6455 if ((val >>= 1) & 1)
6458 /* If we had ....10, i would be 0. The result should be
6459 30, so we need 30 - i. */
6460 fprintf (file, "%d", 30 - i);
6464 /* Otherwise, look for the first 0 bit from the left. The result is its
6465 number minus 1. We know the high-order bit is one. */
6466 for (i = 0; i < 32; i++)
6467 if (((val <<= 1) & 0x80000000) == 0)
6470 fprintf (file, "%d", i);
6473 /* %n outputs the negative of its operand. */
6476 /* Write the number of elements in the vector times 4. */
6477 if (GET_CODE (x) != PARALLEL)
6478 output_operand_lossage ("invalid %%N value");
6480 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6484 /* Similar, but subtract 1 first. */
6485 if (GET_CODE (x) != PARALLEL)
6486 output_operand_lossage ("invalid %%O value");
6488 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6492 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6494 || INT_LOWPART (x) < 0
6495 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6496 output_operand_lossage ("invalid %%p value");
6498 fprintf (file, "%d", i);
6502 /* The operand must be an indirect memory reference. The result
6503 is the register number. */
6504 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6505 || REGNO (XEXP (x, 0)) >= 32)
6506 output_operand_lossage ("invalid %%P value");
6508 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6512 /* This outputs the logical code corresponding to a boolean
6513 expression. The expression may have one or both operands
6514 negated (if one, only the first one). For condition register
6515 logical operations, it will also treat the negated
6516 CR codes as NOTs, but not handle NOTs of them. */
6518 const char *const *t = 0;
6520 enum rtx_code code = GET_CODE (x);
6521 static const char * const tbl[3][3] = {
6522 { "and", "andc", "nor" },
6523 { "or", "orc", "nand" },
6524 { "xor", "eqv", "xor" } };
6528 else if (code == IOR)
6530 else if (code == XOR)
6533 output_operand_lossage ("invalid %%q value");
6535 if (GET_CODE (XEXP (x, 0)) != NOT)
6539 if (GET_CODE (XEXP (x, 1)) == NOT)
6550 /* X is a CR register. Print the mask for `mtcrf'. */
6551 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6552 output_operand_lossage ("invalid %%R value");
6554 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6558 /* Low 5 bits of 32 - value */
6560 output_operand_lossage ("invalid %%s value");
6562 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6566 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6567 CONST_INT 32-bit mask is considered sign-extended so any
6568 transition must occur within the CONST_INT, not on the boundary. */
6569 if (! mask64_operand (x, DImode))
6570 output_operand_lossage ("invalid %%S value");
6572 val = INT_LOWPART (x);
6574 if (val & 1) /* Clear Left */
6576 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6577 if (!((val >>= 1) & 1))
6580 #if HOST_BITS_PER_WIDE_INT == 32
6581 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6583 val = CONST_DOUBLE_HIGH (x);
6588 for (i = 32; i < 64; i++)
6589 if (!((val >>= 1) & 1))
6593 /* i = index of last set bit from right
6594 mask begins at 63 - i from left */
6596 output_operand_lossage ("%%S computed all 1's mask");
6598 fprintf (file, "%d", 63 - i);
6601 else /* Clear Right */
6603 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6604 if ((val >>= 1) & 1)
6607 #if HOST_BITS_PER_WIDE_INT == 32
6608 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6610 val = CONST_DOUBLE_HIGH (x);
6612 if (val == (HOST_WIDE_INT) -1)
6615 for (i = 32; i < 64; i++)
6616 if ((val >>= 1) & 1)
6620 /* i = index of last clear bit from right
6621 mask ends at 62 - i from left */
6623 output_operand_lossage ("%%S computed all 0's mask");
6625 fprintf (file, "%d", 62 - i);
6630 /* Print the symbolic name of a branch target register. */
6631 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6632 && REGNO (x) != COUNT_REGISTER_REGNUM))
6633 output_operand_lossage ("invalid %%T value");
6634 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6635 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6637 fputs ("ctr", file);
6641 /* High-order 16 bits of constant for use in unsigned operand. */
6643 output_operand_lossage ("invalid %%u value");
6645 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6646 (INT_LOWPART (x) >> 16) & 0xffff);
6650 /* High-order 16 bits of constant for use in signed operand. */
6652 output_operand_lossage ("invalid %%v value");
6654 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6655 (INT_LOWPART (x) >> 16) & 0xffff);
6659 /* Print `u' if this has an auto-increment or auto-decrement. */
6660 if (GET_CODE (x) == MEM
6661 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6662 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6667 /* Print the trap code for this operand. */
6668 switch (GET_CODE (x))
6671 fputs ("eq", file); /* 4 */
6674 fputs ("ne", file); /* 24 */
6677 fputs ("lt", file); /* 16 */
6680 fputs ("le", file); /* 20 */
6683 fputs ("gt", file); /* 8 */
6686 fputs ("ge", file); /* 12 */
6689 fputs ("llt", file); /* 2 */
6692 fputs ("lle", file); /* 6 */
6695 fputs ("lgt", file); /* 1 */
6698 fputs ("lge", file); /* 5 */
6706 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6709 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6710 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6712 print_operand (file, x, 0);
6716 /* MB value for a PowerPC64 rldic operand. */
6717 val = (GET_CODE (x) == CONST_INT
6718 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6723 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6724 if ((val <<= 1) < 0)
6727 #if HOST_BITS_PER_WIDE_INT == 32
6728 if (GET_CODE (x) == CONST_INT && i >= 0)
6729 i += 32; /* zero-extend high-part was all 0's */
6730 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6732 val = CONST_DOUBLE_LOW (x);
6739 for ( ; i < 64; i++)
6740 if ((val <<= 1) < 0)
6745 fprintf (file, "%d", i + 1);
6749 if (GET_CODE (x) == MEM
6750 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6755 /* Like 'L', for third word of TImode */
6756 if (GET_CODE (x) == REG)
6757 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6758 else if (GET_CODE (x) == MEM)
6760 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6761 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6762 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6764 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6765 if (small_data_operand (x, GET_MODE (x)))
6766 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6767 reg_names[SMALL_DATA_REG]);
6772 /* X is a SYMBOL_REF. Write out the name preceded by a
6773 period and without any trailing data in brackets. Used for function
6774 names. If we are configured for System V (or the embedded ABI) on
6775 the PowerPC, do not emit the period, since those systems do not use
6776 TOCs and the like. */
6777 if (GET_CODE (x) != SYMBOL_REF)
6780 if (XSTR (x, 0)[0] != '.')
6782 switch (DEFAULT_ABI)
6792 case ABI_AIX_NODESC:
6798 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6800 assemble_name (file, XSTR (x, 0));
6805 /* Like 'L', for last word of TImode. */
6806 if (GET_CODE (x) == REG)
6807 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6808 else if (GET_CODE (x) == MEM)
6810 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6811 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6812 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6814 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6815 if (small_data_operand (x, GET_MODE (x)))
6816 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6817 reg_names[SMALL_DATA_REG]);
6821 /* Print AltiVec memory operand. */
6826 if (GET_CODE (x) != MEM)
6831 if (GET_CODE (tmp) == REG)
6832 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6833 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6835 if (REGNO (XEXP (tmp, 0)) == 0)
6836 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6837 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6839 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6840 reg_names[ REGNO (XEXP (tmp, 1)) ]);
6848 if (GET_CODE (x) == REG)
6849 fprintf (file, "%s", reg_names[REGNO (x)]);
6850 else if (GET_CODE (x) == MEM)
6852 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6853 know the width from the mode. */
6854 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6855 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6856 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6857 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6858 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6859 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6861 output_address (XEXP (x, 0));
6864 output_addr_const (file, x);
6868 output_operand_lossage ("invalid %%xn code");
6872 /* Print the address of an operand. */
6875 print_operand_address (file, x)
6879 if (GET_CODE (x) == REG)
6880 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6881 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6882 || GET_CODE (x) == LABEL_REF)
6884 output_addr_const (file, x);
6885 if (small_data_operand (x, GET_MODE (x)))
6886 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6887 reg_names[SMALL_DATA_REG]);
6888 else if (TARGET_TOC)
6891 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
6893 if (REGNO (XEXP (x, 0)) == 0)
6894 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6895 reg_names[ REGNO (XEXP (x, 0)) ]);
6897 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6898 reg_names[ REGNO (XEXP (x, 1)) ]);
6900 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
6902 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
6903 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6906 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6907 && CONSTANT_P (XEXP (x, 1)))
6909 output_addr_const (file, XEXP (x, 1));
6910 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6914 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6915 && CONSTANT_P (XEXP (x, 1)))
6917 fprintf (file, "lo16(");
6918 output_addr_const (file, XEXP (x, 1));
6919 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6922 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
6924 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
6926 rtx contains_minus = XEXP (x, 1);
6930 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6931 turn it into (sym) for output_addr_const. */
6932 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
6933 contains_minus = XEXP (contains_minus, 0);
6935 minus = XEXP (contains_minus, 0);
6936 symref = XEXP (minus, 0);
6937 XEXP (contains_minus, 0) = symref;
6942 name = XSTR (symref, 0);
6943 newname = alloca (strlen (name) + sizeof ("@toc"));
6944 strcpy (newname, name);
6945 strcat (newname, "@toc");
6946 XSTR (symref, 0) = newname;
6948 output_addr_const (file, XEXP (x, 1));
6950 XSTR (symref, 0) = name;
6951 XEXP (contains_minus, 0) = minus;
6954 output_addr_const (file, XEXP (x, 1));
6956 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6962 /* Target hook for assembling integer objects. The powerpc version has
6963 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6964 is defined. It also needs to handle DI-mode objects on 64-bit
6968 rs6000_assemble_integer (x, size, aligned_p)
6973 #ifdef RELOCATABLE_NEEDS_FIXUP
6974 /* Special handling for SI values. */
6975 if (size == 4 && aligned_p)
6977 extern int in_toc_section PARAMS ((void));
6978 static int recurse = 0;
6980 /* For -mrelocatable, we mark all addresses that need to be fixed up
6981 in the .fixup section. */
6982 if (TARGET_RELOCATABLE
6983 && !in_toc_section ()
6984 && !in_text_section ()
6986 && GET_CODE (x) != CONST_INT
6987 && GET_CODE (x) != CONST_DOUBLE
6993 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
6995 ASM_OUTPUT_LABEL (asm_out_file, buf);
6996 fprintf (asm_out_file, "\t.long\t(");
6997 output_addr_const (asm_out_file, x);
6998 fprintf (asm_out_file, ")@fixup\n");
6999 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
7000 ASM_OUTPUT_ALIGN (asm_out_file, 2);
7001 fprintf (asm_out_file, "\t.long\t");
7002 assemble_name (asm_out_file, buf);
7003 fprintf (asm_out_file, "\n\t.previous\n");
7007 /* Remove initial .'s to turn a -mcall-aixdesc function
7008 address into the address of the descriptor, not the function
7010 else if (GET_CODE (x) == SYMBOL_REF
7011 && XSTR (x, 0)[0] == '.'
7012 && DEFAULT_ABI == ABI_AIX)
7014 const char *name = XSTR (x, 0);
7015 while (*name == '.')
7018 fprintf (asm_out_file, "\t.long\t%s\n", name);
7022 #endif /* RELOCATABLE_NEEDS_FIXUP */
7023 return default_assemble_integer (x, size, aligned_p);
7027 rs6000_reverse_condition (mode, code)
7028 enum machine_mode mode;
7031 /* Reversal of FP compares takes care -- an ordered compare
7032 becomes an unordered compare and vice versa. */
7033 if (mode == CCFPmode)
7034 return reverse_condition_maybe_unordered (code);
7036 return reverse_condition (code);
7039 /* Generate a compare for CODE. Return a brand-new rtx that
7040 represents the result of the compare. */
7043 rs6000_generate_compare (code)
7046 enum machine_mode comp_mode;
7049 if (rs6000_compare_fp_p)
7050 comp_mode = CCFPmode;
7051 else if (code == GTU || code == LTU
7052 || code == GEU || code == LEU)
7053 comp_mode = CCUNSmode;
7057 /* First, the compare. */
7058 compare_result = gen_reg_rtx (comp_mode);
7059 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
7060 gen_rtx_COMPARE (comp_mode,
7062 rs6000_compare_op1)));
7064 /* Some kinds of FP comparisons need an OR operation;
7065 except for flag_unsafe_math_optimizations we don't bother. */
7066 if (rs6000_compare_fp_p
7067 && ! flag_unsafe_math_optimizations
7068 && (code == LE || code == GE
7069 || code == UNEQ || code == LTGT
7070 || code == UNGT || code == UNLT))
7072 enum rtx_code or1, or2;
7073 rtx or1_rtx, or2_rtx, compare2_rtx;
7074 rtx or_result = gen_reg_rtx (CCEQmode);
7078 case LE: or1 = LT; or2 = EQ; break;
7079 case GE: or1 = GT; or2 = EQ; break;
7080 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
7081 case LTGT: or1 = LT; or2 = GT; break;
7082 case UNGT: or1 = UNORDERED; or2 = GT; break;
7083 case UNLT: or1 = UNORDERED; or2 = LT; break;
7086 validate_condition_mode (or1, comp_mode);
7087 validate_condition_mode (or2, comp_mode);
7088 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
7089 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
7090 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
7091 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
7093 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
7095 compare_result = or_result;
7099 validate_condition_mode (code, GET_MODE (compare_result));
7101 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
7105 /* Emit the RTL for an sCOND pattern. */
7108 rs6000_emit_sCOND (code, result)
7113 enum machine_mode op_mode;
7115 condition_rtx = rs6000_generate_compare (code);
7117 op_mode = GET_MODE (rs6000_compare_op0);
7118 if (op_mode == VOIDmode)
7119 op_mode = GET_MODE (rs6000_compare_op1);
7121 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7123 PUT_MODE (condition_rtx, DImode);
7124 convert_move (result, condition_rtx, 0);
7128 PUT_MODE (condition_rtx, SImode);
7129 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7133 /* Emit a branch of kind CODE to location LOC. */
7136 rs6000_emit_cbranch (code, loc)
7140 rtx condition_rtx, loc_ref;
7142 condition_rtx = rs6000_generate_compare (code);
7143 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7144 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7145 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7149 /* Return the string to output a conditional branch to LABEL, which is
7150 the operand number of the label, or -1 if the branch is really a
7153 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7154 condition code register and its mode specifies what kind of
7157 REVERSED is non-zero if we should reverse the sense of the comparison.
7159 INSN is the insn. */
7162 output_cbranch (op, label, reversed, insn)
7168 static char string[64];
7169 enum rtx_code code = GET_CODE (op);
7170 rtx cc_reg = XEXP (op, 0);
7171 enum machine_mode mode = GET_MODE (cc_reg);
7172 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
7173 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
7174 int really_reversed = reversed ^ need_longbranch;
7180 validate_condition_mode (code, mode);
7182 /* Work out which way this really branches. We could use
7183 reverse_condition_maybe_unordered here always but this
7184 makes the resulting assembler clearer. */
7185 if (really_reversed)
7186 code = rs6000_reverse_condition (mode, code);
7190 /* Not all of these are actually distinct opcodes, but
7191 we distinguish them for clarity of the resulting assembler. */
7193 ccode = "ne"; break;
7195 ccode = "eq"; break;
7197 ccode = "ge"; break;
7198 case GT: case GTU: case UNGT:
7199 ccode = "gt"; break;
7201 ccode = "le"; break;
7202 case LT: case LTU: case UNLT:
7203 ccode = "lt"; break;
7204 case UNORDERED: ccode = "un"; break;
7205 case ORDERED: ccode = "nu"; break;
7206 case UNGE: ccode = "nl"; break;
7207 case UNLE: ccode = "ng"; break;
7212 /* Maybe we have a guess as to how likely the branch is.
7213 The old mnemonics don't have a way to specify this information. */
7214 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
7215 if (note != NULL_RTX)
7217 /* PROB is the difference from 50%. */
7218 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7220 /* For branches that are very close to 50%, assume not-taken. */
7221 if (abs (prob) > REG_BR_PROB_BASE / 20
7222 && ((prob > 0) ^ need_longbranch))
7231 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7233 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7235 /* We need to escape any '%' characters in the reg_names string.
7236 Assume they'd only be the first character... */
7237 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7239 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7243 /* If the branch distance was too far, we may have to use an
7244 unconditional branch to go the distance. */
7245 if (need_longbranch)
7246 s += sprintf (s, ",$+8\n\tb %s", label);
7248 s += sprintf (s, ",%s", label);
7254 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7255 operands of the last comparison is nonzero/true, FALSE_COND if it
7256 is zero/false. Return 0 if the hardware has no such operation. */
7259 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7265 enum rtx_code code = GET_CODE (op);
7266 rtx op0 = rs6000_compare_op0;
7267 rtx op1 = rs6000_compare_op1;
7269 enum machine_mode mode = GET_MODE (op0);
7272 /* First, work out if the hardware can do this at all, or
7273 if it's too slow... */
7274 /* If the comparison is an integer one, since we only have fsel
7275 it'll be cheaper to use a branch. */
7276 if (! rs6000_compare_fp_p)
7279 /* Eliminate half of the comparisons by switching operands, this
7280 makes the remaining code simpler. */
7281 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7282 || code == LTGT || code == LT)
7284 code = reverse_condition_maybe_unordered (code);
7286 true_cond = false_cond;
7290 /* UNEQ and LTGT take four instructions for a comparison with zero,
7291 it'll probably be faster to use a branch here too. */
7295 if (GET_CODE (op1) == CONST_DOUBLE)
7296 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7298 /* We're going to try to implement comparions by performing
7299 a subtract, then comparing against zero. Unfortunately,
7300 Inf - Inf is NaN which is not zero, and so if we don't
7301 know that the the operand is finite and the comparison
7302 would treat EQ different to UNORDERED, we can't do it. */
7303 if (! flag_unsafe_math_optimizations
7304 && code != GT && code != UNGE
7305 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7306 /* Constructs of the form (a OP b ? a : b) are safe. */
7307 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7308 || (! rtx_equal_p (op0, true_cond)
7309 && ! rtx_equal_p (op1, true_cond))))
7311 /* At this point we know we can use fsel. */
7313 /* Reduce the comparison to a comparison against zero. */
7314 temp = gen_reg_rtx (mode);
7315 emit_insn (gen_rtx_SET (VOIDmode, temp,
7316 gen_rtx_MINUS (mode, op0, op1)));
7318 op1 = CONST0_RTX (mode);
7320 /* If we don't care about NaNs we can reduce some of the comparisons
7321 down to faster ones. */
7322 if (flag_unsafe_math_optimizations)
7328 true_cond = false_cond;
7341 /* Now, reduce everything down to a GE. */
7348 temp = gen_reg_rtx (mode);
7349 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7354 temp = gen_reg_rtx (mode);
7355 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0)));
7360 temp = gen_reg_rtx (mode);
7361 emit_insn (gen_rtx_SET (VOIDmode, temp,
7363 gen_rtx_ABS (mode, op0))));
7368 temp = gen_reg_rtx (mode);
7369 emit_insn (gen_rtx_SET (VOIDmode, temp,
7370 gen_rtx_IF_THEN_ELSE (mode,
7371 gen_rtx_GE (VOIDmode,
7373 true_cond, false_cond)));
7375 true_cond = false_cond;
7377 temp = gen_reg_rtx (mode);
7378 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7383 temp = gen_reg_rtx (mode);
7384 emit_insn (gen_rtx_SET (VOIDmode, temp,
7385 gen_rtx_IF_THEN_ELSE (mode,
7386 gen_rtx_GE (VOIDmode,
7388 true_cond, false_cond)));
7390 false_cond = true_cond;
7392 temp = gen_reg_rtx (mode);
7393 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7401 emit_insn (gen_rtx_SET (VOIDmode, dest,
7402 gen_rtx_IF_THEN_ELSE (GET_MODE (dest),
7403 gen_rtx_GE (VOIDmode,
7405 true_cond, false_cond)));
7410 rs6000_emit_minmax (dest, code, op0, op1)
7416 enum machine_mode mode = GET_MODE (op0);
7418 if (code == SMAX || code == UMAX)
7419 target = emit_conditional_move (dest, GE, op0, op1, mode,
7422 target = emit_conditional_move (dest, GE, op0, op1, mode,
7424 if (target == NULL_RTX)
7427 emit_move_insn (dest, target);
7430 /* This page contains routines that are used to determine what the
7431 function prologue and epilogue code will do and write them out. */
7433 /* Return the first fixed-point register that is required to be
7434 saved. 32 if none. */
7437 first_reg_to_save ()
7441 /* Find lowest numbered live register. */
7442 for (first_reg = 13; first_reg <= 31; first_reg++)
7443 if (regs_ever_live[first_reg]
7444 && (! call_used_regs[first_reg]
7445 || (first_reg == PIC_OFFSET_TABLE_REGNUM
7446 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7447 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7450 if (current_function_profile)
7452 /* AIX must save/restore every register that contains a parameter
7453 before/after the .__mcount call plus an additional register
7454 for the static chain, if needed; use registers from 30 down to 22
7456 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7458 int last_parm_reg, profile_first_reg;
7460 /* Figure out last used parameter register. The proper thing
7461 to do is to walk incoming args of the function. A function
7462 might have live parameter registers even if it has no
7464 for (last_parm_reg = 10;
7465 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7469 /* Calculate first reg for saving parameter registers
7471 Skip reg 31 which may contain the frame pointer. */
7472 profile_first_reg = (33 - last_parm_reg
7473 - (current_function_needs_context ? 1 : 0));
7475 /* Need to skip another reg to account for R31 being PICBASE
7476 (when flag_pic is set) or R30 being used as the frame
7477 pointer (when flag_pic is not set). */
7478 --profile_first_reg;
7480 /* Do not save frame pointer if no parameters needs to be saved. */
7481 if (profile_first_reg == 31)
7482 profile_first_reg = 32;
7484 if (first_reg > profile_first_reg)
7485 first_reg = profile_first_reg;
7488 /* SVR4 may need one register to preserve the static chain. */
7489 else if (current_function_needs_context)
7491 /* Skip reg 31 which may contain the frame pointer. */
7498 if (flag_pic && current_function_uses_pic_offset_table &&
7499 (first_reg > PIC_OFFSET_TABLE_REGNUM))
7500 return PIC_OFFSET_TABLE_REGNUM;
7506 /* Similar, for FP regs. */
7509 first_fp_reg_to_save ()
7513 /* Find lowest numbered live register. */
7514 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7515 if (regs_ever_live[first_reg])
7521 /* Similar, for AltiVec regs. */
7524 first_altivec_reg_to_save ()
7528 /* Stack frame remains as is unless we are in AltiVec ABI. */
7529 if (! TARGET_ALTIVEC_ABI)
7530 return LAST_ALTIVEC_REGNO + 1;
7532 /* Find lowest numbered live register. */
7533 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7534 if (regs_ever_live[i])
7540 /* Return a 32-bit mask of the AltiVec registers we need to set in
7541 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7542 the 32-bit word is 0. */
7545 compute_vrsave_mask ()
7547 unsigned int i, mask = 0;
7549 /* First, find out if we use _any_ altivec registers. */
7550 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7551 if (regs_ever_live[i])
7552 mask |= ALTIVEC_REG_BIT (i);
7557 /* Next, add all registers that are call-clobbered. We do this
7558 because post-reload register optimizers such as regrename_optimize
7559 may choose to use them. They never change the register class
7560 chosen by reload, so cannot create new uses of altivec registers
7561 if there were none before, so the early exit above is safe. */
7562 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7563 altivec registers not saved in the mask, which might well make the
7564 adjustments below more effective in eliding the save/restore of
7565 VRSAVE in small functions. */
7566 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7567 if (call_used_regs[i])
7568 mask |= ALTIVEC_REG_BIT (i);
7570 /* Next, remove the argument registers from the set. These must
7571 be in the VRSAVE mask set by the caller, so we don't need to add
7572 them in again. More importantly, the mask we compute here is
7573 used to generate CLOBBERs in the set_vrsave insn, and we do not
7574 wish the argument registers to die. */
7575 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7576 mask &= ~ALTIVEC_REG_BIT (i);
7578 /* Similarly, remove the return value from the set. */
7581 diddle_return_value (is_altivec_return_reg, &yes);
7583 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7590 is_altivec_return_reg (reg, xyes)
7594 bool *yes = (bool *) xyes;
7595 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7600 /* Calculate the stack information for the current function. This is
7601 complicated by having two separate calling sequences, the AIX calling
7602 sequence and the V.4 calling sequence.
7604 AIX (and Darwin/Mac OS X) stack frames look like:
7606 SP----> +---------------------------------------+
7607 | back chain to caller | 0 0
7608 +---------------------------------------+
7609 | saved CR | 4 8 (8-11)
7610 +---------------------------------------+
7612 +---------------------------------------+
7613 | reserved for compilers | 12 24
7614 +---------------------------------------+
7615 | reserved for binders | 16 32
7616 +---------------------------------------+
7617 | saved TOC pointer | 20 40
7618 +---------------------------------------+
7619 | Parameter save area (P) | 24 48
7620 +---------------------------------------+
7621 | Alloca space (A) | 24+P etc.
7622 +---------------------------------------+
7623 | Local variable space (L) | 24+P+A
7624 +---------------------------------------+
7625 | Float/int conversion temporary (X) | 24+P+A+L
7626 +---------------------------------------+
7627 | Save area for AltiVec registers (W) | 24+P+A+L+X
7628 +---------------------------------------+
7629 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7630 +---------------------------------------+
7631 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7632 +---------------------------------------+
7633 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
7634 +---------------------------------------+
7635 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
7636 +---------------------------------------+
7637 old SP->| back chain to caller's caller |
7638 +---------------------------------------+
7640 The required alignment for AIX configurations is two words (i.e., 8
7644 V.4 stack frames look like:
7646 SP----> +---------------------------------------+
7647 | back chain to caller | 0
7648 +---------------------------------------+
7649 | caller's saved LR | 4
7650 +---------------------------------------+
7651 | Parameter save area (P) | 8
7652 +---------------------------------------+
7653 | Alloca space (A) | 8+P
7654 +---------------------------------------+
7655 | Varargs save area (V) | 8+P+A
7656 +---------------------------------------+
7657 | Local variable space (L) | 8+P+A+V
7658 +---------------------------------------+
7659 | Float/int conversion temporary (X) | 8+P+A+V+L
7660 +---------------------------------------+
7661 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7662 +---------------------------------------+
7663 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7664 +---------------------------------------+
7665 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7666 +---------------------------------------+
7667 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7668 +---------------------------------------+
7669 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7670 +---------------------------------------+
7671 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7672 +---------------------------------------+
7673 old SP->| back chain to caller's caller |
7674 +---------------------------------------+
7676 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7677 given. (But note below and in sysv4.h that we require only 8 and
7678 may round up the size of our stack frame anyways. The historical
7679 reason is early versions of powerpc-linux which didn't properly
7680 align the stack at program startup. A happy side-effect is that
7681 -mno-eabi libraries can be used with -meabi programs.)
7683 The EABI configuration defaults to the V.4 layout, unless
7684 -mcall-aix is used, in which case the AIX layout is used. However,
7685 the stack alignment requirements may differ. If -mno-eabi is not
7686 given, the required stack alignment is 8 bytes; if -mno-eabi is
7687 given, the required alignment is 16 bytes. (But see V.4 comment
7690 #ifndef ABI_STACK_BOUNDARY
7691 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7695 rs6000_stack_info ()
7697 static rs6000_stack_t info, zero_info;
7698 rs6000_stack_t *info_ptr = &info;
7699 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7700 enum rs6000_abi abi;
7704 /* Zero all fields portably. */
7707 /* Select which calling sequence. */
7708 info_ptr->abi = abi = DEFAULT_ABI;
7710 /* Calculate which registers need to be saved & save area size. */
7711 info_ptr->first_gp_reg_save = first_reg_to_save ();
7712 /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM,
7713 even if it currently looks like we won't. */
7714 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7715 || (flag_pic == 1 && abi == ABI_V4)
7716 || (flag_pic && abi == ABI_DARWIN))
7717 && info_ptr->first_gp_reg_save > PIC_OFFSET_TABLE_REGNUM)
7718 info_ptr->gp_size = reg_size * (32 - PIC_OFFSET_TABLE_REGNUM);
7720 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7722 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7723 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7725 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7726 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7727 - info_ptr->first_altivec_reg_save);
7729 /* Does this function call anything? */
7730 info_ptr->calls_p = (! current_function_is_leaf
7731 || cfun->machine->ra_needs_full_frame);
7733 /* Determine if we need to save the link register. */
7734 if (rs6000_ra_ever_killed ()
7735 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7736 #ifdef TARGET_RELOCATABLE
7737 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7739 || (info_ptr->first_fp_reg_save != 64
7740 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7741 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7742 || (abi == ABI_V4 && current_function_calls_alloca)
7743 || (DEFAULT_ABI == ABI_DARWIN
7745 && current_function_uses_pic_offset_table)
7746 || info_ptr->calls_p)
7748 info_ptr->lr_save_p = 1;
7749 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7752 /* Determine if we need to save the condition code registers. */
7753 if (regs_ever_live[CR2_REGNO]
7754 || regs_ever_live[CR3_REGNO]
7755 || regs_ever_live[CR4_REGNO])
7757 info_ptr->cr_save_p = 1;
7759 info_ptr->cr_size = reg_size;
7762 /* If the current function calls __builtin_eh_return, then we need
7763 to allocate stack space for registers that will hold data for
7764 the exception handler. */
7765 if (current_function_calls_eh_return)
7768 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7770 ehrd_size = i * UNITS_PER_WORD;
7775 /* Determine various sizes. */
7776 info_ptr->reg_size = reg_size;
7777 info_ptr->fixed_size = RS6000_SAVE_AREA;
7778 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7779 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7780 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7783 if (TARGET_ALTIVEC_ABI)
7785 info_ptr->vrsave_mask = compute_vrsave_mask ();
7786 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7790 info_ptr->vrsave_mask = 0;
7791 info_ptr->vrsave_size = 0;
7794 /* Calculate the offsets. */
7802 case ABI_AIX_NODESC:
7804 info_ptr->fp_save_offset = - info_ptr->fp_size;
7805 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7807 if (TARGET_ALTIVEC_ABI)
7809 info_ptr->vrsave_save_offset
7810 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7812 /* Align stack so vector save area is on a quadword boundary. */
7813 if (info_ptr->altivec_size != 0)
7814 info_ptr->altivec_padding_size
7815 = 16 - (-info_ptr->vrsave_save_offset % 16);
7817 info_ptr->altivec_padding_size = 0;
7819 info_ptr->altivec_save_offset
7820 = info_ptr->vrsave_save_offset
7821 - info_ptr->altivec_padding_size
7822 - info_ptr->altivec_size;
7824 /* Adjust for AltiVec case. */
7825 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7828 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7829 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7830 info_ptr->lr_save_offset = 2*reg_size;
7834 info_ptr->fp_save_offset = - info_ptr->fp_size;
7835 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7836 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7838 if (TARGET_ALTIVEC_ABI)
7840 info_ptr->vrsave_save_offset
7841 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7843 /* Align stack so vector save area is on a quadword boundary. */
7844 if (info_ptr->altivec_size != 0)
7845 info_ptr->altivec_padding_size
7846 = 16 - (-info_ptr->vrsave_save_offset % 16);
7848 info_ptr->altivec_padding_size = 0;
7850 info_ptr->altivec_save_offset
7851 = info_ptr->vrsave_save_offset
7852 - info_ptr->altivec_padding_size
7853 - info_ptr->altivec_size;
7855 /* Adjust for AltiVec case. */
7856 info_ptr->toc_save_offset
7857 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7860 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7861 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7862 info_ptr->lr_save_offset = reg_size;
7866 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7868 + info_ptr->altivec_size
7869 + info_ptr->altivec_padding_size
7870 + info_ptr->vrsave_size
7874 + info_ptr->vrsave_size
7875 + info_ptr->toc_size,
7876 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7879 total_raw_size = (info_ptr->vars_size
7880 + info_ptr->parm_size
7881 + info_ptr->save_size
7882 + info_ptr->varargs_size
7883 + info_ptr->fixed_size);
7885 info_ptr->total_size =
7886 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7888 /* Determine if we need to allocate any stack frame:
7890 For AIX we need to push the stack if a frame pointer is needed
7891 (because the stack might be dynamically adjusted), if we are
7892 debugging, if we make calls, or if the sum of fp_save, gp_save,
7893 and local variables are more than the space needed to save all
7894 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7895 + 18*8 = 288 (GPR13 reserved).
7897 For V.4 we don't have the stack cushion that AIX uses, but assume
7898 that the debugger can handle stackless frames. */
7900 if (info_ptr->calls_p)
7901 info_ptr->push_p = 1;
7903 else if (abi == ABI_V4)
7904 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
7907 info_ptr->push_p = (frame_pointer_needed
7908 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7909 || ((total_raw_size - info_ptr->fixed_size)
7910 > (TARGET_32BIT ? 220 : 288)));
7912 /* Zero offsets if we're not saving those registers. */
7913 if (info_ptr->fp_size == 0)
7914 info_ptr->fp_save_offset = 0;
7916 if (info_ptr->gp_size == 0)
7917 info_ptr->gp_save_offset = 0;
7919 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7920 info_ptr->altivec_save_offset = 0;
7922 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7923 info_ptr->vrsave_save_offset = 0;
7925 if (! info_ptr->lr_save_p)
7926 info_ptr->lr_save_offset = 0;
7928 if (! info_ptr->cr_save_p)
7929 info_ptr->cr_save_offset = 0;
7931 if (! info_ptr->toc_save_p)
7932 info_ptr->toc_save_offset = 0;
7938 debug_stack_info (info)
7939 rs6000_stack_t *info;
7941 const char *abi_string;
7944 info = rs6000_stack_info ();
7946 fprintf (stderr, "\nStack information for function %s:\n",
7947 ((current_function_decl && DECL_NAME (current_function_decl))
7948 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7953 default: abi_string = "Unknown"; break;
7954 case ABI_NONE: abi_string = "NONE"; break;
7956 case ABI_AIX_NODESC: abi_string = "AIX"; break;
7957 case ABI_DARWIN: abi_string = "Darwin"; break;
7958 case ABI_V4: abi_string = "V.4"; break;
7961 fprintf (stderr, "\tABI = %5s\n", abi_string);
7963 if (TARGET_ALTIVEC_ABI)
7964 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
7966 if (info->first_gp_reg_save != 32)
7967 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
7969 if (info->first_fp_reg_save != 64)
7970 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
7972 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7973 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7974 info->first_altivec_reg_save);
7976 if (info->lr_save_p)
7977 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
7979 if (info->cr_save_p)
7980 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
7982 if (info->toc_save_p)
7983 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
7985 if (info->vrsave_mask)
7986 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
7989 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
7992 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
7994 if (info->gp_save_offset)
7995 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
7997 if (info->fp_save_offset)
7998 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
8000 if (info->altivec_save_offset)
8001 fprintf (stderr, "\taltivec_save_offset = %5d\n",
8002 info->altivec_save_offset);
8004 if (info->vrsave_save_offset)
8005 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
8006 info->vrsave_save_offset);
8008 if (info->lr_save_offset)
8009 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
8011 if (info->cr_save_offset)
8012 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
8014 if (info->toc_save_offset)
8015 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
8017 if (info->varargs_save_offset)
8018 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
8020 if (info->total_size)
8021 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
8023 if (info->varargs_size)
8024 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
8026 if (info->vars_size)
8027 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
8029 if (info->parm_size)
8030 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
8032 if (info->fixed_size)
8033 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
8036 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
8039 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
8041 if (info->altivec_size)
8042 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
8044 if (info->vrsave_size)
8045 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
8047 if (info->altivec_padding_size)
8048 fprintf (stderr, "\taltivec_padding_size= %5d\n",
8049 info->altivec_padding_size);
8052 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
8055 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
8058 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
8060 if (info->save_size)
8061 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
8063 if (info->reg_size != 4)
8064 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
8066 fprintf (stderr, "\n");
8070 rs6000_return_addr (count, frame)
8074 /* Currently we don't optimize very well between prolog and body
8075 code and for PIC code the code can be actually quite bad, so
8076 don't try to be too clever here. */
8077 if (count != 0 || flag_pic != 0)
8079 cfun->machine->ra_needs_full_frame = 1;
8086 plus_constant (copy_to_reg
8087 (gen_rtx_MEM (Pmode,
8088 memory_address (Pmode, frame))),
8089 RETURN_ADDRESS_OFFSET)));
8092 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
8096 rs6000_ra_ever_killed ()
8100 #ifdef ASM_OUTPUT_MI_THUNK
8101 if (current_function_is_thunk)
8104 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8105 || cfun->machine->ra_needs_full_frame)
8106 return regs_ever_live[LINK_REGISTER_REGNUM];
8108 push_topmost_sequence ();
8110 pop_topmost_sequence ();
8112 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8116 /* Add a REG_MAYBE_DEAD note to the insn. */
8118 rs6000_maybe_dead (insn)
8121 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8126 /* Emit instructions needed to load the TOC register.
8127 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8128 a constant pool; or for SVR4 -fpic. */
8131 rs6000_emit_load_toc_table (fromprolog)
8135 dest = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
8137 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
8139 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8141 rtx temp = (fromprolog
8142 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8143 : gen_reg_rtx (Pmode));
8144 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
8145 rs6000_maybe_dead (emit_move_insn (dest, temp));
8147 else if (flag_pic == 2)
8150 rtx tempLR = (fromprolog
8151 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8152 : gen_reg_rtx (Pmode));
8153 rtx temp0 = (fromprolog
8154 ? gen_rtx_REG (Pmode, 0)
8155 : gen_reg_rtx (Pmode));
8158 /* possibly create the toc section */
8159 if (! toc_initialized)
8162 function_section (current_function_decl);
8169 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
8170 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8172 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
8173 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8175 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
8177 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8178 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
8185 static int reload_toc_labelno = 0;
8187 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
8189 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
8190 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8192 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
8195 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8196 rs6000_maybe_dead (emit_move_insn (temp0,
8197 gen_rtx_MEM (Pmode, dest)));
8199 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8201 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8203 /* This is for AIX code running in non-PIC ELF. */
8206 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8207 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8209 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8210 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
8218 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8220 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
8225 get_TOC_alias_set ()
8227 static int set = -1;
8229 set = new_alias_set ();
8233 /* This retuns nonzero if the current function uses the TOC. This is
8234 determined by the presence of (unspec ... 7), which is generated by
8235 the various load_toc_* patterns. */
8242 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8245 rtx pat = PATTERN (insn);
8248 if (GET_CODE (pat) == PARALLEL)
8249 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8250 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8251 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8258 create_TOC_reference (symbol)
8261 return gen_rtx_PLUS (Pmode,
8262 gen_rtx_REG (Pmode, TOC_REGISTER),
8263 gen_rtx_CONST (Pmode,
8264 gen_rtx_MINUS (Pmode, symbol,
8265 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8269 /* __throw will restore its own return address to be the same as the
8270 return address of the function that the throw is being made to.
8271 This is unfortunate, because we want to check the original
8272 return address to see if we need to restore the TOC.
8273 So we have to squirrel it away here.
8274 This is used only in compiling __throw and __rethrow.
8276 Most of this code should be removed by CSE. */
8277 static rtx insn_after_throw;
8279 /* This does the saving... */
8281 rs6000_aix_emit_builtin_unwind_init ()
8284 rtx stack_top = gen_reg_rtx (Pmode);
8285 rtx opcode_addr = gen_reg_rtx (Pmode);
8287 insn_after_throw = gen_reg_rtx (SImode);
8289 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8290 emit_move_insn (stack_top, mem);
8292 mem = gen_rtx_MEM (Pmode,
8293 gen_rtx_PLUS (Pmode, stack_top,
8294 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8295 emit_move_insn (opcode_addr, mem);
8296 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8299 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8300 in _eh.o). Only used on AIX.
8302 The idea is that on AIX, function calls look like this:
8303 bl somefunction-trampoline
8307 somefunction-trampoline:
8309 ... load function address in the count register ...
8311 or like this, if the linker determines that this is not a cross-module call
8312 and so the TOC need not be restored:
8315 or like this, if the compiler could determine that this is not a
8318 now, the tricky bit here is that register 2 is saved and restored
8319 by the _linker_, so we can't readily generate debugging information
8320 for it. So we need to go back up the call chain looking at the
8321 insns at return addresses to see which calls saved the TOC register
8322 and so see where it gets restored from.
8324 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8325 just before the actual epilogue.
8327 On the bright side, this incurs no space or time overhead unless an
8328 exception is thrown, except for the extra code in libgcc.a.
8330 The parameter STACKSIZE is a register containing (at runtime)
8331 the amount to be popped off the stack in addition to the stack frame
8332 of this routine (which will be __throw or __rethrow, and so is
8333 guaranteed to have a stack frame). */
8336 rs6000_emit_eh_toc_restore (stacksize)
8340 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8341 rtx tocompare = gen_reg_rtx (SImode);
8342 rtx opcode = gen_reg_rtx (SImode);
8343 rtx opcode_addr = gen_reg_rtx (Pmode);
8345 rtx loop_start = gen_label_rtx ();
8346 rtx no_toc_restore_needed = gen_label_rtx ();
8347 rtx loop_exit = gen_label_rtx ();
8349 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8350 set_mem_alias_set (mem, rs6000_sr_alias_set);
8351 emit_move_insn (bottom_of_stack, mem);
8353 top_of_stack = expand_binop (Pmode, add_optab,
8354 bottom_of_stack, stacksize,
8355 NULL_RTX, 1, OPTAB_WIDEN);
8357 emit_move_insn (tocompare,
8358 GEN_INT (trunc_int_for_mode (TARGET_32BIT
8360 : 0xE8410028, SImode)));
8362 if (insn_after_throw == NULL_RTX)
8364 emit_move_insn (opcode, insn_after_throw);
8366 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8367 emit_label (loop_start);
8369 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8370 SImode, NULL_RTX, NULL_RTX,
8371 no_toc_restore_needed);
8373 mem = gen_rtx_MEM (Pmode,
8374 gen_rtx_PLUS (Pmode, bottom_of_stack,
8375 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8376 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8378 emit_label (no_toc_restore_needed);
8379 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8380 Pmode, NULL_RTX, NULL_RTX,
8383 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8384 set_mem_alias_set (mem, rs6000_sr_alias_set);
8385 emit_move_insn (bottom_of_stack, mem);
8387 mem = gen_rtx_MEM (Pmode,
8388 gen_rtx_PLUS (Pmode, bottom_of_stack,
8389 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8390 emit_move_insn (opcode_addr, mem);
8391 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8393 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8394 emit_jump (loop_start);
8395 emit_note (NULL, NOTE_INSN_LOOP_END);
8396 emit_label (loop_exit);
8398 #endif /* TARGET_AIX */
8400 /* This ties together stack memory (MEM with an alias set of
8401 rs6000_sr_alias_set) and the change to the stack pointer. */
8404 rs6000_emit_stack_tie ()
8406 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8408 set_mem_alias_set (mem, rs6000_sr_alias_set);
8409 emit_insn (gen_stack_tie (mem));
8412 /* Emit the correct code for allocating stack space, as insns.
8413 If COPY_R12, make sure a copy of the old frame is left in r12.
8414 The generated code may use hard register 0 as a temporary. */
8417 rs6000_emit_allocate_stack (size, copy_r12)
8422 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8423 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
8424 rtx todec = GEN_INT (-size);
8426 if (current_function_limit_stack)
8428 if (REG_P (stack_limit_rtx)
8429 && REGNO (stack_limit_rtx) > 1
8430 && REGNO (stack_limit_rtx) <= 31)
8432 emit_insn (Pmode == SImode
8433 ? gen_addsi3 (tmp_reg,
8436 : gen_adddi3 (tmp_reg,
8440 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8443 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8445 && DEFAULT_ABI == ABI_V4)
8447 rtx toload = gen_rtx_CONST (VOIDmode,
8448 gen_rtx_PLUS (Pmode,
8452 emit_insn (gen_elf_high (tmp_reg, toload));
8453 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8454 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8458 warning ("stack limit expression is not supported");
8461 if (copy_r12 || ! TARGET_UPDATE)
8462 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8468 /* Need a note here so that try_split doesn't get confused. */
8469 if (get_last_insn() == NULL_RTX)
8470 emit_note (0, NOTE_INSN_DELETED);
8471 insn = emit_move_insn (tmp_reg, todec);
8472 try_split (PATTERN (insn), insn, 0);
8476 if (Pmode == SImode)
8477 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8480 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
8485 if (Pmode == SImode)
8486 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8488 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8489 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8490 gen_rtx_REG (Pmode, 12));
8493 RTX_FRAME_RELATED_P (insn) = 1;
8495 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8496 gen_rtx_SET (VOIDmode, stack_reg,
8497 gen_rtx_PLUS (Pmode, stack_reg,
8502 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8505 (mem (plus (blah) (regXX)))
8509 (mem (plus (blah) (const VALUE_OF_REGXX))). */
8512 altivec_frame_fixup (insn, reg, val)
8518 real = copy_rtx (PATTERN (insn));
8520 real = replace_rtx (real, reg, GEN_INT (val));
8522 RTX_FRAME_RELATED_P (insn) = 1;
8523 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8528 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8529 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8530 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8531 deduce these equivalences by itself so it wasn't necessary to hold
8532 its hand so much. */
8535 rs6000_frame_related (insn, reg, val, reg2, rreg)
8544 real = copy_rtx (PATTERN (insn));
8546 real = replace_rtx (real, reg,
8547 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8548 STACK_POINTER_REGNUM),
8551 /* We expect that 'real' is either a SET or a PARALLEL containing
8552 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8553 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8555 if (GET_CODE (real) == SET)
8559 temp = simplify_rtx (SET_SRC (set));
8561 SET_SRC (set) = temp;
8562 temp = simplify_rtx (SET_DEST (set));
8564 SET_DEST (set) = temp;
8565 if (GET_CODE (SET_DEST (set)) == MEM)
8567 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8569 XEXP (SET_DEST (set), 0) = temp;
8572 else if (GET_CODE (real) == PARALLEL)
8575 for (i = 0; i < XVECLEN (real, 0); i++)
8576 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8578 rtx set = XVECEXP (real, 0, i);
8580 temp = simplify_rtx (SET_SRC (set));
8582 SET_SRC (set) = temp;
8583 temp = simplify_rtx (SET_DEST (set));
8585 SET_DEST (set) = temp;
8586 if (GET_CODE (SET_DEST (set)) == MEM)
8588 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8590 XEXP (SET_DEST (set), 0) = temp;
8592 RTX_FRAME_RELATED_P (set) = 1;
8598 if (reg2 != NULL_RTX)
8599 real = replace_rtx (real, reg2, rreg);
8601 RTX_FRAME_RELATED_P (insn) = 1;
8602 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8607 /* Returns an insn that has a vrsave set operation with the
8608 appropriate CLOBBERs. */
8611 generate_set_vrsave (reg, info, epiloguep)
8613 rs6000_stack_t *info;
8617 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8618 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8621 = gen_rtx_SET (VOIDmode,
8623 gen_rtx_UNSPEC_VOLATILE (SImode,
8624 gen_rtvec (2, reg, vrsave),
8629 /* We need to clobber the registers in the mask so the scheduler
8630 does not move sets to VRSAVE before sets of AltiVec registers.
8632 However, if the function receives nonlocal gotos, reload will set
8633 all call saved registers live. We will end up with:
8635 (set (reg 999) (mem))
8636 (parallel [ (set (reg vrsave) (unspec blah))
8637 (clobber (reg 999))])
8639 The clobber will cause the store into reg 999 to be dead, and
8640 flow will attempt to delete an epilogue insn. In this case, we
8641 need an unspec use/set of the register. */
8643 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8644 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8646 if (!epiloguep || call_used_regs [i])
8647 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8648 gen_rtx_REG (V4SImode, i));
8651 rtx reg = gen_rtx_REG (V4SImode, i);
8654 = gen_rtx_SET (VOIDmode,
8656 gen_rtx_UNSPEC (V4SImode,
8657 gen_rtvec (1, reg), 27));
8661 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8663 for (i = 0; i < nclobs; ++i)
8664 XVECEXP (insn, 0, i) = clobs[i];
8669 /* Emit function prologue as insns. */
8672 rs6000_emit_prologue ()
8674 rs6000_stack_t *info = rs6000_stack_info ();
8675 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8676 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8677 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8678 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8679 rtx frame_reg_rtx = sp_reg_rtx;
8680 rtx cr_save_rtx = NULL;
8682 int saving_FPRs_inline;
8683 int using_store_multiple;
8684 HOST_WIDE_INT sp_offset = 0;
8686 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8687 && info->first_gp_reg_save < 31);
8688 saving_FPRs_inline = (info->first_fp_reg_save == 64
8689 || FP_SAVE_INLINE (info->first_fp_reg_save));
8691 /* For V.4, update stack before we do any saving and set back pointer. */
8692 if (info->push_p && DEFAULT_ABI == ABI_V4)
8694 if (info->total_size < 32767)
8695 sp_offset = info->total_size;
8697 frame_reg_rtx = frame_ptr_rtx;
8698 rs6000_emit_allocate_stack (info->total_size,
8699 (frame_reg_rtx != sp_reg_rtx
8702 || info->first_fp_reg_save < 64
8703 || info->first_gp_reg_save < 32
8705 if (frame_reg_rtx != sp_reg_rtx)
8706 rs6000_emit_stack_tie ();
8709 /* Save AltiVec registers if needed. */
8710 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8714 /* There should be a non inline version of this, for when we
8715 are saving lots of vector registers. */
8716 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8717 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8719 rtx areg, savereg, mem;
8722 offset = info->altivec_save_offset + sp_offset
8723 + 16 * (i - info->first_altivec_reg_save);
8725 savereg = gen_rtx_REG (V4SImode, i);
8727 areg = gen_rtx_REG (Pmode, 0);
8728 emit_move_insn (areg, GEN_INT (offset));
8730 /* AltiVec addressing mode is [reg+reg]. */
8731 mem = gen_rtx_MEM (V4SImode,
8732 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8734 set_mem_alias_set (mem, rs6000_sr_alias_set);
8736 insn = emit_move_insn (mem, savereg);
8738 altivec_frame_fixup (insn, areg, offset);
8742 /* VRSAVE is a bit vector representing which AltiVec registers
8743 are used. The OS uses this to determine which vector
8744 registers to save on a context switch. We need to save
8745 VRSAVE on the stack frame, add whatever AltiVec registers we
8746 used in this function, and do the corresponding magic in the
8749 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8751 rtx reg, mem, vrsave;
8754 /* Get VRSAVE onto a GPR. */
8755 reg = gen_rtx_REG (SImode, 12);
8756 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8758 emit_insn (gen_get_vrsave_internal (reg));
8760 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
8763 offset = info->vrsave_save_offset + sp_offset;
8765 = gen_rtx_MEM (SImode,
8766 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8767 set_mem_alias_set (mem, rs6000_sr_alias_set);
8768 insn = emit_move_insn (mem, reg);
8770 /* Include the registers in the mask. */
8771 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8773 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8776 /* If we use the link register, get it into r0. */
8777 if (info->lr_save_p)
8778 emit_move_insn (gen_rtx_REG (Pmode, 0),
8779 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8781 /* If we need to save CR, put it into r12. */
8782 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8784 cr_save_rtx = gen_rtx_REG (SImode, 12);
8785 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8788 /* Do any required saving of fpr's. If only one or two to save, do
8789 it ourselves. Otherwise, call function. */
8790 if (saving_FPRs_inline)
8793 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8794 if ((regs_ever_live[info->first_fp_reg_save+i]
8795 && ! call_used_regs[info->first_fp_reg_save+i]))
8798 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8799 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8800 GEN_INT (info->fp_save_offset
8803 mem = gen_rtx_MEM (DFmode, addr);
8804 set_mem_alias_set (mem, rs6000_sr_alias_set);
8806 insn = emit_move_insn (mem, reg);
8807 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8808 NULL_RTX, NULL_RTX);
8811 else if (info->first_fp_reg_save != 64)
8815 const char *alloc_rname;
8817 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8819 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8821 LINK_REGISTER_REGNUM));
8822 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8823 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8824 alloc_rname = ggc_strdup (rname);
8825 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8826 gen_rtx_SYMBOL_REF (Pmode,
8828 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8831 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8832 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8833 GEN_INT (info->fp_save_offset
8834 + sp_offset + 8*i));
8835 mem = gen_rtx_MEM (DFmode, addr);
8836 set_mem_alias_set (mem, rs6000_sr_alias_set);
8838 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8840 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8841 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8842 NULL_RTX, NULL_RTX);
8845 /* Save GPRs. This is done as a PARALLEL if we are using
8846 the store-multiple instructions. */
8847 if (using_store_multiple)
8851 p = rtvec_alloc (32 - info->first_gp_reg_save);
8852 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8853 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8856 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8857 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8858 GEN_INT (info->gp_save_offset
8861 mem = gen_rtx_MEM (reg_mode, addr);
8862 set_mem_alias_set (mem, rs6000_sr_alias_set);
8864 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8866 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8867 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8868 NULL_RTX, NULL_RTX);
8873 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8874 if ((regs_ever_live[info->first_gp_reg_save+i]
8875 && ! call_used_regs[info->first_gp_reg_save+i])
8876 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
8877 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8878 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8881 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8882 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8883 GEN_INT (info->gp_save_offset
8886 mem = gen_rtx_MEM (reg_mode, addr);
8887 set_mem_alias_set (mem, rs6000_sr_alias_set);
8889 insn = emit_move_insn (mem, reg);
8890 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8891 NULL_RTX, NULL_RTX);
8895 /* ??? There's no need to emit actual instructions here, but it's the
8896 easiest way to get the frame unwind information emitted. */
8897 if (current_function_calls_eh_return)
8899 unsigned int i, regno;
8905 regno = EH_RETURN_DATA_REGNO (i);
8906 if (regno == INVALID_REGNUM)
8909 reg = gen_rtx_REG (reg_mode, regno);
8910 addr = plus_constant (frame_reg_rtx,
8911 info->ehrd_offset + sp_offset
8912 + reg_size * (int) i);
8913 mem = gen_rtx_MEM (reg_mode, addr);
8914 set_mem_alias_set (mem, rs6000_sr_alias_set);
8916 insn = emit_move_insn (mem, reg);
8917 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8918 NULL_RTX, NULL_RTX);
8922 /* Save lr if we used it. */
8923 if (info->lr_save_p)
8925 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8926 GEN_INT (info->lr_save_offset + sp_offset));
8927 rtx reg = gen_rtx_REG (Pmode, 0);
8928 rtx mem = gen_rtx_MEM (Pmode, addr);
8929 /* This should not be of rs6000_sr_alias_set, because of
8930 __builtin_return_address. */
8932 insn = emit_move_insn (mem, reg);
8933 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8934 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8937 /* Save CR if we use any that must be preserved. */
8938 if (info->cr_save_p)
8940 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8941 GEN_INT (info->cr_save_offset + sp_offset));
8942 rtx mem = gen_rtx_MEM (SImode, addr);
8944 set_mem_alias_set (mem, rs6000_sr_alias_set);
8946 /* If r12 was used to hold the original sp, copy cr into r0 now
8948 if (REGNO (frame_reg_rtx) == 12)
8950 cr_save_rtx = gen_rtx_REG (SImode, 0);
8951 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8953 insn = emit_move_insn (mem, cr_save_rtx);
8955 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8956 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8957 OK. All we have to do is specify that _one_ condition code
8958 register is saved in this stack slot. The thrower's epilogue
8959 will then restore all the call-saved registers.
8960 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8961 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8962 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8965 /* Update stack and set back pointer unless this is V.4,
8966 for which it was done previously. */
8967 if (info->push_p && DEFAULT_ABI != ABI_V4)
8968 rs6000_emit_allocate_stack (info->total_size, FALSE);
8970 /* Set frame pointer, if needed. */
8971 if (frame_pointer_needed)
8973 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8975 RTX_FRAME_RELATED_P (insn) = 1;
8978 /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8979 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8980 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8981 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM]))
8983 /* If emit_load_toc_table will use the link register, we need to save
8984 it. We use R11 for this purpose because emit_load_toc_table
8985 can use register 0. This allows us to use a plain 'blr' to return
8986 from the procedure more often. */
8987 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
8988 && ! info->lr_save_p
8989 && EXIT_BLOCK_PTR->pred != NULL);
8990 if (save_LR_around_toc_setup)
8991 emit_move_insn (gen_rtx_REG (Pmode, 11),
8992 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8994 rs6000_emit_load_toc_table (TRUE);
8996 if (save_LR_around_toc_setup)
8997 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8998 gen_rtx_REG (Pmode, 11));
9001 if (DEFAULT_ABI == ABI_DARWIN
9002 && flag_pic && current_function_uses_pic_offset_table)
9004 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9006 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
9009 emit_move_insn (gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM),
9010 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
9014 /* Write function prologue. */
9017 rs6000_output_function_prologue (file, size)
9019 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9021 rs6000_stack_t *info = rs6000_stack_info ();
9023 if (TARGET_DEBUG_STACK)
9024 debug_stack_info (info);
9026 /* Write .extern for any function we will call to save and restore
9028 if (info->first_fp_reg_save < 64
9029 && !FP_SAVE_INLINE (info->first_fp_reg_save))
9030 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
9031 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
9032 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
9035 /* Write .extern for AIX common mode routines, if needed. */
9036 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
9038 fputs ("\t.extern __mulh\n", file);
9039 fputs ("\t.extern __mull\n", file);
9040 fputs ("\t.extern __divss\n", file);
9041 fputs ("\t.extern __divus\n", file);
9042 fputs ("\t.extern __quoss\n", file);
9043 fputs ("\t.extern __quous\n", file);
9044 common_mode_defined = 1;
9047 if (! HAVE_prologue)
9051 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
9052 the "toplevel" insn chain. */
9053 emit_note (0, NOTE_INSN_DELETED);
9054 rs6000_emit_prologue ();
9055 emit_note (0, NOTE_INSN_DELETED);
9057 if (TARGET_DEBUG_STACK)
9058 debug_rtx_list (get_insns (), 100);
9059 final (get_insns (), file, FALSE, FALSE);
9063 rs6000_pic_labelno++;
9066 /* Emit function epilogue as insns.
9068 At present, dwarf2out_frame_debug_expr doesn't understand
9069 register restores, so we don't bother setting RTX_FRAME_RELATED_P
9070 anywhere in the epilogue. Most of the insns below would in any case
9071 need special notes to explain where r11 is in relation to the stack. */
9074 rs6000_emit_epilogue (sibcall)
9077 rs6000_stack_t *info;
9078 int restoring_FPRs_inline;
9079 int using_load_multiple;
9080 int using_mfcr_multiple;
9081 int use_backchain_to_restore_sp;
9083 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
9084 rtx frame_reg_rtx = sp_reg_rtx;
9085 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
9086 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9089 info = rs6000_stack_info ();
9090 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
9091 && info->first_gp_reg_save < 31);
9092 restoring_FPRs_inline = (sibcall
9093 || current_function_calls_eh_return
9094 || info->first_fp_reg_save == 64
9095 || FP_SAVE_INLINE (info->first_fp_reg_save));
9096 use_backchain_to_restore_sp = (frame_pointer_needed
9097 || current_function_calls_alloca
9098 || info->total_size > 32767);
9099 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9100 || rs6000_cpu == PROCESSOR_PPC603
9101 || rs6000_cpu == PROCESSOR_PPC750
9104 /* If we have a frame pointer, a call to alloca, or a large stack
9105 frame, restore the old stack pointer using the backchain. Otherwise,
9106 we know what size to update it with. */
9107 if (use_backchain_to_restore_sp)
9109 /* Under V.4, don't reset the stack pointer until after we're done
9110 loading the saved registers. */
9111 if (DEFAULT_ABI == ABI_V4)
9112 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
9114 emit_move_insn (frame_reg_rtx,
9115 gen_rtx_MEM (Pmode, sp_reg_rtx));
9118 else if (info->push_p)
9120 if (DEFAULT_ABI == ABI_V4)
9121 sp_offset = info->total_size;
9124 emit_insn (TARGET_32BIT
9125 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9126 GEN_INT (info->total_size))
9127 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9128 GEN_INT (info->total_size)));
9132 /* Restore AltiVec registers if needed. */
9133 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9137 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
9138 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9140 rtx addr, areg, mem;
9142 areg = gen_rtx_REG (Pmode, 0);
9144 (areg, GEN_INT (info->altivec_save_offset
9146 + 16 * (i - info->first_altivec_reg_save)));
9148 /* AltiVec addressing mode is [reg+reg]. */
9149 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
9150 mem = gen_rtx_MEM (V4SImode, addr);
9151 set_mem_alias_set (mem, rs6000_sr_alias_set);
9153 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
9157 /* Restore VRSAVE if needed. */
9158 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
9162 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9163 GEN_INT (info->vrsave_save_offset + sp_offset));
9164 mem = gen_rtx_MEM (SImode, addr);
9165 set_mem_alias_set (mem, rs6000_sr_alias_set);
9166 reg = gen_rtx_REG (SImode, 12);
9167 emit_move_insn (reg, mem);
9169 emit_insn (generate_set_vrsave (reg, info, 1));
9172 /* Get the old lr if we saved it. */
9173 if (info->lr_save_p)
9175 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9176 GEN_INT (info->lr_save_offset + sp_offset));
9177 rtx mem = gen_rtx_MEM (Pmode, addr);
9179 set_mem_alias_set (mem, rs6000_sr_alias_set);
9181 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9184 /* Get the old cr if we saved it. */
9185 if (info->cr_save_p)
9187 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9188 GEN_INT (info->cr_save_offset + sp_offset));
9189 rtx mem = gen_rtx_MEM (SImode, addr);
9191 set_mem_alias_set (mem, rs6000_sr_alias_set);
9193 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9196 /* Set LR here to try to overlap restores below. */
9197 if (info->lr_save_p)
9198 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9199 gen_rtx_REG (Pmode, 0));
9201 /* Load exception handler data registers, if needed. */
9202 if (current_function_calls_eh_return)
9204 unsigned int i, regno;
9210 regno = EH_RETURN_DATA_REGNO (i);
9211 if (regno == INVALID_REGNUM)
9214 addr = plus_constant (frame_reg_rtx,
9215 info->ehrd_offset + sp_offset
9216 + reg_size * (int) i);
9217 mem = gen_rtx_MEM (reg_mode, addr);
9218 set_mem_alias_set (mem, rs6000_sr_alias_set);
9220 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9224 /* Restore GPRs. This is done as a PARALLEL if we are using
9225 the load-multiple instructions. */
9226 if (using_load_multiple)
9229 p = rtvec_alloc (32 - info->first_gp_reg_save);
9230 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9232 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9233 GEN_INT (info->gp_save_offset
9236 rtx mem = gen_rtx_MEM (reg_mode, addr);
9238 set_mem_alias_set (mem, rs6000_sr_alias_set);
9241 gen_rtx_SET (VOIDmode,
9242 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9245 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9248 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9249 if ((regs_ever_live[info->first_gp_reg_save+i]
9250 && ! call_used_regs[info->first_gp_reg_save+i])
9251 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
9252 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9253 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9255 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9256 GEN_INT (info->gp_save_offset
9259 rtx mem = gen_rtx_MEM (reg_mode, addr);
9261 set_mem_alias_set (mem, rs6000_sr_alias_set);
9263 emit_move_insn (gen_rtx_REG (reg_mode,
9264 info->first_gp_reg_save + i),
9268 /* Restore fpr's if we need to do it without calling a function. */
9269 if (restoring_FPRs_inline)
9270 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9271 if ((regs_ever_live[info->first_fp_reg_save+i]
9272 && ! call_used_regs[info->first_fp_reg_save+i]))
9275 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9276 GEN_INT (info->fp_save_offset
9279 mem = gen_rtx_MEM (DFmode, addr);
9280 set_mem_alias_set (mem, rs6000_sr_alias_set);
9282 emit_move_insn (gen_rtx_REG (DFmode,
9283 info->first_fp_reg_save + i),
9287 /* If we saved cr, restore it here. Just those that were used. */
9288 if (info->cr_save_p)
9290 rtx r12_rtx = gen_rtx_REG (SImode, 12);
9293 if (using_mfcr_multiple)
9295 for (i = 0; i < 8; i++)
9296 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9302 if (using_mfcr_multiple && count > 1)
9307 p = rtvec_alloc (count);
9310 for (i = 0; i < 8; i++)
9311 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9313 rtvec r = rtvec_alloc (2);
9314 RTVEC_ELT (r, 0) = r12_rtx;
9315 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9316 RTVEC_ELT (p, ndx) =
9317 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9318 gen_rtx_UNSPEC (CCmode, r, 20));
9321 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9326 for (i = 0; i < 8; i++)
9327 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9329 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9335 /* If this is V.4, unwind the stack pointer after all of the loads
9336 have been done. We need to emit a block here so that sched
9337 doesn't decide to move the sp change before the register restores
9338 (which may not have any obvious dependency on the stack). This
9339 doesn't hurt performance, because there is no scheduling that can
9340 be done after this point. */
9341 if (DEFAULT_ABI == ABI_V4)
9343 if (frame_reg_rtx != sp_reg_rtx)
9344 rs6000_emit_stack_tie ();
9346 if (use_backchain_to_restore_sp)
9348 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9350 else if (sp_offset != 0)
9352 emit_insn (Pmode == SImode
9353 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9354 GEN_INT (sp_offset))
9355 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9356 GEN_INT (sp_offset)));
9360 if (current_function_calls_eh_return)
9362 rtx sa = EH_RETURN_STACKADJ_RTX;
9363 emit_insn (Pmode == SImode
9364 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9365 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
9371 if (! restoring_FPRs_inline)
9372 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9374 p = rtvec_alloc (2);
9376 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9377 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9379 LINK_REGISTER_REGNUM));
9381 /* If we have to restore more than two FP registers, branch to the
9382 restore function. It will return to our caller. */
9383 if (! restoring_FPRs_inline)
9387 const char *alloc_rname;
9389 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9390 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9391 alloc_rname = ggc_strdup (rname);
9392 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9393 gen_rtx_SYMBOL_REF (Pmode,
9396 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9399 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9400 GEN_INT (info->fp_save_offset + 8*i));
9401 mem = gen_rtx_MEM (DFmode, addr);
9402 set_mem_alias_set (mem, rs6000_sr_alias_set);
9404 RTVEC_ELT (p, i+3) =
9405 gen_rtx_SET (VOIDmode,
9406 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9411 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9415 /* Write function epilogue. */
9418 rs6000_output_function_epilogue (file, size)
9420 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9422 rs6000_stack_t *info = rs6000_stack_info ();
9423 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9425 if (! HAVE_epilogue)
9427 rtx insn = get_last_insn ();
9428 /* If the last insn was a BARRIER, we don't have to write anything except
9430 if (GET_CODE (insn) == NOTE)
9431 insn = prev_nonnote_insn (insn);
9432 if (insn == 0 || GET_CODE (insn) != BARRIER)
9434 /* This is slightly ugly, but at least we don't have two
9435 copies of the epilogue-emitting code. */
9438 /* A NOTE_INSN_DELETED is supposed to be at the start
9439 and end of the "toplevel" insn chain. */
9440 emit_note (0, NOTE_INSN_DELETED);
9441 rs6000_emit_epilogue (FALSE);
9442 emit_note (0, NOTE_INSN_DELETED);
9444 if (TARGET_DEBUG_STACK)
9445 debug_rtx_list (get_insns (), 100);
9446 final (get_insns (), file, FALSE, FALSE);
9451 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9454 We don't output a traceback table if -finhibit-size-directive was
9455 used. The documentation for -finhibit-size-directive reads
9456 ``don't output a @code{.size} assembler directive, or anything
9457 else that would cause trouble if the function is split in the
9458 middle, and the two halves are placed at locations far apart in
9459 memory.'' The traceback table has this property, since it
9460 includes the offset from the start of the function to the
9461 traceback table itself.
9463 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9464 different traceback table. */
9465 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9467 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9468 const char *language_string = lang_hooks.name;
9469 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9472 while (*fname == '.') /* V.4 encodes . in the name */
9475 /* Need label immediately before tbtab, so we can compute its offset
9476 from the function start. */
9479 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9480 ASM_OUTPUT_LABEL (file, fname);
9482 /* The .tbtab pseudo-op can only be used for the first eight
9483 expressions, since it can't handle the possibly variable
9484 length fields that follow. However, if you omit the optional
9485 fields, the assembler outputs zeros for all optional fields
9486 anyways, giving each variable length field is minimum length
9487 (as defined in sys/debug.h). Thus we can not use the .tbtab
9488 pseudo-op at all. */
9490 /* An all-zero word flags the start of the tbtab, for debuggers
9491 that have to find it by searching forward from the entry
9492 point or from the current pc. */
9493 fputs ("\t.long 0\n", file);
9495 /* Tbtab format type. Use format type 0. */
9496 fputs ("\t.byte 0,", file);
9498 /* Language type. Unfortunately, there doesn't seem to be any
9499 official way to get this info, so we use language_string. C
9500 is 0. C++ is 9. No number defined for Obj-C, so use the
9501 value for C for now. There is no official value for Java,
9502 although IBM appears to be using 13. There is no official value
9503 for Chill, so we've chosen 44 pseudo-randomly. */
9504 if (! strcmp (language_string, "GNU C")
9505 || ! strcmp (language_string, "GNU Objective-C"))
9507 else if (! strcmp (language_string, "GNU F77"))
9509 else if (! strcmp (language_string, "GNU Ada"))
9511 else if (! strcmp (language_string, "GNU Pascal"))
9513 else if (! strcmp (language_string, "GNU C++"))
9515 else if (! strcmp (language_string, "GNU Java"))
9517 else if (! strcmp (language_string, "GNU CHILL"))
9521 fprintf (file, "%d,", i);
9523 /* 8 single bit fields: global linkage (not set for C extern linkage,
9524 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9525 from start of procedure stored in tbtab, internal function, function
9526 has controlled storage, function has no toc, function uses fp,
9527 function logs/aborts fp operations. */
9528 /* Assume that fp operations are used if any fp reg must be saved. */
9529 fprintf (file, "%d,",
9530 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9532 /* 6 bitfields: function is interrupt handler, name present in
9533 proc table, function calls alloca, on condition directives
9534 (controls stack walks, 3 bits), saves condition reg, saves
9536 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9537 set up as a frame pointer, even when there is no alloca call. */
9538 fprintf (file, "%d,",
9539 ((optional_tbtab << 6)
9540 | ((optional_tbtab & frame_pointer_needed) << 5)
9541 | (info->cr_save_p << 1)
9542 | (info->lr_save_p)));
9544 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9546 fprintf (file, "%d,",
9547 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9549 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9550 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9554 /* Compute the parameter info from the function decl argument
9557 int next_parm_info_bit = 31;
9559 for (decl = DECL_ARGUMENTS (current_function_decl);
9560 decl; decl = TREE_CHAIN (decl))
9562 rtx parameter = DECL_INCOMING_RTL (decl);
9563 enum machine_mode mode = GET_MODE (parameter);
9565 if (GET_CODE (parameter) == REG)
9567 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9575 else if (mode == DFmode)
9580 /* If only one bit will fit, don't or in this entry. */
9581 if (next_parm_info_bit > 0)
9582 parm_info |= (bits << (next_parm_info_bit - 1));
9583 next_parm_info_bit -= 2;
9587 fixed_parms += ((GET_MODE_SIZE (mode)
9588 + (UNITS_PER_WORD - 1))
9590 next_parm_info_bit -= 1;
9596 /* Number of fixed point parameters. */
9597 /* This is actually the number of words of fixed point parameters; thus
9598 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9599 fprintf (file, "%d,", fixed_parms);
9601 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9603 /* This is actually the number of fp registers that hold parameters;
9604 and thus the maximum value is 13. */
9605 /* Set parameters on stack bit if parameters are not in their original
9606 registers, regardless of whether they are on the stack? Xlc
9607 seems to set the bit when not optimizing. */
9608 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9610 if (! optional_tbtab)
9613 /* Optional fields follow. Some are variable length. */
9615 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9617 /* There is an entry for each parameter in a register, in the order that
9618 they occur in the parameter list. Any intervening arguments on the
9619 stack are ignored. If the list overflows a long (max possible length
9620 34 bits) then completely leave off all elements that don't fit. */
9621 /* Only emit this long if there was at least one parameter. */
9622 if (fixed_parms || float_parms)
9623 fprintf (file, "\t.long %d\n", parm_info);
9625 /* Offset from start of code to tb table. */
9626 fputs ("\t.long ", file);
9627 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9629 RS6000_OUTPUT_BASENAME (file, fname);
9631 assemble_name (file, fname);
9635 RS6000_OUTPUT_BASENAME (file, fname);
9637 assemble_name (file, fname);
9641 /* Interrupt handler mask. */
9642 /* Omit this long, since we never set the interrupt handler bit
9645 /* Number of CTL (controlled storage) anchors. */
9646 /* Omit this long, since the has_ctl bit is never set above. */
9648 /* Displacement into stack of each CTL anchor. */
9649 /* Omit this list of longs, because there are no CTL anchors. */
9651 /* Length of function name. */
9652 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9654 /* Function name. */
9655 assemble_string (fname, strlen (fname));
9657 /* Register for alloca automatic storage; this is always reg 31.
9658 Only emit this if the alloca bit was set above. */
9659 if (frame_pointer_needed)
9660 fputs ("\t.byte 31\n", file);
9662 fputs ("\t.align 2\n", file);
9667 /* A C compound statement that outputs the assembler code for a thunk
9668 function, used to implement C++ virtual function calls with
9669 multiple inheritance. The thunk acts as a wrapper around a virtual
9670 function, adjusting the implicit object parameter before handing
9671 control off to the real function.
9673 First, emit code to add the integer DELTA to the location that
9674 contains the incoming first argument. Assume that this argument
9675 contains a pointer, and is the one used to pass the `this' pointer
9676 in C++. This is the incoming argument *before* the function
9677 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9678 values of all other incoming arguments.
9680 After the addition, emit code to jump to FUNCTION, which is a
9681 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9682 not touch the return address. Hence returning from FUNCTION will
9683 return to whoever called the current `thunk'.
9685 The effect must be as if FUNCTION had been called directly with the
9686 adjusted first argument. This macro is responsible for emitting
9687 all of the code for a thunk function; output_function_prologue()
9688 and output_function_epilogue() are not invoked.
9690 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9691 been extracted from it.) It might possibly be useful on some
9692 targets, but probably not.
9694 If you do not define this macro, the target-independent code in the
9695 C++ frontend will generate a less efficient heavyweight thunk that
9696 calls FUNCTION instead of jumping to it. The generic approach does
9697 not support varargs. */
9700 output_mi_thunk (file, thunk_fndecl, delta, function)
9702 tree thunk_fndecl ATTRIBUTE_UNUSED;
9706 const char *this_reg =
9707 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9710 const char *r0 = reg_names[0];
9711 const char *toc = reg_names[2];
9712 const char *schain = reg_names[11];
9713 const char *r12 = reg_names[12];
9715 static int labelno = 0;
9717 /* Small constants that can be done by one add instruction. */
9718 if (delta >= -32768 && delta <= 32767)
9720 if (! TARGET_NEW_MNEMONICS)
9721 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9723 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9726 /* Large constants that can be done by one addis instruction. */
9727 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9728 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9731 /* 32-bit constants that can be done by an add and addis instruction. */
9732 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9734 /* Break into two pieces, propagating the sign bit from the low
9735 word to the upper word. */
9736 int delta_high = delta >> 16;
9737 int delta_low = delta & 0xffff;
9738 if ((delta_low & 0x8000) != 0)
9741 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9744 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9747 if (! TARGET_NEW_MNEMONICS)
9748 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9750 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9753 /* 64-bit constants, fixme */
9757 /* Get the prefix in front of the names. */
9758 switch (DEFAULT_ABI)
9768 case ABI_AIX_NODESC:
9773 /* If the function is compiled in this module, jump to it directly.
9774 Otherwise, load up its address and jump to it. */
9776 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9778 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9779 && ! lookup_attribute ("longcall",
9780 TYPE_ATTRIBUTES (TREE_TYPE (function))))
9782 fprintf (file, "\tb %s", prefix);
9783 assemble_name (file, fname);
9784 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9790 switch (DEFAULT_ABI)
9796 /* Set up a TOC entry for the function. */
9797 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9799 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9802 if (TARGET_MINIMAL_TOC)
9803 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9806 fputs ("\t.tc ", file);
9807 assemble_name (file, fname);
9808 fputs ("[TC],", file);
9810 assemble_name (file, fname);
9813 if (TARGET_MINIMAL_TOC)
9814 asm_fprintf (file, (TARGET_32BIT)
9815 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9816 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9817 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9818 assemble_name (file, buf);
9819 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9820 fputs ("-(.LCTOC1)", file);
9821 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9823 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9827 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9830 asm_fprintf (file, "\tmtctr %s\n", r0);
9832 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9835 asm_fprintf (file, "\tbctr\n");
9838 case ABI_AIX_NODESC:
9840 fprintf (file, "\tb %s", prefix);
9841 assemble_name (file, fname);
9842 if (flag_pic) fputs ("@plt", file);
9848 fprintf (file, "\tb %s", prefix);
9849 if (flag_pic && !machopic_name_defined_p (fname))
9850 assemble_name (file, machopic_stub_name (fname));
9852 assemble_name (file, fname);
9861 /* A quick summary of the various types of 'constant-pool tables'
9864 Target Flags Name One table per
9865 AIX (none) AIX TOC object file
9866 AIX -mfull-toc AIX TOC object file
9867 AIX -mminimal-toc AIX minimal TOC translation unit
9868 SVR4/EABI (none) SVR4 SDATA object file
9869 SVR4/EABI -fpic SVR4 pic object file
9870 SVR4/EABI -fPIC SVR4 PIC translation unit
9871 SVR4/EABI -mrelocatable EABI TOC function
9872 SVR4/EABI -maix AIX TOC object file
9873 SVR4/EABI -maix -mminimal-toc
9874 AIX minimal TOC translation unit
9876 Name Reg. Set by entries contains:
9877 made by addrs? fp? sum?
9879 AIX TOC 2 crt0 as Y option option
9880 AIX minimal TOC 30 prolog gcc Y Y option
9881 SVR4 SDATA 13 crt0 gcc N Y N
9882 SVR4 pic 30 prolog ld Y not yet N
9883 SVR4 PIC 30 prolog gcc Y option option
9884 EABI TOC 30 prolog gcc Y option option
9888 /* Hash table stuff for keeping track of TOC entries. */
9890 struct toc_hash_struct
9892 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9893 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
9895 enum machine_mode key_mode;
9899 static htab_t toc_hash_table;
9901 /* Hash functions for the hash table. */
9904 rs6000_hash_constant (k)
9907 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9908 const char *format = GET_RTX_FORMAT (GET_CODE (k));
9909 int flen = strlen (format);
9912 if (GET_CODE (k) == LABEL_REF)
9913 return result * 1231 + X0INT (XEXP (k, 0), 3);
9915 if (GET_CODE (k) == CONST_DOUBLE)
9917 else if (GET_CODE (k) == CODE_LABEL)
9922 for (; fidx < flen; fidx++)
9923 switch (format[fidx])
9928 const char *str = XSTR (k, fidx);
9930 result = result * 613 + len;
9931 for (i = 0; i < len; i++)
9932 result = result * 613 + (unsigned) str[i];
9937 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9941 result = result * 613 + (unsigned) XINT (k, fidx);
9944 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9945 result = result * 613 + (unsigned) XWINT (k, fidx);
9949 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9950 result = result * 613 + (unsigned) (XWINT (k, fidx)
9961 toc_hash_function (hash_entry)
9962 const void * hash_entry;
9964 const struct toc_hash_struct *thc =
9965 (const struct toc_hash_struct *) hash_entry;
9966 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9969 /* Compare H1 and H2 for equivalence. */
9972 toc_hash_eq (h1, h2)
9976 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9977 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9979 if (((const struct toc_hash_struct *) h1)->key_mode
9980 != ((const struct toc_hash_struct *) h2)->key_mode)
9983 /* Gotcha: One of these const_doubles will be in memory.
9984 The other may be on the constant-pool chain.
9985 So rtx_equal_p will think they are different... */
9988 if (GET_CODE (r1) != GET_CODE (r2)
9989 || GET_MODE (r1) != GET_MODE (r2))
9991 if (GET_CODE (r1) == CONST_DOUBLE)
9993 int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9995 for (i = 1; i < format_len; i++)
9996 if (XWINT (r1, i) != XWINT (r2, i))
10001 else if (GET_CODE (r1) == LABEL_REF)
10002 return (CODE_LABEL_NUMBER (XEXP (r1, 0))
10003 == CODE_LABEL_NUMBER (XEXP (r2, 0)));
10005 return rtx_equal_p (r1, r2);
10008 /* Mark the hash table-entry HASH_ENTRY. */
10011 toc_hash_mark_entry (hash_slot, unused)
10013 void * unused ATTRIBUTE_UNUSED;
10015 const struct toc_hash_struct * hash_entry =
10016 *(const struct toc_hash_struct **) hash_slot;
10017 rtx r = hash_entry->key;
10018 ggc_set_mark (hash_entry);
10019 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
10020 if (GET_CODE (r) == LABEL_REF)
10023 ggc_set_mark (XEXP (r, 0));
10030 /* Mark all the elements of the TOC hash-table *HT. */
10033 toc_hash_mark_table (vht)
10038 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
10041 /* These are the names given by the C++ front-end to vtables, and
10042 vtable-like objects. Ideally, this logic should not be here;
10043 instead, there should be some programmatic way of inquiring as
10044 to whether or not an object is a vtable. */
10046 #define VTABLE_NAME_P(NAME) \
10047 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
10048 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
10049 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
10050 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
10053 rs6000_output_symbol_ref (file, x)
10057 /* Currently C++ toc references to vtables can be emitted before it
10058 is decided whether the vtable is public or private. If this is
10059 the case, then the linker will eventually complain that there is
10060 a reference to an unknown section. Thus, for vtables only,
10061 we emit the TOC reference to reference the symbol and not the
10063 const char *name = XSTR (x, 0);
10065 if (VTABLE_NAME_P (name))
10067 RS6000_OUTPUT_BASENAME (file, name);
10070 assemble_name (file, name);
10073 /* Output a TOC entry. We derive the entry name from what is being
10077 output_toc (file, x, labelno, mode)
10081 enum machine_mode mode;
10084 const char *name = buf;
10085 const char *real_name;
10092 /* When the linker won't eliminate them, don't output duplicate
10093 TOC entries (this happens on AIX if there is any kind of TOC,
10094 and on SVR4 under -fPIC or -mrelocatable). */
10097 struct toc_hash_struct *h;
10100 h = ggc_alloc (sizeof (*h));
10102 h->key_mode = mode;
10103 h->labelno = labelno;
10105 found = htab_find_slot (toc_hash_table, h, 1);
10106 if (*found == NULL)
10108 else /* This is indeed a duplicate.
10109 Set this label equal to that label. */
10111 fputs ("\t.set ", file);
10112 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10113 fprintf (file, "%d,", labelno);
10114 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10115 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
10121 /* If we're going to put a double constant in the TOC, make sure it's
10122 aligned properly when strict alignment is on. */
10123 if (GET_CODE (x) == CONST_DOUBLE
10124 && STRICT_ALIGNMENT
10125 && GET_MODE_BITSIZE (mode) >= 64
10126 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
10127 ASM_OUTPUT_ALIGN (file, 3);
10130 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
10132 /* Handle FP constants specially. Note that if we have a minimal
10133 TOC, things we put here aren't actually in the TOC, so we can allow
10135 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
10137 REAL_VALUE_TYPE rv;
10140 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10141 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
10145 if (TARGET_MINIMAL_TOC)
10146 fputs (DOUBLE_INT_ASM_OP, file);
10148 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10149 fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
10154 if (TARGET_MINIMAL_TOC)
10155 fputs ("\t.long ", file);
10157 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10158 fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
10162 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
10164 REAL_VALUE_TYPE rv;
10167 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10168 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
10172 if (TARGET_MINIMAL_TOC)
10173 fputs (DOUBLE_INT_ASM_OP, file);
10175 fprintf (file, "\t.tc FS_%lx[TC],", l);
10176 fprintf (file, "0x%lx00000000\n", l);
10181 if (TARGET_MINIMAL_TOC)
10182 fputs ("\t.long ", file);
10184 fprintf (file, "\t.tc FS_%lx[TC],", l);
10185 fprintf (file, "0x%lx\n", l);
10189 else if (GET_MODE (x) == VOIDmode
10190 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10192 unsigned HOST_WIDE_INT low;
10193 HOST_WIDE_INT high;
10195 if (GET_CODE (x) == CONST_DOUBLE)
10197 low = CONST_DOUBLE_LOW (x);
10198 high = CONST_DOUBLE_HIGH (x);
10201 #if HOST_BITS_PER_WIDE_INT == 32
10204 high = (low & 0x80000000) ? ~0 : 0;
10208 low = INTVAL (x) & 0xffffffff;
10209 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10213 /* TOC entries are always Pmode-sized, but since this
10214 is a bigendian machine then if we're putting smaller
10215 integer constants in the TOC we have to pad them.
10216 (This is still a win over putting the constants in
10217 a separate constant pool, because then we'd have
10218 to have both a TOC entry _and_ the actual constant.)
10220 For a 32-bit target, CONST_INT values are loaded and shifted
10221 entirely within `low' and can be stored in one TOC entry. */
10223 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10224 abort ();/* It would be easy to make this work, but it doesn't now. */
10226 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10227 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10228 POINTER_SIZE, &low, &high, 0);
10232 if (TARGET_MINIMAL_TOC)
10233 fputs (DOUBLE_INT_ASM_OP, file);
10235 fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long) high, (long) low);
10236 fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
10241 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10243 if (TARGET_MINIMAL_TOC)
10244 fputs ("\t.long ", file);
10246 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10247 (long) high, (long) low);
10248 fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
10252 if (TARGET_MINIMAL_TOC)
10253 fputs ("\t.long ", file);
10255 fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
10256 fprintf (file, "0x%lx\n", (long) low);
10262 if (GET_CODE (x) == CONST)
10264 if (GET_CODE (XEXP (x, 0)) != PLUS)
10267 base = XEXP (XEXP (x, 0), 0);
10268 offset = INTVAL (XEXP (XEXP (x, 0), 1));
10271 if (GET_CODE (base) == SYMBOL_REF)
10272 name = XSTR (base, 0);
10273 else if (GET_CODE (base) == LABEL_REF)
10274 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10275 else if (GET_CODE (base) == CODE_LABEL)
10276 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10280 STRIP_NAME_ENCODING (real_name, name);
10281 if (TARGET_MINIMAL_TOC)
10282 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10285 fprintf (file, "\t.tc %s", real_name);
10288 fprintf (file, ".N%d", - offset);
10290 fprintf (file, ".P%d", offset);
10292 fputs ("[TC],", file);
10295 /* Currently C++ toc references to vtables can be emitted before it
10296 is decided whether the vtable is public or private. If this is
10297 the case, then the linker will eventually complain that there is
10298 a TOC reference to an unknown section. Thus, for vtables only,
10299 we emit the TOC reference to reference the symbol and not the
10301 if (VTABLE_NAME_P (name))
10303 RS6000_OUTPUT_BASENAME (file, name);
10305 fprintf (file, "%d", offset);
10306 else if (offset > 0)
10307 fprintf (file, "+%d", offset);
10310 output_addr_const (file, x);
10314 /* Output an assembler pseudo-op to write an ASCII string of N characters
10315 starting at P to FILE.
10317 On the RS/6000, we have to do this using the .byte operation and
10318 write out special characters outside the quoted string.
10319 Also, the assembler is broken; very long strings are truncated,
10320 so we must artificially break them up early. */
10323 output_ascii (file, p, n)
10329 int i, count_string;
10330 const char *for_string = "\t.byte \"";
10331 const char *for_decimal = "\t.byte ";
10332 const char *to_close = NULL;
10335 for (i = 0; i < n; i++)
10338 if (c >= ' ' && c < 0177)
10341 fputs (for_string, file);
10344 /* Write two quotes to get one. */
10352 for_decimal = "\"\n\t.byte ";
10356 if (count_string >= 512)
10358 fputs (to_close, file);
10360 for_string = "\t.byte \"";
10361 for_decimal = "\t.byte ";
10369 fputs (for_decimal, file);
10370 fprintf (file, "%d", c);
10372 for_string = "\n\t.byte \"";
10373 for_decimal = ", ";
10379 /* Now close the string if we have written one. Then end the line. */
10381 fputs (to_close, file);
10384 /* Generate a unique section name for FILENAME for a section type
10385 represented by SECTION_DESC. Output goes into BUF.
10387 SECTION_DESC can be any string, as long as it is different for each
10388 possible section type.
10390 We name the section in the same manner as xlc. The name begins with an
10391 underscore followed by the filename (after stripping any leading directory
10392 names) with the last period replaced by the string SECTION_DESC. If
10393 FILENAME does not contain a period, SECTION_DESC is appended to the end of
10397 rs6000_gen_section_name (buf, filename, section_desc)
10399 const char *filename;
10400 const char *section_desc;
10402 const char *q, *after_last_slash, *last_period = 0;
10406 after_last_slash = filename;
10407 for (q = filename; *q; q++)
10410 after_last_slash = q + 1;
10411 else if (*q == '.')
10415 len = strlen (after_last_slash) + strlen (section_desc) + 2;
10416 *buf = (char *) permalloc (len);
10421 for (q = after_last_slash; *q; q++)
10423 if (q == last_period)
10425 strcpy (p, section_desc);
10426 p += strlen (section_desc);
10429 else if (ISALNUM (*q))
10433 if (last_period == 0)
10434 strcpy (p, section_desc);
10439 /* Emit profile function. */
10442 output_profile_hook (labelno)
10445 if (DEFAULT_ABI == ABI_AIX)
10448 const char *label_name;
10453 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10454 STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
10455 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10457 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10460 else if (DEFAULT_ABI == ABI_DARWIN)
10462 const char *mcount_name = RS6000_MCOUNT;
10463 int caller_addr_regno = LINK_REGISTER_REGNUM;
10465 /* Be conservative and always set this, at least for now. */
10466 current_function_uses_pic_offset_table = 1;
10469 /* For PIC code, set up a stub and collect the caller's address
10470 from r0, which is where the prologue puts it. */
10473 mcount_name = machopic_stub_name (mcount_name);
10474 if (current_function_uses_pic_offset_table)
10475 caller_addr_regno = 0;
10478 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10480 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10484 /* Write function profiler code. */
10487 output_function_profiler (file, labelno)
10493 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10494 switch (DEFAULT_ABI)
10500 case ABI_AIX_NODESC:
10501 fprintf (file, "\tmflr %s\n", reg_names[0]);
10504 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10505 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10506 reg_names[0], reg_names[1]);
10507 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10508 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10509 assemble_name (file, buf);
10510 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
10512 else if (flag_pic > 1)
10514 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10515 reg_names[0], reg_names[1]);
10516 /* Now, we need to get the address of the label. */
10517 fputs ("\tbl 1f\n\t.long ", file);
10518 assemble_name (file, buf);
10519 fputs ("-.\n1:", file);
10520 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10521 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10522 reg_names[0], reg_names[11]);
10523 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10524 reg_names[0], reg_names[0], reg_names[11]);
10528 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10529 assemble_name (file, buf);
10530 fputs ("@ha\n", file);
10531 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10532 reg_names[0], reg_names[1]);
10533 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10534 assemble_name (file, buf);
10535 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
10538 if (current_function_needs_context)
10539 asm_fprintf (file, "\tmr %s,%s\n",
10540 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10541 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10542 if (current_function_needs_context)
10543 asm_fprintf (file, "\tmr %s,%s\n",
10544 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10549 /* Don't do anything, done in output_profile_hook (). */
10555 /* Adjust the cost of a scheduling dependency. Return the new cost of
10556 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
10559 rs6000_adjust_cost (insn, link, dep_insn, cost)
10562 rtx dep_insn ATTRIBUTE_UNUSED;
10565 if (! recog_memoized (insn))
10568 if (REG_NOTE_KIND (link) != 0)
10571 if (REG_NOTE_KIND (link) == 0)
10573 /* Data dependency; DEP_INSN writes a register that INSN reads
10574 some cycles later. */
10575 switch (get_attr_type (insn))
10578 /* Tell the first scheduling pass about the latency between
10579 a mtctr and bctr (and mtlr and br/blr). The first
10580 scheduling pass will not know about this latency since
10581 the mtctr instruction, which has the latency associated
10582 to it, will be generated by reload. */
10583 return TARGET_POWER ? 5 : 4;
10585 /* Leave some extra cycles between a compare and its
10586 dependent branch, to inhibit expensive mispredicts. */
10587 if ((rs6000_cpu_attr == CPU_PPC750
10588 || rs6000_cpu_attr == CPU_PPC7400
10589 || rs6000_cpu_attr == CPU_PPC7450)
10590 && recog_memoized (dep_insn)
10591 && (INSN_CODE (dep_insn) >= 0)
10592 && (get_attr_type (dep_insn) == TYPE_COMPARE
10593 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10594 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10595 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10600 /* Fall out to return default cost. */
10606 /* A C statement (sans semicolon) to update the integer scheduling
10607 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10608 INSN earlier, increase the priority to execute INSN later. Do not
10609 define this macro if you do not need to adjust the scheduling
10610 priorities of insns. */
10613 rs6000_adjust_priority (insn, priority)
10614 rtx insn ATTRIBUTE_UNUSED;
10617 /* On machines (like the 750) which have asymmetric integer units,
10618 where one integer unit can do multiply and divides and the other
10619 can't, reduce the priority of multiply/divide so it is scheduled
10620 before other integer operations. */
10623 if (! INSN_P (insn))
10626 if (GET_CODE (PATTERN (insn)) == USE)
10629 switch (rs6000_cpu_attr) {
10631 switch (get_attr_type (insn))
10638 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10639 priority, priority);
10640 if (priority >= 0 && priority < 0x01000000)
10650 /* Return how many instructions the machine can issue per cycle. */
10653 rs6000_issue_rate ()
10655 switch (rs6000_cpu_attr) {
10656 case CPU_RIOS1: /* ? */
10658 case CPU_PPC601: /* ? */
10677 /* Length in units of the trampoline for entering a nested function. */
10680 rs6000_trampoline_size ()
10684 switch (DEFAULT_ABI)
10690 ret = (TARGET_32BIT) ? 12 : 24;
10695 case ABI_AIX_NODESC:
10696 ret = (TARGET_32BIT) ? 40 : 48;
10703 /* Emit RTL insns to initialize the variable parts of a trampoline.
10704 FNADDR is an RTX for the address of the function's pure code.
10705 CXT is an RTX for the static chain value for the function. */
10708 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10713 enum machine_mode pmode = Pmode;
10714 int regsize = (TARGET_32BIT) ? 4 : 8;
10715 rtx ctx_reg = force_reg (pmode, cxt);
10717 switch (DEFAULT_ABI)
10722 /* Macros to shorten the code expansions below. */
10723 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10724 #define MEM_PLUS(addr,offset) \
10725 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10727 /* Under AIX, just build the 3 word function descriptor */
10730 rtx fn_reg = gen_reg_rtx (pmode);
10731 rtx toc_reg = gen_reg_rtx (pmode);
10732 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10733 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10734 emit_move_insn (MEM_DEREF (addr), fn_reg);
10735 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10736 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10740 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10743 case ABI_AIX_NODESC:
10744 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10745 FALSE, VOIDmode, 4,
10747 GEN_INT (rs6000_trampoline_size ()), SImode,
10757 /* Table of valid machine attributes. */
10759 const struct attribute_spec rs6000_attribute_table[] =
10761 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10762 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10763 { NULL, 0, 0, false, false, false, NULL }
10766 /* Handle a "longcall" attribute; arguments as in struct
10767 attribute_spec.handler. */
10770 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10773 tree args ATTRIBUTE_UNUSED;
10774 int flags ATTRIBUTE_UNUSED;
10775 bool *no_add_attrs;
10777 if (TREE_CODE (*node) != FUNCTION_TYPE
10778 && TREE_CODE (*node) != FIELD_DECL
10779 && TREE_CODE (*node) != TYPE_DECL)
10781 warning ("`%s' attribute only applies to functions",
10782 IDENTIFIER_POINTER (name));
10783 *no_add_attrs = true;
10789 /* Return a reference suitable for calling a function with the
10790 longcall attribute. */
10793 rs6000_longcall_ref (call_ref)
10796 const char *call_name;
10799 if (GET_CODE (call_ref) != SYMBOL_REF)
10802 /* System V adds '.' to the internal name, so skip them. */
10803 call_name = XSTR (call_ref, 0);
10804 if (*call_name == '.')
10806 while (*call_name == '.')
10809 node = get_identifier (call_name);
10810 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10813 return force_reg (Pmode, call_ref);
10817 /* A C statement or statements to switch to the appropriate section
10818 for output of RTX in mode MODE. You can assume that RTX is some
10819 kind of constant in RTL. The argument MODE is redundant except in
10820 the case of a `const_int' rtx. Select the section by calling
10821 `text_section' or one of the alternatives for other sections.
10823 Do not define this macro if you put all constants in the read-only
10826 #ifdef USING_ELFOS_H
10829 rs6000_select_rtx_section (mode, x)
10830 enum machine_mode mode;
10833 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10839 /* A C statement or statements to switch to the appropriate
10840 section for output of DECL. DECL is either a `VAR_DECL' node
10841 or a constant of some sort. RELOC indicates whether forming
10842 the initial value of DECL requires link-time relocations. */
10845 rs6000_select_section (decl, reloc)
10849 int size = int_size_in_bytes (TREE_TYPE (decl));
10852 static void (* const sec_funcs[4]) PARAMS ((void)) = {
10859 needs_sdata = (size > 0
10860 && size <= g_switch_value
10861 && rs6000_sdata != SDATA_NONE
10862 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10864 if (TREE_CODE (decl) == STRING_CST)
10865 readonly = ! flag_writable_strings;
10866 else if (TREE_CODE (decl) == VAR_DECL)
10867 readonly = (! (flag_pic && reloc)
10868 && TREE_READONLY (decl)
10869 && ! TREE_SIDE_EFFECTS (decl)
10870 && DECL_INITIAL (decl)
10871 && DECL_INITIAL (decl) != error_mark_node
10872 && TREE_CONSTANT (DECL_INITIAL (decl)));
10873 else if (TREE_CODE (decl) == CONSTRUCTOR)
10874 readonly = (! (flag_pic && reloc)
10875 && ! TREE_SIDE_EFFECTS (decl)
10876 && TREE_CONSTANT (decl));
10879 if (needs_sdata && rs6000_sdata != SDATA_EABI)
10882 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10885 /* A C statement to build up a unique section name, expressed as a
10886 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10887 RELOC indicates whether the initial value of EXP requires
10888 link-time relocations. If you do not define this macro, GCC will use
10889 the symbol name prefixed by `.' as the section name. Note - this
10890 macro can now be called for uninitialized data items as well as
10891 initialised data and functions. */
10894 rs6000_unique_section (decl, reloc)
10902 const char *prefix;
10904 static const char *const prefixes[7][2] =
10906 { ".rodata.", ".gnu.linkonce.r." },
10907 { ".sdata2.", ".gnu.linkonce.s2." },
10908 { ".data.", ".gnu.linkonce.d." },
10909 { ".sdata.", ".gnu.linkonce.s." },
10910 { ".bss.", ".gnu.linkonce.b." },
10911 { ".sbss.", ".gnu.linkonce.sb." },
10912 { ".text.", ".gnu.linkonce.t." }
10915 if (TREE_CODE (decl) == FUNCTION_DECL)
10924 if (TREE_CODE (decl) == STRING_CST)
10925 readonly = ! flag_writable_strings;
10926 else if (TREE_CODE (decl) == VAR_DECL)
10927 readonly = (! (flag_pic && reloc)
10928 && TREE_READONLY (decl)
10929 && ! TREE_SIDE_EFFECTS (decl)
10930 && TREE_CONSTANT (DECL_INITIAL (decl)));
10932 size = int_size_in_bytes (TREE_TYPE (decl));
10933 needs_sdata = (size > 0
10934 && size <= g_switch_value
10935 && rs6000_sdata != SDATA_NONE
10936 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10938 if (DECL_INITIAL (decl) == 0
10939 || DECL_INITIAL (decl) == error_mark_node)
10941 else if (! readonly)
10948 /* .sdata2 is only for EABI. */
10949 if (sec == 0 && rs6000_sdata != SDATA_EABI)
10955 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10956 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10957 len = strlen (name) + strlen (prefix);
10958 string = alloca (len + 1);
10960 sprintf (string, "%s%s", prefix, name);
10962 DECL_SECTION_NAME (decl) = build_string (len, string);
10966 /* If we are referencing a function that is static or is known to be
10967 in this file, make the SYMBOL_REF special. We can use this to indicate
10968 that we can branch to this function without emitting a no-op after the
10969 call. For real AIX calling sequences, we also replace the
10970 function name with the real name (1 or 2 leading .'s), rather than
10971 the function descriptor name. This saves a lot of overriding code
10972 to read the prefixes. */
10975 rs6000_encode_section_info (decl, first)
10982 if (TREE_CODE (decl) == FUNCTION_DECL)
10984 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10985 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
10986 && ! DECL_WEAK (decl))
10987 SYMBOL_REF_FLAG (sym_ref) = 1;
10989 if (DEFAULT_ABI == ABI_AIX)
10991 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
10992 size_t len2 = strlen (XSTR (sym_ref, 0));
10993 char *str = alloca (len1 + len2 + 1);
10996 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
10998 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
11001 else if (rs6000_sdata != SDATA_NONE
11002 && DEFAULT_ABI == ABI_V4
11003 && TREE_CODE (decl) == VAR_DECL)
11005 int size = int_size_in_bytes (TREE_TYPE (decl));
11006 tree section_name = DECL_SECTION_NAME (decl);
11007 const char *name = (char *)0;
11012 if (TREE_CODE (section_name) == STRING_CST)
11014 name = TREE_STRING_POINTER (section_name);
11015 len = TREE_STRING_LENGTH (section_name);
11021 if ((size > 0 && size <= g_switch_value)
11023 && ((len == sizeof (".sdata") - 1
11024 && strcmp (name, ".sdata") == 0)
11025 || (len == sizeof (".sdata2") - 1
11026 && strcmp (name, ".sdata2") == 0)
11027 || (len == sizeof (".sbss") - 1
11028 && strcmp (name, ".sbss") == 0)
11029 || (len == sizeof (".sbss2") - 1
11030 && strcmp (name, ".sbss2") == 0)
11031 || (len == sizeof (".PPC.EMB.sdata0") - 1
11032 && strcmp (name, ".PPC.EMB.sdata0") == 0)
11033 || (len == sizeof (".PPC.EMB.sbss0") - 1
11034 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
11036 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11037 size_t len = strlen (XSTR (sym_ref, 0));
11038 char *str = alloca (len + 2);
11041 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
11042 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
11047 #endif /* USING_ELFOS_H */
11050 /* Return a REG that occurs in ADDR with coefficient 1.
11051 ADDR can be effectively incremented by incrementing REG.
11053 r0 is special and we must not select it as an address
11054 register by this routine since our caller will try to
11055 increment the returned register via an "la" instruction. */
11058 find_addr_reg (addr)
11061 while (GET_CODE (addr) == PLUS)
11063 if (GET_CODE (XEXP (addr, 0)) == REG
11064 && REGNO (XEXP (addr, 0)) != 0)
11065 addr = XEXP (addr, 0);
11066 else if (GET_CODE (XEXP (addr, 1)) == REG
11067 && REGNO (XEXP (addr, 1)) != 0)
11068 addr = XEXP (addr, 1);
11069 else if (CONSTANT_P (XEXP (addr, 0)))
11070 addr = XEXP (addr, 1);
11071 else if (CONSTANT_P (XEXP (addr, 1)))
11072 addr = XEXP (addr, 0);
11076 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
11082 rs6000_fatal_bad_address (op)
11085 fatal_insn ("bad address", op);
11088 /* Called to register all of our global variables with the garbage
11092 rs6000_add_gc_roots ()
11094 ggc_add_rtx_root (&rs6000_compare_op0, 1);
11095 ggc_add_rtx_root (&rs6000_compare_op1, 1);
11097 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
11098 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
11099 toc_hash_mark_table);
11102 machopic_add_gc_roots ();
11109 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
11110 reference and a constant. */
11113 symbolic_operand (op)
11116 switch (GET_CODE (op))
11123 return (GET_CODE (op) == SYMBOL_REF ||
11124 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
11125 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
11126 && GET_CODE (XEXP (op, 1)) == CONST_INT);
11133 #ifdef RS6000_LONG_BRANCH
11135 static tree stub_list = 0;
11137 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11138 procedure calls to the linked list. */
11141 add_compiler_stub (label_name, function_name, line_number)
11143 tree function_name;
11146 tree stub = build_tree_list (function_name, label_name);
11147 TREE_TYPE (stub) = build_int_2 (line_number, 0);
11148 TREE_CHAIN (stub) = stub_list;
11152 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
11153 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
11154 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
11156 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11157 handling procedure calls from the linked list and initializes the
11161 output_compiler_stub ()
11164 char label_buf[256];
11166 tree tmp_stub, stub;
11169 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11171 fprintf (asm_out_file,
11172 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11174 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11175 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11176 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11177 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11179 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11181 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11184 label_buf[0] = '_';
11185 strcpy (label_buf+1,
11186 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
11189 strcpy (tmp_buf, "lis r12,hi16(");
11190 strcat (tmp_buf, label_buf);
11191 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11192 strcat (tmp_buf, label_buf);
11193 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11194 output_asm_insn (tmp_buf, 0);
11196 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11197 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11198 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11199 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11205 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11206 already there or not. */
11209 no_previous_def (function_name)
11210 tree function_name;
11213 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11214 if (function_name == STUB_FUNCTION_NAME (stub))
11219 /* GET_PREV_LABEL gets the label name from the previous definition of
11223 get_prev_label (function_name)
11224 tree function_name;
11227 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11228 if (function_name == STUB_FUNCTION_NAME (stub))
11229 return STUB_LABEL_NAME (stub);
11233 /* INSN is either a function call or a millicode call. It may have an
11234 unconditional jump in its delay slot.
11236 CALL_DEST is the routine we are calling. */
11239 output_call (insn, call_dest, operand_number)
11242 int operand_number;
11244 static char buf[256];
11245 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11248 tree funname = get_identifier (XSTR (call_dest, 0));
11250 if (no_previous_def (funname))
11253 rtx label_rtx = gen_label_rtx ();
11254 char *label_buf, temp_buf[256];
11255 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11256 CODE_LABEL_NUMBER (label_rtx));
11257 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11258 labelname = get_identifier (label_buf);
11259 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11261 line_number = NOTE_LINE_NUMBER (insn);
11262 add_compiler_stub (labelname, funname, line_number);
11265 labelname = get_prev_label (funname);
11267 sprintf (buf, "jbsr %%z%d,%.246s",
11268 operand_number, IDENTIFIER_POINTER (labelname));
11273 sprintf (buf, "bl %%z%d", operand_number);
11278 #endif /* RS6000_LONG_BRANCH */
11280 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11282 const char *const symbol_ = (SYMBOL); \
11283 char *buffer_ = (BUF); \
11284 if (symbol_[0] == '"') \
11286 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11288 else if (name_needs_quotes(symbol_)) \
11290 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11294 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11299 /* Generate PIC and indirect symbol stubs. */
11302 machopic_output_stub (file, symb, stub)
11304 const char *symb, *stub;
11306 unsigned int length;
11307 char *symbol_name, *lazy_ptr_name;
11308 char *local_label_0;
11309 static int label = 0;
11311 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11312 STRIP_NAME_ENCODING (symb, symb);
11316 length = strlen (symb);
11317 symbol_name = alloca (length + 32);
11318 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11320 lazy_ptr_name = alloca (length + 32);
11321 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11323 local_label_0 = alloca (length + 32);
11324 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11327 machopic_picsymbol_stub_section ();
11329 machopic_symbol_stub_section ();
11331 fprintf (file, "%s:\n", stub);
11332 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11336 fprintf (file, "\tmflr r0\n");
11337 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11338 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11339 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11340 lazy_ptr_name, local_label_0);
11341 fprintf (file, "\tmtlr r0\n");
11342 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11343 lazy_ptr_name, local_label_0);
11344 fprintf (file, "\tmtctr r12\n");
11345 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11346 lazy_ptr_name, local_label_0);
11347 fprintf (file, "\tbctr\n");
11350 fprintf (file, "non-pure not supported\n");
11352 machopic_lazy_symbol_ptr_section ();
11353 fprintf (file, "%s:\n", lazy_ptr_name);
11354 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11355 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11358 /* Legitimize PIC addresses. If the address is already
11359 position-independent, we return ORIG. Newly generated
11360 position-independent addresses go into a reg. This is REG if non
11361 zero, otherwise we allocate register(s) as necessary. */
11363 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11366 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11368 enum machine_mode mode;
11373 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11374 reg = gen_reg_rtx (Pmode);
11376 if (GET_CODE (orig) == CONST)
11378 if (GET_CODE (XEXP (orig, 0)) == PLUS
11379 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11382 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11385 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11388 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11394 if (GET_CODE (offset) == CONST_INT)
11396 if (SMALL_INT (offset))
11397 return plus_constant (base, INTVAL (offset));
11398 else if (! reload_in_progress && ! reload_completed)
11399 offset = force_reg (Pmode, offset);
11402 rtx mem = force_const_mem (Pmode, orig);
11403 return machopic_legitimize_pic_address (mem, Pmode, reg);
11406 return gen_rtx (PLUS, Pmode, base, offset);
11409 /* Fall back on generic machopic code. */
11410 return machopic_legitimize_pic_address (orig, mode, reg);
11413 /* This is just a placeholder to make linking work without having to
11414 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11415 ever needed for Darwin (not too likely!) this would have to get a
11416 real definition. */
11423 #endif /* TARGET_MACHO */
11426 static unsigned int
11427 rs6000_elf_section_type_flags (decl, name, reloc)
11432 unsigned int flags = default_section_type_flags (decl, name, reloc);
11434 if (TARGET_RELOCATABLE)
11435 flags |= SECTION_WRITE;
11440 /* Record an element in the table of global constructors. SYMBOL is
11441 a SYMBOL_REF of the function to be called; PRIORITY is a number
11442 between 0 and MAX_INIT_PRIORITY.
11444 This differs from default_named_section_asm_out_constructor in
11445 that we have special handling for -mrelocatable. */
11448 rs6000_elf_asm_out_constructor (symbol, priority)
11452 const char *section = ".ctors";
11455 if (priority != DEFAULT_INIT_PRIORITY)
11457 sprintf (buf, ".ctors.%.5u",
11458 /* Invert the numbering so the linker puts us in the proper
11459 order; constructors are run from right to left, and the
11460 linker sorts in increasing order. */
11461 MAX_INIT_PRIORITY - priority);
11465 named_section_flags (section, SECTION_WRITE);
11466 assemble_align (POINTER_SIZE);
11468 if (TARGET_RELOCATABLE)
11470 fputs ("\t.long (", asm_out_file);
11471 output_addr_const (asm_out_file, symbol);
11472 fputs (")@fixup\n", asm_out_file);
11475 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11479 rs6000_elf_asm_out_destructor (symbol, priority)
11483 const char *section = ".dtors";
11486 if (priority != DEFAULT_INIT_PRIORITY)
11488 sprintf (buf, ".dtors.%.5u",
11489 /* Invert the numbering so the linker puts us in the proper
11490 order; constructors are run from right to left, and the
11491 linker sorts in increasing order. */
11492 MAX_INIT_PRIORITY - priority);
11496 named_section_flags (section, SECTION_WRITE);
11497 assemble_align (POINTER_SIZE);
11499 if (TARGET_RELOCATABLE)
11501 fputs ("\t.long (", asm_out_file);
11502 output_addr_const (asm_out_file, symbol);
11503 fputs (")@fixup\n", asm_out_file);
11506 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11510 #ifdef OBJECT_FORMAT_COFF
11512 xcoff_asm_named_section (name, flags)
11514 unsigned int flags ATTRIBUTE_UNUSED;
11516 fprintf (asm_out_file, "\t.csect %s\n", name);