1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Set to non-zero once AIX common-mode calls have been defined. */
78 static int common_mode_defined;
80 /* Save information from a "cmpxx" operation until the branch or scc is
82 rtx rs6000_compare_op0, rs6000_compare_op1;
83 int rs6000_compare_fp_p;
85 /* Label number of label created for -mrelocatable, to call to so we can
86 get the address of the GOT section */
87 int rs6000_pic_labelno;
90 /* Which abi to adhere to */
91 const char *rs6000_abi_name = RS6000_ABI_NAME;
93 /* Semantics of the small data area */
94 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
96 /* Which small data model to use */
97 const char *rs6000_sdata_name = (char *)0;
99 /* Counter for labels which are to be placed in .fixup. */
100 int fixuplabelno = 0;
103 /* ABI enumeration available for subtarget to use. */
104 enum rs6000_abi rs6000_current_abi;
106 /* ABI string from -mabi= option. */
107 const char *rs6000_abi_string;
110 const char *rs6000_debug_name;
111 int rs6000_debug_stack; /* debug stack applications */
112 int rs6000_debug_arg; /* debug argument handling */
114 /* Flag to say the TOC is initialized */
116 char toc_label_name[10];
118 /* Alias set for saves and restores from the rs6000 stack. */
119 static int rs6000_sr_alias_set;
121 static void rs6000_add_gc_roots PARAMS ((void));
122 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
123 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
124 static void validate_condition_mode
125 PARAMS ((enum rtx_code, enum machine_mode));
126 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
127 static void rs6000_maybe_dead PARAMS ((rtx));
128 static void rs6000_emit_stack_tie PARAMS ((void));
129 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
130 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
131 static unsigned rs6000_hash_constant PARAMS ((rtx));
132 static unsigned toc_hash_function PARAMS ((const void *));
133 static int toc_hash_eq PARAMS ((const void *, const void *));
134 static int toc_hash_mark_entry PARAMS ((void **, void *));
135 static void toc_hash_mark_table PARAMS ((void *));
136 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
137 static void rs6000_free_machine_status PARAMS ((struct function *));
138 static void rs6000_init_machine_status PARAMS ((struct function *));
139 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
140 static int rs6000_ra_ever_killed PARAMS ((void));
141 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
142 const struct attribute_spec rs6000_attribute_table[];
143 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
144 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
145 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
146 HOST_WIDE_INT, HOST_WIDE_INT));
148 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
150 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
151 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
153 #ifdef OBJECT_FORMAT_COFF
154 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
156 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
157 static int rs6000_adjust_priority PARAMS ((rtx, int));
158 static int rs6000_issue_rate PARAMS ((void));
160 static void rs6000_init_builtins PARAMS ((void));
161 static void altivec_init_builtins PARAMS ((void));
162 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
163 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
164 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
165 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
166 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
167 static void rs6000_parse_abi_options PARAMS ((void));
168 static int first_altivec_reg_to_save PARAMS ((void));
169 static unsigned int compute_vrsave_mask PARAMS ((void));
170 static void is_altivec_return_reg PARAMS ((rtx, void *));
171 int vrsave_operation PARAMS ((rtx, enum machine_mode));
172 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
173 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
175 /* Default register names. */
176 char rs6000_reg_names[][8] =
178 "0", "1", "2", "3", "4", "5", "6", "7",
179 "8", "9", "10", "11", "12", "13", "14", "15",
180 "16", "17", "18", "19", "20", "21", "22", "23",
181 "24", "25", "26", "27", "28", "29", "30", "31",
182 "0", "1", "2", "3", "4", "5", "6", "7",
183 "8", "9", "10", "11", "12", "13", "14", "15",
184 "16", "17", "18", "19", "20", "21", "22", "23",
185 "24", "25", "26", "27", "28", "29", "30", "31",
186 "mq", "lr", "ctr","ap",
187 "0", "1", "2", "3", "4", "5", "6", "7",
189 /* AltiVec registers. */
190 "0", "1", "2", "3", "4", "5", "6", "7",
191 "8", "9", "10", "11", "12", "13", "14", "15",
192 "16", "17", "18", "19", "20", "21", "22", "23",
193 "24", "25", "26", "27", "28", "29", "30", "31",
197 #ifdef TARGET_REGNAMES
198 static const char alt_reg_names[][8] =
200 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
201 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
202 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
203 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
204 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
205 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
206 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
207 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
208 "mq", "lr", "ctr", "ap",
209 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
211 /* AltiVec registers. */
212 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
213 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
214 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
215 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
220 #ifndef MASK_STRICT_ALIGN
221 #define MASK_STRICT_ALIGN 0
224 /* Initialize the GCC target structure. */
225 #undef TARGET_ATTRIBUTE_TABLE
226 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
228 #undef TARGET_ASM_ALIGNED_DI_OP
229 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
231 /* Default unaligned ops are only provided for ELF. Find the ops needed
232 for non-ELF systems. */
233 #ifndef OBJECT_FORMAT_ELF
234 #ifdef OBJECT_FORMAT_COFF
235 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
237 #undef TARGET_ASM_UNALIGNED_HI_OP
238 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
239 #undef TARGET_ASM_UNALIGNED_SI_OP
240 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
241 #undef TARGET_ASM_UNALIGNED_DI_OP
242 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
245 #undef TARGET_ASM_UNALIGNED_HI_OP
246 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
247 #undef TARGET_ASM_UNALIGNED_SI_OP
248 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
252 /* This hook deals with fixups for relocatable code and DI-mode objects
254 #undef TARGET_ASM_INTEGER
255 #define TARGET_ASM_INTEGER rs6000_assemble_integer
257 #undef TARGET_ASM_FUNCTION_PROLOGUE
258 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
259 #undef TARGET_ASM_FUNCTION_EPILOGUE
260 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
263 #undef TARGET_SECTION_TYPE_FLAGS
264 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
267 #undef TARGET_SCHED_ISSUE_RATE
268 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
269 #undef TARGET_SCHED_ADJUST_COST
270 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
271 #undef TARGET_SCHED_ADJUST_PRIORITY
272 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
274 #undef TARGET_INIT_BUILTINS
275 #define TARGET_INIT_BUILTINS rs6000_init_builtins
277 #undef TARGET_EXPAND_BUILTIN
278 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
280 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
281 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
283 struct gcc_target targetm = TARGET_INITIALIZER;
285 /* Override command line options. Mostly we process the processor
286 type and sometimes adjust other TARGET_ options. */
289 rs6000_override_options (default_cpu)
290 const char *default_cpu;
293 struct rs6000_cpu_select *ptr;
295 /* Simplify the entries below by making a mask for any POWER
296 variant and any PowerPC variant. */
298 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
299 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
300 | MASK_PPC_GFXOPT | MASK_POWERPC64)
301 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
305 const char *const name; /* Canonical processor name. */
306 const enum processor_type processor; /* Processor type enum value. */
307 const int target_enable; /* Target flags to enable. */
308 const int target_disable; /* Target flags to disable. */
309 } const processor_target_table[]
310 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
311 POWER_MASKS | POWERPC_MASKS},
312 {"power", PROCESSOR_POWER,
313 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
314 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
315 {"power2", PROCESSOR_POWER,
316 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
317 POWERPC_MASKS | MASK_NEW_MNEMONICS},
318 {"power3", PROCESSOR_PPC630,
319 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
320 POWER_MASKS | MASK_PPC_GPOPT},
321 {"powerpc", PROCESSOR_POWERPC,
322 MASK_POWERPC | MASK_NEW_MNEMONICS,
323 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
324 {"powerpc64", PROCESSOR_POWERPC64,
325 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
326 POWER_MASKS | POWERPC_OPT_MASKS},
327 {"rios", PROCESSOR_RIOS1,
328 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
329 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
330 {"rios1", PROCESSOR_RIOS1,
331 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
332 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
333 {"rsc", PROCESSOR_PPC601,
334 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
335 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
336 {"rsc1", PROCESSOR_PPC601,
337 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
338 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
339 {"rios2", PROCESSOR_RIOS2,
340 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
341 POWERPC_MASKS | MASK_NEW_MNEMONICS},
342 {"rs64a", PROCESSOR_RS64A,
343 MASK_POWERPC | MASK_NEW_MNEMONICS,
344 POWER_MASKS | POWERPC_OPT_MASKS},
345 {"401", PROCESSOR_PPC403,
346 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
347 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
348 {"403", PROCESSOR_PPC403,
349 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
350 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
351 {"405", PROCESSOR_PPC405,
352 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
353 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
354 {"505", PROCESSOR_MPCCORE,
355 MASK_POWERPC | MASK_NEW_MNEMONICS,
356 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
357 {"601", PROCESSOR_PPC601,
358 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
359 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
360 {"602", PROCESSOR_PPC603,
361 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
362 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
363 {"603", PROCESSOR_PPC603,
364 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
365 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
366 {"603e", PROCESSOR_PPC603,
367 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
368 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
369 {"ec603e", PROCESSOR_PPC603,
370 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
371 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
372 {"604", PROCESSOR_PPC604,
373 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
374 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
375 {"604e", PROCESSOR_PPC604e,
376 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
377 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
378 {"620", PROCESSOR_PPC620,
379 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
380 POWER_MASKS | MASK_PPC_GPOPT},
381 {"630", PROCESSOR_PPC630,
382 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
383 POWER_MASKS | MASK_PPC_GPOPT},
384 {"740", PROCESSOR_PPC750,
385 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
386 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
387 {"750", PROCESSOR_PPC750,
388 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
389 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
390 {"7400", PROCESSOR_PPC7400,
391 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
392 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
393 {"7450", PROCESSOR_PPC7450,
394 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
395 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
396 {"801", PROCESSOR_MPCCORE,
397 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
398 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
399 {"821", PROCESSOR_MPCCORE,
400 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
401 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
402 {"823", PROCESSOR_MPCCORE,
403 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
404 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
405 {"860", PROCESSOR_MPCCORE,
406 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
407 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
409 size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
411 /* Save current -mmultiple/-mno-multiple status. */
412 int multiple = TARGET_MULTIPLE;
413 /* Save current -mstring/-mno-string status. */
414 int string = TARGET_STRING;
416 /* Identify the processor type. */
417 rs6000_select[0].string = default_cpu;
418 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
420 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
422 ptr = &rs6000_select[i];
423 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
425 for (j = 0; j < ptt_size; j++)
426 if (! strcmp (ptr->string, processor_target_table[j].name))
429 rs6000_cpu = processor_target_table[j].processor;
433 target_flags |= processor_target_table[j].target_enable;
434 target_flags &= ~processor_target_table[j].target_disable;
440 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
444 /* If we are optimizing big endian systems for space, use the store
445 multiple instructions. */
446 if (BYTES_BIG_ENDIAN && optimize_size)
447 target_flags |= MASK_MULTIPLE;
449 /* If -mmultiple or -mno-multiple was explicitly used, don't
450 override with the processor default */
451 if (TARGET_MULTIPLE_SET)
452 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
454 /* If -mstring or -mno-string was explicitly used, don't override
455 with the processor default. */
456 if (TARGET_STRING_SET)
457 target_flags = (target_flags & ~MASK_STRING) | string;
459 /* Don't allow -mmultiple or -mstring on little endian systems
460 unless the cpu is a 750, because the hardware doesn't support the
461 instructions used in little endian mode, and causes an alignment
462 trap. The 750 does not cause an alignment trap (except when the
463 target is unaligned). */
465 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
469 target_flags &= ~MASK_MULTIPLE;
470 if (TARGET_MULTIPLE_SET)
471 warning ("-mmultiple is not supported on little endian systems");
476 target_flags &= ~MASK_STRING;
477 if (TARGET_STRING_SET)
478 warning ("-mstring is not supported on little endian systems");
482 if (flag_pic && DEFAULT_ABI == ABI_AIX)
484 warning ("-f%s ignored (all code is position independent)",
485 (flag_pic > 1) ? "PIC" : "pic");
489 #ifdef XCOFF_DEBUGGING_INFO
490 if (flag_function_sections && (write_symbols != NO_DEBUG)
491 && DEFAULT_ABI == ABI_AIX)
493 warning ("-ffunction-sections disabled on AIX when debugging");
494 flag_function_sections = 0;
497 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
499 warning ("-fdata-sections not supported on AIX");
500 flag_data_sections = 0;
504 /* Set debug flags */
505 if (rs6000_debug_name)
507 if (! strcmp (rs6000_debug_name, "all"))
508 rs6000_debug_stack = rs6000_debug_arg = 1;
509 else if (! strcmp (rs6000_debug_name, "stack"))
510 rs6000_debug_stack = 1;
511 else if (! strcmp (rs6000_debug_name, "arg"))
512 rs6000_debug_arg = 1;
514 error ("unknown -mdebug-%s switch", rs6000_debug_name);
517 /* Set size of long double */
518 rs6000_long_double_type_size = 64;
519 if (rs6000_long_double_size_string)
522 int size = strtol (rs6000_long_double_size_string, &tail, 10);
523 if (*tail != '\0' || (size != 64 && size != 128))
524 error ("Unknown switch -mlong-double-%s",
525 rs6000_long_double_size_string);
527 rs6000_long_double_type_size = size;
530 /* Handle -mabi= options. */
531 rs6000_parse_abi_options ();
533 #ifdef TARGET_REGNAMES
534 /* If the user desires alternate register names, copy in the
535 alternate names now. */
537 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
540 #ifdef SUBTARGET_OVERRIDE_OPTIONS
541 SUBTARGET_OVERRIDE_OPTIONS;
543 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
544 SUBSUBTARGET_OVERRIDE_OPTIONS;
547 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
548 If -maix-struct-return or -msvr4-struct-return was explicitly
549 used, don't override with the ABI default. */
550 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
552 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
553 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
555 target_flags |= MASK_AIX_STRUCT_RET;
558 /* Register global variables with the garbage collector. */
559 rs6000_add_gc_roots ();
561 /* Allocate an alias set for register saves & restores from stack. */
562 rs6000_sr_alias_set = new_alias_set ();
565 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
567 /* We can only guarantee the availability of DI pseudo-ops when
568 assembling for 64-bit targets. */
571 targetm.asm_out.aligned_op.di = NULL;
572 targetm.asm_out.unaligned_op.di = NULL;
575 /* Arrange to save and restore machine status around nested functions. */
576 init_machine_status = rs6000_init_machine_status;
577 free_machine_status = rs6000_free_machine_status;
580 /* Handle -mabi= options. */
582 rs6000_parse_abi_options ()
584 if (rs6000_abi_string == 0)
586 else if (! strcmp (rs6000_abi_string, "altivec"))
587 rs6000_altivec_abi = 1;
589 error ("unknown ABI specified: '%s'", rs6000_abi_string);
593 optimization_options (level, size)
594 int level ATTRIBUTE_UNUSED;
595 int size ATTRIBUTE_UNUSED;
599 /* Do anything needed at the start of the asm file. */
602 rs6000_file_start (file, default_cpu)
604 const char *default_cpu;
608 const char *start = buffer;
609 struct rs6000_cpu_select *ptr;
611 if (flag_verbose_asm)
613 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
614 rs6000_select[0].string = default_cpu;
616 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
618 ptr = &rs6000_select[i];
619 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
621 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
627 switch (rs6000_sdata)
629 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
630 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
631 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
632 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
635 if (rs6000_sdata && g_switch_value)
637 fprintf (file, "%s -G %d", start, g_switch_value);
648 /* Create a CONST_DOUBLE from a string. */
651 rs6000_float_const (string, mode)
653 enum machine_mode mode;
655 REAL_VALUE_TYPE value;
656 value = REAL_VALUE_ATOF (string, mode);
657 return immed_real_const_1 (value, mode);
660 /* Return non-zero if this function is known to have a null epilogue. */
665 if (reload_completed)
667 rs6000_stack_t *info = rs6000_stack_info ();
669 if (info->first_gp_reg_save == 32
670 && info->first_fp_reg_save == 64
671 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
674 && info->vrsave_mask == 0
682 /* Returns 1 always. */
685 any_operand (op, mode)
686 rtx op ATTRIBUTE_UNUSED;
687 enum machine_mode mode ATTRIBUTE_UNUSED;
692 /* Returns 1 if op is the count register. */
694 count_register_operand (op, mode)
696 enum machine_mode mode ATTRIBUTE_UNUSED;
698 if (GET_CODE (op) != REG)
701 if (REGNO (op) == COUNT_REGISTER_REGNUM)
704 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
711 xer_operand (op, mode)
713 enum machine_mode mode ATTRIBUTE_UNUSED;
715 if (GET_CODE (op) != REG)
718 if (XER_REGNO_P (REGNO (op)))
724 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
725 by such constants completes more quickly. */
728 s8bit_cint_operand (op, mode)
730 enum machine_mode mode ATTRIBUTE_UNUSED;
732 return ( GET_CODE (op) == CONST_INT
733 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
736 /* Return 1 if OP is a constant that can fit in a D field. */
739 short_cint_operand (op, mode)
741 enum machine_mode mode ATTRIBUTE_UNUSED;
743 return (GET_CODE (op) == CONST_INT
744 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
747 /* Similar for an unsigned D field. */
750 u_short_cint_operand (op, mode)
752 enum machine_mode mode ATTRIBUTE_UNUSED;
754 return (GET_CODE (op) == CONST_INT
755 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'));
758 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
761 non_short_cint_operand (op, mode)
763 enum machine_mode mode ATTRIBUTE_UNUSED;
765 return (GET_CODE (op) == CONST_INT
766 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
769 /* Returns 1 if OP is a CONST_INT that is a positive value
770 and an exact power of 2. */
773 exact_log2_cint_operand (op, mode)
775 enum machine_mode mode ATTRIBUTE_UNUSED;
777 return (GET_CODE (op) == CONST_INT
779 && exact_log2 (INTVAL (op)) >= 0);
782 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
786 gpc_reg_operand (op, mode)
788 enum machine_mode mode;
790 return (register_operand (op, mode)
791 && (GET_CODE (op) != REG
792 || (REGNO (op) >= ARG_POINTER_REGNUM
793 && !XER_REGNO_P (REGNO (op)))
794 || REGNO (op) < MQ_REGNO));
797 /* Returns 1 if OP is either a pseudo-register or a register denoting a
801 cc_reg_operand (op, mode)
803 enum machine_mode mode;
805 return (register_operand (op, mode)
806 && (GET_CODE (op) != REG
807 || REGNO (op) >= FIRST_PSEUDO_REGISTER
808 || CR_REGNO_P (REGNO (op))));
811 /* Returns 1 if OP is either a pseudo-register or a register denoting a
812 CR field that isn't CR0. */
815 cc_reg_not_cr0_operand (op, mode)
817 enum machine_mode mode;
819 return (register_operand (op, mode)
820 && (GET_CODE (op) != REG
821 || REGNO (op) >= FIRST_PSEUDO_REGISTER
822 || CR_REGNO_NOT_CR0_P (REGNO (op))));
825 /* Returns 1 if OP is either a constant integer valid for a D-field or
826 a non-special register. If a register, it must be in the proper
827 mode unless MODE is VOIDmode. */
830 reg_or_short_operand (op, mode)
832 enum machine_mode mode;
834 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
837 /* Similar, except check if the negation of the constant would be
838 valid for a D-field. */
841 reg_or_neg_short_operand (op, mode)
843 enum machine_mode mode;
845 if (GET_CODE (op) == CONST_INT)
846 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
848 return gpc_reg_operand (op, mode);
851 /* Return 1 if the operand is either a register or an integer whose
852 high-order 16 bits are zero. */
855 reg_or_u_short_operand (op, mode)
857 enum machine_mode mode;
859 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
862 /* Return 1 is the operand is either a non-special register or ANY
866 reg_or_cint_operand (op, mode)
868 enum machine_mode mode;
870 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
873 /* Return 1 is the operand is either a non-special register or ANY
874 32-bit signed constant integer. */
877 reg_or_arith_cint_operand (op, mode)
879 enum machine_mode mode;
881 return (gpc_reg_operand (op, mode)
882 || (GET_CODE (op) == CONST_INT
883 #if HOST_BITS_PER_WIDE_INT != 32
884 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
885 < (unsigned HOST_WIDE_INT) 0x100000000ll)
890 /* Return 1 is the operand is either a non-special register or a 32-bit
891 signed constant integer valid for 64-bit addition. */
894 reg_or_add_cint64_operand (op, mode)
896 enum machine_mode mode;
898 return (gpc_reg_operand (op, mode)
899 || (GET_CODE (op) == CONST_INT
900 && INTVAL (op) < 0x7fff8000
901 #if HOST_BITS_PER_WIDE_INT != 32
902 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
908 /* Return 1 is the operand is either a non-special register or a 32-bit
909 signed constant integer valid for 64-bit subtraction. */
912 reg_or_sub_cint64_operand (op, mode)
914 enum machine_mode mode;
916 return (gpc_reg_operand (op, mode)
917 || (GET_CODE (op) == CONST_INT
918 && (- INTVAL (op)) < 0x7fff8000
919 #if HOST_BITS_PER_WIDE_INT != 32
920 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
926 /* Return 1 is the operand is either a non-special register or ANY
927 32-bit unsigned constant integer. */
930 reg_or_logical_cint_operand (op, mode)
932 enum machine_mode mode;
934 if (GET_CODE (op) == CONST_INT)
936 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
938 if (GET_MODE_BITSIZE (mode) <= 32)
945 return ((INTVAL (op) & GET_MODE_MASK (mode)
946 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
948 else if (GET_CODE (op) == CONST_DOUBLE)
950 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
954 return CONST_DOUBLE_HIGH (op) == 0;
957 return gpc_reg_operand (op, mode);
960 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
963 got_operand (op, mode)
965 enum machine_mode mode ATTRIBUTE_UNUSED;
967 return (GET_CODE (op) == SYMBOL_REF
968 || GET_CODE (op) == CONST
969 || GET_CODE (op) == LABEL_REF);
972 /* Return 1 if the operand is a simple references that can be loaded via
973 the GOT (labels involving addition aren't allowed). */
976 got_no_const_operand (op, mode)
978 enum machine_mode mode ATTRIBUTE_UNUSED;
980 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
983 /* Return the number of instructions it takes to form a constant in an
987 num_insns_constant_wide (value)
990 /* signed constant loadable with {cal|addi} */
991 if (CONST_OK_FOR_LETTER_P (value, 'I'))
994 /* constant loadable with {cau|addis} */
995 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
998 #if HOST_BITS_PER_WIDE_INT == 64
999 else if (TARGET_POWERPC64)
1001 HOST_WIDE_INT low = value & 0xffffffff;
1002 HOST_WIDE_INT high = value >> 32;
1004 low = (low ^ 0x80000000) - 0x80000000; /* sign extend */
1006 if (high == 0 && (low & 0x80000000) == 0)
1009 else if (high == -1 && (low & 0x80000000) != 0)
1013 return num_insns_constant_wide (high) + 1;
1016 return (num_insns_constant_wide (high)
1017 + num_insns_constant_wide (low) + 1);
1026 num_insns_constant (op, mode)
1028 enum machine_mode mode;
1030 if (GET_CODE (op) == CONST_INT)
1032 #if HOST_BITS_PER_WIDE_INT == 64
1033 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1034 && mask64_operand (op, mode))
1038 return num_insns_constant_wide (INTVAL (op));
1041 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1046 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1047 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1048 return num_insns_constant_wide ((HOST_WIDE_INT)l);
1051 else if (GET_CODE (op) == CONST_DOUBLE)
1057 int endian = (WORDS_BIG_ENDIAN == 0);
1059 if (mode == VOIDmode || mode == DImode)
1061 high = CONST_DOUBLE_HIGH (op);
1062 low = CONST_DOUBLE_LOW (op);
1066 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1067 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1069 low = l[1 - endian];
1073 return (num_insns_constant_wide (low)
1074 + num_insns_constant_wide (high));
1078 if (high == 0 && (low & 0x80000000) == 0)
1079 return num_insns_constant_wide (low);
1081 else if (high == -1 && (low & 0x80000000) != 0)
1082 return num_insns_constant_wide (low);
1084 else if (mask64_operand (op, mode))
1088 return num_insns_constant_wide (high) + 1;
1091 return (num_insns_constant_wide (high)
1092 + num_insns_constant_wide (low) + 1);
1100 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1101 register with one instruction per word. We only do this if we can
1102 safely read CONST_DOUBLE_{LOW,HIGH}. */
1105 easy_fp_constant (op, mode)
1107 enum machine_mode mode;
1109 if (GET_CODE (op) != CONST_DOUBLE
1110 || GET_MODE (op) != mode
1111 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1114 /* Consider all constants with -msoft-float to be easy. */
1115 if (TARGET_SOFT_FLOAT && mode != DImode)
1118 /* If we are using V.4 style PIC, consider all constants to be hard. */
1119 if (flag_pic && DEFAULT_ABI == ABI_V4)
1122 #ifdef TARGET_RELOCATABLE
1123 /* Similarly if we are using -mrelocatable, consider all constants
1125 if (TARGET_RELOCATABLE)
1134 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1135 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1137 return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
1138 && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
1141 else if (mode == SFmode)
1146 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1147 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1149 return num_insns_constant_wide (l) == 1;
1152 else if (mode == DImode)
1153 return ((TARGET_POWERPC64
1154 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1155 || (num_insns_constant (op, DImode) <= 2));
1157 else if (mode == SImode)
1163 /* Return 1 if the operand is 0.0. */
1165 zero_fp_constant (op, mode)
1167 enum machine_mode mode;
1169 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1172 /* Return 1 if the operand is in volatile memory. Note that during
1173 the RTL generation phase, memory_operand does not return TRUE for
1174 volatile memory references. So this function allows us to
1175 recognize volatile references where its safe. */
1178 volatile_mem_operand (op, mode)
1180 enum machine_mode mode;
1182 if (GET_CODE (op) != MEM)
1185 if (!MEM_VOLATILE_P (op))
1188 if (mode != GET_MODE (op))
1191 if (reload_completed)
1192 return memory_operand (op, mode);
1194 if (reload_in_progress)
1195 return strict_memory_address_p (mode, XEXP (op, 0));
1197 return memory_address_p (mode, XEXP (op, 0));
1200 /* Return 1 if the operand is an offsettable memory operand. */
1203 offsettable_mem_operand (op, mode)
1205 enum machine_mode mode;
1207 return ((GET_CODE (op) == MEM)
1208 && offsettable_address_p (reload_completed || reload_in_progress,
1209 mode, XEXP (op, 0)));
1212 /* Return 1 if the operand is either an easy FP constant (see above) or
1216 mem_or_easy_const_operand (op, mode)
1218 enum machine_mode mode;
1220 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1223 /* Return 1 if the operand is either a non-special register or an item
1224 that can be used as the operand of a `mode' add insn. */
1227 add_operand (op, mode)
1229 enum machine_mode mode;
1231 if (GET_CODE (op) == CONST_INT)
1232 return (CONST_OK_FOR_LETTER_P (INTVAL(op), 'I')
1233 || CONST_OK_FOR_LETTER_P (INTVAL(op), 'L'));
1235 return gpc_reg_operand (op, mode);
1238 /* Return 1 if OP is a constant but not a valid add_operand. */
1241 non_add_cint_operand (op, mode)
1243 enum machine_mode mode ATTRIBUTE_UNUSED;
1245 return (GET_CODE (op) == CONST_INT
1246 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000
1247 && ! CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1250 /* Return 1 if the operand is a non-special register or a constant that
1251 can be used as the operand of an OR or XOR insn on the RS/6000. */
1254 logical_operand (op, mode)
1256 enum machine_mode mode;
1258 HOST_WIDE_INT opl, oph;
1260 if (gpc_reg_operand (op, mode))
1263 if (GET_CODE (op) == CONST_INT)
1265 opl = INTVAL (op) & GET_MODE_MASK (mode);
1267 #if HOST_BITS_PER_WIDE_INT <= 32
1268 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1272 else if (GET_CODE (op) == CONST_DOUBLE)
1274 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1277 opl = CONST_DOUBLE_LOW (op);
1278 oph = CONST_DOUBLE_HIGH (op);
1285 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1286 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1289 /* Return 1 if C is a constant that is not a logical operand (as
1290 above), but could be split into one. */
1293 non_logical_cint_operand (op, mode)
1295 enum machine_mode mode;
1297 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1298 && ! logical_operand (op, mode)
1299 && reg_or_logical_cint_operand (op, mode));
1302 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1303 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1304 Reject all ones and all zeros, since these should have been optimized
1305 away and confuse the making of MB and ME. */
1308 mask_operand (op, mode)
1310 enum machine_mode mode ATTRIBUTE_UNUSED;
1312 HOST_WIDE_INT c, lsb;
1314 if (GET_CODE (op) != CONST_INT)
1319 /* We don't change the number of transitions by inverting,
1320 so make sure we start with the LS bit zero. */
1324 /* Reject all zeros or all ones. */
1328 /* Find the first transition. */
1331 /* Invert to look for a second transition. */
1334 /* Erase first transition. */
1337 /* Find the second transition (if any). */
1340 /* Match if all the bits above are 1's (or c is zero). */
1344 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1345 It is if there are no more than one 1->0 or 0->1 transitions.
1346 Reject all ones and all zeros, since these should have been optimized
1347 away and confuse the making of MB and ME. */
1350 mask64_operand (op, mode)
1352 enum machine_mode mode;
1354 if (GET_CODE (op) == CONST_INT)
1356 HOST_WIDE_INT c, lsb;
1358 /* We don't change the number of transitions by inverting,
1359 so make sure we start with the LS bit zero. */
1364 /* Reject all zeros or all ones. */
1368 /* Find the transition, and check that all bits above are 1's. */
1372 else if (GET_CODE (op) == CONST_DOUBLE
1373 && (mode == VOIDmode || mode == DImode))
1375 HOST_WIDE_INT low, high, lsb;
1377 if (HOST_BITS_PER_WIDE_INT < 64)
1378 high = CONST_DOUBLE_HIGH (op);
1380 low = CONST_DOUBLE_LOW (op);
1383 if (HOST_BITS_PER_WIDE_INT < 64)
1390 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1394 return high == -lsb;
1398 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1404 /* Return 1 if the operand is either a non-special register or a constant
1405 that can be used as the operand of a PowerPC64 logical AND insn. */
1408 and64_operand (op, mode)
1410 enum machine_mode mode;
1412 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1413 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1415 return (logical_operand (op, mode) || mask64_operand (op, mode));
1418 /* Return 1 if the operand is either a non-special register or a
1419 constant that can be used as the operand of an RS/6000 logical AND insn. */
1422 and_operand (op, mode)
1424 enum machine_mode mode;
1426 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1427 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1429 return (logical_operand (op, mode) || mask_operand (op, mode));
1432 /* Return 1 if the operand is a general register or memory operand. */
1435 reg_or_mem_operand (op, mode)
1437 enum machine_mode mode;
1439 return (gpc_reg_operand (op, mode)
1440 || memory_operand (op, mode)
1441 || volatile_mem_operand (op, mode));
1444 /* Return 1 if the operand is a general register or memory operand without
1445 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1449 lwa_operand (op, mode)
1451 enum machine_mode mode;
1455 if (reload_completed && GET_CODE (inner) == SUBREG)
1456 inner = SUBREG_REG (inner);
1458 return gpc_reg_operand (inner, mode)
1459 || (memory_operand (inner, mode)
1460 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1461 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1462 && (GET_CODE (XEXP (inner, 0)) != PLUS
1463 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1464 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1467 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1468 to CALL. This is a SYMBOL_REF or a pseudo-register, which will be
1472 call_operand (op, mode)
1474 enum machine_mode mode;
1476 if (mode != VOIDmode && GET_MODE (op) != mode)
1479 return (GET_CODE (op) == SYMBOL_REF
1480 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1483 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1484 this file and the function is not weakly defined. */
1487 current_file_function_operand (op, mode)
1489 enum machine_mode mode ATTRIBUTE_UNUSED;
1491 return (GET_CODE (op) == SYMBOL_REF
1492 && (SYMBOL_REF_FLAG (op)
1493 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1494 && ! DECL_WEAK (current_function_decl))));
1497 /* Return 1 if this operand is a valid input for a move insn. */
1500 input_operand (op, mode)
1502 enum machine_mode mode;
1504 /* Memory is always valid. */
1505 if (memory_operand (op, mode))
1508 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1509 if (GET_CODE (op) == CONSTANT_P_RTX)
1512 /* For floating-point, easy constants are valid. */
1513 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1515 && easy_fp_constant (op, mode))
1518 /* Allow any integer constant. */
1519 if (GET_MODE_CLASS (mode) == MODE_INT
1520 && (GET_CODE (op) == CONST_INT
1521 || GET_CODE (op) == CONST_DOUBLE))
1524 /* For floating-point or multi-word mode, the only remaining valid type
1526 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1527 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1528 return register_operand (op, mode);
1530 /* The only cases left are integral modes one word or smaller (we
1531 do not get called for MODE_CC values). These can be in any
1533 if (register_operand (op, mode))
1536 /* A SYMBOL_REF referring to the TOC is valid. */
1537 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1540 /* A constant pool expression (relative to the TOC) is valid */
1541 if (TOC_RELATIVE_EXPR_P (op))
1544 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1546 if (DEFAULT_ABI == ABI_V4
1547 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1548 && small_data_operand (op, Pmode))
1554 /* Return 1 for an operand in small memory on V.4/eabi. */
1557 small_data_operand (op, mode)
1558 rtx op ATTRIBUTE_UNUSED;
1559 enum machine_mode mode ATTRIBUTE_UNUSED;
1564 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1567 if (DEFAULT_ABI != ABI_V4)
1570 if (GET_CODE (op) == SYMBOL_REF)
1573 else if (GET_CODE (op) != CONST
1574 || GET_CODE (XEXP (op, 0)) != PLUS
1575 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1576 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1581 rtx sum = XEXP (op, 0);
1582 HOST_WIDE_INT summand;
1584 /* We have to be careful here, because it is the referenced address
1585 that must be 32k from _SDA_BASE_, not just the symbol. */
1586 summand = INTVAL (XEXP (sum, 1));
1587 if (summand < 0 || summand > g_switch_value)
1590 sym_ref = XEXP (sum, 0);
1593 if (*XSTR (sym_ref, 0) != '@')
1604 constant_pool_expr_1 (op, have_sym, have_toc)
1609 switch (GET_CODE(op))
1612 if (CONSTANT_POOL_ADDRESS_P (op))
1614 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1622 else if (! strcmp (XSTR (op, 0), toc_label_name))
1631 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc) &&
1632 constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc);
1634 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1643 constant_pool_expr_p (op)
1648 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1652 toc_relative_expr_p (op)
1657 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1660 /* Try machine-dependent ways of modifying an illegitimate address
1661 to be legitimate. If we find one, return the new, valid address.
1662 This is used from only one place: `memory_address' in explow.c.
1664 OLDX is the address as it was before break_out_memory_refs was
1665 called. In some cases it is useful to look at this to decide what
1668 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1670 It is always safe for this function to do nothing. It exists to
1671 recognize opportunities to optimize the output.
1673 On RS/6000, first check for the sum of a register with a constant
1674 integer that is out of range. If so, generate code to add the
1675 constant with the low-order 16 bits masked to the register and force
1676 this result into another register (this can be done with `cau').
1677 Then generate an address of REG+(CONST&0xffff), allowing for the
1678 possibility of bit 16 being a one.
1680 Then check for the sum of a register and something not constant, try to
1681 load the other things into a register and return the sum. */
1683 rs6000_legitimize_address (x, oldx, mode)
1685 rtx oldx ATTRIBUTE_UNUSED;
1686 enum machine_mode mode;
1688 if (GET_CODE (x) == PLUS
1689 && GET_CODE (XEXP (x, 0)) == REG
1690 && GET_CODE (XEXP (x, 1)) == CONST_INT
1691 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1693 HOST_WIDE_INT high_int, low_int;
1695 high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
1696 low_int = INTVAL (XEXP (x, 1)) & 0xffff;
1697 if (low_int & 0x8000)
1698 high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
1699 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1700 GEN_INT (high_int)), 0);
1701 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1703 else if (GET_CODE (x) == PLUS
1704 && GET_CODE (XEXP (x, 0)) == REG
1705 && GET_CODE (XEXP (x, 1)) != CONST_INT
1706 && GET_MODE_NUNITS (mode) == 1
1707 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1708 && (TARGET_POWERPC64 || mode != DImode)
1711 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1712 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1714 else if (ALTIVEC_VECTOR_MODE (mode))
1718 /* Make sure both operands are registers. */
1719 if (GET_CODE (x) == PLUS)
1720 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1721 force_reg (Pmode, XEXP (x, 1)));
1723 reg = force_reg (Pmode, x);
1726 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1727 && GET_CODE (x) != CONST_INT
1728 && GET_CODE (x) != CONST_DOUBLE
1730 && GET_MODE_NUNITS (mode) == 1
1731 && (GET_MODE_BITSIZE (mode) <= 32
1732 || (TARGET_HARD_FLOAT && mode == DFmode)))
1734 rtx reg = gen_reg_rtx (Pmode);
1735 emit_insn (gen_elf_high (reg, (x)));
1736 return gen_rtx_LO_SUM (Pmode, reg, (x));
1738 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1740 && GET_CODE (x) != CONST_INT
1741 && GET_CODE (x) != CONST_DOUBLE
1743 && (TARGET_HARD_FLOAT || mode != DFmode)
1747 rtx reg = gen_reg_rtx (Pmode);
1748 emit_insn (gen_macho_high (reg, (x)));
1749 return gen_rtx_LO_SUM (Pmode, reg, (x));
1752 && CONSTANT_POOL_EXPR_P (x)
1753 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1755 return create_TOC_reference (x);
1761 /* The convention appears to be to define this wherever it is used.
1762 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1763 is now used here. */
1764 #ifndef REG_MODE_OK_FOR_BASE_P
1765 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1768 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1769 replace the input X, or the original X if no replacement is called for.
1770 The output parameter *WIN is 1 if the calling macro should goto WIN,
1773 For RS/6000, we wish to handle large displacements off a base
1774 register by splitting the addend across an addiu/addis and the mem insn.
1775 This cuts number of extra insns needed from 3 to 1.
1777 On Darwin, we use this to generate code for floating point constants.
1778 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1779 The Darwin code is inside #if TARGET_MACHO because only then is
1780 machopic_function_base_name() defined. */
1782 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1784 enum machine_mode mode;
1787 int ind_levels ATTRIBUTE_UNUSED;
1790 /* We must recognize output that we have already generated ourselves. */
1791 if (GET_CODE (x) == PLUS
1792 && GET_CODE (XEXP (x, 0)) == PLUS
1793 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1794 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1795 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1797 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1798 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1799 opnum, (enum reload_type)type);
1804 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1805 && GET_CODE (x) == LO_SUM
1806 && GET_CODE (XEXP (x, 0)) == PLUS
1807 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1808 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1809 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1810 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1811 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1812 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1813 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1815 /* Result of previous invocation of this function on Darwin
1816 floating point constant. */
1817 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1818 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1819 opnum, (enum reload_type)type);
1824 if (GET_CODE (x) == PLUS
1825 && GET_CODE (XEXP (x, 0)) == REG
1826 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1827 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1828 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1830 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1831 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
1833 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
1835 /* Check for 32-bit overflow. */
1836 if (high + low != val)
1842 /* Reload the high part into a base reg; leave the low part
1843 in the mem directly. */
1845 x = gen_rtx_PLUS (GET_MODE (x),
1846 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
1850 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1851 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1852 opnum, (enum reload_type)type);
1857 if (GET_CODE (x) == SYMBOL_REF
1858 && DEFAULT_ABI == ABI_DARWIN
1861 /* Darwin load of floating point constant. */
1862 rtx offset = gen_rtx (CONST, Pmode,
1863 gen_rtx (MINUS, Pmode, x,
1864 gen_rtx (SYMBOL_REF, Pmode,
1865 machopic_function_base_name ())));
1866 x = gen_rtx (LO_SUM, GET_MODE (x),
1867 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
1868 gen_rtx (HIGH, Pmode, offset)), offset);
1869 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1870 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1871 opnum, (enum reload_type)type);
1877 && CONSTANT_POOL_EXPR_P (x)
1878 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
1880 (x) = create_TOC_reference (x);
1888 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1889 that is a valid memory address for an instruction.
1890 The MODE argument is the machine mode for the MEM expression
1891 that wants to use this address.
1893 On the RS/6000, there are four valid address: a SYMBOL_REF that
1894 refers to a constant pool entry of an address (or the sum of it
1895 plus a constant), a short (16-bit signed) constant plus a register,
1896 the sum of two registers, or a register indirect, possibly with an
1897 auto-increment. For DFmode and DImode with an constant plus register,
1898 we must ensure that both words are addressable or PowerPC64 with offset
1901 For modes spanning multiple registers (DFmode in 32-bit GPRs,
1902 32-bit DImode, TImode), indexed addressing cannot be used because
1903 adjacent memory cells are accessed by adding word-sized offsets
1904 during assembly output. */
1906 rs6000_legitimate_address (mode, x, reg_ok_strict)
1907 enum machine_mode mode;
1911 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
1913 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
1915 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
1917 if (LEGITIMATE_SMALL_DATA_P (mode, x))
1919 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
1921 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
1923 && GET_CODE (x) == PLUS
1924 && GET_CODE (XEXP (x, 0)) == REG
1925 && XEXP (x, 0) == virtual_stack_vars_rtx
1926 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1928 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
1931 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1932 && (TARGET_POWERPC64 || mode != DImode)
1933 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
1935 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
1940 /* Try to output insns to set TARGET equal to the constant C if it can
1941 be done in less than N insns. Do all computations in MODE.
1942 Returns the place where the output has been placed if it can be
1943 done and the insns have been emitted. If it would take more than N
1944 insns, zero is returned and no insns and emitted. */
1947 rs6000_emit_set_const (dest, mode, source, n)
1949 enum machine_mode mode;
1950 int n ATTRIBUTE_UNUSED;
1952 HOST_WIDE_INT c0, c1;
1954 if (mode == QImode || mode == HImode || mode == SImode)
1957 dest = gen_reg_rtx (mode);
1958 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
1962 if (GET_CODE (source) == CONST_INT)
1964 c0 = INTVAL (source);
1967 else if (GET_CODE (source) == CONST_DOUBLE)
1969 #if HOST_BITS_PER_WIDE_INT >= 64
1970 c0 = CONST_DOUBLE_LOW (source);
1973 c0 = CONST_DOUBLE_LOW (source);
1974 c1 = CONST_DOUBLE_HIGH (source);
1980 return rs6000_emit_set_long_const (dest, c0, c1);
1983 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
1984 fall back to a straight forward decomposition. We do this to avoid
1985 exponential run times encountered when looking for longer sequences
1986 with rs6000_emit_set_const. */
1988 rs6000_emit_set_long_const (dest, c1, c2)
1990 HOST_WIDE_INT c1, c2;
1992 if (!TARGET_POWERPC64)
1994 rtx operand1, operand2;
1996 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
1998 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2000 emit_move_insn (operand1, GEN_INT (c1));
2001 emit_move_insn (operand2, GEN_INT (c2));
2005 HOST_WIDE_INT d1, d2, d2_s, d3, d4;
2007 /* This function is called by rs6000_emit_allocate_stack after reload
2008 with a dest of r0. r0 is an invalid register for addsi. Use an addi
2009 and a shift instead. */
2010 int regnum = REGNO (dest);
2012 /* Decompose the entire word */
2013 #if HOST_BITS_PER_WIDE_INT >= 64
2014 if (c2 != -(c1 < 0))
2016 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2018 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2020 c1 = (c1 - d2) >> 32;
2021 d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2023 d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2027 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2029 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2034 d3 = ((c2 & 0xffff) ^ 0x8000) - 0x8000;
2036 d4 = ((c2 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2041 /* Construct the high word */
2044 emit_move_insn (dest, GEN_INT (d4));
2046 emit_move_insn (dest,
2047 gen_rtx_PLUS (DImode, dest, GEN_INT (d3)));
2050 emit_move_insn (dest, GEN_INT (d3));
2052 /* Shift it into place */
2053 if (d3 != 0 || d4 != 0)
2054 if (regnum == 0 && d2 != 0)
2055 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2057 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2059 /* Add in the low bits. */
2062 if (d3 != 0 || d4 != 0)
2066 emit_move_insn (dest, gen_rtx_PLUS (DImode, dest,
2068 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest,
2072 emit_move_insn (dest, gen_rtx_PLUS (DImode, dest,
2076 emit_move_insn (dest, GEN_INT (d2));
2079 if (d2 != 0 || d3 != 0 || d4 != 0)
2080 emit_move_insn (dest, gen_rtx_PLUS (DImode, dest, GEN_INT (d1)));
2082 emit_move_insn (dest, GEN_INT (d1));
2088 /* Emit a move from SOURCE to DEST in mode MODE. */
2090 rs6000_emit_move (dest, source, mode)
2093 enum machine_mode mode;
2097 operands[1] = source;
2099 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2100 if (GET_CODE (operands[1]) == CONST_DOUBLE
2101 && ! FLOAT_MODE_P (mode)
2102 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2104 /* FIXME. This should never happen. */
2105 /* Since it seems that it does, do the safe thing and convert
2108 GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
2110 if (GET_CODE (operands[1]) == CONST_DOUBLE
2111 && ! FLOAT_MODE_P (mode)
2112 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2113 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2114 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2115 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2118 /* Check if GCC is setting up a block move that will end up using FP
2119 registers as temporaries. We must make sure this is acceptable. */
2120 if (GET_CODE (operands[0]) == MEM
2121 && GET_CODE (operands[1]) == MEM
2123 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2124 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2125 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2126 ? 32 : MEM_ALIGN (operands[0])))
2127 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2129 : MEM_ALIGN (operands[1]))))
2130 && ! MEM_VOLATILE_P (operands [0])
2131 && ! MEM_VOLATILE_P (operands [1]))
2133 emit_move_insn (adjust_address (operands[0], SImode, 0),
2134 adjust_address (operands[1], SImode, 0));
2135 emit_move_insn (adjust_address (operands[0], SImode, 4),
2136 adjust_address (operands[1], SImode, 4));
2140 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2141 operands[1] = force_reg (mode, operands[1]);
2143 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2144 && GET_CODE (operands[0]) == MEM)
2148 if (reload_in_progress || reload_completed)
2149 regnum = true_regnum (operands[1]);
2150 else if (GET_CODE (operands[1]) == REG)
2151 regnum = REGNO (operands[1]);
2155 /* If operands[1] is a register, on POWER it may have
2156 double-precision data in it, so truncate it to single
2158 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2161 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2162 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2163 operands[1] = newreg;
2167 /* Handle the case where reload calls us with an invalid address;
2168 and the case of CONSTANT_P_RTX. */
2169 if (! general_operand (operands[1], mode)
2170 || ! nonimmediate_operand (operands[0], mode)
2171 || GET_CODE (operands[1]) == CONSTANT_P_RTX)
2173 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2177 /* FIXME: In the long term, this switch statement should go away
2178 and be replaced by a sequence of tests based on things like
2184 if (CONSTANT_P (operands[1])
2185 && GET_CODE (operands[1]) != CONST_INT)
2186 operands[1] = force_const_mem (mode, operands[1]);
2192 if (CONSTANT_P (operands[1])
2193 && ! easy_fp_constant (operands[1], mode))
2194 operands[1] = force_const_mem (mode, operands[1]);
2201 /* fixme: aldyh -- allow vector constants when they are implemented. */
2202 if (CONSTANT_P (operands[1]))
2203 operands[1] = force_const_mem (mode, operands[1]);
2208 /* Use default pattern for address of ELF small data */
2211 && DEFAULT_ABI == ABI_V4
2212 && (GET_CODE (operands[1]) == SYMBOL_REF
2213 || GET_CODE (operands[1]) == CONST)
2214 && small_data_operand (operands[1], mode))
2216 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2220 if (DEFAULT_ABI == ABI_V4
2221 && mode == Pmode && mode == SImode
2222 && flag_pic == 1 && got_operand (operands[1], mode))
2224 emit_insn (gen_movsi_got (operands[0], operands[1]));
2228 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2229 && TARGET_NO_TOC && ! flag_pic
2231 && CONSTANT_P (operands[1])
2232 && GET_CODE (operands[1]) != HIGH
2233 && GET_CODE (operands[1]) != CONST_INT)
2235 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2237 /* If this is a function address on -mcall-aixdesc,
2238 convert it to the address of the descriptor. */
2239 if (DEFAULT_ABI == ABI_AIX
2240 && GET_CODE (operands[1]) == SYMBOL_REF
2241 && XSTR (operands[1], 0)[0] == '.')
2243 const char *name = XSTR (operands[1], 0);
2245 while (*name == '.')
2247 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2248 CONSTANT_POOL_ADDRESS_P (new_ref)
2249 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2250 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2251 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2252 operands[1] = new_ref;
2255 if (DEFAULT_ABI == ABI_DARWIN)
2257 emit_insn (gen_macho_high (target, operands[1]));
2258 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2262 emit_insn (gen_elf_high (target, operands[1]));
2263 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2267 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2268 and we have put it in the TOC, we just need to make a TOC-relative
2271 && GET_CODE (operands[1]) == SYMBOL_REF
2272 && CONSTANT_POOL_EXPR_P (operands[1])
2273 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2274 get_pool_mode (operands[1])))
2276 operands[1] = create_TOC_reference (operands[1]);
2278 else if (mode == Pmode
2279 && CONSTANT_P (operands[1])
2280 && ((GET_CODE (operands[1]) != CONST_INT
2281 && ! easy_fp_constant (operands[1], mode))
2282 || (GET_CODE (operands[1]) == CONST_INT
2283 && num_insns_constant (operands[1], mode) > 2)
2284 || (GET_CODE (operands[0]) == REG
2285 && FP_REGNO_P (REGNO (operands[0]))))
2286 && GET_CODE (operands[1]) != HIGH
2287 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2288 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2290 /* Emit a USE operation so that the constant isn't deleted if
2291 expensive optimizations are turned on because nobody
2292 references it. This should only be done for operands that
2293 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2294 This should not be done for operands that contain LABEL_REFs.
2295 For now, we just handle the obvious case. */
2296 if (GET_CODE (operands[1]) != LABEL_REF)
2297 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2300 /* Darwin uses a special PIC legitimizer. */
2301 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2304 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2306 if (operands[0] != operands[1])
2307 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2312 /* If we are to limit the number of things we put in the TOC and
2313 this is a symbol plus a constant we can add in one insn,
2314 just put the symbol in the TOC and add the constant. Don't do
2315 this if reload is in progress. */
2316 if (GET_CODE (operands[1]) == CONST
2317 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2318 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2319 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2320 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2321 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2322 && ! side_effects_p (operands[0]))
2325 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2326 rtx other = XEXP (XEXP (operands[1], 0), 1);
2328 sym = force_reg (mode, sym);
2330 emit_insn (gen_addsi3 (operands[0], sym, other));
2332 emit_insn (gen_adddi3 (operands[0], sym, other));
2336 operands[1] = force_const_mem (mode, operands[1]);
2339 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2340 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2341 get_pool_constant (XEXP (operands[1], 0)),
2342 get_pool_mode (XEXP (operands[1], 0))))
2345 = gen_rtx_MEM (mode,
2346 create_TOC_reference (XEXP (operands[1], 0)));
2347 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2348 RTX_UNCHANGING_P (operands[1]) = 1;
2354 if (GET_CODE (operands[0]) == MEM
2355 && GET_CODE (XEXP (operands[0], 0)) != REG
2356 && ! reload_in_progress)
2358 = replace_equiv_address (operands[0],
2359 copy_addr_to_reg (XEXP (operands[0], 0)));
2361 if (GET_CODE (operands[1]) == MEM
2362 && GET_CODE (XEXP (operands[1], 0)) != REG
2363 && ! reload_in_progress)
2365 = replace_equiv_address (operands[1],
2366 copy_addr_to_reg (XEXP (operands[1], 0)));
2373 /* Above, we may have called force_const_mem which may have returned
2374 an invalid address. If we can, fix this up; otherwise, reload will
2375 have to deal with it. */
2376 if (GET_CODE (operands[1]) == MEM
2377 && ! memory_address_p (mode, XEXP (operands[1], 0))
2378 && ! reload_in_progress)
2379 operands[1] = adjust_address (operands[1], mode, 0);
2381 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2385 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2386 for a call to a function whose data type is FNTYPE.
2387 For a library call, FNTYPE is 0.
2389 For incoming args we set the number of arguments in the prototype large
2390 so we never return a PARALLEL. */
2393 init_cumulative_args (cum, fntype, libname, incoming)
2394 CUMULATIVE_ARGS *cum;
2396 rtx libname ATTRIBUTE_UNUSED;
2399 static CUMULATIVE_ARGS zero_cumulative;
2401 *cum = zero_cumulative;
2403 cum->fregno = FP_ARG_MIN_REG;
2404 cum->vregno = ALTIVEC_ARG_MIN_REG;
2405 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2406 cum->call_cookie = CALL_NORMAL;
2407 cum->sysv_gregno = GP_ARG_MIN_REG;
2410 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2412 else if (cum->prototype)
2413 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2414 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2415 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2418 cum->nargs_prototype = 0;
2420 cum->orig_nargs = cum->nargs_prototype;
2422 /* Check for longcall's */
2423 if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2424 cum->call_cookie = CALL_LONG;
2426 if (TARGET_DEBUG_ARG)
2428 fprintf (stderr, "\ninit_cumulative_args:");
2431 tree ret_type = TREE_TYPE (fntype);
2432 fprintf (stderr, " ret code = %s,",
2433 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2436 if (cum->call_cookie & CALL_LONG)
2437 fprintf (stderr, " longcall,");
2439 fprintf (stderr, " proto = %d, nargs = %d\n",
2440 cum->prototype, cum->nargs_prototype);
2444 /* If defined, a C expression which determines whether, and in which
2445 direction, to pad out an argument with extra space. The value
2446 should be of type `enum direction': either `upward' to pad above
2447 the argument, `downward' to pad below, or `none' to inhibit
2450 For the AIX ABI structs are always stored left shifted in their
2454 function_arg_padding (mode, type)
2455 enum machine_mode mode;
2458 if (type != 0 && AGGREGATE_TYPE_P (type))
2461 /* This is the default definition. */
2462 return (! BYTES_BIG_ENDIAN
2465 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2466 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2467 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2468 ? downward : upward));
2471 /* If defined, a C expression that gives the alignment boundary, in bits,
2472 of an argument with the specified mode and type. If it is not defined,
2473 PARM_BOUNDARY is used for all arguments.
2475 V.4 wants long longs to be double word aligned. */
2478 function_arg_boundary (mode, type)
2479 enum machine_mode mode;
2480 tree type ATTRIBUTE_UNUSED;
2482 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2484 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2487 return PARM_BOUNDARY;
2490 /* Update the data in CUM to advance over an argument
2491 of mode MODE and data type TYPE.
2492 (TYPE is null for libcalls where that information may not be available.) */
2495 function_arg_advance (cum, mode, type, named)
2496 CUMULATIVE_ARGS *cum;
2497 enum machine_mode mode;
2501 cum->nargs_prototype--;
2503 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2505 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2508 cum->words += RS6000_ARG_SIZE (mode, type);
2510 else if (DEFAULT_ABI == ABI_V4)
2512 if (TARGET_HARD_FLOAT
2513 && (mode == SFmode || mode == DFmode))
2515 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2520 cum->words += cum->words & 1;
2521 cum->words += RS6000_ARG_SIZE (mode, type);
2527 int gregno = cum->sysv_gregno;
2529 /* Aggregates and IEEE quad get passed by reference. */
2530 if ((type && AGGREGATE_TYPE_P (type))
2534 n_words = RS6000_ARG_SIZE (mode, type);
2536 /* Long long is put in odd registers. */
2537 if (n_words == 2 && (gregno & 1) == 0)
2540 /* Long long is not split between registers and stack. */
2541 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2543 /* Long long is aligned on the stack. */
2545 cum->words += cum->words & 1;
2546 cum->words += n_words;
2549 /* Note: continuing to accumulate gregno past when we've started
2550 spilling to the stack indicates the fact that we've started
2551 spilling to the stack to expand_builtin_saveregs. */
2552 cum->sysv_gregno = gregno + n_words;
2555 if (TARGET_DEBUG_ARG)
2557 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2558 cum->words, cum->fregno);
2559 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2560 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2561 fprintf (stderr, "mode = %4s, named = %d\n",
2562 GET_MODE_NAME (mode), named);
2567 int align = (TARGET_32BIT && (cum->words & 1) != 0
2568 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2570 cum->words += align + RS6000_ARG_SIZE (mode, type);
2572 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2575 if (TARGET_DEBUG_ARG)
2577 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2578 cum->words, cum->fregno);
2579 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2580 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2581 fprintf (stderr, "named = %d, align = %d\n", named, align);
2586 /* Determine where to put an argument to a function.
2587 Value is zero to push the argument on the stack,
2588 or a hard register in which to store the argument.
2590 MODE is the argument's machine mode.
2591 TYPE is the data type of the argument (as a tree).
2592 This is null for libcalls where that information may
2594 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2595 the preceding args and about the function being called.
2596 NAMED is nonzero if this argument is a named parameter
2597 (otherwise it is an extra parameter matching an ellipsis).
2599 On RS/6000 the first eight words of non-FP are normally in registers
2600 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2601 Under V.4, the first 8 FP args are in registers.
2603 If this is floating-point and no prototype is specified, we use
2604 both an FP and integer register (or possibly FP reg and stack). Library
2605 functions (when TYPE is zero) always have the proper types for args,
2606 so we can pass the FP value just in one register. emit_library_function
2607 doesn't support PARALLEL anyway. */
2610 function_arg (cum, mode, type, named)
2611 CUMULATIVE_ARGS *cum;
2612 enum machine_mode mode;
2616 enum rs6000_abi abi = DEFAULT_ABI;
2618 /* Return a marker to indicate whether CR1 needs to set or clear the
2619 bit that V.4 uses to say fp args were passed in registers.
2620 Assume that we don't need the marker for software floating point,
2621 or compiler generated library calls. */
2622 if (mode == VOIDmode)
2625 && TARGET_HARD_FLOAT
2626 && cum->nargs_prototype < 0
2627 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2629 return GEN_INT (cum->call_cookie
2630 | ((cum->fregno == FP_ARG_MIN_REG)
2631 ? CALL_V4_SET_FP_ARGS
2632 : CALL_V4_CLEAR_FP_ARGS));
2635 return GEN_INT (cum->call_cookie);
2638 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2640 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2641 return gen_rtx_REG (mode, cum->vregno);
2645 else if (abi == ABI_V4)
2647 if (TARGET_HARD_FLOAT
2648 && (mode == SFmode || mode == DFmode))
2650 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2651 return gen_rtx_REG (mode, cum->fregno);
2658 int gregno = cum->sysv_gregno;
2660 /* Aggregates and IEEE quad get passed by reference. */
2661 if ((type && AGGREGATE_TYPE_P (type))
2665 n_words = RS6000_ARG_SIZE (mode, type);
2667 /* Long long is put in odd registers. */
2668 if (n_words == 2 && (gregno & 1) == 0)
2671 /* Long long is not split between registers and stack. */
2672 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2673 return gen_rtx_REG (mode, gregno);
2680 int align = (TARGET_32BIT && (cum->words & 1) != 0
2681 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2682 int align_words = cum->words + align;
2684 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2687 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2690 || ((cum->nargs_prototype > 0)
2691 /* IBM AIX extended its linkage convention definition always
2692 to require FP args after register save area hole on the
2694 && (DEFAULT_ABI != ABI_AIX
2696 || (align_words < GP_ARG_NUM_REG))))
2697 return gen_rtx_REG (mode, cum->fregno);
2699 return gen_rtx_PARALLEL (mode,
2701 gen_rtx_EXPR_LIST (VOIDmode,
2702 ((align_words >= GP_ARG_NUM_REG)
2705 + RS6000_ARG_SIZE (mode, type)
2707 /* If this is partially on the stack, then
2708 we only include the portion actually
2709 in registers here. */
2710 ? gen_rtx_REG (SImode,
2711 GP_ARG_MIN_REG + align_words)
2712 : gen_rtx_REG (mode,
2713 GP_ARG_MIN_REG + align_words))),
2715 gen_rtx_EXPR_LIST (VOIDmode,
2716 gen_rtx_REG (mode, cum->fregno),
2719 else if (align_words < GP_ARG_NUM_REG)
2720 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2726 /* For an arg passed partly in registers and partly in memory,
2727 this is the number of registers used.
2728 For args passed entirely in registers or entirely in memory, zero. */
2731 function_arg_partial_nregs (cum, mode, type, named)
2732 CUMULATIVE_ARGS *cum;
2733 enum machine_mode mode;
2735 int named ATTRIBUTE_UNUSED;
2737 if (DEFAULT_ABI == ABI_V4)
2740 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2741 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2743 if (cum->nargs_prototype >= 0)
2747 if (cum->words < GP_ARG_NUM_REG
2748 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2750 int ret = GP_ARG_NUM_REG - cum->words;
2751 if (ret && TARGET_DEBUG_ARG)
2752 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2760 /* A C expression that indicates when an argument must be passed by
2761 reference. If nonzero for an argument, a copy of that argument is
2762 made in memory and a pointer to the argument is passed instead of
2763 the argument itself. The pointer is passed in whatever way is
2764 appropriate for passing a pointer to that type.
2766 Under V.4, structures and unions are passed by reference. */
2769 function_arg_pass_by_reference (cum, mode, type, named)
2770 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2771 enum machine_mode mode ATTRIBUTE_UNUSED;
2773 int named ATTRIBUTE_UNUSED;
2775 if (DEFAULT_ABI == ABI_V4
2776 && ((type && AGGREGATE_TYPE_P (type))
2779 if (TARGET_DEBUG_ARG)
2780 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2788 /* Perform any needed actions needed for a function that is receiving a
2789 variable number of arguments.
2793 MODE and TYPE are the mode and type of the current parameter.
2795 PRETEND_SIZE is a variable that should be set to the amount of stack
2796 that must be pushed by the prolog to pretend that our caller pushed
2799 Normally, this macro will push all remaining incoming registers on the
2800 stack and set PRETEND_SIZE to the length of the registers pushed. */
2803 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2804 CUMULATIVE_ARGS *cum;
2805 enum machine_mode mode;
2811 CUMULATIVE_ARGS next_cum;
2812 int reg_size = TARGET_32BIT ? 4 : 8;
2813 rtx save_area = NULL_RTX, mem;
2814 int first_reg_offset, set;
2818 fntype = TREE_TYPE (current_function_decl);
2819 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2820 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2821 != void_type_node));
2823 /* For varargs, we do not want to skip the dummy va_dcl argument.
2824 For stdargs, we do want to skip the last named argument. */
2827 function_arg_advance (&next_cum, mode, type, 1);
2829 if (DEFAULT_ABI == ABI_V4)
2831 /* Indicate to allocate space on the stack for varargs save area. */
2832 /* ??? Does this really have to be located at a magic spot on the
2833 stack, or can we allocate this with assign_stack_local instead. */
2834 cfun->machine->sysv_varargs_p = 1;
2836 save_area = plus_constant (virtual_stack_vars_rtx,
2837 - RS6000_VARARGS_SIZE);
2839 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2843 first_reg_offset = next_cum.words;
2844 save_area = virtual_incoming_args_rtx;
2845 cfun->machine->sysv_varargs_p = 0;
2847 if (MUST_PASS_IN_STACK (mode, type))
2848 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2851 set = get_varargs_alias_set ();
2852 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2854 mem = gen_rtx_MEM (BLKmode,
2855 plus_constant (save_area,
2856 first_reg_offset * reg_size)),
2857 set_mem_alias_set (mem, set);
2858 set_mem_align (mem, BITS_PER_WORD);
2861 (GP_ARG_MIN_REG + first_reg_offset, mem,
2862 GP_ARG_NUM_REG - first_reg_offset,
2863 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2865 /* ??? Does ABI_V4 need this at all? */
2866 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2869 /* Save FP registers if needed. */
2870 if (DEFAULT_ABI == ABI_V4
2871 && TARGET_HARD_FLOAT && ! no_rtl
2872 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2874 int fregno = next_cum.fregno;
2875 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2876 rtx lab = gen_label_rtx ();
2877 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2879 emit_jump_insn (gen_rtx_SET (VOIDmode,
2881 gen_rtx_IF_THEN_ELSE (VOIDmode,
2882 gen_rtx_NE (VOIDmode, cr1,
2884 gen_rtx_LABEL_REF (VOIDmode, lab),
2887 while (fregno <= FP_ARG_V4_MAX_REG)
2889 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2890 set_mem_alias_set (mem, set);
2891 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2900 /* Create the va_list data type. */
2903 rs6000_build_va_list ()
2905 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2907 /* For AIX, prefer 'char *' because that's what the system
2908 header files like. */
2909 if (DEFAULT_ABI != ABI_V4)
2910 return build_pointer_type (char_type_node);
2912 record = make_lang_type (RECORD_TYPE);
2913 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2915 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
2916 unsigned_char_type_node);
2917 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
2918 unsigned_char_type_node);
2919 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
2921 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
2924 DECL_FIELD_CONTEXT (f_gpr) = record;
2925 DECL_FIELD_CONTEXT (f_fpr) = record;
2926 DECL_FIELD_CONTEXT (f_ovf) = record;
2927 DECL_FIELD_CONTEXT (f_sav) = record;
2929 TREE_CHAIN (record) = type_decl;
2930 TYPE_NAME (record) = type_decl;
2931 TYPE_FIELDS (record) = f_gpr;
2932 TREE_CHAIN (f_gpr) = f_fpr;
2933 TREE_CHAIN (f_fpr) = f_ovf;
2934 TREE_CHAIN (f_ovf) = f_sav;
2936 layout_type (record);
2938 /* The correct type is an array type of one element. */
2939 return build_array_type (record, build_index_type (size_zero_node));
2942 /* Implement va_start. */
2945 rs6000_va_start (stdarg_p, valist, nextarg)
2950 HOST_WIDE_INT words, n_gpr, n_fpr;
2951 tree f_gpr, f_fpr, f_ovf, f_sav;
2952 tree gpr, fpr, ovf, sav, t;
2954 /* Only SVR4 needs something special. */
2955 if (DEFAULT_ABI != ABI_V4)
2957 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
2961 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
2962 f_fpr = TREE_CHAIN (f_gpr);
2963 f_ovf = TREE_CHAIN (f_fpr);
2964 f_sav = TREE_CHAIN (f_ovf);
2966 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
2967 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
2968 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
2969 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
2970 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
2972 /* Count number of gp and fp argument registers used. */
2973 words = current_function_args_info.words;
2974 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
2975 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
2977 if (TARGET_DEBUG_ARG)
2979 fputs ("va_start: words = ", stderr);
2980 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
2981 fputs (", n_gpr = ", stderr);
2982 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
2983 fputs (", n_fpr = ", stderr);
2984 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
2985 putc ('\n', stderr);
2988 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
2989 TREE_SIDE_EFFECTS (t) = 1;
2990 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2992 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
2993 TREE_SIDE_EFFECTS (t) = 1;
2994 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2996 /* Find the overflow area. */
2997 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
2999 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3000 build_int_2 (words * UNITS_PER_WORD, 0));
3001 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3002 TREE_SIDE_EFFECTS (t) = 1;
3003 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3005 /* Find the register save area. */
3006 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3007 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3008 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3009 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3010 TREE_SIDE_EFFECTS (t) = 1;
3011 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3014 /* Implement va_arg. */
3017 rs6000_va_arg (valist, type)
3020 tree f_gpr, f_fpr, f_ovf, f_sav;
3021 tree gpr, fpr, ovf, sav, reg, t, u;
3022 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3023 rtx lab_false, lab_over, addr_rtx, r;
3025 /* For AIX, the rule is that structures are passed left-aligned in
3026 their stack slot. However, GCC does not presently do this:
3027 structures which are the same size as integer types are passed
3028 right-aligned, as if they were in fact integers. This only
3029 matters for structures of size 1 or 2, or 4 when TARGET_64BIT. */
3030 if (DEFAULT_ABI != ABI_V4)
3032 HOST_WIDE_INT align, rounded_size;
3033 enum machine_mode mode;
3036 /* Compute the rounded size of the type. */
3037 align = PARM_BOUNDARY / BITS_PER_UNIT;
3038 rounded_size = (((int_size_in_bytes (type) + align - 1) / align)
3043 mode = TYPE_MODE (type);
3044 if (mode != BLKmode)
3047 adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
3048 if (rounded_size > align)
3051 addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3052 build_int_2 (rounded_size - adj, 0));
3055 addr_rtx = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3056 addr_rtx = copy_to_reg (addr_rtx);
3058 /* Compute new value for AP. */
3059 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3060 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3061 build_int_2 (rounded_size, 0)));
3062 TREE_SIDE_EFFECTS (t) = 1;
3063 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3068 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3069 f_fpr = TREE_CHAIN (f_gpr);
3070 f_ovf = TREE_CHAIN (f_fpr);
3071 f_sav = TREE_CHAIN (f_ovf);
3073 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3074 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3075 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3076 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3077 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3079 size = int_size_in_bytes (type);
3080 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3082 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3084 /* Aggregates and long doubles are passed by reference. */
3090 size = rsize = UNITS_PER_WORD;
3092 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3094 /* FP args go in FP registers, if present. */
3103 /* Otherwise into GP registers. */
3111 /* Pull the value out of the saved registers ... */
3113 lab_false = gen_label_rtx ();
3114 lab_over = gen_label_rtx ();
3115 addr_rtx = gen_reg_rtx (Pmode);
3117 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3118 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3121 /* Long long is aligned in the registers. */
3124 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3125 build_int_2 (n_reg - 1, 0));
3126 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3127 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3128 TREE_SIDE_EFFECTS (u) = 1;
3129 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3133 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3137 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, build_int_2 (n_reg, 0));
3138 TREE_SIDE_EFFECTS (u) = 1;
3140 u = build1 (CONVERT_EXPR, integer_type_node, u);
3141 TREE_SIDE_EFFECTS (u) = 1;
3143 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3144 TREE_SIDE_EFFECTS (u) = 1;
3146 t = build (PLUS_EXPR, ptr_type_node, t, u);
3147 TREE_SIDE_EFFECTS (t) = 1;
3149 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3151 emit_move_insn (addr_rtx, r);
3153 emit_jump_insn (gen_jump (lab_over));
3155 emit_label (lab_false);
3157 /* ... otherwise out of the overflow area. */
3159 /* Make sure we don't find reg 7 for the next int arg. */
3162 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3163 TREE_SIDE_EFFECTS (t) = 1;
3164 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3167 /* Care for on-stack alignment if needed. */
3172 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (7, 0));
3173 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-8, -1));
3177 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3179 emit_move_insn (addr_rtx, r);
3181 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3182 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3183 TREE_SIDE_EFFECTS (t) = 1;
3184 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3186 emit_label (lab_over);
3190 r = gen_rtx_MEM (Pmode, addr_rtx);
3191 set_mem_alias_set (r, get_varargs_alias_set ());
3192 emit_move_insn (addr_rtx, r);
3200 #define def_builtin(MASK, NAME, TYPE, CODE) \
3202 if ((MASK) & target_flags) \
3203 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3206 struct builtin_description
3208 const unsigned int mask;
3209 const enum insn_code icode;
3210 const char *const name;
3211 const enum rs6000_builtins code;
3214 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3216 static const struct builtin_description bdesc_3arg[] =
3218 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3219 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3220 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3221 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3222 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3223 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3224 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3225 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3226 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3227 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3228 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3229 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3230 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3231 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3232 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3233 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3234 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3235 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3236 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3237 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3238 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3239 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3240 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3243 /* Simple binary operations: VECc = foo (VECa, VECb). */
3245 static const struct builtin_description bdesc_2arg[] =
3247 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3248 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3249 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3250 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3251 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3252 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3253 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3254 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3255 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3256 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3257 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3258 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3259 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3260 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3261 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3262 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3263 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3264 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3265 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3266 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3267 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3268 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3269 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3270 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3271 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3272 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3273 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3274 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3275 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3276 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3277 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3278 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3279 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3280 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3281 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3282 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3283 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3284 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3285 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3286 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3287 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3288 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3289 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3290 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3291 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3292 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3293 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3294 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3295 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3296 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3297 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3298 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3299 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3300 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3301 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3302 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3303 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3304 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3305 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3306 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3307 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3308 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3309 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3310 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3311 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3312 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3313 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3314 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3315 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3316 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3317 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3318 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3319 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3320 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3321 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3322 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3323 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3324 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3325 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3326 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3327 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3328 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3329 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3330 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3331 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3332 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3333 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3334 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3335 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3336 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3337 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3338 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3339 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3340 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3341 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3342 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3343 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3344 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3345 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3346 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3347 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3348 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3349 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3350 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3351 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3352 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3353 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3354 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3356 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3357 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3358 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3359 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3362 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3365 static const struct builtin_description bdesc_1arg[] =
3367 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3368 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3369 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3370 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3371 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3372 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3373 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3374 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3375 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3376 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3377 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3378 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3379 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3380 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3381 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3382 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3383 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
3387 altivec_expand_unop_builtin (icode, arglist, target)
3388 enum insn_code icode;
3393 tree arg0 = TREE_VALUE (arglist);
3394 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3395 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3396 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3398 /* If we got invalid arguments bail out before generating bad rtl. */
3399 if (arg0 == error_mark_node)
3403 || GET_MODE (target) != tmode
3404 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3405 target = gen_reg_rtx (tmode);
3407 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3408 op0 = copy_to_mode_reg (mode0, op0);
3410 pat = GEN_FCN (icode) (target, op0);
3418 altivec_expand_binop_builtin (icode, arglist, target)
3419 enum insn_code icode;
3424 tree arg0 = TREE_VALUE (arglist);
3425 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3426 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3427 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3428 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3429 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3430 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3432 /* If we got invalid arguments bail out before generating bad rtl. */
3433 if (arg0 == error_mark_node || arg1 == error_mark_node)
3437 || GET_MODE (target) != tmode
3438 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3439 target = gen_reg_rtx (tmode);
3441 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3442 op0 = copy_to_mode_reg (mode0, op0);
3443 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3444 op1 = copy_to_mode_reg (mode1, op1);
3446 pat = GEN_FCN (icode) (target, op0, op1);
3454 altivec_expand_ternop_builtin (icode, arglist, target)
3455 enum insn_code icode;
3460 tree arg0 = TREE_VALUE (arglist);
3461 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3462 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3463 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3464 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3465 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3466 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3467 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3468 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3469 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3471 /* If we got invalid arguments bail out before generating bad rtl. */
3472 if (arg0 == error_mark_node
3473 || arg1 == error_mark_node
3474 || arg2 == error_mark_node)
3478 || GET_MODE (target) != tmode
3479 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3480 target = gen_reg_rtx (tmode);
3482 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3483 op0 = copy_to_mode_reg (mode0, op0);
3484 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3485 op1 = copy_to_mode_reg (mode1, op1);
3486 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3487 op2 = copy_to_mode_reg (mode2, op2);
3489 pat = GEN_FCN (icode) (target, op0, op1, op2);
3497 altivec_expand_builtin (exp, target)
3501 struct builtin_description *d;
3503 enum insn_code icode;
3504 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3505 tree arglist = TREE_OPERAND (exp, 1);
3508 enum machine_mode tmode, mode0, mode1;
3509 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3513 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3514 icode = CODE_FOR_altivec_lvx_16qi;
3515 arg0 = TREE_VALUE (arglist);
3516 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3517 tmode = insn_data[icode].operand[0].mode;
3518 mode0 = insn_data[icode].operand[1].mode;
3521 || GET_MODE (target) != tmode
3522 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3523 target = gen_reg_rtx (tmode);
3525 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3526 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3528 pat = GEN_FCN (icode) (target, op0);
3534 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3535 icode = CODE_FOR_altivec_lvx_8hi;
3536 arg0 = TREE_VALUE (arglist);
3537 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3538 tmode = insn_data[icode].operand[0].mode;
3539 mode0 = insn_data[icode].operand[1].mode;
3542 || GET_MODE (target) != tmode
3543 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3544 target = gen_reg_rtx (tmode);
3546 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3547 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3549 pat = GEN_FCN (icode) (target, op0);
3555 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3556 icode = CODE_FOR_altivec_lvx_4si;
3557 arg0 = TREE_VALUE (arglist);
3558 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3559 tmode = insn_data[icode].operand[0].mode;
3560 mode0 = insn_data[icode].operand[1].mode;
3563 || GET_MODE (target) != tmode
3564 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3565 target = gen_reg_rtx (tmode);
3567 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3568 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3570 pat = GEN_FCN (icode) (target, op0);
3576 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3577 icode = CODE_FOR_altivec_lvx_4sf;
3578 arg0 = TREE_VALUE (arglist);
3579 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3580 tmode = insn_data[icode].operand[0].mode;
3581 mode0 = insn_data[icode].operand[1].mode;
3584 || GET_MODE (target) != tmode
3585 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3586 target = gen_reg_rtx (tmode);
3588 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3589 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3591 pat = GEN_FCN (icode) (target, op0);
3597 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3598 icode = CODE_FOR_altivec_stvx_16qi;
3599 arg0 = TREE_VALUE (arglist);
3600 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3601 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3602 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3603 mode0 = insn_data[icode].operand[0].mode;
3604 mode1 = insn_data[icode].operand[1].mode;
3606 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3607 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3608 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3609 op1 = copy_to_mode_reg (mode1, op1);
3611 pat = GEN_FCN (icode) (op0, op1);
3617 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3618 icode = CODE_FOR_altivec_stvx_8hi;
3619 arg0 = TREE_VALUE (arglist);
3620 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3621 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3622 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3623 mode0 = insn_data[icode].operand[0].mode;
3624 mode1 = insn_data[icode].operand[1].mode;
3626 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3627 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3628 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3629 op1 = copy_to_mode_reg (mode1, op1);
3631 pat = GEN_FCN (icode) (op0, op1);
3637 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3638 icode = CODE_FOR_altivec_stvx_4si;
3639 arg0 = TREE_VALUE (arglist);
3640 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3641 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3642 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3643 mode0 = insn_data[icode].operand[0].mode;
3644 mode1 = insn_data[icode].operand[1].mode;
3646 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3647 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3648 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3649 op1 = copy_to_mode_reg (mode1, op1);
3651 pat = GEN_FCN (icode) (op0, op1);
3657 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3658 icode = CODE_FOR_altivec_stvx_4sf;
3659 arg0 = TREE_VALUE (arglist);
3660 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3661 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3662 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3663 mode0 = insn_data[icode].operand[0].mode;
3664 mode1 = insn_data[icode].operand[1].mode;
3666 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3667 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3668 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3669 op1 = copy_to_mode_reg (mode1, op1);
3671 pat = GEN_FCN (icode) (op0, op1);
3678 /* Handle simple unary operations. */
3679 d = (struct builtin_description *) bdesc_1arg;
3680 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
3681 if (d->code == fcode)
3682 return altivec_expand_unop_builtin (d->icode, arglist, target);
3684 /* Handle simple binary operations. */
3685 d = (struct builtin_description *) bdesc_2arg;
3686 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
3687 if (d->code == fcode)
3688 return altivec_expand_binop_builtin (d->icode, arglist, target);
3690 /* Handle simple ternary operations. */
3691 d = (struct builtin_description *) bdesc_3arg;
3692 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
3693 if (d->code == fcode)
3694 return altivec_expand_ternop_builtin (d->icode, arglist, target);
3700 /* Expand an expression EXP that calls a built-in function,
3701 with result going to TARGET if that's convenient
3702 (and in mode MODE if that's convenient).
3703 SUBTARGET may be used as the target for computing one of EXP's operands.
3704 IGNORE is nonzero if the value is to be ignored. */
3707 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
3710 rtx subtarget ATTRIBUTE_UNUSED;
3711 enum machine_mode mode ATTRIBUTE_UNUSED;
3712 int ignore ATTRIBUTE_UNUSED;
3715 return altivec_expand_builtin (exp, target);
3721 rs6000_init_builtins ()
3724 altivec_init_builtins ();
3728 altivec_init_builtins (void)
3730 struct builtin_description * d;
3733 tree endlink = void_list_node;
3735 tree pint_type_node = build_pointer_type (integer_type_node);
3736 tree pshort_type_node = build_pointer_type (short_integer_type_node);
3737 tree pchar_type_node = build_pointer_type (char_type_node);
3738 tree pfloat_type_node = build_pointer_type (float_type_node);
3740 tree v4sf_ftype_v4sf_v4sf_v16qi
3741 = build_function_type (V4SF_type_node,
3742 tree_cons (NULL_TREE, V4SF_type_node,
3743 tree_cons (NULL_TREE, V4SF_type_node,
3744 tree_cons (NULL_TREE,
3747 tree v4si_ftype_v4si_v4si_v16qi
3748 = build_function_type (V4SI_type_node,
3749 tree_cons (NULL_TREE, V4SI_type_node,
3750 tree_cons (NULL_TREE, V4SI_type_node,
3751 tree_cons (NULL_TREE,
3754 tree v8hi_ftype_v8hi_v8hi_v16qi
3755 = build_function_type (V8HI_type_node,
3756 tree_cons (NULL_TREE, V8HI_type_node,
3757 tree_cons (NULL_TREE, V8HI_type_node,
3758 tree_cons (NULL_TREE,
3761 tree v16qi_ftype_v16qi_v16qi_v16qi
3762 = build_function_type (V16QI_type_node,
3763 tree_cons (NULL_TREE, V16QI_type_node,
3764 tree_cons (NULL_TREE, V16QI_type_node,
3765 tree_cons (NULL_TREE,
3769 /* V4SI foo (char). */
3770 tree v4si_ftype_char
3771 = build_function_type (V4SI_type_node,
3772 tree_cons (NULL_TREE, char_type_node, endlink));
3774 /* V8HI foo (char). */
3775 tree v8hi_ftype_char
3776 = build_function_type (V8HI_type_node,
3777 tree_cons (NULL_TREE, char_type_node, endlink));
3779 /* V16QI foo (char). */
3780 tree v16qi_ftype_char
3781 = build_function_type (V16QI_type_node,
3782 tree_cons (NULL_TREE, char_type_node, endlink));
3783 /* V4SF foo (V4SF). */
3784 tree v4sf_ftype_v4sf
3785 = build_function_type (V4SF_type_node,
3786 tree_cons (NULL_TREE, V4SF_type_node, endlink));
3788 /* V4SI foo (int *). */
3789 tree v4si_ftype_pint
3790 = build_function_type (V4SI_type_node,
3791 tree_cons (NULL_TREE, pint_type_node, endlink));
3792 /* V8HI foo (short *). */
3793 tree v8hi_ftype_pshort
3794 = build_function_type (V8HI_type_node,
3795 tree_cons (NULL_TREE, pshort_type_node, endlink));
3796 /* V16QI foo (char *). */
3797 tree v16qi_ftype_pchar
3798 = build_function_type (V16QI_type_node,
3799 tree_cons (NULL_TREE, pchar_type_node, endlink));
3800 /* V4SF foo (float *). */
3801 tree v4sf_ftype_pfloat
3802 = build_function_type (V4SF_type_node,
3803 tree_cons (NULL_TREE, pfloat_type_node, endlink));
3805 /* V8HI foo (V16QI). */
3806 tree v8hi_ftype_v16qi
3807 = build_function_type (V8HI_type_node,
3808 tree_cons (NULL_TREE, V16QI_type_node, endlink));
3810 /* void foo (int *, V4SI). */
3811 tree void_ftype_pint_v4si
3812 = build_function_type (void_type_node,
3813 tree_cons (NULL_TREE, pint_type_node,
3814 tree_cons (NULL_TREE, V4SI_type_node,
3816 /* void foo (short *, V8HI). */
3817 tree void_ftype_pshort_v8hi
3818 = build_function_type (void_type_node,
3819 tree_cons (NULL_TREE, pshort_type_node,
3820 tree_cons (NULL_TREE, V8HI_type_node,
3822 /* void foo (char *, V16QI). */
3823 tree void_ftype_pchar_v16qi
3824 = build_function_type (void_type_node,
3825 tree_cons (NULL_TREE, pchar_type_node,
3826 tree_cons (NULL_TREE, V16QI_type_node,
3828 /* void foo (float *, V4SF). */
3829 tree void_ftype_pfloat_v4sf
3830 = build_function_type (void_type_node,
3831 tree_cons (NULL_TREE, pfloat_type_node,
3832 tree_cons (NULL_TREE, V4SF_type_node,
3835 tree v4si_ftype_v4si_v4si
3836 = build_function_type (V4SI_type_node,
3837 tree_cons (NULL_TREE, V4SI_type_node,
3838 tree_cons (NULL_TREE, V4SI_type_node,
3841 /* These are for the unsigned 5 bit literals. */
3843 tree v4sf_ftype_v4si_char
3844 = build_function_type (V4SF_type_node,
3845 tree_cons (NULL_TREE, V4SI_type_node,
3846 tree_cons (NULL_TREE, char_type_node,
3848 tree v4si_ftype_v4sf_char
3849 = build_function_type (V4SI_type_node,
3850 tree_cons (NULL_TREE, V4SF_type_node,
3851 tree_cons (NULL_TREE, char_type_node,
3853 tree v4si_ftype_v4si_char
3854 = build_function_type (V4SI_type_node,
3855 tree_cons (NULL_TREE, V4SI_type_node,
3856 tree_cons (NULL_TREE, char_type_node,
3858 tree v8hi_ftype_v8hi_char
3859 = build_function_type (V8HI_type_node,
3860 tree_cons (NULL_TREE, V8HI_type_node,
3861 tree_cons (NULL_TREE, char_type_node,
3863 tree v16qi_ftype_v16qi_char
3864 = build_function_type (V16QI_type_node,
3865 tree_cons (NULL_TREE, V16QI_type_node,
3866 tree_cons (NULL_TREE, char_type_node,
3869 /* These are for the unsigned 4 bit literals. */
3871 tree v16qi_ftype_v16qi_v16qi_char
3872 = build_function_type (V16QI_type_node,
3873 tree_cons (NULL_TREE, V16QI_type_node,
3874 tree_cons (NULL_TREE, V16QI_type_node,
3875 tree_cons (NULL_TREE,
3879 tree v8hi_ftype_v8hi_v8hi_char
3880 = build_function_type (V8HI_type_node,
3881 tree_cons (NULL_TREE, V8HI_type_node,
3882 tree_cons (NULL_TREE, V8HI_type_node,
3883 tree_cons (NULL_TREE,
3887 tree v4si_ftype_v4si_v4si_char
3888 = build_function_type (V4SI_type_node,
3889 tree_cons (NULL_TREE, V4SI_type_node,
3890 tree_cons (NULL_TREE, V4SI_type_node,
3891 tree_cons (NULL_TREE,
3895 tree v4sf_ftype_v4sf_v4sf_char
3896 = build_function_type (V4SF_type_node,
3897 tree_cons (NULL_TREE, V4SF_type_node,
3898 tree_cons (NULL_TREE, V4SF_type_node,
3899 tree_cons (NULL_TREE,
3903 /* End of 4 bit literals. */
3905 tree v4sf_ftype_v4sf_v4sf
3906 = build_function_type (V4SF_type_node,
3907 tree_cons (NULL_TREE, V4SF_type_node,
3908 tree_cons (NULL_TREE, V4SF_type_node,
3910 tree v4sf_ftype_v4sf_v4sf_v4si
3911 = build_function_type (V4SF_type_node,
3912 tree_cons (NULL_TREE, V4SF_type_node,
3913 tree_cons (NULL_TREE, V4SF_type_node,
3914 tree_cons (NULL_TREE,
3917 tree v4sf_ftype_v4sf_v4sf_v4sf
3918 = build_function_type (V4SF_type_node,
3919 tree_cons (NULL_TREE, V4SF_type_node,
3920 tree_cons (NULL_TREE, V4SF_type_node,
3921 tree_cons (NULL_TREE,
3924 tree v4si_ftype_v4si_v4si_v4si
3925 = build_function_type (V4SI_type_node,
3926 tree_cons (NULL_TREE, V4SI_type_node,
3927 tree_cons (NULL_TREE, V4SI_type_node,
3928 tree_cons (NULL_TREE,
3932 tree v8hi_ftype_v8hi_v8hi
3933 = build_function_type (V8HI_type_node,
3934 tree_cons (NULL_TREE, V8HI_type_node,
3935 tree_cons (NULL_TREE, V8HI_type_node,
3937 tree v8hi_ftype_v8hi_v8hi_v8hi
3938 = build_function_type (V8HI_type_node,
3939 tree_cons (NULL_TREE, V8HI_type_node,
3940 tree_cons (NULL_TREE, V8HI_type_node,
3941 tree_cons (NULL_TREE,
3944 tree v4si_ftype_v8hi_v8hi_v4si
3945 = build_function_type (V4SI_type_node,
3946 tree_cons (NULL_TREE, V8HI_type_node,
3947 tree_cons (NULL_TREE, V8HI_type_node,
3948 tree_cons (NULL_TREE,
3951 tree v4si_ftype_v16qi_v16qi_v4si
3952 = build_function_type (V4SI_type_node,
3953 tree_cons (NULL_TREE, V16QI_type_node,
3954 tree_cons (NULL_TREE, V16QI_type_node,
3955 tree_cons (NULL_TREE,
3959 tree v16qi_ftype_v16qi_v16qi
3960 = build_function_type (V16QI_type_node,
3961 tree_cons (NULL_TREE, V16QI_type_node,
3962 tree_cons (NULL_TREE, V16QI_type_node,
3965 tree v4si_ftype_v4sf_v4sf
3966 = build_function_type (V4SI_type_node,
3967 tree_cons (NULL_TREE, V4SF_type_node,
3968 tree_cons (NULL_TREE, V4SF_type_node,
3971 tree v8hi_ftype_v16qi_v16qi
3972 = build_function_type (V8HI_type_node,
3973 tree_cons (NULL_TREE, V16QI_type_node,
3974 tree_cons (NULL_TREE, V16QI_type_node,
3977 tree v4si_ftype_v8hi_v8hi
3978 = build_function_type (V4SI_type_node,
3979 tree_cons (NULL_TREE, V8HI_type_node,
3980 tree_cons (NULL_TREE, V8HI_type_node,
3983 tree v8hi_ftype_v4si_v4si
3984 = build_function_type (V8HI_type_node,
3985 tree_cons (NULL_TREE, V4SI_type_node,
3986 tree_cons (NULL_TREE, V4SI_type_node,
3989 tree v16qi_ftype_v8hi_v8hi
3990 = build_function_type (V16QI_type_node,
3991 tree_cons (NULL_TREE, V8HI_type_node,
3992 tree_cons (NULL_TREE, V8HI_type_node,
3995 tree v4si_ftype_v16qi_v4si
3996 = build_function_type (V4SI_type_node,
3997 tree_cons (NULL_TREE, V16QI_type_node,
3998 tree_cons (NULL_TREE, V4SI_type_node,
4001 tree v4si_ftype_v8hi_v4si
4002 = build_function_type (V4SI_type_node,
4003 tree_cons (NULL_TREE, V8HI_type_node,
4004 tree_cons (NULL_TREE, V4SI_type_node,
4007 tree v4si_ftype_v8hi
4008 = build_function_type (V4SI_type_node,
4009 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4011 tree int_ftype_v4si_v4si
4012 = build_function_type (integer_type_node,
4013 tree_cons (NULL_TREE, V4SI_type_node,
4014 tree_cons (NULL_TREE, V4SI_type_node,
4017 tree int_ftype_v4sf_v4sf
4018 = build_function_type (integer_type_node,
4019 tree_cons (NULL_TREE, V4SF_type_node,
4020 tree_cons (NULL_TREE, V4SF_type_node,
4023 tree int_ftype_v16qi_v16qi
4024 = build_function_type (integer_type_node,
4025 tree_cons (NULL_TREE, V16QI_type_node,
4026 tree_cons (NULL_TREE, V16QI_type_node,
4029 tree int_ftype_v8hi_v8hi
4030 = build_function_type (integer_type_node,
4031 tree_cons (NULL_TREE, V8HI_type_node,
4032 tree_cons (NULL_TREE, V8HI_type_node,
4035 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4036 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4037 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4038 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4039 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4040 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4041 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4042 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4044 /* Add the simple ternary operators. */
4045 d = (struct builtin_description *) bdesc_3arg;
4046 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
4049 enum machine_mode mode0, mode1, mode2, mode3;
4055 mode0 = insn_data[d->icode].operand[0].mode;
4056 mode1 = insn_data[d->icode].operand[1].mode;
4057 mode2 = insn_data[d->icode].operand[2].mode;
4058 mode3 = insn_data[d->icode].operand[3].mode;
4060 /* When all four are of the same mode. */
4061 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4066 type = v4si_ftype_v4si_v4si_v4si;
4069 type = v4sf_ftype_v4sf_v4sf_v4sf;
4072 type = v8hi_ftype_v8hi_v8hi_v8hi;
4075 type = v16qi_ftype_v16qi_v16qi_v16qi;
4081 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4086 type = v4si_ftype_v4si_v4si_v16qi;
4089 type = v4sf_ftype_v4sf_v4sf_v16qi;
4092 type = v8hi_ftype_v8hi_v8hi_v16qi;
4095 type = v16qi_ftype_v16qi_v16qi_v16qi;
4101 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4102 && mode3 == V4SImode)
4103 type = v4si_ftype_v16qi_v16qi_v4si;
4104 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4105 && mode3 == V4SImode)
4106 type = v4si_ftype_v8hi_v8hi_v4si;
4107 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4108 && mode3 == V4SImode)
4109 type = v4sf_ftype_v4sf_v4sf_v4si;
4111 /* vchar, vchar, vchar, 4 bit literal. */
4112 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4114 type = v16qi_ftype_v16qi_v16qi_char;
4116 /* vshort, vshort, vshort, 4 bit literal. */
4117 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4119 type = v8hi_ftype_v8hi_v8hi_char;
4121 /* vint, vint, vint, 4 bit literal. */
4122 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4124 type = v4si_ftype_v4si_v4si_char;
4126 /* vfloat, vfloat, vfloat, 4 bit literal. */
4127 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4129 type = v4sf_ftype_v4sf_v4sf_char;
4134 def_builtin (d->mask, d->name, type, d->code);
4137 /* Add the simple binary operators. */
4138 d = (struct builtin_description *) bdesc_2arg;
4139 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4141 enum machine_mode mode0, mode1, mode2;
4147 mode0 = insn_data[d->icode].operand[0].mode;
4148 mode1 = insn_data[d->icode].operand[1].mode;
4149 mode2 = insn_data[d->icode].operand[2].mode;
4151 /* When all three operands are of the same mode. */
4152 if (mode0 == mode1 && mode1 == mode2)
4157 type = v4sf_ftype_v4sf_v4sf;
4160 type = v4si_ftype_v4si_v4si;
4163 type = v16qi_ftype_v16qi_v16qi;
4166 type = v8hi_ftype_v8hi_v8hi;
4173 /* A few other combos we really don't want to do manually. */
4175 /* vint, vfloat, vfloat. */
4176 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4177 type = v4si_ftype_v4sf_v4sf;
4179 /* vshort, vchar, vchar. */
4180 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4181 type = v8hi_ftype_v16qi_v16qi;
4183 /* vint, vshort, vshort. */
4184 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4185 type = v4si_ftype_v8hi_v8hi;
4187 /* vshort, vint, vint. */
4188 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4189 type = v8hi_ftype_v4si_v4si;
4191 /* vchar, vshort, vshort. */
4192 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4193 type = v16qi_ftype_v8hi_v8hi;
4195 /* vint, vchar, vint. */
4196 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4197 type = v4si_ftype_v16qi_v4si;
4199 /* vint, vshort, vint. */
4200 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4201 type = v4si_ftype_v8hi_v4si;
4203 /* vint, vint, 5 bit literal. */
4204 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4205 type = v4si_ftype_v4si_char;
4207 /* vshort, vshort, 5 bit literal. */
4208 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4209 type = v8hi_ftype_v8hi_char;
4211 /* vchar, vchar, 5 bit literal. */
4212 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4213 type = v16qi_ftype_v16qi_char;
4215 /* vfloat, vint, 5 bit literal. */
4216 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4217 type = v4sf_ftype_v4si_char;
4219 /* vint, vfloat, 5 bit literal. */
4220 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4221 type = v4si_ftype_v4sf_char;
4224 else if (mode0 == SImode)
4229 type = int_ftype_v4si_v4si;
4232 type = int_ftype_v4sf_v4sf;
4235 type = int_ftype_v16qi_v16qi;
4238 type = int_ftype_v8hi_v8hi;
4248 def_builtin (d->mask, d->name, type, d->code);
4251 /* Add the simple unary operators. */
4252 d = (struct builtin_description *) bdesc_1arg;
4253 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4255 enum machine_mode mode0, mode1;
4261 mode0 = insn_data[d->icode].operand[0].mode;
4262 mode1 = insn_data[d->icode].operand[1].mode;
4264 if (mode0 == V4SImode && mode1 == QImode)
4265 type = v4si_ftype_char;
4266 else if (mode0 == V8HImode && mode1 == QImode)
4267 type = v8hi_ftype_char;
4268 else if (mode0 == V16QImode && mode1 == QImode)
4269 type = v16qi_ftype_char;
4270 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4271 type = v4sf_ftype_v4sf;
4272 else if (mode0 == V8HImode && mode1 == V16QImode)
4273 type = v8hi_ftype_v16qi;
4274 else if (mode0 == V4SImode && mode1 == V8HImode)
4275 type = v4si_ftype_v8hi;
4279 def_builtin (d->mask, d->name, type, d->code);
4284 /* Generate a memory reference for expand_block_move, copying volatile,
4285 and other bits from an original memory reference. */
4288 expand_block_move_mem (mode, addr, orig_mem)
4289 enum machine_mode mode;
4293 rtx mem = gen_rtx_MEM (mode, addr);
4295 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4299 /* Expand a block move operation, and return 1 if successful. Return 0
4300 if we should let the compiler generate normal code.
4302 operands[0] is the destination
4303 operands[1] is the source
4304 operands[2] is the length
4305 operands[3] is the alignment */
4307 #define MAX_MOVE_REG 4
4310 expand_block_move (operands)
4313 rtx orig_dest = operands[0];
4314 rtx orig_src = operands[1];
4315 rtx bytes_rtx = operands[2];
4316 rtx align_rtx = operands[3];
4317 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
4328 rtx stores[MAX_MOVE_REG];
4331 /* If this is not a fixed size move, just call memcpy */
4335 /* If this is not a fixed size alignment, abort */
4336 if (GET_CODE (align_rtx) != CONST_INT)
4338 align = INTVAL (align_rtx);
4340 /* Anything to move? */
4341 bytes = INTVAL (bytes_rtx);
4345 /* store_one_arg depends on expand_block_move to handle at least the size of
4346 reg_parm_stack_space. */
4347 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
4350 /* Move the address into scratch registers. */
4351 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
4352 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
4354 if (TARGET_STRING) /* string instructions are available */
4356 for ( ; bytes > 0; bytes -= move_bytes)
4358 if (bytes > 24 /* move up to 32 bytes at a time */
4366 && ! fixed_regs[12])
4368 move_bytes = (bytes > 32) ? 32 : bytes;
4369 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
4372 expand_block_move_mem (BLKmode,
4375 GEN_INT ((move_bytes == 32)
4379 else if (bytes > 16 /* move up to 24 bytes at a time */
4385 && ! fixed_regs[10])
4387 move_bytes = (bytes > 24) ? 24 : bytes;
4388 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
4391 expand_block_move_mem (BLKmode,
4394 GEN_INT (move_bytes),
4397 else if (bytes > 8 /* move up to 16 bytes at a time */
4403 move_bytes = (bytes > 16) ? 16 : bytes;
4404 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
4407 expand_block_move_mem (BLKmode,
4410 GEN_INT (move_bytes),
4413 else if (bytes >= 8 && TARGET_POWERPC64
4414 /* 64-bit loads and stores require word-aligned
4416 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4419 tmp_reg = gen_reg_rtx (DImode);
4420 emit_move_insn (tmp_reg,
4421 expand_block_move_mem (DImode,
4422 src_reg, orig_src));
4423 emit_move_insn (expand_block_move_mem (DImode,
4424 dest_reg, orig_dest),
4427 else if (bytes > 4 && !TARGET_POWERPC64)
4428 { /* move up to 8 bytes at a time */
4429 move_bytes = (bytes > 8) ? 8 : bytes;
4430 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
4433 expand_block_move_mem (BLKmode,
4436 GEN_INT (move_bytes),
4439 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
4440 { /* move 4 bytes */
4442 tmp_reg = gen_reg_rtx (SImode);
4443 emit_move_insn (tmp_reg,
4444 expand_block_move_mem (SImode,
4445 src_reg, orig_src));
4446 emit_move_insn (expand_block_move_mem (SImode,
4447 dest_reg, orig_dest),
4450 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
4451 { /* move 2 bytes */
4453 tmp_reg = gen_reg_rtx (HImode);
4454 emit_move_insn (tmp_reg,
4455 expand_block_move_mem (HImode,
4456 src_reg, orig_src));
4457 emit_move_insn (expand_block_move_mem (HImode,
4458 dest_reg, orig_dest),
4461 else if (bytes == 1) /* move 1 byte */
4464 tmp_reg = gen_reg_rtx (QImode);
4465 emit_move_insn (tmp_reg,
4466 expand_block_move_mem (QImode,
4467 src_reg, orig_src));
4468 emit_move_insn (expand_block_move_mem (QImode,
4469 dest_reg, orig_dest),
4473 { /* move up to 4 bytes at a time */
4474 move_bytes = (bytes > 4) ? 4 : bytes;
4475 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
4478 expand_block_move_mem (BLKmode,
4481 GEN_INT (move_bytes),
4485 if (bytes > move_bytes)
4487 if (! TARGET_POWERPC64)
4489 emit_insn (gen_addsi3 (src_reg, src_reg,
4490 GEN_INT (move_bytes)));
4491 emit_insn (gen_addsi3 (dest_reg, dest_reg,
4492 GEN_INT (move_bytes)));
4496 emit_insn (gen_adddi3 (src_reg, src_reg,
4497 GEN_INT (move_bytes)));
4498 emit_insn (gen_adddi3 (dest_reg, dest_reg,
4499 GEN_INT (move_bytes)));
4505 else /* string instructions not available */
4507 num_reg = offset = 0;
4508 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
4510 /* Calculate the correct offset for src/dest */
4514 dest_addr = dest_reg;
4518 src_addr = plus_constant (src_reg, offset);
4519 dest_addr = plus_constant (dest_reg, offset);
4522 /* Generate the appropriate load and store, saving the stores
4524 if (bytes >= 8 && TARGET_POWERPC64
4525 /* 64-bit loads and stores require word-aligned
4527 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4530 tmp_reg = gen_reg_rtx (DImode);
4531 emit_insn (gen_movdi (tmp_reg,
4532 expand_block_move_mem (DImode,
4535 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
4540 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
4543 tmp_reg = gen_reg_rtx (SImode);
4544 emit_insn (gen_movsi (tmp_reg,
4545 expand_block_move_mem (SImode,
4548 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
4553 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
4556 tmp_reg = gen_reg_rtx (HImode);
4557 emit_insn (gen_movhi (tmp_reg,
4558 expand_block_move_mem (HImode,
4561 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
4569 tmp_reg = gen_reg_rtx (QImode);
4570 emit_insn (gen_movqi (tmp_reg,
4571 expand_block_move_mem (QImode,
4574 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
4580 if (num_reg >= MAX_MOVE_REG)
4582 for (i = 0; i < num_reg; i++)
4583 emit_insn (stores[i]);
4588 for (i = 0; i < num_reg; i++)
4589 emit_insn (stores[i]);
4596 /* Return 1 if OP is a load multiple operation. It is known to be a
4597 PARALLEL and the first section will be tested. */
4600 load_multiple_operation (op, mode)
4602 enum machine_mode mode ATTRIBUTE_UNUSED;
4604 int count = XVECLEN (op, 0);
4605 unsigned int dest_regno;
4609 /* Perform a quick check so we don't blow up below. */
4611 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4612 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4613 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
4616 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4617 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
4619 for (i = 1; i < count; i++)
4621 rtx elt = XVECEXP (op, 0, i);
4623 if (GET_CODE (elt) != SET
4624 || GET_CODE (SET_DEST (elt)) != REG
4625 || GET_MODE (SET_DEST (elt)) != SImode
4626 || REGNO (SET_DEST (elt)) != dest_regno + i
4627 || GET_CODE (SET_SRC (elt)) != MEM
4628 || GET_MODE (SET_SRC (elt)) != SImode
4629 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4630 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4631 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4632 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
4639 /* Similar, but tests for store multiple. Here, the second vector element
4640 is a CLOBBER. It will be tested later. */
4643 store_multiple_operation (op, mode)
4645 enum machine_mode mode ATTRIBUTE_UNUSED;
4647 int count = XVECLEN (op, 0) - 1;
4648 unsigned int src_regno;
4652 /* Perform a quick check so we don't blow up below. */
4654 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4655 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
4656 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
4659 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4660 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
4662 for (i = 1; i < count; i++)
4664 rtx elt = XVECEXP (op, 0, i + 1);
4666 if (GET_CODE (elt) != SET
4667 || GET_CODE (SET_SRC (elt)) != REG
4668 || GET_MODE (SET_SRC (elt)) != SImode
4669 || REGNO (SET_SRC (elt)) != src_regno + i
4670 || GET_CODE (SET_DEST (elt)) != MEM
4671 || GET_MODE (SET_DEST (elt)) != SImode
4672 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4673 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4674 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4675 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
4682 /* Return 1 for a parallel vrsave operation. */
4685 vrsave_operation (op, mode)
4687 enum machine_mode mode ATTRIBUTE_UNUSED;
4689 int count = XVECLEN (op, 0);
4690 unsigned int dest_regno, src_regno;
4694 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4695 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4696 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
4699 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4700 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4702 if (dest_regno != VRSAVE_REGNO
4703 && src_regno != VRSAVE_REGNO)
4706 for (i = 1; i < count; i++)
4708 rtx elt = XVECEXP (op, 0, i);
4710 if (GET_CODE (elt) != CLOBBER
4711 && GET_CODE (elt) != SET)
4718 /* Return 1 for an PARALLEL suitable for mtcrf. */
4721 mtcrf_operation (op, mode)
4723 enum machine_mode mode ATTRIBUTE_UNUSED;
4725 int count = XVECLEN (op, 0);
4729 /* Perform a quick check so we don't blow up below. */
4731 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4732 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
4733 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
4735 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
4737 if (GET_CODE (src_reg) != REG
4738 || GET_MODE (src_reg) != SImode
4739 || ! INT_REGNO_P (REGNO (src_reg)))
4742 for (i = 0; i < count; i++)
4744 rtx exp = XVECEXP (op, 0, i);
4748 if (GET_CODE (exp) != SET
4749 || GET_CODE (SET_DEST (exp)) != REG
4750 || GET_MODE (SET_DEST (exp)) != CCmode
4751 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
4753 unspec = SET_SRC (exp);
4754 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
4756 if (GET_CODE (unspec) != UNSPEC
4757 || XINT (unspec, 1) != 20
4758 || XVECLEN (unspec, 0) != 2
4759 || XVECEXP (unspec, 0, 0) != src_reg
4760 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
4761 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
4767 /* Return 1 for an PARALLEL suitable for lmw. */
4770 lmw_operation (op, mode)
4772 enum machine_mode mode ATTRIBUTE_UNUSED;
4774 int count = XVECLEN (op, 0);
4775 unsigned int dest_regno;
4777 unsigned int base_regno;
4778 HOST_WIDE_INT offset;
4781 /* Perform a quick check so we don't blow up below. */
4783 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4784 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4785 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
4788 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4789 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
4792 || count != 32 - (int) dest_regno)
4795 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
4798 base_regno = REGNO (src_addr);
4799 if (base_regno == 0)
4802 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
4804 offset = INTVAL (XEXP (src_addr, 1));
4805 base_regno = REGNO (XEXP (src_addr, 0));
4810 for (i = 0; i < count; i++)
4812 rtx elt = XVECEXP (op, 0, i);
4815 HOST_WIDE_INT newoffset;
4817 if (GET_CODE (elt) != SET
4818 || GET_CODE (SET_DEST (elt)) != REG
4819 || GET_MODE (SET_DEST (elt)) != SImode
4820 || REGNO (SET_DEST (elt)) != dest_regno + i
4821 || GET_CODE (SET_SRC (elt)) != MEM
4822 || GET_MODE (SET_SRC (elt)) != SImode)
4824 newaddr = XEXP (SET_SRC (elt), 0);
4825 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
4830 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
4832 addr_reg = XEXP (newaddr, 0);
4833 newoffset = INTVAL (XEXP (newaddr, 1));
4837 if (REGNO (addr_reg) != base_regno
4838 || newoffset != offset + 4 * i)
4845 /* Return 1 for an PARALLEL suitable for stmw. */
4848 stmw_operation (op, mode)
4850 enum machine_mode mode ATTRIBUTE_UNUSED;
4852 int count = XVECLEN (op, 0);
4853 unsigned int src_regno;
4855 unsigned int base_regno;
4856 HOST_WIDE_INT offset;
4859 /* Perform a quick check so we don't blow up below. */
4861 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4862 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
4863 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
4866 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4867 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
4870 || count != 32 - (int) src_regno)
4873 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
4876 base_regno = REGNO (dest_addr);
4877 if (base_regno == 0)
4880 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
4882 offset = INTVAL (XEXP (dest_addr, 1));
4883 base_regno = REGNO (XEXP (dest_addr, 0));
4888 for (i = 0; i < count; i++)
4890 rtx elt = XVECEXP (op, 0, i);
4893 HOST_WIDE_INT newoffset;
4895 if (GET_CODE (elt) != SET
4896 || GET_CODE (SET_SRC (elt)) != REG
4897 || GET_MODE (SET_SRC (elt)) != SImode
4898 || REGNO (SET_SRC (elt)) != src_regno + i
4899 || GET_CODE (SET_DEST (elt)) != MEM
4900 || GET_MODE (SET_DEST (elt)) != SImode)
4902 newaddr = XEXP (SET_DEST (elt), 0);
4903 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
4908 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
4910 addr_reg = XEXP (newaddr, 0);
4911 newoffset = INTVAL (XEXP (newaddr, 1));
4915 if (REGNO (addr_reg) != base_regno
4916 || newoffset != offset + 4 * i)
4923 /* A validation routine: say whether CODE, a condition code, and MODE
4924 match. The other alternatives either don't make sense or should
4925 never be generated. */
4928 validate_condition_mode (code, mode)
4930 enum machine_mode mode;
4932 if (GET_RTX_CLASS (code) != '<'
4933 || GET_MODE_CLASS (mode) != MODE_CC)
4936 /* These don't make sense. */
4937 if ((code == GT || code == LT || code == GE || code == LE)
4938 && mode == CCUNSmode)
4941 if ((code == GTU || code == LTU || code == GEU || code == LEU)
4942 && mode != CCUNSmode)
4945 if (mode != CCFPmode
4946 && (code == ORDERED || code == UNORDERED
4947 || code == UNEQ || code == LTGT
4948 || code == UNGT || code == UNLT
4949 || code == UNGE || code == UNLE))
4952 /* These should never be generated except for
4953 flag_unsafe_math_optimizations. */
4954 if (mode == CCFPmode
4955 && ! flag_unsafe_math_optimizations
4956 && (code == LE || code == GE
4957 || code == UNEQ || code == LTGT
4958 || code == UNGT || code == UNLT))
4961 /* These are invalid; the information is not there. */
4962 if (mode == CCEQmode
4963 && code != EQ && code != NE)
4967 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
4968 We only check the opcode against the mode of the CC value here. */
4971 branch_comparison_operator (op, mode)
4973 enum machine_mode mode ATTRIBUTE_UNUSED;
4975 enum rtx_code code = GET_CODE (op);
4976 enum machine_mode cc_mode;
4978 if (GET_RTX_CLASS (code) != '<')
4981 cc_mode = GET_MODE (XEXP (op, 0));
4982 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
4985 validate_condition_mode (code, cc_mode);
4990 /* Return 1 if OP is a comparison operation that is valid for a branch
4991 insn and which is true if the corresponding bit in the CC register
4995 branch_positive_comparison_operator (op, mode)
4997 enum machine_mode mode;
5001 if (! branch_comparison_operator (op, mode))
5004 code = GET_CODE (op);
5005 return (code == EQ || code == LT || code == GT
5006 || code == LTU || code == GTU
5007 || code == UNORDERED);
5010 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5011 We check the opcode against the mode of the CC value and disallow EQ or
5012 NE comparisons for integers. */
5015 scc_comparison_operator (op, mode)
5017 enum machine_mode mode;
5019 enum rtx_code code = GET_CODE (op);
5020 enum machine_mode cc_mode;
5022 if (GET_MODE (op) != mode && mode != VOIDmode)
5025 if (GET_RTX_CLASS (code) != '<')
5028 cc_mode = GET_MODE (XEXP (op, 0));
5029 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5032 validate_condition_mode (code, cc_mode);
5034 if (code == NE && cc_mode != CCFPmode)
5041 trap_comparison_operator (op, mode)
5043 enum machine_mode mode;
5045 if (mode != VOIDmode && mode != GET_MODE (op))
5047 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5051 boolean_operator (op, mode)
5053 enum machine_mode mode ATTRIBUTE_UNUSED;
5055 enum rtx_code code = GET_CODE (op);
5056 return (code == AND || code == IOR || code == XOR);
5060 boolean_or_operator (op, mode)
5062 enum machine_mode mode ATTRIBUTE_UNUSED;
5064 enum rtx_code code = GET_CODE (op);
5065 return (code == IOR || code == XOR);
5069 min_max_operator (op, mode)
5071 enum machine_mode mode ATTRIBUTE_UNUSED;
5073 enum rtx_code code = GET_CODE (op);
5074 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5077 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5078 mask required to convert the result of a rotate insn into a shift
5079 left insn of SHIFTOP bits. Both are known to be CONST_INT. */
5082 includes_lshift_p (shiftop, andop)
5086 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5088 shift_mask <<= INTVAL (shiftop);
5090 return (INTVAL (andop) & ~shift_mask) == 0;
5093 /* Similar, but for right shift. */
5096 includes_rshift_p (shiftop, andop)
5100 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5102 shift_mask >>= INTVAL (shiftop);
5104 return (INTVAL (andop) & ~shift_mask) == 0;
5107 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5108 to perform a left shift. It must have exactly SHIFTOP least
5109 signifigant 0's, then one or more 1's, then zero or more 0's. */
5112 includes_rldic_lshift_p (shiftop, andop)
5116 if (GET_CODE (andop) == CONST_INT)
5118 HOST_WIDE_INT c, lsb, shift_mask;
5121 if (c == 0 || c == ~0)
5125 shift_mask <<= INTVAL (shiftop);
5127 /* Find the least signifigant one bit. */
5130 /* It must coincide with the LSB of the shift mask. */
5131 if (-lsb != shift_mask)
5134 /* Invert to look for the next transition (if any). */
5137 /* Remove the low group of ones (originally low group of zeros). */
5140 /* Again find the lsb, and check we have all 1's above. */
5144 else if (GET_CODE (andop) == CONST_DOUBLE
5145 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5147 HOST_WIDE_INT low, high, lsb;
5148 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5150 low = CONST_DOUBLE_LOW (andop);
5151 if (HOST_BITS_PER_WIDE_INT < 64)
5152 high = CONST_DOUBLE_HIGH (andop);
5154 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5155 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
5158 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5160 shift_mask_high = ~0;
5161 if (INTVAL (shiftop) > 32)
5162 shift_mask_high <<= INTVAL (shiftop) - 32;
5166 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5173 return high == -lsb;
5176 shift_mask_low = ~0;
5177 shift_mask_low <<= INTVAL (shiftop);
5181 if (-lsb != shift_mask_low)
5184 if (HOST_BITS_PER_WIDE_INT < 64)
5189 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5192 return high == -lsb;
5196 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5202 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5203 to perform a left shift. It must have SHIFTOP or more least
5204 signifigant 0's, with the remainder of the word 1's. */
5207 includes_rldicr_lshift_p (shiftop, andop)
5211 if (GET_CODE (andop) == CONST_INT)
5213 HOST_WIDE_INT c, lsb, shift_mask;
5216 shift_mask <<= INTVAL (shiftop);
5219 /* Find the least signifigant one bit. */
5222 /* It must be covered by the shift mask.
5223 This test also rejects c == 0. */
5224 if ((lsb & shift_mask) == 0)
5227 /* Check we have all 1's above the transition, and reject all 1's. */
5228 return c == -lsb && lsb != 1;
5230 else if (GET_CODE (andop) == CONST_DOUBLE
5231 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5233 HOST_WIDE_INT low, lsb, shift_mask_low;
5235 low = CONST_DOUBLE_LOW (andop);
5237 if (HOST_BITS_PER_WIDE_INT < 64)
5239 HOST_WIDE_INT high, shift_mask_high;
5241 high = CONST_DOUBLE_HIGH (andop);
5245 shift_mask_high = ~0;
5246 if (INTVAL (shiftop) > 32)
5247 shift_mask_high <<= INTVAL (shiftop) - 32;
5251 if ((lsb & shift_mask_high) == 0)
5254 return high == -lsb;
5260 shift_mask_low = ~0;
5261 shift_mask_low <<= INTVAL (shiftop);
5265 if ((lsb & shift_mask_low) == 0)
5268 return low == -lsb && lsb != 1;
5274 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5275 for lfq and stfq insns.
5277 Note reg1 and reg2 *must* be hard registers. To be sure we will
5278 abort if we are passed pseudo registers. */
5281 registers_ok_for_quad_peep (reg1, reg2)
5284 /* We might have been passed a SUBREG. */
5285 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5288 return (REGNO (reg1) == REGNO (reg2) - 1);
5291 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5292 addr1 and addr2 must be in consecutive memory locations
5293 (addr2 == addr1 + 8). */
5296 addrs_ok_for_quad_peep (addr1, addr2)
5303 /* Extract an offset (if used) from the first addr. */
5304 if (GET_CODE (addr1) == PLUS)
5306 /* If not a REG, return zero. */
5307 if (GET_CODE (XEXP (addr1, 0)) != REG)
5311 reg1 = REGNO (XEXP (addr1, 0));
5312 /* The offset must be constant! */
5313 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5315 offset1 = INTVAL (XEXP (addr1, 1));
5318 else if (GET_CODE (addr1) != REG)
5322 reg1 = REGNO (addr1);
5323 /* This was a simple (mem (reg)) expression. Offset is 0. */
5327 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5328 if (GET_CODE (addr2) != PLUS)
5331 if (GET_CODE (XEXP (addr2, 0)) != REG
5332 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5335 if (reg1 != REGNO (XEXP (addr2, 0)))
5338 /* The offset for the second addr must be 8 more than the first addr. */
5339 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5342 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5347 /* Return the register class of a scratch register needed to copy IN into
5348 or out of a register in CLASS in MODE. If it can be done directly,
5349 NO_REGS is returned. */
5352 secondary_reload_class (class, mode, in)
5353 enum reg_class class;
5354 enum machine_mode mode ATTRIBUTE_UNUSED;
5359 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
5361 /* We cannot copy a symbolic operand directly into anything
5362 other than BASE_REGS for TARGET_ELF. So indicate that a
5363 register from BASE_REGS is needed as an intermediate
5366 On Darwin, pic addresses require a load from memory, which
5367 needs a base register. */
5368 if (class != BASE_REGS
5369 && (GET_CODE (in) == SYMBOL_REF
5370 || GET_CODE (in) == HIGH
5371 || GET_CODE (in) == LABEL_REF
5372 || GET_CODE (in) == CONST))
5376 if (GET_CODE (in) == REG)
5379 if (regno >= FIRST_PSEUDO_REGISTER)
5381 regno = true_regnum (in);
5382 if (regno >= FIRST_PSEUDO_REGISTER)
5386 else if (GET_CODE (in) == SUBREG)
5388 regno = true_regnum (in);
5389 if (regno >= FIRST_PSEUDO_REGISTER)
5395 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
5397 if (class == GENERAL_REGS || class == BASE_REGS
5398 || (regno >= 0 && INT_REGNO_P (regno)))
5401 /* Constants, memory, and FP registers can go into FP registers. */
5402 if ((regno == -1 || FP_REGNO_P (regno))
5403 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
5406 /* Memory, and AltiVec registers can go into AltiVec registers. */
5407 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
5408 && class == ALTIVEC_REGS)
5411 /* We can copy among the CR registers. */
5412 if ((class == CR_REGS || class == CR0_REGS)
5413 && regno >= 0 && CR_REGNO_P (regno))
5416 /* Otherwise, we need GENERAL_REGS. */
5417 return GENERAL_REGS;
5420 /* Given a comparison operation, return the bit number in CCR to test. We
5421 know this is a valid comparison.
5423 SCC_P is 1 if this is for an scc. That means that %D will have been
5424 used instead of %C, so the bits will be in different places.
5426 Return -1 if OP isn't a valid comparison for some reason. */
5433 enum rtx_code code = GET_CODE (op);
5434 enum machine_mode cc_mode;
5439 if (GET_RTX_CLASS (code) != '<')
5444 if (GET_CODE (reg) != REG
5445 || ! CR_REGNO_P (REGNO (reg)))
5448 cc_mode = GET_MODE (reg);
5449 cc_regnum = REGNO (reg);
5450 base_bit = 4 * (cc_regnum - CR0_REGNO);
5452 validate_condition_mode (code, cc_mode);
5457 return scc_p ? base_bit + 3 : base_bit + 2;
5459 return base_bit + 2;
5460 case GT: case GTU: case UNLE:
5461 return base_bit + 1;
5462 case LT: case LTU: case UNGE:
5464 case ORDERED: case UNORDERED:
5465 return base_bit + 3;
5468 /* If scc, we will have done a cror to put the bit in the
5469 unordered position. So test that bit. For integer, this is ! LT
5470 unless this is an scc insn. */
5471 return scc_p ? base_bit + 3 : base_bit;
5474 return scc_p ? base_bit + 3 : base_bit + 1;
5481 /* Return the GOT register. */
5484 rs6000_got_register (value)
5485 rtx value ATTRIBUTE_UNUSED;
5487 /* The second flow pass currently (June 1999) can't update
5488 regs_ever_live without disturbing other parts of the compiler, so
5489 update it here to make the prolog/epilogue code happy. */
5490 if (no_new_pseudos && ! regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
5491 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
5493 current_function_uses_pic_offset_table = 1;
5495 return pic_offset_table_rtx;
5498 /* Functions to init, mark and free struct machine_function.
5499 These will be called, via pointer variables,
5500 from push_function_context and pop_function_context. */
5503 rs6000_init_machine_status (p)
5506 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
5510 rs6000_free_machine_status (p)
5513 if (p->machine == NULL)
5521 /* Print an operand. Recognize special options, documented below. */
5524 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
5525 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
5527 #define SMALL_DATA_RELOC "sda21"
5528 #define SMALL_DATA_REG 0
5532 print_operand (file, x, code)
5540 /* These macros test for integers and extract the low-order bits. */
5542 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
5543 && GET_MODE (X) == VOIDmode)
5545 #define INT_LOWPART(X) \
5546 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
5551 /* Write out an instruction after the call which may be replaced
5552 with glue code by the loader. This depends on the AIX version. */
5553 asm_fprintf (file, RS6000_CALL_GLUE);
5556 /* %a is output_address. */
5559 /* If X is a constant integer whose low-order 5 bits are zero,
5560 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
5561 in the AIX assembler where "sri" with a zero shift count
5562 writes a trash instruction. */
5563 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
5570 /* If constant, low-order 16 bits of constant, unsigned.
5571 Otherwise, write normally. */
5573 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
5575 print_operand (file, x, 0);
5579 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
5580 for 64-bit mask direction. */
5581 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
5584 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
5588 /* There used to be a comment for 'C' reading "This is an
5589 optional cror needed for certain floating-point
5590 comparisons. Otherwise write nothing." */
5592 /* Similar, except that this is for an scc, so we must be able to
5593 encode the test in a single bit that is one. We do the above
5594 for any LE, GE, GEU, or LEU and invert the bit for NE. */
5595 if (GET_CODE (x) == LE || GET_CODE (x) == GE
5596 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
5598 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
5600 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
5602 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
5605 else if (GET_CODE (x) == NE)
5607 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
5609 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
5610 base_bit + 2, base_bit + 2);
5615 /* X is a CR register. Print the number of the EQ bit of the CR */
5616 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5617 output_operand_lossage ("invalid %%E value");
5619 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
5623 /* X is a CR register. Print the shift count needed to move it
5624 to the high-order four bits. */
5625 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5626 output_operand_lossage ("invalid %%f value");
5628 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
5632 /* Similar, but print the count for the rotate in the opposite
5634 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5635 output_operand_lossage ("invalid %%F value");
5637 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
5641 /* X is a constant integer. If it is negative, print "m",
5642 otherwise print "z". This is to make a aze or ame insn. */
5643 if (GET_CODE (x) != CONST_INT)
5644 output_operand_lossage ("invalid %%G value");
5645 else if (INTVAL (x) >= 0)
5652 /* If constant, output low-order five bits. Otherwise, write
5655 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
5657 print_operand (file, x, 0);
5661 /* If constant, output low-order six bits. Otherwise, write
5664 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
5666 print_operand (file, x, 0);
5670 /* Print `i' if this is a constant, else nothing. */
5676 /* Write the bit number in CCR for jump. */
5679 output_operand_lossage ("invalid %%j code");
5681 fprintf (file, "%d", i);
5685 /* Similar, but add one for shift count in rlinm for scc and pass
5686 scc flag to `ccr_bit'. */
5689 output_operand_lossage ("invalid %%J code");
5691 /* If we want bit 31, write a shift count of zero, not 32. */
5692 fprintf (file, "%d", i == 31 ? 0 : i + 1);
5696 /* X must be a constant. Write the 1's complement of the
5699 output_operand_lossage ("invalid %%k value");
5701 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
5705 /* X must be a symbolic constant on ELF. Write an
5706 expression suitable for an 'addi' that adds in the low 16
5708 if (GET_CODE (x) != CONST)
5710 print_operand_address (file, x);
5715 if (GET_CODE (XEXP (x, 0)) != PLUS
5716 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
5717 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
5718 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
5719 output_operand_lossage ("invalid %%K value");
5720 print_operand_address (file, XEXP (XEXP (x, 0), 0));
5722 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
5726 /* %l is output_asm_label. */
5729 /* Write second word of DImode or DFmode reference. Works on register
5730 or non-indexed memory only. */
5731 if (GET_CODE (x) == REG)
5732 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
5733 else if (GET_CODE (x) == MEM)
5735 /* Handle possible auto-increment. Since it is pre-increment and
5736 we have already done it, we can just use an offset of word. */
5737 if (GET_CODE (XEXP (x, 0)) == PRE_INC
5738 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
5739 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
5742 output_address (XEXP (adjust_address_nv (x, SImode,
5746 if (small_data_operand (x, GET_MODE (x)))
5747 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5748 reg_names[SMALL_DATA_REG]);
5753 /* MB value for a mask operand. */
5754 if (! mask_operand (x, VOIDmode))
5755 output_operand_lossage ("invalid %%m value");
5757 val = INT_LOWPART (x);
5759 /* If the high bit is set and the low bit is not, the value is zero.
5760 If the high bit is zero, the value is the first 1 bit we find from
5762 if ((val & 0x80000000) && ((val & 1) == 0))
5767 else if ((val & 0x80000000) == 0)
5769 for (i = 1; i < 32; i++)
5770 if ((val <<= 1) & 0x80000000)
5772 fprintf (file, "%d", i);
5776 /* Otherwise, look for the first 0 bit from the right. The result is its
5777 number plus 1. We know the low-order bit is one. */
5778 for (i = 0; i < 32; i++)
5779 if (((val >>= 1) & 1) == 0)
5782 /* If we ended in ...01, i would be 0. The correct value is 31, so
5784 fprintf (file, "%d", 31 - i);
5788 /* ME value for a mask operand. */
5789 if (! mask_operand (x, VOIDmode))
5790 output_operand_lossage ("invalid %%M value");
5792 val = INT_LOWPART (x);
5794 /* If the low bit is set and the high bit is not, the value is 31.
5795 If the low bit is zero, the value is the first 1 bit we find from
5797 if ((val & 1) && ((val & 0x80000000) == 0))
5802 else if ((val & 1) == 0)
5804 for (i = 0; i < 32; i++)
5805 if ((val >>= 1) & 1)
5808 /* If we had ....10, i would be 0. The result should be
5809 30, so we need 30 - i. */
5810 fprintf (file, "%d", 30 - i);
5814 /* Otherwise, look for the first 0 bit from the left. The result is its
5815 number minus 1. We know the high-order bit is one. */
5816 for (i = 0; i < 32; i++)
5817 if (((val <<= 1) & 0x80000000) == 0)
5820 fprintf (file, "%d", i);
5823 /* %n outputs the negative of its operand. */
5826 /* Write the number of elements in the vector times 4. */
5827 if (GET_CODE (x) != PARALLEL)
5828 output_operand_lossage ("invalid %%N value");
5830 fprintf (file, "%d", XVECLEN (x, 0) * 4);
5834 /* Similar, but subtract 1 first. */
5835 if (GET_CODE (x) != PARALLEL)
5836 output_operand_lossage ("invalid %%O value");
5838 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
5842 /* X is a CONST_INT that is a power of two. Output the logarithm. */
5844 || INT_LOWPART (x) < 0
5845 || (i = exact_log2 (INT_LOWPART (x))) < 0)
5846 output_operand_lossage ("invalid %%p value");
5848 fprintf (file, "%d", i);
5852 /* The operand must be an indirect memory reference. The result
5853 is the register number. */
5854 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
5855 || REGNO (XEXP (x, 0)) >= 32)
5856 output_operand_lossage ("invalid %%P value");
5858 fprintf (file, "%d", REGNO (XEXP (x, 0)));
5862 /* This outputs the logical code corresponding to a boolean
5863 expression. The expression may have one or both operands
5864 negated (if one, only the first one). For condition register
5865 logical operations, it will also treat the negated
5866 CR codes as NOTs, but not handle NOTs of them. */
5868 const char *const *t = 0;
5870 enum rtx_code code = GET_CODE (x);
5871 static const char * const tbl[3][3] = {
5872 { "and", "andc", "nor" },
5873 { "or", "orc", "nand" },
5874 { "xor", "eqv", "xor" } };
5878 else if (code == IOR)
5880 else if (code == XOR)
5883 output_operand_lossage ("invalid %%q value");
5885 if (GET_CODE (XEXP (x, 0)) != NOT)
5889 if (GET_CODE (XEXP (x, 1)) == NOT)
5900 /* X is a CR register. Print the mask for `mtcrf'. */
5901 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5902 output_operand_lossage ("invalid %%R value");
5904 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
5908 /* Low 5 bits of 32 - value */
5910 output_operand_lossage ("invalid %%s value");
5912 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
5916 /* PowerPC64 mask position. All 0's and all 1's are excluded.
5917 CONST_INT 32-bit mask is considered sign-extended so any
5918 transition must occur within the CONST_INT, not on the boundary. */
5919 if (! mask64_operand (x, VOIDmode))
5920 output_operand_lossage ("invalid %%S value");
5922 val = INT_LOWPART (x);
5924 if (val & 1) /* Clear Left */
5926 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
5927 if (!((val >>= 1) & 1))
5930 #if HOST_BITS_PER_WIDE_INT == 32
5931 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
5933 val = CONST_DOUBLE_HIGH (x);
5938 for (i = 32; i < 64; i++)
5939 if (!((val >>= 1) & 1))
5943 /* i = index of last set bit from right
5944 mask begins at 63 - i from left */
5946 output_operand_lossage ("%%S computed all 1's mask");
5948 fprintf (file, "%d", 63 - i);
5951 else /* Clear Right */
5953 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
5954 if ((val >>= 1) & 1)
5957 #if HOST_BITS_PER_WIDE_INT == 32
5958 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
5960 val = CONST_DOUBLE_HIGH (x);
5962 if (val == (HOST_WIDE_INT) -1)
5965 for (i = 32; i < 64; i++)
5966 if ((val >>= 1) & 1)
5970 /* i = index of last clear bit from right
5971 mask ends at 62 - i from left */
5973 output_operand_lossage ("%%S computed all 0's mask");
5975 fprintf (file, "%d", 62 - i);
5980 /* Print the symbolic name of a branch target register. */
5981 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
5982 && REGNO (x) != COUNT_REGISTER_REGNUM))
5983 output_operand_lossage ("invalid %%T value");
5984 else if (REGNO (x) == LINK_REGISTER_REGNUM)
5985 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
5987 fputs ("ctr", file);
5991 /* High-order 16 bits of constant for use in unsigned operand. */
5993 output_operand_lossage ("invalid %%u value");
5995 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
5996 (INT_LOWPART (x) >> 16) & 0xffff);
6000 /* High-order 16 bits of constant for use in signed operand. */
6002 output_operand_lossage ("invalid %%v value");
6004 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6005 (INT_LOWPART (x) >> 16) & 0xffff);
6009 /* Print `u' if this has an auto-increment or auto-decrement. */
6010 if (GET_CODE (x) == MEM
6011 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6012 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6017 /* Print the trap code for this operand. */
6018 switch (GET_CODE (x))
6021 fputs ("eq", file); /* 4 */
6024 fputs ("ne", file); /* 24 */
6027 fputs ("lt", file); /* 16 */
6030 fputs ("le", file); /* 20 */
6033 fputs ("gt", file); /* 8 */
6036 fputs ("ge", file); /* 12 */
6039 fputs ("llt", file); /* 2 */
6042 fputs ("lle", file); /* 6 */
6045 fputs ("lgt", file); /* 1 */
6048 fputs ("lge", file); /* 5 */
6056 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6059 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6060 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6062 print_operand (file, x, 0);
6066 /* MB value for a PowerPC64 rldic operand. */
6067 val = (GET_CODE (x) == CONST_INT
6068 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6073 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6074 if ((val <<= 1) < 0)
6077 #if HOST_BITS_PER_WIDE_INT == 32
6078 if (GET_CODE (x) == CONST_INT && i >= 0)
6079 i += 32; /* zero-extend high-part was all 0's */
6080 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6082 val = CONST_DOUBLE_LOW (x);
6089 for ( ; i < 64; i++)
6090 if ((val <<= 1) < 0)
6095 fprintf (file, "%d", i + 1);
6099 if (GET_CODE (x) == MEM
6100 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6105 /* Like 'L', for third word of TImode */
6106 if (GET_CODE (x) == REG)
6107 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6108 else if (GET_CODE (x) == MEM)
6110 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6111 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6112 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6114 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6115 if (small_data_operand (x, GET_MODE (x)))
6116 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6117 reg_names[SMALL_DATA_REG]);
6122 /* X is a SYMBOL_REF. Write out the name preceded by a
6123 period and without any trailing data in brackets. Used for function
6124 names. If we are configured for System V (or the embedded ABI) on
6125 the PowerPC, do not emit the period, since those systems do not use
6126 TOCs and the like. */
6127 if (GET_CODE (x) != SYMBOL_REF)
6130 if (XSTR (x, 0)[0] != '.')
6132 switch (DEFAULT_ABI)
6142 case ABI_AIX_NODESC:
6148 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6150 assemble_name (file, XSTR (x, 0));
6155 /* Like 'L', for last word of TImode. */
6156 if (GET_CODE (x) == REG)
6157 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6158 else if (GET_CODE (x) == MEM)
6160 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6161 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6162 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6164 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6165 if (small_data_operand (x, GET_MODE (x)))
6166 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6167 reg_names[SMALL_DATA_REG]);
6171 /* Print AltiVec memory operand. */
6176 if (GET_CODE (x) != MEM)
6181 if (GET_CODE (tmp) == REG)
6182 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6183 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6185 if (REGNO (XEXP (tmp, 0)) == 0)
6186 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6187 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6189 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6190 reg_names[ REGNO (XEXP (tmp, 1)) ]);
6198 if (GET_CODE (x) == REG)
6199 fprintf (file, "%s", reg_names[REGNO (x)]);
6200 else if (GET_CODE (x) == MEM)
6202 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6203 know the width from the mode. */
6204 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6205 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6206 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6207 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6208 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6209 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6211 output_address (XEXP (x, 0));
6214 output_addr_const (file, x);
6218 output_operand_lossage ("invalid %%xn code");
6222 /* Print the address of an operand. */
6225 print_operand_address (file, x)
6229 if (GET_CODE (x) == REG)
6230 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6231 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6232 || GET_CODE (x) == LABEL_REF)
6234 output_addr_const (file, x);
6235 if (small_data_operand (x, GET_MODE (x)))
6236 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6237 reg_names[SMALL_DATA_REG]);
6238 else if (TARGET_TOC)
6241 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
6243 if (REGNO (XEXP (x, 0)) == 0)
6244 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6245 reg_names[ REGNO (XEXP (x, 0)) ]);
6247 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6248 reg_names[ REGNO (XEXP (x, 1)) ]);
6250 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
6252 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
6253 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6256 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6257 && CONSTANT_P (XEXP (x, 1)))
6259 output_addr_const (file, XEXP (x, 1));
6260 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6264 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6265 && CONSTANT_P (XEXP (x, 1)))
6267 fprintf (file, "lo16(");
6268 output_addr_const (file, XEXP (x, 1));
6269 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6272 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
6274 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
6276 rtx contains_minus = XEXP (x, 1);
6280 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6281 turn it into (sym) for output_addr_const. */
6282 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
6283 contains_minus = XEXP (contains_minus, 0);
6285 minus = XEXP (contains_minus, 0);
6286 symref = XEXP (minus, 0);
6287 XEXP (contains_minus, 0) = symref;
6292 name = XSTR (symref, 0);
6293 newname = alloca (strlen (name) + sizeof ("@toc"));
6294 strcpy (newname, name);
6295 strcat (newname, "@toc");
6296 XSTR (symref, 0) = newname;
6298 output_addr_const (file, XEXP (x, 1));
6300 XSTR (symref, 0) = name;
6301 XEXP (contains_minus, 0) = minus;
6304 output_addr_const (file, XEXP (x, 1));
6306 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6312 /* Target hook for assembling integer objects. The powerpc version has
6313 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6314 is defined. It also needs to handle DI-mode objects on 64-bit
6318 rs6000_assemble_integer (x, size, aligned_p)
6323 #ifdef RELOCATABLE_NEEDS_FIXUP
6324 /* Special handling for SI values. */
6325 if (size == 4 && aligned_p)
6327 extern int in_toc_section PARAMS ((void));
6328 static int recurse = 0;
6330 /* For -mrelocatable, we mark all addresses that need to be fixed up
6331 in the .fixup section. */
6332 if (TARGET_RELOCATABLE
6333 && !in_toc_section ()
6334 && !in_text_section ()
6336 && GET_CODE (x) != CONST_INT
6337 && GET_CODE (x) != CONST_DOUBLE
6343 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
6345 ASM_OUTPUT_LABEL (asm_out_file, buf);
6346 fprintf (asm_out_file, "\t.long\t(");
6347 output_addr_const (asm_out_file, x);
6348 fprintf (asm_out_file, ")@fixup\n");
6349 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
6350 ASM_OUTPUT_ALIGN (asm_out_file, 2);
6351 fprintf (asm_out_file, "\t.long\t");
6352 assemble_name (asm_out_file, buf);
6353 fprintf (asm_out_file, "\n\t.previous\n");
6357 /* Remove initial .'s to turn a -mcall-aixdesc function
6358 address into the address of the descriptor, not the function
6360 else if (GET_CODE (x) == SYMBOL_REF
6361 && XSTR (x, 0)[0] == '.'
6362 && DEFAULT_ABI == ABI_AIX)
6364 const char *name = XSTR (x, 0);
6365 while (*name == '.')
6368 fprintf (asm_out_file, "\t.long\t%s\n", name);
6372 #endif /* RELOCATABLE_NEEDS_FIXUP */
6373 return default_assemble_integer (x, size, aligned_p);
6377 rs6000_reverse_condition (mode, code)
6378 enum machine_mode mode;
6381 /* Reversal of FP compares takes care -- an ordered compare
6382 becomes an unordered compare and vice versa. */
6383 if (mode == CCFPmode)
6384 return reverse_condition_maybe_unordered (code);
6386 return reverse_condition (code);
6389 /* Generate a compare for CODE. Return a brand-new rtx that
6390 represents the result of the compare. */
6393 rs6000_generate_compare (code)
6396 enum machine_mode comp_mode;
6399 if (rs6000_compare_fp_p)
6400 comp_mode = CCFPmode;
6401 else if (code == GTU || code == LTU
6402 || code == GEU || code == LEU)
6403 comp_mode = CCUNSmode;
6407 /* First, the compare. */
6408 compare_result = gen_reg_rtx (comp_mode);
6409 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
6410 gen_rtx_COMPARE (comp_mode,
6412 rs6000_compare_op1)));
6414 /* Some kinds of FP comparisons need an OR operation;
6415 except for flag_unsafe_math_optimizations we don't bother. */
6416 if (rs6000_compare_fp_p
6417 && ! flag_unsafe_math_optimizations
6418 && (code == LE || code == GE
6419 || code == UNEQ || code == LTGT
6420 || code == UNGT || code == UNLT))
6422 enum rtx_code or1, or2;
6423 rtx or1_rtx, or2_rtx, compare2_rtx;
6424 rtx or_result = gen_reg_rtx (CCEQmode);
6428 case LE: or1 = LT; or2 = EQ; break;
6429 case GE: or1 = GT; or2 = EQ; break;
6430 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
6431 case LTGT: or1 = LT; or2 = GT; break;
6432 case UNGT: or1 = UNORDERED; or2 = GT; break;
6433 case UNLT: or1 = UNORDERED; or2 = LT; break;
6436 validate_condition_mode (or1, comp_mode);
6437 validate_condition_mode (or2, comp_mode);
6438 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
6439 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
6440 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
6441 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
6443 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
6445 compare_result = or_result;
6449 validate_condition_mode (code, GET_MODE (compare_result));
6451 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
6455 /* Emit the RTL for an sCOND pattern. */
6458 rs6000_emit_sCOND (code, result)
6463 enum machine_mode op_mode;
6465 condition_rtx = rs6000_generate_compare (code);
6467 op_mode = GET_MODE (rs6000_compare_op0);
6468 if (op_mode == VOIDmode)
6469 op_mode = GET_MODE (rs6000_compare_op1);
6471 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
6473 PUT_MODE (condition_rtx, DImode);
6474 convert_move (result, condition_rtx, 0);
6478 PUT_MODE (condition_rtx, SImode);
6479 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
6483 /* Emit a branch of kind CODE to location LOC. */
6486 rs6000_emit_cbranch (code, loc)
6490 rtx condition_rtx, loc_ref;
6492 condition_rtx = rs6000_generate_compare (code);
6493 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
6494 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
6495 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
6499 /* Return the string to output a conditional branch to LABEL, which is
6500 the operand number of the label, or -1 if the branch is really a
6503 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
6504 condition code register and its mode specifies what kind of
6507 REVERSED is non-zero if we should reverse the sense of the comparison.
6509 INSN is the insn. */
6512 output_cbranch (op, label, reversed, insn)
6518 static char string[64];
6519 enum rtx_code code = GET_CODE (op);
6520 rtx cc_reg = XEXP (op, 0);
6521 enum machine_mode mode = GET_MODE (cc_reg);
6522 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
6523 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
6524 int really_reversed = reversed ^ need_longbranch;
6530 validate_condition_mode (code, mode);
6532 /* Work out which way this really branches. We could use
6533 reverse_condition_maybe_unordered here always but this
6534 makes the resulting assembler clearer. */
6535 if (really_reversed)
6536 code = rs6000_reverse_condition (mode, code);
6540 /* Not all of these are actually distinct opcodes, but
6541 we distinguish them for clarity of the resulting assembler. */
6543 ccode = "ne"; break;
6545 ccode = "eq"; break;
6547 ccode = "ge"; break;
6548 case GT: case GTU: case UNGT:
6549 ccode = "gt"; break;
6551 ccode = "le"; break;
6552 case LT: case LTU: case UNLT:
6553 ccode = "lt"; break;
6554 case UNORDERED: ccode = "un"; break;
6555 case ORDERED: ccode = "nu"; break;
6556 case UNGE: ccode = "nl"; break;
6557 case UNLE: ccode = "ng"; break;
6562 /* Maybe we have a guess as to how likely the branch is.
6563 The old mnemonics don't have a way to specify this information. */
6564 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
6565 if (note != NULL_RTX)
6567 /* PROB is the difference from 50%. */
6568 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
6570 /* For branches that are very close to 50%, assume not-taken. */
6571 if (abs (prob) > REG_BR_PROB_BASE / 20
6572 && ((prob > 0) ^ need_longbranch))
6581 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
6583 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
6585 /* We need to escape any '%' characters in the reg_names string.
6586 Assume they'd only be the first character... */
6587 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
6589 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
6593 /* If the branch distance was too far, we may have to use an
6594 unconditional branch to go the distance. */
6595 if (need_longbranch)
6596 s += sprintf (s, ",$+8\n\tb %s", label);
6598 s += sprintf (s, ",%s", label);
6604 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
6605 operands of the last comparison is nonzero/true, FALSE_COND if it
6606 is zero/false. Return 0 if the hardware has no such operation. */
6609 rs6000_emit_cmove (dest, op, true_cond, false_cond)
6615 enum rtx_code code = GET_CODE (op);
6616 rtx op0 = rs6000_compare_op0;
6617 rtx op1 = rs6000_compare_op1;
6619 enum machine_mode mode = GET_MODE (op0);
6622 /* First, work out if the hardware can do this at all, or
6623 if it's too slow... */
6624 /* If the comparison is an integer one, since we only have fsel
6625 it'll be cheaper to use a branch. */
6626 if (! rs6000_compare_fp_p)
6629 /* Eliminate half of the comparisons by switching operands, this
6630 makes the remaining code simpler. */
6631 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
6632 || code == LTGT || code == LT)
6634 code = reverse_condition_maybe_unordered (code);
6636 true_cond = false_cond;
6640 /* UNEQ and LTGT take four instructions for a comparison with zero,
6641 it'll probably be faster to use a branch here too. */
6645 if (GET_CODE (op1) == CONST_DOUBLE)
6646 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
6648 /* We're going to try to implement comparions by performing
6649 a subtract, then comparing against zero. Unfortunately,
6650 Inf - Inf is NaN which is not zero, and so if we don't
6651 know that the the operand is finite and the comparison
6652 would treat EQ different to UNORDERED, we can't do it. */
6653 if (! flag_unsafe_math_optimizations
6654 && code != GT && code != UNGE
6655 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
6656 /* Constructs of the form (a OP b ? a : b) are safe. */
6657 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
6658 || (! rtx_equal_p (op0, true_cond)
6659 && ! rtx_equal_p (op1, true_cond))))
6661 /* At this point we know we can use fsel. */
6663 /* Reduce the comparison to a comparison against zero. */
6664 temp = gen_reg_rtx (mode);
6665 emit_insn (gen_rtx_SET (VOIDmode, temp,
6666 gen_rtx_MINUS (mode, op0, op1)));
6668 op1 = CONST0_RTX (mode);
6670 /* If we don't care about NaNs we can reduce some of the comparisons
6671 down to faster ones. */
6672 if (flag_unsafe_math_optimizations)
6678 true_cond = false_cond;
6691 /* Now, reduce everything down to a GE. */
6698 temp = gen_reg_rtx (mode);
6699 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
6704 temp = gen_reg_rtx (mode);
6705 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0)));
6710 temp = gen_reg_rtx (mode);
6711 emit_insn (gen_rtx_SET (VOIDmode, temp,
6713 gen_rtx_ABS (mode, op0))));
6718 temp = gen_reg_rtx (mode);
6719 emit_insn (gen_rtx_SET (VOIDmode, temp,
6720 gen_rtx_IF_THEN_ELSE (mode,
6721 gen_rtx_GE (VOIDmode,
6723 true_cond, false_cond)));
6725 true_cond = false_cond;
6727 temp = gen_reg_rtx (mode);
6728 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
6733 temp = gen_reg_rtx (mode);
6734 emit_insn (gen_rtx_SET (VOIDmode, temp,
6735 gen_rtx_IF_THEN_ELSE (mode,
6736 gen_rtx_GE (VOIDmode,
6738 true_cond, false_cond)));
6740 false_cond = true_cond;
6742 temp = gen_reg_rtx (mode);
6743 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
6751 emit_insn (gen_rtx_SET (VOIDmode, dest,
6752 gen_rtx_IF_THEN_ELSE (GET_MODE (dest),
6753 gen_rtx_GE (VOIDmode,
6755 true_cond, false_cond)));
6760 rs6000_emit_minmax (dest, code, op0, op1)
6766 enum machine_mode mode = GET_MODE (op0);
6768 if (code == SMAX || code == UMAX)
6769 target = emit_conditional_move (dest, GE, op0, op1, mode,
6772 target = emit_conditional_move (dest, GE, op0, op1, mode,
6774 if (target == NULL_RTX)
6777 emit_move_insn (dest, target);
6780 /* This page contains routines that are used to determine what the
6781 function prologue and epilogue code will do and write them out. */
6783 /* Return the first fixed-point register that is required to be
6784 saved. 32 if none. */
6787 first_reg_to_save ()
6791 /* Find lowest numbered live register. */
6792 for (first_reg = 13; first_reg <= 31; first_reg++)
6793 if (regs_ever_live[first_reg]
6794 && (! call_used_regs[first_reg]
6795 || (first_reg == PIC_OFFSET_TABLE_REGNUM
6796 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
6797 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
6800 if (current_function_profile)
6802 /* AIX must save/restore every register that contains a parameter
6803 before/after the .__mcount call plus an additional register
6804 for the static chain, if needed; use registers from 30 down to 22
6806 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
6808 int last_parm_reg, profile_first_reg;
6810 /* Figure out last used parameter register. The proper thing
6811 to do is to walk incoming args of the function. A function
6812 might have live parameter registers even if it has no
6814 for (last_parm_reg = 10;
6815 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
6819 /* Calculate first reg for saving parameter registers
6821 Skip reg 31 which may contain the frame pointer. */
6822 profile_first_reg = (33 - last_parm_reg
6823 - (current_function_needs_context ? 1 : 0));
6825 /* Need to skip another reg to account for R31 being PICBASE
6826 (when flag_pic is set) or R30 being used as the frame
6827 pointer (when flag_pic is not set). */
6828 --profile_first_reg;
6830 /* Do not save frame pointer if no parameters needs to be saved. */
6831 if (profile_first_reg == 31)
6832 profile_first_reg = 32;
6834 if (first_reg > profile_first_reg)
6835 first_reg = profile_first_reg;
6838 /* SVR4 may need one register to preserve the static chain. */
6839 else if (current_function_needs_context)
6841 /* Skip reg 31 which may contain the frame pointer. */
6848 if (flag_pic && current_function_uses_pic_offset_table &&
6849 (first_reg > PIC_OFFSET_TABLE_REGNUM))
6850 return PIC_OFFSET_TABLE_REGNUM;
6856 /* Similar, for FP regs. */
6859 first_fp_reg_to_save ()
6863 /* Find lowest numbered live register. */
6864 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6865 if (regs_ever_live[first_reg])
6871 /* Similar, for AltiVec regs. */
6874 first_altivec_reg_to_save ()
6878 /* Stack frame remains as is unless we are in AltiVec ABI. */
6879 if (! TARGET_ALTIVEC_ABI)
6880 return LAST_ALTIVEC_REGNO + 1;
6882 /* Find lowest numbered live register. */
6883 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6884 if (regs_ever_live[i])
6890 /* Return a 32-bit mask of the AltiVec registers we need to set in
6891 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
6892 the 32-bit word is 0. */
6895 compute_vrsave_mask ()
6897 unsigned int i, mask = 0;
6899 /* First, find out if we use _any_ altivec registers. */
6900 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6901 if (regs_ever_live[i])
6902 mask |= ALTIVEC_REG_BIT (i);
6907 /* Next, add all registers that are call-clobbered. We do this
6908 because post-reload register optimizers such as regrename_optimize
6909 may choose to use them. They never change the register class
6910 chosen by reload, so cannot create new uses of altivec registers
6911 if there were none before, so the early exit above is safe. */
6912 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
6913 altivec registers not saved in the mask, which might well make the
6914 adjustments below more effective in eliding the save/restore of
6915 VRSAVE in small functions. */
6916 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6917 if (call_used_regs[i])
6918 mask |= ALTIVEC_REG_BIT (i);
6920 /* Next, remove the argument registers from the set. These must
6921 be in the VRSAVE mask set by the caller, so we don't need to add
6922 them in again. More importantly, the mask we compute here is
6923 used to generate CLOBBERs in the set_vrsave insn, and we do not
6924 wish the argument registers to die. */
6925 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
6926 mask &= ~ALTIVEC_REG_BIT (i);
6928 /* Similarly, remove the return value from the set. */
6931 diddle_return_value (is_altivec_return_reg, &yes);
6933 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
6940 is_altivec_return_reg (reg, xyes)
6944 bool *yes = (bool *) xyes;
6945 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
6950 /* Calculate the stack information for the current function. This is
6951 complicated by having two separate calling sequences, the AIX calling
6952 sequence and the V.4 calling sequence.
6954 AIX (and Darwin/Mac OS X) stack frames look like:
6956 SP----> +---------------------------------------+
6957 | back chain to caller | 0 0
6958 +---------------------------------------+
6959 | saved CR | 4 8 (8-11)
6960 +---------------------------------------+
6962 +---------------------------------------+
6963 | reserved for compilers | 12 24
6964 +---------------------------------------+
6965 | reserved for binders | 16 32
6966 +---------------------------------------+
6967 | saved TOC pointer | 20 40
6968 +---------------------------------------+
6969 | Parameter save area (P) | 24 48
6970 +---------------------------------------+
6971 | Alloca space (A) | 24+P etc.
6972 +---------------------------------------+
6973 | Local variable space (L) | 24+P+A
6974 +---------------------------------------+
6975 | Float/int conversion temporary (X) | 24+P+A+L
6976 +---------------------------------------+
6977 | Save area for AltiVec registers (W) | 24+P+A+L+X
6978 +---------------------------------------+
6979 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
6980 +---------------------------------------+
6981 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
6982 +---------------------------------------+
6983 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
6984 +---------------------------------------+
6985 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
6986 +---------------------------------------+
6987 old SP->| back chain to caller's caller |
6988 +---------------------------------------+
6990 The required alignment for AIX configurations is two words (i.e., 8
6994 V.4 stack frames look like:
6996 SP----> +---------------------------------------+
6997 | back chain to caller | 0
6998 +---------------------------------------+
6999 | caller's saved LR | 4
7000 +---------------------------------------+
7001 | Parameter save area (P) | 8
7002 +---------------------------------------+
7003 | Alloca space (A) | 8+P
7004 +---------------------------------------+
7005 | Varargs save area (V) | 8+P+A
7006 +---------------------------------------+
7007 | Local variable space (L) | 8+P+A+V
7008 +---------------------------------------+
7009 | Float/int conversion temporary (X) | 8+P+A+V+L
7010 +---------------------------------------+
7011 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7012 +---------------------------------------+
7013 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7014 +---------------------------------------+
7015 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7016 +---------------------------------------+
7017 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7018 +---------------------------------------+
7019 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7020 +---------------------------------------+
7021 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7022 +---------------------------------------+
7023 old SP->| back chain to caller's caller |
7024 +---------------------------------------+
7026 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7027 given. (But note below and in sysv4.h that we require only 8 and
7028 may round up the size of our stack frame anyways. The historical
7029 reason is early versions of powerpc-linux which didn't properly
7030 align the stack at program startup. A happy side-effect is that
7031 -mno-eabi libraries can be used with -meabi programs.)
7033 The EABI configuration defaults to the V.4 layout, unless
7034 -mcall-aix is used, in which case the AIX layout is used. However,
7035 the stack alignment requirements may differ. If -mno-eabi is not
7036 given, the required stack alignment is 8 bytes; if -mno-eabi is
7037 given, the required alignment is 16 bytes. (But see V.4 comment
7040 #ifndef ABI_STACK_BOUNDARY
7041 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7045 rs6000_stack_info ()
7047 static rs6000_stack_t info, zero_info;
7048 rs6000_stack_t *info_ptr = &info;
7049 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7050 enum rs6000_abi abi;
7054 /* Zero all fields portably. */
7057 /* Select which calling sequence. */
7058 info_ptr->abi = abi = DEFAULT_ABI;
7060 /* Calculate which registers need to be saved & save area size. */
7061 info_ptr->first_gp_reg_save = first_reg_to_save ();
7062 /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM,
7063 even if it currently looks like we won't. */
7064 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7065 || (flag_pic == 1 && abi == ABI_V4)
7066 || (flag_pic && abi == ABI_DARWIN))
7067 && info_ptr->first_gp_reg_save > PIC_OFFSET_TABLE_REGNUM)
7068 info_ptr->gp_size = reg_size * (32 - PIC_OFFSET_TABLE_REGNUM);
7070 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7072 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7073 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7075 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7076 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7077 - info_ptr->first_altivec_reg_save);
7079 /* Does this function call anything? */
7080 info_ptr->calls_p = (! current_function_is_leaf
7081 || cfun->machine->ra_needs_full_frame);
7083 /* Determine if we need to save the link register. */
7084 if (rs6000_ra_ever_killed ()
7085 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7086 #ifdef TARGET_RELOCATABLE
7087 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7089 || (info_ptr->first_fp_reg_save != 64
7090 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7091 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7092 || (abi == ABI_V4 && current_function_calls_alloca)
7093 || (DEFAULT_ABI == ABI_DARWIN
7095 && current_function_uses_pic_offset_table)
7096 || info_ptr->calls_p)
7098 info_ptr->lr_save_p = 1;
7099 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7102 /* Determine if we need to save the condition code registers. */
7103 if (regs_ever_live[CR2_REGNO]
7104 || regs_ever_live[CR3_REGNO]
7105 || regs_ever_live[CR4_REGNO])
7107 info_ptr->cr_save_p = 1;
7109 info_ptr->cr_size = reg_size;
7112 /* If the current function calls __builtin_eh_return, then we need
7113 to allocate stack space for registers that will hold data for
7114 the exception handler. */
7115 if (current_function_calls_eh_return)
7118 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7120 ehrd_size = i * UNITS_PER_WORD;
7125 /* Determine various sizes. */
7126 info_ptr->reg_size = reg_size;
7127 info_ptr->fixed_size = RS6000_SAVE_AREA;
7128 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7129 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7130 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7133 if (TARGET_ALTIVEC_ABI)
7135 info_ptr->vrsave_mask = compute_vrsave_mask ();
7136 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7140 info_ptr->vrsave_mask = 0;
7141 info_ptr->vrsave_size = 0;
7144 /* Calculate the offsets. */
7152 case ABI_AIX_NODESC:
7154 info_ptr->fp_save_offset = - info_ptr->fp_size;
7155 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7157 if (TARGET_ALTIVEC_ABI)
7159 info_ptr->vrsave_save_offset
7160 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7162 /* Align stack so vector save area is on a quadword boundary. */
7163 if (info_ptr->altivec_size != 0)
7164 info_ptr->altivec_padding_size
7165 = 16 - (-info_ptr->vrsave_save_offset % 16);
7167 info_ptr->altivec_padding_size = 0;
7169 info_ptr->altivec_save_offset
7170 = info_ptr->vrsave_save_offset
7171 - info_ptr->altivec_padding_size
7172 - info_ptr->altivec_size;
7174 /* Adjust for AltiVec case. */
7175 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7178 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7179 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7180 info_ptr->lr_save_offset = 2*reg_size;
7184 info_ptr->fp_save_offset = - info_ptr->fp_size;
7185 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7186 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7188 if (TARGET_ALTIVEC_ABI)
7190 info_ptr->vrsave_save_offset
7191 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7193 /* Align stack so vector save area is on a quadword boundary. */
7194 if (info_ptr->altivec_size != 0)
7195 info_ptr->altivec_padding_size
7196 = 16 - (-info_ptr->vrsave_save_offset % 16);
7198 info_ptr->altivec_padding_size = 0;
7200 info_ptr->altivec_save_offset
7201 = info_ptr->vrsave_save_offset
7202 - info_ptr->altivec_padding_size
7203 - info_ptr->altivec_size;
7205 /* Adjust for AltiVec case. */
7206 info_ptr->toc_save_offset
7207 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7210 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7211 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7212 info_ptr->lr_save_offset = reg_size;
7216 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7218 + info_ptr->altivec_size
7219 + info_ptr->altivec_padding_size
7220 + info_ptr->vrsave_size
7224 + info_ptr->vrsave_size
7225 + info_ptr->toc_size,
7226 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7229 total_raw_size = (info_ptr->vars_size
7230 + info_ptr->parm_size
7231 + info_ptr->save_size
7232 + info_ptr->varargs_size
7233 + info_ptr->fixed_size);
7235 info_ptr->total_size =
7236 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7238 /* Determine if we need to allocate any stack frame:
7240 For AIX we need to push the stack if a frame pointer is needed
7241 (because the stack might be dynamically adjusted), if we are
7242 debugging, if we make calls, or if the sum of fp_save, gp_save,
7243 and local variables are more than the space needed to save all
7244 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7245 + 18*8 = 288 (GPR13 reserved).
7247 For V.4 we don't have the stack cushion that AIX uses, but assume
7248 that the debugger can handle stackless frames. */
7250 if (info_ptr->calls_p)
7251 info_ptr->push_p = 1;
7253 else if (abi == ABI_V4)
7254 info_ptr->push_p = (total_raw_size > info_ptr->fixed_size
7255 || info_ptr->calls_p);
7258 info_ptr->push_p = (frame_pointer_needed
7259 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7260 || ((total_raw_size - info_ptr->fixed_size)
7261 > (TARGET_32BIT ? 220 : 288)));
7263 /* Zero offsets if we're not saving those registers. */
7264 if (info_ptr->fp_size == 0)
7265 info_ptr->fp_save_offset = 0;
7267 if (info_ptr->gp_size == 0)
7268 info_ptr->gp_save_offset = 0;
7270 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7271 info_ptr->altivec_save_offset = 0;
7273 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7274 info_ptr->vrsave_save_offset = 0;
7276 if (! info_ptr->lr_save_p)
7277 info_ptr->lr_save_offset = 0;
7279 if (! info_ptr->cr_save_p)
7280 info_ptr->cr_save_offset = 0;
7282 if (! info_ptr->toc_save_p)
7283 info_ptr->toc_save_offset = 0;
7289 debug_stack_info (info)
7290 rs6000_stack_t *info;
7292 const char *abi_string;
7295 info = rs6000_stack_info ();
7297 fprintf (stderr, "\nStack information for function %s:\n",
7298 ((current_function_decl && DECL_NAME (current_function_decl))
7299 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7304 default: abi_string = "Unknown"; break;
7305 case ABI_NONE: abi_string = "NONE"; break;
7307 case ABI_AIX_NODESC: abi_string = "AIX"; break;
7308 case ABI_DARWIN: abi_string = "Darwin"; break;
7309 case ABI_V4: abi_string = "V.4"; break;
7312 fprintf (stderr, "\tABI = %5s\n", abi_string);
7314 if (TARGET_ALTIVEC_ABI)
7315 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
7317 if (info->first_gp_reg_save != 32)
7318 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
7320 if (info->first_fp_reg_save != 64)
7321 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
7323 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7324 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7325 info->first_altivec_reg_save);
7327 if (info->lr_save_p)
7328 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
7330 if (info->cr_save_p)
7331 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
7333 if (info->toc_save_p)
7334 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
7336 if (info->vrsave_mask)
7337 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
7340 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
7343 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
7345 if (info->gp_save_offset)
7346 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
7348 if (info->fp_save_offset)
7349 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
7351 if (info->altivec_save_offset)
7352 fprintf (stderr, "\taltivec_save_offset = %5d\n",
7353 info->altivec_save_offset);
7355 if (info->vrsave_save_offset)
7356 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
7357 info->vrsave_save_offset);
7359 if (info->lr_save_offset)
7360 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
7362 if (info->cr_save_offset)
7363 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
7365 if (info->toc_save_offset)
7366 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
7368 if (info->varargs_save_offset)
7369 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
7371 if (info->total_size)
7372 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
7374 if (info->varargs_size)
7375 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
7377 if (info->vars_size)
7378 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
7380 if (info->parm_size)
7381 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
7383 if (info->fixed_size)
7384 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
7387 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
7390 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
7392 if (info->altivec_size)
7393 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
7395 if (info->vrsave_size)
7396 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
7398 if (info->altivec_padding_size)
7399 fprintf (stderr, "\taltivec_padding_size= %5d\n",
7400 info->altivec_padding_size);
7403 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
7406 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
7409 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
7411 if (info->save_size)
7412 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
7414 if (info->reg_size != 4)
7415 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
7417 fprintf (stderr, "\n");
7421 rs6000_return_addr (count, frame)
7425 /* Currently we don't optimize very well between prolog and body
7426 code and for PIC code the code can be actually quite bad, so
7427 don't try to be too clever here. */
7430 || DEFAULT_ABI == ABI_AIX
7431 || DEFAULT_ABI == ABI_AIX_NODESC)
7433 cfun->machine->ra_needs_full_frame = 1;
7440 plus_constant (copy_to_reg
7441 (gen_rtx_MEM (Pmode,
7442 memory_address (Pmode, frame))),
7443 RETURN_ADDRESS_OFFSET)));
7446 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
7450 rs6000_ra_ever_killed ()
7454 #ifdef ASM_OUTPUT_MI_THUNK
7455 if (current_function_is_thunk)
7458 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
7459 || cfun->machine->ra_needs_full_frame)
7460 return regs_ever_live[LINK_REGISTER_REGNUM];
7462 push_topmost_sequence ();
7464 pop_topmost_sequence ();
7466 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
7470 /* Add a REG_MAYBE_DEAD note to the insn. */
7472 rs6000_maybe_dead (insn)
7475 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
7480 /* Emit instructions needed to load the TOC register.
7481 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
7482 a constant pool; or for SVR4 -fpic. */
7485 rs6000_emit_load_toc_table (fromprolog)
7489 dest = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
7491 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
7493 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7495 rtx temp = (fromprolog
7496 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
7497 : gen_reg_rtx (Pmode));
7498 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
7499 rs6000_maybe_dead (emit_move_insn (dest, temp));
7501 else if (flag_pic == 2)
7504 rtx tempLR = (fromprolog
7505 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
7506 : gen_reg_rtx (Pmode));
7507 rtx temp0 = (fromprolog
7508 ? gen_rtx_REG (Pmode, 0)
7509 : gen_reg_rtx (Pmode));
7512 /* possibly create the toc section */
7513 if (! toc_initialized)
7516 function_section (current_function_decl);
7523 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
7524 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7526 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
7527 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7529 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
7531 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
7532 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
7539 static int reload_toc_labelno = 0;
7541 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
7543 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
7544 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7546 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
7549 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
7550 rs6000_maybe_dead (emit_move_insn (temp0,
7551 gen_rtx_MEM (Pmode, dest)));
7553 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
7555 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
7557 /* This is for AIX code running in non-PIC ELF. */
7560 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
7561 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7563 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
7564 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
7572 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
7574 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
7579 get_TOC_alias_set ()
7581 static int set = -1;
7583 set = new_alias_set ();
7587 /* This retuns nonzero if the current function uses the TOC. This is
7588 determined by the presence of (unspec ... 7), which is generated by
7589 the various load_toc_* patterns. */
7596 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7599 rtx pat = PATTERN (insn);
7602 if (GET_CODE (pat) == PARALLEL)
7603 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7604 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
7605 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
7612 create_TOC_reference (symbol)
7615 return gen_rtx_PLUS (Pmode,
7616 gen_rtx_REG (Pmode, TOC_REGISTER),
7617 gen_rtx_CONST (Pmode,
7618 gen_rtx_MINUS (Pmode, symbol,
7619 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
7623 /* __throw will restore its own return address to be the same as the
7624 return address of the function that the throw is being made to.
7625 This is unfortunate, because we want to check the original
7626 return address to see if we need to restore the TOC.
7627 So we have to squirrel it away here.
7628 This is used only in compiling __throw and __rethrow.
7630 Most of this code should be removed by CSE. */
7631 static rtx insn_after_throw;
7633 /* This does the saving... */
7635 rs6000_aix_emit_builtin_unwind_init ()
7638 rtx stack_top = gen_reg_rtx (Pmode);
7639 rtx opcode_addr = gen_reg_rtx (Pmode);
7641 insn_after_throw = gen_reg_rtx (SImode);
7643 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
7644 emit_move_insn (stack_top, mem);
7646 mem = gen_rtx_MEM (Pmode,
7647 gen_rtx_PLUS (Pmode, stack_top,
7648 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
7649 emit_move_insn (opcode_addr, mem);
7650 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
7653 /* Emit insns to _restore_ the TOC register, at runtime (specifically
7654 in _eh.o). Only used on AIX.
7656 The idea is that on AIX, function calls look like this:
7657 bl somefunction-trampoline
7661 somefunction-trampoline:
7663 ... load function address in the count register ...
7665 or like this, if the linker determines that this is not a cross-module call
7666 and so the TOC need not be restored:
7669 or like this, if the compiler could determine that this is not a
7672 now, the tricky bit here is that register 2 is saved and restored
7673 by the _linker_, so we can't readily generate debugging information
7674 for it. So we need to go back up the call chain looking at the
7675 insns at return addresses to see which calls saved the TOC register
7676 and so see where it gets restored from.
7678 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
7679 just before the actual epilogue.
7681 On the bright side, this incurs no space or time overhead unless an
7682 exception is thrown, except for the extra code in libgcc.a.
7684 The parameter STACKSIZE is a register containing (at runtime)
7685 the amount to be popped off the stack in addition to the stack frame
7686 of this routine (which will be __throw or __rethrow, and so is
7687 guaranteed to have a stack frame). */
7690 rs6000_emit_eh_toc_restore (stacksize)
7694 rtx bottom_of_stack = gen_reg_rtx (Pmode);
7695 rtx tocompare = gen_reg_rtx (SImode);
7696 rtx opcode = gen_reg_rtx (SImode);
7697 rtx opcode_addr = gen_reg_rtx (Pmode);
7699 rtx loop_start = gen_label_rtx ();
7700 rtx no_toc_restore_needed = gen_label_rtx ();
7701 rtx loop_exit = gen_label_rtx ();
7703 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
7704 set_mem_alias_set (mem, rs6000_sr_alias_set);
7705 emit_move_insn (bottom_of_stack, mem);
7707 top_of_stack = expand_binop (Pmode, add_optab,
7708 bottom_of_stack, stacksize,
7709 NULL_RTX, 1, OPTAB_WIDEN);
7711 emit_move_insn (tocompare,
7712 GEN_INT (trunc_int_for_mode (TARGET_32BIT
7714 : 0xE8410028, SImode)));
7716 if (insn_after_throw == NULL_RTX)
7718 emit_move_insn (opcode, insn_after_throw);
7720 emit_note (NULL, NOTE_INSN_LOOP_BEG);
7721 emit_label (loop_start);
7723 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
7724 SImode, NULL_RTX, NULL_RTX,
7725 no_toc_restore_needed);
7727 mem = gen_rtx_MEM (Pmode,
7728 gen_rtx_PLUS (Pmode, bottom_of_stack,
7729 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
7730 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
7732 emit_label (no_toc_restore_needed);
7733 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
7734 Pmode, NULL_RTX, NULL_RTX,
7737 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
7738 set_mem_alias_set (mem, rs6000_sr_alias_set);
7739 emit_move_insn (bottom_of_stack, mem);
7741 mem = gen_rtx_MEM (Pmode,
7742 gen_rtx_PLUS (Pmode, bottom_of_stack,
7743 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
7744 emit_move_insn (opcode_addr, mem);
7745 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
7747 emit_note (NULL, NOTE_INSN_LOOP_CONT);
7748 emit_jump (loop_start);
7749 emit_note (NULL, NOTE_INSN_LOOP_END);
7750 emit_label (loop_exit);
7752 #endif /* TARGET_AIX */
7754 /* This ties together stack memory (MEM with an alias set of
7755 rs6000_sr_alias_set) and the change to the stack pointer. */
7758 rs6000_emit_stack_tie ()
7760 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
7762 set_mem_alias_set (mem, rs6000_sr_alias_set);
7763 emit_insn (gen_stack_tie (mem));
7766 /* Emit the correct code for allocating stack space, as insns.
7767 If COPY_R12, make sure a copy of the old frame is left in r12.
7768 The generated code may use hard register 0 as a temporary. */
7771 rs6000_emit_allocate_stack (size, copy_r12)
7776 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
7777 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
7778 rtx todec = GEN_INT (-size);
7780 if (current_function_limit_stack)
7782 if (REG_P (stack_limit_rtx)
7783 && REGNO (stack_limit_rtx) > 1
7784 && REGNO (stack_limit_rtx) <= 31)
7786 emit_insn (Pmode == SImode
7787 ? gen_addsi3 (tmp_reg,
7790 : gen_adddi3 (tmp_reg,
7794 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
7797 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
7799 && DEFAULT_ABI == ABI_V4)
7801 rtx toload = gen_rtx_CONST (VOIDmode,
7802 gen_rtx_PLUS (Pmode,
7806 emit_insn (gen_elf_high (tmp_reg, toload));
7807 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
7808 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
7812 warning ("stack limit expression is not supported");
7815 if (copy_r12 || ! TARGET_UPDATE)
7816 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
7822 /* Need a note here so that try_split doesn't get confused. */
7823 if (get_last_insn() == NULL_RTX)
7824 emit_note (0, NOTE_INSN_DELETED);
7825 insn = emit_move_insn (tmp_reg, todec);
7826 try_split (PATTERN (insn), insn, 0);
7830 if (Pmode == SImode)
7831 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
7834 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
7839 if (Pmode == SImode)
7840 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
7842 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
7843 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
7844 gen_rtx_REG (Pmode, 12));
7847 RTX_FRAME_RELATED_P (insn) = 1;
7849 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7850 gen_rtx_SET (VOIDmode, stack_reg,
7851 gen_rtx_PLUS (Pmode, stack_reg,
7856 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
7859 (mem (plus (blah) (regXX)))
7863 (mem (plus (blah) (const VALUE_OF_REGXX))). */
7866 altivec_frame_fixup (insn, reg, val)
7872 real = copy_rtx (PATTERN (insn));
7874 real = replace_rtx (real, reg, GEN_INT (val));
7876 RTX_FRAME_RELATED_P (insn) = 1;
7877 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7882 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
7883 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
7884 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
7885 deduce these equivalences by itself so it wasn't necessary to hold
7886 its hand so much. */
7889 rs6000_frame_related (insn, reg, val, reg2, rreg)
7898 real = copy_rtx (PATTERN (insn));
7900 real = replace_rtx (real, reg,
7901 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
7902 STACK_POINTER_REGNUM),
7905 /* We expect that 'real' is either a SET or a PARALLEL containing
7906 SETs (and possibly other stuff). In a PARALLEL, all the SETs
7907 are important so they all have to be marked RTX_FRAME_RELATED_P. */
7909 if (GET_CODE (real) == SET)
7913 temp = simplify_rtx (SET_SRC (set));
7915 SET_SRC (set) = temp;
7916 temp = simplify_rtx (SET_DEST (set));
7918 SET_DEST (set) = temp;
7919 if (GET_CODE (SET_DEST (set)) == MEM)
7921 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
7923 XEXP (SET_DEST (set), 0) = temp;
7926 else if (GET_CODE (real) == PARALLEL)
7929 for (i = 0; i < XVECLEN (real, 0); i++)
7930 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
7932 rtx set = XVECEXP (real, 0, i);
7934 temp = simplify_rtx (SET_SRC (set));
7936 SET_SRC (set) = temp;
7937 temp = simplify_rtx (SET_DEST (set));
7939 SET_DEST (set) = temp;
7940 if (GET_CODE (SET_DEST (set)) == MEM)
7942 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
7944 XEXP (SET_DEST (set), 0) = temp;
7946 RTX_FRAME_RELATED_P (set) = 1;
7952 if (reg2 != NULL_RTX)
7953 real = replace_rtx (real, reg2, rreg);
7955 RTX_FRAME_RELATED_P (insn) = 1;
7956 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7961 /* Returns an insn that has a vrsave set operation with the
7962 appropriate CLOBBERs. */
7965 generate_set_vrsave (reg, info, epiloguep)
7967 rs6000_stack_t *info;
7971 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
7972 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
7975 = gen_rtx_SET (VOIDmode,
7977 gen_rtx_UNSPEC_VOLATILE (SImode,
7978 gen_rtvec (2, reg, vrsave),
7983 /* We need to clobber the registers in the mask so the scheduler
7984 does not move sets to VRSAVE before sets of AltiVec registers.
7986 However, if the function receives nonlocal gotos, reload will set
7987 all call saved registers live. We will end up with:
7989 (set (reg 999) (mem))
7990 (parallel [ (set (reg vrsave) (unspec blah))
7991 (clobber (reg 999))])
7993 The clobber will cause the store into reg 999 to be dead, and
7994 flow will attempt to delete an epilogue insn. In this case, we
7995 need an unspec use/set of the register. */
7997 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7998 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8000 if (!epiloguep || call_used_regs [i])
8001 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8002 gen_rtx_REG (V4SImode, i));
8005 rtx reg = gen_rtx_REG (V4SImode, i);
8008 = gen_rtx_SET (VOIDmode,
8010 gen_rtx_UNSPEC (V4SImode,
8011 gen_rtvec (1, reg), 27));
8015 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8017 for (i = 0; i < nclobs; ++i)
8018 XVECEXP (insn, 0, i) = clobs[i];
8023 /* Emit function prologue as insns. */
8026 rs6000_emit_prologue ()
8028 rs6000_stack_t *info = rs6000_stack_info ();
8029 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8030 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8031 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8032 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8033 rtx frame_reg_rtx = sp_reg_rtx;
8034 rtx cr_save_rtx = NULL;
8036 int saving_FPRs_inline;
8037 int using_store_multiple;
8038 HOST_WIDE_INT sp_offset = 0;
8040 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8041 && info->first_gp_reg_save < 31);
8042 saving_FPRs_inline = (info->first_fp_reg_save == 64
8043 || FP_SAVE_INLINE (info->first_fp_reg_save));
8045 /* For V.4, update stack before we do any saving and set back pointer. */
8046 if (info->push_p && DEFAULT_ABI == ABI_V4)
8048 if (info->total_size < 32767)
8049 sp_offset = info->total_size;
8051 frame_reg_rtx = frame_ptr_rtx;
8052 rs6000_emit_allocate_stack (info->total_size,
8053 (frame_reg_rtx != sp_reg_rtx
8056 || info->first_fp_reg_save < 64
8057 || info->first_gp_reg_save < 32
8059 if (frame_reg_rtx != sp_reg_rtx)
8060 rs6000_emit_stack_tie ();
8063 /* Save AltiVec registers if needed. */
8064 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8068 /* There should be a non inline version of this, for when we
8069 are saving lots of vector registers. */
8070 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8071 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8073 rtx areg, savereg, mem;
8076 offset = info->altivec_save_offset + sp_offset
8077 + 16 * (i - info->first_altivec_reg_save);
8079 savereg = gen_rtx_REG (V4SImode, i);
8081 areg = gen_rtx_REG (Pmode, 0);
8082 emit_move_insn (areg, GEN_INT (offset));
8084 /* AltiVec addressing mode is [reg+reg]. */
8085 mem = gen_rtx_MEM (V4SImode,
8086 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8088 set_mem_alias_set (mem, rs6000_sr_alias_set);
8090 insn = emit_move_insn (mem, savereg);
8092 altivec_frame_fixup (insn, areg, offset);
8096 /* VRSAVE is a bit vector representing which AltiVec registers
8097 are used. The OS uses this to determine which vector
8098 registers to save on a context switch. We need to save
8099 VRSAVE on the stack frame, add whatever AltiVec registers we
8100 used in this function, and do the corresponding magic in the
8103 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8105 rtx reg, mem, vrsave;
8108 /* Get VRSAVE onto a GPR. */
8109 reg = gen_rtx_REG (SImode, 12);
8110 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8111 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
8114 offset = info->vrsave_save_offset + sp_offset;
8116 = gen_rtx_MEM (SImode,
8117 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8118 set_mem_alias_set (mem, rs6000_sr_alias_set);
8119 insn = emit_move_insn (mem, reg);
8121 /* Include the registers in the mask. */
8122 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8124 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8127 /* If we use the link register, get it into r0. */
8128 if (info->lr_save_p)
8129 emit_move_insn (gen_rtx_REG (Pmode, 0),
8130 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8132 /* If we need to save CR, put it into r12. */
8133 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8135 cr_save_rtx = gen_rtx_REG (SImode, 12);
8136 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8139 /* Do any required saving of fpr's. If only one or two to save, do
8140 it ourselves. Otherwise, call function. */
8141 if (saving_FPRs_inline)
8144 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8145 if ((regs_ever_live[info->first_fp_reg_save+i]
8146 && ! call_used_regs[info->first_fp_reg_save+i]))
8149 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8150 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8151 GEN_INT (info->fp_save_offset
8154 mem = gen_rtx_MEM (DFmode, addr);
8155 set_mem_alias_set (mem, rs6000_sr_alias_set);
8157 insn = emit_move_insn (mem, reg);
8158 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8159 NULL_RTX, NULL_RTX);
8162 else if (info->first_fp_reg_save != 64)
8166 const char *alloc_rname;
8168 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8170 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8172 LINK_REGISTER_REGNUM));
8173 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8174 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8175 alloc_rname = ggc_strdup (rname);
8176 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8177 gen_rtx_SYMBOL_REF (Pmode,
8179 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8182 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8183 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8184 GEN_INT (info->fp_save_offset
8185 + sp_offset + 8*i));
8186 mem = gen_rtx_MEM (DFmode, addr);
8187 set_mem_alias_set (mem, rs6000_sr_alias_set);
8189 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8191 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8192 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8193 NULL_RTX, NULL_RTX);
8196 /* Save GPRs. This is done as a PARALLEL if we are using
8197 the store-multiple instructions. */
8198 if (using_store_multiple)
8202 p = rtvec_alloc (32 - info->first_gp_reg_save);
8203 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8204 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8207 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8208 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8209 GEN_INT (info->gp_save_offset
8212 mem = gen_rtx_MEM (reg_mode, addr);
8213 set_mem_alias_set (mem, rs6000_sr_alias_set);
8215 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8217 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8218 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8219 NULL_RTX, NULL_RTX);
8224 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8225 if ((regs_ever_live[info->first_gp_reg_save+i]
8226 && ! call_used_regs[info->first_gp_reg_save+i])
8227 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
8228 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8229 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8232 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8233 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8234 GEN_INT (info->gp_save_offset
8237 mem = gen_rtx_MEM (reg_mode, addr);
8238 set_mem_alias_set (mem, rs6000_sr_alias_set);
8240 insn = emit_move_insn (mem, reg);
8241 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8242 NULL_RTX, NULL_RTX);
8246 /* ??? There's no need to emit actual instructions here, but it's the
8247 easiest way to get the frame unwind information emitted. */
8248 if (current_function_calls_eh_return)
8250 unsigned int i, regno;
8256 regno = EH_RETURN_DATA_REGNO (i);
8257 if (regno == INVALID_REGNUM)
8260 reg = gen_rtx_REG (reg_mode, regno);
8261 addr = plus_constant (frame_reg_rtx,
8262 info->ehrd_offset + sp_offset
8263 + reg_size * (int) i);
8264 mem = gen_rtx_MEM (reg_mode, addr);
8265 set_mem_alias_set (mem, rs6000_sr_alias_set);
8267 insn = emit_move_insn (mem, reg);
8268 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8269 NULL_RTX, NULL_RTX);
8273 /* Save lr if we used it. */
8274 if (info->lr_save_p)
8276 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8277 GEN_INT (info->lr_save_offset + sp_offset));
8278 rtx reg = gen_rtx_REG (Pmode, 0);
8279 rtx mem = gen_rtx_MEM (Pmode, addr);
8280 /* This should not be of rs6000_sr_alias_set, because of
8281 __builtin_return_address. */
8283 insn = emit_move_insn (mem, reg);
8284 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8285 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8288 /* Save CR if we use any that must be preserved. */
8289 if (info->cr_save_p)
8291 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8292 GEN_INT (info->cr_save_offset + sp_offset));
8293 rtx mem = gen_rtx_MEM (SImode, addr);
8295 set_mem_alias_set (mem, rs6000_sr_alias_set);
8297 /* If r12 was used to hold the original sp, copy cr into r0 now
8299 if (REGNO (frame_reg_rtx) == 12)
8301 cr_save_rtx = gen_rtx_REG (SImode, 0);
8302 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8304 insn = emit_move_insn (mem, cr_save_rtx);
8306 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8307 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8308 OK. All we have to do is specify that _one_ condition code
8309 register is saved in this stack slot. The thrower's epilogue
8310 will then restore all the call-saved registers.
8311 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8312 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8313 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8316 /* Update stack and set back pointer unless this is V.4,
8317 for which it was done previously. */
8318 if (info->push_p && DEFAULT_ABI != ABI_V4)
8319 rs6000_emit_allocate_stack (info->total_size, FALSE);
8321 /* Set frame pointer, if needed. */
8322 if (frame_pointer_needed)
8324 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8326 RTX_FRAME_RELATED_P (insn) = 1;
8329 /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8330 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8331 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8332 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM]))
8334 /* If emit_load_toc_table will use the link register, we need to save
8335 it. We use R11 for this purpose because emit_load_toc_table
8336 can use register 0. This allows us to use a plain 'blr' to return
8337 from the procedure more often. */
8338 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
8339 && ! info->lr_save_p
8340 && EXIT_BLOCK_PTR->pred != NULL);
8341 if (save_LR_around_toc_setup)
8342 emit_move_insn (gen_rtx_REG (Pmode, 11),
8343 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8345 rs6000_emit_load_toc_table (TRUE);
8347 if (save_LR_around_toc_setup)
8348 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8349 gen_rtx_REG (Pmode, 11));
8352 if (DEFAULT_ABI == ABI_DARWIN
8353 && flag_pic && current_function_uses_pic_offset_table)
8355 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
8357 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
8360 emit_move_insn (gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM),
8361 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
8365 /* Write function prologue. */
8368 rs6000_output_function_prologue (file, size)
8370 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8372 rs6000_stack_t *info = rs6000_stack_info ();
8374 if (TARGET_DEBUG_STACK)
8375 debug_stack_info (info);
8377 /* Write .extern for any function we will call to save and restore
8379 if (info->first_fp_reg_save < 64
8380 && !FP_SAVE_INLINE (info->first_fp_reg_save))
8381 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
8382 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
8383 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
8386 /* Write .extern for AIX common mode routines, if needed. */
8387 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
8389 fputs ("\t.extern __mulh\n", file);
8390 fputs ("\t.extern __mull\n", file);
8391 fputs ("\t.extern __divss\n", file);
8392 fputs ("\t.extern __divus\n", file);
8393 fputs ("\t.extern __quoss\n", file);
8394 fputs ("\t.extern __quous\n", file);
8395 common_mode_defined = 1;
8398 if (! HAVE_prologue)
8402 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
8403 the "toplevel" insn chain. */
8404 emit_note (0, NOTE_INSN_DELETED);
8405 rs6000_emit_prologue ();
8406 emit_note (0, NOTE_INSN_DELETED);
8408 if (TARGET_DEBUG_STACK)
8409 debug_rtx_list (get_insns (), 100);
8410 final (get_insns (), file, FALSE, FALSE);
8414 rs6000_pic_labelno++;
8417 /* Emit function epilogue as insns.
8419 At present, dwarf2out_frame_debug_expr doesn't understand
8420 register restores, so we don't bother setting RTX_FRAME_RELATED_P
8421 anywhere in the epilogue. Most of the insns below would in any case
8422 need special notes to explain where r11 is in relation to the stack. */
8425 rs6000_emit_epilogue (sibcall)
8428 rs6000_stack_t *info;
8429 int restoring_FPRs_inline;
8430 int using_load_multiple;
8431 int using_mfcr_multiple;
8432 int use_backchain_to_restore_sp;
8434 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
8435 rtx frame_reg_rtx = sp_reg_rtx;
8436 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8437 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8440 info = rs6000_stack_info ();
8441 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8442 && info->first_gp_reg_save < 31);
8443 restoring_FPRs_inline = (sibcall
8444 || current_function_calls_eh_return
8445 || info->first_fp_reg_save == 64
8446 || FP_SAVE_INLINE (info->first_fp_reg_save));
8447 use_backchain_to_restore_sp = (frame_pointer_needed
8448 || current_function_calls_alloca
8449 || info->total_size > 32767);
8450 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
8451 || rs6000_cpu == PROCESSOR_PPC603
8452 || rs6000_cpu == PROCESSOR_PPC750
8455 /* If we have a frame pointer, a call to alloca, or a large stack
8456 frame, restore the old stack pointer using the backchain. Otherwise,
8457 we know what size to update it with. */
8458 if (use_backchain_to_restore_sp)
8460 /* Under V.4, don't reset the stack pointer until after we're done
8461 loading the saved registers. */
8462 if (DEFAULT_ABI == ABI_V4)
8463 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
8465 emit_move_insn (frame_reg_rtx,
8466 gen_rtx_MEM (Pmode, sp_reg_rtx));
8469 else if (info->push_p)
8471 if (DEFAULT_ABI == ABI_V4)
8472 sp_offset = info->total_size;
8475 emit_insn (TARGET_32BIT
8476 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
8477 GEN_INT (info->total_size))
8478 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
8479 GEN_INT (info->total_size)));
8483 /* Restore AltiVec registers if needed. */
8484 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8488 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8489 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8491 rtx addr, areg, mem;
8493 areg = gen_rtx_REG (Pmode, 0);
8495 (areg, GEN_INT (info->altivec_save_offset
8497 + 16 * (i - info->first_altivec_reg_save)));
8499 /* AltiVec addressing mode is [reg+reg]. */
8500 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
8501 mem = gen_rtx_MEM (V4SImode, addr);
8502 set_mem_alias_set (mem, rs6000_sr_alias_set);
8504 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
8508 /* Restore VRSAVE if needed. */
8509 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
8513 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8514 GEN_INT (info->vrsave_save_offset + sp_offset));
8515 mem = gen_rtx_MEM (SImode, addr);
8516 set_mem_alias_set (mem, rs6000_sr_alias_set);
8517 reg = gen_rtx_REG (SImode, 12);
8518 emit_move_insn (reg, mem);
8520 emit_insn (generate_set_vrsave (reg, info, 1));
8523 /* Get the old lr if we saved it. */
8524 if (info->lr_save_p)
8526 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8527 GEN_INT (info->lr_save_offset + sp_offset));
8528 rtx mem = gen_rtx_MEM (Pmode, addr);
8530 set_mem_alias_set (mem, rs6000_sr_alias_set);
8532 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
8535 /* Get the old cr if we saved it. */
8536 if (info->cr_save_p)
8538 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8539 GEN_INT (info->cr_save_offset + sp_offset));
8540 rtx mem = gen_rtx_MEM (SImode, addr);
8542 set_mem_alias_set (mem, rs6000_sr_alias_set);
8544 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
8547 /* Set LR here to try to overlap restores below. */
8548 if (info->lr_save_p)
8549 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8550 gen_rtx_REG (Pmode, 0));
8552 /* Load exception handler data registers, if needed. */
8553 if (current_function_calls_eh_return)
8555 unsigned int i, regno;
8561 regno = EH_RETURN_DATA_REGNO (i);
8562 if (regno == INVALID_REGNUM)
8565 addr = plus_constant (frame_reg_rtx,
8566 info->ehrd_offset + sp_offset
8567 + reg_size * (int) i);
8568 mem = gen_rtx_MEM (reg_mode, addr);
8569 set_mem_alias_set (mem, rs6000_sr_alias_set);
8571 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
8575 /* Restore GPRs. This is done as a PARALLEL if we are using
8576 the load-multiple instructions. */
8577 if (using_load_multiple)
8580 p = rtvec_alloc (32 - info->first_gp_reg_save);
8581 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8583 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8584 GEN_INT (info->gp_save_offset
8587 rtx mem = gen_rtx_MEM (reg_mode, addr);
8589 set_mem_alias_set (mem, rs6000_sr_alias_set);
8592 gen_rtx_SET (VOIDmode,
8593 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
8596 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8599 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8600 if ((regs_ever_live[info->first_gp_reg_save+i]
8601 && ! call_used_regs[info->first_gp_reg_save+i])
8602 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
8603 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8604 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8606 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8607 GEN_INT (info->gp_save_offset
8610 rtx mem = gen_rtx_MEM (reg_mode, addr);
8612 set_mem_alias_set (mem, rs6000_sr_alias_set);
8614 emit_move_insn (gen_rtx_REG (reg_mode,
8615 info->first_gp_reg_save + i),
8619 /* Restore fpr's if we need to do it without calling a function. */
8620 if (restoring_FPRs_inline)
8621 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8622 if ((regs_ever_live[info->first_fp_reg_save+i]
8623 && ! call_used_regs[info->first_fp_reg_save+i]))
8626 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8627 GEN_INT (info->fp_save_offset
8630 mem = gen_rtx_MEM (DFmode, addr);
8631 set_mem_alias_set (mem, rs6000_sr_alias_set);
8633 emit_move_insn (gen_rtx_REG (DFmode,
8634 info->first_fp_reg_save + i),
8638 /* If we saved cr, restore it here. Just those that were used. */
8639 if (info->cr_save_p)
8641 rtx r12_rtx = gen_rtx_REG (SImode, 12);
8644 if (using_mfcr_multiple)
8646 for (i = 0; i < 8; i++)
8647 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
8653 if (using_mfcr_multiple && count > 1)
8658 p = rtvec_alloc (count);
8661 for (i = 0; i < 8; i++)
8662 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
8664 rtvec r = rtvec_alloc (2);
8665 RTVEC_ELT (r, 0) = r12_rtx;
8666 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
8667 RTVEC_ELT (p, ndx) =
8668 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
8669 gen_rtx_UNSPEC (CCmode, r, 20));
8672 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8677 for (i = 0; i < 8; i++)
8678 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
8680 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
8686 /* If this is V.4, unwind the stack pointer after all of the loads
8687 have been done. We need to emit a block here so that sched
8688 doesn't decide to move the sp change before the register restores
8689 (which may not have any obvious dependency on the stack). This
8690 doesn't hurt performance, because there is no scheduling that can
8691 be done after this point. */
8692 if (DEFAULT_ABI == ABI_V4)
8694 if (frame_reg_rtx != sp_reg_rtx)
8695 rs6000_emit_stack_tie ();
8697 if (use_backchain_to_restore_sp)
8699 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
8701 else if (sp_offset != 0)
8703 emit_insn (Pmode == SImode
8704 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
8705 GEN_INT (sp_offset))
8706 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
8707 GEN_INT (sp_offset)));
8711 if (current_function_calls_eh_return)
8713 rtx sa = EH_RETURN_STACKADJ_RTX;
8714 emit_insn (Pmode == SImode
8715 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
8716 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
8722 if (! restoring_FPRs_inline)
8723 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
8725 p = rtvec_alloc (2);
8727 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
8728 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8730 LINK_REGISTER_REGNUM));
8732 /* If we have to restore more than two FP registers, branch to the
8733 restore function. It will return to our caller. */
8734 if (! restoring_FPRs_inline)
8738 const char *alloc_rname;
8740 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
8741 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
8742 alloc_rname = ggc_strdup (rname);
8743 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
8744 gen_rtx_SYMBOL_REF (Pmode,
8747 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8750 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
8751 GEN_INT (info->fp_save_offset + 8*i));
8752 mem = gen_rtx_MEM (DFmode, addr);
8753 set_mem_alias_set (mem, rs6000_sr_alias_set);
8755 RTVEC_ELT (p, i+3) =
8756 gen_rtx_SET (VOIDmode,
8757 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
8762 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
8766 /* Write function epilogue. */
8769 rs6000_output_function_epilogue (file, size)
8771 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8773 rs6000_stack_t *info = rs6000_stack_info ();
8775 if (! HAVE_epilogue)
8777 rtx insn = get_last_insn ();
8778 /* If the last insn was a BARRIER, we don't have to write anything except
8780 if (GET_CODE (insn) == NOTE)
8781 insn = prev_nonnote_insn (insn);
8782 if (insn == 0 || GET_CODE (insn) != BARRIER)
8784 /* This is slightly ugly, but at least we don't have two
8785 copies of the epilogue-emitting code. */
8788 /* A NOTE_INSN_DELETED is supposed to be at the start
8789 and end of the "toplevel" insn chain. */
8790 emit_note (0, NOTE_INSN_DELETED);
8791 rs6000_emit_epilogue (FALSE);
8792 emit_note (0, NOTE_INSN_DELETED);
8794 if (TARGET_DEBUG_STACK)
8795 debug_rtx_list (get_insns (), 100);
8796 final (get_insns (), file, FALSE, FALSE);
8801 /* Output a traceback table here. See /usr/include/sys/debug.h for info
8804 We don't output a traceback table if -finhibit-size-directive was
8805 used. The documentation for -finhibit-size-directive reads
8806 ``don't output a @code{.size} assembler directive, or anything
8807 else that would cause trouble if the function is split in the
8808 middle, and the two halves are placed at locations far apart in
8809 memory.'' The traceback table has this property, since it
8810 includes the offset from the start of the function to the
8811 traceback table itself.
8813 System V.4 Powerpc's (and the embedded ABI derived from it) use a
8814 different traceback table. */
8815 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
8817 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
8818 const char *language_string = lang_hooks.name;
8819 int fixed_parms, float_parms, parm_info;
8822 while (*fname == '.') /* V.4 encodes . in the name */
8825 /* Need label immediately before tbtab, so we can compute its offset
8826 from the function start. */
8829 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
8830 ASM_OUTPUT_LABEL (file, fname);
8832 /* The .tbtab pseudo-op can only be used for the first eight
8833 expressions, since it can't handle the possibly variable
8834 length fields that follow. However, if you omit the optional
8835 fields, the assembler outputs zeros for all optional fields
8836 anyways, giving each variable length field is minimum length
8837 (as defined in sys/debug.h). Thus we can not use the .tbtab
8838 pseudo-op at all. */
8840 /* An all-zero word flags the start of the tbtab, for debuggers
8841 that have to find it by searching forward from the entry
8842 point or from the current pc. */
8843 fputs ("\t.long 0\n", file);
8845 /* Tbtab format type. Use format type 0. */
8846 fputs ("\t.byte 0,", file);
8848 /* Language type. Unfortunately, there doesn't seem to be any
8849 official way to get this info, so we use language_string. C
8850 is 0. C++ is 9. No number defined for Obj-C, so use the
8851 value for C for now. There is no official value for Java,
8852 although IBM appears to be using 13. There is no official value
8853 for Chill, so we've chosen 44 pseudo-randomly. */
8854 if (! strcmp (language_string, "GNU C")
8855 || ! strcmp (language_string, "GNU Objective-C"))
8857 else if (! strcmp (language_string, "GNU F77"))
8859 else if (! strcmp (language_string, "GNU Ada"))
8861 else if (! strcmp (language_string, "GNU Pascal"))
8863 else if (! strcmp (language_string, "GNU C++"))
8865 else if (! strcmp (language_string, "GNU Java"))
8867 else if (! strcmp (language_string, "GNU CHILL"))
8871 fprintf (file, "%d,", i);
8873 /* 8 single bit fields: global linkage (not set for C extern linkage,
8874 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
8875 from start of procedure stored in tbtab, internal function, function
8876 has controlled storage, function has no toc, function uses fp,
8877 function logs/aborts fp operations. */
8878 /* Assume that fp operations are used if any fp reg must be saved. */
8879 fprintf (file, "%d,", (1 << 5) | ((info->first_fp_reg_save != 64) << 1));
8881 /* 6 bitfields: function is interrupt handler, name present in
8882 proc table, function calls alloca, on condition directives
8883 (controls stack walks, 3 bits), saves condition reg, saves
8885 /* The `function calls alloca' bit seems to be set whenever reg 31 is
8886 set up as a frame pointer, even when there is no alloca call. */
8887 fprintf (file, "%d,",
8888 ((1 << 6) | (frame_pointer_needed << 5)
8889 | (info->cr_save_p << 1) | (info->lr_save_p)));
8891 /* 3 bitfields: saves backchain, spare bit, number of fpr saved
8893 fprintf (file, "%d,",
8894 (info->push_p << 7) | (64 - info->first_fp_reg_save));
8896 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
8897 fprintf (file, "%d,", (32 - first_reg_to_save ()));
8900 /* Compute the parameter info from the function decl argument
8903 int next_parm_info_bit;
8905 next_parm_info_bit = 31;
8910 for (decl = DECL_ARGUMENTS (current_function_decl);
8911 decl; decl = TREE_CHAIN (decl))
8913 rtx parameter = DECL_INCOMING_RTL (decl);
8914 enum machine_mode mode = GET_MODE (parameter);
8916 if (GET_CODE (parameter) == REG)
8918 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
8926 else if (mode == DFmode)
8931 /* If only one bit will fit, don't or in this entry. */
8932 if (next_parm_info_bit > 0)
8933 parm_info |= (bits << (next_parm_info_bit - 1));
8934 next_parm_info_bit -= 2;
8938 fixed_parms += ((GET_MODE_SIZE (mode)
8939 + (UNITS_PER_WORD - 1))
8941 next_parm_info_bit -= 1;
8947 /* Number of fixed point parameters. */
8948 /* This is actually the number of words of fixed point parameters; thus
8949 an 8 byte struct counts as 2; and thus the maximum value is 8. */
8950 fprintf (file, "%d,", fixed_parms);
8952 /* 2 bitfields: number of floating point parameters (7 bits), parameters
8954 /* This is actually the number of fp registers that hold parameters;
8955 and thus the maximum value is 13. */
8956 /* Set parameters on stack bit if parameters are not in their original
8957 registers, regardless of whether they are on the stack? Xlc
8958 seems to set the bit when not optimizing. */
8959 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
8961 /* Optional fields follow. Some are variable length. */
8963 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
8965 /* There is an entry for each parameter in a register, in the order that
8966 they occur in the parameter list. Any intervening arguments on the
8967 stack are ignored. If the list overflows a long (max possible length
8968 34 bits) then completely leave off all elements that don't fit. */
8969 /* Only emit this long if there was at least one parameter. */
8970 if (fixed_parms || float_parms)
8971 fprintf (file, "\t.long %d\n", parm_info);
8973 /* Offset from start of code to tb table. */
8974 fputs ("\t.long ", file);
8975 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
8977 RS6000_OUTPUT_BASENAME (file, fname);
8979 assemble_name (file, fname);
8983 RS6000_OUTPUT_BASENAME (file, fname);
8985 assemble_name (file, fname);
8989 /* Interrupt handler mask. */
8990 /* Omit this long, since we never set the interrupt handler bit
8993 /* Number of CTL (controlled storage) anchors. */
8994 /* Omit this long, since the has_ctl bit is never set above. */
8996 /* Displacement into stack of each CTL anchor. */
8997 /* Omit this list of longs, because there are no CTL anchors. */
8999 /* Length of function name. */
9000 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9002 /* Function name. */
9003 assemble_string (fname, strlen (fname));
9005 /* Register for alloca automatic storage; this is always reg 31.
9006 Only emit this if the alloca bit was set above. */
9007 if (frame_pointer_needed)
9008 fputs ("\t.byte 31\n", file);
9012 /* A C compound statement that outputs the assembler code for a thunk
9013 function, used to implement C++ virtual function calls with
9014 multiple inheritance. The thunk acts as a wrapper around a virtual
9015 function, adjusting the implicit object parameter before handing
9016 control off to the real function.
9018 First, emit code to add the integer DELTA to the location that
9019 contains the incoming first argument. Assume that this argument
9020 contains a pointer, and is the one used to pass the `this' pointer
9021 in C++. This is the incoming argument *before* the function
9022 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9023 values of all other incoming arguments.
9025 After the addition, emit code to jump to FUNCTION, which is a
9026 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9027 not touch the return address. Hence returning from FUNCTION will
9028 return to whoever called the current `thunk'.
9030 The effect must be as if FUNCTION had been called directly with the
9031 adjusted first argument. This macro is responsible for emitting
9032 all of the code for a thunk function; output_function_prologue()
9033 and output_function_epilogue() are not invoked.
9035 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9036 been extracted from it.) It might possibly be useful on some
9037 targets, but probably not.
9039 If you do not define this macro, the target-independent code in the
9040 C++ frontend will generate a less efficient heavyweight thunk that
9041 calls FUNCTION instead of jumping to it. The generic approach does
9042 not support varargs. */
9045 output_mi_thunk (file, thunk_fndecl, delta, function)
9047 tree thunk_fndecl ATTRIBUTE_UNUSED;
9051 const char *this_reg =
9052 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9055 const char *r0 = reg_names[0];
9056 const char *toc = reg_names[2];
9057 const char *schain = reg_names[11];
9058 const char *r12 = reg_names[12];
9060 static int labelno = 0;
9062 /* Small constants that can be done by one add instruction. */
9063 if (delta >= -32768 && delta <= 32767)
9065 if (! TARGET_NEW_MNEMONICS)
9066 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9068 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9071 /* Large constants that can be done by one addis instruction. */
9072 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9073 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9076 /* 32-bit constants that can be done by an add and addis instruction. */
9077 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9079 /* Break into two pieces, propagating the sign bit from the low
9080 word to the upper word. */
9081 int delta_high = delta >> 16;
9082 int delta_low = delta & 0xffff;
9083 if ((delta_low & 0x8000) != 0)
9086 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9089 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9092 if (! TARGET_NEW_MNEMONICS)
9093 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9095 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9098 /* 64-bit constants, fixme */
9102 /* Get the prefix in front of the names. */
9103 switch (DEFAULT_ABI)
9113 case ABI_AIX_NODESC:
9118 /* If the function is compiled in this module, jump to it directly.
9119 Otherwise, load up its address and jump to it. */
9121 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9123 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9124 && ! lookup_attribute ("longcall",
9125 TYPE_ATTRIBUTES (TREE_TYPE (function))))
9127 fprintf (file, "\tb %s", prefix);
9128 assemble_name (file, fname);
9129 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9135 switch (DEFAULT_ABI)
9141 /* Set up a TOC entry for the function. */
9142 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9144 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9147 if (TARGET_MINIMAL_TOC)
9148 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9151 fputs ("\t.tc ", file);
9152 assemble_name (file, fname);
9153 fputs ("[TC],", file);
9155 assemble_name (file, fname);
9158 if (TARGET_MINIMAL_TOC)
9159 asm_fprintf (file, (TARGET_32BIT)
9160 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9161 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9162 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9163 assemble_name (file, buf);
9164 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9165 fputs ("-(.LCTOC1)", file);
9166 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9168 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9172 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9175 asm_fprintf (file, "\tmtctr %s\n", r0);
9177 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9180 asm_fprintf (file, "\tbctr\n");
9183 case ABI_AIX_NODESC:
9185 fprintf (file, "\tb %s", prefix);
9186 assemble_name (file, fname);
9187 if (flag_pic) fputs ("@plt", file);
9193 fprintf (file, "\tb %s", prefix);
9194 if (flag_pic && !machopic_name_defined_p (fname))
9195 assemble_name (file, machopic_stub_name (fname));
9197 assemble_name (file, fname);
9206 /* A quick summary of the various types of 'constant-pool tables'
9209 Target Flags Name One table per
9210 AIX (none) AIX TOC object file
9211 AIX -mfull-toc AIX TOC object file
9212 AIX -mminimal-toc AIX minimal TOC translation unit
9213 SVR4/EABI (none) SVR4 SDATA object file
9214 SVR4/EABI -fpic SVR4 pic object file
9215 SVR4/EABI -fPIC SVR4 PIC translation unit
9216 SVR4/EABI -mrelocatable EABI TOC function
9217 SVR4/EABI -maix AIX TOC object file
9218 SVR4/EABI -maix -mminimal-toc
9219 AIX minimal TOC translation unit
9221 Name Reg. Set by entries contains:
9222 made by addrs? fp? sum?
9224 AIX TOC 2 crt0 as Y option option
9225 AIX minimal TOC 30 prolog gcc Y Y option
9226 SVR4 SDATA 13 crt0 gcc N Y N
9227 SVR4 pic 30 prolog ld Y not yet N
9228 SVR4 PIC 30 prolog gcc Y option option
9229 EABI TOC 30 prolog gcc Y option option
9233 /* Hash table stuff for keeping track of TOC entries. */
9235 struct toc_hash_struct
9237 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9238 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
9240 enum machine_mode key_mode;
9244 static htab_t toc_hash_table;
9246 /* Hash functions for the hash table. */
9249 rs6000_hash_constant (k)
9252 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9253 const char *format = GET_RTX_FORMAT (GET_CODE (k));
9254 int flen = strlen (format);
9257 if (GET_CODE (k) == LABEL_REF)
9258 return result * 1231 + X0INT (XEXP (k, 0), 3);
9260 if (GET_CODE (k) == CONST_DOUBLE)
9262 else if (GET_CODE (k) == CODE_LABEL)
9267 for (; fidx < flen; fidx++)
9268 switch (format[fidx])
9273 const char *str = XSTR (k, fidx);
9275 result = result * 613 + len;
9276 for (i = 0; i < len; i++)
9277 result = result * 613 + (unsigned) str[i];
9282 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9286 result = result * 613 + (unsigned) XINT (k, fidx);
9289 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9290 result = result * 613 + (unsigned) XWINT (k, fidx);
9294 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9295 result = result * 613 + (unsigned) (XWINT (k, fidx)
9306 toc_hash_function (hash_entry)
9307 const void * hash_entry;
9309 const struct toc_hash_struct *thc =
9310 (const struct toc_hash_struct *) hash_entry;
9311 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9314 /* Compare H1 and H2 for equivalence. */
9317 toc_hash_eq (h1, h2)
9321 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9322 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9324 if (((const struct toc_hash_struct *) h1)->key_mode
9325 != ((const struct toc_hash_struct *) h2)->key_mode)
9328 /* Gotcha: One of these const_doubles will be in memory.
9329 The other may be on the constant-pool chain.
9330 So rtx_equal_p will think they are different... */
9333 if (GET_CODE (r1) != GET_CODE (r2)
9334 || GET_MODE (r1) != GET_MODE (r2))
9336 if (GET_CODE (r1) == CONST_DOUBLE)
9338 int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9340 for (i = 1; i < format_len; i++)
9341 if (XWINT (r1, i) != XWINT (r2, i))
9346 else if (GET_CODE (r1) == LABEL_REF)
9347 return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9348 == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9350 return rtx_equal_p (r1, r2);
9353 /* Mark the hash table-entry HASH_ENTRY. */
9356 toc_hash_mark_entry (hash_slot, unused)
9358 void * unused ATTRIBUTE_UNUSED;
9360 const struct toc_hash_struct * hash_entry =
9361 *(const struct toc_hash_struct **) hash_slot;
9362 rtx r = hash_entry->key;
9363 ggc_set_mark (hash_entry);
9364 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
9365 if (GET_CODE (r) == LABEL_REF)
9368 ggc_set_mark (XEXP (r, 0));
9375 /* Mark all the elements of the TOC hash-table *HT. */
9378 toc_hash_mark_table (vht)
9383 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9386 /* These are the names given by the C++ front-end to vtables, and
9387 vtable-like objects. Ideally, this logic should not be here;
9388 instead, there should be some programmatic way of inquiring as
9389 to whether or not an object is a vtable. */
9391 #define VTABLE_NAME_P(NAME) \
9392 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
9393 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
9394 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
9395 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
9398 rs6000_output_symbol_ref (file, x)
9402 /* Currently C++ toc references to vtables can be emitted before it
9403 is decided whether the vtable is public or private. If this is
9404 the case, then the linker will eventually complain that there is
9405 a reference to an unknown section. Thus, for vtables only,
9406 we emit the TOC reference to reference the symbol and not the
9408 const char *name = XSTR (x, 0);
9410 if (VTABLE_NAME_P (name))
9412 RS6000_OUTPUT_BASENAME (file, name);
9415 assemble_name (file, name);
9418 /* Output a TOC entry. We derive the entry name from what is being
9422 output_toc (file, x, labelno, mode)
9426 enum machine_mode mode;
9429 const char *name = buf;
9430 const char *real_name;
9437 /* When the linker won't eliminate them, don't output duplicate
9438 TOC entries (this happens on AIX if there is any kind of TOC,
9439 and on SVR4 under -fPIC or -mrelocatable). */
9442 struct toc_hash_struct *h;
9445 h = ggc_alloc (sizeof (*h));
9448 h->labelno = labelno;
9450 found = htab_find_slot (toc_hash_table, h, 1);
9453 else /* This is indeed a duplicate.
9454 Set this label equal to that label. */
9456 fputs ("\t.set ", file);
9457 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
9458 fprintf (file, "%d,", labelno);
9459 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
9460 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9466 /* If we're going to put a double constant in the TOC, make sure it's
9467 aligned properly when strict alignment is on. */
9468 if (GET_CODE (x) == CONST_DOUBLE
9470 && GET_MODE_BITSIZE (mode) >= 64
9471 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
9472 ASM_OUTPUT_ALIGN (file, 3);
9475 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
9477 /* Handle FP constants specially. Note that if we have a minimal
9478 TOC, things we put here aren't actually in the TOC, so we can allow
9480 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
9485 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
9486 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
9490 if (TARGET_MINIMAL_TOC)
9491 fputs (DOUBLE_INT_ASM_OP, file);
9493 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
9494 fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
9499 if (TARGET_MINIMAL_TOC)
9500 fputs ("\t.long ", file);
9502 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
9503 fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
9507 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
9512 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
9513 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
9517 if (TARGET_MINIMAL_TOC)
9518 fputs (DOUBLE_INT_ASM_OP, file);
9520 fprintf (file, "\t.tc FS_%lx[TC],", l);
9521 fprintf (file, "0x%lx00000000\n", l);
9526 if (TARGET_MINIMAL_TOC)
9527 fputs ("\t.long ", file);
9529 fprintf (file, "\t.tc FS_%lx[TC],", l);
9530 fprintf (file, "0x%lx\n", l);
9534 else if (GET_MODE (x) == VOIDmode
9535 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
9537 unsigned HOST_WIDE_INT low;
9540 if (GET_CODE (x) == CONST_DOUBLE)
9542 low = CONST_DOUBLE_LOW (x);
9543 high = CONST_DOUBLE_HIGH (x);
9546 #if HOST_BITS_PER_WIDE_INT == 32
9549 high = (low & 0x80000000) ? ~0 : 0;
9553 low = INTVAL (x) & 0xffffffff;
9554 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
9558 /* TOC entries are always Pmode-sized, but since this
9559 is a bigendian machine then if we're putting smaller
9560 integer constants in the TOC we have to pad them.
9561 (This is still a win over putting the constants in
9562 a separate constant pool, because then we'd have
9563 to have both a TOC entry _and_ the actual constant.)
9565 For a 32-bit target, CONST_INT values are loaded and shifted
9566 entirely within `low' and can be stored in one TOC entry. */
9568 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
9569 abort ();/* It would be easy to make this work, but it doesn't now. */
9571 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
9572 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
9573 POINTER_SIZE, &low, &high, 0);
9577 if (TARGET_MINIMAL_TOC)
9578 fputs (DOUBLE_INT_ASM_OP, file);
9580 fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long)high, (long)low);
9581 fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
9586 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
9588 if (TARGET_MINIMAL_TOC)
9589 fputs ("\t.long ", file);
9591 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
9592 (long)high, (long)low);
9593 fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
9597 if (TARGET_MINIMAL_TOC)
9598 fputs ("\t.long ", file);
9600 fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
9601 fprintf (file, "0x%lx\n", (long) low);
9607 if (GET_CODE (x) == CONST)
9609 if (GET_CODE (XEXP (x, 0)) != PLUS)
9612 base = XEXP (XEXP (x, 0), 0);
9613 offset = INTVAL (XEXP (XEXP (x, 0), 1));
9616 if (GET_CODE (base) == SYMBOL_REF)
9617 name = XSTR (base, 0);
9618 else if (GET_CODE (base) == LABEL_REF)
9619 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
9620 else if (GET_CODE (base) == CODE_LABEL)
9621 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
9625 STRIP_NAME_ENCODING (real_name, name);
9626 if (TARGET_MINIMAL_TOC)
9627 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9630 fprintf (file, "\t.tc %s", real_name);
9633 fprintf (file, ".N%d", - offset);
9635 fprintf (file, ".P%d", offset);
9637 fputs ("[TC],", file);
9640 /* Currently C++ toc references to vtables can be emitted before it
9641 is decided whether the vtable is public or private. If this is
9642 the case, then the linker will eventually complain that there is
9643 a TOC reference to an unknown section. Thus, for vtables only,
9644 we emit the TOC reference to reference the symbol and not the
9646 if (VTABLE_NAME_P (name))
9648 RS6000_OUTPUT_BASENAME (file, name);
9650 fprintf (file, "%d", offset);
9651 else if (offset > 0)
9652 fprintf (file, "+%d", offset);
9655 output_addr_const (file, x);
9659 /* Output an assembler pseudo-op to write an ASCII string of N characters
9660 starting at P to FILE.
9662 On the RS/6000, we have to do this using the .byte operation and
9663 write out special characters outside the quoted string.
9664 Also, the assembler is broken; very long strings are truncated,
9665 so we must artificially break them up early. */
9668 output_ascii (file, p, n)
9674 int i, count_string;
9675 const char *for_string = "\t.byte \"";
9676 const char *for_decimal = "\t.byte ";
9677 const char *to_close = NULL;
9680 for (i = 0; i < n; i++)
9683 if (c >= ' ' && c < 0177)
9686 fputs (for_string, file);
9689 /* Write two quotes to get one. */
9697 for_decimal = "\"\n\t.byte ";
9701 if (count_string >= 512)
9703 fputs (to_close, file);
9705 for_string = "\t.byte \"";
9706 for_decimal = "\t.byte ";
9714 fputs (for_decimal, file);
9715 fprintf (file, "%d", c);
9717 for_string = "\n\t.byte \"";
9724 /* Now close the string if we have written one. Then end the line. */
9726 fputs (to_close, file);
9729 /* Generate a unique section name for FILENAME for a section type
9730 represented by SECTION_DESC. Output goes into BUF.
9732 SECTION_DESC can be any string, as long as it is different for each
9733 possible section type.
9735 We name the section in the same manner as xlc. The name begins with an
9736 underscore followed by the filename (after stripping any leading directory
9737 names) with the last period replaced by the string SECTION_DESC. If
9738 FILENAME does not contain a period, SECTION_DESC is appended to the end of
9742 rs6000_gen_section_name (buf, filename, section_desc)
9744 const char *filename;
9745 const char *section_desc;
9747 const char *q, *after_last_slash, *last_period = 0;
9751 after_last_slash = filename;
9752 for (q = filename; *q; q++)
9755 after_last_slash = q + 1;
9760 len = strlen (after_last_slash) + strlen (section_desc) + 2;
9761 *buf = (char *) permalloc (len);
9766 for (q = after_last_slash; *q; q++)
9768 if (q == last_period)
9770 strcpy (p, section_desc);
9771 p += strlen (section_desc);
9774 else if (ISALNUM (*q))
9778 if (last_period == 0)
9779 strcpy (p, section_desc);
9784 /* Emit profile function. */
9787 output_profile_hook (labelno)
9790 if (DEFAULT_ABI == ABI_AIX)
9793 const char *label_name;
9798 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
9799 STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
9800 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
9802 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
9805 else if (DEFAULT_ABI == ABI_DARWIN)
9807 const char *mcount_name = RS6000_MCOUNT;
9808 int caller_addr_regno = LINK_REGISTER_REGNUM;
9810 /* Be conservative and always set this, at least for now. */
9811 current_function_uses_pic_offset_table = 1;
9814 /* For PIC code, set up a stub and collect the caller's address
9815 from r0, which is where the prologue puts it. */
9818 mcount_name = machopic_stub_name (mcount_name);
9819 if (current_function_uses_pic_offset_table)
9820 caller_addr_regno = 0;
9823 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
9825 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
9829 /* Write function profiler code. */
9832 output_function_profiler (file, labelno)
9838 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
9839 switch (DEFAULT_ABI)
9845 case ABI_AIX_NODESC:
9846 fprintf (file, "\tmflr %s\n", reg_names[0]);
9849 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
9850 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
9851 reg_names[0], reg_names[1]);
9852 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
9853 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
9854 assemble_name (file, buf);
9855 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
9857 else if (flag_pic > 1)
9859 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
9860 reg_names[0], reg_names[1]);
9861 /* Now, we need to get the address of the label. */
9862 fputs ("\tbl 1f\n\t.long ", file);
9863 assemble_name (file, buf);
9864 fputs ("-.\n1:", file);
9865 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
9866 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9867 reg_names[0], reg_names[11]);
9868 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
9869 reg_names[0], reg_names[0], reg_names[11]);
9873 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
9874 assemble_name (file, buf);
9875 fputs ("@ha\n", file);
9876 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
9877 reg_names[0], reg_names[1]);
9878 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
9879 assemble_name (file, buf);
9880 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
9883 if (current_function_needs_context)
9884 asm_fprintf (file, "\tmr %s,%s\n",
9885 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
9886 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
9887 if (current_function_needs_context)
9888 asm_fprintf (file, "\tmr %s,%s\n",
9889 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
9894 /* Don't do anything, done in output_profile_hook (). */
9900 /* Adjust the cost of a scheduling dependency. Return the new cost of
9901 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
9904 rs6000_adjust_cost (insn, link, dep_insn, cost)
9907 rtx dep_insn ATTRIBUTE_UNUSED;
9910 if (! recog_memoized (insn))
9913 if (REG_NOTE_KIND (link) != 0)
9916 if (REG_NOTE_KIND (link) == 0)
9918 /* Data dependency; DEP_INSN writes a register that INSN reads
9919 some cycles later. */
9920 switch (get_attr_type (insn))
9923 /* Tell the first scheduling pass about the latency between
9924 a mtctr and bctr (and mtlr and br/blr). The first
9925 scheduling pass will not know about this latency since
9926 the mtctr instruction, which has the latency associated
9927 to it, will be generated by reload. */
9928 return TARGET_POWER ? 5 : 4;
9930 /* Leave some extra cycles between a compare and its
9931 dependent branch, to inhibit expensive mispredicts. */
9932 if ((rs6000_cpu_attr == CPU_PPC750
9933 || rs6000_cpu_attr == CPU_PPC7400
9934 || rs6000_cpu_attr == CPU_PPC7450)
9935 && recog_memoized (dep_insn)
9936 && (INSN_CODE (dep_insn) >= 0)
9937 && (get_attr_type (dep_insn) == TYPE_COMPARE
9938 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
9939 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
9940 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
9945 /* Fall out to return default cost. */
9951 /* A C statement (sans semicolon) to update the integer scheduling
9952 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
9953 INSN earlier, increase the priority to execute INSN later. Do not
9954 define this macro if you do not need to adjust the scheduling
9955 priorities of insns. */
9958 rs6000_adjust_priority (insn, priority)
9959 rtx insn ATTRIBUTE_UNUSED;
9962 /* On machines (like the 750) which have asymmetric integer units,
9963 where one integer unit can do multiply and divides and the other
9964 can't, reduce the priority of multiply/divide so it is scheduled
9965 before other integer operations. */
9968 if (! INSN_P (insn))
9971 if (GET_CODE (PATTERN (insn)) == USE)
9974 switch (rs6000_cpu_attr) {
9976 switch (get_attr_type (insn))
9983 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
9984 priority, priority);
9985 if (priority >= 0 && priority < 0x01000000)
9995 /* Return how many instructions the machine can issue per cycle. */
9998 rs6000_issue_rate ()
10000 switch (rs6000_cpu_attr) {
10001 case CPU_RIOS1: /* ? */
10003 case CPU_PPC601: /* ? */
10022 /* Length in units of the trampoline for entering a nested function. */
10025 rs6000_trampoline_size ()
10029 switch (DEFAULT_ABI)
10035 ret = (TARGET_32BIT) ? 12 : 24;
10040 case ABI_AIX_NODESC:
10041 ret = (TARGET_32BIT) ? 40 : 48;
10048 /* Emit RTL insns to initialize the variable parts of a trampoline.
10049 FNADDR is an RTX for the address of the function's pure code.
10050 CXT is an RTX for the static chain value for the function. */
10053 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10058 enum machine_mode pmode = Pmode;
10059 int regsize = (TARGET_32BIT) ? 4 : 8;
10060 rtx ctx_reg = force_reg (pmode, cxt);
10062 switch (DEFAULT_ABI)
10067 /* Macros to shorten the code expansions below. */
10068 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10069 #define MEM_PLUS(addr,offset) \
10070 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10072 /* Under AIX, just build the 3 word function descriptor */
10075 rtx fn_reg = gen_reg_rtx (pmode);
10076 rtx toc_reg = gen_reg_rtx (pmode);
10077 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10078 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10079 emit_move_insn (MEM_DEREF (addr), fn_reg);
10080 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10081 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10085 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10088 case ABI_AIX_NODESC:
10089 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10090 FALSE, VOIDmode, 4,
10092 GEN_INT (rs6000_trampoline_size ()), SImode,
10102 /* Table of valid machine attributes. */
10104 const struct attribute_spec rs6000_attribute_table[] =
10106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10107 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10108 { NULL, 0, 0, false, false, false, NULL }
10111 /* Handle a "longcall" attribute; arguments as in struct
10112 attribute_spec.handler. */
10115 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10118 tree args ATTRIBUTE_UNUSED;
10119 int flags ATTRIBUTE_UNUSED;
10120 bool *no_add_attrs;
10122 if (TREE_CODE (*node) != FUNCTION_TYPE
10123 && TREE_CODE (*node) != FIELD_DECL
10124 && TREE_CODE (*node) != TYPE_DECL)
10126 warning ("`%s' attribute only applies to functions",
10127 IDENTIFIER_POINTER (name));
10128 *no_add_attrs = true;
10134 /* Return a reference suitable for calling a function with the
10135 longcall attribute. */
10138 rs6000_longcall_ref (call_ref)
10141 const char *call_name;
10144 if (GET_CODE (call_ref) != SYMBOL_REF)
10147 /* System V adds '.' to the internal name, so skip them. */
10148 call_name = XSTR (call_ref, 0);
10149 if (*call_name == '.')
10151 while (*call_name == '.')
10154 node = get_identifier (call_name);
10155 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10158 return force_reg (Pmode, call_ref);
10162 /* A C statement or statements to switch to the appropriate section
10163 for output of RTX in mode MODE. You can assume that RTX is some
10164 kind of constant in RTL. The argument MODE is redundant except in
10165 the case of a `const_int' rtx. Select the section by calling
10166 `text_section' or one of the alternatives for other sections.
10168 Do not define this macro if you put all constants in the read-only
10171 #ifdef USING_ELFOS_H
10174 rs6000_select_rtx_section (mode, x)
10175 enum machine_mode mode;
10178 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10184 /* A C statement or statements to switch to the appropriate
10185 section for output of DECL. DECL is either a `VAR_DECL' node
10186 or a constant of some sort. RELOC indicates whether forming
10187 the initial value of DECL requires link-time relocations. */
10190 rs6000_select_section (decl, reloc)
10194 int size = int_size_in_bytes (TREE_TYPE (decl));
10197 static void (* const sec_funcs[4]) PARAMS ((void)) = {
10204 needs_sdata = (size > 0
10205 && size <= g_switch_value
10206 && rs6000_sdata != SDATA_NONE
10207 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10209 if (TREE_CODE (decl) == STRING_CST)
10210 readonly = ! flag_writable_strings;
10211 else if (TREE_CODE (decl) == VAR_DECL)
10212 readonly = (! (flag_pic && reloc)
10213 && TREE_READONLY (decl)
10214 && ! TREE_SIDE_EFFECTS (decl)
10215 && DECL_INITIAL (decl)
10216 && DECL_INITIAL (decl) != error_mark_node
10217 && TREE_CONSTANT (DECL_INITIAL (decl)));
10218 else if (TREE_CODE (decl) == CONSTRUCTOR)
10219 readonly = (! (flag_pic && reloc)
10220 && ! TREE_SIDE_EFFECTS (decl)
10221 && TREE_CONSTANT (decl));
10224 if (needs_sdata && rs6000_sdata != SDATA_EABI)
10227 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10230 /* A C statement to build up a unique section name, expressed as a
10231 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10232 RELOC indicates whether the initial value of EXP requires
10233 link-time relocations. If you do not define this macro, GCC will use
10234 the symbol name prefixed by `.' as the section name. Note - this
10235 macro can now be called for uninitialized data items as well as
10236 initialised data and functions. */
10239 rs6000_unique_section (decl, reloc)
10247 const char *prefix;
10249 static const char *const prefixes[7][2] =
10251 { ".rodata.", ".gnu.linkonce.r." },
10252 { ".sdata2.", ".gnu.linkonce.s2." },
10253 { ".data.", ".gnu.linkonce.d." },
10254 { ".sdata.", ".gnu.linkonce.s." },
10255 { ".bss.", ".gnu.linkonce.b." },
10256 { ".sbss.", ".gnu.linkonce.sb." },
10257 { ".text.", ".gnu.linkonce.t." }
10260 if (TREE_CODE (decl) == FUNCTION_DECL)
10269 if (TREE_CODE (decl) == STRING_CST)
10270 readonly = ! flag_writable_strings;
10271 else if (TREE_CODE (decl) == VAR_DECL)
10272 readonly = (! (flag_pic && reloc)
10273 && TREE_READONLY (decl)
10274 && ! TREE_SIDE_EFFECTS (decl)
10275 && TREE_CONSTANT (DECL_INITIAL (decl)));
10277 size = int_size_in_bytes (TREE_TYPE (decl));
10278 needs_sdata = (size > 0
10279 && size <= g_switch_value
10280 && rs6000_sdata != SDATA_NONE
10281 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10283 if (DECL_INITIAL (decl) == 0
10284 || DECL_INITIAL (decl) == error_mark_node)
10286 else if (! readonly)
10293 /* .sdata2 is only for EABI. */
10294 if (sec == 0 && rs6000_sdata != SDATA_EABI)
10300 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10301 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10302 len = strlen (name) + strlen (prefix);
10303 string = alloca (len + 1);
10305 sprintf (string, "%s%s", prefix, name);
10307 DECL_SECTION_NAME (decl) = build_string (len, string);
10311 /* If we are referencing a function that is static or is known to be
10312 in this file, make the SYMBOL_REF special. We can use this to indicate
10313 that we can branch to this function without emitting a no-op after the
10314 call. For real AIX calling sequences, we also replace the
10315 function name with the real name (1 or 2 leading .'s), rather than
10316 the function descriptor name. This saves a lot of overriding code
10317 to read the prefixes. */
10320 rs6000_encode_section_info (decl)
10323 if (TREE_CODE (decl) == FUNCTION_DECL)
10325 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10326 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
10327 && ! DECL_WEAK (decl))
10328 SYMBOL_REF_FLAG (sym_ref) = 1;
10330 if (DEFAULT_ABI == ABI_AIX)
10332 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
10333 size_t len2 = strlen (XSTR (sym_ref, 0));
10334 char *str = alloca (len1 + len2 + 1);
10337 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
10339 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
10342 else if (rs6000_sdata != SDATA_NONE
10343 && DEFAULT_ABI == ABI_V4
10344 && TREE_CODE (decl) == VAR_DECL)
10346 int size = int_size_in_bytes (TREE_TYPE (decl));
10347 tree section_name = DECL_SECTION_NAME (decl);
10348 const char *name = (char *)0;
10353 if (TREE_CODE (section_name) == STRING_CST)
10355 name = TREE_STRING_POINTER (section_name);
10356 len = TREE_STRING_LENGTH (section_name);
10362 if ((size > 0 && size <= g_switch_value)
10364 && ((len == sizeof (".sdata") - 1
10365 && strcmp (name, ".sdata") == 0)
10366 || (len == sizeof (".sdata2") - 1
10367 && strcmp (name, ".sdata2") == 0)
10368 || (len == sizeof (".sbss") - 1
10369 && strcmp (name, ".sbss") == 0)
10370 || (len == sizeof (".sbss2") - 1
10371 && strcmp (name, ".sbss2") == 0)
10372 || (len == sizeof (".PPC.EMB.sdata0") - 1
10373 && strcmp (name, ".PPC.EMB.sdata0") == 0)
10374 || (len == sizeof (".PPC.EMB.sbss0") - 1
10375 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
10377 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10378 size_t len = strlen (XSTR (sym_ref, 0));
10379 char *str = alloca (len + 2);
10382 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
10383 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
10388 #endif /* USING_ELFOS_H */
10391 /* Return a REG that occurs in ADDR with coefficient 1.
10392 ADDR can be effectively incremented by incrementing REG.
10394 r0 is special and we must not select it as an address
10395 register by this routine since our caller will try to
10396 increment the returned register via an "la" instruction. */
10399 find_addr_reg (addr)
10402 while (GET_CODE (addr) == PLUS)
10404 if (GET_CODE (XEXP (addr, 0)) == REG
10405 && REGNO (XEXP (addr, 0)) != 0)
10406 addr = XEXP (addr, 0);
10407 else if (GET_CODE (XEXP (addr, 1)) == REG
10408 && REGNO (XEXP (addr, 1)) != 0)
10409 addr = XEXP (addr, 1);
10410 else if (CONSTANT_P (XEXP (addr, 0)))
10411 addr = XEXP (addr, 1);
10412 else if (CONSTANT_P (XEXP (addr, 1)))
10413 addr = XEXP (addr, 0);
10417 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
10423 rs6000_fatal_bad_address (op)
10426 fatal_insn ("bad address", op);
10429 /* Called to register all of our global variables with the garbage
10433 rs6000_add_gc_roots ()
10435 ggc_add_rtx_root (&rs6000_compare_op0, 1);
10436 ggc_add_rtx_root (&rs6000_compare_op1, 1);
10438 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
10439 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
10440 toc_hash_mark_table);
10443 machopic_add_gc_roots ();
10450 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
10451 reference and a constant. */
10454 symbolic_operand (op)
10457 switch (GET_CODE (op))
10464 return (GET_CODE (op) == SYMBOL_REF ||
10465 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
10466 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
10467 && GET_CODE (XEXP (op, 1)) == CONST_INT);
10474 #ifdef RS6000_LONG_BRANCH
10476 static tree stub_list = 0;
10478 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
10479 procedure calls to the linked list. */
10482 add_compiler_stub (label_name, function_name, line_number)
10484 tree function_name;
10487 tree stub = build_tree_list (function_name, label_name);
10488 TREE_TYPE (stub) = build_int_2 (line_number, 0);
10489 TREE_CHAIN (stub) = stub_list;
10493 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
10494 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
10495 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
10497 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
10498 handling procedure calls from the linked list and initializes the
10502 output_compiler_stub ()
10505 char label_buf[256];
10507 tree tmp_stub, stub;
10510 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10512 fprintf (asm_out_file,
10513 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
10515 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
10516 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
10517 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
10518 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
10520 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
10522 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
10525 label_buf[0] = '_';
10526 strcpy (label_buf+1,
10527 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
10530 strcpy (tmp_buf, "lis r12,hi16(");
10531 strcat (tmp_buf, label_buf);
10532 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
10533 strcat (tmp_buf, label_buf);
10534 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
10535 output_asm_insn (tmp_buf, 0);
10537 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
10538 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
10539 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
10540 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
10546 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
10547 already there or not. */
10550 no_previous_def (function_name)
10551 tree function_name;
10554 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10555 if (function_name == STUB_FUNCTION_NAME (stub))
10560 /* GET_PREV_LABEL gets the label name from the previous definition of
10564 get_prev_label (function_name)
10565 tree function_name;
10568 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10569 if (function_name == STUB_FUNCTION_NAME (stub))
10570 return STUB_LABEL_NAME (stub);
10574 /* INSN is either a function call or a millicode call. It may have an
10575 unconditional jump in its delay slot.
10577 CALL_DEST is the routine we are calling. */
10580 output_call (insn, call_dest, operand_number)
10583 int operand_number;
10585 static char buf[256];
10586 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
10589 tree funname = get_identifier (XSTR (call_dest, 0));
10591 if (no_previous_def (funname))
10594 rtx label_rtx = gen_label_rtx ();
10595 char *label_buf, temp_buf[256];
10596 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
10597 CODE_LABEL_NUMBER (label_rtx));
10598 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
10599 labelname = get_identifier (label_buf);
10600 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
10602 line_number = NOTE_LINE_NUMBER (insn);
10603 add_compiler_stub (labelname, funname, line_number);
10606 labelname = get_prev_label (funname);
10608 sprintf (buf, "jbsr %%z%d,%.246s",
10609 operand_number, IDENTIFIER_POINTER (labelname));
10614 sprintf (buf, "bl %%z%d", operand_number);
10619 #endif /* RS6000_LONG_BRANCH */
10621 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
10623 const char *const symbol_ = (SYMBOL); \
10624 char *buffer_ = (BUF); \
10625 if (symbol_[0] == '"') \
10627 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
10629 else if (name_needs_quotes(symbol_)) \
10631 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
10635 sprintf(buffer_, "L%d$%s", (N), symbol_); \
10640 /* Generate PIC and indirect symbol stubs. */
10643 machopic_output_stub (file, symb, stub)
10645 const char *symb, *stub;
10647 unsigned int length;
10648 char *symbol_name, *lazy_ptr_name;
10649 char *local_label_0;
10650 static int label = 0;
10652 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
10653 STRIP_NAME_ENCODING (symb, symb);
10657 length = strlen (symb);
10658 symbol_name = alloca (length + 32);
10659 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
10661 lazy_ptr_name = alloca (length + 32);
10662 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
10664 local_label_0 = alloca (length + 32);
10665 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
10668 machopic_picsymbol_stub_section ();
10670 machopic_symbol_stub_section ();
10672 fprintf (file, "%s:\n", stub);
10673 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
10677 fprintf (file, "\tmflr r0\n");
10678 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
10679 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
10680 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
10681 lazy_ptr_name, local_label_0);
10682 fprintf (file, "\tmtlr r0\n");
10683 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
10684 lazy_ptr_name, local_label_0);
10685 fprintf (file, "\tmtctr r12\n");
10686 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
10687 lazy_ptr_name, local_label_0);
10688 fprintf (file, "\tbctr\n");
10691 fprintf (file, "non-pure not supported\n");
10693 machopic_lazy_symbol_ptr_section ();
10694 fprintf (file, "%s:\n", lazy_ptr_name);
10695 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
10696 fprintf (file, "\t.long dyld_stub_binding_helper\n");
10699 /* Legitimize PIC addresses. If the address is already
10700 position-independent, we return ORIG. Newly generated
10701 position-independent addresses go into a reg. This is REG if non
10702 zero, otherwise we allocate register(s) as necessary. */
10704 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
10707 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
10709 enum machine_mode mode;
10714 if (reg == NULL && ! reload_in_progress && ! reload_completed)
10715 reg = gen_reg_rtx (Pmode);
10717 if (GET_CODE (orig) == CONST)
10719 if (GET_CODE (XEXP (orig, 0)) == PLUS
10720 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
10723 if (GET_CODE (XEXP (orig, 0)) == PLUS)
10726 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
10729 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
10735 if (GET_CODE (offset) == CONST_INT)
10737 if (SMALL_INT (offset))
10738 return plus_constant (base, INTVAL (offset));
10739 else if (! reload_in_progress && ! reload_completed)
10740 offset = force_reg (Pmode, offset);
10743 rtx mem = force_const_mem (Pmode, orig);
10744 return machopic_legitimize_pic_address (mem, Pmode, reg);
10747 return gen_rtx (PLUS, Pmode, base, offset);
10750 /* Fall back on generic machopic code. */
10751 return machopic_legitimize_pic_address (orig, mode, reg);
10754 /* This is just a placeholder to make linking work without having to
10755 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
10756 ever needed for Darwin (not too likely!) this would have to get a
10757 real definition. */
10764 #endif /* TARGET_MACHO */
10767 static unsigned int
10768 rs6000_elf_section_type_flags (decl, name, reloc)
10773 unsigned int flags = default_section_type_flags (decl, name, reloc);
10775 if (TARGET_RELOCATABLE)
10776 flags |= SECTION_WRITE;
10781 /* Record an element in the table of global constructors. SYMBOL is
10782 a SYMBOL_REF of the function to be called; PRIORITY is a number
10783 between 0 and MAX_INIT_PRIORITY.
10785 This differs from default_named_section_asm_out_constructor in
10786 that we have special handling for -mrelocatable. */
10789 rs6000_elf_asm_out_constructor (symbol, priority)
10793 const char *section = ".ctors";
10796 if (priority != DEFAULT_INIT_PRIORITY)
10798 sprintf (buf, ".ctors.%.5u",
10799 /* Invert the numbering so the linker puts us in the proper
10800 order; constructors are run from right to left, and the
10801 linker sorts in increasing order. */
10802 MAX_INIT_PRIORITY - priority);
10806 named_section_flags (section, SECTION_WRITE);
10807 assemble_align (POINTER_SIZE);
10809 if (TARGET_RELOCATABLE)
10811 fputs ("\t.long (", asm_out_file);
10812 output_addr_const (asm_out_file, symbol);
10813 fputs (")@fixup\n", asm_out_file);
10816 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
10820 rs6000_elf_asm_out_destructor (symbol, priority)
10824 const char *section = ".dtors";
10827 if (priority != DEFAULT_INIT_PRIORITY)
10829 sprintf (buf, ".dtors.%.5u",
10830 /* Invert the numbering so the linker puts us in the proper
10831 order; constructors are run from right to left, and the
10832 linker sorts in increasing order. */
10833 MAX_INIT_PRIORITY - priority);
10837 named_section_flags (section, SECTION_WRITE);
10838 assemble_align (POINTER_SIZE);
10840 if (TARGET_RELOCATABLE)
10842 fputs ("\t.long (", asm_out_file);
10843 output_addr_const (asm_out_file, symbol);
10844 fputs (")@fixup\n", asm_out_file);
10847 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
10851 #ifdef OBJECT_FORMAT_COFF
10853 xcoff_asm_named_section (name, flags)
10855 unsigned int flags ATTRIBUTE_UNUSED;
10857 fprintf (asm_out_file, "\t.csect %s\n", name);