/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* Simple min/max helpers; arguments must be side-effect free (each is
   evaluated twice).  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Whether VRSAVE instructions should be generated. */
78 int rs6000_altivec_vrsave;
80 /* String from -mvrsave= option. */
81 const char *rs6000_altivec_vrsave_string;
83 /* Set to non-zero once AIX common-mode calls have been defined. */
84 static int common_mode_defined;
86 /* Save information from a "cmpxx" operation until the branch or scc is
88 rtx rs6000_compare_op0, rs6000_compare_op1;
89 int rs6000_compare_fp_p;
91 /* Label number of label created for -mrelocatable, to call to so we can
92 get the address of the GOT section */
93 int rs6000_pic_labelno;
96 /* Which abi to adhere to */
97 const char *rs6000_abi_name = RS6000_ABI_NAME;
99 /* Semantics of the small data area */
100 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
102 /* Which small data model to use */
103 const char *rs6000_sdata_name = (char *)0;
105 /* Counter for labels which are to be placed in .fixup. */
106 int fixuplabelno = 0;
109 /* ABI enumeration available for subtarget to use. */
110 enum rs6000_abi rs6000_current_abi;
112 /* ABI string from -mabi= option. */
113 const char *rs6000_abi_string;
116 const char *rs6000_debug_name;
117 int rs6000_debug_stack; /* debug stack applications */
118 int rs6000_debug_arg; /* debug argument handling */
120 /* Flag to say the TOC is initialized */
122 char toc_label_name[10];
124 /* Alias set for saves and restores from the rs6000 stack. */
125 static int rs6000_sr_alias_set;
127 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
128 The only place that looks at this is rs6000_set_default_type_attributes;
129 everywhere else should rely on the presence or absence of a longcall
130 attribute on the function declaration. */
131 int rs6000_default_long_calls;
132 const char *rs6000_longcall_switch;
134 static void rs6000_add_gc_roots PARAMS ((void));
135 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
136 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
137 static void validate_condition_mode
138 PARAMS ((enum rtx_code, enum machine_mode));
139 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
140 static void rs6000_maybe_dead PARAMS ((rtx));
141 static void rs6000_emit_stack_tie PARAMS ((void));
142 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
143 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
144 unsigned int, int, int));
145 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
146 static unsigned rs6000_hash_constant PARAMS ((rtx));
147 static unsigned toc_hash_function PARAMS ((const void *));
148 static int toc_hash_eq PARAMS ((const void *, const void *));
149 static int toc_hash_mark_entry PARAMS ((void **, void *));
150 static void toc_hash_mark_table PARAMS ((void *));
151 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
152 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
153 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
154 static int rs6000_ra_ever_killed PARAMS ((void));
155 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
156 const struct attribute_spec rs6000_attribute_table[];
157 static void rs6000_set_default_type_attributes PARAMS ((tree));
158 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
159 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
160 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
161 HOST_WIDE_INT, HOST_WIDE_INT));
163 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
165 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
166 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
167 static void rs6000_elf_select_section PARAMS ((tree, int,
168 unsigned HOST_WIDE_INT));
169 static void rs6000_elf_unique_section PARAMS ((tree, int));
170 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
171 unsigned HOST_WIDE_INT));
172 static void rs6000_elf_encode_section_info PARAMS ((tree, int));
173 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
176 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
177 static void rs6000_xcoff_select_section PARAMS ((tree, int,
178 unsigned HOST_WIDE_INT));
179 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
180 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
181 unsigned HOST_WIDE_INT));
182 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
184 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
186 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
187 static int rs6000_adjust_priority PARAMS ((rtx, int));
188 static int rs6000_issue_rate PARAMS ((void));
190 static void rs6000_init_builtins PARAMS ((void));
191 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
192 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
193 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
194 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
195 static void altivec_init_builtins PARAMS ((void));
196 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
197 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
198 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
199 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
200 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
201 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
202 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
203 static void rs6000_parse_abi_options PARAMS ((void));
204 static void rs6000_parse_vrsave_option PARAMS ((void));
205 static int first_altivec_reg_to_save PARAMS ((void));
206 static unsigned int compute_vrsave_mask PARAMS ((void));
207 static void is_altivec_return_reg PARAMS ((rtx, void *));
208 int vrsave_operation PARAMS ((rtx, enum machine_mode));
209 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
210 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
211 static int easy_vector_constant PARAMS ((rtx));
/* Default register names.  Indexed by hard register number; entries are
   the bare numbers/names emitted in assembly when -mregnames is not used.
   Order: 32 GPRs, 32 FPRs, mq/lr/ctr/ap, 8 CR fields, xer,
   32 AltiVec registers, vrsave.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave"
};
#ifdef TARGET_REGNAMES
/* Alternate (symbolic) register names, copied over rs6000_reg_names when
   the user asks for -mregnames.  Same ordering as rs6000_reg_names.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave"
};
#endif
258 #ifndef MASK_STRICT_ALIGN
259 #define MASK_STRICT_ALIGN 0
262 /* Initialize the GCC target structure. */
263 #undef TARGET_ATTRIBUTE_TABLE
264 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
265 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
266 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
268 #undef TARGET_ASM_ALIGNED_DI_OP
269 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
271 /* Default unaligned ops are only provided for ELF. Find the ops needed
272 for non-ELF systems. */
273 #ifndef OBJECT_FORMAT_ELF
275 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
277 #undef TARGET_ASM_UNALIGNED_HI_OP
278 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
279 #undef TARGET_ASM_UNALIGNED_SI_OP
280 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
281 #undef TARGET_ASM_UNALIGNED_DI_OP
282 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
285 #undef TARGET_ASM_UNALIGNED_HI_OP
286 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
287 #undef TARGET_ASM_UNALIGNED_SI_OP
288 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
292 /* This hook deals with fixups for relocatable code and DI-mode objects
294 #undef TARGET_ASM_INTEGER
295 #define TARGET_ASM_INTEGER rs6000_assemble_integer
297 #undef TARGET_ASM_FUNCTION_PROLOGUE
298 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
299 #undef TARGET_ASM_FUNCTION_EPILOGUE
300 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
303 #undef TARGET_SECTION_TYPE_FLAGS
304 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
307 #undef TARGET_SCHED_ISSUE_RATE
308 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
309 #undef TARGET_SCHED_ADJUST_COST
310 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
311 #undef TARGET_SCHED_ADJUST_PRIORITY
312 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
314 #undef TARGET_INIT_BUILTINS
315 #define TARGET_INIT_BUILTINS rs6000_init_builtins
317 #undef TARGET_EXPAND_BUILTIN
318 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
320 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
321 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
323 struct gcc_target targetm = TARGET_INITIALIZER;
325 /* Override command line options. Mostly we process the processor
326 type and sometimes adjust other TARGET_ options. */
329 rs6000_override_options (default_cpu)
330 const char *default_cpu;
333 struct rs6000_cpu_select *ptr;
335 /* Simplify the entries below by making a mask for any POWER
336 variant and any PowerPC variant. */
338 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
339 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
340 | MASK_PPC_GFXOPT | MASK_POWERPC64)
341 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
345 const char *const name; /* Canonical processor name. */
346 const enum processor_type processor; /* Processor type enum value. */
347 const int target_enable; /* Target flags to enable. */
348 const int target_disable; /* Target flags to disable. */
349 } const processor_target_table[]
350 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
351 POWER_MASKS | POWERPC_MASKS},
352 {"power", PROCESSOR_POWER,
353 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
354 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
355 {"power2", PROCESSOR_POWER,
356 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
357 POWERPC_MASKS | MASK_NEW_MNEMONICS},
358 {"power3", PROCESSOR_PPC630,
359 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
360 POWER_MASKS | MASK_PPC_GPOPT},
361 {"power4", PROCESSOR_POWER4,
362 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
363 POWER_MASKS | MASK_PPC_GPOPT},
364 {"powerpc", PROCESSOR_POWERPC,
365 MASK_POWERPC | MASK_NEW_MNEMONICS,
366 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
367 {"powerpc64", PROCESSOR_POWERPC64,
368 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
369 POWER_MASKS | POWERPC_OPT_MASKS},
370 {"rios", PROCESSOR_RIOS1,
371 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
372 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
373 {"rios1", PROCESSOR_RIOS1,
374 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
375 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
376 {"rsc", PROCESSOR_PPC601,
377 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
378 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
379 {"rsc1", PROCESSOR_PPC601,
380 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
381 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
382 {"rios2", PROCESSOR_RIOS2,
383 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
384 POWERPC_MASKS | MASK_NEW_MNEMONICS},
385 {"rs64a", PROCESSOR_RS64A,
386 MASK_POWERPC | MASK_NEW_MNEMONICS,
387 POWER_MASKS | POWERPC_OPT_MASKS},
388 {"401", PROCESSOR_PPC403,
389 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
390 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
391 {"403", PROCESSOR_PPC403,
392 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
393 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
394 {"405", PROCESSOR_PPC405,
395 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
396 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
397 {"505", PROCESSOR_MPCCORE,
398 MASK_POWERPC | MASK_NEW_MNEMONICS,
399 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
400 {"601", PROCESSOR_PPC601,
401 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
402 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
403 {"602", PROCESSOR_PPC603,
404 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
405 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
406 {"603", PROCESSOR_PPC603,
407 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
408 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
409 {"603e", PROCESSOR_PPC603,
410 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
411 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
412 {"ec603e", PROCESSOR_PPC603,
413 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
414 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
415 {"604", PROCESSOR_PPC604,
416 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
417 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
418 {"604e", PROCESSOR_PPC604e,
419 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
420 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
421 {"620", PROCESSOR_PPC620,
422 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
423 POWER_MASKS | MASK_PPC_GPOPT},
424 {"630", PROCESSOR_PPC630,
425 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
426 POWER_MASKS | MASK_PPC_GPOPT},
427 {"740", PROCESSOR_PPC750,
428 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
429 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
430 {"750", PROCESSOR_PPC750,
431 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
432 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
433 {"7400", PROCESSOR_PPC7400,
434 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
435 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
436 {"7450", PROCESSOR_PPC7450,
437 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
438 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
439 {"801", PROCESSOR_MPCCORE,
440 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
441 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
442 {"821", PROCESSOR_MPCCORE,
443 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
444 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
445 {"823", PROCESSOR_MPCCORE,
446 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
447 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
448 {"860", PROCESSOR_MPCCORE,
449 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
450 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
452 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
454 /* Save current -mmultiple/-mno-multiple status. */
455 int multiple = TARGET_MULTIPLE;
456 /* Save current -mstring/-mno-string status. */
457 int string = TARGET_STRING;
459 /* Identify the processor type. */
460 rs6000_select[0].string = default_cpu;
461 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
463 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
465 ptr = &rs6000_select[i];
466 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
468 for (j = 0; j < ptt_size; j++)
469 if (! strcmp (ptr->string, processor_target_table[j].name))
472 rs6000_cpu = processor_target_table[j].processor;
476 target_flags |= processor_target_table[j].target_enable;
477 target_flags &= ~processor_target_table[j].target_disable;
483 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
487 /* If we are optimizing big endian systems for space, use the store
488 multiple instructions. */
489 if (BYTES_BIG_ENDIAN && optimize_size)
490 target_flags |= MASK_MULTIPLE;
492 /* If -mmultiple or -mno-multiple was explicitly used, don't
493 override with the processor default */
494 if (TARGET_MULTIPLE_SET)
495 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
497 /* If -mstring or -mno-string was explicitly used, don't override
498 with the processor default. */
499 if (TARGET_STRING_SET)
500 target_flags = (target_flags & ~MASK_STRING) | string;
502 /* Don't allow -mmultiple or -mstring on little endian systems
503 unless the cpu is a 750, because the hardware doesn't support the
504 instructions used in little endian mode, and causes an alignment
505 trap. The 750 does not cause an alignment trap (except when the
506 target is unaligned). */
508 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
512 target_flags &= ~MASK_MULTIPLE;
513 if (TARGET_MULTIPLE_SET)
514 warning ("-mmultiple is not supported on little endian systems");
519 target_flags &= ~MASK_STRING;
520 if (TARGET_STRING_SET)
521 warning ("-mstring is not supported on little endian systems");
525 if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
530 warning ("-f%s ignored (all code is position independent)",
531 (flag_pic > 1) ? "PIC" : "pic");
534 #ifdef XCOFF_DEBUGGING_INFO
535 if (flag_function_sections && (write_symbols != NO_DEBUG)
536 && DEFAULT_ABI == ABI_AIX)
538 warning ("-ffunction-sections disabled on AIX when debugging");
539 flag_function_sections = 0;
542 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
544 warning ("-fdata-sections not supported on AIX");
545 flag_data_sections = 0;
549 /* Set debug flags */
550 if (rs6000_debug_name)
552 if (! strcmp (rs6000_debug_name, "all"))
553 rs6000_debug_stack = rs6000_debug_arg = 1;
554 else if (! strcmp (rs6000_debug_name, "stack"))
555 rs6000_debug_stack = 1;
556 else if (! strcmp (rs6000_debug_name, "arg"))
557 rs6000_debug_arg = 1;
559 error ("unknown -mdebug-%s switch", rs6000_debug_name);
562 /* Set size of long double */
563 rs6000_long_double_type_size = 64;
564 if (rs6000_long_double_size_string)
567 int size = strtol (rs6000_long_double_size_string, &tail, 10);
568 if (*tail != '\0' || (size != 64 && size != 128))
569 error ("Unknown switch -mlong-double-%s",
570 rs6000_long_double_size_string);
572 rs6000_long_double_type_size = size;
575 /* Handle -mabi= options. */
576 rs6000_parse_abi_options ();
578 /* Handle -mvrsave= option. */
579 rs6000_parse_vrsave_option ();
581 #ifdef SUBTARGET_OVERRIDE_OPTIONS
582 SUBTARGET_OVERRIDE_OPTIONS;
584 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
585 SUBSUBTARGET_OVERRIDE_OPTIONS;
588 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
589 using TARGET_OPTIONS to handle a toggle switch, but we're out of
590 bits in target_flags so TARGET_SWITCHES cannot be used.
591 Assumption here is that rs6000_longcall_switch points into the
592 text of the complete option, rather than being a copy, so we can
593 scan back for the presence or absence of the no- modifier. */
594 if (rs6000_longcall_switch)
596 const char *base = rs6000_longcall_switch;
597 while (base[-1] != 'm') base--;
599 if (*rs6000_longcall_switch != '\0')
600 error ("invalid option `%s'", base);
601 rs6000_default_long_calls = (base[0] != 'n');
604 #ifdef TARGET_REGNAMES
605 /* If the user desires alternate register names, copy in the
606 alternate names now. */
608 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
611 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
612 If -maix-struct-return or -msvr4-struct-return was explicitly
613 used, don't override with the ABI default. */
614 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
616 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
617 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
619 target_flags |= MASK_AIX_STRUCT_RET;
622 /* Register global variables with the garbage collector. */
623 rs6000_add_gc_roots ();
625 /* Allocate an alias set for register saves & restores from stack. */
626 rs6000_sr_alias_set = new_alias_set ();
629 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
631 /* We can only guarantee the availability of DI pseudo-ops when
632 assembling for 64-bit targets. */
635 targetm.asm_out.aligned_op.di = NULL;
636 targetm.asm_out.unaligned_op.di = NULL;
639 /* Arrange to save and restore machine status around nested functions. */
640 init_machine_status = rs6000_init_machine_status;
643 /* Handle -mvrsave= options. */
645 rs6000_parse_vrsave_option ()
647 /* Generate VRSAVE instructions by default. */
648 if (rs6000_altivec_vrsave_string == 0
649 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
650 rs6000_altivec_vrsave = 1;
651 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
652 rs6000_altivec_vrsave = 0;
654 error ("unknown -mvrsave= option specified: '%s'",
655 rs6000_altivec_vrsave_string);
658 /* Handle -mabi= options. */
660 rs6000_parse_abi_options ()
662 if (rs6000_abi_string == 0)
664 else if (! strcmp (rs6000_abi_string, "altivec"))
665 rs6000_altivec_abi = 1;
666 else if (! strcmp (rs6000_abi_string, "no-altivec"))
667 rs6000_altivec_abi = 0;
669 error ("unknown ABI specified: '%s'", rs6000_abi_string);
673 optimization_options (level, size)
674 int level ATTRIBUTE_UNUSED;
675 int size ATTRIBUTE_UNUSED;
679 /* Do anything needed at the start of the asm file. */
682 rs6000_file_start (file, default_cpu)
684 const char *default_cpu;
688 const char *start = buffer;
689 struct rs6000_cpu_select *ptr;
691 if (flag_verbose_asm)
693 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
694 rs6000_select[0].string = default_cpu;
696 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
698 ptr = &rs6000_select[i];
699 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
701 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
707 switch (rs6000_sdata)
709 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
710 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
711 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
712 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
715 if (rs6000_sdata && g_switch_value)
717 fprintf (file, "%s -G %d", start, g_switch_value);
727 /* Return non-zero if this function is known to have a null epilogue. */
732 if (reload_completed)
734 rs6000_stack_t *info = rs6000_stack_info ();
736 if (info->first_gp_reg_save == 32
737 && info->first_fp_reg_save == 64
738 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
741 && info->vrsave_mask == 0
749 /* Returns 1 always. */
752 any_operand (op, mode)
753 rtx op ATTRIBUTE_UNUSED;
754 enum machine_mode mode ATTRIBUTE_UNUSED;
759 /* Returns 1 if op is the count register. */
761 count_register_operand (op, mode)
763 enum machine_mode mode ATTRIBUTE_UNUSED;
765 if (GET_CODE (op) != REG)
768 if (REGNO (op) == COUNT_REGISTER_REGNUM)
771 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
777 /* Returns 1 if op is an altivec register. */
779 altivec_register_operand (op, mode)
781 enum machine_mode mode ATTRIBUTE_UNUSED;
784 return (register_operand (op, mode)
785 && (GET_CODE (op) != REG
786 || REGNO (op) > FIRST_PSEUDO_REGISTER
787 || ALTIVEC_REGNO_P (REGNO (op))));
791 xer_operand (op, mode)
793 enum machine_mode mode ATTRIBUTE_UNUSED;
795 if (GET_CODE (op) != REG)
798 if (XER_REGNO_P (REGNO (op)))
804 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
805 by such constants completes more quickly. */
808 s8bit_cint_operand (op, mode)
810 enum machine_mode mode ATTRIBUTE_UNUSED;
812 return ( GET_CODE (op) == CONST_INT
813 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
816 /* Return 1 if OP is a constant that can fit in a D field. */
819 short_cint_operand (op, mode)
821 enum machine_mode mode ATTRIBUTE_UNUSED;
823 return (GET_CODE (op) == CONST_INT
824 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
827 /* Similar for an unsigned D field. */
830 u_short_cint_operand (op, mode)
832 enum machine_mode mode ATTRIBUTE_UNUSED;
834 return (GET_CODE (op) == CONST_INT
835 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
838 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
841 non_short_cint_operand (op, mode)
843 enum machine_mode mode ATTRIBUTE_UNUSED;
845 return (GET_CODE (op) == CONST_INT
846 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
849 /* Returns 1 if OP is a CONST_INT that is a positive value
850 and an exact power of 2. */
853 exact_log2_cint_operand (op, mode)
855 enum machine_mode mode ATTRIBUTE_UNUSED;
857 return (GET_CODE (op) == CONST_INT
859 && exact_log2 (INTVAL (op)) >= 0);
862 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
866 gpc_reg_operand (op, mode)
868 enum machine_mode mode;
870 return (register_operand (op, mode)
871 && (GET_CODE (op) != REG
872 || (REGNO (op) >= ARG_POINTER_REGNUM
873 && !XER_REGNO_P (REGNO (op)))
874 || REGNO (op) < MQ_REGNO));
877 /* Returns 1 if OP is either a pseudo-register or a register denoting a
881 cc_reg_operand (op, mode)
883 enum machine_mode mode;
885 return (register_operand (op, mode)
886 && (GET_CODE (op) != REG
887 || REGNO (op) >= FIRST_PSEUDO_REGISTER
888 || CR_REGNO_P (REGNO (op))));
891 /* Returns 1 if OP is either a pseudo-register or a register denoting a
892 CR field that isn't CR0. */
895 cc_reg_not_cr0_operand (op, mode)
897 enum machine_mode mode;
899 return (register_operand (op, mode)
900 && (GET_CODE (op) != REG
901 || REGNO (op) >= FIRST_PSEUDO_REGISTER
902 || CR_REGNO_NOT_CR0_P (REGNO (op))));
905 /* Returns 1 if OP is either a constant integer valid for a D-field or
906 a non-special register. If a register, it must be in the proper
907 mode unless MODE is VOIDmode. */
910 reg_or_short_operand (op, mode)
912 enum machine_mode mode;
914 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
917 /* Similar, except check if the negation of the constant would be
918 valid for a D-field. */
921 reg_or_neg_short_operand (op, mode)
923 enum machine_mode mode;
925 if (GET_CODE (op) == CONST_INT)
926 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
928 return gpc_reg_operand (op, mode);
931 /* Returns 1 if OP is either a constant integer valid for a DS-field or
932 a non-special register. If a register, it must be in the proper
933 mode unless MODE is VOIDmode. */
936 reg_or_aligned_short_operand (op, mode)
938 enum machine_mode mode;
940 if (gpc_reg_operand (op, mode))
942 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
949 /* Return 1 if the operand is either a register or an integer whose
950 high-order 16 bits are zero. */
953 reg_or_u_short_operand (op, mode)
955 enum machine_mode mode;
957 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
960 /* Return 1 is the operand is either a non-special register or ANY
964 reg_or_cint_operand (op, mode)
966 enum machine_mode mode;
968 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
971 /* Return 1 is the operand is either a non-special register or ANY
972 32-bit signed constant integer. */
975 reg_or_arith_cint_operand (op, mode)
977 enum machine_mode mode;
979 return (gpc_reg_operand (op, mode)
980 || (GET_CODE (op) == CONST_INT
981 #if HOST_BITS_PER_WIDE_INT != 32
982 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
983 < (unsigned HOST_WIDE_INT) 0x100000000ll)
988 /* Return 1 is the operand is either a non-special register or a 32-bit
989 signed constant integer valid for 64-bit addition. */
992 reg_or_add_cint64_operand (op, mode)
994 enum machine_mode mode;
996 return (gpc_reg_operand (op, mode)
997 || (GET_CODE (op) == CONST_INT
998 #if HOST_BITS_PER_WIDE_INT == 32
999 && INTVAL (op) < 0x7fff8000
1001 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1007 /* Return 1 is the operand is either a non-special register or a 32-bit
1008 signed constant integer valid for 64-bit subtraction. */
1011 reg_or_sub_cint64_operand (op, mode)
1013 enum machine_mode mode;
1015 return (gpc_reg_operand (op, mode)
1016 || (GET_CODE (op) == CONST_INT
1017 #if HOST_BITS_PER_WIDE_INT == 32
1018 && (- INTVAL (op)) < 0x7fff8000
1020 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1026 /* Return 1 is the operand is either a non-special register or ANY
1027 32-bit unsigned constant integer. */
1030 reg_or_logical_cint_operand (op, mode)
1032 enum machine_mode mode;
1034 if (GET_CODE (op) == CONST_INT)
1036 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1038 if (GET_MODE_BITSIZE (mode) <= 32)
1041 if (INTVAL (op) < 0)
1045 return ((INTVAL (op) & GET_MODE_MASK (mode)
1046 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1048 else if (GET_CODE (op) == CONST_DOUBLE)
1050 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1054 return CONST_DOUBLE_HIGH (op) == 0;
1057 return gpc_reg_operand (op, mode);
1060 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1063 got_operand (op, mode)
1065 enum machine_mode mode ATTRIBUTE_UNUSED;
1067 return (GET_CODE (op) == SYMBOL_REF
1068 || GET_CODE (op) == CONST
1069 || GET_CODE (op) == LABEL_REF);
1072 /* Return 1 if the operand is a simple references that can be loaded via
1073 the GOT (labels involving addition aren't allowed). */
1076 got_no_const_operand (op, mode)
1078 enum machine_mode mode ATTRIBUTE_UNUSED;
1080 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1083 /* Return the number of instructions it takes to form a constant in an
1084 integer register. */
1087 num_insns_constant_wide (value)
1088 HOST_WIDE_INT value;
1090 /* signed constant loadable with {cal|addi} */
1091 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1094 /* constant loadable with {cau|addis} */
1095 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1098 #if HOST_BITS_PER_WIDE_INT == 64
1099 else if (TARGET_POWERPC64)
/* Split the 64-bit value into a sign-extended low 32 bits and the
   remaining high part, then recurse on each half.  */
1101 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1102 HOST_WIDE_INT high = value >> 31;
1104 if (high == 0 || high == -1)
/* NOTE(review): extraction dropped the branch bodies between these
   returns; original control flow not fully visible here.  */
1110 return num_insns_constant_wide (high) + 1;
1112 return (num_insns_constant_wide (high)
1113 + num_insns_constant_wide (low) + 1);
/* Return the number of instructions needed to load constant OP of
   mode MODE into an integer register; dispatches on CONST_INT vs
   CONST_DOUBLE (both float and 64-bit-integer flavors).  */
1122 num_insns_constant (op, mode)
1124 enum machine_mode mode;
1126 if (GET_CODE (op) == CONST_INT)
1128 #if HOST_BITS_PER_WIDE_INT == 64
/* A value whose top bits are neither all-zero nor all-one but that
   matches a PowerPC64 mask can be loaded cheaply (li + rldic*).  */
1129 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1130 && mask64_operand (op, mode))
1134 return num_insns_constant_wide (INTVAL (op));
1137 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
/* Single-precision float: convert to its 32-bit target image and
   cost that as an integer constant.  */
1142 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1143 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1144 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1147 else if (GET_CODE (op) == CONST_DOUBLE)
1153 int endian = (WORDS_BIG_ENDIAN == 0);
1155 if (mode == VOIDmode || mode == DImode)
/* Integer CONST_DOUBLE: the two words are directly available.  */
1157 high = CONST_DOUBLE_HIGH (op);
1158 low = CONST_DOUBLE_LOW (op);
/* Double float: take the two target words of the representation.  */
1162 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1163 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1165 low = l[1 - endian];
1169 return (num_insns_constant_wide (low)
1170 + num_insns_constant_wide (high));
/* 64-bit host/target cases: a sign-extended low word alone suffices.  */
1174 if (high == 0 && low >= 0)
1175 return num_insns_constant_wide (low);
1177 else if (high == -1 && low < 0)
1178 return num_insns_constant_wide (low);
1180 else if (mask64_operand (op, mode))
1184 return num_insns_constant_wide (high) + 1;
1187 return (num_insns_constant_wide (high)
1188 + num_insns_constant_wide (low) + 1);
1196 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1197 register with one instruction per word. We only do this if we can
1198 safely read CONST_DOUBLE_{LOW,HIGH}. */
1201 easy_fp_constant (op, mode)
1203 enum machine_mode mode;
1205 if (GET_CODE (op) != CONST_DOUBLE
1206 || GET_MODE (op) != mode
1207 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1210 /* Consider all constants with -msoft-float to be easy. */
1211 if (TARGET_SOFT_FLOAT && mode != DImode)
1214 /* If we are using V.4 style PIC, consider all constants to be hard. */
1215 if (flag_pic && DEFAULT_ABI == ABI_V4)
1218 #ifdef TARGET_RELOCATABLE
1219 /* Similarly if we are using -mrelocatable, consider all constants
1221 if (TARGET_RELOCATABLE)
/* DFmode: easy iff each 32-bit target word loads in one insn.  */
1230 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1231 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1233 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1234 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1237 else if (mode == SFmode)
1242 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1243 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1245 return num_insns_constant_wide (l) == 1;
/* DImode CONST_DOUBLE: easy on 64-bit targets with a zero low word,
   or whenever it costs at most two insns to materialize.  */
1248 else if (mode == DImode)
1249 return ((TARGET_POWERPC64
1250 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1251 || (num_insns_constant (op, DImode) <= 2));
1253 else if (mode == SImode)
1259 /* Return 1 if the operand is a CONST_VECTOR that can be put into a
1260 register with one instruction (currently only the zero vector). */
1263 easy_vector_constant (op)
1269 if (GET_CODE (op) != CONST_VECTOR)
1272 units = CONST_VECTOR_NUNITS (op);
1274 /* We can generate 0 easily. Look for that. */
1275 for (i = 0; i < units; ++i)
1277 elt = CONST_VECTOR_ELT (op, i);
1279 /* We could probably simplify this by just checking for equality
1280 with CONST0_RTX for the current mode, but let's be safe
1283 switch (GET_CODE (elt))
/* NOTE(review): case labels for CONST_INT/CONST_DOUBLE were lost in
   extraction; these tests reject any non-zero element.  */
1286 if (INTVAL (elt) != 0)
1290 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1298 /* We could probably generate a few other constants trivially, but
1299 gcc doesn't generate them yet. FIXME later. */
1303 /* Return 1 if the operand is the constant 0. This works for scalars
1304 as well as vectors. */
1306 zero_constant (op, mode)
1308 enum machine_mode mode;
/* CONST0_RTX objects are shared, so pointer equality suffices.  */
1310 return op == CONST0_RTX (mode);
1313 /* Return 1 if the operand is 0.0. */
1315 zero_fp_constant (op, mode)
1317 enum machine_mode mode;
/* Same shared-rtx equality test as zero_constant, restricted to
   floating-point modes.  */
1319 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1322 /* Return 1 if the operand is in volatile memory. Note that during
1323 the RTL generation phase, memory_operand does not return TRUE for
1324 volatile memory references. So this function allows us to
1325 recognize volatile references where it's safe. */
1328 volatile_mem_operand (op, mode)
1330 enum machine_mode mode;
1332 if (GET_CODE (op) != MEM)
1335 if (!MEM_VOLATILE_P (op))
1338 if (mode != GET_MODE (op))
/* Pick the address validator appropriate to the reload phase:
   strictest after reload, strict during, lenient before.  */
1341 if (reload_completed)
1342 return memory_operand (op, mode);
1344 if (reload_in_progress)
1345 return strict_memory_address_p (mode, XEXP (op, 0));
1347 return memory_address_p (mode, XEXP (op, 0));
1350 /* Return 1 if the operand is an offsettable memory operand. */
1353 offsettable_mem_operand (op, mode)
1355 enum machine_mode mode;
/* Strict checking is requested once reload has started/finished.  */
1357 return ((GET_CODE (op) == MEM)
1358 && offsettable_address_p (reload_completed || reload_in_progress,
1359 mode, XEXP (op, 0)));
1362 /* Return 1 if the operand is either an easy FP constant (see above) or
1366 mem_or_easy_const_operand (op, mode)
1368 enum machine_mode mode;
1370 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1373 /* Return 1 if the operand is either a non-special register or an item
1374 that can be used as the operand of a `mode' add insn. */
1377 add_operand (op, mode)
1379 enum machine_mode mode;
/* 'I' = 16-bit signed immediate (addi); 'L' = shifted 16-bit (addis).  */
1381 if (GET_CODE (op) == CONST_INT)
1382 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1383 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1385 return gpc_reg_operand (op, mode);
1388 /* Return 1 if OP is a constant but not a valid add_operand. */
1391 non_add_cint_operand (op, mode)
1393 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Exact complement of add_operand's constant case.  */
1395 return (GET_CODE (op) == CONST_INT
1396 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1397 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1400 /* Return 1 if the operand is a non-special register or a constant that
1401 can be used as the operand of an OR or XOR insn on the RS/6000. */
1404 logical_operand (op, mode)
1406 enum machine_mode mode;
1408 HOST_WIDE_INT opl, oph;
1410 if (gpc_reg_operand (op, mode))
1413 if (GET_CODE (op) == CONST_INT)
1415 opl = INTVAL (op) & GET_MODE_MASK (mode);
1417 #if HOST_BITS_PER_WIDE_INT <= 32
1418 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1422 else if (GET_CODE (op) == CONST_DOUBLE)
1424 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1427 opl = CONST_DOUBLE_LOW (op);
1428 oph = CONST_DOUBLE_HIGH (op);
/* Accept if the constant fits entirely in the low 16 bits (ori/xori)
   or entirely in bits 16-31 (oris/xoris).  */
1435 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1436 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1439 /* Return 1 if C is a constant that is not a logical operand (as
1440 above), but could be split into one. */
1443 non_logical_cint_operand (op, mode)
1445 enum machine_mode mode;
/* I.e. a 32-bit logical constant needing two insns (xoris + xori).  */
1447 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1448 && ! logical_operand (op, mode)
1449 && reg_or_logical_cint_operand (op, mode));
1452 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1453 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1454 Reject all ones and all zeros, since these should have been optimized
1455 away and confuse the making of MB and ME. */
1458 mask_operand (op, mode)
1460 enum machine_mode mode ATTRIBUTE_UNUSED;
1462 HOST_WIDE_INT c, lsb;
1464 if (GET_CODE (op) != CONST_INT)
1469 /* Fail in 64-bit mode if the mask wraps around because the upper
1470 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1471 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1474 /* We don't change the number of transitions by inverting,
1475 so make sure we start with the LS bit zero. */
/* NOTE(review): the bit-twiddling statements implementing each of the
   steps below were dropped in extraction; only the commentary survives.  */
1479 /* Reject all zeros or all ones. */
1483 /* Find the first transition. */
1486 /* Invert to look for a second transition. */
1489 /* Erase first transition. */
1492 /* Find the second transition (if any). */
1495 /* Match if all the bits above are 1's (or c is zero). */
1499 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1500 It is if there are no more than one 1->0 or 0->1 transitions.
1501 Reject all ones and all zeros, since these should have been optimized
1502 away and confuse the making of MB and ME. */
1505 mask64_operand (op, mode)
1507 enum machine_mode mode;
1509 if (GET_CODE (op) == CONST_INT)
1511 HOST_WIDE_INT c, lsb;
1513 /* We don't change the number of transitions by inverting,
1514 so make sure we start with the LS bit zero. */
1519 /* Reject all zeros or all ones. */
1523 /* Find the transition, and check that all bits above are 1's. */
/* CONST_DOUBLE case: a 64-bit mask split across two host words on
   hosts narrower than 64 bits.  */
1527 else if (GET_CODE (op) == CONST_DOUBLE
1528 && (mode == VOIDmode || mode == DImode))
1530 HOST_WIDE_INT low, high, lsb;
1532 if (HOST_BITS_PER_WIDE_INT < 64)
1533 high = CONST_DOUBLE_HIGH (op);
1535 low = CONST_DOUBLE_LOW (op);
1538 if (HOST_BITS_PER_WIDE_INT < 64)
/* -lsb is the run-of-ones pattern: lsb isolated the lowest set bit,
   so negating yields all 1's from that bit upward.  */
1545 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1549 return high == -lsb;
1553 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1559 /* Return 1 if the operand is either a non-special register or a constant
1560 that can be used as the operand of a PowerPC64 logical AND insn. */
1563 and64_operand (op, mode)
1565 enum machine_mode mode;
/* andi./andis. clobber CR0; when CR0 is fixed, allow only forms that
   avoid the record-form AND.  */
1567 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1568 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1570 return (logical_operand (op, mode) || mask64_operand (op, mode));
1573 /* Return 1 if the operand is either a non-special register or a
1574 constant that can be used as the operand of an RS/6000 logical AND insn. */
1577 and_operand (op, mode)
1579 enum machine_mode mode;
/* 32-bit analogue of and64_operand, using 32-bit rlwinm masks.  */
1581 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1582 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1584 return (logical_operand (op, mode) || mask_operand (op, mode));
1587 /* Return 1 if the operand is a general register or memory operand. */
1590 reg_or_mem_operand (op, mode)
1592 enum machine_mode mode;
/* volatile_mem_operand covers volatile MEMs that memory_operand
   rejects during RTL generation.  */
1594 return (gpc_reg_operand (op, mode)
1595 || memory_operand (op, mode)
1596 || volatile_mem_operand (op, mode));
1599 /* Return 1 if the operand is a general register or memory operand without
1600 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1604 lwa_operand (op, mode)
1606 enum machine_mode mode;
/* Look through a SUBREG left by reload so the underlying MEM/REG is
   what gets validated.  */
1610 if (reload_completed && GET_CODE (inner) == SUBREG)
1611 inner = SUBREG_REG (inner);
/* lwa is a DS-form instruction: any constant displacement must be a
   multiple of 4, and pre-increment/decrement addressing is invalid.  */
1613 return gpc_reg_operand (inner, mode)
1614 || (memory_operand (inner, mode)
1615 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1616 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1617 && (GET_CODE (XEXP (inner, 0)) != PLUS
1618 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1619 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1622 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1625 symbol_ref_operand (op, mode)
1627 enum machine_mode mode;
1629 if (mode != VOIDmode && GET_MODE (op) != mode)
1632 return (GET_CODE (op) == SYMBOL_REF);
1635 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1636 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1639 call_operand (op, mode)
1641 enum machine_mode mode;
1643 if (mode != VOIDmode && GET_MODE (op) != mode)
/* Hard registers other than LR/CTR cannot hold a call target.  */
1646 return (GET_CODE (op) == SYMBOL_REF
1647 || (GET_CODE (op) == REG
1648 && (REGNO (op) == LINK_REGISTER_REGNUM
1649 || REGNO (op) == COUNT_REGISTER_REGNUM
1650 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1653 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1654 this file and the function is not weakly defined. */
1657 current_file_function_operand (op, mode)
1659 enum machine_mode mode ATTRIBUTE_UNUSED;
/* SYMBOL_REF_FLAG marks locally-defined functions; otherwise accept a
   non-weak reference to the function currently being compiled.  */
1661 return (GET_CODE (op) == SYMBOL_REF
1662 && (SYMBOL_REF_FLAG (op)
1663 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1664 && ! DECL_WEAK (current_function_decl))));
1667 /* Return 1 if this operand is a valid input for a move insn. */
1670 input_operand (op, mode)
1672 enum machine_mode mode;
1674 /* Memory is always valid. */
1675 if (memory_operand (op, mode))
1678 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1679 if (GET_CODE (op) == CONSTANT_P_RTX)
1682 /* For floating-point, easy constants are valid. */
1683 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1685 && easy_fp_constant (op, mode))
1688 /* Allow any integer constant. */
1689 if (GET_MODE_CLASS (mode) == MODE_INT
1690 && (GET_CODE (op) == CONST_INT
1691 || GET_CODE (op) == CONST_DOUBLE))
1694 /* For floating-point or multi-word mode, the only remaining valid type
1696 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1697 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1698 return register_operand (op, mode);
1700 /* The only cases left are integral modes one word or smaller (we
1701 do not get called for MODE_CC values). These can be in any
1703 if (register_operand (op, mode))
1706 /* A SYMBOL_REF referring to the TOC is valid. */
1707 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1710 /* A constant pool expression (relative to the TOC) is valid */
1711 if (TOC_RELATIVE_EXPR_P (op))
1714 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1716 if (DEFAULT_ABI == ABI_V4
1717 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1718 && small_data_operand (op, Pmode))
1724 /* Return 1 for an operand in small memory on V.4/eabi. */
1727 small_data_operand (op, mode)
1728 rtx op ATTRIBUTE_UNUSED;
1729 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Small data is only meaningful for V.4 with an sdata model that
   places variables relative to _SDA_BASE_.  */
1734 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1737 if (DEFAULT_ABI != ABI_V4)
1740 if (GET_CODE (op) == SYMBOL_REF)
/* Otherwise the operand must be (const (plus (symbol_ref) (const_int))).  */
1743 else if (GET_CODE (op) != CONST
1744 || GET_CODE (XEXP (op, 0)) != PLUS
1745 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1746 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1751 rtx sum = XEXP (op, 0);
1752 HOST_WIDE_INT summand;
1754 /* We have to be careful here, because it is the referenced address
1755 that must be 32k from _SDA_BASE_, not just the symbol. */
1756 summand = INTVAL (XEXP (sum, 1));
1757 if (summand < 0 || summand > g_switch_value)
1760 sym_ref = XEXP (sum, 0);
/* Small-data symbols are marked with a leading '@' in their name.  */
1763 if (*XSTR (sym_ref, 0) != '@')
/* Recursive worker: walk OP setting *HAVE_SYM if a constant-pool
   symbol is seen and *HAVE_TOC if the TOC label is referenced;
   returns whether OP is a supported constant-pool expression.  */
1774 constant_pool_expr_1 (op, have_sym, have_toc)
1779 switch (GET_CODE(op))
1782 if (CONSTANT_POOL_ADDRESS_P (op))
1784 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1792 else if (! strcmp (XSTR (op, 0), toc_label_name))
/* PLUS/MINUS recurse into both operands; CONST into its single one.  */
1801 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
1802 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
1804 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
/* True if OP is a constant-pool expression containing a pool symbol.  */
1813 constant_pool_expr_p (op)
1818 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
/* True if OP is a constant-pool expression referencing the TOC label.  */
1822 toc_relative_expr_p (op)
1827 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1830 /* Try machine-dependent ways of modifying an illegitimate address
1831 to be legitimate. If we find one, return the new, valid address.
1832 This is used from only one place: `memory_address' in explow.c.
1834 OLDX is the address as it was before break_out_memory_refs was
1835 called. In some cases it is useful to look at this to decide what
1838 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1840 It is always safe for this function to do nothing. It exists to
1841 recognize opportunities to optimize the output.
1843 On RS/6000, first check for the sum of a register with a constant
1844 integer that is out of range. If so, generate code to add the
1845 constant with the low-order 16 bits masked to the register and force
1846 this result into another register (this can be done with `cau').
1847 Then generate an address of REG+(CONST&0xffff), allowing for the
1848 possibility of bit 16 being a one.
1850 Then check for the sum of a register and something not constant, try to
1851 load the other things into a register and return the sum. */
1853 rs6000_legitimize_address (x, oldx, mode)
1855 rtx oldx ATTRIBUTE_UNUSED;
1856 enum machine_mode mode;
/* Case 1: reg + out-of-range constant -> split into addis-able high
   part plus 16-bit signed low part.  */
1858 if (GET_CODE (x) == PLUS
1859 && GET_CODE (XEXP (x, 0)) == REG
1860 && GET_CODE (XEXP (x, 1)) == CONST_INT
1861 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1863 HOST_WIDE_INT high_int, low_int;
1865 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
1866 high_int = INTVAL (XEXP (x, 1)) - low_int;
1867 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1868 GEN_INT (high_int)), 0);
1869 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
/* Case 2: reg + non-constant -> force the addend into a register so
   indexed addressing can be used (when the mode permits it).  */
1871 else if (GET_CODE (x) == PLUS
1872 && GET_CODE (XEXP (x, 0)) == REG
1873 && GET_CODE (XEXP (x, 1)) != CONST_INT
1874 && GET_MODE_NUNITS (mode) == 1
1875 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1876 && (TARGET_POWERPC64 || mode != DImode)
1879 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1880 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
/* Case 3: AltiVec modes only allow reg or reg+reg addressing.  */
1882 else if (ALTIVEC_VECTOR_MODE (mode))
1886 /* Make sure both operands are registers. */
1887 if (GET_CODE (x) == PLUS)
1888 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1889 force_reg (Pmode, XEXP (x, 1)));
1891 reg = force_reg (Pmode, x);
/* Case 4: ELF without a TOC -> lis/lo_sum pair for symbolic addresses.  */
1894 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1895 && GET_CODE (x) != CONST_INT
1896 && GET_CODE (x) != CONST_DOUBLE
1898 && GET_MODE_NUNITS (mode) == 1
1899 && (GET_MODE_BITSIZE (mode) <= 32
1900 || (TARGET_HARD_FLOAT && mode == DFmode)))
1902 rtx reg = gen_reg_rtx (Pmode);
1903 emit_insn (gen_elf_high (reg, (x)));
1904 return gen_rtx_LO_SUM (Pmode, reg, (x));
/* Case 5: same high/lo_sum scheme for Darwin (Mach-O) without a TOC.  */
1906 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1908 && GET_CODE (x) != CONST_INT
1909 && GET_CODE (x) != CONST_DOUBLE
1911 && (TARGET_HARD_FLOAT || mode != DFmode)
1915 rtx reg = gen_reg_rtx (Pmode);
1916 emit_insn (gen_macho_high (reg, (x)));
1917 return gen_rtx_LO_SUM (Pmode, reg, (x));
/* Case 6: constant-pool entries that live in the TOC become
   TOC-relative references.  */
1920 && CONSTANT_POOL_EXPR_P (x)
1921 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1923 return create_TOC_reference (x);
1929 /* The convention appears to be to define this wherever it is used.
1930 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1931 is now used here. */
/* Fallback: targets without a mode-sensitive base-reg check reuse the
   mode-independent REG_OK_FOR_BASE_P.  (Matching #endif not visible in
   this chunk.)  */
1932 #ifndef REG_MODE_OK_FOR_BASE_P
1933 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1936 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1937 replace the input X, or the original X if no replacement is called for.
1938 The output parameter *WIN is 1 if the calling macro should goto WIN,
1941 For RS/6000, we wish to handle large displacements off a base
1942 register by splitting the addend across an addiu/addis and the mem insn.
1943 This cuts number of extra insns needed from 3 to 1.
1945 On Darwin, we use this to generate code for floating point constants.
1946 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1947 The Darwin code is inside #if TARGET_MACHO because only then is
1948 machopic_function_base_name() defined. */
1950 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1952 enum machine_mode mode;
1955 int ind_levels ATTRIBUTE_UNUSED;
1958 /* We must recognize output that we have already generated ourselves. */
1959 if (GET_CODE (x) == PLUS
1960 && GET_CODE (XEXP (x, 0)) == PLUS
1961 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1962 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1963 && GET_CODE (XEXP (x, 1)) == CONST_INT)
/* Reload the (reg + high) inner sum into a base register.  */
1965 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1966 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1967 opnum, (enum reload_type)type);
/* Darwin PIC: recognize the lo_sum/minus pattern this function itself
   produced on a previous invocation (see below).  */
1973 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1974 && GET_CODE (x) == LO_SUM
1975 && GET_CODE (XEXP (x, 0)) == PLUS
1976 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1977 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1978 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1979 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1980 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1981 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1982 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1984 /* Result of previous invocation of this function on Darwin
1985 floating point constant. */
1986 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1987 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1988 opnum, (enum reload_type)type);
/* Main case: base reg + large constant -> split into a 16-bit signed
   low part (kept in the mem) and a high part reloaded into a reg.  */
1993 if (GET_CODE (x) == PLUS
1994 && GET_CODE (XEXP (x, 0)) == REG
1995 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1996 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1997 && GET_CODE (XEXP (x, 1)) == CONST_INT
1998 && !ALTIVEC_VECTOR_MODE (mode))
2000 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2001 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2003 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2005 /* Check for 32-bit overflow. */
2006 if (high + low != val)
2012 /* Reload the high part into a base reg; leave the low part
2013 in the mem directly. */
2015 x = gen_rtx_PLUS (GET_MODE (x),
2016 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2020 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2021 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2022 opnum, (enum reload_type)type);
/* Darwin: turn a bare SYMBOL_REF (FP constant) into a pic-base-relative
   lo_sum so a movsf_low/movdf_low pattern can load it.  */
2027 if (GET_CODE (x) == SYMBOL_REF
2028 && DEFAULT_ABI == ABI_DARWIN
2029 && !ALTIVEC_VECTOR_MODE (mode)
2032 /* Darwin load of floating point constant. */
2033 rtx offset = gen_rtx (CONST, Pmode,
2034 gen_rtx (MINUS, Pmode, x,
2035 gen_rtx (SYMBOL_REF, Pmode,
2036 machopic_function_base_name ())));
2037 x = gen_rtx (LO_SUM, GET_MODE (x),
2038 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2039 gen_rtx (HIGH, Pmode, offset)), offset);
2040 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2041 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2042 opnum, (enum reload_type)type);
/* TOC-resident constant pool entries become TOC references.  */
2048 && CONSTANT_POOL_EXPR_P (x)
2049 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2051 (x) = create_TOC_reference (x);
2059 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2060 that is a valid memory address for an instruction.
2061 The MODE argument is the machine mode for the MEM expression
2062 that wants to use this address.
2064 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
2065 refers to a constant pool entry of an address (or the sum of it
2066 plus a constant), a short (16-bit signed) constant plus a register,
2067 the sum of two registers, or a register indirect, possibly with an
2068 auto-increment. For DFmode and DImode with an constant plus register,
2069 we must ensure that both words are addressable or PowerPC64 with offset
2072 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2073 32-bit DImode, TImode), indexed addressing cannot be used because
2074 adjacent memory cells are accessed by adding word-sized offsets
2075 during assembly output. */
2077 rs6000_legitimate_address (mode, x, reg_ok_strict)
2078 enum machine_mode mode;
/* Each test below returns 1 on match; falling through all of them
   rejects the address.  */
2082 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2084 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2085 && !ALTIVEC_VECTOR_MODE (mode)
2087 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2089 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2091 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2093 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2095 && GET_CODE (x) == PLUS
2096 && GET_CODE (XEXP (x, 0)) == REG
2097 && XEXP (x, 0) == virtual_stack_vars_rtx
2098 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2100 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
/* Indexed (reg+reg) addressing, disallowed for multi-register modes
   per the header comment above.  */
2103 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
2104 && (TARGET_POWERPC64 || mode != DImode)
2105 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2107 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2112 /* Try to output insns to set TARGET equal to the constant C if it can
2113 be done in less than N insns. Do all computations in MODE.
2114 Returns the place where the output has been placed if it can be
2115 done and the insns have been emitted. If it would take more than N
2116 insns, zero is returned and no insns are emitted. */
2119 rs6000_emit_set_const (dest, mode, source, n)
2121 enum machine_mode mode;
2122 int n ATTRIBUTE_UNUSED;
2124 HOST_WIDE_INT c0, c1;
/* Narrow modes: a single SET suffices; use a fresh pseudo if needed.  */
2126 if (mode == QImode || mode == HImode || mode == SImode)
2129 dest = gen_reg_rtx (mode);
2130 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
/* Wider constants: extract low/high host words and delegate.  */
2134 if (GET_CODE (source) == CONST_INT)
2136 c0 = INTVAL (source);
2139 else if (GET_CODE (source) == CONST_DOUBLE)
2141 #if HOST_BITS_PER_WIDE_INT >= 64
2142 c0 = CONST_DOUBLE_LOW (source);
2145 c0 = CONST_DOUBLE_LOW (source);
2146 c1 = CONST_DOUBLE_HIGH (source);
2152 return rs6000_emit_set_long_const (dest, c0, c1);
2155 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2156 fall back to a straight forward decomposition. We do this to avoid
2157 exponential run times encountered when looking for longer sequences
2158 with rs6000_emit_set_const. */
2160 rs6000_emit_set_long_const (dest, c1, c2)
2162 HOST_WIDE_INT c1, c2;
/* 32-bit target: just move each 32-bit word into its subword.  */
2164 if (!TARGET_POWERPC64)
2166 rtx operand1, operand2;
2168 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2170 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2172 emit_move_insn (operand1, GEN_INT (c1));
2173 emit_move_insn (operand2, GEN_INT (c2));
/* 64-bit target: decompose into four 16-bit chunks ud1..ud4 (low to
   high) and build the value with li/lis/ori/oris/sldi sequences.  */
2177 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2180 ud2 = (c1 & 0xffff0000) >> 16;
2181 #if HOST_BITS_PER_WIDE_INT >= 64
2185 ud4 = (c2 & 0xffff0000) >> 16;
/* One insn: the value is a sign-extended 16-bit constant.  */
2187 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2188 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2191 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2193 emit_move_insn (dest, GEN_INT (ud1));
/* Two insns: sign-extended 32-bit constant -> lis (+ ori).  */
2196 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2197 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2200 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2203 emit_move_insn (dest, GEN_INT (ud2 << 16));
2205 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
/* 48-bit constant: build upper 32 bits, shift left 16, or in ud1.  */
2207 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2208 || (ud4 == 0 && ! (ud3 & 0x8000)))
2211 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2214 emit_move_insn (dest, GEN_INT (ud3 << 16));
2217 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2218 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2220 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
/* Full 64-bit constant: build high half, shift by 32, or in low half.  */
2225 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2228 emit_move_insn (dest, GEN_INT (ud4 << 16));
2231 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2233 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2235 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2236 GEN_INT (ud2 << 16)));
2238 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2244 /* Emit a move from SOURCE to DEST in mode MODE. */
2246 rs6000_emit_move (dest, source, mode)
2249 enum machine_mode mode;
2253 operands[1] = source;
2255 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2256 if (GET_CODE (operands[1]) == CONST_DOUBLE
2257 && ! FLOAT_MODE_P (mode)
2258 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2260 /* FIXME. This should never happen. */
2261 /* Since it seems that it does, do the safe thing and convert
2263 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2265 if (GET_CODE (operands[1]) == CONST_DOUBLE
2266 && ! FLOAT_MODE_P (mode)
2267 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2268 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2269 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2270 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2273 /* Check if GCC is setting up a block move that will end up using FP
2274 registers as temporaries. We must make sure this is acceptable. */
2275 if (GET_CODE (operands[0]) == MEM
2276 && GET_CODE (operands[1]) == MEM
2278 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2279 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2280 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2281 ? 32 : MEM_ALIGN (operands[0])))
2282 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2284 : MEM_ALIGN (operands[1]))))
2285 && ! MEM_VOLATILE_P (operands [0])
2286 && ! MEM_VOLATILE_P (operands [1]))
2288 emit_move_insn (adjust_address (operands[0], SImode, 0),
2289 adjust_address (operands[1], SImode, 0));
2290 emit_move_insn (adjust_address (operands[0], SImode, 4),
2291 adjust_address (operands[1], SImode, 4));
2295 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2296 operands[1] = force_reg (mode, operands[1]);
2298 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2299 && GET_CODE (operands[0]) == MEM)
2303 if (reload_in_progress || reload_completed)
2304 regnum = true_regnum (operands[1]);
2305 else if (GET_CODE (operands[1]) == REG)
2306 regnum = REGNO (operands[1]);
2310 /* If operands[1] is a register, on POWER it may have
2311 double-precision data in it, so truncate it to single
2313 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2316 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2317 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2318 operands[1] = newreg;
2322 /* Handle the case where reload calls us with an invalid address;
2323 and the case of CONSTANT_P_RTX. */
2324 if (!ALTIVEC_VECTOR_MODE (mode)
2325 && (! general_operand (operands[1], mode)
2326 || ! nonimmediate_operand (operands[0], mode)
2327 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2329 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2333 /* FIXME: In the long term, this switch statement should go away
2334 and be replaced by a sequence of tests based on things like
2340 if (CONSTANT_P (operands[1])
2341 && GET_CODE (operands[1]) != CONST_INT)
2342 operands[1] = force_const_mem (mode, operands[1]);
2348 if (CONSTANT_P (operands[1])
2349 && ! easy_fp_constant (operands[1], mode))
2350 operands[1] = force_const_mem (mode, operands[1]);
2357 if (CONSTANT_P (operands[1])
2358 && !easy_vector_constant (operands[1]))
2359 operands[1] = force_const_mem (mode, operands[1]);
2364 /* Use default pattern for address of ELF small data */
2367 && DEFAULT_ABI == ABI_V4
2368 && (GET_CODE (operands[1]) == SYMBOL_REF
2369 || GET_CODE (operands[1]) == CONST)
2370 && small_data_operand (operands[1], mode))
2372 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2376 if (DEFAULT_ABI == ABI_V4
2377 && mode == Pmode && mode == SImode
2378 && flag_pic == 1 && got_operand (operands[1], mode))
2380 emit_insn (gen_movsi_got (operands[0], operands[1]));
2384 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2385 && TARGET_NO_TOC && ! flag_pic
2387 && CONSTANT_P (operands[1])
2388 && GET_CODE (operands[1]) != HIGH
2389 && GET_CODE (operands[1]) != CONST_INT)
2391 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2393 /* If this is a function address on -mcall-aixdesc,
2394 convert it to the address of the descriptor. */
2395 if (DEFAULT_ABI == ABI_AIX
2396 && GET_CODE (operands[1]) == SYMBOL_REF
2397 && XSTR (operands[1], 0)[0] == '.')
2399 const char *name = XSTR (operands[1], 0);
2401 while (*name == '.')
2403 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2404 CONSTANT_POOL_ADDRESS_P (new_ref)
2405 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2406 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2407 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2408 operands[1] = new_ref;
2411 if (DEFAULT_ABI == ABI_DARWIN)
2413 emit_insn (gen_macho_high (target, operands[1]));
2414 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2418 emit_insn (gen_elf_high (target, operands[1]));
2419 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2423 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2424 and we have put it in the TOC, we just need to make a TOC-relative
2427 && GET_CODE (operands[1]) == SYMBOL_REF
2428 && CONSTANT_POOL_EXPR_P (operands[1])
2429 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2430 get_pool_mode (operands[1])))
2432 operands[1] = create_TOC_reference (operands[1]);
2434 else if (mode == Pmode
2435 && CONSTANT_P (operands[1])
2436 && ((GET_CODE (operands[1]) != CONST_INT
2437 && ! easy_fp_constant (operands[1], mode))
2438 || (GET_CODE (operands[1]) == CONST_INT
2439 && num_insns_constant (operands[1], mode) > 2)
2440 || (GET_CODE (operands[0]) == REG
2441 && FP_REGNO_P (REGNO (operands[0]))))
2442 && GET_CODE (operands[1]) != HIGH
2443 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2444 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2446 /* Emit a USE operation so that the constant isn't deleted if
2447 expensive optimizations are turned on because nobody
2448 references it. This should only be done for operands that
2449 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2450 This should not be done for operands that contain LABEL_REFs.
2451 For now, we just handle the obvious case. */
2452 if (GET_CODE (operands[1]) != LABEL_REF)
2453 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2456 /* Darwin uses a special PIC legitimizer. */
2457 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2460 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2462 if (operands[0] != operands[1])
2463 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2468 /* If we are to limit the number of things we put in the TOC and
2469 this is a symbol plus a constant we can add in one insn,
2470 just put the symbol in the TOC and add the constant. Don't do
2471 this if reload is in progress. */
2472 if (GET_CODE (operands[1]) == CONST
2473 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2474 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2475 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2476 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2477 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2478 && ! side_effects_p (operands[0]))
2481 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2482 rtx other = XEXP (XEXP (operands[1], 0), 1);
2484 sym = force_reg (mode, sym);
2486 emit_insn (gen_addsi3 (operands[0], sym, other));
2488 emit_insn (gen_adddi3 (operands[0], sym, other));
2492 operands[1] = force_const_mem (mode, operands[1]);
2495 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2496 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2497 get_pool_constant (XEXP (operands[1], 0)),
2498 get_pool_mode (XEXP (operands[1], 0))))
2501 = gen_rtx_MEM (mode,
2502 create_TOC_reference (XEXP (operands[1], 0)));
2503 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2504 RTX_UNCHANGING_P (operands[1]) = 1;
2510 if (GET_CODE (operands[0]) == MEM
2511 && GET_CODE (XEXP (operands[0], 0)) != REG
2512 && ! reload_in_progress)
2514 = replace_equiv_address (operands[0],
2515 copy_addr_to_reg (XEXP (operands[0], 0)));
2517 if (GET_CODE (operands[1]) == MEM
2518 && GET_CODE (XEXP (operands[1], 0)) != REG
2519 && ! reload_in_progress)
2521 = replace_equiv_address (operands[1],
2522 copy_addr_to_reg (XEXP (operands[1], 0)));
2529 /* Above, we may have called force_const_mem which may have returned
2530 an invalid address. If we can, fix this up; otherwise, reload will
2531 have to deal with it. */
2532 if (GET_CODE (operands[1]) == MEM
2533 && ! memory_address_p (mode, XEXP (operands[1], 0))
2534 && ! reload_in_progress)
2535 operands[1] = adjust_address (operands[1], mode, 0);
2537 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
/* NOTE(review): the embedded line numbers below jump (2543 -> 2545,
   2563 -> 2566, ...), so this dump omits original source lines
   (return type, some parameter declarations, `if' conditions, braces).
   Code lines are kept byte-identical; only comments are added.  */
2541 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2542 for a call to a function whose data type is FNTYPE.
2543 For a library call, FNTYPE is 0.
2545 For incoming args we set the number of arguments in the prototype large
2546 so we never return a PARALLEL. */
/* K&R-style (pre-ANSI) definition, as used throughout old GCC sources.  */
2549 init_cumulative_args (cum, fntype, libname, incoming)
2550 CUMULATIVE_ARGS *cum;
2552 rtx libname ATTRIBUTE_UNUSED;
/* Zero-filled static used to reset *CUM to a known-clean state.  */
2555 static CUMULATIVE_ARGS zero_cumulative;
2557 *cum = zero_cumulative;
/* Start argument assignment at the first FP, AltiVec and GP arg regs.  */
2559 cum->fregno = FP_ARG_MIN_REG;
2560 cum->vregno = ALTIVEC_ARG_MIN_REG;
2561 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2562 cum->call_cookie = CALL_NORMAL;
2563 cum->sysv_gregno = GP_ARG_MIN_REG;
/* Presumably guarded by a test of INCOMING on an omitted line (see the
   header comment above) -- TODO confirm against the full source.  */
2566 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2568 else if (cum->prototype)
/* Prototyped call: count the fixed args, minus one for the terminating
   void, plus one if the value is returned in memory (hidden pointer).  */
2569 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2570 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2571 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2574 cum->nargs_prototype = 0;
2576 cum->orig_nargs = cum->nargs_prototype;
2578 /* Check for a longcall attribute. */
/* The guarding test of FNTYPE is on an omitted line; "shortcall"
   overrides "longcall" when both attributes are present.  */
2580 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2581 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2582 cum->call_cookie = CALL_LONG;
/* Optional debug trace of the computed state.  */
2584 if (TARGET_DEBUG_ARG)
2586 fprintf (stderr, "\ninit_cumulative_args:");
2589 tree ret_type = TREE_TYPE (fntype);
2590 fprintf (stderr, " ret code = %s,",
2591 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2594 if (cum->call_cookie & CALL_LONG)
2595 fprintf (stderr, " longcall,");
2597 fprintf (stderr, " proto = %d, nargs = %d\n",
2598 cum->prototype, cum->nargs_prototype);
/* NOTE(review): embedded line numbers jump (2605 -> 2608, 2616 -> 2619),
   so some original lines (end of comment, return type, a `return' for the
   aggregate case) are omitted.  Code lines kept byte-identical.  */
2602 /* If defined, a C expression which determines whether, and in which
2603 direction, to pad out an argument with extra space. The value
2604 should be of type `enum direction': either `upward' to pad above
2605 the argument, `downward' to pad below, or `none' to inhibit
2608 For the AIX ABI structs are always stored left shifted in their
2612 function_arg_padding (mode, type)
2613 enum machine_mode mode;
/* Aggregates take an early path (its `return' is on an omitted line);
   per the comment above, AIX stores structs left-shifted (upward).  */
2616 if (type != 0 && AGGREGATE_TYPE_P (type))
2619 /* This is the default definition. */
/* Little-endian only: pad small args downward when they occupy less
   than a parameter slot (PARM_BOUNDARY bits); otherwise pad upward.  */
2620 return (! BYTES_BIG_ENDIAN
2623 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2624 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2625 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2626 ? downward : upward));
/* NOTE(review): line numbers jump (2631 -> 2633, 2640 -> 2642), so the
   two `return' statements for the special cases (presumably 64 and 128
   bits -- TODO confirm) are on omitted lines.  Code kept byte-identical.  */
2629 /* If defined, a C expression that gives the alignment boundary, in bits,
2630 of an argument with the specified mode and type. If it is not defined,
2631 PARM_BOUNDARY is used for all arguments.
2633 V.4 wants long longs to be double word aligned. */
2636 function_arg_boundary (mode, type)
2637 enum machine_mode mode;
2638 tree type ATTRIBUTE_UNUSED;
/* V.4 ABI: DImode/DFmode args get doubleword alignment (return omitted).  */
2640 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
/* AltiVec vectors get their own (larger) boundary (return omitted).  */
2642 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2645 return PARM_BOUNDARY;
/* NOTE(review): embedded line numbers jump throughout (2655 -> 2659,
   2673 -> 2678, ...), so many original lines (braces, else-arms, register
   increments, declarations such as that of n_words) are omitted from this
   dump.  Code lines are kept byte-identical; comments only are added.  */
2648 /* Update the data in CUM to advance over an argument
2649 of mode MODE and data type TYPE.
2650 (TYPE is null for libcalls where that information may not be available.) */
2653 function_arg_advance (cum, mode, type, named)
2654 CUMULATIVE_ARGS *cum;
2655 enum machine_mode mode;
/* One fewer prototyped argument remains (may go negative past the end).  */
2659 cum->nargs_prototype--;
/* AltiVec vector arguments: consume a vector register if one is left.  */
2661 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2663 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2666 cum->words += RS6000_ARG_SIZE (mode, type);
/* System V.4 ABI argument advance.  */
2668 else if (DEFAULT_ABI == ABI_V4)
2670 if (TARGET_HARD_FLOAT
2671 && (mode == SFmode || mode == DFmode))
/* FP arg fits in an FP register (the fregno increment is omitted).  */
2673 if (cum->fregno <= FP_ARG_V4_MAX_REG)
/* Out of FP regs: align the stack word count to an even boundary,
   then advance past the argument on the stack.  */
2678 cum->words += cum->words & 1;
2679 cum->words += RS6000_ARG_SIZE (mode, type);
/* Non-FP V.4 case: assign GP registers.  */
2685 int gregno = cum->sysv_gregno;
2687 /* Aggregates and IEEE quad get passed by reference. */
2688 if ((type && AGGREGATE_TYPE_P (type))
2692 n_words = RS6000_ARG_SIZE (mode, type);
2694 /* Long long is put in odd registers. */
2695 if (n_words == 2 && (gregno & 1) == 0)
2698 /* Long long is not split between registers and stack. */
2699 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2701 /* Long long is aligned on the stack. */
2703 cum->words += cum->words & 1;
2704 cum->words += n_words;
2707 /* Note: continuing to accumulate gregno past when we've started
2708 spilling to the stack indicates the fact that we've started
2709 spilling to the stack to expand_builtin_saveregs. */
2710 cum->sysv_gregno = gregno + n_words;
2713 if (TARGET_DEBUG_ARG)
2715 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2716 cum->words, cum->fregno);
2717 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2718 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2719 fprintf (stderr, "mode = %4s, named = %d\n",
2720 GET_MODE_NAME (mode), named);
/* AIX / other ABIs: pad to a doubleword when 32-bit and the arg wants
   64-bit alignment, then advance the word counter.  */
2725 int align = (TARGET_32BIT && (cum->words & 1) != 0
2726 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2728 cum->words += align + RS6000_ARG_SIZE (mode, type);
/* FP args also consume FP registers (the increment itself is omitted).  */
2730 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2733 if (TARGET_DEBUG_ARG)
2735 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2736 cum->words, cum->fregno);
2737 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2738 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2739 fprintf (stderr, "named = %d, align = %d\n", named, align);
/* NOTE(review): embedded line numbers jump in many places (2755 -> 2757,
   2780 -> 2783, ...), so this dump omits original lines (conditions,
   braces, fall-through returns).  Code lines kept byte-identical.  */
2744 /* Determine where to put an argument to a function.
2745 Value is zero to push the argument on the stack,
2746 or a hard register in which to store the argument.
2748 MODE is the argument's machine mode.
2749 TYPE is the data type of the argument (as a tree).
2750 This is null for libcalls where that information may
2752 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2753 the preceding args and about the function being called.
2754 NAMED is nonzero if this argument is a named parameter
2755 (otherwise it is an extra parameter matching an ellipsis).
2757 On RS/6000 the first eight words of non-FP are normally in registers
2758 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2759 Under V.4, the first 8 FP args are in registers.
2761 If this is floating-point and no prototype is specified, we use
2762 both an FP and integer register (or possibly FP reg and stack). Library
2763 functions (when TYPE is zero) always have the proper types for args,
2764 so we can pass the FP value just in one register. emit_library_function
2765 doesn't support PARALLEL anyway. */
2768 function_arg (cum, mode, type, named)
2769 CUMULATIVE_ARGS *cum;
2770 enum machine_mode mode;
2774 enum rs6000_abi abi = DEFAULT_ABI;
2776 /* Return a marker to indicate whether CR1 needs to set or clear the
2777 bit that V.4 uses to say fp args were passed in registers.
2778 Assume that we don't need the marker for software floating point,
2779 or compiler generated library calls. */
/* End-of-arguments marker: mode == VOIDmode means "after the last arg".  */
2780 if (mode == VOIDmode)
2783 && TARGET_HARD_FLOAT
2784 && cum->nargs_prototype < 0
2785 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
/* V.4 variadic call: the cookie tells the caller whether any FP regs
   were used (fregno still at the minimum means none were).  */
2787 return GEN_INT (cum->call_cookie
2788 | ((cum->fregno == FP_ARG_MIN_REG)
2789 ? CALL_V4_SET_FP_ARGS
2790 : CALL_V4_CLEAR_FP_ARGS));
2793 return GEN_INT (cum->call_cookie);
/* Named AltiVec vector args go in vector registers.  */
2796 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2798 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2799 return gen_rtx_REG (mode, cum->vregno);
/* System V.4 argument placement (mirrors function_arg_advance).  */
2803 else if (abi == ABI_V4)
2805 if (TARGET_HARD_FLOAT
2806 && (mode == SFmode || mode == DFmode))
2808 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2809 return gen_rtx_REG (mode, cum->fregno);
2816 int gregno = cum->sysv_gregno;
2818 /* Aggregates and IEEE quad get passed by reference. */
2819 if ((type && AGGREGATE_TYPE_P (type))
2823 n_words = RS6000_ARG_SIZE (mode, type);
2825 /* Long long is put in odd registers. */
2826 if (n_words == 2 && (gregno & 1) == 0)
2829 /* Long long is not split between registers and stack. */
2830 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2831 return gen_rtx_REG (mode, gregno);
/* AIX / other ABIs: compute the word offset including any doubleword
   padding for 64-bit-aligned arguments on 32-bit targets.  */
2838 int align = (TARGET_32BIT && (cum->words & 1) != 0
2839 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2840 int align_words = cum->words + align;
/* Variable-size types are not passed in registers here.  */
2842 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2845 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2848 || ((cum->nargs_prototype > 0)
2849 /* IBM AIX extended its linkage convention definition always
2850 to require FP args after register save area hole on the
2852 && (DEFAULT_ABI != ABI_AIX
2854 || (align_words < GP_ARG_NUM_REG))))
2855 return gen_rtx_REG (mode, cum->fregno);
/* Unprototyped FP arg: return a PARALLEL naming both the GP slot
   (or the partial-in-register portion) and the FP register.  */
2857 return gen_rtx_PARALLEL (mode,
2859 gen_rtx_EXPR_LIST (VOIDmode,
2860 ((align_words >= GP_ARG_NUM_REG)
2863 + RS6000_ARG_SIZE (mode, type)
2865 /* If this is partially on the stack, then
2866 we only include the portion actually
2867 in registers here. */
2868 ? gen_rtx_REG (SImode,
2869 GP_ARG_MIN_REG + align_words)
2870 : gen_rtx_REG (mode,
2871 GP_ARG_MIN_REG + align_words))),
2873 gen_rtx_EXPR_LIST (VOIDmode,
2874 gen_rtx_REG (mode, cum->fregno),
/* Plain integer argument still within the 8 GP argument registers.  */
2877 else if (align_words < GP_ARG_NUM_REG)
2878 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
/* NOTE(review): line numbers jump (2886 -> 2889, 2895 -> 2898, ...), so
   the early returns for the V.4 / FP / AltiVec cases are on omitted
   lines.  Code lines kept byte-identical; comments only added.  */
2884 /* For an arg passed partly in registers and partly in memory,
2885 this is the number of registers used.
2886 For args passed entirely in registers or entirely in memory, zero. */
2889 function_arg_partial_nregs (cum, mode, type, named)
2890 CUMULATIVE_ARGS *cum;
2891 enum machine_mode mode;
2893 int named ATTRIBUTE_UNUSED;
/* V.4 never splits an argument (its return is on an omitted line).  */
2895 if (DEFAULT_ABI == ABI_V4)
2898 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2899 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2901 if (cum->nargs_prototype >= 0)
/* Argument straddles the end of the GP argument registers: report how
   many registers hold its leading portion.  */
2905 if (cum->words < GP_ARG_NUM_REG
2906 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2908 int ret = GP_ARG_NUM_REG - cum->words;
2909 if (ret && TARGET_DEBUG_ARG)
2910 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
/* NOTE(review): line numbers jump (2922 -> 2924, 2934 -> 2937), so the
   full condition (likely the IEEE-quad/TFmode arm -- TODO confirm) and
   the returns are on omitted lines.  Code kept byte-identical.  */
2918 /* A C expression that indicates when an argument must be passed by
2919 reference. If nonzero for an argument, a copy of that argument is
2920 made in memory and a pointer to the argument is passed instead of
2921 the argument itself. The pointer is passed in whatever way is
2922 appropriate for passing a pointer to that type.
2924 Under V.4, structures and unions are passed by reference. */
2927 function_arg_pass_by_reference (cum, mode, type, named)
2928 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2929 enum machine_mode mode ATTRIBUTE_UNUSED;
2931 int named ATTRIBUTE_UNUSED;
/* Only the V.4 ABI passes aggregates by reference.  */
2933 if (DEFAULT_ABI == ABI_V4
2934 && ((type && AGGREGATE_TYPE_P (type))
2937 if (TARGET_DEBUG_ARG)
2938 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
/* NOTE(review): embedded line numbers jump throughout (2947 -> 2951,
   2979 -> 2981, ...), so omitted lines include parameter declarations,
   the next_cum copy, braces and loop increments.  Code lines kept
   byte-identical; comments only added.  */
2946 /* Perform any needed actions needed for a function that is receiving a
2947 variable number of arguments.
2951 MODE and TYPE are the mode and type of the current parameter.
2953 PRETEND_SIZE is a variable that should be set to the amount of stack
2954 that must be pushed by the prolog to pretend that our caller pushed
2957 Normally, this macro will push all remaining incoming registers on the
2958 stack and set PRETEND_SIZE to the length of the registers pushed. */
2961 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2962 CUMULATIVE_ARGS *cum;
2963 enum machine_mode mode;
2969 CUMULATIVE_ARGS next_cum;
2970 int reg_size = TARGET_32BIT ? 4 : 8;
2971 rtx save_area = NULL_RTX, mem;
2972 int first_reg_offset, set;
/* Distinguish stdarg (prototype ends in ...) from old-style varargs.  */
2976 fntype = TREE_TYPE (current_function_decl);
2977 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2978 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2979 != void_type_node));
2981 /* For varargs, we do not want to skip the dummy va_dcl argument.
2982 For stdargs, we do want to skip the last named argument. */
2985 function_arg_advance (&next_cum, mode, type, 1);
2987 if (DEFAULT_ABI == ABI_V4)
2989 /* Indicate to allocate space on the stack for varargs save area. */
2990 cfun->machine->sysv_varargs_p = 1;
/* V.4 register save area sits below the frame's virtual stack vars.  */
2992 save_area = plus_constant (virtual_stack_vars_rtx,
2993 - RS6000_VARARGS_SIZE);
2995 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
/* Non-V.4 ABIs: save into the caller-provided incoming args area.  */
2999 first_reg_offset = next_cum.words;
3000 save_area = virtual_incoming_args_rtx;
3001 cfun->machine->sysv_varargs_p = 0;
3003 if (MUST_PASS_IN_STACK (mode, type))
3004 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
/* Dump the remaining incoming GP argument registers to memory.  */
3007 set = get_varargs_alias_set ();
3008 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3010 mem = gen_rtx_MEM (BLKmode,
3011 plus_constant (save_area,
3012 first_reg_offset * reg_size)),
3013 set_mem_alias_set (mem, set);
3014 set_mem_align (mem, BITS_PER_WORD);
3017 (GP_ARG_MIN_REG + first_reg_offset, mem,
3018 GP_ARG_NUM_REG - first_reg_offset,
3019 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3021 /* ??? Does ABI_V4 need this at all? */
3022 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
3025 /* Save FP registers if needed. */
3026 if (DEFAULT_ABI == ABI_V4
3027 && TARGET_HARD_FLOAT && ! no_rtl
3028 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3030 int fregno = next_cum.fregno;
3031 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3032 rtx lab = gen_label_rtx ();
3033 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
/* Conditional jump over the FP saves, keyed on the CR1 bit that V.4
   uses to record whether any FP args were passed in registers.  */
3035 emit_jump_insn (gen_rtx_SET (VOIDmode,
3037 gen_rtx_IF_THEN_ELSE (VOIDmode,
3038 gen_rtx_NE (VOIDmode, cr1,
3040 gen_rtx_LABEL_REF (VOIDmode, lab),
/* Store each remaining FP argument register into the save area
   (the fregno/off increments are on omitted lines).  */
3043 while (fregno <= FP_ARG_V4_MAX_REG)
3045 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3046 set_mem_alias_set (mem, set);
3047 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
/* NOTE(review): line numbers jump slightly (3075 -> 3077, 3077 -> 3080),
   so the field types of `overflow_arg_area' and `reg_save_area'
   (presumably pointer types -- TODO confirm) are on omitted lines.
   Code lines kept byte-identical; comments only added.  */
3056 /* Create the va_list data type. */
3059 rs6000_build_va_list ()
3061 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3063 /* For AIX, prefer 'char *' because that's what the system
3064 header files like. */
3065 if (DEFAULT_ABI != ABI_V4)
3066 return build_pointer_type (char_type_node);
/* V.4 va_list is a 4-field record: gpr/fpr counters plus two pointers.  */
3068 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3069 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3071 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3072 unsigned_char_type_node);
3073 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3074 unsigned_char_type_node);
3075 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3077 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3080 DECL_FIELD_CONTEXT (f_gpr) = record;
3081 DECL_FIELD_CONTEXT (f_fpr) = record;
3082 DECL_FIELD_CONTEXT (f_ovf) = record;
3083 DECL_FIELD_CONTEXT (f_sav) = record;
/* Chain the fields in declaration order and lay the record out.  */
3085 TREE_CHAIN (record) = type_decl;
3086 TYPE_NAME (record) = type_decl;
3087 TYPE_FIELDS (record) = f_gpr;
3088 TREE_CHAIN (f_gpr) = f_fpr;
3089 TREE_CHAIN (f_fpr) = f_ovf;
3090 TREE_CHAIN (f_ovf) = f_sav;
3092 layout_type (record);
3094 /* The correct type is an array type of one element. */
3095 return build_array_type (record, build_index_type (size_zero_node));
/* NOTE(review): line numbers jump (3102 -> 3106, 3153 -> 3155), so some
   parameter declarations and intermediate lines are omitted.  Code lines
   kept byte-identical; comments only added.  */
3098 /* Implement va_start. */
3101 rs6000_va_start (stdarg_p, valist, nextarg)
3102 int stdarg_p ATTRIBUTE_UNUSED;
3106 HOST_WIDE_INT words, n_gpr, n_fpr;
3107 tree f_gpr, f_fpr, f_ovf, f_sav;
3108 tree gpr, fpr, ovf, sav, t;
3110 /* Only SVR4 needs something special. */
3111 if (DEFAULT_ABI != ABI_V4)
3113 std_expand_builtin_va_start (1, valist, nextarg);
/* Walk the __va_list_tag fields in the order rs6000_build_va_list
   chained them: gpr, fpr, overflow_arg_area, reg_save_area.  */
3117 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3118 f_fpr = TREE_CHAIN (f_gpr);
3119 f_ovf = TREE_CHAIN (f_fpr);
3120 f_sav = TREE_CHAIN (f_ovf);
3122 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3123 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3124 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3125 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3126 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3128 /* Count number of gp and fp argument registers used. */
3129 words = current_function_args_info.words;
3130 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3131 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3133 if (TARGET_DEBUG_ARG)
3135 fputs ("va_start: words = ", stderr);
3136 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3137 fputs (", n_gpr = ", stderr);
3138 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3139 fputs (", n_fpr = ", stderr);
3140 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3141 putc ('\n', stderr);
/* Initialize the gpr and fpr counters in the va_list.  */
3144 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3145 TREE_SIDE_EFFECTS (t) = 1;
3146 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3148 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3149 TREE_SIDE_EFFECTS (t) = 1;
3150 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3152 /* Find the overflow area. */
3153 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3155 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3156 build_int_2 (words * UNITS_PER_WORD, 0));
3157 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3158 TREE_SIDE_EFFECTS (t) = 1;
3159 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3161 /* Find the register save area. */
/* Matches the save_area computed in setup_incoming_varargs:
   RS6000_VARARGS_SIZE below the virtual stack vars.  */
3162 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3163 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3164 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3165 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3166 TREE_SIDE_EFFECTS (t) = 1;
3167 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* NOTE(review): embedded line numbers jump in many places (3200 -> 3206,
   3211 -> 3220, ...), so the per-class initialization of reg/n_reg/
   sav_ofs/sav_scale, several assignments and braces are on omitted
   lines.  Code lines kept byte-identical; comments only added.  */
3170 /* Implement va_arg. */
3173 rs6000_va_arg (valist, type)
3176 tree f_gpr, f_fpr, f_ovf, f_sav;
3177 tree gpr, fpr, ovf, sav, reg, t, u;
3178 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3179 rtx lab_false, lab_over, addr_rtx, r;
3181 if (DEFAULT_ABI != ABI_V4)
3182 return std_expand_builtin_va_arg (valist, type);
/* Same field walk as rs6000_va_start.  */
3184 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3185 f_fpr = TREE_CHAIN (f_gpr);
3186 f_ovf = TREE_CHAIN (f_fpr);
3187 f_sav = TREE_CHAIN (f_ovf);
3189 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3190 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3191 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3192 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3193 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3195 size = int_size_in_bytes (type);
3196 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Classify the argument: by-reference, FP register, or GP register.
   The assignments to reg/n_reg/sav_ofs/sav_scale are omitted here.  */
3198 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3200 /* Aggregates and long doubles are passed by reference. */
3206 size = UNITS_PER_WORD;
3209 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3211 /* FP args go in FP registers, if present. */
3220 /* Otherwise into GP registers. */
3228 /* Pull the value out of the saved registers ... */
3230 lab_false = gen_label_rtx ();
3231 lab_over = gen_label_rtx ();
3232 addr_rtx = gen_reg_rtx (Pmode);
3234 /* AltiVec vectors never go in registers. */
3235 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
3237 TREE_THIS_VOLATILE (reg) = 1;
/* If the register counter is already past the last usable arg reg,
   branch to lab_false and read from the overflow area instead.  */
3238 emit_cmp_and_jump_insns
3239 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3240 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3243 /* Long long is aligned in the registers. */
3246 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3247 build_int_2 (n_reg - 1, 0));
3248 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3249 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3250 TREE_SIDE_EFFECTS (u) = 1;
3251 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* addr = sav + sav_ofs + (reg++ * sav_scale).  */
3255 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3259 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3260 build_int_2 (n_reg, 0));
3261 TREE_SIDE_EFFECTS (u) = 1;
3263 u = build1 (CONVERT_EXPR, integer_type_node, u);
3264 TREE_SIDE_EFFECTS (u) = 1;
3266 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3267 TREE_SIDE_EFFECTS (u) = 1;
3269 t = build (PLUS_EXPR, ptr_type_node, t, u);
3270 TREE_SIDE_EFFECTS (t) = 1;
3272 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3274 emit_move_insn (addr_rtx, r);
3276 emit_jump_insn (gen_jump (lab_over));
3280 emit_label (lab_false);
3282 /* ... otherwise out of the overflow area. */
3284 /* Make sure we don't find reg 7 for the next int arg.
3286 All AltiVec vectors go in the overflow area. So in the AltiVec
3287 case we need to get the vectors from the overflow area, but
3288 remember where the GPRs and FPRs are. */
3289 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
3290 || !TARGET_ALTIVEC))
/* Saturate the register counter so no further GP regs are used.  */
3292 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3293 TREE_SIDE_EFFECTS (t) = 1;
3294 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3297 /* Care for on-stack alignment if needed. */
3304 /* AltiVec vectors are 16 byte aligned. */
3305 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
/* Round ovf up: t = (ovf + align) & ~align (align set on omitted lines).  */
3310 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3311 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3315 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3317 emit_move_insn (addr_rtx, r);
/* Bump the overflow pointer past the argument just fetched.  */
3319 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3320 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3321 TREE_SIDE_EFFECTS (t) = 1;
3322 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3324 emit_label (lab_over);
/* By-reference case: addr_rtx holds a pointer to the pointer; load
   through it (presumably guarded by indirect_p on an omitted line).  */
3328 r = gen_rtx_MEM (Pmode, addr_rtx);
3329 set_mem_alias_set (r, get_varargs_alias_set ());
3330 emit_move_insn (addr_rtx, r);
/* Register a target builtin only when its MASK bits are enabled in
   target_flags (the trailing macro arguments are on omitted lines).  */
3338 #define def_builtin(MASK, NAME, TYPE, CODE) \
3340 if ((MASK) & target_flags) \
3341 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
/* One table row per builtin: enabling mask, insn pattern, user-visible
   name and the rs6000 builtin enum code.  */
3345 struct builtin_description
3347 const unsigned int mask;
3348 const enum insn_code icode;
3349 const char *const name;
3350 const enum rs6000_builtins code;
/* NOTE(review): the closing `};' of this table is on an omitted line
   (numbers jump 3379 -> 3382).  Entries kept byte-identical.  */
3353 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3355 static const struct builtin_description bdesc_3arg[] =
3357 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3358 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3359 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3360 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3361 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3362 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3363 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3364 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3365 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3366 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3367 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3368 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3369 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3370 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3371 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3372 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3373 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3374 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3375 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3376 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3377 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3378 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3379 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
/* NOTE(review): the closing `};' of this table is on an omitted line
   (numbers jump 3389 -> 3392).  Entries kept byte-identical.  */
3382 /* DST operations: void foo (void *, const int, const char). */
3384 static const struct builtin_description bdesc_dst[] =
3386 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3387 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3388 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3389 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3392 /* Simple binary operations: VECc = foo (VECa, VECb). */
3394 static const struct builtin_description bdesc_2arg[] =
3396 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3397 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3398 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3399 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3400 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3401 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3403 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3404 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3405 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3406 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3407 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3408 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3409 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3410 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3411 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3412 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3413 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3415 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3416 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3417 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3418 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3419 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3420 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3421 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3422 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3423 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3424 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3425 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3426 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3427 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3428 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3429 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3430 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3431 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3432 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3433 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3434 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3435 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3436 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3437 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3438 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3439 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3440 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3441 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3442 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3443 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3444 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3445 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3446 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3447 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3448 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3449 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3450 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3451 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3452 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3453 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3454 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3455 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3456 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3457 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3458 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3459 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3460 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3461 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3462 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3463 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3464 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3465 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3466 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3467 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3468 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3469 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3470 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3471 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3472 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3473 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3474 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3475 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3476 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3477 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3478 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3479 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3480 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3481 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3482 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3483 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3484 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3485 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3486 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3487 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3488 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3489 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3490 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3491 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3492 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3493 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3494 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3495 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3496 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3497 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3498 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3499 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3500 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3501 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3502 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3503 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3504 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3505 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3506 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3507 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3508 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3511 /* AltiVec predicates.  */
/* Descriptor for one AltiVec predicate builtin (vec_all_* / vec_any_*):
   MASK  -- target flag bits (e.g. MASK_ALTIVEC) that must be enabled;
   ICODE -- insn pattern used to emit the comparison;
   NAME  -- the builtin's source-level name;
   CODE  -- the rs6000_builtins enumeration value.
   NOTE(review): this extract is missing interior lines; the table rows
   below carry a fifth field (the "*vcmp...." opcode string, used as
   dp->opcode later) whose declaration is presumably on a missing line.  */
3513 struct builtin_description_predicates
3515   const unsigned int mask;
3516   const enum insn_code icode;
3518   const char *const name;
3519   const enum rs6000_builtins code;
/* Table of AltiVec predicate builtins.  Each row maps a
   __builtin_altivec_vcmp*_p function to the insn pattern for its element
   mode (v4sf, v4si, v8hi or v16qi) and the recording-form compare
   opcode ("*vcmp....") emitted through altivec_expand_predicate_builtin.  */
3522 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3524   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3525   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3526   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3527   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3528   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3529   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3530   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3531   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3532   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3533   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3534   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3535   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3536   { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3539 /* ABS* operations.  */
/* Table of vector absolute-value builtins.  The plain abs_* entries use
   generic absvNm2 patterns; the abss_* (saturating) entries use
   AltiVec-specific patterns.  Expanded via altivec_expand_abs_builtin,
   which supplies two scratch registers to each pattern.  */
3541 static const struct builtin_description bdesc_abs[] =
3543   { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
3544   { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
3545   { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
3546   { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
3547   { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
3548   { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
3549   { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
3552 /* Simple unary operations: VECb = foo (unsigned literal) or VECb = foo (VECa).  */
/* Table of one-operand AltiVec builtins, expanded through
   rs6000_expand_unop_builtin.  The vspltis* entries take a 5-bit signed
   literal (range-checked in the expander); the rest take one vector.  */
3555 static const struct builtin_description bdesc_1arg[] =
3557   { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3558   { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3559   { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3560   { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3561   { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3562   { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3563   { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3564   { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3565   { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3566   { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3567   { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3568   { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3569   { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3570   { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3571   { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3572   { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3573   { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
/* Expand a one-operand builtin: evaluate the single argument from
   ARGLIST, range-check literal-only patterns, and emit insn ICODE with
   TARGET as the destination.  NOTE(review): this extract is missing
   interior lines (e.g. the switch header and function epilogue).  */
3577 rs6000_expand_unop_builtin (icode, arglist, target)
3578      enum insn_code icode;
3583   tree arg0 = TREE_VALUE (arglist);
3584   rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3585   enum machine_mode tmode = insn_data[icode].operand[0].mode;
3586   enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3588   /* If we got invalid arguments bail out before generating bad rtl.  */
3589   if (arg0 == error_mark_node)
3594       /* Only allow 5-bit *signed* literals.  */
3595     case CODE_FOR_altivec_vspltisb:
3596     case CODE_FOR_altivec_vspltish:
3597     case CODE_FOR_altivec_vspltisw:
3598       if (GET_CODE (op0) != CONST_INT
3599 	  || INTVAL (op0) > 0x1f
3600 	  || INTVAL (op0) < -0x1f)
3602 	  error ("argument 1 must be a 5-bit signed literal");
      /* Make sure TARGET is a register of the pattern's output mode;
	 otherwise grab a fresh pseudo.  */
3611       || GET_MODE (target) != tmode
3612       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3613     target = gen_reg_rtx (tmode);
3615   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3616     op0 = copy_to_mode_reg (mode0, op0);
3618   pat = GEN_FCN (icode) (target, op0);
/* Expand a vector absolute-value builtin (see bdesc_abs).  Unlike the
   generic unop expander, the abs patterns take two scratch registers in
   addition to the source operand.  */
3627 altivec_expand_abs_builtin (icode, arglist, target)
3628      enum insn_code icode;
3632   rtx pat, scratch1, scratch2;
3633   tree arg0 = TREE_VALUE (arglist);
3634   rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3635   enum machine_mode tmode = insn_data[icode].operand[0].mode;
3636   enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3638   /* If we have invalid arguments, bail out before generating bad rtl.  */
3639   if (arg0 == error_mark_node)
3643       || GET_MODE (target) != tmode
3644       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3645     target = gen_reg_rtx (tmode);
3647   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3648     op0 = copy_to_mode_reg (mode0, op0);
  /* The abs patterns need two temporaries in the input's mode.  */
3650   scratch1 = gen_reg_rtx (mode0);
3651   scratch2 = gen_reg_rtx (mode0);
3653   pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
/* Expand a two-operand builtin.  Patterns whose second operand must be
   a small immediate (vcfux/vcfsx/vctsxs/vctuxs/vsplt*) are checked here
   so we report an error instead of emitting bad rtl.
   NOTE(review): this extract is missing interior lines (switch header,
   error-path braces, epilogue).  */
3662 rs6000_expand_binop_builtin (icode, arglist, target)
3663      enum insn_code icode;
3668   tree arg0 = TREE_VALUE (arglist);
3669   tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3670   rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3671   rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3672   enum machine_mode tmode = insn_data[icode].operand[0].mode;
3673   enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3674   enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3676   /* If we got invalid arguments bail out before generating bad rtl.  */
3677   if (arg0 == error_mark_node || arg1 == error_mark_node)
3682       /* Only allow 5-bit unsigned literals.  */
3683     case CODE_FOR_altivec_vcfux:
3684     case CODE_FOR_altivec_vcfsx:
3685     case CODE_FOR_altivec_vctsxs:
3686     case CODE_FOR_altivec_vctuxs:
3687     case CODE_FOR_altivec_vspltb:
3688     case CODE_FOR_altivec_vsplth:
3689     case CODE_FOR_altivec_vspltw:
3690       if (TREE_CODE (arg1) != INTEGER_CST
3691 	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
3693 	  error ("argument 2 must be a 5-bit unsigned literal");
3702       || GET_MODE (target) != tmode
3703       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3704     target = gen_reg_rtx (tmode);
3706   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3707     op0 = copy_to_mode_reg (mode0, op0);
3708   if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3709     op1 = copy_to_mode_reg (mode1, op1);
3711   pat = GEN_FCN (icode) (target, op0, op1);
/* Expand a vec_all_* / vec_any_* predicate builtin.  The first
   argument (CR6_FORM) is a compile-time constant selecting which CR6
   bit interpretation the caller wants; the actual compare is emitted
   into a scratch register and the SImode result is extracted from CR6
   with one of the gen_cr6_test_* patterns.  OPCODE is passed to the
   predicate pattern as a SYMBOL_REF.  */
3720 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3721      enum insn_code icode;
3727   tree cr6_form = TREE_VALUE (arglist);
3728   tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3729   tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3730   rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3731   rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3732   enum machine_mode tmode = SImode;
3733   enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3734   enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  /* The CR6 selector must be a literal constant.  */
3737   if (TREE_CODE (cr6_form) != INTEGER_CST)
3739       error ("argument 1 of __builtin_altivec_predicate must be a constant");
3743     cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3748   /* If we have invalid arguments, bail out before generating bad rtl.  */
3749   if (arg0 == error_mark_node || arg1 == error_mark_node)
3753       || GET_MODE (target) != tmode
3754       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3755     target = gen_reg_rtx (tmode);
3757   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3758     op0 = copy_to_mode_reg (mode0, op0);
3759   if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3760     op1 = copy_to_mode_reg (mode1, op1);
3762   scratch = gen_reg_rtx (mode0);
3764   pat = GEN_FCN (icode) (scratch, op0, op1,
3765 			 gen_rtx (SYMBOL_REF, Pmode, opcode));
3770   /* The vec_any* and vec_all* predicates use the same opcodes for two
3771      different operations, but the bits in CR6 will be different
3772      depending on what information we want.  So we have to play tricks
3773      with CR6 to get the right bits out.
3775      If you think this is disgusting, look at the specs for the
3776      AltiVec predicates.  */
3778   switch (cr6_form_int)
3781       emit_insn (gen_cr6_test_for_zero (target));
3784       emit_insn (gen_cr6_test_for_zero_reverse (target));
3787       emit_insn (gen_cr6_test_for_lt (target));
3790       emit_insn (gen_cr6_test_for_lt_reverse (target));
3793       error ("argument 1 of __builtin_altivec_predicate is out of range");
/* Expand a vector-store builtin (stvx family).  Note the operand
   permutation: the source argument order is (value, index?, address?)
   while the insn pattern is invoked as (op1, op2, op0) -- each argument
   is validated against the pattern operand slot it will occupy.
   NOTE(review): the exact meaning of each argument can't be confirmed
   from this gappy extract -- check the stvx insn patterns.  */
3801 altivec_expand_stv_builtin (icode, arglist)
3802      enum insn_code icode;
3805   tree arg0 = TREE_VALUE (arglist);
3806   tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3807   tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3808   rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3809   rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3810   rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3812   enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3813   enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3814   enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3816   /* Invalid arguments.  Bail before doing anything stoopid!  */
3817   if (arg0 == error_mark_node
3818       || arg1 == error_mark_node
3819       || arg2 == error_mark_node)
3822   if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3823     op0 = copy_to_mode_reg (mode2, op0);
3824   if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3825     op1 = copy_to_mode_reg (mode0, op1);
3826   if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3827     op2 = copy_to_mode_reg (mode1, op2);
3829   pat = GEN_FCN (icode) (op1, op2, op0);
/* Expand a three-operand builtin.  The vsldoi patterns require their
   third argument to be a 4-bit unsigned literal (the shift amount),
   which is checked here before any rtl is generated.  */
3836 rs6000_expand_ternop_builtin (icode, arglist, target)
3837      enum insn_code icode;
3842   tree arg0 = TREE_VALUE (arglist);
3843   tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3844   tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3845   rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3846   rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3847   rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3848   enum machine_mode tmode = insn_data[icode].operand[0].mode;
3849   enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3850   enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3851   enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3853   /* If we got invalid arguments bail out before generating bad rtl.  */
3854   if (arg0 == error_mark_node
3855       || arg1 == error_mark_node
3856       || arg2 == error_mark_node)
3861       /* Only allow 4-bit unsigned literals.  */
3862     case CODE_FOR_altivec_vsldoi_4sf:
3863     case CODE_FOR_altivec_vsldoi_4si:
3864     case CODE_FOR_altivec_vsldoi_8hi:
3865     case CODE_FOR_altivec_vsldoi_16qi:
3866       if (TREE_CODE (arg2) != INTEGER_CST
3867 	  || TREE_INT_CST_LOW (arg2) & ~0xf)
3869 	  error ("argument 3 must be a 4-bit unsigned literal");
3878       || GET_MODE (target) != tmode
3879       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3880     target = gen_reg_rtx (tmode);
3882   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3883     op0 = copy_to_mode_reg (mode0, op0);
3884   if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3885     op1 = copy_to_mode_reg (mode1, op1);
3886   if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3887     op2 = copy_to_mode_reg (mode2, op2);
3889   pat = GEN_FCN (icode) (target, op0, op1, op2);
3897 /* Expand the lvx builtins.  */
/* Dispatch ALTIVEC_BUILTIN_LD_INTERNAL_* to the matching lvx insn for
   its element mode.  The pointer argument is wrapped in a MEM of the
   pattern's operand mode if it doesn't already satisfy the operand
   predicate.  *EXPANDEDP presumably reports whether this function
   handled EXP -- the setting lines are missing from this extract.  */
3899 altivec_expand_ld_builtin (exp, target, expandedp)
3904   tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3905   tree arglist = TREE_OPERAND (exp, 1);
3906   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3908   enum machine_mode tmode, mode0;
3910   enum insn_code icode;
3914     case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3915       icode = CODE_FOR_altivec_lvx_16qi;
3917     case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3918       icode = CODE_FOR_altivec_lvx_8hi;
3920     case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3921       icode = CODE_FOR_altivec_lvx_4si;
3923     case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3924       icode = CODE_FOR_altivec_lvx_4sf;
3933   arg0 = TREE_VALUE (arglist);
3934   op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3935   tmode = insn_data[icode].operand[0].mode;
3936   mode0 = insn_data[icode].operand[1].mode;
3939       || GET_MODE (target) != tmode
3940       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3941     target = gen_reg_rtx (tmode);
  /* Address argument: force it into a register and dereference it.  */
3943   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3944     op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3946   pat = GEN_FCN (icode) (target, op0);
3953 /* Expand the stvx builtins.  */
/* Dispatch ALTIVEC_BUILTIN_ST_INTERNAL_* to the matching stvx insn for
   its element mode.  Operand 0 is the destination address (wrapped in a
   MEM), operand 1 the vector value to store; TARGET is unused since a
   store produces no value.  */
3955 altivec_expand_st_builtin (exp, target, expandedp)
3957      rtx target ATTRIBUTE_UNUSED;
3960   tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3961   tree arglist = TREE_OPERAND (exp, 1);
3962   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3964   enum machine_mode mode0, mode1;
3966   enum insn_code icode;
3970     case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3971       icode = CODE_FOR_altivec_stvx_16qi;
3973     case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3974       icode = CODE_FOR_altivec_stvx_8hi;
3976     case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3977       icode = CODE_FOR_altivec_stvx_4si;
3979     case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3980       icode = CODE_FOR_altivec_stvx_4sf;
3987   arg0 = TREE_VALUE (arglist);
3988   arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3989   op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3990   op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3991   mode0 = insn_data[icode].operand[0].mode;
3992   mode1 = insn_data[icode].operand[1].mode;
3994   if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3995     op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3996   if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3997     op1 = copy_to_mode_reg (mode1, op1);
3999   pat = GEN_FCN (icode) (op0, op1);
4007 /* Expand the dst builtins.  */
/* Expand the data-stream-touch (dst*) builtins by searching bdesc_dst
   for FCODE.  The third argument is the stream tag and must be a 2-bit
   unsigned literal; it is passed to the pattern unmodified (no
   copy_to_mode_reg), since the insn requires an immediate.  */
4009 altivec_expand_dst_builtin (exp, target, expandedp)
4011      rtx target ATTRIBUTE_UNUSED;
4014   tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4015   tree arglist = TREE_OPERAND (exp, 1);
4016   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4017   tree arg0, arg1, arg2;
4018   enum machine_mode mode0, mode1, mode2;
4019   rtx pat, op0, op1, op2;
4020   struct builtin_description *d;
4025   /* Handle DST variants.  */
4026   d = (struct builtin_description *) bdesc_dst;
4027   for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4028     if (d->code == fcode)
4030 	arg0 = TREE_VALUE (arglist);
4031 	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4032 	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4033 	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4034 	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4035 	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4036 	mode0 = insn_data[d->icode].operand[0].mode;
4037 	mode1 = insn_data[d->icode].operand[1].mode;
4038 	mode2 = insn_data[d->icode].operand[2].mode;
4040 	/* Invalid arguments, bail out before generating bad rtl.  */
4041 	if (arg0 == error_mark_node
4042 	    || arg1 == error_mark_node
4043 	    || arg2 == error_mark_node)
4046 	if (TREE_CODE (arg2) != INTEGER_CST
4047 	    || TREE_INT_CST_LOW (arg2) & ~0x3)
4049 	    error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4053 	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4054 	  op0 = copy_to_mode_reg (mode0, op0);
4055 	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4056 	  op1 = copy_to_mode_reg (mode1, op1);
4058 	pat = GEN_FCN (d->icode) (op0, op1, op2);
4069 /* Expand the builtin in EXP and store the result in TARGET.  Store
4070    true in *EXPANDEDP if we found a builtin to expand.  */
/* Top-level AltiVec builtin expander.  Tries the ld/st/dst sub-expanders
   first, then handles the special cases inline (stores, VSCR access,
   data-stream stop), then searches the abs and predicate tables, and
   finally maps the LV* loads onto the generic binop expander.
   NOTE(review): interior lines (switch headers, returns, braces) are
   missing from this extract.  */
4072 altivec_expand_builtin (exp, target, expandedp)
4077   struct builtin_description *d;
4078   struct builtin_description_predicates *dp;
4080   enum insn_code icode;
4081   tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4082   tree arglist = TREE_OPERAND (exp, 1);
4085   enum machine_mode tmode, mode0;
4086   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  /* Sub-expanders; each reports success through *EXPANDEDP.  */
4088   target = altivec_expand_ld_builtin (exp, target, expandedp);
4092   target = altivec_expand_st_builtin (exp, target, expandedp);
4096   target = altivec_expand_dst_builtin (exp, target, expandedp);
  /* Stores, expanded with the (value, ..., address) permutation.  */
4104     case ALTIVEC_BUILTIN_STVX:
4105       return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4106     case ALTIVEC_BUILTIN_STVEBX:
4107       return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4108     case ALTIVEC_BUILTIN_STVEHX:
4109       return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4110     case ALTIVEC_BUILTIN_STVEWX:
4111       return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4112     case ALTIVEC_BUILTIN_STVXL:
4113       return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
  /* Read the vector status and control register.  */
4115     case ALTIVEC_BUILTIN_MFVSCR:
4116       icode = CODE_FOR_altivec_mfvscr;
4117       tmode = insn_data[icode].operand[0].mode;
4120 	  || GET_MODE (target) != tmode
4121 	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4122 	target = gen_reg_rtx (tmode);
4124       pat = GEN_FCN (icode) (target);
  /* Write the vector status and control register.  */
4130     case ALTIVEC_BUILTIN_MTVSCR:
4131       icode = CODE_FOR_altivec_mtvscr;
4132       arg0 = TREE_VALUE (arglist);
4133       op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4134       mode0 = insn_data[icode].operand[0].mode;
4136       /* If we got invalid arguments bail out before generating bad rtl.  */
4137       if (arg0 == error_mark_node)
4140       if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4141 	op0 = copy_to_mode_reg (mode0, op0);
4143       pat = GEN_FCN (icode) (op0);
4148     case ALTIVEC_BUILTIN_DSSALL:
4149       emit_insn (gen_altivec_dssall ());
4152     case ALTIVEC_BUILTIN_DSS:
4153       icode = CODE_FOR_altivec_dss;
4154       arg0 = TREE_VALUE (arglist);
4155       op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4156       mode0 = insn_data[icode].operand[0].mode;
4158       /* If we got invalid arguments bail out before generating bad rtl.  */
4159       if (arg0 == error_mark_node)
      /* The stream tag must be a 2-bit constant.  */
4162       if (TREE_CODE (arg0) != INTEGER_CST
4163 	  || TREE_INT_CST_LOW (arg0) & ~0x3)
4165 	  error ("argument to dss must be a 2-bit unsigned literal");
4169       if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4170 	op0 = copy_to_mode_reg (mode0, op0);
4172       emit_insn (gen_altivec_dss (op0));
4176   /* Expand abs* operations.  */
4177   d = (struct builtin_description *) bdesc_abs;
4178   for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4179     if (d->code == fcode)
4180       return altivec_expand_abs_builtin (d->icode, arglist, target);
4182   /* Expand the AltiVec predicates.  */
4183   dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4184   for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4185     if (dp->code == fcode)
4186       return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4188   /* LV* are funky.  We initialized them differently.  */
4191     case ALTIVEC_BUILTIN_LVSL:
4192       return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4194     case ALTIVEC_BUILTIN_LVSR:
4195       return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4197     case ALTIVEC_BUILTIN_LVEBX:
4198       return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4200     case ALTIVEC_BUILTIN_LVEHX:
4201       return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4203     case ALTIVEC_BUILTIN_LVEWX:
4204       return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4206     case ALTIVEC_BUILTIN_LVXL:
4207       return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4209     case ALTIVEC_BUILTIN_LVX:
4210       return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
4221 /* Expand an expression EXP that calls a built-in function,
4222    with result going to TARGET if that's convenient
4223    (and in mode MODE if that's convenient).
4224    SUBTARGET may be used as the target for computing one of EXP's operands.
4225    IGNORE is nonzero if the value is to be ignored.  */
/* TARGET_EXPAND_BUILTIN hook: delegate to altivec_expand_builtin first,
   then fall back to linear searches of the generic 1/2/3-operand
   tables keyed on the builtin's function code.  */
4228 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4231      rtx subtarget ATTRIBUTE_UNUSED;
4232      enum machine_mode mode ATTRIBUTE_UNUSED;
4233      int ignore ATTRIBUTE_UNUSED;
4235   tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4236   tree arglist = TREE_OPERAND (exp, 1);
4237   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4238   struct builtin_description *d;
  /* Try the AltiVec expander; `success' tells us whether it handled EXP.  */
4245       ret = altivec_expand_builtin (exp, target, &success);
4251   /* Handle simple unary operations.  */
4252   d = (struct builtin_description *) bdesc_1arg;
4253   for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4254     if (d->code == fcode)
4255       return rs6000_expand_unop_builtin (d->icode, arglist, target);
4257   /* Handle simple binary operations.  */
4258   d = (struct builtin_description *) bdesc_2arg;
4259   for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4260     if (d->code == fcode)
4261       return rs6000_expand_binop_builtin (d->icode, arglist, target);
4263   /* Handle simple ternary operations.  */
4264   d = (struct builtin_description *) bdesc_3arg;
4265   for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4266     if (d->code == fcode)
4267       return rs6000_expand_ternop_builtin (d->icode, arglist, target);
/* TARGET_INIT_BUILTINS hook: register the target builtins.  Only the
   AltiVec registration call is visible in this extract; the guard
   condition around it (presumably TARGET_ALTIVEC) is on a missing line.  */
4274 rs6000_init_builtins ()
4277     altivec_init_builtins ();
4281 altivec_init_builtins (void)
4283 struct builtin_description *d;
4284 struct builtin_description_predicates *dp;
4287 tree pint_type_node = build_pointer_type (integer_type_node);
4288 tree pvoid_type_node = build_pointer_type (void_type_node);
4289 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4290 tree pchar_type_node = build_pointer_type (char_type_node);
4291 tree pfloat_type_node = build_pointer_type (float_type_node);
4293 tree v4sf_ftype_v4sf_v4sf_v16qi
4294 = build_function_type_list (V4SF_type_node,
4295 V4SF_type_node, V4SF_type_node,
4296 V16QI_type_node, NULL_TREE);
4297 tree v4si_ftype_v4si_v4si_v16qi
4298 = build_function_type_list (V4SI_type_node,
4299 V4SI_type_node, V4SI_type_node,
4300 V16QI_type_node, NULL_TREE);
4301 tree v8hi_ftype_v8hi_v8hi_v16qi
4302 = build_function_type_list (V8HI_type_node,
4303 V8HI_type_node, V8HI_type_node,
4304 V16QI_type_node, NULL_TREE);
4305 tree v16qi_ftype_v16qi_v16qi_v16qi
4306 = build_function_type_list (V16QI_type_node,
4307 V16QI_type_node, V16QI_type_node,
4308 V16QI_type_node, NULL_TREE);
4309 tree v4si_ftype_char
4310 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
4311 tree v8hi_ftype_char
4312 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
4313 tree v16qi_ftype_char
4314 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
4315 tree v4sf_ftype_v4sf
4316 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
4317 tree v4si_ftype_pint
4318 = build_function_type_list (V4SI_type_node, pint_type_node, NULL_TREE);
4319 tree v8hi_ftype_pshort
4320 = build_function_type_list (V8HI_type_node, pshort_type_node, NULL_TREE);
4321 tree v16qi_ftype_pchar
4322 = build_function_type_list (V16QI_type_node, pchar_type_node, NULL_TREE);
4323 tree v4sf_ftype_pfloat
4324 = build_function_type_list (V4SF_type_node, pfloat_type_node, NULL_TREE);
4325 tree v8hi_ftype_v16qi
4326 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
4327 tree void_ftype_pvoid_int_char
4328 = build_function_type_list (void_type_node,
4329 pvoid_type_node, integer_type_node,
4330 char_type_node, NULL_TREE);
4331 tree void_ftype_pint_v4si
4332 = build_function_type_list (void_type_node,
4333 pint_type_node, V4SI_type_node, NULL_TREE);
4334 tree void_ftype_pshort_v8hi
4335 = build_function_type_list (void_type_node,
4336 pshort_type_node, V8HI_type_node, NULL_TREE);
4337 tree void_ftype_pchar_v16qi
4338 = build_function_type_list (void_type_node,
4339 pchar_type_node, V16QI_type_node, NULL_TREE);
4340 tree void_ftype_pfloat_v4sf
4341 = build_function_type_list (void_type_node,
4342 pfloat_type_node, V4SF_type_node, NULL_TREE);
4343 tree void_ftype_v4si
4344 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
4345 tree void_ftype_v4si_int_pvoid
4346 = build_function_type_list (void_type_node,
4347 V4SI_type_node, integer_type_node,
4348 pvoid_type_node, NULL_TREE);
4350 tree void_ftype_v16qi_int_pvoid
4351 = build_function_type_list (void_type_node,
4352 V16QI_type_node, integer_type_node,
4353 pvoid_type_node, NULL_TREE);
4354 tree void_ftype_v8hi_int_pvoid
4355 = build_function_type_list (void_type_node,
4356 V8HI_type_node, integer_type_node,
4357 pvoid_type_node, NULL_TREE);
4359 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
4360 tree void_ftype_void
4361 = build_function_type (void_type_node, void_list_node);
4362 tree v8hi_ftype_void
4363 = build_function_type (V8HI_type_node, void_list_node);
4365 tree v4si_ftype_v4si_v4si
4366 = build_function_type_list (V4SI_type_node,
4367 V4SI_type_node, V4SI_type_node, NULL_TREE);
4368 tree v4sf_ftype_v4si_char
4369 = build_function_type_list (V4SF_type_node,
4370 V4SI_type_node, char_type_node, NULL_TREE);
4371 tree v4si_ftype_v4sf_char
4372 = build_function_type_list (V4SI_type_node,
4373 V4SF_type_node, char_type_node, NULL_TREE);
4374 tree v4si_ftype_v4si_char
4375 = build_function_type_list (V4SI_type_node,
4376 V4SI_type_node, char_type_node, NULL_TREE);
4377 tree v8hi_ftype_v8hi_char
4378 = build_function_type_list (V8HI_type_node,
4379 V8HI_type_node, char_type_node, NULL_TREE);
4380 tree v16qi_ftype_v16qi_char
4381 = build_function_type_list (V16QI_type_node,
4382 V16QI_type_node, char_type_node, NULL_TREE);
4383 tree v16qi_ftype_v16qi_v16qi_char
4384 = build_function_type_list (V16QI_type_node,
4385 V16QI_type_node, V16QI_type_node,
4386 char_type_node, NULL_TREE);
4387 tree v8hi_ftype_v8hi_v8hi_char
4388 = build_function_type_list (V8HI_type_node,
4389 V8HI_type_node, V8HI_type_node,
4390 char_type_node, NULL_TREE);
4391 tree v4si_ftype_v4si_v4si_char
4392 = build_function_type_list (V4SI_type_node,
4393 V4SI_type_node, V4SI_type_node,
4394 char_type_node, NULL_TREE);
4395 tree v4sf_ftype_v4sf_v4sf_char
4396 = build_function_type_list (V4SF_type_node,
4397 V4SF_type_node, V4SF_type_node,
4398 char_type_node, NULL_TREE);
4399 tree v4sf_ftype_v4sf_v4sf
4400 = build_function_type_list (V4SF_type_node,
4401 V4SF_type_node, V4SF_type_node, NULL_TREE);
4402 tree v4sf_ftype_v4sf_v4sf_v4si
4403 = build_function_type_list (V4SF_type_node,
4404 V4SF_type_node, V4SF_type_node,
4405 V4SI_type_node, NULL_TREE);
4406 tree v4sf_ftype_v4sf_v4sf_v4sf
4407 = build_function_type_list (V4SF_type_node,
4408 V4SF_type_node, V4SF_type_node,
4409 V4SF_type_node, NULL_TREE);
4410 tree v4si_ftype_v4si_v4si_v4si
4411 = build_function_type_list (V4SI_type_node,
4412 V4SI_type_node, V4SI_type_node,
4413 V4SI_type_node, NULL_TREE);
4414 tree v8hi_ftype_v8hi_v8hi
4415 = build_function_type_list (V8HI_type_node,
4416 V8HI_type_node, V8HI_type_node, NULL_TREE);
4417 tree v8hi_ftype_v8hi_v8hi_v8hi
4418 = build_function_type_list (V8HI_type_node,
4419 V8HI_type_node, V8HI_type_node,
4420 V8HI_type_node, NULL_TREE);
4421 tree v4si_ftype_v8hi_v8hi_v4si
4422 = build_function_type_list (V4SI_type_node,
4423 V8HI_type_node, V8HI_type_node,
4424 V4SI_type_node, NULL_TREE);
4425 tree v4si_ftype_v16qi_v16qi_v4si
4426 = build_function_type_list (V4SI_type_node,
4427 V16QI_type_node, V16QI_type_node,
4428 V4SI_type_node, NULL_TREE);
4429 tree v16qi_ftype_v16qi_v16qi
4430 = build_function_type_list (V16QI_type_node,
4431 V16QI_type_node, V16QI_type_node, NULL_TREE);
4432 tree v4si_ftype_v4sf_v4sf
4433 = build_function_type_list (V4SI_type_node,
4434 V4SF_type_node, V4SF_type_node, NULL_TREE);
4435 tree v4si_ftype_v4si
4436 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
4437 tree v8hi_ftype_v8hi
4438 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
4439 tree v16qi_ftype_v16qi
4440 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
4441 tree v8hi_ftype_v16qi_v16qi
4442 = build_function_type_list (V8HI_type_node,
4443 V16QI_type_node, V16QI_type_node, NULL_TREE);
4444 tree v4si_ftype_v8hi_v8hi
4445 = build_function_type_list (V4SI_type_node,
4446 V8HI_type_node, V8HI_type_node, NULL_TREE);
4447 tree v8hi_ftype_v4si_v4si
4448 = build_function_type_list (V8HI_type_node,
4449 V4SI_type_node, V4SI_type_node, NULL_TREE);
4450 tree v16qi_ftype_v8hi_v8hi
4451 = build_function_type_list (V16QI_type_node,
4452 V8HI_type_node, V8HI_type_node, NULL_TREE);
4453 tree v4si_ftype_v16qi_v4si
4454 = build_function_type_list (V4SI_type_node,
4455 V16QI_type_node, V4SI_type_node, NULL_TREE);
4456 tree v4si_ftype_v16qi_v16qi
4457 = build_function_type_list (V4SI_type_node,
4458 V16QI_type_node, V16QI_type_node, NULL_TREE);
4459 tree v4si_ftype_v8hi_v4si
4460 = build_function_type_list (V4SI_type_node,
4461 V8HI_type_node, V4SI_type_node, NULL_TREE);
4462 tree v4si_ftype_v8hi
4463 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
4464 tree int_ftype_v4si_v4si
4465 = build_function_type_list (integer_type_node,
4466 V4SI_type_node, V4SI_type_node, NULL_TREE);
4467 tree int_ftype_v4sf_v4sf
4468 = build_function_type_list (integer_type_node,
4469 V4SF_type_node, V4SF_type_node, NULL_TREE);
4470 tree int_ftype_v16qi_v16qi
4471 = build_function_type_list (integer_type_node,
4472 V16QI_type_node, V16QI_type_node, NULL_TREE);
4473 tree int_ftype_int_v4si_v4si
4474 = build_function_type_list (integer_type_node,
4475 integer_type_node, V4SI_type_node,
4476 V4SI_type_node, NULL_TREE);
4477 tree int_ftype_int_v4sf_v4sf
4478 = build_function_type_list (integer_type_node,
4479 integer_type_node, V4SF_type_node,
4480 V4SF_type_node, NULL_TREE);
4481 tree int_ftype_int_v8hi_v8hi
4482 = build_function_type_list (integer_type_node,
4483 integer_type_node, V8HI_type_node,
4484 V8HI_type_node, NULL_TREE);
4485 tree int_ftype_int_v16qi_v16qi
4486 = build_function_type_list (integer_type_node,
4487 integer_type_node, V16QI_type_node,
4488 V16QI_type_node, NULL_TREE);
4489 tree v16qi_ftype_int_pvoid
4490 = build_function_type_list (V16QI_type_node,
4491 integer_type_node, pvoid_type_node, NULL_TREE);
4492 tree v4si_ftype_int_pvoid
4493 = build_function_type_list (V4SI_type_node,
4494 integer_type_node, pvoid_type_node, NULL_TREE);
4495 tree v8hi_ftype_int_pvoid
4496 = build_function_type_list (V8HI_type_node,
4497 integer_type_node, pvoid_type_node, NULL_TREE);
4498 tree int_ftype_v8hi_v8hi
4499 = build_function_type_list (integer_type_node,
4500 V8HI_type_node, V8HI_type_node, NULL_TREE);
4502 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4503 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4504 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4505 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4506 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4507 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4508 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4509 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4510 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4511 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4512 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4513 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4514 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4515 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4516 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4517 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4518 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4519 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4520 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4521 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4522 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4523 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4524 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4525 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4527 /* Add the simple ternary operators. */
4528 d = (struct builtin_description *) bdesc_3arg;
4529 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4532 enum machine_mode mode0, mode1, mode2, mode3;
4538 mode0 = insn_data[d->icode].operand[0].mode;
4539 mode1 = insn_data[d->icode].operand[1].mode;
4540 mode2 = insn_data[d->icode].operand[2].mode;
4541 mode3 = insn_data[d->icode].operand[3].mode;
4543 /* When all four are of the same mode. */
4544 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4549 type = v4si_ftype_v4si_v4si_v4si;
4552 type = v4sf_ftype_v4sf_v4sf_v4sf;
4555 type = v8hi_ftype_v8hi_v8hi_v8hi;
4558 type = v16qi_ftype_v16qi_v16qi_v16qi;
4564 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4569 type = v4si_ftype_v4si_v4si_v16qi;
4572 type = v4sf_ftype_v4sf_v4sf_v16qi;
4575 type = v8hi_ftype_v8hi_v8hi_v16qi;
4578 type = v16qi_ftype_v16qi_v16qi_v16qi;
4584 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4585 && mode3 == V4SImode)
4586 type = v4si_ftype_v16qi_v16qi_v4si;
4587 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4588 && mode3 == V4SImode)
4589 type = v4si_ftype_v8hi_v8hi_v4si;
4590 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4591 && mode3 == V4SImode)
4592 type = v4sf_ftype_v4sf_v4sf_v4si;
4594 /* vchar, vchar, vchar, 4 bit literal. */
4595 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4597 type = v16qi_ftype_v16qi_v16qi_char;
4599 /* vshort, vshort, vshort, 4 bit literal. */
4600 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4602 type = v8hi_ftype_v8hi_v8hi_char;
4604 /* vint, vint, vint, 4 bit literal. */
4605 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4607 type = v4si_ftype_v4si_v4si_char;
4609 /* vfloat, vfloat, vfloat, 4 bit literal. */
4610 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4612 type = v4sf_ftype_v4sf_v4sf_char;
4617 def_builtin (d->mask, d->name, type, d->code);
4620 /* Add the DST variants. */
4621 d = (struct builtin_description *) bdesc_dst;
4622 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4623 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4625 /* Initialize the predicates. */
4626 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4627 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4629 enum machine_mode mode1;
4632 mode1 = insn_data[dp->icode].operand[1].mode;
4637 type = int_ftype_int_v4si_v4si;
4640 type = int_ftype_int_v8hi_v8hi;
4643 type = int_ftype_int_v16qi_v16qi;
4646 type = int_ftype_int_v4sf_v4sf;
4652 def_builtin (dp->mask, dp->name, type, dp->code);
4655 /* Add the simple binary operators. */
4656 d = (struct builtin_description *) bdesc_2arg;
4657 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4659 enum machine_mode mode0, mode1, mode2;
4665 mode0 = insn_data[d->icode].operand[0].mode;
4666 mode1 = insn_data[d->icode].operand[1].mode;
4667 mode2 = insn_data[d->icode].operand[2].mode;
4669 /* When all three operands are of the same mode. */
4670 if (mode0 == mode1 && mode1 == mode2)
4675 type = v4sf_ftype_v4sf_v4sf;
4678 type = v4si_ftype_v4si_v4si;
4681 type = v16qi_ftype_v16qi_v16qi;
4684 type = v8hi_ftype_v8hi_v8hi;
4691 /* A few other combos we really don't want to do manually. */
4693 /* vint, vfloat, vfloat. */
4694 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4695 type = v4si_ftype_v4sf_v4sf;
4697 /* vshort, vchar, vchar. */
4698 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4699 type = v8hi_ftype_v16qi_v16qi;
4701 /* vint, vshort, vshort. */
4702 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4703 type = v4si_ftype_v8hi_v8hi;
4705 /* vshort, vint, vint. */
4706 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4707 type = v8hi_ftype_v4si_v4si;
4709 /* vchar, vshort, vshort. */
4710 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4711 type = v16qi_ftype_v8hi_v8hi;
4713 /* vint, vchar, vint. */
4714 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4715 type = v4si_ftype_v16qi_v4si;
4717 /* vint, vchar, vchar. */
4718 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4719 type = v4si_ftype_v16qi_v16qi;
4721 /* vint, vshort, vint. */
4722 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4723 type = v4si_ftype_v8hi_v4si;
4725 /* vint, vint, 5 bit literal. */
4726 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4727 type = v4si_ftype_v4si_char;
4729 /* vshort, vshort, 5 bit literal. */
4730 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4731 type = v8hi_ftype_v8hi_char;
4733 /* vchar, vchar, 5 bit literal. */
4734 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4735 type = v16qi_ftype_v16qi_char;
4737 /* vfloat, vint, 5 bit literal. */
4738 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4739 type = v4sf_ftype_v4si_char;
4741 /* vint, vfloat, 5 bit literal. */
4742 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4743 type = v4si_ftype_v4sf_char;
4746 else if (mode0 == SImode)
4751 type = int_ftype_v4si_v4si;
4754 type = int_ftype_v4sf_v4sf;
4757 type = int_ftype_v16qi_v16qi;
4760 type = int_ftype_v8hi_v8hi;
4770 def_builtin (d->mask, d->name, type, d->code);
4773 /* Initialize the abs* operators. */
4774 d = (struct builtin_description *) bdesc_abs;
4775 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4777 enum machine_mode mode0;
4780 mode0 = insn_data[d->icode].operand[0].mode;
4785 type = v4si_ftype_v4si;
4788 type = v8hi_ftype_v8hi;
4791 type = v16qi_ftype_v16qi;
4794 type = v4sf_ftype_v4sf;
4800 def_builtin (d->mask, d->name, type, d->code);
4803 /* Add the simple unary operators. */
4804 d = (struct builtin_description *) bdesc_1arg;
4805 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4807 enum machine_mode mode0, mode1;
4813 mode0 = insn_data[d->icode].operand[0].mode;
4814 mode1 = insn_data[d->icode].operand[1].mode;
4816 if (mode0 == V4SImode && mode1 == QImode)
4817 type = v4si_ftype_char;
4818 else if (mode0 == V8HImode && mode1 == QImode)
4819 type = v8hi_ftype_char;
4820 else if (mode0 == V16QImode && mode1 == QImode)
4821 type = v16qi_ftype_char;
4822 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4823 type = v4sf_ftype_v4sf;
4824 else if (mode0 == V8HImode && mode1 == V16QImode)
4825 type = v8hi_ftype_v16qi;
4826 else if (mode0 == V4SImode && mode1 == V8HImode)
4827 type = v4si_ftype_v8hi;
4831 def_builtin (d->mask, d->name, type, d->code);
4836 /* Generate a memory reference for expand_block_move, copying volatile,
4837 and other bits from an original memory reference. */
4840 expand_block_move_mem (mode, addr, orig_mem)
4841 enum machine_mode mode;
4845 rtx mem = gen_rtx_MEM (mode, addr);
4847 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4851 /* Expand a block move operation, and return 1 if successful. Return 0
4852 if we should let the compiler generate normal code.
4854 operands[0] is the destination
4855 operands[1] is the source
4856 operands[2] is the length
4857 operands[3] is the alignment */
4859 #define MAX_MOVE_REG 4
4862 expand_block_move (operands)
4865 rtx orig_dest = operands[0];
4866 rtx orig_src = operands[1];
4867 rtx bytes_rtx = operands[2];
4868 rtx align_rtx = operands[3];
4869 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
4880 rtx stores[MAX_MOVE_REG];
4883 /* If this is not a fixed size move, just call memcpy */
4887 /* If this is not a fixed size alignment, abort */
4888 if (GET_CODE (align_rtx) != CONST_INT)
4890 align = INTVAL (align_rtx);
4892 /* Anything to move? */
4893 bytes = INTVAL (bytes_rtx);
4897 /* store_one_arg depends on expand_block_move to handle at least the size of
4898 reg_parm_stack_space. */
4899 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
4902 /* Move the address into scratch registers. */
4903 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
4904 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
4906 if (TARGET_STRING) /* string instructions are available */
4908 for ( ; bytes > 0; bytes -= move_bytes)
4910 if (bytes > 24 /* move up to 32 bytes at a time */
4918 && ! fixed_regs[12])
4920 move_bytes = (bytes > 32) ? 32 : bytes;
4921 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
4924 expand_block_move_mem (BLKmode,
4927 GEN_INT ((move_bytes == 32)
4931 else if (bytes > 16 /* move up to 24 bytes at a time */
4937 && ! fixed_regs[10])
4939 move_bytes = (bytes > 24) ? 24 : bytes;
4940 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
4943 expand_block_move_mem (BLKmode,
4946 GEN_INT (move_bytes),
4949 else if (bytes > 8 /* move up to 16 bytes at a time */
4955 move_bytes = (bytes > 16) ? 16 : bytes;
4956 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
4959 expand_block_move_mem (BLKmode,
4962 GEN_INT (move_bytes),
4965 else if (bytes >= 8 && TARGET_POWERPC64
4966 /* 64-bit loads and stores require word-aligned
4968 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4971 tmp_reg = gen_reg_rtx (DImode);
4972 emit_move_insn (tmp_reg,
4973 expand_block_move_mem (DImode,
4974 src_reg, orig_src));
4975 emit_move_insn (expand_block_move_mem (DImode,
4976 dest_reg, orig_dest),
4979 else if (bytes > 4 && !TARGET_POWERPC64)
4980 { /* move up to 8 bytes at a time */
4981 move_bytes = (bytes > 8) ? 8 : bytes;
4982 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
4985 expand_block_move_mem (BLKmode,
4988 GEN_INT (move_bytes),
4991 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
4992 { /* move 4 bytes */
4994 tmp_reg = gen_reg_rtx (SImode);
4995 emit_move_insn (tmp_reg,
4996 expand_block_move_mem (SImode,
4997 src_reg, orig_src));
4998 emit_move_insn (expand_block_move_mem (SImode,
4999 dest_reg, orig_dest),
5002 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5003 { /* move 2 bytes */
5005 tmp_reg = gen_reg_rtx (HImode);
5006 emit_move_insn (tmp_reg,
5007 expand_block_move_mem (HImode,
5008 src_reg, orig_src));
5009 emit_move_insn (expand_block_move_mem (HImode,
5010 dest_reg, orig_dest),
5013 else if (bytes == 1) /* move 1 byte */
5016 tmp_reg = gen_reg_rtx (QImode);
5017 emit_move_insn (tmp_reg,
5018 expand_block_move_mem (QImode,
5019 src_reg, orig_src));
5020 emit_move_insn (expand_block_move_mem (QImode,
5021 dest_reg, orig_dest),
5025 { /* move up to 4 bytes at a time */
5026 move_bytes = (bytes > 4) ? 4 : bytes;
5027 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
5030 expand_block_move_mem (BLKmode,
5033 GEN_INT (move_bytes),
5037 if (bytes > move_bytes)
5039 if (! TARGET_POWERPC64)
5041 emit_insn (gen_addsi3 (src_reg, src_reg,
5042 GEN_INT (move_bytes)));
5043 emit_insn (gen_addsi3 (dest_reg, dest_reg,
5044 GEN_INT (move_bytes)));
5048 emit_insn (gen_adddi3 (src_reg, src_reg,
5049 GEN_INT (move_bytes)));
5050 emit_insn (gen_adddi3 (dest_reg, dest_reg,
5051 GEN_INT (move_bytes)));
5057 else /* string instructions not available */
5059 num_reg = offset = 0;
5060 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
5062 /* Calculate the correct offset for src/dest */
5066 dest_addr = dest_reg;
5070 src_addr = plus_constant (src_reg, offset);
5071 dest_addr = plus_constant (dest_reg, offset);
5074 /* Generate the appropriate load and store, saving the stores
5076 if (bytes >= 8 && TARGET_POWERPC64
5077 /* 64-bit loads and stores require word-aligned
5079 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5082 tmp_reg = gen_reg_rtx (DImode);
5083 emit_insn (gen_movdi (tmp_reg,
5084 expand_block_move_mem (DImode,
5087 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
5092 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5095 tmp_reg = gen_reg_rtx (SImode);
5096 emit_insn (gen_movsi (tmp_reg,
5097 expand_block_move_mem (SImode,
5100 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5105 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5108 tmp_reg = gen_reg_rtx (HImode);
5109 emit_insn (gen_movhi (tmp_reg,
5110 expand_block_move_mem (HImode,
5113 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
5121 tmp_reg = gen_reg_rtx (QImode);
5122 emit_insn (gen_movqi (tmp_reg,
5123 expand_block_move_mem (QImode,
5126 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
5132 if (num_reg >= MAX_MOVE_REG)
5134 for (i = 0; i < num_reg; i++)
5135 emit_insn (stores[i]);
5140 for (i = 0; i < num_reg; i++)
5141 emit_insn (stores[i]);
5148 /* Return 1 if OP is a load multiple operation. It is known to be a
5149 PARALLEL and the first section will be tested. */
5152 load_multiple_operation (op, mode)
5154 enum machine_mode mode ATTRIBUTE_UNUSED;
5156 int count = XVECLEN (op, 0);
5157 unsigned int dest_regno;
5161 /* Perform a quick check so we don't blow up below. */
5163 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5164 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5165 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5168 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5169 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5171 for (i = 1; i < count; i++)
5173 rtx elt = XVECEXP (op, 0, i);
5175 if (GET_CODE (elt) != SET
5176 || GET_CODE (SET_DEST (elt)) != REG
5177 || GET_MODE (SET_DEST (elt)) != SImode
5178 || REGNO (SET_DEST (elt)) != dest_regno + i
5179 || GET_CODE (SET_SRC (elt)) != MEM
5180 || GET_MODE (SET_SRC (elt)) != SImode
5181 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5182 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5183 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5184 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5191 /* Similar, but tests for store multiple. Here, the second vector element
5192 is a CLOBBER. It will be tested later. */
5195 store_multiple_operation (op, mode)
5197 enum machine_mode mode ATTRIBUTE_UNUSED;
5199 int count = XVECLEN (op, 0) - 1;
5200 unsigned int src_regno;
5204 /* Perform a quick check so we don't blow up below. */
5206 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5207 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5208 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5211 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5212 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5214 for (i = 1; i < count; i++)
5216 rtx elt = XVECEXP (op, 0, i + 1);
5218 if (GET_CODE (elt) != SET
5219 || GET_CODE (SET_SRC (elt)) != REG
5220 || GET_MODE (SET_SRC (elt)) != SImode
5221 || REGNO (SET_SRC (elt)) != src_regno + i
5222 || GET_CODE (SET_DEST (elt)) != MEM
5223 || GET_MODE (SET_DEST (elt)) != SImode
5224 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5225 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5226 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5227 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5234 /* Return 1 for a parallel vrsave operation. */
5237 vrsave_operation (op, mode)
5239 enum machine_mode mode ATTRIBUTE_UNUSED;
5241 int count = XVECLEN (op, 0);
5242 unsigned int dest_regno, src_regno;
5246 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5247 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5248 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5251 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5252 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5254 if (dest_regno != VRSAVE_REGNO
5255 && src_regno != VRSAVE_REGNO)
5258 for (i = 1; i < count; i++)
5260 rtx elt = XVECEXP (op, 0, i);
5262 if (GET_CODE (elt) != CLOBBER
5263 && GET_CODE (elt) != SET)
5270 /* Return 1 for an PARALLEL suitable for mtcrf. */
5273 mtcrf_operation (op, mode)
5275 enum machine_mode mode ATTRIBUTE_UNUSED;
5277 int count = XVECLEN (op, 0);
5281 /* Perform a quick check so we don't blow up below. */
5283 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5284 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5285 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5287 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5289 if (GET_CODE (src_reg) != REG
5290 || GET_MODE (src_reg) != SImode
5291 || ! INT_REGNO_P (REGNO (src_reg)))
5294 for (i = 0; i < count; i++)
5296 rtx exp = XVECEXP (op, 0, i);
5300 if (GET_CODE (exp) != SET
5301 || GET_CODE (SET_DEST (exp)) != REG
5302 || GET_MODE (SET_DEST (exp)) != CCmode
5303 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5305 unspec = SET_SRC (exp);
5306 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5308 if (GET_CODE (unspec) != UNSPEC
5309 || XINT (unspec, 1) != 20
5310 || XVECLEN (unspec, 0) != 2
5311 || XVECEXP (unspec, 0, 0) != src_reg
5312 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5313 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5319 /* Return 1 for an PARALLEL suitable for lmw. */
5322 lmw_operation (op, mode)
5324 enum machine_mode mode ATTRIBUTE_UNUSED;
5326 int count = XVECLEN (op, 0);
5327 unsigned int dest_regno;
5329 unsigned int base_regno;
5330 HOST_WIDE_INT offset;
5333 /* Perform a quick check so we don't blow up below. */
5335 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5336 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5337 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5340 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5341 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5344 || count != 32 - (int) dest_regno)
5347 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5350 base_regno = REGNO (src_addr);
5351 if (base_regno == 0)
5354 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5356 offset = INTVAL (XEXP (src_addr, 1));
5357 base_regno = REGNO (XEXP (src_addr, 0));
5362 for (i = 0; i < count; i++)
5364 rtx elt = XVECEXP (op, 0, i);
5367 HOST_WIDE_INT newoffset;
5369 if (GET_CODE (elt) != SET
5370 || GET_CODE (SET_DEST (elt)) != REG
5371 || GET_MODE (SET_DEST (elt)) != SImode
5372 || REGNO (SET_DEST (elt)) != dest_regno + i
5373 || GET_CODE (SET_SRC (elt)) != MEM
5374 || GET_MODE (SET_SRC (elt)) != SImode)
5376 newaddr = XEXP (SET_SRC (elt), 0);
5377 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5382 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5384 addr_reg = XEXP (newaddr, 0);
5385 newoffset = INTVAL (XEXP (newaddr, 1));
5389 if (REGNO (addr_reg) != base_regno
5390 || newoffset != offset + 4 * i)
5397 /* Return 1 for an PARALLEL suitable for stmw. */
5400 stmw_operation (op, mode)
5402 enum machine_mode mode ATTRIBUTE_UNUSED;
5404 int count = XVECLEN (op, 0);
5405 unsigned int src_regno;
5407 unsigned int base_regno;
5408 HOST_WIDE_INT offset;
5411 /* Perform a quick check so we don't blow up below. */
5413 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5414 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5415 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5418 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5419 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5422 || count != 32 - (int) src_regno)
5425 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5428 base_regno = REGNO (dest_addr);
5429 if (base_regno == 0)
5432 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5434 offset = INTVAL (XEXP (dest_addr, 1));
5435 base_regno = REGNO (XEXP (dest_addr, 0));
5440 for (i = 0; i < count; i++)
5442 rtx elt = XVECEXP (op, 0, i);
5445 HOST_WIDE_INT newoffset;
5447 if (GET_CODE (elt) != SET
5448 || GET_CODE (SET_SRC (elt)) != REG
5449 || GET_MODE (SET_SRC (elt)) != SImode
5450 || REGNO (SET_SRC (elt)) != src_regno + i
5451 || GET_CODE (SET_DEST (elt)) != MEM
5452 || GET_MODE (SET_DEST (elt)) != SImode)
5454 newaddr = XEXP (SET_DEST (elt), 0);
5455 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5460 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5462 addr_reg = XEXP (newaddr, 0);
5463 newoffset = INTVAL (XEXP (newaddr, 1));
5467 if (REGNO (addr_reg) != base_regno
5468 || newoffset != offset + 4 * i)
5475 /* A validation routine: say whether CODE, a condition code, and MODE
5476 match. The other alternatives either don't make sense or should
5477 never be generated. */
5480 validate_condition_mode (code, mode)
5482 enum machine_mode mode;
5484 if (GET_RTX_CLASS (code) != '<'
5485 || GET_MODE_CLASS (mode) != MODE_CC)
5488 /* These don't make sense. */
5489 if ((code == GT || code == LT || code == GE || code == LE)
5490 && mode == CCUNSmode)
5493 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5494 && mode != CCUNSmode)
5497 if (mode != CCFPmode
5498 && (code == ORDERED || code == UNORDERED
5499 || code == UNEQ || code == LTGT
5500 || code == UNGT || code == UNLT
5501 || code == UNGE || code == UNLE))
5504 /* These should never be generated except for
5505 flag_unsafe_math_optimizations. */
5506 if (mode == CCFPmode
5507 && ! flag_unsafe_math_optimizations
5508 && (code == LE || code == GE
5509 || code == UNEQ || code == LTGT
5510 || code == UNGT || code == UNLT))
5513 /* These are invalid; the information is not there. */
5514 if (mode == CCEQmode
5515 && code != EQ && code != NE)
5519 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5520 We only check the opcode against the mode of the CC value here. */
5523 branch_comparison_operator (op, mode)
5525 enum machine_mode mode ATTRIBUTE_UNUSED;
5527 enum rtx_code code = GET_CODE (op);
5528 enum machine_mode cc_mode;
5530 if (GET_RTX_CLASS (code) != '<')
5533 cc_mode = GET_MODE (XEXP (op, 0));
5534 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5537 validate_condition_mode (code, cc_mode);
5542 /* Return 1 if OP is a comparison operation that is valid for a branch
5543 insn and which is true if the corresponding bit in the CC register
5547 branch_positive_comparison_operator (op, mode)
5549 enum machine_mode mode;
5553 if (! branch_comparison_operator (op, mode))
5556 code = GET_CODE (op);
5557 return (code == EQ || code == LT || code == GT
5558 || code == LTU || code == GTU
5559 || code == UNORDERED);
5562 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5563 We check the opcode against the mode of the CC value and disallow EQ or
5564 NE comparisons for integers. */
/* NOTE(review): elided listing -- return type, braces and returns missing.  */
5567 scc_comparison_operator (op, mode)
5569 enum machine_mode mode;
5571 enum rtx_code code = GET_CODE (op);
5572 enum machine_mode cc_mode;
/* Mode of the whole comparison must match the requested predicate mode.  */
5574 if (GET_MODE (op) != mode && mode != VOIDmode)
5577 if (GET_RTX_CLASS (code) != '<')
5580 cc_mode = GET_MODE (XEXP (op, 0));
5581 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5584 validate_condition_mode (code, cc_mode);
/* Per the header comment, NE on an integer (non-CCFP) compare is rejected.  */
5586 if (code == NE && cc_mode != CCFPmode)
/* Predicate: OP is any RTL comparison usable by a trap insn.
   NOTE(review): elided listing -- return type and braces missing.  */
5593 trap_comparison_operator (op, mode)
5595 enum machine_mode mode;
5597 if (mode != VOIDmode && mode != GET_MODE (op))
5599 return GET_RTX_CLASS (GET_CODE (op)) == '<';
/* Predicate: OP is one of the three bitwise boolean RTL codes.
   NOTE(review): elided listing -- return type and braces missing.  */
5603 boolean_operator (op, mode)
5605 enum machine_mode mode ATTRIBUTE_UNUSED;
5607 enum rtx_code code = GET_CODE (op);
5608 return (code == AND || code == IOR || code == XOR);
/* Predicate: OP is an inclusive- or exclusive-or (no AND).
   NOTE(review): elided listing -- return type and braces missing.  */
5612 boolean_or_operator (op, mode)
5614 enum machine_mode mode ATTRIBUTE_UNUSED;
5616 enum rtx_code code = GET_CODE (op);
5617 return (code == IOR || code == XOR);
/* Predicate: OP is a signed or unsigned min/max RTL code.
   NOTE(review): elided listing -- return type and braces missing.  */
5621 min_max_operator (op, mode)
5623 enum machine_mode mode ATTRIBUTE_UNUSED;
5625 enum rtx_code code = GET_CODE (op);
5626 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5629 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5630 mask required to convert the result of a rotate insn into a shift
5631 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
/* NOTE(review): elided listing -- parameter declarations and braces
   missing.  */
5634 includes_lshift_p (shiftop, andop)
/* Start from all-ones, then shift left to clear the low SHIFTOP bits.  */
5638 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5640 shift_mask <<= INTVAL (shiftop);
/* ANDOP must not set any bit outside the 32-bit shift mask.  */
5642 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5645 /* Similar, but for right shift. */
/* NOTE(review): elided listing -- parameter declarations and braces
   missing.  Mirror of includes_lshift_p with a right shift of the mask.  */
5648 includes_rshift_p (shiftop, andop)
5652 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5654 shift_mask >>= INTVAL (shiftop);
5656 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5659 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5660 to perform a left shift. It must have exactly SHIFTOP least
5661 signifigant 0's, then one or more 1's, then zero or more 0's. */
/* NOTE(review): elided listing -- many statements (lsb extraction via
   `c & -c`, inversions, returns) are missing between the numbered lines;
   do not treat the visible text as complete control flow.  */
5664 includes_rldic_lshift_p (shiftop, andop)
/* CONST_INT case: whole mask fits in one HOST_WIDE_INT.  */
5668 if (GET_CODE (andop) == CONST_INT)
5670 HOST_WIDE_INT c, lsb, shift_mask;
/* All-zeros and all-ones masks are never valid rldic masks.  */
5673 if (c == 0 || c == ~0)
5677 shift_mask <<= INTVAL (shiftop);
5679 /* Find the least signifigant one bit. */
5682 /* It must coincide with the LSB of the shift mask. */
5683 if (-lsb != shift_mask)
5686 /* Invert to look for the next transition (if any). */
5689 /* Remove the low group of ones (originally low group of zeros). */
5692 /* Again find the lsb, and check we have all 1's above. */
/* CONST_DOUBLE case: 64-bit mask on a 32-bit host, split low/high.  */
5696 else if (GET_CODE (andop) == CONST_DOUBLE
5697 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5699 HOST_WIDE_INT low, high, lsb;
5700 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5702 low = CONST_DOUBLE_LOW (andop);
5703 if (HOST_BITS_PER_WIDE_INT < 64)
5704 high = CONST_DOUBLE_HIGH (andop);
5706 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5707 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
/* Transition entirely in the high word.  */
5710 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5712 shift_mask_high = ~0;
5713 if (INTVAL (shiftop) > 32)
5714 shift_mask_high <<= INTVAL (shiftop) - 32;
5718 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5725 return high == -lsb;
/* Otherwise the transition is in (or spans) the low word.  */
5728 shift_mask_low = ~0;
5729 shift_mask_low <<= INTVAL (shiftop);
5733 if (-lsb != shift_mask_low)
5736 if (HOST_BITS_PER_WIDE_INT < 64)
5741 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5744 return high == -lsb;
5748 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5754 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5755 to perform a left shift. It must have SHIFTOP or more least
5756 signifigant 0's, with the remainder of the word 1's. */
/* NOTE(review): elided listing -- lsb computations (`c & -c`) and some
   assignments/returns are missing between the numbered lines.  */
5759 includes_rldicr_lshift_p (shiftop, andop)
/* CONST_INT case.  */
5763 if (GET_CODE (andop) == CONST_INT)
5765 HOST_WIDE_INT c, lsb, shift_mask;
5768 shift_mask <<= INTVAL (shiftop);
5771 /* Find the least signifigant one bit. */
5774 /* It must be covered by the shift mask.
5775 This test also rejects c == 0. */
5776 if ((lsb & shift_mask) == 0)
5779 /* Check we have all 1's above the transition, and reject all 1's. */
5780 return c == -lsb && lsb != 1;
/* CONST_DOUBLE case: 64-bit mask on a 32-bit host.  */
5782 else if (GET_CODE (andop) == CONST_DOUBLE
5783 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5785 HOST_WIDE_INT low, lsb, shift_mask_low;
5787 low = CONST_DOUBLE_LOW (andop);
5789 if (HOST_BITS_PER_WIDE_INT < 64)
5791 HOST_WIDE_INT high, shift_mask_high;
5793 high = CONST_DOUBLE_HIGH (andop);
/* Presumably guarded by `low == 0` in an elided line -- TODO confirm.  */
5797 shift_mask_high = ~0;
5798 if (INTVAL (shiftop) > 32)
5799 shift_mask_high <<= INTVAL (shiftop) - 32;
5803 if ((lsb & shift_mask_high) == 0)
5806 return high == -lsb;
5812 shift_mask_low = ~0;
5813 shift_mask_low <<= INTVAL (shiftop);
5817 if ((lsb & shift_mask_low) == 0)
5820 return low == -lsb && lsb != 1;
5826 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5827 for lfq and stfq insns.
5829 Note reg1 and reg2 *must* be hard registers. To be sure we will
5830 abort if we are passed pseudo registers. */
/* NOTE(review): elided listing -- return type, declarations and the
   pseudo-register abort check are missing.  */
5833 registers_ok_for_quad_peep (reg1, reg2)
5836 /* We might have been passed a SUBREG. */
5837 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
/* The two registers must be consecutive, reg2 immediately after reg1.  */
5840 return (REGNO (reg1) == REGNO (reg2) - 1);
5843 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5844 addr1 and addr2 must be in consecutive memory locations
5845 (addr2 == addr1 + 8). */
/* NOTE(review): elided listing -- declarations of reg1/offset1, braces and
   `return 0;` statements are missing between the numbered lines.  */
5848 addrs_ok_for_quad_peep (addr1, addr2)
5855 /* Extract an offset (if used) from the first addr. */
5856 if (GET_CODE (addr1) == PLUS)
5858 /* If not a REG, return zero. */
5859 if (GET_CODE (XEXP (addr1, 0)) != REG)
5863 reg1 = REGNO (XEXP (addr1, 0));
5864 /* The offset must be constant! */
5865 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5867 offset1 = INTVAL (XEXP (addr1, 1));
5870 else if (GET_CODE (addr1) != REG)
5874 reg1 = REGNO (addr1);
5875 /* This was a simple (mem (reg)) expression. Offset is 0. */
5879 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5880 if (GET_CODE (addr2) != PLUS)
5883 if (GET_CODE (XEXP (addr2, 0)) != REG
5884 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
/* Both addresses must use the same base register.  */
5887 if (reg1 != REGNO (XEXP (addr2, 0)))
5890 /* The offset for the second addr must be 8 more than the first addr. */
5891 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5894 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5899 /* Return the register class of a scratch register needed to copy IN into
5900 or out of a register in CLASS in MODE. If it can be done directly,
5901 NO_REGS is returned. */
/* NOTE(review): elided listing -- the return type, `rtx in;` declaration,
   `regno` declaration/initialization (apparently -1 for non-registers),
   braces and several `return` statements are missing.  */
5904 secondary_reload_class (class, mode, in)
5905 enum reg_class class;
5906 enum machine_mode mode ATTRIBUTE_UNUSED;
5911 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
5913 /* We cannot copy a symbolic operand directly into anything
5914 other than BASE_REGS for TARGET_ELF. So indicate that a
5915 register from BASE_REGS is needed as an intermediate
5918 On Darwin, pic addresses require a load from memory, which
5919 needs a base register. */
5920 if (class != BASE_REGS
5921 && (GET_CODE (in) == SYMBOL_REF
5922 || GET_CODE (in) == HIGH
5923 || GET_CODE (in) == LABEL_REF
5924 || GET_CODE (in) == CONST))
/* Resolve IN to a hard register number when possible; pseudos are
   mapped through true_regnum.  */
5928 if (GET_CODE (in) == REG)
5931 if (regno >= FIRST_PSEUDO_REGISTER)
5933 regno = true_regnum (in);
5934 if (regno >= FIRST_PSEUDO_REGISTER)
5938 else if (GET_CODE (in) == SUBREG)
5940 regno = true_regnum (in);
5941 if (regno >= FIRST_PSEUDO_REGISTER)
5947 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
5949 if (class == GENERAL_REGS || class == BASE_REGS
5950 || (regno >= 0 && INT_REGNO_P (regno)))
5953 /* Constants, memory, and FP registers can go into FP registers. */
5954 if ((regno == -1 || FP_REGNO_P (regno))
5955 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
5958 /* Memory, and AltiVec registers can go into AltiVec registers. */
5959 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
5960 && class == ALTIVEC_REGS)
5963 /* We can copy among the CR registers. */
5964 if ((class == CR_REGS || class == CR0_REGS)
5965 && regno >= 0 && CR_REGNO_P (regno))
5968 /* Otherwise, we need GENERAL_REGS. */
5969 return GENERAL_REGS;
5972 /* Given a comparison operation, return the bit number in CCR to test. We
5973 know this is a valid comparison.
5975 SCC_P is 1 if this is for an scc. That means that %D will have been
5976 used instead of %C, so the bits will be in different places.
5978 Return -1 if OP isn't a valid comparison for some reason. */
/* NOTE(review): elided listing -- the function header (name/parameters),
   local declarations (reg, cc_regnum, base_bit), the `switch (code)` line
   and its `case` labels for the first arms are missing.  */
5985 enum rtx_code code = GET_CODE (op);
5986 enum machine_mode cc_mode;
5991 if (GET_RTX_CLASS (code) != '<')
/* The compared object must be a condition register.  */
5996 if (GET_CODE (reg) != REG
5997 || ! CR_REGNO_P (REGNO (reg)))
6000 cc_mode = GET_MODE (reg);
6001 cc_regnum = REGNO (reg);
/* Each CR field is 4 bits wide; base_bit indexes its LT bit.  */
6002 base_bit = 4 * (cc_regnum - CR0_REGNO);
6004 validate_condition_mode (code, cc_mode);
/* Elided switch: returns LT/GT/EQ/SO bit offsets per comparison code.  */
6009 return scc_p ? base_bit + 3 : base_bit + 2;
6011 return base_bit + 2;
6012 case GT: case GTU: case UNLE:
6013 return base_bit + 1;
6014 case LT: case LTU: case UNGE:
6016 case ORDERED: case UNORDERED:
6017 return base_bit + 3;
6020 /* If scc, we will have done a cror to put the bit in the
6021 unordered position. So test that bit. For integer, this is ! LT
6022 unless this is an scc insn. */
6023 return scc_p ? base_bit + 3 : base_bit;
6026 return scc_p ? base_bit + 3 : base_bit + 1;
6033 /* Return the GOT register. */
/* NOTE(review): elided listing -- return type (rtx) and braces missing.  */
6036 rs6000_got_register (value)
6037 rtx value ATTRIBUTE_UNUSED;
6039 /* The second flow pass currently (June 1999) can't update
6040 regs_ever_live without disturbing other parts of the compiler, so
6041 update it here to make the prolog/epilogue code happy. */
6042 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
6043 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
/* Record that this function uses the PIC offset table.  */
6045 current_function_uses_pic_offset_table = 1;
6047 return pic_offset_table_rtx;
6050 /* Function to init struct machine_function.
6051 This will be called, via a pointer variable,
6052 from push_function_context. */
/* Allocates a zero-initialized, GC-managed machine_function.  */
6054 static struct machine_function *
6055 rs6000_init_machine_status ()
6057 return ggc_alloc_cleared (sizeof (machine_function));
6060 /* Print an operand. Recognize special options, documented below. */
/* NOTE(review): this is the big operand-printing dispatcher for the
   assembler output.  The listing is heavily elided: the `switch (code)`
   line and most `case 'X':` labels, `break;`s, braces and local
   declarations (val, i, tmp, ...) are missing between the numbered lines.
   Each surviving comment documents the %-code whose handler follows it.
   Comments only; code bytes untouched.  */
6063 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6064 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
/* Non-V4 fallback definitions (the #else branch -- directive elided).  */
6066 #define SMALL_DATA_RELOC "sda21"
6067 #define SMALL_DATA_REG 0
6071 print_operand (file, x, code)
6079 /* These macros test for integers and extract the low-order bits. */
6081 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6082 && GET_MODE (X) == VOIDmode)
6084 #define INT_LOWPART(X) \
6085 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6090 /* Write out an instruction after the call which may be replaced
6091 with glue code by the loader. This depends on the AIX version. */
6092 asm_fprintf (file, RS6000_CALL_GLUE);
6095 /* %a is output_address. */
6098 /* If X is a constant integer whose low-order 5 bits are zero,
6099 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6100 in the AIX assembler where "sri" with a zero shift count
6101 writes a trash instruction. */
6102 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6109 /* If constant, low-order 16 bits of constant, unsigned.
6110 Otherwise, write normally. */
6112 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff)
6114 print_operand (file, x, 0);
6118 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6119 for 64-bit mask direction. */
6120 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6123 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6127 /* There used to be a comment for 'C' reading "This is an
6128 optional cror needed for certain floating-point
6129 comparisons. Otherwise write nothing." */
6131 /* Similar, except that this is for an scc, so we must be able to
6132 encode the test in a single bit that is one. We do the above
6133 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6134 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6135 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6137 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6139 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6141 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6144 else if (GET_CODE (x) == NE)
6146 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6148 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6149 base_bit + 2, base_bit + 2);
6154 /* X is a CR register. Print the number of the EQ bit of the CR */
6155 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6156 output_operand_lossage ("invalid %%E value");
6158 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6162 /* X is a CR register. Print the shift count needed to move it
6163 to the high-order four bits. */
6164 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6165 output_operand_lossage ("invalid %%f value");
6167 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6171 /* Similar, but print the count for the rotate in the opposite
6173 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6174 output_operand_lossage ("invalid %%F value");
6176 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6180 /* X is a constant integer. If it is negative, print "m",
6181 otherwise print "z". This is to make a aze or ame insn. */
6182 if (GET_CODE (x) != CONST_INT)
6183 output_operand_lossage ("invalid %%G value");
6184 else if (INTVAL (x) >= 0)
6191 /* If constant, output low-order five bits. Otherwise, write
6194 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6196 print_operand (file, x, 0);
6200 /* If constant, output low-order six bits. Otherwise, write
6203 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6205 print_operand (file, x, 0);
6209 /* Print `i' if this is a constant, else nothing. */
6215 /* Write the bit number in CCR for jump. */
6218 output_operand_lossage ("invalid %%j code");
6220 fprintf (file, "%d", i);
6224 /* Similar, but add one for shift count in rlinm for scc and pass
6225 scc flag to `ccr_bit'. */
6228 output_operand_lossage ("invalid %%J code");
6230 /* If we want bit 31, write a shift count of zero, not 32. */
6231 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6235 /* X must be a constant. Write the 1's complement of the
6238 output_operand_lossage ("invalid %%k value");
6240 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6244 /* X must be a symbolic constant on ELF. Write an
6245 expression suitable for an 'addi' that adds in the low 16
6247 if (GET_CODE (x) != CONST)
6249 print_operand_address (file, x);
6254 if (GET_CODE (XEXP (x, 0)) != PLUS
6255 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6256 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6257 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6258 output_operand_lossage ("invalid %%K value");
6259 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6261 /* For GNU as, there must be a non-alphanumeric character
6262 between 'l' and the number. The '-' is added by
6263 print_operand() already. */
6264 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
6266 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6270 /* %l is output_asm_label. */
6273 /* Write second word of DImode or DFmode reference. Works on register
6274 or non-indexed memory only. */
6275 if (GET_CODE (x) == REG)
6276 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6277 else if (GET_CODE (x) == MEM)
6279 /* Handle possible auto-increment. Since it is pre-increment and
6280 we have already done it, we can just use an offset of word. */
6281 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6282 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6283 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6286 output_address (XEXP (adjust_address_nv (x, SImode,
6290 if (small_data_operand (x, GET_MODE (x)))
6291 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6292 reg_names[SMALL_DATA_REG]);
6297 /* MB value for a mask operand. */
6298 if (! mask_operand (x, SImode))
6299 output_operand_lossage ("invalid %%m value");
6301 val = INT_LOWPART (x);
6303 /* If the high bit is set and the low bit is not, the value is zero.
6304 If the high bit is zero, the value is the first 1 bit we find from
6306 if ((val & 0x80000000) && ((val & 1) == 0))
6311 else if ((val & 0x80000000) == 0)
6313 for (i = 1; i < 32; i++)
6314 if ((val <<= 1) & 0x80000000)
6316 fprintf (file, "%d", i);
6320 /* Otherwise, look for the first 0 bit from the right. The result is its
6321 number plus 1. We know the low-order bit is one. */
6322 for (i = 0; i < 32; i++)
6323 if (((val >>= 1) & 1) == 0)
6326 /* If we ended in ...01, i would be 0. The correct value is 31, so
6328 fprintf (file, "%d", 31 - i);
6332 /* ME value for a mask operand. */
6333 if (! mask_operand (x, SImode))
6334 output_operand_lossage ("invalid %%M value");
6336 val = INT_LOWPART (x);
6338 /* If the low bit is set and the high bit is not, the value is 31.
6339 If the low bit is zero, the value is the first 1 bit we find from
6341 if ((val & 1) && ((val & 0x80000000) == 0))
6346 else if ((val & 1) == 0)
6348 for (i = 0; i < 32; i++)
6349 if ((val >>= 1) & 1)
6352 /* If we had ....10, i would be 0. The result should be
6353 30, so we need 30 - i. */
6354 fprintf (file, "%d", 30 - i);
6358 /* Otherwise, look for the first 0 bit from the left. The result is its
6359 number minus 1. We know the high-order bit is one. */
6360 for (i = 0; i < 32; i++)
6361 if (((val <<= 1) & 0x80000000) == 0)
6364 fprintf (file, "%d", i);
6367 /* %n outputs the negative of its operand. */
6370 /* Write the number of elements in the vector times 4. */
6371 if (GET_CODE (x) != PARALLEL)
6372 output_operand_lossage ("invalid %%N value");
6374 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6378 /* Similar, but subtract 1 first. */
6379 if (GET_CODE (x) != PARALLEL)
6380 output_operand_lossage ("invalid %%O value");
6382 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6386 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6388 || INT_LOWPART (x) < 0
6389 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6390 output_operand_lossage ("invalid %%p value");
6392 fprintf (file, "%d", i);
6396 /* The operand must be an indirect memory reference. The result
6397 is the register number. */
6398 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6399 || REGNO (XEXP (x, 0)) >= 32)
6400 output_operand_lossage ("invalid %%P value");
6402 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6406 /* This outputs the logical code corresponding to a boolean
6407 expression. The expression may have one or both operands
6408 negated (if one, only the first one). For condition register
6409 logical operations, it will also treat the negated
6410 CR codes as NOTs, but not handle NOTs of them. */
6412 const char *const *t = 0;
6414 enum rtx_code code = GET_CODE (x);
6415 static const char * const tbl[3][3] = {
6416 { "and", "andc", "nor" },
6417 { "or", "orc", "nand" },
6418 { "xor", "eqv", "xor" } };
6422 else if (code == IOR)
6424 else if (code == XOR)
6427 output_operand_lossage ("invalid %%q value");
6429 if (GET_CODE (XEXP (x, 0)) != NOT)
6433 if (GET_CODE (XEXP (x, 1)) == NOT)
6444 /* X is a CR register. Print the mask for `mtcrf'. */
6445 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6446 output_operand_lossage ("invalid %%R value");
6448 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6452 /* Low 5 bits of 32 - value */
6454 output_operand_lossage ("invalid %%s value");
6456 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6460 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6461 CONST_INT 32-bit mask is considered sign-extended so any
6462 transition must occur within the CONST_INT, not on the boundary. */
6463 if (! mask64_operand (x, DImode))
6464 output_operand_lossage ("invalid %%S value");
6466 val = INT_LOWPART (x);
6468 if (val & 1) /* Clear Left */
6470 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6471 if (!((val >>= 1) & 1))
6474 #if HOST_BITS_PER_WIDE_INT == 32
6475 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6477 val = CONST_DOUBLE_HIGH (x);
6482 for (i = 32; i < 64; i++)
6483 if (!((val >>= 1) & 1))
6487 /* i = index of last set bit from right
6488 mask begins at 63 - i from left */
6490 output_operand_lossage ("%%S computed all 1's mask");
6492 fprintf (file, "%d", 63 - i);
6495 else /* Clear Right */
6497 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6498 if ((val >>= 1) & 1)
6501 #if HOST_BITS_PER_WIDE_INT == 32
6502 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6504 val = CONST_DOUBLE_HIGH (x);
6506 if (val == (HOST_WIDE_INT) -1)
6509 for (i = 32; i < 64; i++)
6510 if ((val >>= 1) & 1)
6514 /* i = index of last clear bit from right
6515 mask ends at 62 - i from left */
6517 output_operand_lossage ("%%S computed all 0's mask");
6519 fprintf (file, "%d", 62 - i);
6524 /* Print the symbolic name of a branch target register. */
6525 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6526 && REGNO (x) != COUNT_REGISTER_REGNUM))
6527 output_operand_lossage ("invalid %%T value");
6528 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6529 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6531 fputs ("ctr", file);
6535 /* High-order 16 bits of constant for use in unsigned operand. */
6537 output_operand_lossage ("invalid %%u value");
6539 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6540 (INT_LOWPART (x) >> 16) & 0xffff);
6544 /* High-order 16 bits of constant for use in signed operand. */
6546 output_operand_lossage ("invalid %%v value");
6548 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6549 (INT_LOWPART (x) >> 16) & 0xffff);
6553 /* Print `u' if this has an auto-increment or auto-decrement. */
6554 if (GET_CODE (x) == MEM
6555 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6556 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6561 /* Print the trap code for this operand. */
6562 switch (GET_CODE (x))
6565 fputs ("eq", file); /* 4 */
6568 fputs ("ne", file); /* 24 */
6571 fputs ("lt", file); /* 16 */
6574 fputs ("le", file); /* 20 */
6577 fputs ("gt", file); /* 8 */
6580 fputs ("ge", file); /* 12 */
6583 fputs ("llt", file); /* 2 */
6586 fputs ("lle", file); /* 6 */
6589 fputs ("lgt", file); /* 1 */
6592 fputs ("lge", file); /* 5 */
6600 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6603 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6604 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6606 print_operand (file, x, 0);
6610 /* MB value for a PowerPC64 rldic operand. */
6611 val = (GET_CODE (x) == CONST_INT
6612 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6617 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6618 if ((val <<= 1) < 0)
6621 #if HOST_BITS_PER_WIDE_INT == 32
6622 if (GET_CODE (x) == CONST_INT && i >= 0)
6623 i += 32; /* zero-extend high-part was all 0's */
6624 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6626 val = CONST_DOUBLE_LOW (x);
6633 for ( ; i < 64; i++)
6634 if ((val <<= 1) < 0)
6639 fprintf (file, "%d", i + 1);
6643 if (GET_CODE (x) == MEM
6644 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6649 /* Like 'L', for third word of TImode */
6650 if (GET_CODE (x) == REG)
6651 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6652 else if (GET_CODE (x) == MEM)
6654 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6655 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6656 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6658 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6659 if (small_data_operand (x, GET_MODE (x)))
6660 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6661 reg_names[SMALL_DATA_REG]);
6666 /* X is a SYMBOL_REF. Write out the name preceded by a
6667 period and without any trailing data in brackets. Used for function
6668 names. If we are configured for System V (or the embedded ABI) on
6669 the PowerPC, do not emit the period, since those systems do not use
6670 TOCs and the like. */
6671 if (GET_CODE (x) != SYMBOL_REF)
6674 if (XSTR (x, 0)[0] != '.')
6676 switch (DEFAULT_ABI)
6686 case ABI_AIX_NODESC:
6692 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6694 assemble_name (file, XSTR (x, 0));
6699 /* Like 'L', for last word of TImode. */
6700 if (GET_CODE (x) == REG)
6701 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6702 else if (GET_CODE (x) == MEM)
6704 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6705 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6706 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6708 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6709 if (small_data_operand (x, GET_MODE (x)))
6710 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6711 reg_names[SMALL_DATA_REG]);
6715 /* Print AltiVec memory operand. */
6720 if (GET_CODE (x) != MEM)
6725 if (GET_CODE (tmp) == REG)
6726 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6727 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6729 if (REGNO (XEXP (tmp, 0)) == 0)
6730 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6731 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6733 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6734 reg_names[ REGNO (XEXP (tmp, 1)) ]);
/* Default (no %-code): print the operand itself -- register name,
   memory address, or constant.  */
6742 if (GET_CODE (x) == REG)
6743 fprintf (file, "%s", reg_names[REGNO (x)]);
6744 else if (GET_CODE (x) == MEM)
6746 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6747 know the width from the mode. */
6748 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6749 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6750 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6751 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6752 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6753 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6755 output_address (XEXP (x, 0));
6758 output_addr_const (file, x);
6762 output_operand_lossage ("invalid %%xn code");
6766 /* Print the address of an operand. */
/* NOTE(review): elided listing -- return type, `FILE *file; rtx x;`
   declarations, braces, #if/#endif directives around the TOC/Darwin
   branches, and local declarations (minus, symref, name, newname) are
   missing between the numbered lines.  */
6769 print_operand_address (file, x)
/* Plain register: 0(rN).  */
6773 if (GET_CODE (x) == REG)
6774 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
/* Symbolic address: print it, with small-data relocation if applicable.  */
6775 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6776 || GET_CODE (x) == LABEL_REF)
6778 output_addr_const (file, x);
6779 if (small_data_operand (x, GET_MODE (x)))
6780 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6781 reg_names[SMALL_DATA_REG]);
6782 else if (TARGET_TOC)
/* Register + register: print base,index; r0 cannot be the base.  */
6785 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
6787 if (REGNO (XEXP (x, 0)) == 0)
6788 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6789 reg_names[ REGNO (XEXP (x, 0)) ]);
6791 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6792 reg_names[ REGNO (XEXP (x, 1)) ]);
/* Register + constant offset: offset(rN).  */
6794 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
6796 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
6797 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
/* LO_SUM, ELF syntax: sym@l(rN).  */
6800 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6801 && CONSTANT_P (XEXP (x, 1)))
6803 output_addr_const (file, XEXP (x, 1));
6804 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
/* LO_SUM, Darwin syntax: lo16(sym)(rN) -- presumably inside an #else/
   Darwin conditional, directives elided -- TODO confirm.  */
6808 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6809 && CONSTANT_P (XEXP (x, 1)))
6811 fprintf (file, "lo16(");
6812 output_addr_const (file, XEXP (x, 1));
6813 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
/* TOC reference.  */
6816 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
6818 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
6820 rtx contains_minus = XEXP (x, 1);
6824 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6825 turn it into (sym) for output_addr_const. */
6826 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
6827 contains_minus = XEXP (contains_minus, 0);
6829 minus = XEXP (contains_minus, 0);
6830 symref = XEXP (minus, 0);
6831 XEXP (contains_minus, 0) = symref;
/* Temporarily rename the symbol with an @toc suffix for printing,
   then restore both the name and the MINUS afterwards.  */
6836 name = XSTR (symref, 0);
6837 newname = alloca (strlen (name) + sizeof ("@toc"));
6838 strcpy (newname, name);
6839 strcat (newname, "@toc");
6840 XSTR (symref, 0) = newname;
6842 output_addr_const (file, XEXP (x, 1));
6844 XSTR (symref, 0) = name;
6845 XEXP (contains_minus, 0) = minus;
6848 output_addr_const (file, XEXP (x, 1));
6850 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6856 /* Target hook for assembling integer objects. The powerpc version has
6857 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6858 is defined. It also needs to handle DI-mode objects on 64-bit
/* NOTE(review): elided listing -- return type (static bool), parameter
   declarations, braces, the recursion guard manipulation and some
   conditions/returns are missing between the numbered lines.  */
6862 rs6000_assemble_integer (x, size, aligned_p)
6867 #ifdef RELOCATABLE_NEEDS_FIXUP
6868 /* Special handling for SI values. */
6869 if (size == 4 && aligned_p)
6871 extern int in_toc_section PARAMS ((void));
6872 static int recurse = 0;
6874 /* For -mrelocatable, we mark all addresses that need to be fixed up
6875 in the .fixup section. */
6876 if (TARGET_RELOCATABLE
6877 && !in_toc_section ()
6878 && !in_text_section ()
6880 && GET_CODE (x) != CONST_INT
6881 && GET_CODE (x) != CONST_DOUBLE
/* Emit the value labeled LCPn, then record that label's address in the
   .fixup section so the loader can relocate it.  */
6887 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
6889 ASM_OUTPUT_LABEL (asm_out_file, buf);
6890 fprintf (asm_out_file, "\t.long\t(");
6891 output_addr_const (asm_out_file, x);
6892 fprintf (asm_out_file, ")@fixup\n");
6893 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
6894 ASM_OUTPUT_ALIGN (asm_out_file, 2);
6895 fprintf (asm_out_file, "\t.long\t");
6896 assemble_name (asm_out_file, buf);
6897 fprintf (asm_out_file, "\n\t.previous\n");
6901 /* Remove initial .'s to turn a -mcall-aixdesc function
6902 address into the address of the descriptor, not the function
6904 else if (GET_CODE (x) == SYMBOL_REF
6905 && XSTR (x, 0)[0] == '.'
6906 && DEFAULT_ABI == ABI_AIX)
6908 const char *name = XSTR (x, 0);
6909 while (*name == '.')
6912 fprintf (asm_out_file, "\t.long\t%s\n", name);
6916 #endif /* RELOCATABLE_NEEDS_FIXUP */
/* Fall back to the generic integer-assembly hook.  */
6917 return default_assemble_integer (x, size, aligned_p);
/* Reverse a comparison code, using the unordered-aware reversal for
   floating-point (CCFP) compares.
   NOTE(review): elided listing -- return type and braces missing.  */
6921 rs6000_reverse_condition (mode, code)
6922 enum machine_mode mode;
6925 /* Reversal of FP compares takes care -- an ordered compare
6926 becomes an unordered compare and vice versa. */
6927 if (mode == CCFPmode)
6928 return reverse_condition_maybe_unordered (code);
6930 return reverse_condition (code);
6933 /* Generate a compare for CODE. Return a brand-new rtx that
6934 represents the result of the compare. */
/* NOTE(review): elided listing -- return type (rtx), braces, the
   `compare_result` declaration, the default comp_mode assignment (CCmode)
   and the switch default are missing between the numbered lines.
   Operands come from the globals rs6000_compare_op0/op1.  */
6937 rs6000_generate_compare (code)
6940 enum machine_mode comp_mode;
/* Pick the CC mode: FP compare, unsigned compare, or (elided) plain CC.  */
6943 if (rs6000_compare_fp_p)
6944 comp_mode = CCFPmode;
6945 else if (code == GTU || code == LTU
6946 || code == GEU || code == LEU)
6947 comp_mode = CCUNSmode;
6951 /* First, the compare. */
6952 compare_result = gen_reg_rtx (comp_mode);
6953 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
6954 gen_rtx_COMPARE (comp_mode,
6956 rs6000_compare_op1)));
6958 /* Some kinds of FP comparisons need an OR operation;
6959 except for flag_unsafe_math_optimizations we don't bother. */
6960 if (rs6000_compare_fp_p
6961 && ! flag_unsafe_math_optimizations
6962 && (code == LE || code == GE
6963 || code == UNEQ || code == LTGT
6964 || code == UNGT || code == UNLT))
6966 enum rtx_code or1, or2;
6967 rtx or1_rtx, or2_rtx, compare2_rtx;
6968 rtx or_result = gen_reg_rtx (CCEQmode);
/* Decompose the compound FP comparison into two simple ones OR-ed
   together into a CCEQ result.  */
6972 case LE: or1 = LT; or2 = EQ; break;
6973 case GE: or1 = GT; or2 = EQ; break;
6974 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
6975 case LTGT: or1 = LT; or2 = GT; break;
6976 case UNGT: or1 = UNORDERED; or2 = GT; break;
6977 case UNLT: or1 = UNORDERED; or2 = LT; break;
6980 validate_condition_mode (or1, comp_mode);
6981 validate_condition_mode (or2, comp_mode);
6982 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
6983 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
6984 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
6985 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
6987 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
/* From here on, test the OR-ed CCEQ result instead (code becomes EQ
   in an elided line -- TODO confirm).  */
6989 compare_result = or_result;
6993 validate_condition_mode (code, GET_MODE (compare_result));
6995 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
6999 /* Emit the RTL for an sCOND pattern. */
/* NOTE(review): elided listing -- return type (void), parameter
   declarations, `condition_rtx` declaration and braces are missing.  */
7002 rs6000_emit_sCOND (code, result)
7007 enum machine_mode op_mode;
7009 condition_rtx = rs6000_generate_compare (code);
/* Derive the operand mode from whichever compare operand has one.  */
7011 op_mode = GET_MODE (rs6000_compare_op0);
7012 if (op_mode == VOIDmode)
7013 op_mode = GET_MODE (rs6000_compare_op1);
/* 64-bit: materialize the condition in DImode, then convert into
   RESULT; otherwise set RESULT directly from the SImode condition.  */
7015 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7017 PUT_MODE (condition_rtx, DImode);
7018 convert_move (result, condition_rtx, 0);
7022 PUT_MODE (condition_rtx, SImode);
7023 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7027 /* Emit a branch of kind CODE to location LOC. */
/* NOTE(review): elided listing -- return type (void), parameter
   declarations and the closing arguments of the IF_THEN_ELSE are missing.  */
7030 rs6000_emit_cbranch (code, loc)
7034 rtx condition_rtx, loc_ref;
/* Build the compare, then a conditional jump to LOC on its result.  */
7036 condition_rtx = rs6000_generate_compare (code);
7037 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7038 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7039 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7043 /* Return the string to output a conditional branch to LABEL, which is
7044 the operand number of the label, or -1 if the branch is really a
7047 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7048 condition code register and its mode specifies what kind of
7051 REVERSED is non-zero if we should reverse the sense of the comparison.
7053 INSN is the insn. */
7056 output_cbranch (op, label, reversed, insn)
7062 static char string[64];
7063 enum rtx_code code = GET_CODE (op);
7064 rtx cc_reg = XEXP (op, 0);
7065 enum machine_mode mode = GET_MODE (cc_reg);
7066 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
7067 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
7068 int really_reversed = reversed ^ need_longbranch;
7074 validate_condition_mode (code, mode);
7076 /* Work out which way this really branches. We could use
7077 reverse_condition_maybe_unordered here always but this
7078 makes the resulting assembler clearer. */
7079 if (really_reversed)
7080 code = rs6000_reverse_condition (mode, code);
7084 /* Not all of these are actually distinct opcodes, but
7085 we distinguish them for clarity of the resulting assembler. */
7087 ccode = "ne"; break;
7089 ccode = "eq"; break;
7091 ccode = "ge"; break;
7092 case GT: case GTU: case UNGT:
7093 ccode = "gt"; break;
7095 ccode = "le"; break;
7096 case LT: case LTU: case UNLT:
7097 ccode = "lt"; break;
7098 case UNORDERED: ccode = "un"; break;
7099 case ORDERED: ccode = "nu"; break;
7100 case UNGE: ccode = "nl"; break;
7101 case UNLE: ccode = "ng"; break;
7106 /* Maybe we have a guess as to how likely the branch is.
7107 The old mnemonics don't have a way to specify this information. */
7108 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
7109 if (note != NULL_RTX)
7111 /* PROB is the difference from 50%. */
7112 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7114 /* For branches that are very close to 50%, assume not-taken. */
7115 if (abs (prob) > REG_BR_PROB_BASE / 20
7116 && ((prob > 0) ^ need_longbranch))
7125 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7127 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7129 /* We need to escape any '%' characters in the reg_names string.
7130 Assume they'd only be the first character... */
7131 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7133 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7137 /* If the branch distance was too far, we may have to use an
7138 unconditional branch to go the distance. */
7139 if (need_longbranch)
7140 s += sprintf (s, ",$+8\n\tb %s", label);
7142 s += sprintf (s, ",%s", label);
7148 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7149    operands of the last comparison is nonzero/true, FALSE_COND if it
7150    is zero/false.  Return 0 if the hardware has no such operation.  */
/* NOTE(review): this listing is elided -- the return type, parameter
   declarations, several early-return statements, the switch over CODE,
   and many braces are missing between the retained lines.  Tokens below
   are kept byte-identical; only comments have been added.  */
7153 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7159 enum rtx_code code = GET_CODE (op);
7160 rtx op0 = rs6000_compare_op0;
7161 rtx op1 = rs6000_compare_op1;
7163 enum machine_mode compare_mode = GET_MODE (op0);
7164 enum machine_mode result_mode = GET_MODE (dest);
7167 /* These modes should always match. */
7168 if (GET_MODE (op1) != compare_mode)
7170 if (GET_MODE (true_cond) != result_mode)
7172 if (GET_MODE (false_cond) != result_mode)
/* The three checks above presumably bail out (return 0) on mismatch --
   TODO confirm against the unelided source.  */
7175 /* First, work out if the hardware can do this at all, or
7176 if it's too slow... */
7177 /* If the comparison is an integer one, since we only have fsel
7178 it'll be cheaper to use a branch. */
7179 if (! rs6000_compare_fp_p)
7182 /* Eliminate half of the comparisons by switching operands, this
7183 makes the remaining code simpler. */
7184 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7185 || code == LTGT || code == LT)
7187 code = reverse_condition_maybe_unordered (code);
/* Reversing the condition also swaps the two cmove arms.  */
7189 true_cond = false_cond;
7193 /* UNEQ and LTGT take four instructions for a comparison with zero,
7194 it'll probably be faster to use a branch here too. */
7198 if (GET_CODE (op1) == CONST_DOUBLE)
7199 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7201 /* We're going to try to implement comparions by performing
7202 a subtract, then comparing against zero.  Unfortunately,
7203 Inf - Inf is NaN which is not zero, and so if we don't
7204 know that the the operand is finite and the comparison
7205 would treat EQ different to UNORDERED, we can't do it.  */
7206 if (! flag_unsafe_math_optimizations
7207 && code != GT && code != UNGE
7208 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7209 /* Constructs of the form (a OP b ? a : b) are safe.  */
7210 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7211 || (! rtx_equal_p (op0, true_cond)
7212 && ! rtx_equal_p (op1, true_cond))))
7214 /* At this point we know we can use fsel. */
7216 /* Reduce the comparison to a comparison against zero.  */
7217 temp = gen_reg_rtx (compare_mode);
7218 emit_insn (gen_rtx_SET (VOIDmode, temp,
7219 gen_rtx_MINUS (compare_mode, op0, op1)));
7221 op1 = CONST0_RTX (compare_mode);
7223 /* If we don't care about NaNs we can reduce some of the comparisons
7224 down to faster ones. */
7225 if (flag_unsafe_math_optimizations)
7231 true_cond = false_cond;
/* NOTE(review): the switch that canonicalizes CODE (GE/GT/LE/LT/UNGE/
   UNLE/...) into a plain GE against zero is missing from this listing;
   each arm below negates/abs-es op0 and/or swaps the cmove arms.  */
7244 /* Now, reduce everything down to a GE. */
7251 temp = gen_reg_rtx (compare_mode);
7252 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7257 temp = gen_reg_rtx (compare_mode);
7258 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
7263 temp = gen_reg_rtx (compare_mode);
7264 emit_insn (gen_rtx_SET (VOIDmode, temp,
7265 gen_rtx_NEG (compare_mode,
7266 gen_rtx_ABS (compare_mode, op0))));
7271 temp = gen_reg_rtx (result_mode);
7272 emit_insn (gen_rtx_SET (VOIDmode, temp,
7273 gen_rtx_IF_THEN_ELSE (result_mode,
7274 gen_rtx_GE (VOIDmode,
7276 true_cond, false_cond)))
7278 true_cond = false_cond;
7280 temp = gen_reg_rtx (compare_mode);
7281 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7286 temp = gen_reg_rtx (result_mode);
7287 emit_insn (gen_rtx_SET (VOIDmode, temp,
7288 gen_rtx_IF_THEN_ELSE (result_mode,
7289 gen_rtx_GE (VOIDmode,
7291 true_cond, false_cond)))
7293 false_cond = true_cond;
7295 temp = gen_reg_rtx (compare_mode);
7296 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
/* Final fsel: dest = (temp >= 0) ? true_cond : false_cond.  */
7304 emit_insn (gen_rtx_SET (VOIDmode, dest,
7305 gen_rtx_IF_THEN_ELSE (result_mode,
7306 gen_rtx_GE (VOIDmode,
7308 true_cond, false_cond)))
7313 rs6000_emit_minmax (dest, code, op0, op1)
7319 enum machine_mode mode = GET_MODE (op0);
7323 if (code == SMAX || code == SMIN)
7328 if (code == SMAX || code == UMAX)
7329 target = emit_conditional_move (dest, c, op0, op1, mode,
7332 target = emit_conditional_move (dest, c, op0, op1, mode,
7334 if (target == NULL_RTX)
7337 emit_move_insn (dest, target);
7340 /* This page contains routines that are used to determine what the
7341 function prologue and epilogue code will do and write them out. */
7343 /* Return the first fixed-point register that is required to be
7344 saved. 32 if none. */
7347 first_reg_to_save ()
7351 /* Find lowest numbered live register. */
7352 for (first_reg = 13; first_reg <= 31; first_reg++)
7353 if (regs_ever_live[first_reg]
7354 && (! call_used_regs[first_reg]
7355 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
7356 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7357 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7360 if (current_function_profile)
7362 /* AIX must save/restore every register that contains a parameter
7363 before/after the .__mcount call plus an additional register
7364 for the static chain, if needed; use registers from 30 down to 22
7366 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7368 int last_parm_reg, profile_first_reg;
7370 /* Figure out last used parameter register. The proper thing
7371 to do is to walk incoming args of the function. A function
7372 might have live parameter registers even if it has no
7374 for (last_parm_reg = 10;
7375 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7379 /* Calculate first reg for saving parameter registers
7381 Skip reg 31 which may contain the frame pointer. */
7382 profile_first_reg = (33 - last_parm_reg
7383 - (current_function_needs_context ? 1 : 0));
7385 /* Need to skip another reg to account for R31 being PICBASE
7386 (when flag_pic is set) or R30 being used as the frame
7387 pointer (when flag_pic is not set). */
7388 --profile_first_reg;
7390 /* Do not save frame pointer if no parameters needs to be saved. */
7391 if (profile_first_reg == 31)
7392 profile_first_reg = 32;
7394 if (first_reg > profile_first_reg)
7395 first_reg = profile_first_reg;
7398 /* SVR4 may need one register to preserve the static chain. */
7399 else if (current_function_needs_context)
7401 /* Skip reg 31 which may contain the frame pointer. */
7408 if (flag_pic && current_function_uses_pic_offset_table &&
7409 (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
7410 return RS6000_PIC_OFFSET_TABLE_REGNUM;
7416 /* Similar, for FP regs. */
7419 first_fp_reg_to_save ()
7423 /* Find lowest numbered live register. */
7424 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7425 if (regs_ever_live[first_reg])
7431 /* Similar, for AltiVec regs. */
7434 first_altivec_reg_to_save ()
7438 /* Stack frame remains as is unless we are in AltiVec ABI. */
7439 if (! TARGET_ALTIVEC_ABI)
7440 return LAST_ALTIVEC_REGNO + 1;
7442 /* Find lowest numbered live register. */
7443 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7444 if (regs_ever_live[i])
7450 /* Return a 32-bit mask of the AltiVec registers we need to set in
7451 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7452 the 32-bit word is 0. */
7455 compute_vrsave_mask ()
7457 unsigned int i, mask = 0;
7459 /* First, find out if we use _any_ altivec registers. */
7460 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7461 if (regs_ever_live[i])
7462 mask |= ALTIVEC_REG_BIT (i);
7467 /* Next, add all registers that are call-clobbered. We do this
7468 because post-reload register optimizers such as regrename_optimize
7469 may choose to use them. They never change the register class
7470 chosen by reload, so cannot create new uses of altivec registers
7471 if there were none before, so the early exit above is safe. */
7472 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7473 altivec registers not saved in the mask, which might well make the
7474 adjustments below more effective in eliding the save/restore of
7475 VRSAVE in small functions. */
7476 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7477 if (call_used_regs[i])
7478 mask |= ALTIVEC_REG_BIT (i);
7480 /* Next, remove the argument registers from the set. These must
7481 be in the VRSAVE mask set by the caller, so we don't need to add
7482 them in again. More importantly, the mask we compute here is
7483 used to generate CLOBBERs in the set_vrsave insn, and we do not
7484 wish the argument registers to die. */
7485 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7486 mask &= ~ALTIVEC_REG_BIT (i);
7488 /* Similarly, remove the return value from the set. */
7491 diddle_return_value (is_altivec_return_reg, &yes);
7493 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7500 is_altivec_return_reg (reg, xyes)
7504 bool *yes = (bool *) xyes;
7505 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7510 /* Calculate the stack information for the current function. This is
7511 complicated by having two separate calling sequences, the AIX calling
7512 sequence and the V.4 calling sequence.
7514 AIX (and Darwin/Mac OS X) stack frames look like:
7516 SP----> +---------------------------------------+
7517 | back chain to caller | 0 0
7518 +---------------------------------------+
7519 | saved CR | 4 8 (8-11)
7520 +---------------------------------------+
7522 +---------------------------------------+
7523 | reserved for compilers | 12 24
7524 +---------------------------------------+
7525 | reserved for binders | 16 32
7526 +---------------------------------------+
7527 | saved TOC pointer | 20 40
7528 +---------------------------------------+
7529 | Parameter save area (P) | 24 48
7530 +---------------------------------------+
7531 | Alloca space (A) | 24+P etc.
7532 +---------------------------------------+
7533 | Local variable space (L) | 24+P+A
7534 +---------------------------------------+
7535 | Float/int conversion temporary (X) | 24+P+A+L
7536 +---------------------------------------+
7537 | Save area for AltiVec registers (W) | 24+P+A+L+X
7538 +---------------------------------------+
7539 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7540 +---------------------------------------+
7541 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7542 +---------------------------------------+
7543 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
7544 +---------------------------------------+
7545 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
7546 +---------------------------------------+
7547 old SP->| back chain to caller's caller |
7548 +---------------------------------------+
7550 The required alignment for AIX configurations is two words (i.e., 8
7554 V.4 stack frames look like:
7556 SP----> +---------------------------------------+
7557 | back chain to caller | 0
7558 +---------------------------------------+
7559 | caller's saved LR | 4
7560 +---------------------------------------+
7561 | Parameter save area (P) | 8
7562 +---------------------------------------+
7563 | Alloca space (A) | 8+P
7564 +---------------------------------------+
7565 | Varargs save area (V) | 8+P+A
7566 +---------------------------------------+
7567 | Local variable space (L) | 8+P+A+V
7568 +---------------------------------------+
7569 | Float/int conversion temporary (X) | 8+P+A+V+L
7570 +---------------------------------------+
7571 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7572 +---------------------------------------+
7573 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7574 +---------------------------------------+
7575 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7576 +---------------------------------------+
7577 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7578 +---------------------------------------+
7579 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7580 +---------------------------------------+
7581 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7582 +---------------------------------------+
7583 old SP->| back chain to caller's caller |
7584 +---------------------------------------+
7586 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7587 given. (But note below and in sysv4.h that we require only 8 and
7588 may round up the size of our stack frame anyways. The historical
7589 reason is early versions of powerpc-linux which didn't properly
7590 align the stack at program startup. A happy side-effect is that
7591 -mno-eabi libraries can be used with -meabi programs.)
7593 The EABI configuration defaults to the V.4 layout, unless
7594 -mcall-aix is used, in which case the AIX layout is used. However,
7595 the stack alignment requirements may differ. If -mno-eabi is not
7596 given, the required stack alignment is 8 bytes; if -mno-eabi is
7597 given, the required alignment is 16 bytes. (But see V.4 comment
7600 #ifndef ABI_STACK_BOUNDARY
7601 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
/* NOTE(review): this listing is elided -- the `#endif`, the function's
   return type and braces, several `else`/`abort` lines, the
   `switch (abi)` statement around the offset calculation, and various
   local declarations (ehrd_size, total_raw_size, i) are missing between
   the retained lines.  Tokens below are kept byte-identical; only
   comments have been added.  */
/* Compute and cache the rs6000_stack_t frame layout for the current
   function (sizes, save offsets, push_p) for both the AIX/Darwin and
   V.4 calling sequences described in the comment above.  */
7605 rs6000_stack_info ()
7607 static rs6000_stack_t info, zero_info;
7608 rs6000_stack_t *info_ptr = &info;
7609 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7610 enum rs6000_abi abi;
7614 /* Zero all fields portably. */
7617 /* Select which calling sequence. */
7618 info_ptr->abi = abi = DEFAULT_ABI;
7620 /* Calculate which registers need to be saved & save area size. */
7621 info_ptr->first_gp_reg_save = first_reg_to_save ();
7622 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
7623 even if it currently looks like we won't. */
7624 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7625 || (flag_pic == 1 && abi == ABI_V4)
7626 || (flag_pic && abi == ABI_DARWIN))
7627 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
7628 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
/* else-arm: GP save area covers first_gp_reg_save..r31.  */
7630 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7632 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7633 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7635 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7636 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7637 - info_ptr->first_altivec_reg_save);
7639 /* Does this function call anything? */
7640 info_ptr->calls_p = (! current_function_is_leaf
7641 || cfun->machine->ra_needs_full_frame);
7643 /* Determine if we need to save the link register. */
7644 if (rs6000_ra_ever_killed ()
7645 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7646 #ifdef TARGET_RELOCATABLE
7647 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7649 || (info_ptr->first_fp_reg_save != 64
7650 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7651 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7652 || (abi == ABI_V4 && current_function_calls_alloca)
7653 || (DEFAULT_ABI == ABI_DARWIN
7655 && current_function_uses_pic_offset_table)
7656 || info_ptr->calls_p)
7658 info_ptr->lr_save_p = 1;
7659 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7662 /* Determine if we need to save the condition code registers. */
7663 if (regs_ever_live[CR2_REGNO]
7664 || regs_ever_live[CR3_REGNO]
7665 || regs_ever_live[CR4_REGNO])
7667 info_ptr->cr_save_p = 1;
7669 info_ptr->cr_size = reg_size;
7672 /* If the current function calls __builtin_eh_return, then we need
7673 to allocate stack space for registers that will hold data for
7674 the exception handler. */
7675 if (current_function_calls_eh_return)
7678 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7680 ehrd_size = i * UNITS_PER_WORD;
7685 /* Determine various sizes. */
7686 info_ptr->reg_size = reg_size;
7687 info_ptr->fixed_size = RS6000_SAVE_AREA;
7688 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7689 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7690 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7693 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
7695 info_ptr->vrsave_mask = compute_vrsave_mask ();
7696 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7700 info_ptr->vrsave_mask = 0;
7701 info_ptr->vrsave_size = 0;
7704 /* Calculate the offsets. */
/* NOTE(review): a `switch (abi)` with ABI_NONE/abort and the
   ABI_AIX/ABI_DARWIN case labels is missing here; this first arm lays
   out the AIX-style frame (FP regs highest, then GP, then AltiVec).  */
7712 case ABI_AIX_NODESC:
7714 info_ptr->fp_save_offset = - info_ptr->fp_size;
7715 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7717 if (TARGET_ALTIVEC_ABI)
7719 info_ptr->vrsave_save_offset
7720 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7722 /* Align stack so vector save area is on a quadword boundary. */
7723 if (info_ptr->altivec_size != 0)
7724 info_ptr->altivec_padding_size
7725 = 16 - (-info_ptr->vrsave_save_offset % 16);
7727 info_ptr->altivec_padding_size = 0;
7729 info_ptr->altivec_save_offset
7730 = info_ptr->vrsave_save_offset
7731 - info_ptr->altivec_padding_size
7732 - info_ptr->altivec_size;
7734 /* Adjust for AltiVec case. */
7735 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7738 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7739 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7740 info_ptr->lr_save_offset = 2*reg_size;
/* NOTE(review): the `case ABI_V4:` label is missing; the lines below
   lay out the V.4-style frame (CR saved below GP regs).  */
7744 info_ptr->fp_save_offset = - info_ptr->fp_size;
7745 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7746 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7748 if (TARGET_ALTIVEC_ABI)
7750 info_ptr->vrsave_save_offset
7751 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7753 /* Align stack so vector save area is on a quadword boundary. */
7754 if (info_ptr->altivec_size != 0)
7755 info_ptr->altivec_padding_size
7756 = 16 - (-info_ptr->vrsave_save_offset % 16);
7758 info_ptr->altivec_padding_size = 0;
7760 info_ptr->altivec_save_offset
7761 = info_ptr->vrsave_save_offset
7762 - info_ptr->altivec_padding_size
7763 - info_ptr->altivec_size;
7765 /* Adjust for AltiVec case. */
7766 info_ptr->toc_save_offset
7767 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7770 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7771 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7772 info_ptr->lr_save_offset = reg_size;
7776 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7778 + info_ptr->altivec_size
7779 + info_ptr->altivec_padding_size
7780 + info_ptr->vrsave_size
7784 + info_ptr->vrsave_size
7785 + info_ptr->toc_size,
7786 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7789 total_raw_size = (info_ptr->vars_size
7790 + info_ptr->parm_size
7791 + info_ptr->save_size
7792 + info_ptr->varargs_size
7793 + info_ptr->fixed_size);
7795 info_ptr->total_size =
7796 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7798 /* Determine if we need to allocate any stack frame:
7800 For AIX we need to push the stack if a frame pointer is needed
7801 (because the stack might be dynamically adjusted), if we are
7802 debugging, if we make calls, or if the sum of fp_save, gp_save,
7803 and local variables are more than the space needed to save all
7804 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7805 + 18*8 = 288 (GPR13 reserved).
7807 For V.4 we don't have the stack cushion that AIX uses, but assume
7808 that the debugger can handle stackless frames. */
7810 if (info_ptr->calls_p)
7811 info_ptr->push_p = 1;
7813 else if (abi == ABI_V4)
7814 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
7817 info_ptr->push_p = (frame_pointer_needed
7818 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7819 || ((total_raw_size - info_ptr->fixed_size)
7820 > (TARGET_32BIT ? 220 : 288)));
7822 /* Zero offsets if we're not saving those registers. */
7823 if (info_ptr->fp_size == 0)
7824 info_ptr->fp_save_offset = 0;
7826 if (info_ptr->gp_size == 0)
7827 info_ptr->gp_save_offset = 0;
7829 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7830 info_ptr->altivec_save_offset = 0;
7832 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7833 info_ptr->vrsave_save_offset = 0;
7835 if (! info_ptr->lr_save_p)
7836 info_ptr->lr_save_offset = 0;
7838 if (! info_ptr->cr_save_p)
7839 info_ptr->cr_save_offset = 0;
7841 if (! info_ptr->toc_save_p)
7842 info_ptr->toc_save_offset = 0;
/* Dump the computed rs6000_stack_t layout to stderr, for debugging.
   NOTE(review): this listing is elided -- the return type, braces,
   the `switch (DEFAULT_ABI)` head, several `if (info->...)` guards,
   and some fprintf lines are missing between the retained lines.
   Tokens below are kept byte-identical; only comments have been
   added.  */
7848 debug_stack_info (info)
7849 rs6000_stack_t *info;
7851 const char *abi_string;
/* Presumably: if INFO is null, compute it on the fly.  */
7854 info = rs6000_stack_info ();
7856 fprintf (stderr, "\nStack information for function %s:\n",
7857 ((current_function_decl && DECL_NAME (current_function_decl))
7858 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7863 default: abi_string = "Unknown"; break;
7864 case ABI_NONE: abi_string = "NONE"; break;
7866 case ABI_AIX_NODESC: abi_string = "AIX"; break;
7867 case ABI_DARWIN: abi_string = "Darwin"; break;
7868 case ABI_V4: abi_string = "V.4"; break;
7871 fprintf (stderr, "\tABI = %5s\n", abi_string);
7873 if (TARGET_ALTIVEC_ABI)
7874 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
/* Below: each field is printed only when it differs from its
   "nothing saved" default.  */
7876 if (info->first_gp_reg_save != 32)
7877 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
7879 if (info->first_fp_reg_save != 64)
7880 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
7882 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7883 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7884 info->first_altivec_reg_save);
7886 if (info->lr_save_p)
7887 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
7889 if (info->cr_save_p)
7890 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
7892 if (info->toc_save_p)
7893 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
7895 if (info->vrsave_mask)
7896 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
7899 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
7902 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
7904 if (info->gp_save_offset)
7905 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
7907 if (info->fp_save_offset)
7908 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
7910 if (info->altivec_save_offset)
7911 fprintf (stderr, "\taltivec_save_offset = %5d\n",
7912 info->altivec_save_offset);
7914 if (info->vrsave_save_offset)
7915 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
7916 info->vrsave_save_offset);
7918 if (info->lr_save_offset)
7919 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
7921 if (info->cr_save_offset)
7922 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
7924 if (info->toc_save_offset)
7925 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
7927 if (info->varargs_save_offset)
7928 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
7930 if (info->total_size)
7931 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
7933 if (info->varargs_size)
7934 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
7936 if (info->vars_size)
7937 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
7939 if (info->parm_size)
7940 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
7942 if (info->fixed_size)
7943 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
7946 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
7949 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
7951 if (info->altivec_size)
7952 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
7954 if (info->vrsave_size)
7955 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
7957 if (info->altivec_padding_size)
7958 fprintf (stderr, "\taltivec_padding_size= %5d\n",
7959 info->altivec_padding_size);
7962 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
7965 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
7968 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
7970 if (info->save_size)
7971 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
7973 if (info->reg_size != 4)
7974 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
7976 fprintf (stderr, "\n");
7980 rs6000_return_addr (count, frame)
7984 /* Currently we don't optimize very well between prolog and body
7985 code and for PIC code the code can be actually quite bad, so
7986 don't try to be too clever here. */
7987 if (count != 0 || flag_pic != 0)
7989 cfun->machine->ra_needs_full_frame = 1;
7996 plus_constant (copy_to_reg
7997 (gen_rtx_MEM (Pmode,
7998 memory_address (Pmode, frame))),
7999 RETURN_ADDRESS_OFFSET)));
8002 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
8006 rs6000_ra_ever_killed ()
8010 #ifdef ASM_OUTPUT_MI_THUNK
8011 if (current_function_is_thunk)
8014 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8015 || cfun->machine->ra_needs_full_frame)
8016 return regs_ever_live[LINK_REGISTER_REGNUM];
8018 push_topmost_sequence ();
8020 pop_topmost_sequence ();
8022 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8026 /* Add a REG_MAYBE_DEAD note to the insn. */
8028 rs6000_maybe_dead (insn)
8031 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8036 /* Emit instructions needed to load the TOC register.
8037    This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8038    a constant pool; or for SVR4 -fpic. */
/* NOTE(review): this listing is elided -- the return type, the
   FROMPROLOG parameter declaration, braces, local declarations
   (dest, buf, symF, symL, tocsym, realsym), the toc-section setup
   lines, and several call-argument continuation lines are missing
   between the retained lines.  Tokens below are kept byte-identical;
   only comments have been added.  */
8041 rs6000_emit_load_toc_table (fromprolog)
8045 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
8047 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
/* V.4 small-model PIC: one load_toc insn via a temp (LR in the
   prologue, a fresh pseudo elsewhere).  */
8049 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8051 rtx temp = (fromprolog
8052 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8053 : gen_reg_rtx (Pmode));
8054 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
8055 rs6000_maybe_dead (emit_move_insn (dest, temp));
/* -fPIC (large-model): compute the GOT pointer from pc-relative
   labels.  */
8057 else if (flag_pic == 2)
8060 rtx tempLR = (fromprolog
8061 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8062 : gen_reg_rtx (Pmode));
8063 rtx temp0 = (fromprolog
8064 ? gen_rtx_REG (Pmode, 0)
8065 : gen_reg_rtx (Pmode));
8068 /* possibly create the toc section */
8069 if (! toc_initialized)
8072 function_section (current_function_decl);
/* Prologue variant: reference the LCF/LCL labels for this function.  */
8079 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
8080 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8082 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
8083 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8085 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
8087 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8088 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
/* Non-prologue variant: make up a fresh LCG label per call.  */
8095 static int reload_toc_labelno = 0;
8097 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
8099 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
8100 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8102 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
8105 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8106 rs6000_maybe_dead (emit_move_insn (temp0,
8107 gen_rtx_MEM (Pmode, dest)));
8109 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8111 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8113 /* This is for AIX code running in non-PIC ELF. */
8116 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8117 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8119 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8120 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
/* AIX proper: reload r2 from the fixed stack slot, SImode or DImode
   depending on TARGET_32BIT.  */
8128 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8130 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
/* Return the alias set used for TOC references, allocating it lazily
   on first use.  */

int
get_TOC_alias_set ()
{
  static int set = -1;
  if (set == -1)
    set = new_alias_set ();
  return set;
}
8143 /* This retuns nonzero if the current function uses the TOC. This is
8144 determined by the presence of (unspec ... 7), which is generated by
8145 the various load_toc_* patterns. */
8152 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8155 rtx pat = PATTERN (insn);
8158 if (GET_CODE (pat) == PARALLEL)
8159 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8160 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8161 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8168 create_TOC_reference (symbol)
8171 return gen_rtx_PLUS (Pmode,
8172 gen_rtx_REG (Pmode, TOC_REGISTER),
8173 gen_rtx_CONST (Pmode,
8174 gen_rtx_MINUS (Pmode, symbol,
8175 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8179 /* __throw will restore its own return address to be the same as the
8180 return address of the function that the throw is being made to.
8181 This is unfortunate, because we want to check the original
8182 return address to see if we need to restore the TOC.
8183 So we have to squirrel it away here.
8184 This is used only in compiling __throw and __rethrow.
8186 Most of this code should be removed by CSE. */
8187 static rtx insn_after_throw;
8189 /* This does the saving... */
8191 rs6000_aix_emit_builtin_unwind_init ()
8194 rtx stack_top = gen_reg_rtx (Pmode);
8195 rtx opcode_addr = gen_reg_rtx (Pmode);
8197 insn_after_throw = gen_reg_rtx (SImode);
8199 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8200 emit_move_insn (stack_top, mem);
8202 mem = gen_rtx_MEM (Pmode,
8203 gen_rtx_PLUS (Pmode, stack_top,
8204 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8205 emit_move_insn (opcode_addr, mem);
8206 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8209 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8210 in _eh.o). Only used on AIX.
8212 The idea is that on AIX, function calls look like this:
8213 bl somefunction-trampoline
8217 somefunction-trampoline:
8219 ... load function address in the count register ...
8221 or like this, if the linker determines that this is not a cross-module call
8222 and so the TOC need not be restored:
8225 or like this, if the compiler could determine that this is not a
8228 now, the tricky bit here is that register 2 is saved and restored
8229 by the _linker_, so we can't readily generate debugging information
8230 for it. So we need to go back up the call chain looking at the
8231 insns at return addresses to see which calls saved the TOC register
8232 and so see where it gets restored from.
8234 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8235 just before the actual epilogue.
8237 On the bright side, this incurs no space or time overhead unless an
8238 exception is thrown, except for the extra code in libgcc.a.
8240 The parameter STACKSIZE is a register containing (at runtime)
8241 the amount to be popped off the stack in addition to the stack frame
8242 of this routine (which will be __throw or __rethrow, and so is
8243 guaranteed to have a stack frame). */
8246 rs6000_emit_eh_toc_restore (stacksize)
8250 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8251 rtx tocompare = gen_reg_rtx (SImode);
8252 rtx opcode = gen_reg_rtx (SImode);
8253 rtx opcode_addr = gen_reg_rtx (Pmode);
8255 rtx loop_start = gen_label_rtx ();
8256 rtx no_toc_restore_needed = gen_label_rtx ();
8257 rtx loop_exit = gen_label_rtx ();
/* Start at this frame's back chain ...  */
8259 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8260 set_mem_alias_set (mem, rs6000_sr_alias_set);
8261 emit_move_insn (bottom_of_stack, mem);
/* ... and stop once we have walked STACKSIZE bytes up the stack.  */
8263 top_of_stack = expand_binop (Pmode, add_optab,
8264 bottom_of_stack, stacksize,
8265 NULL_RTX, 1, OPTAB_WIDEN);
/* The encoding of the "restore r2 from the stack" instruction:
   lwz r2,20(r1) for 32-bit, ld r2,40(r1) for 64-bit.  */
8267 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
8268 : 0xE8410028, SImode));
8270 if (insn_after_throw == NULL_RTX)
8272 emit_move_insn (opcode, insn_after_throw)
8274 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8275 emit_label (loop_start);
/* If the opcode at the return address is the TOC-restore insn, the
   caller saved r2 in the standard slot: reload it from there.  */
8277 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8278 SImode, NULL_RTX, NULL_RTX,
8279 no_toc_restore_needed);
8281 mem = gen_rtx_MEM (Pmode,
8282 gen_rtx_PLUS (Pmode, bottom_of_stack,
8283 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8284 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8286 emit_label (no_toc_restore_needed);
8287 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8288 Pmode, NULL_RTX, NULL_RTX,
/* Walk one frame up the back chain and fetch the opcode at that
   frame's return address, then iterate.  */
8291 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8292 set_mem_alias_set (mem, rs6000_sr_alias_set);
8293 emit_move_insn (bottom_of_stack, mem);
8295 mem = gen_rtx_MEM (Pmode,
8296 gen_rtx_PLUS (Pmode, bottom_of_stack,
8297 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8298 emit_move_insn (opcode_addr, mem);
8299 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8301 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8302 emit_jump (loop_start);
8303 emit_note (NULL, NOTE_INSN_LOOP_END);
8304 emit_label (loop_exit);
8306 #endif /* TARGET_AIX */
8308 /* This ties together stack memory (MEM with an alias set of
8309 rs6000_sr_alias_set) and the change to the stack pointer. */
/* Emit a stack_tie insn: a scheduling barrier preventing stack-slot
   loads/stores from moving across a stack-pointer update.  */
8312 rs6000_emit_stack_tie ()
8314 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8316 set_mem_alias_set (mem, rs6000_sr_alias_set);
8317 emit_insn (gen_stack_tie (mem));
8320 /* Emit the correct code for allocating stack space, as insns.
8321 If COPY_R12, make sure a copy of the old frame is left in r12.
8322 The generated code may use hard register 0 as a temporary. */
8325 rs6000_emit_allocate_stack (size, copy_r12)
8330 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8331 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
8332 rtx todec = GEN_INT (-size);
/* -fstack-limit: trap before the stack pointer would cross the limit.  */
8334 if (current_function_limit_stack)
/* Limit in a fixed register: compare against limit + frame size.  */
8336 if (REG_P (stack_limit_rtx)
8337 && REGNO (stack_limit_rtx) > 1
8338 && REGNO (stack_limit_rtx) <= 31)
8340 emit_insn (Pmode == SImode
8341 ? gen_addsi3 (tmp_reg,
8344 : gen_adddi3 (tmp_reg,
8348 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
/* Limit given as a symbol (V.4 only): materialize symbol + frame size
   with lis/addi and trap against it.  */
8351 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8353 && DEFAULT_ABI == ABI_V4)
8355 rtx toload = gen_rtx_CONST (VOIDmode,
8356 gen_rtx_PLUS (Pmode,
8360 emit_insn (gen_elf_high (tmp_reg, toload));
8361 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8362 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8366 warning ("stack limit expression is not supported");
8369 if (copy_r12 || ! TARGET_UPDATE)
8370 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8376 /* Need a note here so that try_split doesn't get confused. */
8377 if (get_last_insn() == NULL_RTX)
8378 emit_note (0, NOTE_INSN_DELETED);
8379 insn = emit_move_insn (tmp_reg, todec);
8380 try_split (PATTERN (insn), insn, 0);
/* With update forms available, decrement sp and store the back chain
   in one stwu/stdu-style insn ...  */
8384 if (Pmode == SImode)
8385 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8388 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
/* ... otherwise decrement sp, then store the old sp (saved in r12
   above) as the back chain.  */
8393 if (Pmode == SImode)
8394 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8396 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8397 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8398 gen_rtx_REG (Pmode, 12));
/* Describe the sp adjustment for the DWARF unwinder.  */
8401 RTX_FRAME_RELATED_P (insn) = 1;
8403 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8404 gen_rtx_SET (VOIDmode, stack_reg,
8405 gen_rtx_PLUS (Pmode, stack_reg,
8410 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8413 (mem (plus (blah) (regXX)))
8417 (mem (plus (blah) (const VALUE_OF_REGXX))). */
/* INSN stores an AltiVec register via [base+REG] addressing; record a
   frame-related note with REG replaced by its known constant VAL so the
   unwinder sees a constant-offset save.  */
8420 altivec_frame_fixup (insn, reg, val)
8426 real = copy_rtx (PATTERN (insn));
8428 real = replace_rtx (real, reg, GEN_INT (val));
8430 RTX_FRAME_RELATED_P (insn) = 1;
8431 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8436 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8437 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8438 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8439 deduce these equivalences by itself so it wasn't necessary to hold
8440 its hand so much. */
8443 rs6000_frame_related (insn, reg, val, reg2, rreg)
8452 /* copy_rtx will not make unique copies of registers, so we need to
8453 ensure we don't have unwanted sharing here. */
8455 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
8458 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
8460 real = copy_rtx (PATTERN (insn));
8462 if (reg2 != NULL_RTX)
8463 real = replace_rtx (real, reg2, rreg);
/* Rewrite REG as sp + VAL so the note is in unwinder-friendly form.  */
8465 real = replace_rtx (real, reg,
8466 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8467 STACK_POINTER_REGNUM),
8470 /* We expect that 'real' is either a SET or a PARALLEL containing
8471 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8472 are important so they all have to be marked RTX_FRAME_RELATED_P. */
/* Single SET: fold constants in src, dest, and any MEM address.  */
8474 if (GET_CODE (real) == SET)
8478 temp = simplify_rtx (SET_SRC (set));
8480 SET_SRC (set) = temp;
8481 temp = simplify_rtx (SET_DEST (set));
8483 SET_DEST (set) = temp;
8484 if (GET_CODE (SET_DEST (set)) == MEM)
8486 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8488 XEXP (SET_DEST (set), 0) = temp;
/* PARALLEL: simplify and mark every contained SET the same way.  */
8491 else if (GET_CODE (real) == PARALLEL)
8494 for (i = 0; i < XVECLEN (real, 0); i++)
8495 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8497 rtx set = XVECEXP (real, 0, i);
8499 temp = simplify_rtx (SET_SRC (set));
8501 SET_SRC (set) = temp;
8502 temp = simplify_rtx (SET_DEST (set));
8504 SET_DEST (set) = temp;
8505 if (GET_CODE (SET_DEST (set)) == MEM)
8507 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8509 XEXP (SET_DEST (set), 0) = temp;
8511 RTX_FRAME_RELATED_P (set) = 1;
8517 RTX_FRAME_RELATED_P (insn) = 1;
8518 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8523 /* Returns an insn that has a vrsave set operation with the
8524 appropriate CLOBBERs. */
/* REG holds the new VRSAVE value; INFO describes the saved-register
   layout; EPILOGUEP is nonzero when generating the epilogue form.  */
8527 generate_set_vrsave (reg, info, epiloguep)
8529 rs6000_stack_t *info;
8533 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8534 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
/* Element 0: the volatile set of VRSAVE itself.  */
8537 = gen_rtx_SET (VOIDmode,
8539 gen_rtx_UNSPEC_VOLATILE (SImode,
8540 gen_rtvec (2, reg, vrsave),
8545 /* We need to clobber the registers in the mask so the scheduler
8546 does not move sets to VRSAVE before sets of AltiVec registers.
8548 However, if the function receives nonlocal gotos, reload will set
8549 all call saved registers live. We will end up with:
8551 (set (reg 999) (mem))
8552 (parallel [ (set (reg vrsave) (unspec blah))
8553 (clobber (reg 999))])
8555 The clobber will cause the store into reg 999 to be dead, and
8556 flow will attempt to delete an epilogue insn. In this case, we
8557 need an unspec use/set of the register. */
8559 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8560 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8562 if (!epiloguep || call_used_regs [i])
8563 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8564 gen_rtx_REG (V4SImode, i));
/* Call-saved register in the epilogue: emit the unspec use/set
   described above instead of a plain clobber.  */
8567 rtx reg = gen_rtx_REG (V4SImode, i);
8570 = gen_rtx_SET (VOIDmode,
8572 gen_rtx_UNSPEC (V4SImode,
8573 gen_rtvec (1, reg), 27));
/* Bundle the VRSAVE set and all clobbers into one PARALLEL.  */
8577 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8579 for (i = 0; i < nclobs; ++i)
8580 XVECEXP (insn, 0, i) = clobs[i];
8585 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
8586 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
8589 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
8592 enum machine_mode mode;
8597 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
8598 rtx replacea, replaceb;
8600 int_rtx = GEN_INT (offset);
8602 /* Some cases that need register indexed addressing. */
8603 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
8604 /* Add more cases here. */
8607 /* Whomever calls us must make sure r11 is available in the
8608 flow path of instructions in the prologue. */
8609 offset_rtx = gen_rtx_REG (Pmode, 11);
8610 emit_move_insn (offset_rtx, int_rtx);
/* Tell rs6000_frame_related to substitute the constant offset for
   the index register in the unwind note.  */
8612 replacea = offset_rtx;
/* Plain reg+const addressing: no substitution needed in the note.  */
8617 offset_rtx = int_rtx;
8618 replacea = NULL_RTX;
8619 replaceb = NULL_RTX;
8622 reg = gen_rtx_REG (mode, regno);
8623 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
8624 mem = gen_rtx_MEM (mode, addr);
8625 set_mem_alias_set (mem, rs6000_sr_alias_set);
8627 insn = emit_move_insn (mem, reg);
8629 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
8632 /* Emit function prologue as insns. */
8635 rs6000_emit_prologue ()
8637 rs6000_stack_t *info = rs6000_stack_info ();
8638 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8639 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8640 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8641 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8642 rtx frame_reg_rtx = sp_reg_rtx;
8643 rtx cr_save_rtx = NULL;
8645 int saving_FPRs_inline;
8646 int using_store_multiple;
8647 HOST_WIDE_INT sp_offset = 0;
/* stmw/stm needs at least two consecutive GPRs and no 64-bit regs.  */
8649 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8650 && info->first_gp_reg_save < 31);
8651 saving_FPRs_inline = (info->first_fp_reg_save == 64
8652 || FP_SAVE_INLINE (info->first_fp_reg_save));
8654 /* For V.4, update stack before we do any saving and set back pointer. */
8655 if (info->push_p && DEFAULT_ABI == ABI_V4)
/* Small frame: address saves at sp+offset; large frame: keep the old
   sp in r12 and address saves through it.  */
8657 if (info->total_size < 32767)
8658 sp_offset = info->total_size;
8660 frame_reg_rtx = frame_ptr_rtx;
8661 rs6000_emit_allocate_stack (info->total_size,
8662 (frame_reg_rtx != sp_reg_rtx
8665 || info->first_fp_reg_save < 64
8666 || info->first_gp_reg_save < 32
8668 if (frame_reg_rtx != sp_reg_rtx)
8669 rs6000_emit_stack_tie ();
8672 /* Save AltiVec registers if needed. */
8673 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8677 /* There should be a non inline version of this, for when we
8678 are saving lots of vector registers. */
8679 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8680 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8682 rtx areg, savereg, mem;
8685 offset = info->altivec_save_offset + sp_offset
8686 + 16 * (i - info->first_altivec_reg_save);
8688 savereg = gen_rtx_REG (V4SImode, i);
8690 areg = gen_rtx_REG (Pmode, 0);
8691 emit_move_insn (areg, GEN_INT (offset));
8693 /* AltiVec addressing mode is [reg+reg]. */
8694 mem = gen_rtx_MEM (V4SImode,
8695 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8697 set_mem_alias_set (mem, rs6000_sr_alias_set);
8699 insn = emit_move_insn (mem, savereg);
/* Record the save with the index reg folded to its constant.  */
8701 altivec_frame_fixup (insn, areg, offset);
8705 /* VRSAVE is a bit vector representing which AltiVec registers
8706 are used. The OS uses this to determine which vector
8707 registers to save on a context switch. We need to save
8708 VRSAVE on the stack frame, add whatever AltiVec registers we
8709 used in this function, and do the corresponding magic in the
8712 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8714 rtx reg, mem, vrsave;
8717 /* Get VRSAVE onto a GPR. */
8718 reg = gen_rtx_REG (SImode, 12);
8719 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8721 emit_insn (gen_get_vrsave_internal (reg));
8723 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
/* Save the old VRSAVE value to its frame slot.  */
8726 offset = info->vrsave_save_offset + sp_offset;
8728 = gen_rtx_MEM (SImode,
8729 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8730 set_mem_alias_set (mem, rs6000_sr_alias_set);
8731 insn = emit_move_insn (mem, reg);
8733 /* Include the registers in the mask. */
8734 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8736 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8739 /* If we use the link register, get it into r0. */
8740 if (info->lr_save_p)
8741 emit_move_insn (gen_rtx_REG (Pmode, 0),
8742 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8744 /* If we need to save CR, put it into r12. */
8745 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8747 cr_save_rtx = gen_rtx_REG (SImode, 12);
8748 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8751 /* Do any required saving of fpr's. If only one or two to save, do
8752 it ourselves. Otherwise, call function. */
8753 if (saving_FPRs_inline)
8756 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8757 if ((regs_ever_live[info->first_fp_reg_save+i]
8758 && ! call_used_regs[info->first_fp_reg_save+i]))
8759 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
8760 info->first_fp_reg_save + i,
8761 info->fp_save_offset + sp_offset + 8 * i,
/* Out-of-line FPR save: call the _savefN libgcc routine via a
   PARALLEL of frame stores plus an LR clobber.  */
8764 else if (info->first_fp_reg_save != 64)
8768 const char *alloc_rname;
8770 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8772 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8774 LINK_REGISTER_REGNUM));
8775 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8776 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8777 alloc_rname = ggc_strdup (rname);
8778 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8779 gen_rtx_SYMBOL_REF (Pmode,
8781 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8784 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8785 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8786 GEN_INT (info->fp_save_offset
8787 + sp_offset + 8*i));
8788 mem = gen_rtx_MEM (DFmode, addr);
8789 set_mem_alias_set (mem, rs6000_sr_alias_set);
8791 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8793 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8794 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8795 NULL_RTX, NULL_RTX);
8798 /* Save GPRs. This is done as a PARALLEL if we are using
8799 the store-multiple instructions. */
8800 if (using_store_multiple)
8804 p = rtvec_alloc (32 - info->first_gp_reg_save);
8805 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8806 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8809 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8810 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8811 GEN_INT (info->gp_save_offset
8814 mem = gen_rtx_MEM (reg_mode, addr);
8815 set_mem_alias_set (mem, rs6000_sr_alias_set);
8817 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8819 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8820 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8821 NULL_RTX, NULL_RTX);
/* Otherwise save each live call-saved GPR (plus the PIC register
   when the ABI needs it) individually.  */
8826 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8827 if ((regs_ever_live[info->first_gp_reg_save+i]
8828 && ! call_used_regs[info->first_gp_reg_save+i])
8829 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
8830 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8831 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8832 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode,
8833 info->first_gp_reg_save + i,
8834 info->gp_save_offset + sp_offset + reg_size * i,
8838 /* ??? There's no need to emit actual instructions here, but it's the
8839 easiest way to get the frame unwind information emitted. */
8840 if (current_function_calls_eh_return)
8842 unsigned int i, regno;
8846 regno = EH_RETURN_DATA_REGNO (i);
8847 if (regno == INVALID_REGNUM)
8850 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
8851 info->ehrd_offset + sp_offset
8852 + reg_size * (int) i,
8857 /* Save lr if we used it. */
8858 if (info->lr_save_p)
8860 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8861 GEN_INT (info->lr_save_offset + sp_offset));
8862 rtx reg = gen_rtx_REG (Pmode, 0);
8863 rtx mem = gen_rtx_MEM (Pmode, addr);
8864 /* This should not be of rs6000_sr_alias_set, because of
8865 __builtin_return_address. */
8867 insn = emit_move_insn (mem, reg);
8868 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8869 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8872 /* Save CR if we use any that must be preserved. */
8873 if (info->cr_save_p)
8875 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8876 GEN_INT (info->cr_save_offset + sp_offset));
8877 rtx mem = gen_rtx_MEM (SImode, addr);
8879 set_mem_alias_set (mem, rs6000_sr_alias_set);
8881 /* If r12 was used to hold the original sp, copy cr into r0 now
8883 if (REGNO (frame_reg_rtx) == 12)
8885 cr_save_rtx = gen_rtx_REG (SImode, 0);
8886 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8888 insn = emit_move_insn (mem, cr_save_rtx);
8890 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8891 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8892 OK. All we have to do is specify that _one_ condition code
8893 register is saved in this stack slot. The thrower's epilogue
8894 will then restore all the call-saved registers.
8895 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8896 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8897 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8900 /* Update stack and set back pointer unless this is V.4,
8901 for which it was done previously. */
8902 if (info->push_p && DEFAULT_ABI != ABI_V4)
8903 rs6000_emit_allocate_stack (info->total_size, FALSE);
8905 /* Set frame pointer, if needed. */
8906 if (frame_pointer_needed)
8908 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8910 RTX_FRAME_RELATED_P (insn) = 1;
8913 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8914 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8915 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8916 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
8918 /* If emit_load_toc_table will use the link register, we need to save
8919 it. We use R11 for this purpose because emit_load_toc_table
8920 can use register 0. This allows us to use a plain 'blr' to return
8921 from the procedure more often. */
8922 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
8923 && ! info->lr_save_p
8924 && EXIT_BLOCK_PTR->pred != NULL);
8925 if (save_LR_around_toc_setup)
8926 emit_move_insn (gen_rtx_REG (Pmode, 11),
8927 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8929 rs6000_emit_load_toc_table (TRUE);
8931 if (save_LR_around_toc_setup)
8932 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8933 gen_rtx_REG (Pmode, 11));
/* Darwin PIC: materialize the picbase into LR and copy it to the
   PIC offset table register.  */
8936 if (DEFAULT_ABI == ABI_DARWIN
8937 && flag_pic && current_function_uses_pic_offset_table)
8939 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
8941 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
8944 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
8945 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
8949 /* Write function prologue. */
8952 rs6000_output_function_prologue (file, size)
8954 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8956 rs6000_stack_t *info = rs6000_stack_info ();
8958 if (TARGET_DEBUG_STACK)
8959 debug_stack_info (info);
8961 /* Write .extern for any function we will call to save and restore
8963 if (info->first_fp_reg_save < 64
8964 && !FP_SAVE_INLINE (info->first_fp_reg_save))
8965 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
8966 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
8967 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
8970 /* Write .extern for AIX common mode routines, if needed. */
8971 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
8973 fputs ("\t.extern __mulh\n", file);
8974 fputs ("\t.extern __mull\n", file);
8975 fputs ("\t.extern __divss\n", file);
8976 fputs ("\t.extern __divus\n", file);
8977 fputs ("\t.extern __quoss\n", file);
8978 fputs ("\t.extern __quous\n", file);
/* Only emit the .externs once per translation unit.  */
8979 common_mode_defined = 1;
/* No prologue expander pattern: build the prologue RTL here and run
   final () over it directly to emit the assembly.  */
8982 if (! HAVE_prologue)
8986 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
8987 the "toplevel" insn chain. */
8988 emit_note (0, NOTE_INSN_DELETED);
8989 rs6000_emit_prologue ();
8990 emit_note (0, NOTE_INSN_DELETED);
8992 /* Expand INSN_ADDRESSES so final() doesn't crash. */
8996 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
8998 INSN_ADDRESSES_NEW (insn, addr);
9003 if (TARGET_DEBUG_STACK)
9004 debug_rtx_list (get_insns (), 100);
9005 final (get_insns (), file, FALSE, FALSE);
9009 rs6000_pic_labelno++;
9012 /* Emit function epilogue as insns.
9014 At present, dwarf2out_frame_debug_expr doesn't understand
9015 register restores, so we don't bother setting RTX_FRAME_RELATED_P
9016 anywhere in the epilogue. Most of the insns below would in any case
9017 need special notes to explain where r11 is in relation to the stack. */
9020 rs6000_emit_epilogue (sibcall)
9023 rs6000_stack_t *info;
9024 int restoring_FPRs_inline;
9025 int using_load_multiple;
9026 int using_mfcr_multiple;
9027 int use_backchain_to_restore_sp;
9029 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
9030 rtx frame_reg_rtx = sp_reg_rtx;
9031 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
9032 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9035 info = rs6000_stack_info ();
9036 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
9037 && info->first_gp_reg_save < 31);
/* Sibcalls and eh_return must not branch off to the out-of-line
   restore routine, which would return to our caller.  */
9038 restoring_FPRs_inline = (sibcall
9039 || current_function_calls_eh_return
9040 || info->first_fp_reg_save == 64
9041 || FP_SAVE_INLINE (info->first_fp_reg_save));
9042 use_backchain_to_restore_sp = (frame_pointer_needed
9043 || current_function_calls_alloca
9044 || info->total_size > 32767);
9045 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9046 || rs6000_cpu == PROCESSOR_PPC603
9047 || rs6000_cpu == PROCESSOR_PPC750
9050 /* If we have a frame pointer, a call to alloca, or a large stack
9051 frame, restore the old stack pointer using the backchain. Otherwise,
9052 we know what size to update it with. */
9053 if (use_backchain_to_restore_sp)
9055 /* Under V.4, don't reset the stack pointer until after we're done
9056 loading the saved registers. */
9057 if (DEFAULT_ABI == ABI_V4)
9058 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
9060 emit_move_insn (frame_reg_rtx,
9061 gen_rtx_MEM (Pmode, sp_reg_rtx));
9064 else if (info->push_p)
/* V.4 with a known, small frame: leave sp alone for now and address
   the saves at sp + total_size.  */
9066 if (DEFAULT_ABI == ABI_V4)
9067 sp_offset = info->total_size;
9070 emit_insn (TARGET_32BIT
9071 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9072 GEN_INT (info->total_size))
9073 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9074 GEN_INT (info->total_size)));
9078 /* Restore AltiVec registers if needed. */
9079 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9083 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
9084 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9086 rtx addr, areg, mem;
9088 areg = gen_rtx_REG (Pmode, 0);
9090 (areg, GEN_INT (info->altivec_save_offset
9092 + 16 * (i - info->first_altivec_reg_save)));
9094 /* AltiVec addressing mode is [reg+reg]. */
9095 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
9096 mem = gen_rtx_MEM (V4SImode, addr);
9097 set_mem_alias_set (mem, rs6000_sr_alias_set);
9099 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
9103 /* Restore VRSAVE if needed. */
9104 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
9108 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9109 GEN_INT (info->vrsave_save_offset + sp_offset));
9110 mem = gen_rtx_MEM (SImode, addr);
9111 set_mem_alias_set (mem, rs6000_sr_alias_set);
9112 reg = gen_rtx_REG (SImode, 12);
9113 emit_move_insn (reg, mem);
9115 emit_insn (generate_set_vrsave (reg, info, 1));
9118 /* Get the old lr if we saved it. */
9119 if (info->lr_save_p)
9121 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9122 GEN_INT (info->lr_save_offset + sp_offset));
9123 rtx mem = gen_rtx_MEM (Pmode, addr);
9125 set_mem_alias_set (mem, rs6000_sr_alias_set);
9127 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9130 /* Get the old cr if we saved it. */
9131 if (info->cr_save_p)
9133 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9134 GEN_INT (info->cr_save_offset + sp_offset));
9135 rtx mem = gen_rtx_MEM (SImode, addr);
9137 set_mem_alias_set (mem, rs6000_sr_alias_set);
9139 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9142 /* Set LR here to try to overlap restores below. */
9143 if (info->lr_save_p)
9144 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9145 gen_rtx_REG (Pmode, 0));
9147 /* Load exception handler data registers, if needed. */
9148 if (current_function_calls_eh_return)
9150 unsigned int i, regno;
9156 regno = EH_RETURN_DATA_REGNO (i);
9157 if (regno == INVALID_REGNUM)
9160 addr = plus_constant (frame_reg_rtx,
9161 info->ehrd_offset + sp_offset
9162 + reg_size * (int) i);
9163 mem = gen_rtx_MEM (reg_mode, addr);
9164 set_mem_alias_set (mem, rs6000_sr_alias_set);
9166 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9170 /* Restore GPRs. This is done as a PARALLEL if we are using
9171 the load-multiple instructions. */
9172 if (using_load_multiple)
9175 p = rtvec_alloc (32 - info->first_gp_reg_save);
9176 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9178 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9179 GEN_INT (info->gp_save_offset
9182 rtx mem = gen_rtx_MEM (reg_mode, addr);
9184 set_mem_alias_set (mem, rs6000_sr_alias_set);
9187 gen_rtx_SET (VOIDmode,
9188 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9191 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
/* Otherwise restore GPRs one at a time; this mirrors the save
   condition used in rs6000_emit_prologue.  */
9194 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9195 if ((regs_ever_live[info->first_gp_reg_save+i]
9196 && ! call_used_regs[info->first_gp_reg_save+i])
9197 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
9198 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9199 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9201 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9202 GEN_INT (info->gp_save_offset
9205 rtx mem = gen_rtx_MEM (reg_mode, addr);
9207 set_mem_alias_set (mem, rs6000_sr_alias_set);
9209 emit_move_insn (gen_rtx_REG (reg_mode,
9210 info->first_gp_reg_save + i),
9214 /* Restore fpr's if we need to do it without calling a function. */
9215 if (restoring_FPRs_inline)
9216 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9217 if ((regs_ever_live[info->first_fp_reg_save+i]
9218 && ! call_used_regs[info->first_fp_reg_save+i]))
9221 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9222 GEN_INT (info->fp_save_offset
9225 mem = gen_rtx_MEM (DFmode, addr);
9226 set_mem_alias_set (mem, rs6000_sr_alias_set);
9228 emit_move_insn (gen_rtx_REG (DFmode,
9229 info->first_fp_reg_save + i),
9233 /* If we saved cr, restore it here. Just those that were used. */
9234 if (info->cr_save_p)
9236 rtx r12_rtx = gen_rtx_REG (SImode, 12);
/* Count the CR fields needing restore to decide between one mtcrf
   with a multi-field mask and several single-field mtcrfs.  */
9239 if (using_mfcr_multiple)
9241 for (i = 0; i < 8; i++)
9242 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9248 if (using_mfcr_multiple && count > 1)
9253 p = rtvec_alloc (count);
9256 for (i = 0; i < 8; i++)
9257 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9259 rtvec r = rtvec_alloc (2);
9260 RTVEC_ELT (r, 0) = r12_rtx;
9261 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9262 RTVEC_ELT (p, ndx) =
9263 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9264 gen_rtx_UNSPEC (CCmode, r, 20));
9267 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9272 for (i = 0; i < 8; i++)
9273 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9275 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9281 /* If this is V.4, unwind the stack pointer after all of the loads
9282 have been done. We need to emit a block here so that sched
9283 doesn't decide to move the sp change before the register restores
9284 (which may not have any obvious dependency on the stack). This
9285 doesn't hurt performance, because there is no scheduling that can
9286 be done after this point. */
9287 if (DEFAULT_ABI == ABI_V4)
9289 if (frame_reg_rtx != sp_reg_rtx)
9290 rs6000_emit_stack_tie ();
9292 if (use_backchain_to_restore_sp)
9294 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9296 else if (sp_offset != 0)
9298 emit_insn (Pmode == SImode
9299 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9300 GEN_INT (sp_offset))
9301 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9302 GEN_INT (sp_offset)));
/* eh_return pops an extra, runtime-computed amount off the stack.  */
9306 if (current_function_calls_eh_return)
9308 rtx sa = EH_RETURN_STACKADJ_RTX;
9309 emit_insn (Pmode == SImode
9310 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9311 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
9317 if (! restoring_FPRs_inline)
9318 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9320 p = rtvec_alloc (2);
9322 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9323 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9325 LINK_REGISTER_REGNUM));
9327 /* If we have to restore more than two FP registers, branch to the
9328 restore function. It will return to our caller. */
9329 if (! restoring_FPRs_inline)
9333 const char *alloc_rname;
9335 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9336 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9337 alloc_rname = ggc_strdup (rname);
9338 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9339 gen_rtx_SYMBOL_REF (Pmode,
9342 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9345 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9346 GEN_INT (info->fp_save_offset + 8*i));
9347 mem = gen_rtx_MEM (DFmode, addr);
9348 set_mem_alias_set (mem, rs6000_sr_alias_set);
9350 RTVEC_ELT (p, i+3) =
9351 gen_rtx_SET (VOIDmode,
9352 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9357 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9361 /* Write function epilogue. */
/* Write the assembly epilogue for the current function to FILE and,
   on AIX, emit the xcoff traceback table that follows the function.
   SIZE is the frame size (unused here).
   NOTE(review): this region is visibly elided (embedded line numbers,
   missing braces/declarations) -- verify any change against the full
   source before relying on control flow shown here.  */
9364 rs6000_output_function_epilogue (file, size)
9366 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9368 rs6000_stack_t *info = rs6000_stack_info ();
9369 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9371 if (! HAVE_epilogue)
9373 rtx insn = get_last_insn ();
9374 /* If the last insn was a BARRIER, we don't have to write anything except
9376 if (GET_CODE (insn) == NOTE)
9377 insn = prev_nonnote_insn (insn);
9378 if (insn == 0 || GET_CODE (insn) != BARRIER)
9380 /* This is slightly ugly, but at least we don't have two
9381 copies of the epilogue-emitting code. */
9384 /* A NOTE_INSN_DELETED is supposed to be at the start
9385 and end of the "toplevel" insn chain. */
9386 emit_note (0, NOTE_INSN_DELETED);
9387 rs6000_emit_epilogue (FALSE);
9388 emit_note (0, NOTE_INSN_DELETED);
9390 /* Expand INSN_ADDRESSES so final() doesn't crash. */
9394 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
9396 INSN_ADDRESSES_NEW (insn, addr);
9401 if (TARGET_DEBUG_STACK)
9402 debug_rtx_list (get_insns (), 100);
9403 final (get_insns (), file, FALSE, FALSE);
9408 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9411 We don't output a traceback table if -finhibit-size-directive was
9412 used. The documentation for -finhibit-size-directive reads
9413 ``don't output a @code{.size} assembler directive, or anything
9414 else that would cause trouble if the function is split in the
9415 middle, and the two halves are placed at locations far apart in
9416 memory.'' The traceback table has this property, since it
9417 includes the offset from the start of the function to the
9418 traceback table itself.
9420 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9421 different traceback table. */
9422 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9424 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9425 const char *language_string = lang_hooks.name;
9426 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9429 while (*fname == '.') /* V.4 encodes . in the name */
9432 /* Need label immediately before tbtab, so we can compute its offset
9433 from the function start. */
9436 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9437 ASM_OUTPUT_LABEL (file, fname);
9439 /* The .tbtab pseudo-op can only be used for the first eight
9440 expressions, since it can't handle the possibly variable
9441 length fields that follow. However, if you omit the optional
9442 fields, the assembler outputs zeros for all optional fields
9443 anyways, giving each variable length field is minimum length
9444 (as defined in sys/debug.h). Thus we can not use the .tbtab
9445 pseudo-op at all. */
9447 /* An all-zero word flags the start of the tbtab, for debuggers
9448 that have to find it by searching forward from the entry
9449 point or from the current pc. */
9450 fputs ("\t.long 0\n", file);
9452 /* Tbtab format type. Use format type 0. */
9453 fputs ("\t.byte 0,", file);
9455 /* Language type. Unfortunately, there doesn't seem to be any
9456 official way to get this info, so we use language_string. C
9457 is 0. C++ is 9. No number defined for Obj-C, so use the
9458 value for C for now. There is no official value for Java,
9459 although IBM appears to be using 13. There is no official value
9460 for Chill, so we've chosen 44 pseudo-randomly. */
9461 if (! strcmp (language_string, "GNU C")
9462 || ! strcmp (language_string, "GNU Objective-C"))
9464 else if (! strcmp (language_string, "GNU F77"))
9466 else if (! strcmp (language_string, "GNU Ada"))
9468 else if (! strcmp (language_string, "GNU Pascal"))
9470 else if (! strcmp (language_string, "GNU C++"))
9472 else if (! strcmp (language_string, "GNU Java"))
9474 else if (! strcmp (language_string, "GNU CHILL"))
9478 fprintf (file, "%d,", i);
9480 /* 8 single bit fields: global linkage (not set for C extern linkage,
9481 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9482 from start of procedure stored in tbtab, internal function, function
9483 has controlled storage, function has no toc, function uses fp,
9484 function logs/aborts fp operations. */
9485 /* Assume that fp operations are used if any fp reg must be saved. */
9486 fprintf (file, "%d,",
9487 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9489 /* 6 bitfields: function is interrupt handler, name present in
9490 proc table, function calls alloca, on condition directives
9491 (controls stack walks, 3 bits), saves condition reg, saves
9493 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9494 set up as a frame pointer, even when there is no alloca call. */
9495 fprintf (file, "%d,",
9496 ((optional_tbtab << 6)
9497 | ((optional_tbtab & frame_pointer_needed) << 5)
9498 | (info->cr_save_p << 1)
9499 | (info->lr_save_p)));
9501 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9503 fprintf (file, "%d,",
9504 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9506 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9507 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9511 /* Compute the parameter info from the function decl argument
9514 int next_parm_info_bit = 31;
9516 for (decl = DECL_ARGUMENTS (current_function_decl);
9517 decl; decl = TREE_CHAIN (decl))
9519 rtx parameter = DECL_INCOMING_RTL (decl);
9520 enum machine_mode mode = GET_MODE (parameter);
9522 if (GET_CODE (parameter) == REG)
9524 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9532 else if (mode == DFmode)
/* Each FP parameter takes a 2-bit entry; skip the entry if only
   one bit of the 32-bit parm_info word remains.  */
9537 /* If only one bit will fit, don't or in this entry. */
9538 if (next_parm_info_bit > 0)
9539 parm_info |= (bits << (next_parm_info_bit - 1));
9540 next_parm_info_bit -= 2;
/* Non-FP register parameters: count whole words, one info bit each.  */
9544 fixed_parms += ((GET_MODE_SIZE (mode)
9545 + (UNITS_PER_WORD - 1))
9547 next_parm_info_bit -= 1;
9553 /* Number of fixed point parameters. */
9554 /* This is actually the number of words of fixed point parameters; thus
9555 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9556 fprintf (file, "%d,", fixed_parms);
9558 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9560 /* This is actually the number of fp registers that hold parameters;
9561 and thus the maximum value is 13. */
9562 /* Set parameters on stack bit if parameters are not in their original
9563 registers, regardless of whether they are on the stack? Xlc
9564 seems to set the bit when not optimizing. */
9565 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9567 if (! optional_tbtab)
9570 /* Optional fields follow. Some are variable length. */
9572 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9574 /* There is an entry for each parameter in a register, in the order that
9575 they occur in the parameter list. Any intervening arguments on the
9576 stack are ignored. If the list overflows a long (max possible length
9577 34 bits) then completely leave off all elements that don't fit. */
9578 /* Only emit this long if there was at least one parameter. */
9579 if (fixed_parms || float_parms)
9580 fprintf (file, "\t.long %d\n", parm_info);
9582 /* Offset from start of code to tb table. */
9583 fputs ("\t.long ", file);
9584 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9586 RS6000_OUTPUT_BASENAME (file, fname);
9588 assemble_name (file, fname);
9592 RS6000_OUTPUT_BASENAME (file, fname);
9594 assemble_name (file, fname);
9598 /* Interrupt handler mask. */
9599 /* Omit this long, since we never set the interrupt handler bit
9602 /* Number of CTL (controlled storage) anchors. */
9603 /* Omit this long, since the has_ctl bit is never set above. */
9605 /* Displacement into stack of each CTL anchor. */
9606 /* Omit this list of longs, because there are no CTL anchors. */
9608 /* Length of function name. */
9609 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9611 /* Function name. */
9612 assemble_string (fname, strlen (fname));
9614 /* Register for alloca automatic storage; this is always reg 31.
9615 Only emit this if the alloca bit was set above. */
9616 if (frame_pointer_needed)
9617 fputs ("\t.byte 31\n", file);
9619 fputs ("\t.align 2\n", file);
9624 /* A C compound statement that outputs the assembler code for a thunk
9625 function, used to implement C++ virtual function calls with
9626 multiple inheritance. The thunk acts as a wrapper around a virtual
9627 function, adjusting the implicit object parameter before handing
9628 control off to the real function.
9630 First, emit code to add the integer DELTA to the location that
9631 contains the incoming first argument. Assume that this argument
9632 contains a pointer, and is the one used to pass the `this' pointer
9633 in C++. This is the incoming argument *before* the function
9634 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9635 values of all other incoming arguments.
9637 After the addition, emit code to jump to FUNCTION, which is a
9638 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9639 not touch the return address. Hence returning from FUNCTION will
9640 return to whoever called the current `thunk'.
9642 The effect must be as if FUNCTION had been called directly with the
9643 adjusted first argument. This macro is responsible for emitting
9644 all of the code for a thunk function; output_function_prologue()
9645 and output_function_epilogue() are not invoked.
9647 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9648 been extracted from it.) It might possibly be useful on some
9649 targets, but probably not.
9651 If you do not define this macro, the target-independent code in the
9652 C++ frontend will generate a less efficient heavyweight thunk that
9653 calls FUNCTION instead of jumping to it. The generic approach does
9654 not support varargs. */
/* Emit assembly for a C++ multiple-inheritance thunk: adjust the `this'
   pointer (incoming in r3, or r4 when the return value is passed by
   reference) by DELTA, then tail-jump to FUNCTION.  See the block
   comment above for the full contract.
   NOTE(review): text is elided here (missing braces/cases); do not
   infer the complete ABI switch from what is visible.  */
9657 output_mi_thunk (file, thunk_fndecl, delta, function)
9659 tree thunk_fndecl ATTRIBUTE_UNUSED;
9663 const char *this_reg =
9664 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9667 const char *r0 = reg_names[0];
9668 const char *toc = reg_names[2];
9669 const char *schain = reg_names[11];
9670 const char *r12 = reg_names[12];
9672 static int labelno = 0;
9674 /* Small constants that can be done by one add instruction. */
9675 if (delta >= -32768 && delta <= 32767)
9677 if (! TARGET_NEW_MNEMONICS)
9678 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9680 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9683 /* Large constants that can be done by one addis instruction. */
9684 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9685 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9688 /* 32-bit constants that can be done by an add and addis instruction. */
9689 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9691 /* Break into two pieces, propagating the sign bit from the low
9692 word to the upper word. */
9693 int delta_high = delta >> 16;
9694 int delta_low = delta & 0xffff;
9695 if ((delta_low & 0x8000) != 0)
9698 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9701 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9704 if (! TARGET_NEW_MNEMONICS)
9705 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9707 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9710 /* 64-bit constants, fixme */
9714 /* Get the prefix in front of the names. */
9715 switch (DEFAULT_ABI)
9725 case ABI_AIX_NODESC:
9730 /* If the function is compiled in this module, jump to it directly.
9731 Otherwise, load up its address and jump to it. */
9733 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9735 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9736 && (! lookup_attribute ("longcall",
9737 TYPE_ATTRIBUTES (TREE_TYPE (function)))
9738 || lookup_attribute ("shortcall",
9739 TYPE_ATTRIBUTES (TREE_TYPE (function)))))
9742 fprintf (file, "\tb %s", prefix);
9743 assemble_name (file, fname);
9744 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
/* Not local (or forced longcall): load the target address indirectly,
   per-ABI.  AIX goes through a TOC entry and a 3-word descriptor.  */
9750 switch (DEFAULT_ABI)
9756 /* Set up a TOC entry for the function. */
9757 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9759 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9762 if (TARGET_MINIMAL_TOC)
9763 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9766 fputs ("\t.tc ", file);
9767 assemble_name (file, fname);
9768 fputs ("[TC],", file);
9770 assemble_name (file, fname);
9773 if (TARGET_MINIMAL_TOC)
9774 asm_fprintf (file, (TARGET_32BIT)
9775 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9776 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9777 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9778 assemble_name (file, buf);
9779 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9780 fputs ("-(.LCTOC1)", file);
9781 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
/* Load entry point, TOC and static chain from the function descriptor,
   then branch through CTR.  */
9783 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9787 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9790 asm_fprintf (file, "\tmtctr %s\n", r0);
9792 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9795 asm_fprintf (file, "\tbctr\n");
9798 case ABI_AIX_NODESC:
9800 fprintf (file, "\tb %s", prefix);
9801 assemble_name (file, fname);
9802 if (flag_pic) fputs ("@plt", file);
/* Darwin: branch via the machopic stub when the name is not yet
   defined in this translation unit.  */
9808 fprintf (file, "\tb %s", prefix);
9809 if (flag_pic && !machopic_name_defined_p (fname))
9810 assemble_name (file, machopic_stub_name (fname));
9812 assemble_name (file, fname);
9821 /* A quick summary of the various types of 'constant-pool tables'
9824 Target Flags Name One table per
9825 AIX (none) AIX TOC object file
9826 AIX -mfull-toc AIX TOC object file
9827 AIX -mminimal-toc AIX minimal TOC translation unit
9828 SVR4/EABI (none) SVR4 SDATA object file
9829 SVR4/EABI -fpic SVR4 pic object file
9830 SVR4/EABI -fPIC SVR4 PIC translation unit
9831 SVR4/EABI -mrelocatable EABI TOC function
9832 SVR4/EABI -maix AIX TOC object file
9833 SVR4/EABI -maix -mminimal-toc
9834 AIX minimal TOC translation unit
9836 Name Reg. Set by entries contains:
9837 made by addrs? fp? sum?
9839 AIX TOC 2 crt0 as Y option option
9840 AIX minimal TOC 30 prolog gcc Y Y option
9841 SVR4 SDATA 13 crt0 gcc N Y N
9842 SVR4 pic 30 prolog ld Y not yet N
9843 SVR4 PIC 30 prolog gcc Y option option
9844 EABI TOC 30 prolog gcc Y option option
9848 /* Hash table stuff for keeping track of TOC entries. */
/* One entry of the TOC deduplication hash table: a constant-pool
   RTX plus its machine mode (and, per the elided fields, the label
   number of the TOC entry already emitted for it).  */
9850 struct toc_hash_struct
9852 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9853 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
9855 enum machine_mode key_mode;
/* Hash table mapping constants to already-emitted TOC labels;
   used by output_toc to avoid duplicate entries.  */
9859 static htab_t toc_hash_table;
9861 /* Hash functions for the hash table. */
/* Compute a hash of constant RTX K by mixing its code, mode and every
   operand according to K's rtx format string.  Multipliers 613/1231
   are just mixing primes.  NOTE(review): several cases of the switch
   are elided here; treat the visible arms as representative only.  */
9864 rs6000_hash_constant (k)
9867 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9868 const char *format = GET_RTX_FORMAT (GET_CODE (k));
9869 int flen = strlen (format);
9872 if (GET_CODE (k) == LABEL_REF)
9873 return result * 1231 + X0INT (XEXP (k, 0), 3);
9875 if (GET_CODE (k) == CODE_LABEL)
9880 for (; fidx < flen; fidx++)
9881 switch (format[fidx])
/* 'S'/'s': hash the string contents byte by byte.  */
9886 const char *str = XSTR (k, fidx);
9888 result = result * 613 + len;
9889 for (i = 0; i < len; i++)
9890 result = result * 613 + (unsigned) str[i];
/* 'e': recurse into sub-expression.  */
9895 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9899 result = result * 613 + (unsigned) XINT (k, fidx);
/* 'w': HOST_WIDE_INT may be wider than unsigned -- fold it in chunks.  */
9902 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9903 result = result * 613 + (unsigned) XWINT (k, fidx);
9907 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9908 result = result * 613 + (unsigned) (XWINT (k, fidx)
/* htab hash callback: hash a toc_hash_struct by its constant and mode.  */
9919 toc_hash_function (hash_entry)
9920 const void * hash_entry;
9922 const struct toc_hash_struct *thc =
9923 (const struct toc_hash_struct *) hash_entry;
9924 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9927 /* Compare H1 and H2 for equivalence. */
/* htab equality callback: two TOC entries match when their modes are
   equal and their constants are rtx_equal_p.  */
9930 toc_hash_eq (h1, h2)
9934 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9935 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
/* Different modes can never share a TOC entry.  */
9937 if (((const struct toc_hash_struct *) h1)->key_mode
9938 != ((const struct toc_hash_struct *) h2)->key_mode)
9941 return rtx_equal_p (r1, r2);
9944 /* Mark the hash table-entry HASH_ENTRY. */
/* GC-mark one slot of the TOC hash table so the entry and its key
   survive garbage collection.  */
9947 toc_hash_mark_entry (hash_slot, unused)
9949 void * unused ATTRIBUTE_UNUSED;
9951 const struct toc_hash_struct * hash_entry =
9952 *(const struct toc_hash_struct **) hash_slot;
9953 rtx r = hash_entry->key;
9954 ggc_set_mark (hash_entry);
9955 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
9956 if (GET_CODE (r) == LABEL_REF)
9959 ggc_set_mark (XEXP (r, 0));
9966 /* Mark all the elements of the TOC hash-table *HT. */
/* GC root walker: mark every element of the TOC hash table *VHT
   by traversing it with toc_hash_mark_entry.  */
9969 toc_hash_mark_table (vht)
9974 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9977 /* These are the names given by the C++ front-end to vtables, and
9978 vtable-like objects. Ideally, this logic should not be here;
9979 instead, there should be some programmatic way of inquiring as
9980 to whether or not an object is a vtable. */
/* TRUE iff NAME names a C++ vtable or vtable-like object: the old
   g++ "_vt." mangling, or the new (Itanium) ABI prefixes _ZTV
   (vtable), _ZTT (VTT), _ZTC (construction vtable).

   Fix: the original expansion referenced a caller-local variable
   `name' instead of the macro parameter NAME, so the macro only
   worked by accident at call sites that happened to declare `name'.
   Use the (parenthesized) parameter; NAME is evaluated more than
   once, which is harmless for the pointer arguments used here.  */
#define VTABLE_NAME_P(NAME) \
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0 \
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0 \
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0 \
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
/* Output symbol reference X to FILE.  Vtable symbols are emitted with
   their full (unstripped) name so the linker resolves the symbol
   rather than a section; everything else goes through assemble_name.  */
9989 rs6000_output_symbol_ref (file, x)
9993 /* Currently C++ toc references to vtables can be emitted before it
9994 is decided whether the vtable is public or private. If this is
9995 the case, then the linker will eventually complain that there is
9996 a reference to an unknown section. Thus, for vtables only,
9997 we emit the TOC reference to reference the symbol and not the
9999 const char *name = XSTR (x, 0);
10001 if (VTABLE_NAME_P (name))
10003 RS6000_OUTPUT_BASENAME (file, name);
10006 assemble_name (file, name);
10009 /* Output a TOC entry. We derive the entry name from what is being
/* Output a TOC entry for constant X (mode MODE) to FILE, labelled
   LC..LABELNO.  Deduplicates via toc_hash_table, special-cases FP
   and integer constants, and otherwise derives a ".tc" name from the
   referenced symbol/label.  NOTE(review): this region is elided
   (missing braces and several statements) -- consult the full source
   before modifying control flow.  */
10013 output_toc (file, x, labelno, mode)
10017 enum machine_mode mode;
10020 const char *name = buf;
10021 const char *real_name;
10028 /* When the linker won't eliminate them, don't output duplicate
10029 TOC entries (this happens on AIX if there is any kind of TOC,
10030 and on SVR4 under -fPIC or -mrelocatable). */
10033 struct toc_hash_struct *h;
10036 h = ggc_alloc (sizeof (*h));
10038 h->key_mode = mode;
10039 h->labelno = labelno;
10041 found = htab_find_slot (toc_hash_table, h, 1);
10042 if (*found == NULL)
10044 else /* This is indeed a duplicate.
10045 Set this label equal to that label. */
10047 fputs ("\t.set ", file);
10048 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10049 fprintf (file, "%d,", labelno);
10050 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10051 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
10057 /* If we're going to put a double constant in the TOC, make sure it's
10058 aligned properly when strict alignment is on. */
10059 if (GET_CODE (x) == CONST_DOUBLE
10060 && STRICT_ALIGNMENT
10061 && GET_MODE_BITSIZE (mode) >= 64
10062 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
10063 ASM_OUTPUT_ALIGN (file, 3);
10066 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
10068 /* Handle FP constants specially. Note that if we have a minimal
10069 TOC, things we put here aren't actually in the TOC, so we can allow
10071 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
10073 REAL_VALUE_TYPE rv;
10076 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10077 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
/* 64-bit target: one doubleword entry.  */
10081 if (TARGET_MINIMAL_TOC)
10082 fputs (DOUBLE_INT_ASM_OP, file);
10084 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
10085 k[0] & 0xffffffff, k[1] & 0xffffffff);
10086 fprintf (file, "0x%lx%08lx\n",
10087 k[0] & 0xffffffff, k[1] & 0xffffffff);
/* 32-bit target: two word entries.  */
10092 if (TARGET_MINIMAL_TOC)
10093 fputs ("\t.long ", file);
10095 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
10096 k[0] & 0xffffffff, k[1] & 0xffffffff);
10097 fprintf (file, "0x%lx,0x%lx\n",
10098 k[0] & 0xffffffff, k[1] & 0xffffffff);
10102 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
10104 REAL_VALUE_TYPE rv;
10107 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10108 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
/* SFmode is padded out to a doubleword on 64-bit targets.  */
10112 if (TARGET_MINIMAL_TOC)
10113 fputs (DOUBLE_INT_ASM_OP, file);
10115 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
10116 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
10121 if (TARGET_MINIMAL_TOC)
10122 fputs ("\t.long ", file);
10124 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
10125 fprintf (file, "0x%lx\n", l & 0xffffffff);
10129 else if (GET_MODE (x) == VOIDmode
10130 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10132 unsigned HOST_WIDE_INT low;
10133 HOST_WIDE_INT high;
10135 if (GET_CODE (x) == CONST_DOUBLE)
10137 low = CONST_DOUBLE_LOW (x);
10138 high = CONST_DOUBLE_HIGH (x);
10141 #if HOST_BITS_PER_WIDE_INT == 32
10144 high = (low & 0x80000000) ? ~0 : 0;
10148 low = INTVAL (x) & 0xffffffff;
10149 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10153 /* TOC entries are always Pmode-sized, but since this
10154 is a bigendian machine then if we're putting smaller
10155 integer constants in the TOC we have to pad them.
10156 (This is still a win over putting the constants in
10157 a separate constant pool, because then we'd have
10158 to have both a TOC entry _and_ the actual constant.)
10160 For a 32-bit target, CONST_INT values are loaded and shifted
10161 entirely within `low' and can be stored in one TOC entry. */
10163 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10164 abort ();/* It would be easy to make this work, but it doesn't now. */
10166 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10167 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10168 POINTER_SIZE, &low, &high, 0);
10172 if (TARGET_MINIMAL_TOC)
10173 fputs (DOUBLE_INT_ASM_OP, file);
10175 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10176 (long) high & 0xffffffff, (long) low & 0xffffffff);
10177 fprintf (file, "0x%lx%08lx\n",
10178 (long) high & 0xffffffff, (long) low & 0xffffffff);
10183 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10185 if (TARGET_MINIMAL_TOC)
10186 fputs ("\t.long ", file);
10188 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10189 (long) high & 0xffffffff, (long) low & 0xffffffff);
10190 fprintf (file, "0x%lx,0x%lx\n",
10191 (long) high & 0xffffffff, (long) low & 0xffffffff);
10195 if (TARGET_MINIMAL_TOC)
10196 fputs ("\t.long ", file);
10198 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
10199 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
/* Symbol/label (possibly plus offset) case.  */
10205 if (GET_CODE (x) == CONST)
10207 if (GET_CODE (XEXP (x, 0)) != PLUS)
10210 base = XEXP (XEXP (x, 0), 0);
10211 offset = INTVAL (XEXP (XEXP (x, 0), 1));
10214 if (GET_CODE (base) == SYMBOL_REF)
10215 name = XSTR (base, 0);
10216 else if (GET_CODE (base) == LABEL_REF)
10217 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10218 else if (GET_CODE (base) == CODE_LABEL)
10219 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10223 real_name = (*targetm.strip_name_encoding) (name);
10224 if (TARGET_MINIMAL_TOC)
10225 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10228 fprintf (file, "\t.tc %s", real_name);
/* Encode negative/positive offsets into the TC entry name.  */
10231 fprintf (file, ".N%d", - offset);
10233 fprintf (file, ".P%d", offset);
10235 fputs ("[TC],", file);
10238 /* Currently C++ toc references to vtables can be emitted before it
10239 is decided whether the vtable is public or private. If this is
10240 the case, then the linker will eventually complain that there is
10241 a TOC reference to an unknown section. Thus, for vtables only,
10242 we emit the TOC reference to reference the symbol and not the
10244 if (VTABLE_NAME_P (name))
10246 RS6000_OUTPUT_BASENAME (file, name);
10248 fprintf (file, "%d", offset);
10249 else if (offset > 0)
10250 fprintf (file, "+%d", offset);
10253 output_addr_const (file, x);
10257 /* Output an assembler pseudo-op to write an ASCII string of N characters
10258 starting at P to FILE.
10260 On the RS/6000, we have to do this using the .byte operation and
10261 write out special characters outside the quoted string.
10262 Also, the assembler is broken; very long strings are truncated,
10263 so we must artificially break them up early. */
/* Output N bytes starting at P as .byte directives, keeping printable
   runs inside quoted strings and emitting other bytes numerically.
   Strings are broken before 512 chars because the AIX assembler
   truncates very long strings (see comment above).  */
10266 output_ascii (file, p, n)
10272 int i, count_string;
10273 const char *for_string = "\t.byte \"";
10274 const char *for_decimal = "\t.byte ";
10275 const char *to_close = NULL;
10278 for (i = 0; i < n; i++)
/* Printable ASCII goes inside a quoted string.  */
10281 if (c >= ' ' && c < 0177)
10284 fputs (for_string, file);
10287 /* Write two quotes to get one. */
10295 for_decimal = "\"\n\t.byte ";
/* Assembler limit: close and restart the string before 512 chars.  */
10299 if (count_string >= 512)
10301 fputs (to_close, file);
10303 for_string = "\t.byte \"";
10304 for_decimal = "\t.byte ";
/* Non-printable byte: emit as a decimal .byte value.  */
10312 fputs (for_decimal, file);
10313 fprintf (file, "%d", c);
10315 for_string = "\n\t.byte \"";
10316 for_decimal = ", ";
10322 /* Now close the string if we have written one. Then end the line. */
10324 fputs (to_close, file);
10327 /* Generate a unique section name for FILENAME for a section type
10328 represented by SECTION_DESC. Output goes into BUF.
10330 SECTION_DESC can be any string, as long as it is different for each
10331 possible section type.
10333 We name the section in the same manner as xlc. The name begins with an
10334 underscore followed by the filename (after stripping any leading directory
10335 names) with the last period replaced by the string SECTION_DESC. If
10336 FILENAME does not contain a period, SECTION_DESC is appended to the end of
/* Build a unique xlc-style section name into *BUF for FILENAME and
   SECTION_DESC: strip leading directories, replace the last period
   with SECTION_DESC (or append it if there is no period), and drop
   non-alphanumeric characters.  *BUF is permalloc'd.  */
10340 rs6000_gen_section_name (buf, filename, section_desc)
10342 const char *filename;
10343 const char *section_desc;
10345 const char *q, *after_last_slash, *last_period = 0;
/* Find the basename and its last '.'.  */
10349 after_last_slash = filename;
10350 for (q = filename; *q; q++)
10353 after_last_slash = q + 1;
10354 else if (*q == '.')
10358 len = strlen (after_last_slash) + strlen (section_desc) + 2;
10359 *buf = (char *) permalloc (len);
10364 for (q = after_last_slash; *q; q++)
10366 if (q == last_period)
10368 strcpy (p, section_desc);
10369 p += strlen (section_desc);
10372 else if (ISALNUM (*q))
/* No period in the basename: append SECTION_DESC at the end.  */
10376 if (last_period == 0)
10377 strcpy (p, section_desc);
10382 /* Emit profile function. */
/* Emit the RTL call to the profiling routine (mcount) for label
   number LABELNO.  AIX passes the address of a LP label; Darwin may
   route through a machopic stub and passes the caller's address.  */
10385 output_profile_hook (labelno)
10388 if (DEFAULT_ABI == ABI_AIX)
10391 const char *label_name;
10394 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10395 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
10396 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10398 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10401 else if (DEFAULT_ABI == ABI_DARWIN)
10403 const char *mcount_name = RS6000_MCOUNT;
10404 int caller_addr_regno = LINK_REGISTER_REGNUM;
10406 /* Be conservative and always set this, at least for now. */
10407 current_function_uses_pic_offset_table = 1;
10410 /* For PIC code, set up a stub and collect the caller's address
10411 from r0, which is where the prologue puts it. */
10414 mcount_name = machopic_stub_name (mcount_name);
10415 if (current_function_uses_pic_offset_table)
10416 caller_addr_regno = 0;
10419 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10421 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10425 /* Write function profiler code. */
/* Write textual profiler-call code for label LABELNO to FILE.
   V.4/eabi emits mcount setup inline (three PIC variants); AIX/Darwin
   do their work in output_profile_hook instead.
   NOTE(review): region is elided; switch cases are incomplete here.  */
10428 output_function_profiler (file, labelno)
10434 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10435 switch (DEFAULT_ABI)
10441 case ABI_AIX_NODESC:
10442 fprintf (file, "\tmflr %s\n", reg_names[0]);
/* -fpic: fetch the label address via the GOT.  */
10445 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10446 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10447 reg_names[0], reg_names[1]);
10448 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10449 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10450 assemble_name (file, buf);
10451 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
10453 else if (flag_pic > 1)
10455 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10456 reg_names[0], reg_names[1]);
10457 /* Now, we need to get the address of the label. */
10458 fputs ("\tbl 1f\n\t.long ", file);
10459 assemble_name (file, buf);
10460 fputs ("-.\n1:", file);
10461 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10462 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10463 reg_names[0], reg_names[11]);
10464 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10465 reg_names[0], reg_names[0], reg_names[11]);
/* Non-PIC: materialize the label address with lis/la.  */
10469 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10470 assemble_name (file, buf);
10471 fputs ("@ha\n", file);
10472 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10473 reg_names[0], reg_names[1]);
10474 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10475 assemble_name (file, buf);
10476 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
/* Preserve the static chain around the mcount call (r30 is used
   as scratch here).  */
10479 if (current_function_needs_context)
10480 asm_fprintf (file, "\tmr %s,%s\n",
10481 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10482 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10483 if (current_function_needs_context)
10484 asm_fprintf (file, "\tmr %s,%s\n",
10485 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10490 /* Don't do anything, done in output_profile_hook (). */
10496 /* Adjust the cost of a scheduling dependency. Return the new cost of
10497 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
/* Scheduler hook: adjust the cost of dependency LINK between INSN and
   DEP_INSN.  Returns COST unchanged except for true data dependencies,
   where jumps after mtctr/mtlr get a fixed latency and branches after
   compares get extra slack to discourage mispredict-prone placement.  */
10500 rs6000_adjust_cost (insn, link, dep_insn, cost)
10503 rtx dep_insn ATTRIBUTE_UNUSED;
/* Unrecognizable insns get no adjustment.  */
10506 if (! recog_memoized (insn))
/* Anti/output dependencies keep the cost given.  */
10509 if (REG_NOTE_KIND (link) != 0)
10512 if (REG_NOTE_KIND (link) == 0)
10514 /* Data dependency; DEP_INSN writes a register that INSN reads
10515 some cycles later. */
10516 switch (get_attr_type (insn))
10519 /* Tell the first scheduling pass about the latency between
10520 a mtctr and bctr (and mtlr and br/blr). The first
10521 scheduling pass will not know about this latency since
10522 the mtctr instruction, which has the latency associated
10523 to it, will be generated by reload. */
10524 return TARGET_POWER ? 5 : 4;
10526 /* Leave some extra cycles between a compare and its
10527 dependent branch, to inhibit expensive mispredicts. */
10528 if ((rs6000_cpu_attr == CPU_PPC603
10529 || rs6000_cpu_attr == CPU_PPC604
10530 || rs6000_cpu_attr == CPU_PPC604E
10531 || rs6000_cpu_attr == CPU_PPC620
10532 || rs6000_cpu_attr == CPU_PPC630
10533 || rs6000_cpu_attr == CPU_PPC750
10534 || rs6000_cpu_attr == CPU_PPC7400
10535 || rs6000_cpu_attr == CPU_PPC7450
10536 || rs6000_cpu_attr == CPU_POWER4)
10537 && recog_memoized (dep_insn)
10538 && (INSN_CODE (dep_insn) >= 0)
10539 && (get_attr_type (dep_insn) == TYPE_COMPARE
10540 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10541 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10542 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10547 /* Fall out to return default cost. */
10553 /* A C statement (sans semicolon) to update the integer scheduling
10554 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10555 INSN earlier, increase the priority to execute INSN later. Do not
10556 define this macro if you do not need to adjust the scheduling
10557 priorities of insns. */
/* Scheduler hook: adjust scheduling PRIORITY of INSN.  Intended for
   CPUs with asymmetric integer units (e.g. 750) where mul/div should
   be scheduled ahead of other integer ops.
   NOTE(review): the switch body is elided here -- the visible fprintf
   looks like leftover debug output; confirm against the full source.  */
10560 rs6000_adjust_priority (insn, priority)
10561 rtx insn ATTRIBUTE_UNUSED;
10564 /* On machines (like the 750) which have asymmetric integer units,
10565 where one integer unit can do multiply and divides and the other
10566 can't, reduce the priority of multiply/divide so it is scheduled
10567 before other integer operations. */
10570 if (! INSN_P (insn))
10573 if (GET_CODE (PATTERN (insn)) == USE)
10576 switch (rs6000_cpu_attr) {
10578 switch (get_attr_type (insn))
10585 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10586 priority, priority);
10587 if (priority >= 0 && priority < 0x01000000)
10597 /* Return how many instructions the machine can issue per cycle. */
/* Return the number of instructions this CPU can issue per cycle.
   NOTE(review): the per-CPU cases and default return are elided.  */
10600 rs6000_issue_rate ()
10602 switch (rs6000_cpu_attr) {
10603 case CPU_RIOS1: /* ? */
10605 case CPU_PPC601: /* ? */
10625 /* Length in units of the trampoline for entering a nested function. */
/* Return the byte size of the trampoline used to enter a nested
   function: 12/24 (32/64-bit) for AIX descriptors, 40/48 for the
   V.4-style __trampoline_setup ABIs.  */
10628 rs6000_trampoline_size ()
10632 switch (DEFAULT_ABI)
10638 ret = (TARGET_32BIT) ? 12 : 24;
10643 case ABI_AIX_NODESC:
10644 ret = (TARGET_32BIT) ? 40 : 48;
10651 /* Emit RTL insns to initialize the variable parts of a trampoline.
10652 FNADDR is an RTX for the address of the function's pure code.
10653 CXT is an RTX for the static chain value for the function. */
/* Emit RTL to fill in the variable parts of a trampoline at ADDR:
   FNADDR is the nested function's code address, CXT the static chain.
   AIX builds a 3-word function descriptor; other ABIs call the
   __trampoline_setup library routine.  */
10656 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10661 enum machine_mode pmode = Pmode;
10662 int regsize = (TARGET_32BIT) ? 4 : 8;
10663 rtx ctx_reg = force_reg (pmode, cxt);
10665 switch (DEFAULT_ABI)
10670 /* Macros to shorten the code expansions below. */
10671 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10672 #define MEM_PLUS(addr,offset) \
10673 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10675 /* Under AIX, just build the 3 word function descriptor */
10678 rtx fn_reg = gen_reg_rtx (pmode);
10679 rtx toc_reg = gen_reg_rtx (pmode);
/* Copy entry point and TOC from FNADDR's descriptor; store the
   static chain as the third word.  */
10680 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10681 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10682 emit_move_insn (MEM_DEREF (addr), fn_reg);
10683 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10684 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10688 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10691 case ABI_AIX_NODESC:
10692 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10693 FALSE, VOIDmode, 4,
10695 GEN_INT (rs6000_trampoline_size ()), SImode,
10705 /* Table of valid machine attributes. */
/* Machine attributes recognized by this backend; both `longcall' and
   `shortcall' share one handler.  Terminated by a NULL entry.  */
10707 const struct attribute_spec rs6000_attribute_table[] =
10709 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10710 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10711 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10712 { NULL, 0, 0, false, false, false, NULL }
10715 /* Handle a "longcall" or "shortcall" attribute; arguments as in
10716 struct attribute_spec.handler. */
/* Attribute handler for "longcall"/"shortcall": warn and drop the
   attribute unless it is applied to a function type (or a FIELD_DECL /
   TYPE_DECL, through which a function type may be reached).  */
10719 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10722 tree args ATTRIBUTE_UNUSED;
10723 int flags ATTRIBUTE_UNUSED;
10724 bool *no_add_attrs;
10726 if (TREE_CODE (*node) != FUNCTION_TYPE
10727 && TREE_CODE (*node) != FIELD_DECL
10728 && TREE_CODE (*node) != TYPE_DECL)
10730 warning ("`%s' attribute only applies to functions",
10731 IDENTIFIER_POINTER (name));
10732 *no_add_attrs = true;
10738 /* Set longcall attributes on all functions declared when
10739 rs6000_default_long_calls is true. */
10741 rs6000_set_default_type_attributes (type)
/* With -mlongcall in effect, make every function and method type
   "longcall" by prepending the attribute to the type's list.  */
10744 if (rs6000_default_long_calls
10745 && (TREE_CODE (type) == FUNCTION_TYPE
10746 || TREE_CODE (type) == METHOD_TYPE))
10747 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
10749 TYPE_ATTRIBUTES (type));
10752 /* Return a reference suitable for calling a function with the
10753 longcall attribute. */
10756 rs6000_longcall_ref (call_ref)
10759 const char *call_name;
/* Only SYMBOL_REFs need the name rewrite below.  */
10762 if (GET_CODE (call_ref) != SYMBOL_REF)
10765 /* System V adds '.' to the internal name, so skip them. */
10766 call_name = XSTR (call_ref, 0);
10767 if (*call_name == '.')
10769 while (*call_name == '.')
/* Re-create the SYMBOL_REF from the stripped (dot-less) name so the
   call references the external symbol.  */
10772 node = get_identifier (call_name);
10773 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
/* A long call must go through a register.  */
10776 return force_reg (Pmode, call_ref);
10780 #ifdef USING_ELFOS_H
10782 /* A C statement or statements to switch to the appropriate section
10783 for output of RTX in mode MODE. You can assume that RTX is some
10784 kind of constant in RTL. The argument MODE is redundant except in
10785 the case of a `const_int' rtx. Select the section by calling
10786 `text_section' or one of the alternatives for other sections.
10788 Do not define this macro if you put all constants in the read-only
10792 rs6000_elf_select_rtx_section (mode, x, align)
10793 enum machine_mode mode;
10795 unsigned HOST_WIDE_INT align;
/* TOC-eligible constants go to the TOC; everything else falls back to
   the generic ELF constant-pool section selection.  */
10797 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10800 default_elf_select_rtx_section (mode, x, align);
10803 /* A C statement or statements to switch to the appropriate
10804 section for output of DECL. DECL is either a `VAR_DECL' node
10805 or a constant of some sort. RELOC indicates whether forming
10806 the initial value of DECL requires link-time relocations. */
10809 rs6000_elf_select_section (decl, reloc, align)
10812 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
10814 int size = int_size_in_bytes (TREE_TYPE (decl));
/* Section-switch callbacks, indexed by
   (readonly ? 0 : 2) + (needs_sdata ? 1 : 0) at the bottom.  */
10817 static void (* const sec_funcs[4]) PARAMS ((void)) = {
10818 &readonly_data_section,
/* Small-data placement: known small positive size, small data enabled,
   and SDATA_DATA only covers public decls.  */
10824 needs_sdata = (size > 0
10825 && size <= g_switch_value
10826 && rs6000_sdata != SDATA_NONE
10827 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
/* Decide whether the object may live in a read-only section.  */
10829 if (TREE_CODE (decl) == STRING_CST)
10830 readonly = ! flag_writable_strings;
10831 else if (TREE_CODE (decl) == VAR_DECL)
10832 readonly = (! (flag_pic && reloc)
10833 && TREE_READONLY (decl)
10834 && ! TREE_SIDE_EFFECTS (decl)
10835 && DECL_INITIAL (decl)
10836 && DECL_INITIAL (decl) != error_mark_node
10837 && TREE_CONSTANT (DECL_INITIAL (decl)));
10838 else if (TREE_CODE (decl) == CONSTRUCTOR)
10839 readonly = (! (flag_pic && reloc)
10840 && ! TREE_SIDE_EFFECTS (decl)
10841 && TREE_CONSTANT (decl));
/* Read-only small data (.sdata2) exists only under the EABI.  */
10844 if (needs_sdata && rs6000_sdata != SDATA_EABI)
10847 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10850 /* A C statement to build up a unique section name, expressed as a
10851 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10852 RELOC indicates whether the initial value of EXP requires
10853 link-time relocations. If you do not define this macro, GCC will use
10854 the symbol name prefixed by `.' as the section name. Note - this
10855 macro can now be called for uninitialized data items as well as
10856 initialised data and functions. */
10859 rs6000_elf_unique_section (decl, reloc)
10867 const char *prefix;
/* Section-name prefixes indexed by section kind; column 1 holds the
   .gnu.linkonce variants used for DECL_ONE_ONLY decls.  */
10869 static const char *const prefixes[7][2] =
10871 { ".rodata.", ".gnu.linkonce.r." },
10872 { ".sdata2.", ".gnu.linkonce.s2." },
10873 { ".data.", ".gnu.linkonce.d." },
10874 { ".sdata.", ".gnu.linkonce.s." },
10875 { ".bss.", ".gnu.linkonce.b." },
10876 { ".sbss.", ".gnu.linkonce.sb." },
10877 { ".text.", ".gnu.linkonce.t." }
10880 if (TREE_CODE (decl) == FUNCTION_DECL)
/* Data decls: determine read-only-ness the same way
   rs6000_elf_select_section does.  */
10889 if (TREE_CODE (decl) == STRING_CST)
10890 readonly = ! flag_writable_strings;
10891 else if (TREE_CODE (decl) == VAR_DECL)
10892 readonly = (! (flag_pic && reloc)
10893 && TREE_READONLY (decl)
10894 && ! TREE_SIDE_EFFECTS (decl)
10895 && TREE_CONSTANT (DECL_INITIAL (decl)));
10897 size = int_size_in_bytes (TREE_TYPE (decl));
10898 needs_sdata = (size > 0
10899 && size <= g_switch_value
10900 && rs6000_sdata != SDATA_NONE
10901 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
/* Uninitialized objects go to a BSS-style section.  */
10903 if (DECL_INITIAL (decl) == 0
10904 || DECL_INITIAL (decl) == error_mark_node)
10906 else if (! readonly)
10913 /* .sdata2 is only for EABI. */
10914 if (sec == 0 && rs6000_sdata != SDATA_EABI)
/* Build "<prefix><decl-name>" and record it as the section name.  */
10920 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
10921 name = (*targetm.strip_name_encoding) (name);
10922 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10923 len = strlen (name) + strlen (prefix);
10924 string = alloca (len + 1);
10926 sprintf (string, "%s%s", prefix, name);
10928 DECL_SECTION_NAME (decl) = build_string (len, string);
10932 /* If we are referencing a function that is static or is known to be
10933 in this file, make the SYMBOL_REF special. We can use this to indicate
10934 that we can branch to this function without emitting a no-op after the
10935 call. For real AIX calling sequences, we also replace the
10936 function name with the real name (1 or 2 leading .'s), rather than
10937 the function descriptor name. This saves a lot of overriding code
10938 to read the prefixes. */
10941 rs6000_elf_encode_section_info (decl, first)
10948 if (TREE_CODE (decl) == FUNCTION_DECL)
10950 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
/* Local (or already-emitted) non-weak functions can be called
   without the TOC-restore no-op after the call.  */
10951 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
10952 && ! DECL_WEAK (decl))
10953 SYMBOL_REF_FLAG (sym_ref) = 1;
10955 if (DEFAULT_ABI == ABI_AIX)
/* Prepend the leading dot(s) to the symbol name.  NOTE(review):
   len1 re-tests DEFAULT_ABI even though the guard above already
   holds — presumably the original condition covered a second ABI;
   confirm against the full source.  */
10957 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
10958 size_t len2 = strlen (XSTR (sym_ref, 0));
10959 char *str = alloca (len1 + len2 + 1);
10962 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
10964 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
10967 else if (rs6000_sdata != SDATA_NONE
10968 && DEFAULT_ABI == ABI_V4
10969 && TREE_CODE (decl) == VAR_DECL)
10971 int size = int_size_in_bytes (TREE_TYPE (decl));
10972 tree section_name = DECL_SECTION_NAME (decl);
10973 const char *name = (char *)0;
10978 if (TREE_CODE (section_name) == STRING_CST)
10980 name = TREE_STRING_POINTER (section_name);
10981 len = TREE_STRING_LENGTH (section_name);
/* A variable is small-data if it is small enough for -G, or was
   placed explicitly in one of the small data/bss sections.  */
10987 if ((size > 0 && size <= g_switch_value)
10989 && ((len == sizeof (".sdata") - 1
10990 && strcmp (name, ".sdata") == 0)
10991 || (len == sizeof (".sdata2") - 1
10992 && strcmp (name, ".sdata2") == 0)
10993 || (len == sizeof (".sbss") - 1
10994 && strcmp (name, ".sbss") == 0)
10995 || (len == sizeof (".sbss2") - 1
10996 && strcmp (name, ".sbss2") == 0)
10997 || (len == sizeof (".PPC.EMB.sdata0") - 1
10998 && strcmp (name, ".PPC.EMB.sdata0") == 0)
10999 || (len == sizeof (".PPC.EMB.sbss0") - 1
11000 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
11002 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11003 size_t len = strlen (XSTR (sym_ref, 0));
11004 char *str = alloca (len + 2);
/* Mark the symbol as small-data by prefixing its name with an
   encoding character (the prefix byte itself is set elsewhere).  */
11007 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
11008 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
/* Strip the leading '*' and '@' encoding characters from symbol
   name STR and return the bare name.  */
11013 static const char *
11014 rs6000_elf_strip_name_encoding (str)
11017 while (*str == '*' || *str == '@')
11022 #endif /* USING_ELFOS_H */
11025 /* Return a REG that occurs in ADDR with coefficient 1.
11026 ADDR can be effectively incremented by incrementing REG.
11028 r0 is special and we must not select it as an address
11029 register by this routine since our caller will try to
11030 increment the returned register via an "la" instruction. */
11033 find_addr_reg (addr)
/* Walk down PLUS chains, preferring a non-r0 REG operand and
   otherwise descending past constant operands.  */
11036 while (GET_CODE (addr) == PLUS)
11038 if (GET_CODE (XEXP (addr, 0)) == REG
11039 && REGNO (XEXP (addr, 0)) != 0)
11040 addr = XEXP (addr, 0);
11041 else if (GET_CODE (XEXP (addr, 1)) == REG
11042 && REGNO (XEXP (addr, 1)) != 0)
11043 addr = XEXP (addr, 1);
11044 else if (CONSTANT_P (XEXP (addr, 0)))
11045 addr = XEXP (addr, 1);
11046 else if (CONSTANT_P (XEXP (addr, 1)))
11047 addr = XEXP (addr, 0);
/* Success only if we ended on a usable (non-r0) register.  */
11051 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
/* Report an invalid address within instruction OP; does not return.  */
11057 rs6000_fatal_bad_address (op)
11060 fatal_insn ("bad address", op);
11063 /* Called to register all of our global variables with the garbage
11067 rs6000_add_gc_roots ()
/* The TOC hash table must itself be walked by the collector so its
   tree entries stay live; register a custom mark routine for it.  */
11069 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
11070 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
11071 toc_hash_mark_table);
11077 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
11078 reference and a constant. */
11081 symbolic_operand (op)
11084 switch (GET_CODE (op))
/* Accept a bare SYMBOL_REF, or (symbol_ref/label_ref + const_int);
   note && binds tighter than ||, so the CONST_INT test qualifies
   only the parenthesized symbol/label alternative.  */
11091 return (GET_CODE (op) == SYMBOL_REF ||
11092 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
11093 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
11094 && GET_CODE (XEXP (op, 1)) == CONST_INT);
11101 #ifdef RS6000_LONG_BRANCH
/* Head of the linked list of compiler-generated long-branch stubs.  */
11103 static tree stub_list = 0;
11105 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11106 procedure calls to the linked list. */
11109 add_compiler_stub (label_name, function_name, line_number)
11111 tree function_name;
/* Pack the triple into one TREE_LIST node: PURPOSE = function name,
   VALUE = label name, TYPE = line number; push onto stub_list.  */
11114 tree stub = build_tree_list (function_name, label_name);
11115 TREE_TYPE (stub) = build_int_2 (line_number, 0);
11116 TREE_CHAIN (stub) = stub_list;
11120 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
11121 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
11122 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
11124 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11125 handling procedure calls from the linked list and initializes the
11129 output_compiler_stub ()
11132 char label_buf[256];
11134 tree tmp_stub, stub;
/* Emit each pending stub: its label, an optional line-number stab,
   and a lis/ori/mtctr/bctr sequence jumping to the real function.  */
11137 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11139 fprintf (asm_out_file,
11140 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11142 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11143 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11144 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub))
11145 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
/* '*'-prefixed assembler names are used verbatim (minus the '*');
   otherwise the name gets a leading underscore.  */
11147 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11149 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11152 label_buf[0] = '_';
11153 strcpy (label_buf+1,
11154 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
/* Long branch: load the target address into r12 and branch via CTR.  */
11157 strcpy (tmp_buf, "lis r12,hi16(");
11158 strcat (tmp_buf, label_buf);
11159 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11160 strcat (tmp_buf, label_buf);
11161 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11162 output_asm_insn (tmp_buf, 0);
11164 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11165 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11166 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11167 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11173 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11174 already there or not. */
11177 no_previous_def (function_name)
11178 tree function_name;
/* Linear search of the stub list for FUNCTION_NAME.  */
11181 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11182 if (function_name == STUB_FUNCTION_NAME (stub))
11187 /* GET_PREV_LABEL gets the label name from the previous definition of
11191 get_prev_label (function_name)
11192 tree function_name;
/* Return the stub label previously recorded for FUNCTION_NAME.  */
11195 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11196 if (function_name == STUB_FUNCTION_NAME (stub))
11197 return STUB_LABEL_NAME (stub);
11201 /* INSN is either a function call or a millicode call. It may have an
11202 unconditional jump in its delay slot.
11204 CALL_DEST is the routine we are calling. */
11207 output_call (insn, call_dest, operand_number)
11210 int operand_number;
11212 static char buf[256];
/* Direct calls under -mlongbranch (non-PIC) go through a compiler
   stub; reuse the stub if this target already has one.  */
11213 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11216 tree funname = get_identifier (XSTR (call_dest, 0));
11218 if (no_previous_def (funname))
11221 rtx label_rtx = gen_label_rtx ();
11222 char *label_buf, temp_buf[256];
11223 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11224 CODE_LABEL_NUMBER (label_rtx));
/* Drop the '*' "use verbatim" marker if present.  */
11225 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11226 labelname = get_identifier (label_buf);
/* Scan back for the source-line NOTE so the stub's stab entry can
   carry the call's line number.  */
11227 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11229 line_number = NOTE_LINE_NUMBER (insn);
11230 add_compiler_stub (labelname, funname, line_number);
11233 labelname = get_prev_label (funname);
11235 sprintf (buf, "jbsr %%z%d,%.246s",
11236 operand_number, IDENTIFIER_POINTER (labelname));
/* Otherwise a plain "bl" reaches the target.  */
11241 sprintf (buf, "bl %%z%d", operand_number);
11246 #endif /* RS6000_LONG_BRANCH */
/* Build in BUF the local label "L<N>$<SYMBOL>", preserving an existing
   leading quote or adding quotes when the symbol requires them.
   (Comment kept outside the #define: inserting lines between the
   backslash continuations would break the macro.)  */
11248 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11250 const char *const symbol_ = (SYMBOL); \
11251 char *buffer_ = (BUF); \
11252 if (symbol_[0] == '"') \
11254 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11256 else if (name_needs_quotes(symbol_)) \
11258 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11262 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11267 /* Generate PIC and indirect symbol stubs. */
11270 machopic_output_stub (file, symb, stub)
11272 const char *symb, *stub;
11274 unsigned int length;
11275 char *symbol_name, *lazy_ptr_name;
11276 char *local_label_0;
11277 static int label = 0;
11279 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11280 symb = (*targetm.strip_name_encoding) (symb);
/* Build the assembler names used below from the bare symbol.  */
11284 length = strlen (symb);
11285 symbol_name = alloca (length + 32);
11286 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11288 lazy_ptr_name = alloca (length + 32);
11289 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11291 local_label_0 = alloca (length + 32);
11292 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11295 machopic_picsymbol_stub_section ();
11297 machopic_symbol_stub_section ();
11299 fprintf (file, "%s:\n", stub);
11300 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
/* PIC stub: obtain the current PC with bcl/mflr, compute the lazy
   pointer's address relative to it, load it, and jump via CTR.  */
11304 fprintf (file, "\tmflr r0\n");
11305 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11306 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11307 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11308 lazy_ptr_name, local_label_0);
11309 fprintf (file, "\tmtlr r0\n");
11310 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11311 lazy_ptr_name, local_label_0);
11312 fprintf (file, "\tmtctr r12\n");
11313 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11314 lazy_ptr_name, local_label_0);
11315 fprintf (file, "\tbctr\n");
11318 fprintf (file, "non-pure not supported\n");
/* Emit the lazy symbol pointer, initially pointing at the dyld
   binding helper which resolves the symbol on first use.  */
11320 machopic_lazy_symbol_ptr_section ();
11321 fprintf (file, "%s:\n", lazy_ptr_name);
11322 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11323 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11326 /* Legitimize PIC addresses. If the address is already
11327 position-independent, we return ORIG. Newly generated
11328 position-independent addresses go into a reg. This is REG if non
11329 zero, otherwise we allocate register(s) as necessary. */
/* True iff X fits in a signed 16-bit immediate.  */
11331 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11334 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11336 enum machine_mode mode;
/* Allocate a scratch register unless reload forbids it.  */
11341 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11342 reg = gen_reg_rtx (Pmode);
11344 if (GET_CODE (orig) == CONST)
/* Already pic_offset_table-relative: nothing to do.  */
11346 if (GET_CODE (XEXP (orig, 0)) == PLUS
11347 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
/* Legitimize both operands of a PLUS, then recombine them.  */
11350 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11353 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11356 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11362 if (GET_CODE (offset) == CONST_INT)
/* Small constant offsets fold directly into the address.  */
11364 if (SMALL_INT (offset))
11365 return plus_constant (base, INTVAL (offset));
11366 else if (! reload_in_progress && ! reload_completed)
11367 offset = force_reg (Pmode, offset);
/* During reload we cannot force the offset to a register: fetch the
   whole constant from the constant pool instead.  */
11370 rtx mem = force_const_mem (Pmode, orig);
11371 return machopic_legitimize_pic_address (mem, Pmode, reg);
11374 return gen_rtx (PLUS, Pmode, base, offset);
11377 /* Fall back on generic machopic code. */
11378 return machopic_legitimize_pic_address (orig, mode, reg);
11381 /* This is just a placeholder to make linking work without having to
11382 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11383 ever needed for Darwin (not too likely!) this would have to get a
11384 real definition. */
11391 #endif /* TARGET_MACHO */
/* Compute section flags for NAME/DECL, adjusting the generic defaults
   for target-specific needs.  */
11394 static unsigned int
11395 rs6000_elf_section_type_flags (decl, name, reloc)
11400 unsigned int flags = default_section_type_flags (decl, name, reloc);
/* -mrelocatable sections get runtime fixups, so none may be
   mapped read-only.  */
11402 if (TARGET_RELOCATABLE)
11403 flags |= SECTION_WRITE;
11408 /* Record an element in the table of global constructors. SYMBOL is
11409 a SYMBOL_REF of the function to be called; PRIORITY is a number
11410 between 0 and MAX_INIT_PRIORITY.
11412 This differs from default_named_section_asm_out_constructor in
11413 that we have special handling for -mrelocatable. */
11416 rs6000_elf_asm_out_constructor (symbol, priority)
11420 const char *section = ".ctors";
/* Non-default priorities use a numbered ".ctors.NNNNN" section.  */
11423 if (priority != DEFAULT_INIT_PRIORITY)
11425 sprintf (buf, ".ctors.%.5u",
11426 /* Invert the numbering so the linker puts us in the proper
11427 order; constructors are run from right to left, and the
11428 linker sorts in increasing order. */
11429 MAX_INIT_PRIORITY - priority);
11433 named_section_flags (section, SECTION_WRITE);
11434 assemble_align (POINTER_SIZE);
/* -mrelocatable entries carry an @fixup so the startup code can
   relocate the pointer at run time.  */
11436 if (TARGET_RELOCATABLE)
11438 fputs ("\t.long (", asm_out_file);
11439 output_addr_const (asm_out_file, symbol);
11440 fputs (")@fixup\n", asm_out_file);
11443 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Record an element in the table of global destructors; mirrors
   rs6000_elf_asm_out_constructor, including the -mrelocatable
   @fixup handling, but emits into ".dtors" sections.  */
11447 rs6000_elf_asm_out_destructor (symbol, priority)
11451 const char *section = ".dtors";
/* Non-default priorities use a numbered ".dtors.NNNNN" section.  */
11454 if (priority != DEFAULT_INIT_PRIORITY)
11456 sprintf (buf, ".dtors.%.5u",
11457 /* Invert the numbering so the linker puts us in the proper
11458 order; constructors are run from right to left, and the
11459 linker sorts in increasing order. */
11460 MAX_INIT_PRIORITY - priority);
11464 named_section_flags (section, SECTION_WRITE);
11465 assemble_align (POINTER_SIZE);
11467 if (TARGET_RELOCATABLE)
11469 fputs ("\t.long (", asm_out_file);
11470 output_addr_const (asm_out_file, symbol);
11471 fputs (")@fixup\n", asm_out_file);
11474 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Switch the assembler output to the XCOFF csect named NAME;
   FLAGS are not used on XCOFF.  */
11480 xcoff_asm_named_section (name, flags)
11482 unsigned int flags ATTRIBUTE_UNUSED;
11484 fprintf (asm_out_file, "\t.csect %s\n", name);
/* Select the XCOFF output section for EXP: read-only vs. writable
   data, and public (external) vs. private placement.  */
11488 rs6000_xcoff_select_section (exp, reloc, align)
11491 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
/* Non-writable strings and fully-constant, non-volatile decls are
   eligible for a read-only section.  */
11493 if ((TREE_CODE (exp) == STRING_CST
11494 && ! flag_writable_strings)
11495 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
11496 && TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp)
11497 && DECL_INITIAL (exp)
11498 && (DECL_INITIAL (exp) == error_mark_node
11499 || TREE_CONSTANT (DECL_INITIAL (exp)))
11502 if (TREE_PUBLIC (exp))
11503 read_only_data_section ();
11505 read_only_private_data_section ();
/* Writable data: public vs. private sections.  */
11509 if (TREE_PUBLIC (exp))
11512 private_data_section ();
/* Give DECL a unique XCOFF section name of the form ".NAME[PR]"
   (the [PR] storage-mapping class marks program code).  */
11517 rs6000_xcoff_unique_section (decl, reloc)
11519 int reloc ATTRIBUTE_UNUSED;
11525 if (TREE_CODE (decl) == FUNCTION_DECL)
11527 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
/* 5 extra chars: the leading '.' plus the "[PR]" suffix.  */
11528 len = strlen (name) + 5;
11529 string = alloca (len + 1);
11530 sprintf (string, ".%s[PR]", name);
11531 DECL_SECTION_NAME (decl) = build_string (len, string);
11535 /* Select section for constant in constant pool.
11537 On RS/6000, all constants are in the private read-only data area.
11538 However, if this is being placed in the TOC it must be output as a
11542 rs6000_xcoff_select_rtx_section (mode, x, align)
11543 enum machine_mode mode;
11545 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
/* TOC-eligible constants go to the TOC; everything else to the
   private read-only data section.  */
11547 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
11550 read_only_private_data_section ();
11553 /* Remove any trailing [DS] or the like from the symbol name. */
11555 static const char *
11556 rs6000_xcoff_strip_name_encoding (name)
11562 len = strlen (name);
/* A trailing ']' indicates a "[XX]" storage-mapping-class suffix;
   NOTE(review): the code assumes the suffix is exactly 4 chars.  */
11563 if (name[len - 1] == ']')
11564 return ggc_alloc_string (name, len - 4);
11569 #endif /* TARGET_XCOFF */
11571 /* Note that this is also used for ELF64. */
11574 rs6000_xcoff_encode_section_info (decl, first)
11576 int first ATTRIBUTE_UNUSED;
/* Flag non-weak functions that are local to this file (or already
   written out) so calls to them need no TOC-restore no-op after
   the branch.  */
11578 if (TREE_CODE (decl) == FUNCTION_DECL
11579 && (TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
11580 && ! DECL_WEAK (decl))
11581 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;