1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Whether VRSAVE instructions should be generated. */
78 int rs6000_altivec_vrsave;
80 /* String from -mvrsave= option. */
81 const char *rs6000_altivec_vrsave_string;
83 /* Set to non-zero once AIX common-mode calls have been defined. */
84 static int common_mode_defined;
86 /* Save information from a "cmpxx" operation until the branch or scc is
88 rtx rs6000_compare_op0, rs6000_compare_op1;
89 int rs6000_compare_fp_p;
91 /* Label number of label created for -mrelocatable, to call to so we can
92 get the address of the GOT section */
93 int rs6000_pic_labelno;
96 /* Which abi to adhere to */
97 const char *rs6000_abi_name = RS6000_ABI_NAME;
99 /* Semantics of the small data area */
100 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
102 /* Which small data model to use */
103 const char *rs6000_sdata_name = (char *)0;
105 /* Counter for labels which are to be placed in .fixup. */
106 int fixuplabelno = 0;
109 /* ABI enumeration available for subtarget to use. */
110 enum rs6000_abi rs6000_current_abi;
112 /* ABI string from -mabi= option. */
113 const char *rs6000_abi_string;
116 const char *rs6000_debug_name;
117 int rs6000_debug_stack; /* debug stack applications */
118 int rs6000_debug_arg; /* debug argument handling */
120 /* Flag to say the TOC is initialized */
122 char toc_label_name[10];
124 /* Alias set for saves and restores from the rs6000 stack. */
125 static int rs6000_sr_alias_set;
127 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
128 The only place that looks at this is rs6000_set_default_type_attributes;
129 everywhere else should rely on the presence or absence of a longcall
130 attribute on the function declaration. */
131 int rs6000_default_long_calls;
132 const char *rs6000_longcall_switch;
134 static void rs6000_add_gc_roots PARAMS ((void));
135 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
136 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
137 static void validate_condition_mode
138 PARAMS ((enum rtx_code, enum machine_mode));
139 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
140 static void rs6000_maybe_dead PARAMS ((rtx));
141 static void rs6000_emit_stack_tie PARAMS ((void));
142 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
143 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
144 static unsigned rs6000_hash_constant PARAMS ((rtx));
145 static unsigned toc_hash_function PARAMS ((const void *));
146 static int toc_hash_eq PARAMS ((const void *, const void *));
147 static int toc_hash_mark_entry PARAMS ((void **, void *));
148 static void toc_hash_mark_table PARAMS ((void *));
149 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
150 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
151 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
152 static int rs6000_ra_ever_killed PARAMS ((void));
153 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
154 const struct attribute_spec rs6000_attribute_table[];
155 static void rs6000_set_default_type_attributes PARAMS ((tree));
156 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
157 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
158 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
159 HOST_WIDE_INT, HOST_WIDE_INT));
161 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
163 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
164 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
165 static void rs6000_elf_select_section PARAMS ((tree, int,
166 unsigned HOST_WIDE_INT));
167 static void rs6000_elf_unique_section PARAMS ((tree, int));
168 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
169 unsigned HOST_WIDE_INT));
170 static void rs6000_elf_encode_section_info PARAMS ((tree, int));
171 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
174 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
175 static void rs6000_xcoff_select_section PARAMS ((tree, int,
176 unsigned HOST_WIDE_INT));
177 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
178 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
179 unsigned HOST_WIDE_INT));
180 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
182 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
184 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
185 static int rs6000_adjust_priority PARAMS ((rtx, int));
186 static int rs6000_issue_rate PARAMS ((void));
188 static void rs6000_init_builtins PARAMS ((void));
189 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
190 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
191 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
192 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
193 static void altivec_init_builtins PARAMS ((void));
194 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
195 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
196 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
197 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
198 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
199 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
200 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
201 static void rs6000_parse_abi_options PARAMS ((void));
202 static void rs6000_parse_vrsave_option PARAMS ((void));
203 static int first_altivec_reg_to_save PARAMS ((void));
204 static unsigned int compute_vrsave_mask PARAMS ((void));
205 static void is_altivec_return_reg PARAMS ((rtx, void *));
206 int vrsave_operation PARAMS ((rtx, enum machine_mode));
207 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
208 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
209 static int easy_vector_constant PARAMS ((rtx));
211 /* Default register names. */
212 char rs6000_reg_names[][8] =
214 "0", "1", "2", "3", "4", "5", "6", "7",
215 "8", "9", "10", "11", "12", "13", "14", "15",
216 "16", "17", "18", "19", "20", "21", "22", "23",
217 "24", "25", "26", "27", "28", "29", "30", "31",
218 "0", "1", "2", "3", "4", "5", "6", "7",
219 "8", "9", "10", "11", "12", "13", "14", "15",
220 "16", "17", "18", "19", "20", "21", "22", "23",
221 "24", "25", "26", "27", "28", "29", "30", "31",
222 "mq", "lr", "ctr","ap",
223 "0", "1", "2", "3", "4", "5", "6", "7",
225 /* AltiVec registers. */
226 "0", "1", "2", "3", "4", "5", "6", "7",
227 "8", "9", "10", "11", "12", "13", "14", "15",
228 "16", "17", "18", "19", "20", "21", "22", "23",
229 "24", "25", "26", "27", "28", "29", "30", "31",
233 #ifdef TARGET_REGNAMES
234 static const char alt_reg_names[][8] =
236 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
237 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
238 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
239 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
240 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
241 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
242 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
243 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
244 "mq", "lr", "ctr", "ap",
245 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
247 /* AltiVec registers. */
248 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
249 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
250 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
251 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
256 #ifndef MASK_STRICT_ALIGN
257 #define MASK_STRICT_ALIGN 0
260 /* Initialize the GCC target structure. */
261 #undef TARGET_ATTRIBUTE_TABLE
262 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
263 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
264 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
266 #undef TARGET_ASM_ALIGNED_DI_OP
267 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
269 /* Default unaligned ops are only provided for ELF. Find the ops needed
270 for non-ELF systems. */
271 #ifndef OBJECT_FORMAT_ELF
273 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
275 #undef TARGET_ASM_UNALIGNED_HI_OP
276 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
277 #undef TARGET_ASM_UNALIGNED_SI_OP
278 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
279 #undef TARGET_ASM_UNALIGNED_DI_OP
280 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
283 #undef TARGET_ASM_UNALIGNED_HI_OP
284 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
285 #undef TARGET_ASM_UNALIGNED_SI_OP
286 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
290 /* This hook deals with fixups for relocatable code and DI-mode objects
292 #undef TARGET_ASM_INTEGER
293 #define TARGET_ASM_INTEGER rs6000_assemble_integer
295 #undef TARGET_ASM_FUNCTION_PROLOGUE
296 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
297 #undef TARGET_ASM_FUNCTION_EPILOGUE
298 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
301 #undef TARGET_SECTION_TYPE_FLAGS
302 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
305 #undef TARGET_SCHED_ISSUE_RATE
306 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
307 #undef TARGET_SCHED_ADJUST_COST
308 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
309 #undef TARGET_SCHED_ADJUST_PRIORITY
310 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
312 #undef TARGET_INIT_BUILTINS
313 #define TARGET_INIT_BUILTINS rs6000_init_builtins
315 #undef TARGET_EXPAND_BUILTIN
316 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
318 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
319 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
321 struct gcc_target targetm = TARGET_INITIALIZER;
323 /* Override command line options. Mostly we process the processor
324 type and sometimes adjust other TARGET_ options. */
327 rs6000_override_options (default_cpu)
328 const char *default_cpu;
331 struct rs6000_cpu_select *ptr;
333 /* Simplify the entries below by making a mask for any POWER
334 variant and any PowerPC variant. */
336 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
337 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
338 | MASK_PPC_GFXOPT | MASK_POWERPC64)
339 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
343 const char *const name; /* Canonical processor name. */
344 const enum processor_type processor; /* Processor type enum value. */
345 const int target_enable; /* Target flags to enable. */
346 const int target_disable; /* Target flags to disable. */
347 } const processor_target_table[]
348 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
349 POWER_MASKS | POWERPC_MASKS},
350 {"power", PROCESSOR_POWER,
351 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
352 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
353 {"power2", PROCESSOR_POWER,
354 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
355 POWERPC_MASKS | MASK_NEW_MNEMONICS},
356 {"power3", PROCESSOR_PPC630,
357 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
358 POWER_MASKS | MASK_PPC_GPOPT},
359 {"power4", PROCESSOR_POWER4,
360 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
361 POWER_MASKS | MASK_PPC_GPOPT},
362 {"powerpc", PROCESSOR_POWERPC,
363 MASK_POWERPC | MASK_NEW_MNEMONICS,
364 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
365 {"powerpc64", PROCESSOR_POWERPC64,
366 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
367 POWER_MASKS | POWERPC_OPT_MASKS},
368 {"rios", PROCESSOR_RIOS1,
369 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
370 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
371 {"rios1", PROCESSOR_RIOS1,
372 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
373 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
374 {"rsc", PROCESSOR_PPC601,
375 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
376 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
377 {"rsc1", PROCESSOR_PPC601,
378 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
379 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
380 {"rios2", PROCESSOR_RIOS2,
381 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
382 POWERPC_MASKS | MASK_NEW_MNEMONICS},
383 {"rs64a", PROCESSOR_RS64A,
384 MASK_POWERPC | MASK_NEW_MNEMONICS,
385 POWER_MASKS | POWERPC_OPT_MASKS},
386 {"401", PROCESSOR_PPC403,
387 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
388 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
389 {"403", PROCESSOR_PPC403,
390 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
391 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
392 {"405", PROCESSOR_PPC405,
393 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
394 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
395 {"505", PROCESSOR_MPCCORE,
396 MASK_POWERPC | MASK_NEW_MNEMONICS,
397 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
398 {"601", PROCESSOR_PPC601,
399 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
400 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
401 {"602", PROCESSOR_PPC603,
402 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
403 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
404 {"603", PROCESSOR_PPC603,
405 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
406 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
407 {"603e", PROCESSOR_PPC603,
408 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
409 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
410 {"ec603e", PROCESSOR_PPC603,
411 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
412 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
413 {"604", PROCESSOR_PPC604,
414 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
415 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
416 {"604e", PROCESSOR_PPC604e,
417 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
418 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
419 {"620", PROCESSOR_PPC620,
420 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
421 POWER_MASKS | MASK_PPC_GPOPT},
422 {"630", PROCESSOR_PPC630,
423 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
424 POWER_MASKS | MASK_PPC_GPOPT},
425 {"740", PROCESSOR_PPC750,
426 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
427 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
428 {"750", PROCESSOR_PPC750,
429 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
430 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
431 {"7400", PROCESSOR_PPC7400,
432 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
433 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
434 {"7450", PROCESSOR_PPC7450,
435 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
436 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
437 {"801", PROCESSOR_MPCCORE,
438 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
439 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
440 {"821", PROCESSOR_MPCCORE,
441 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
442 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
443 {"823", PROCESSOR_MPCCORE,
444 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
445 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
446 {"860", PROCESSOR_MPCCORE,
447 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
448 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
450 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
452 /* Save current -mmultiple/-mno-multiple status. */
453 int multiple = TARGET_MULTIPLE;
454 /* Save current -mstring/-mno-string status. */
455 int string = TARGET_STRING;
457 /* Identify the processor type. */
458 rs6000_select[0].string = default_cpu;
459 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
461 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
463 ptr = &rs6000_select[i];
464 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
466 for (j = 0; j < ptt_size; j++)
467 if (! strcmp (ptr->string, processor_target_table[j].name))
470 rs6000_cpu = processor_target_table[j].processor;
474 target_flags |= processor_target_table[j].target_enable;
475 target_flags &= ~processor_target_table[j].target_disable;
481 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
485 /* If we are optimizing big endian systems for space, use the store
486 multiple instructions. */
487 if (BYTES_BIG_ENDIAN && optimize_size)
488 target_flags |= MASK_MULTIPLE;
490 /* If -mmultiple or -mno-multiple was explicitly used, don't
491 override with the processor default */
492 if (TARGET_MULTIPLE_SET)
493 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
495 /* If -mstring or -mno-string was explicitly used, don't override
496 with the processor default. */
497 if (TARGET_STRING_SET)
498 target_flags = (target_flags & ~MASK_STRING) | string;
500 /* Don't allow -mmultiple or -mstring on little endian systems
501 unless the cpu is a 750, because the hardware doesn't support the
502 instructions used in little endian mode, and causes an alignment
503 trap. The 750 does not cause an alignment trap (except when the
504 target is unaligned). */
506 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
510 target_flags &= ~MASK_MULTIPLE;
511 if (TARGET_MULTIPLE_SET)
512 warning ("-mmultiple is not supported on little endian systems");
517 target_flags &= ~MASK_STRING;
518 if (TARGET_STRING_SET)
519 warning ("-mstring is not supported on little endian systems");
523 if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
528 warning ("-f%s ignored (all code is position independent)",
529 (flag_pic > 1) ? "PIC" : "pic");
532 #ifdef XCOFF_DEBUGGING_INFO
533 if (flag_function_sections && (write_symbols != NO_DEBUG)
534 && DEFAULT_ABI == ABI_AIX)
536 warning ("-ffunction-sections disabled on AIX when debugging");
537 flag_function_sections = 0;
540 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
542 warning ("-fdata-sections not supported on AIX");
543 flag_data_sections = 0;
547 /* Set debug flags */
548 if (rs6000_debug_name)
550 if (! strcmp (rs6000_debug_name, "all"))
551 rs6000_debug_stack = rs6000_debug_arg = 1;
552 else if (! strcmp (rs6000_debug_name, "stack"))
553 rs6000_debug_stack = 1;
554 else if (! strcmp (rs6000_debug_name, "arg"))
555 rs6000_debug_arg = 1;
557 error ("unknown -mdebug-%s switch", rs6000_debug_name);
560 /* Set size of long double */
561 rs6000_long_double_type_size = 64;
562 if (rs6000_long_double_size_string)
565 int size = strtol (rs6000_long_double_size_string, &tail, 10);
566 if (*tail != '\0' || (size != 64 && size != 128))
567 error ("Unknown switch -mlong-double-%s",
568 rs6000_long_double_size_string);
570 rs6000_long_double_type_size = size;
573 /* Handle -mabi= options. */
574 rs6000_parse_abi_options ();
576 /* Handle -mvrsave= option. */
577 rs6000_parse_vrsave_option ();
579 #ifdef SUBTARGET_OVERRIDE_OPTIONS
580 SUBTARGET_OVERRIDE_OPTIONS;
582 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
583 SUBSUBTARGET_OVERRIDE_OPTIONS;
586 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
587 using TARGET_OPTIONS to handle a toggle switch, but we're out of
588 bits in target_flags so TARGET_SWITCHES cannot be used.
589 Assumption here is that rs6000_longcall_switch points into the
590 text of the complete option, rather than being a copy, so we can
591 scan back for the presence or absence of the no- modifier. */
592 if (rs6000_longcall_switch)
594 const char *base = rs6000_longcall_switch;
595 while (base[-1] != 'm') base--;
597 if (*rs6000_longcall_switch != '\0')
598 error ("invalid option `%s'", base);
599 rs6000_default_long_calls = (base[0] != 'n');
602 #ifdef TARGET_REGNAMES
603 /* If the user desires alternate register names, copy in the
604 alternate names now. */
606 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
609 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
610 If -maix-struct-return or -msvr4-struct-return was explicitly
611 used, don't override with the ABI default. */
612 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
614 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
615 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
617 target_flags |= MASK_AIX_STRUCT_RET;
620 /* Register global variables with the garbage collector. */
621 rs6000_add_gc_roots ();
623 /* Allocate an alias set for register saves & restores from stack. */
624 rs6000_sr_alias_set = new_alias_set ();
627 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
629 /* We can only guarantee the availability of DI pseudo-ops when
630 assembling for 64-bit targets. */
633 targetm.asm_out.aligned_op.di = NULL;
634 targetm.asm_out.unaligned_op.di = NULL;
637 /* Arrange to save and restore machine status around nested functions. */
638 init_machine_status = rs6000_init_machine_status;
641 /* Handle -mvrsave= options. */
643 rs6000_parse_vrsave_option ()
645 /* Generate VRSAVE instructions by default. */
646 if (rs6000_altivec_vrsave_string == 0
647 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
648 rs6000_altivec_vrsave = 1;
649 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
650 rs6000_altivec_vrsave = 0;
652 error ("unknown -mvrsave= option specified: '%s'",
653 rs6000_altivec_vrsave_string);
656 /* Handle -mabi= options. */
658 rs6000_parse_abi_options ()
660 if (rs6000_abi_string == 0)
662 else if (! strcmp (rs6000_abi_string, "altivec"))
663 rs6000_altivec_abi = 1;
664 else if (! strcmp (rs6000_abi_string, "no-altivec"))
665 rs6000_altivec_abi = 0;
667 error ("unknown ABI specified: '%s'", rs6000_abi_string);
671 optimization_options (level, size)
672 int level ATTRIBUTE_UNUSED;
673 int size ATTRIBUTE_UNUSED;
677 /* Do anything needed at the start of the asm file. */
680 rs6000_file_start (file, default_cpu)
682 const char *default_cpu;
686 const char *start = buffer;
687 struct rs6000_cpu_select *ptr;
689 if (flag_verbose_asm)
691 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
692 rs6000_select[0].string = default_cpu;
694 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
696 ptr = &rs6000_select[i];
697 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
699 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
705 switch (rs6000_sdata)
707 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
708 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
709 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
710 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
713 if (rs6000_sdata && g_switch_value)
715 fprintf (file, "%s -G %d", start, g_switch_value);
725 /* Return non-zero if this function is known to have a null epilogue. */
730 if (reload_completed)
732 rs6000_stack_t *info = rs6000_stack_info ();
734 if (info->first_gp_reg_save == 32
735 && info->first_fp_reg_save == 64
736 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
739 && info->vrsave_mask == 0
747 /* Returns 1 always. */
750 any_operand (op, mode)
751 rtx op ATTRIBUTE_UNUSED;
752 enum machine_mode mode ATTRIBUTE_UNUSED;
757 /* Returns 1 if op is the count register. */
759 count_register_operand (op, mode)
761 enum machine_mode mode ATTRIBUTE_UNUSED;
763 if (GET_CODE (op) != REG)
766 if (REGNO (op) == COUNT_REGISTER_REGNUM)
769 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
775 /* Returns 1 if op is an altivec register. */
777 altivec_register_operand (op, mode)
779 enum machine_mode mode ATTRIBUTE_UNUSED;
782 return (register_operand (op, mode)
783 && (GET_CODE (op) != REG
784 || REGNO (op) > FIRST_PSEUDO_REGISTER
785 || ALTIVEC_REGNO_P (REGNO (op))));
789 xer_operand (op, mode)
791 enum machine_mode mode ATTRIBUTE_UNUSED;
793 if (GET_CODE (op) != REG)
796 if (XER_REGNO_P (REGNO (op)))
802 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
803 by such constants completes more quickly. */
806 s8bit_cint_operand (op, mode)
808 enum machine_mode mode ATTRIBUTE_UNUSED;
810 return ( GET_CODE (op) == CONST_INT
811 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
814 /* Return 1 if OP is a constant that can fit in a D field. */
817 short_cint_operand (op, mode)
819 enum machine_mode mode ATTRIBUTE_UNUSED;
821 return (GET_CODE (op) == CONST_INT
822 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
825 /* Similar for an unsigned D field. */
828 u_short_cint_operand (op, mode)
830 enum machine_mode mode ATTRIBUTE_UNUSED;
832 return (GET_CODE (op) == CONST_INT
833 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
836 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
839 non_short_cint_operand (op, mode)
841 enum machine_mode mode ATTRIBUTE_UNUSED;
843 return (GET_CODE (op) == CONST_INT
844 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
847 /* Returns 1 if OP is a CONST_INT that is a positive value
848 and an exact power of 2. */
851 exact_log2_cint_operand (op, mode)
853 enum machine_mode mode ATTRIBUTE_UNUSED;
855 return (GET_CODE (op) == CONST_INT
857 && exact_log2 (INTVAL (op)) >= 0);
860 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
864 gpc_reg_operand (op, mode)
866 enum machine_mode mode;
868 return (register_operand (op, mode)
869 && (GET_CODE (op) != REG
870 || (REGNO (op) >= ARG_POINTER_REGNUM
871 && !XER_REGNO_P (REGNO (op)))
872 || REGNO (op) < MQ_REGNO));
875 /* Returns 1 if OP is either a pseudo-register or a register denoting a
879 cc_reg_operand (op, mode)
881 enum machine_mode mode;
883 return (register_operand (op, mode)
884 && (GET_CODE (op) != REG
885 || REGNO (op) >= FIRST_PSEUDO_REGISTER
886 || CR_REGNO_P (REGNO (op))));
889 /* Returns 1 if OP is either a pseudo-register or a register denoting a
890 CR field that isn't CR0. */
893 cc_reg_not_cr0_operand (op, mode)
895 enum machine_mode mode;
897 return (register_operand (op, mode)
898 && (GET_CODE (op) != REG
899 || REGNO (op) >= FIRST_PSEUDO_REGISTER
900 || CR_REGNO_NOT_CR0_P (REGNO (op))));
903 /* Returns 1 if OP is either a constant integer valid for a D-field or
904 a non-special register. If a register, it must be in the proper
905 mode unless MODE is VOIDmode. */
908 reg_or_short_operand (op, mode)
910 enum machine_mode mode;
912 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
915 /* Similar, except check if the negation of the constant would be
916 valid for a D-field. */
919 reg_or_neg_short_operand (op, mode)
921 enum machine_mode mode;
923 if (GET_CODE (op) == CONST_INT)
924 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
926 return gpc_reg_operand (op, mode);
929 /* Returns 1 if OP is either a constant integer valid for a DS-field or
930 a non-special register. If a register, it must be in the proper
931 mode unless MODE is VOIDmode. */
934 reg_or_aligned_short_operand (op, mode)
936 enum machine_mode mode;
938 if (gpc_reg_operand (op, mode))
940 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
947 /* Return 1 if the operand is either a register or an integer whose
948 high-order 16 bits are zero. */
951 reg_or_u_short_operand (op, mode)
953 enum machine_mode mode;
955 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
958 /* Return 1 is the operand is either a non-special register or ANY
962 reg_or_cint_operand (op, mode)
964 enum machine_mode mode;
966 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
969 /* Return 1 is the operand is either a non-special register or ANY
970 32-bit signed constant integer. */
973 reg_or_arith_cint_operand (op, mode)
975 enum machine_mode mode;
977 return (gpc_reg_operand (op, mode)
978 || (GET_CODE (op) == CONST_INT
979 #if HOST_BITS_PER_WIDE_INT != 32
980 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
981 < (unsigned HOST_WIDE_INT) 0x100000000ll)
986 /* Return 1 is the operand is either a non-special register or a 32-bit
987 signed constant integer valid for 64-bit addition. */
990 reg_or_add_cint64_operand (op, mode)
992 enum machine_mode mode;
994 return (gpc_reg_operand (op, mode)
995 || (GET_CODE (op) == CONST_INT
996 #if HOST_BITS_PER_WIDE_INT == 32
997 && INTVAL (op) < 0x7fff8000
999 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1005 /* Return 1 is the operand is either a non-special register or a 32-bit
1006 signed constant integer valid for 64-bit subtraction. */
1009 reg_or_sub_cint64_operand (op, mode)
1011 enum machine_mode mode;
1013 return (gpc_reg_operand (op, mode)
1014 || (GET_CODE (op) == CONST_INT
1015 #if HOST_BITS_PER_WIDE_INT == 32
1016 && (- INTVAL (op)) < 0x7fff8000
1018 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1024 /* Return 1 is the operand is either a non-special register or ANY
1025 32-bit unsigned constant integer. */
1028 reg_or_logical_cint_operand (op, mode)
1030 enum machine_mode mode;
1032 if (GET_CODE (op) == CONST_INT)
1034 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1036 if (GET_MODE_BITSIZE (mode) <= 32)
1039 if (INTVAL (op) < 0)
1043 return ((INTVAL (op) & GET_MODE_MASK (mode)
1044 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1046 else if (GET_CODE (op) == CONST_DOUBLE)
1048 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1052 return CONST_DOUBLE_HIGH (op) == 0;
1055 return gpc_reg_operand (op, mode);
1058 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1061 got_operand (op, mode)
1063 enum machine_mode mode ATTRIBUTE_UNUSED;
1065 return (GET_CODE (op) == SYMBOL_REF
1066 || GET_CODE (op) == CONST
1067 || GET_CODE (op) == LABEL_REF);
1070 /* Return 1 if the operand is a simple references that can be loaded via
1071 the GOT (labels involving addition aren't allowed). */
1074 got_no_const_operand (op, mode)
1076 enum machine_mode mode ATTRIBUTE_UNUSED;
1078 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1081 /* Return the number of instructions it takes to form a constant in an
1082 integer register. */
1085 num_insns_constant_wide (value)
1086 HOST_WIDE_INT value;
1088 /* signed constant loadable with {cal|addi} */
1089 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1092 /* constant loadable with {cau|addis} */
1093 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1096 #if HOST_BITS_PER_WIDE_INT == 64
1097 else if (TARGET_POWERPC64)
1099 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1100 HOST_WIDE_INT high = value >> 31;
1102 if (high == 0 || high == -1)
1108 return num_insns_constant_wide (high) + 1;
1110 return (num_insns_constant_wide (high)
1111 + num_insns_constant_wide (low) + 1);
1120 num_insns_constant (op, mode)
1122 enum machine_mode mode;
1124 if (GET_CODE (op) == CONST_INT)
1126 #if HOST_BITS_PER_WIDE_INT == 64
1127 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1128 && mask64_operand (op, mode))
1132 return num_insns_constant_wide (INTVAL (op));
1135 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1140 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1141 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1142 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1145 else if (GET_CODE (op) == CONST_DOUBLE)
1151 int endian = (WORDS_BIG_ENDIAN == 0);
1153 if (mode == VOIDmode || mode == DImode)
1155 high = CONST_DOUBLE_HIGH (op);
1156 low = CONST_DOUBLE_LOW (op);
1160 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1161 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1163 low = l[1 - endian];
1167 return (num_insns_constant_wide (low)
1168 + num_insns_constant_wide (high));
1172 if (high == 0 && low >= 0)
1173 return num_insns_constant_wide (low);
1175 else if (high == -1 && low < 0)
1176 return num_insns_constant_wide (low);
1178 else if (mask64_operand (op, mode))
1182 return num_insns_constant_wide (high) + 1;
1185 return (num_insns_constant_wide (high)
1186 + num_insns_constant_wide (low) + 1);
1194 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1195 register with one instruction per word. We only do this if we can
1196 safely read CONST_DOUBLE_{LOW,HIGH}. */
1199 easy_fp_constant (op, mode)
1201 enum machine_mode mode;
1203 if (GET_CODE (op) != CONST_DOUBLE
1204 || GET_MODE (op) != mode
1205 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1208 /* Consider all constants with -msoft-float to be easy. */
1209 if (TARGET_SOFT_FLOAT && mode != DImode)
1212 /* If we are using V.4 style PIC, consider all constants to be hard. */
1213 if (flag_pic && DEFAULT_ABI == ABI_V4)
1216 #ifdef TARGET_RELOCATABLE
1217 /* Similarly if we are using -mrelocatable, consider all constants
1219 if (TARGET_RELOCATABLE)
1228 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1229 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1231 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1232 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1235 else if (mode == SFmode)
1240 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1241 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1243 return num_insns_constant_wide (l) == 1;
1246 else if (mode == DImode)
1247 return ((TARGET_POWERPC64
1248 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1249 || (num_insns_constant (op, DImode) <= 2));
1251 else if (mode == SImode)
1257 /* Return 1 if the operand is a CONST_INT and can be put into a
1258 register with one instruction. */
1261 easy_vector_constant (op)
1267 if (GET_CODE (op) != CONST_VECTOR)
1270 units = CONST_VECTOR_NUNITS (op);
1272 /* We can generate 0 easily. Look for that. */
1273 for (i = 0; i < units; ++i)
1275 elt = CONST_VECTOR_ELT (op, i);
1277 /* We could probably simplify this by just checking for equality
1278 with CONST0_RTX for the current mode, but let's be safe
1281 switch (GET_CODE (elt))
1284 if (INTVAL (elt) != 0)
1288 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1296 /* We could probably generate a few other constants trivially, but
1297 gcc doesn't generate them yet. FIXME later. */
1301 /* Return 1 if the operand is the constant 0. This works for scalars
1302 as well as vectors. */
1304 zero_constant (op, mode)
1306 enum machine_mode mode;
1308 return op == CONST0_RTX (mode);
1311 /* Return 1 if the operand is 0.0. */
1313 zero_fp_constant (op, mode)
1315 enum machine_mode mode;
1317 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1320 /* Return 1 if the operand is in volatile memory. Note that during
1321 the RTL generation phase, memory_operand does not return TRUE for
1322 volatile memory references. So this function allows us to
1323 recognize volatile references where its safe. */
1326 volatile_mem_operand (op, mode)
1328 enum machine_mode mode;
1330 if (GET_CODE (op) != MEM)
1333 if (!MEM_VOLATILE_P (op))
1336 if (mode != GET_MODE (op))
1339 if (reload_completed)
1340 return memory_operand (op, mode);
1342 if (reload_in_progress)
1343 return strict_memory_address_p (mode, XEXP (op, 0));
1345 return memory_address_p (mode, XEXP (op, 0));
1348 /* Return 1 if the operand is an offsettable memory operand. */
1351 offsettable_mem_operand (op, mode)
1353 enum machine_mode mode;
1355 return ((GET_CODE (op) == MEM)
1356 && offsettable_address_p (reload_completed || reload_in_progress,
1357 mode, XEXP (op, 0)));
1360 /* Return 1 if the operand is either an easy FP constant (see above) or
1364 mem_or_easy_const_operand (op, mode)
1366 enum machine_mode mode;
1368 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1371 /* Return 1 if the operand is either a non-special register or an item
1372 that can be used as the operand of a `mode' add insn. */
1375 add_operand (op, mode)
1377 enum machine_mode mode;
1379 if (GET_CODE (op) == CONST_INT)
1380 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1381 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1383 return gpc_reg_operand (op, mode);
1386 /* Return 1 if OP is a constant but not a valid add_operand. */
1389 non_add_cint_operand (op, mode)
1391 enum machine_mode mode ATTRIBUTE_UNUSED;
1393 return (GET_CODE (op) == CONST_INT
1394 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1395 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1398 /* Return 1 if the operand is a non-special register or a constant that
1399 can be used as the operand of an OR or XOR insn on the RS/6000. */
1402 logical_operand (op, mode)
1404 enum machine_mode mode;
1406 HOST_WIDE_INT opl, oph;
1408 if (gpc_reg_operand (op, mode))
1411 if (GET_CODE (op) == CONST_INT)
1413 opl = INTVAL (op) & GET_MODE_MASK (mode);
1415 #if HOST_BITS_PER_WIDE_INT <= 32
1416 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1420 else if (GET_CODE (op) == CONST_DOUBLE)
1422 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1425 opl = CONST_DOUBLE_LOW (op);
1426 oph = CONST_DOUBLE_HIGH (op);
1433 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1434 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1437 /* Return 1 if C is a constant that is not a logical operand (as
1438 above), but could be split into one. */
1441 non_logical_cint_operand (op, mode)
1443 enum machine_mode mode;
1445 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1446 && ! logical_operand (op, mode)
1447 && reg_or_logical_cint_operand (op, mode));
1450 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1451 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1452 Reject all ones and all zeros, since these should have been optimized
1453 away and confuse the making of MB and ME. */
1456 mask_operand (op, mode)
1458 enum machine_mode mode ATTRIBUTE_UNUSED;
1460 HOST_WIDE_INT c, lsb;
1462 if (GET_CODE (op) != CONST_INT)
1467 /* Fail in 64-bit mode if the mask wraps around because the upper
1468 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1469 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1472 /* We don't change the number of transitions by inverting,
1473 so make sure we start with the LS bit zero. */
1477 /* Reject all zeros or all ones. */
1481 /* Find the first transition. */
1484 /* Invert to look for a second transition. */
1487 /* Erase first transition. */
1490 /* Find the second transition (if any). */
1493 /* Match if all the bits above are 1's (or c is zero). */
1497 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1498 It is if there are no more than one 1->0 or 0->1 transitions.
1499 Reject all ones and all zeros, since these should have been optimized
1500 away and confuse the making of MB and ME. */
1503 mask64_operand (op, mode)
1505 enum machine_mode mode;
1507 if (GET_CODE (op) == CONST_INT)
1509 HOST_WIDE_INT c, lsb;
1511 /* We don't change the number of transitions by inverting,
1512 so make sure we start with the LS bit zero. */
1517 /* Reject all zeros or all ones. */
1521 /* Find the transition, and check that all bits above are 1's. */
1525 else if (GET_CODE (op) == CONST_DOUBLE
1526 && (mode == VOIDmode || mode == DImode))
1528 HOST_WIDE_INT low, high, lsb;
1530 if (HOST_BITS_PER_WIDE_INT < 64)
1531 high = CONST_DOUBLE_HIGH (op);
1533 low = CONST_DOUBLE_LOW (op);
1536 if (HOST_BITS_PER_WIDE_INT < 64)
1543 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1547 return high == -lsb;
1551 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1557 /* Return 1 if the operand is either a non-special register or a constant
1558 that can be used as the operand of a PowerPC64 logical AND insn. */
1561 and64_operand (op, mode)
1563 enum machine_mode mode;
1565 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1566 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1568 return (logical_operand (op, mode) || mask64_operand (op, mode));
1571 /* Return 1 if the operand is either a non-special register or a
1572 constant that can be used as the operand of an RS/6000 logical AND insn. */
1575 and_operand (op, mode)
1577 enum machine_mode mode;
1579 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1580 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1582 return (logical_operand (op, mode) || mask_operand (op, mode));
1585 /* Return 1 if the operand is a general register or memory operand. */
1588 reg_or_mem_operand (op, mode)
1590 enum machine_mode mode;
1592 return (gpc_reg_operand (op, mode)
1593 || memory_operand (op, mode)
1594 || volatile_mem_operand (op, mode));
1597 /* Return 1 if the operand is a general register or memory operand without
1598 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1602 lwa_operand (op, mode)
1604 enum machine_mode mode;
1608 if (reload_completed && GET_CODE (inner) == SUBREG)
1609 inner = SUBREG_REG (inner);
1611 return gpc_reg_operand (inner, mode)
1612 || (memory_operand (inner, mode)
1613 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1614 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1615 && (GET_CODE (XEXP (inner, 0)) != PLUS
1616 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1617 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1620 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1623 symbol_ref_operand (op, mode)
1625 enum machine_mode mode;
1627 if (mode != VOIDmode && GET_MODE (op) != mode)
1630 return (GET_CODE (op) == SYMBOL_REF);
1633 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1634 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1637 call_operand (op, mode)
1639 enum machine_mode mode;
1641 if (mode != VOIDmode && GET_MODE (op) != mode)
1644 return (GET_CODE (op) == SYMBOL_REF
1645 || (GET_CODE (op) == REG
1646 && (REGNO (op) == LINK_REGISTER_REGNUM
1647 || REGNO (op) == COUNT_REGISTER_REGNUM
1648 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1651 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1652 this file and the function is not weakly defined. */
1655 current_file_function_operand (op, mode)
1657 enum machine_mode mode ATTRIBUTE_UNUSED;
1659 return (GET_CODE (op) == SYMBOL_REF
1660 && (SYMBOL_REF_FLAG (op)
1661 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1662 && ! DECL_WEAK (current_function_decl))));
1665 /* Return 1 if this operand is a valid input for a move insn. */
1668 input_operand (op, mode)
1670 enum machine_mode mode;
1672 /* Memory is always valid. */
1673 if (memory_operand (op, mode))
1676 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1677 if (GET_CODE (op) == CONSTANT_P_RTX)
1680 /* For floating-point, easy constants are valid. */
1681 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1683 && easy_fp_constant (op, mode))
1686 /* Allow any integer constant. */
1687 if (GET_MODE_CLASS (mode) == MODE_INT
1688 && (GET_CODE (op) == CONST_INT
1689 || GET_CODE (op) == CONST_DOUBLE))
1692 /* For floating-point or multi-word mode, the only remaining valid type
1694 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1695 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1696 return register_operand (op, mode);
1698 /* The only cases left are integral modes one word or smaller (we
1699 do not get called for MODE_CC values). These can be in any
1701 if (register_operand (op, mode))
1704 /* A SYMBOL_REF referring to the TOC is valid. */
1705 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1708 /* A constant pool expression (relative to the TOC) is valid */
1709 if (TOC_RELATIVE_EXPR_P (op))
1712 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1714 if (DEFAULT_ABI == ABI_V4
1715 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1716 && small_data_operand (op, Pmode))
1722 /* Return 1 for an operand in small memory on V.4/eabi. */
1725 small_data_operand (op, mode)
1726 rtx op ATTRIBUTE_UNUSED;
1727 enum machine_mode mode ATTRIBUTE_UNUSED;
1732 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1735 if (DEFAULT_ABI != ABI_V4)
1738 if (GET_CODE (op) == SYMBOL_REF)
1741 else if (GET_CODE (op) != CONST
1742 || GET_CODE (XEXP (op, 0)) != PLUS
1743 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1744 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1749 rtx sum = XEXP (op, 0);
1750 HOST_WIDE_INT summand;
1752 /* We have to be careful here, because it is the referenced address
1753 that must be 32k from _SDA_BASE_, not just the symbol. */
1754 summand = INTVAL (XEXP (sum, 1));
1755 if (summand < 0 || summand > g_switch_value)
1758 sym_ref = XEXP (sum, 0);
1761 if (*XSTR (sym_ref, 0) != '@')
1772 constant_pool_expr_1 (op, have_sym, have_toc)
1777 switch (GET_CODE(op))
1780 if (CONSTANT_POOL_ADDRESS_P (op))
1782 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1790 else if (! strcmp (XSTR (op, 0), toc_label_name))
1799 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
1800 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
1802 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1811 constant_pool_expr_p (op)
1816 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1820 toc_relative_expr_p (op)
1825 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1828 /* Try machine-dependent ways of modifying an illegitimate address
1829 to be legitimate. If we find one, return the new, valid address.
1830 This is used from only one place: `memory_address' in explow.c.
1832 OLDX is the address as it was before break_out_memory_refs was
1833 called. In some cases it is useful to look at this to decide what
1836 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1838 It is always safe for this function to do nothing. It exists to
1839 recognize opportunities to optimize the output.
1841 On RS/6000, first check for the sum of a register with a constant
1842 integer that is out of range. If so, generate code to add the
1843 constant with the low-order 16 bits masked to the register and force
1844 this result into another register (this can be done with `cau').
1845 Then generate an address of REG+(CONST&0xffff), allowing for the
1846 possibility of bit 16 being a one.
1848 Then check for the sum of a register and something not constant, try to
1849 load the other things into a register and return the sum. */
1851 rs6000_legitimize_address (x, oldx, mode)
1853 rtx oldx ATTRIBUTE_UNUSED;
1854 enum machine_mode mode;
1856 if (GET_CODE (x) == PLUS
1857 && GET_CODE (XEXP (x, 0)) == REG
1858 && GET_CODE (XEXP (x, 1)) == CONST_INT
1859 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1861 HOST_WIDE_INT high_int, low_int;
1863 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
1864 high_int = INTVAL (XEXP (x, 1)) - low_int;
1865 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1866 GEN_INT (high_int)), 0);
1867 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1869 else if (GET_CODE (x) == PLUS
1870 && GET_CODE (XEXP (x, 0)) == REG
1871 && GET_CODE (XEXP (x, 1)) != CONST_INT
1872 && GET_MODE_NUNITS (mode) == 1
1873 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1874 && (TARGET_POWERPC64 || mode != DImode)
1877 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1878 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1880 else if (ALTIVEC_VECTOR_MODE (mode))
1884 /* Make sure both operands are registers. */
1885 if (GET_CODE (x) == PLUS)
1886 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1887 force_reg (Pmode, XEXP (x, 1)));
1889 reg = force_reg (Pmode, x);
1892 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1893 && GET_CODE (x) != CONST_INT
1894 && GET_CODE (x) != CONST_DOUBLE
1896 && GET_MODE_NUNITS (mode) == 1
1897 && (GET_MODE_BITSIZE (mode) <= 32
1898 || (TARGET_HARD_FLOAT && mode == DFmode)))
1900 rtx reg = gen_reg_rtx (Pmode);
1901 emit_insn (gen_elf_high (reg, (x)));
1902 return gen_rtx_LO_SUM (Pmode, reg, (x));
1904 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1906 && GET_CODE (x) != CONST_INT
1907 && GET_CODE (x) != CONST_DOUBLE
1909 && (TARGET_HARD_FLOAT || mode != DFmode)
1913 rtx reg = gen_reg_rtx (Pmode);
1914 emit_insn (gen_macho_high (reg, (x)));
1915 return gen_rtx_LO_SUM (Pmode, reg, (x));
1918 && CONSTANT_POOL_EXPR_P (x)
1919 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1921 return create_TOC_reference (x);
1927 /* The convention appears to be to define this wherever it is used.
1928 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1929 is now used here. */
1930 #ifndef REG_MODE_OK_FOR_BASE_P
1931 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1934 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1935 replace the input X, or the original X if no replacement is called for.
1936 The output parameter *WIN is 1 if the calling macro should goto WIN,
1939 For RS/6000, we wish to handle large displacements off a base
1940 register by splitting the addend across an addiu/addis and the mem insn.
1941 This cuts number of extra insns needed from 3 to 1.
1943 On Darwin, we use this to generate code for floating point constants.
1944 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1945 The Darwin code is inside #if TARGET_MACHO because only then is
1946 machopic_function_base_name() defined. */
1948 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1950 enum machine_mode mode;
1953 int ind_levels ATTRIBUTE_UNUSED;
1956 /* We must recognize output that we have already generated ourselves. */
1957 if (GET_CODE (x) == PLUS
1958 && GET_CODE (XEXP (x, 0)) == PLUS
1959 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1960 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1961 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1963 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1964 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1965 opnum, (enum reload_type)type);
1971 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1972 && GET_CODE (x) == LO_SUM
1973 && GET_CODE (XEXP (x, 0)) == PLUS
1974 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1975 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1976 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1977 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1978 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1979 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1980 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1982 /* Result of previous invocation of this function on Darwin
1983 floating point constant. */
1984 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1985 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1986 opnum, (enum reload_type)type);
1991 if (GET_CODE (x) == PLUS
1992 && GET_CODE (XEXP (x, 0)) == REG
1993 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1994 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1995 && GET_CODE (XEXP (x, 1)) == CONST_INT
1996 && !ALTIVEC_VECTOR_MODE (mode))
1998 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1999 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2001 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2003 /* Check for 32-bit overflow. */
2004 if (high + low != val)
2010 /* Reload the high part into a base reg; leave the low part
2011 in the mem directly. */
2013 x = gen_rtx_PLUS (GET_MODE (x),
2014 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2018 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2019 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2020 opnum, (enum reload_type)type);
2025 if (GET_CODE (x) == SYMBOL_REF
2026 && DEFAULT_ABI == ABI_DARWIN
2027 && !ALTIVEC_VECTOR_MODE (mode)
2030 /* Darwin load of floating point constant. */
2031 rtx offset = gen_rtx (CONST, Pmode,
2032 gen_rtx (MINUS, Pmode, x,
2033 gen_rtx (SYMBOL_REF, Pmode,
2034 machopic_function_base_name ())));
2035 x = gen_rtx (LO_SUM, GET_MODE (x),
2036 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2037 gen_rtx (HIGH, Pmode, offset)), offset);
2038 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2039 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2040 opnum, (enum reload_type)type);
2046 && CONSTANT_POOL_EXPR_P (x)
2047 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2049 (x) = create_TOC_reference (x);
2057 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2058 that is a valid memory address for an instruction.
2059 The MODE argument is the machine mode for the MEM expression
2060 that wants to use this address.
2062 On the RS/6000, there are four valid address: a SYMBOL_REF that
2063 refers to a constant pool entry of an address (or the sum of it
2064 plus a constant), a short (16-bit signed) constant plus a register,
2065 the sum of two registers, or a register indirect, possibly with an
2066 auto-increment. For DFmode and DImode with an constant plus register,
2067 we must ensure that both words are addressable or PowerPC64 with offset
2070 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2071 32-bit DImode, TImode), indexed addressing cannot be used because
2072 adjacent memory cells are accessed by adding word-sized offsets
2073 during assembly output. */
2075 rs6000_legitimate_address (mode, x, reg_ok_strict)
2076 enum machine_mode mode;
2080 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2082 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2083 && !ALTIVEC_VECTOR_MODE (mode)
2085 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2087 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2089 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2091 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2093 && GET_CODE (x) == PLUS
2094 && GET_CODE (XEXP (x, 0)) == REG
2095 && XEXP (x, 0) == virtual_stack_vars_rtx
2096 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2098 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2101 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
2102 && (TARGET_POWERPC64 || mode != DImode)
2103 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2105 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2110 /* Try to output insns to set TARGET equal to the constant C if it can
2111 be done in less than N insns. Do all computations in MODE.
2112 Returns the place where the output has been placed if it can be
2113 done and the insns have been emitted. If it would take more than N
2114 insns, zero is returned and no insns and emitted. */
2117 rs6000_emit_set_const (dest, mode, source, n)
2119 enum machine_mode mode;
2120 int n ATTRIBUTE_UNUSED;
2122 HOST_WIDE_INT c0, c1;
2124 if (mode == QImode || mode == HImode || mode == SImode)
2127 dest = gen_reg_rtx (mode);
2128 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2132 if (GET_CODE (source) == CONST_INT)
2134 c0 = INTVAL (source);
2137 else if (GET_CODE (source) == CONST_DOUBLE)
2139 #if HOST_BITS_PER_WIDE_INT >= 64
2140 c0 = CONST_DOUBLE_LOW (source);
2143 c0 = CONST_DOUBLE_LOW (source);
2144 c1 = CONST_DOUBLE_HIGH (source);
2150 return rs6000_emit_set_long_const (dest, c0, c1);
2153 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2154 fall back to a straight forward decomposition. We do this to avoid
2155 exponential run times encountered when looking for longer sequences
2156 with rs6000_emit_set_const. */
2158 rs6000_emit_set_long_const (dest, c1, c2)
2160 HOST_WIDE_INT c1, c2;
2162 if (!TARGET_POWERPC64)
2164 rtx operand1, operand2;
2166 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2168 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2170 emit_move_insn (operand1, GEN_INT (c1));
2171 emit_move_insn (operand2, GEN_INT (c2));
2175 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2178 ud2 = (c1 & 0xffff0000) >> 16;
2179 #if HOST_BITS_PER_WIDE_INT >= 64
2183 ud4 = (c2 & 0xffff0000) >> 16;
2185 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2186 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2189 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2191 emit_move_insn (dest, GEN_INT (ud1));
2194 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2195 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2198 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2201 emit_move_insn (dest, GEN_INT (ud2 << 16));
2203 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2205 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2206 || (ud4 == 0 && ! (ud3 & 0x8000)))
2209 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2212 emit_move_insn (dest, GEN_INT (ud3 << 16));
2215 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2216 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2218 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2223 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2226 emit_move_insn (dest, GEN_INT (ud4 << 16));
2229 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2231 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2233 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2234 GEN_INT (ud2 << 16)));
2236 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2242 /* Emit a move from SOURCE to DEST in mode MODE. */
2244 rs6000_emit_move (dest, source, mode)
2247 enum machine_mode mode;
2251 operands[1] = source;
2253 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2254 if (GET_CODE (operands[1]) == CONST_DOUBLE
2255 && ! FLOAT_MODE_P (mode)
2256 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2258 /* FIXME. This should never happen. */
2259 /* Since it seems that it does, do the safe thing and convert
2261 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2263 if (GET_CODE (operands[1]) == CONST_DOUBLE
2264 && ! FLOAT_MODE_P (mode)
2265 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2266 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2267 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2268 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2271 /* Check if GCC is setting up a block move that will end up using FP
2272 registers as temporaries. We must make sure this is acceptable. */
2273 if (GET_CODE (operands[0]) == MEM
2274 && GET_CODE (operands[1]) == MEM
2276 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2277 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2278 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2279 ? 32 : MEM_ALIGN (operands[0])))
2280 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2282 : MEM_ALIGN (operands[1]))))
2283 && ! MEM_VOLATILE_P (operands [0])
2284 && ! MEM_VOLATILE_P (operands [1]))
2286 emit_move_insn (adjust_address (operands[0], SImode, 0),
2287 adjust_address (operands[1], SImode, 0));
2288 emit_move_insn (adjust_address (operands[0], SImode, 4),
2289 adjust_address (operands[1], SImode, 4));
2293 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2294 operands[1] = force_reg (mode, operands[1]);
2296 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2297 && GET_CODE (operands[0]) == MEM)
2301 if (reload_in_progress || reload_completed)
2302 regnum = true_regnum (operands[1]);
2303 else if (GET_CODE (operands[1]) == REG)
2304 regnum = REGNO (operands[1]);
2308 /* If operands[1] is a register, on POWER it may have
2309 double-precision data in it, so truncate it to single
2311 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2314 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2315 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2316 operands[1] = newreg;
2320 /* Handle the case where reload calls us with an invalid address;
2321 and the case of CONSTANT_P_RTX. */
2322 if (!ALTIVEC_VECTOR_MODE (mode)
2323 && (! general_operand (operands[1], mode)
2324 || ! nonimmediate_operand (operands[0], mode)
2325 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2327 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2331 /* FIXME: In the long term, this switch statement should go away
2332 and be replaced by a sequence of tests based on things like
2338 if (CONSTANT_P (operands[1])
2339 && GET_CODE (operands[1]) != CONST_INT)
2340 operands[1] = force_const_mem (mode, operands[1]);
2346 if (CONSTANT_P (operands[1])
2347 && ! easy_fp_constant (operands[1], mode))
2348 operands[1] = force_const_mem (mode, operands[1]);
2355 if (CONSTANT_P (operands[1])
2356 && !easy_vector_constant (operands[1]))
2357 operands[1] = force_const_mem (mode, operands[1]);
2362 /* Use default pattern for address of ELF small data */
2365 && DEFAULT_ABI == ABI_V4
2366 && (GET_CODE (operands[1]) == SYMBOL_REF
2367 || GET_CODE (operands[1]) == CONST)
2368 && small_data_operand (operands[1], mode))
2370 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2374 if (DEFAULT_ABI == ABI_V4
2375 && mode == Pmode && mode == SImode
2376 && flag_pic == 1 && got_operand (operands[1], mode))
2378 emit_insn (gen_movsi_got (operands[0], operands[1]));
2382 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2383 && TARGET_NO_TOC && ! flag_pic
2385 && CONSTANT_P (operands[1])
2386 && GET_CODE (operands[1]) != HIGH
2387 && GET_CODE (operands[1]) != CONST_INT)
2389 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2391 /* If this is a function address on -mcall-aixdesc,
2392 convert it to the address of the descriptor. */
2393 if (DEFAULT_ABI == ABI_AIX
2394 && GET_CODE (operands[1]) == SYMBOL_REF
2395 && XSTR (operands[1], 0)[0] == '.')
2397 const char *name = XSTR (operands[1], 0);
2399 while (*name == '.')
2401 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2402 CONSTANT_POOL_ADDRESS_P (new_ref)
2403 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2404 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2405 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2406 operands[1] = new_ref;
2409 if (DEFAULT_ABI == ABI_DARWIN)
2411 emit_insn (gen_macho_high (target, operands[1]));
2412 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2416 emit_insn (gen_elf_high (target, operands[1]));
2417 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2421 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2422 and we have put it in the TOC, we just need to make a TOC-relative
2425 && GET_CODE (operands[1]) == SYMBOL_REF
2426 && CONSTANT_POOL_EXPR_P (operands[1])
2427 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2428 get_pool_mode (operands[1])))
2430 operands[1] = create_TOC_reference (operands[1]);
2432 else if (mode == Pmode
2433 && CONSTANT_P (operands[1])
2434 && ((GET_CODE (operands[1]) != CONST_INT
2435 && ! easy_fp_constant (operands[1], mode))
2436 || (GET_CODE (operands[1]) == CONST_INT
2437 && num_insns_constant (operands[1], mode) > 2)
2438 || (GET_CODE (operands[0]) == REG
2439 && FP_REGNO_P (REGNO (operands[0]))))
2440 && GET_CODE (operands[1]) != HIGH
2441 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2442 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2444 /* Emit a USE operation so that the constant isn't deleted if
2445 expensive optimizations are turned on because nobody
2446 references it. This should only be done for operands that
2447 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2448 This should not be done for operands that contain LABEL_REFs.
2449 For now, we just handle the obvious case. */
2450 if (GET_CODE (operands[1]) != LABEL_REF)
2451 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2454 /* Darwin uses a special PIC legitimizer. */
2455 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2458 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2460 if (operands[0] != operands[1])
2461 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2466 /* If we are to limit the number of things we put in the TOC and
2467 this is a symbol plus a constant we can add in one insn,
2468 just put the symbol in the TOC and add the constant. Don't do
2469 this if reload is in progress. */
2470 if (GET_CODE (operands[1]) == CONST
2471 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2472 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2473 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2474 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2475 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2476 && ! side_effects_p (operands[0]))
2479 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2480 rtx other = XEXP (XEXP (operands[1], 0), 1);
2482 sym = force_reg (mode, sym);
2484 emit_insn (gen_addsi3 (operands[0], sym, other));
2486 emit_insn (gen_adddi3 (operands[0], sym, other));
2490 operands[1] = force_const_mem (mode, operands[1]);
2493 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2494 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2495 get_pool_constant (XEXP (operands[1], 0)),
2496 get_pool_mode (XEXP (operands[1], 0))))
2499 = gen_rtx_MEM (mode,
2500 create_TOC_reference (XEXP (operands[1], 0)));
2501 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2502 RTX_UNCHANGING_P (operands[1]) = 1;
2508 if (GET_CODE (operands[0]) == MEM
2509 && GET_CODE (XEXP (operands[0], 0)) != REG
2510 && ! reload_in_progress)
2512 = replace_equiv_address (operands[0],
2513 copy_addr_to_reg (XEXP (operands[0], 0)));
2515 if (GET_CODE (operands[1]) == MEM
2516 && GET_CODE (XEXP (operands[1], 0)) != REG
2517 && ! reload_in_progress)
2519 = replace_equiv_address (operands[1],
2520 copy_addr_to_reg (XEXP (operands[1], 0)));
2527 /* Above, we may have called force_const_mem which may have returned
2528 an invalid address. If we can, fix this up; otherwise, reload will
2529 have to deal with it. */
2530 if (GET_CODE (operands[1]) == MEM
2531 && ! memory_address_p (mode, XEXP (operands[1], 0))
2532 && ! reload_in_progress)
2533 operands[1] = adjust_address (operands[1], mode, 0);
2535 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2539 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2540 for a call to a function whose data type is FNTYPE.
2541 For a library call, FNTYPE is 0.
2543 For incoming args we set the number of arguments in the prototype large
2544 so we never return a PARALLEL. */
2547 init_cumulative_args (cum, fntype, libname, incoming)
2548 CUMULATIVE_ARGS *cum;
2550 rtx libname ATTRIBUTE_UNUSED;
2553 static CUMULATIVE_ARGS zero_cumulative;
2555 *cum = zero_cumulative;
2557 cum->fregno = FP_ARG_MIN_REG;
2558 cum->vregno = ALTIVEC_ARG_MIN_REG;
2559 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2560 cum->call_cookie = CALL_NORMAL;
2561 cum->sysv_gregno = GP_ARG_MIN_REG;
2564 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2566 else if (cum->prototype)
2567 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2568 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2569 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2572 cum->nargs_prototype = 0;
2574 cum->orig_nargs = cum->nargs_prototype;
2576 /* Check for a longcall attribute. */
2578 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2579 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2580 cum->call_cookie = CALL_LONG;
2582 if (TARGET_DEBUG_ARG)
2584 fprintf (stderr, "\ninit_cumulative_args:");
2587 tree ret_type = TREE_TYPE (fntype);
2588 fprintf (stderr, " ret code = %s,",
2589 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2592 if (cum->call_cookie & CALL_LONG)
2593 fprintf (stderr, " longcall,");
2595 fprintf (stderr, " proto = %d, nargs = %d\n",
2596 cum->prototype, cum->nargs_prototype);
2600 /* If defined, a C expression which determines whether, and in which
2601 direction, to pad out an argument with extra space. The value
2602 should be of type `enum direction': either `upward' to pad above
2603 the argument, `downward' to pad below, or `none' to inhibit
2606 For the AIX ABI structs are always stored left shifted in their
2610 function_arg_padding (mode, type)
2611 enum machine_mode mode;
2614 if (type != 0 && AGGREGATE_TYPE_P (type))
2617 /* This is the default definition. */
2618 return (! BYTES_BIG_ENDIAN
2621 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2622 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2623 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2624 ? downward : upward));
2627 /* If defined, a C expression that gives the alignment boundary, in bits,
2628 of an argument with the specified mode and type. If it is not defined,
2629 PARM_BOUNDARY is used for all arguments.
2631 V.4 wants long longs to be double word aligned. */
2634 function_arg_boundary (mode, type)
2635 enum machine_mode mode;
2636 tree type ATTRIBUTE_UNUSED;
2638 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2640 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2643 return PARM_BOUNDARY;
2646 /* Update the data in CUM to advance over an argument
2647 of mode MODE and data type TYPE.
2648 (TYPE is null for libcalls where that information may not be available.) */
2651 function_arg_advance (cum, mode, type, named)
2652 CUMULATIVE_ARGS *cum;
2653 enum machine_mode mode;
2657 cum->nargs_prototype--;
2659 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2661 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2664 cum->words += RS6000_ARG_SIZE (mode, type);
2666 else if (DEFAULT_ABI == ABI_V4)
2668 if (TARGET_HARD_FLOAT
2669 && (mode == SFmode || mode == DFmode))
2671 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2676 cum->words += cum->words & 1;
2677 cum->words += RS6000_ARG_SIZE (mode, type);
2683 int gregno = cum->sysv_gregno;
2685 /* Aggregates and IEEE quad get passed by reference. */
2686 if ((type && AGGREGATE_TYPE_P (type))
2690 n_words = RS6000_ARG_SIZE (mode, type);
2692 /* Long long is put in odd registers. */
2693 if (n_words == 2 && (gregno & 1) == 0)
2696 /* Long long is not split between registers and stack. */
2697 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2699 /* Long long is aligned on the stack. */
2701 cum->words += cum->words & 1;
2702 cum->words += n_words;
2705 /* Note: continuing to accumulate gregno past when we've started
2706 spilling to the stack indicates the fact that we've started
2707 spilling to the stack to expand_builtin_saveregs. */
2708 cum->sysv_gregno = gregno + n_words;
2711 if (TARGET_DEBUG_ARG)
2713 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2714 cum->words, cum->fregno);
2715 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2716 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2717 fprintf (stderr, "mode = %4s, named = %d\n",
2718 GET_MODE_NAME (mode), named);
2723 int align = (TARGET_32BIT && (cum->words & 1) != 0
2724 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2726 cum->words += align + RS6000_ARG_SIZE (mode, type);
2728 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2731 if (TARGET_DEBUG_ARG)
2733 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2734 cum->words, cum->fregno);
2735 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2736 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2737 fprintf (stderr, "named = %d, align = %d\n", named, align);
2742 /* Determine where to put an argument to a function.
2743 Value is zero to push the argument on the stack,
2744 or a hard register in which to store the argument.
2746 MODE is the argument's machine mode.
2747 TYPE is the data type of the argument (as a tree).
2748 This is null for libcalls where that information may
2750 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2751 the preceding args and about the function being called.
2752 NAMED is nonzero if this argument is a named parameter
2753 (otherwise it is an extra parameter matching an ellipsis).
2755 On RS/6000 the first eight words of non-FP are normally in registers
2756 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2757 Under V.4, the first 8 FP args are in registers.
2759 If this is floating-point and no prototype is specified, we use
2760 both an FP and integer register (or possibly FP reg and stack). Library
2761 functions (when TYPE is zero) always have the proper types for args,
2762 so we can pass the FP value just in one register. emit_library_function
2763 doesn't support PARALLEL anyway. */
2766 function_arg (cum, mode, type, named)
2767 CUMULATIVE_ARGS *cum;
2768 enum machine_mode mode;
2772 enum rs6000_abi abi = DEFAULT_ABI;
2774 /* Return a marker to indicate whether CR1 needs to set or clear the
2775 bit that V.4 uses to say fp args were passed in registers.
2776 Assume that we don't need the marker for software floating point,
2777 or compiler generated library calls. */
2778 if (mode == VOIDmode)
2781 && TARGET_HARD_FLOAT
2782 && cum->nargs_prototype < 0
2783 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2785 return GEN_INT (cum->call_cookie
2786 | ((cum->fregno == FP_ARG_MIN_REG)
2787 ? CALL_V4_SET_FP_ARGS
2788 : CALL_V4_CLEAR_FP_ARGS));
2791 return GEN_INT (cum->call_cookie);
2794 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2796 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2797 return gen_rtx_REG (mode, cum->vregno);
2801 else if (abi == ABI_V4)
2803 if (TARGET_HARD_FLOAT
2804 && (mode == SFmode || mode == DFmode))
2806 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2807 return gen_rtx_REG (mode, cum->fregno);
2814 int gregno = cum->sysv_gregno;
2816 /* Aggregates and IEEE quad get passed by reference. */
2817 if ((type && AGGREGATE_TYPE_P (type))
2821 n_words = RS6000_ARG_SIZE (mode, type);
2823 /* Long long is put in odd registers. */
2824 if (n_words == 2 && (gregno & 1) == 0)
2827 /* Long long is not split between registers and stack. */
2828 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2829 return gen_rtx_REG (mode, gregno);
2836 int align = (TARGET_32BIT && (cum->words & 1) != 0
2837 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2838 int align_words = cum->words + align;
2840 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2843 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2846 || ((cum->nargs_prototype > 0)
2847 /* IBM AIX extended its linkage convention definition always
2848 to require FP args after register save area hole on the
2850 && (DEFAULT_ABI != ABI_AIX
2852 || (align_words < GP_ARG_NUM_REG))))
2853 return gen_rtx_REG (mode, cum->fregno);
2855 return gen_rtx_PARALLEL (mode,
2857 gen_rtx_EXPR_LIST (VOIDmode,
2858 ((align_words >= GP_ARG_NUM_REG)
2861 + RS6000_ARG_SIZE (mode, type)
2863 /* If this is partially on the stack, then
2864 we only include the portion actually
2865 in registers here. */
2866 ? gen_rtx_REG (SImode,
2867 GP_ARG_MIN_REG + align_words)
2868 : gen_rtx_REG (mode,
2869 GP_ARG_MIN_REG + align_words))),
2871 gen_rtx_EXPR_LIST (VOIDmode,
2872 gen_rtx_REG (mode, cum->fregno),
2875 else if (align_words < GP_ARG_NUM_REG)
2876 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2882 /* For an arg passed partly in registers and partly in memory,
2883 this is the number of registers used.
2884 For args passed entirely in registers or entirely in memory, zero. */
2887 function_arg_partial_nregs (cum, mode, type, named)
2888 CUMULATIVE_ARGS *cum;
2889 enum machine_mode mode;
2891 int named ATTRIBUTE_UNUSED;
2893 if (DEFAULT_ABI == ABI_V4)
2896 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2897 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2899 if (cum->nargs_prototype >= 0)
2903 if (cum->words < GP_ARG_NUM_REG
2904 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2906 int ret = GP_ARG_NUM_REG - cum->words;
2907 if (ret && TARGET_DEBUG_ARG)
2908 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2916 /* A C expression that indicates when an argument must be passed by
2917 reference. If nonzero for an argument, a copy of that argument is
2918 made in memory and a pointer to the argument is passed instead of
2919 the argument itself. The pointer is passed in whatever way is
2920 appropriate for passing a pointer to that type.
2922 Under V.4, structures and unions are passed by reference. */
2925 function_arg_pass_by_reference (cum, mode, type, named)
2926 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2927 enum machine_mode mode ATTRIBUTE_UNUSED;
2929 int named ATTRIBUTE_UNUSED;
2931 if (DEFAULT_ABI == ABI_V4
2932 && ((type && AGGREGATE_TYPE_P (type))
2935 if (TARGET_DEBUG_ARG)
2936 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2944 /* Perform any needed actions needed for a function that is receiving a
2945 variable number of arguments.
2949 MODE and TYPE are the mode and type of the current parameter.
2951 PRETEND_SIZE is a variable that should be set to the amount of stack
2952 that must be pushed by the prolog to pretend that our caller pushed
2955 Normally, this macro will push all remaining incoming registers on the
2956 stack and set PRETEND_SIZE to the length of the registers pushed. */
2959 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2960 CUMULATIVE_ARGS *cum;
2961 enum machine_mode mode;
2967 CUMULATIVE_ARGS next_cum;
2968 int reg_size = TARGET_32BIT ? 4 : 8;
2969 rtx save_area = NULL_RTX, mem;
2970 int first_reg_offset, set;
2974 fntype = TREE_TYPE (current_function_decl);
2975 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2976 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2977 != void_type_node));
2979 /* For varargs, we do not want to skip the dummy va_dcl argument.
2980 For stdargs, we do want to skip the last named argument. */
2983 function_arg_advance (&next_cum, mode, type, 1);
2985 if (DEFAULT_ABI == ABI_V4)
2987 /* Indicate to allocate space on the stack for varargs save area. */
2988 cfun->machine->sysv_varargs_p = 1;
2990 save_area = plus_constant (virtual_stack_vars_rtx,
2991 - RS6000_VARARGS_SIZE);
2993 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2997 first_reg_offset = next_cum.words;
2998 save_area = virtual_incoming_args_rtx;
2999 cfun->machine->sysv_varargs_p = 0;
3001 if (MUST_PASS_IN_STACK (mode, type))
3002 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3005 set = get_varargs_alias_set ();
3006 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3008 mem = gen_rtx_MEM (BLKmode,
3009 plus_constant (save_area,
3010 first_reg_offset * reg_size)),
3011 set_mem_alias_set (mem, set);
3012 set_mem_align (mem, BITS_PER_WORD);
3015 (GP_ARG_MIN_REG + first_reg_offset, mem,
3016 GP_ARG_NUM_REG - first_reg_offset,
3017 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3019 /* ??? Does ABI_V4 need this at all? */
3020 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
3023 /* Save FP registers if needed. */
3024 if (DEFAULT_ABI == ABI_V4
3025 && TARGET_HARD_FLOAT && ! no_rtl
3026 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3028 int fregno = next_cum.fregno;
3029 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3030 rtx lab = gen_label_rtx ();
3031 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3033 emit_jump_insn (gen_rtx_SET (VOIDmode,
3035 gen_rtx_IF_THEN_ELSE (VOIDmode,
3036 gen_rtx_NE (VOIDmode, cr1,
3038 gen_rtx_LABEL_REF (VOIDmode, lab),
3041 while (fregno <= FP_ARG_V4_MAX_REG)
3043 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3044 set_mem_alias_set (mem, set);
3045 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3054 /* Create the va_list data type. */
3057 rs6000_build_va_list ()
3059 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3061 /* For AIX, prefer 'char *' because that's what the system
3062 header files like. */
3063 if (DEFAULT_ABI != ABI_V4)
3064 return build_pointer_type (char_type_node);
3066 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3067 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3069 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3070 unsigned_char_type_node);
3071 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3072 unsigned_char_type_node);
3073 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3075 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3078 DECL_FIELD_CONTEXT (f_gpr) = record;
3079 DECL_FIELD_CONTEXT (f_fpr) = record;
3080 DECL_FIELD_CONTEXT (f_ovf) = record;
3081 DECL_FIELD_CONTEXT (f_sav) = record;
3083 TREE_CHAIN (record) = type_decl;
3084 TYPE_NAME (record) = type_decl;
3085 TYPE_FIELDS (record) = f_gpr;
3086 TREE_CHAIN (f_gpr) = f_fpr;
3087 TREE_CHAIN (f_fpr) = f_ovf;
3088 TREE_CHAIN (f_ovf) = f_sav;
3090 layout_type (record);
3092 /* The correct type is an array type of one element. */
3093 return build_array_type (record, build_index_type (size_zero_node));
3096 /* Implement va_start. */
3099 rs6000_va_start (stdarg_p, valist, nextarg)
3104 HOST_WIDE_INT words, n_gpr, n_fpr;
3105 tree f_gpr, f_fpr, f_ovf, f_sav;
3106 tree gpr, fpr, ovf, sav, t;
3108 /* Only SVR4 needs something special. */
3109 if (DEFAULT_ABI != ABI_V4)
3111 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
3115 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3116 f_fpr = TREE_CHAIN (f_gpr);
3117 f_ovf = TREE_CHAIN (f_fpr);
3118 f_sav = TREE_CHAIN (f_ovf);
3120 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3121 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3122 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3123 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3124 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3126 /* Count number of gp and fp argument registers used. */
3127 words = current_function_args_info.words;
3128 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3129 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3131 if (TARGET_DEBUG_ARG)
3133 fputs ("va_start: words = ", stderr);
3134 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3135 fputs (", n_gpr = ", stderr);
3136 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3137 fputs (", n_fpr = ", stderr);
3138 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3139 putc ('\n', stderr);
3142 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3143 TREE_SIDE_EFFECTS (t) = 1;
3144 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3146 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3147 TREE_SIDE_EFFECTS (t) = 1;
3148 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3150 /* Find the overflow area. */
3151 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3153 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3154 build_int_2 (words * UNITS_PER_WORD, 0));
3155 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3156 TREE_SIDE_EFFECTS (t) = 1;
3157 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3159 /* Find the register save area. */
3160 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3161 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3162 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3163 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3164 TREE_SIDE_EFFECTS (t) = 1;
3165 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3168 /* Implement va_arg. */
3171 rs6000_va_arg (valist, type)
3174 tree f_gpr, f_fpr, f_ovf, f_sav;
3175 tree gpr, fpr, ovf, sav, reg, t, u;
3176 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3177 rtx lab_false, lab_over, addr_rtx, r;
3179 if (DEFAULT_ABI != ABI_V4)
3180 return std_expand_builtin_va_arg (valist, type);
3182 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3183 f_fpr = TREE_CHAIN (f_gpr);
3184 f_ovf = TREE_CHAIN (f_fpr);
3185 f_sav = TREE_CHAIN (f_ovf);
3187 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3188 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3189 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3190 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3191 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3193 size = int_size_in_bytes (type);
3194 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3196 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3198 /* Aggregates and long doubles are passed by reference. */
3204 size = UNITS_PER_WORD;
3207 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3209 /* FP args go in FP registers, if present. */
3218 /* Otherwise into GP registers. */
3226 /* Pull the value out of the saved registers ... */
3228 lab_false = gen_label_rtx ();
3229 lab_over = gen_label_rtx ();
3230 addr_rtx = gen_reg_rtx (Pmode);
3232 /* AltiVec vectors never go in registers. */
3233 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
3235 TREE_THIS_VOLATILE (reg) = 1;
3236 emit_cmp_and_jump_insns
3237 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3238 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3241 /* Long long is aligned in the registers. */
3244 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3245 build_int_2 (n_reg - 1, 0));
3246 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3247 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3248 TREE_SIDE_EFFECTS (u) = 1;
3249 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3253 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3257 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3258 build_int_2 (n_reg, 0));
3259 TREE_SIDE_EFFECTS (u) = 1;
3261 u = build1 (CONVERT_EXPR, integer_type_node, u);
3262 TREE_SIDE_EFFECTS (u) = 1;
3264 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3265 TREE_SIDE_EFFECTS (u) = 1;
3267 t = build (PLUS_EXPR, ptr_type_node, t, u);
3268 TREE_SIDE_EFFECTS (t) = 1;
3270 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3272 emit_move_insn (addr_rtx, r);
3274 emit_jump_insn (gen_jump (lab_over));
3278 emit_label (lab_false);
3280 /* ... otherwise out of the overflow area. */
3282 /* Make sure we don't find reg 7 for the next int arg.
3284 All AltiVec vectors go in the overflow area. So in the AltiVec
3285 case we need to get the vectors from the overflow area, but
3286 remember where the GPRs and FPRs are. */
3287 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
3288 || !TARGET_ALTIVEC))
3290 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3291 TREE_SIDE_EFFECTS (t) = 1;
3292 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3295 /* Care for on-stack alignment if needed. */
3302 /* AltiVec vectors are 16 byte aligned. */
3303 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
3308 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3309 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3313 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3315 emit_move_insn (addr_rtx, r);
3317 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3318 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3319 TREE_SIDE_EFFECTS (t) = 1;
3320 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3322 emit_label (lab_over);
3326 r = gen_rtx_MEM (Pmode, addr_rtx);
3327 set_mem_alias_set (r, get_varargs_alias_set ());
3328 emit_move_insn (addr_rtx, r);
3336 #define def_builtin(MASK, NAME, TYPE, CODE) \
3338 if ((MASK) & target_flags) \
3339 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3342 struct builtin_description
3344 const unsigned int mask;
3345 const enum insn_code icode;
3346 const char *const name;
3347 const enum rs6000_builtins code;
3350 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3352 static const struct builtin_description bdesc_3arg[] =
3354 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3356 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3357 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3358 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3359 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3360 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3361 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3362 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3363 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3364 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3365 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3366 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3367 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3368 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3369 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3370 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3371 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3372 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3373 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3374 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3375 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3376 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3379 /* DST operations: void foo (void *, const int, const char). */
3381 static const struct builtin_description bdesc_dst[] =
3383 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3384 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3385 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3386 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3389 /* Simple binary operations: VECc = foo (VECa, VECb). */
3391 static const struct builtin_description bdesc_2arg[] =
3393 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3394 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3395 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3396 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3397 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3398 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3399 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3400 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3401 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3403 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3404 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3405 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3406 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3407 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3408 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3409 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3410 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3411 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3412 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3413 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3415 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3416 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3417 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3418 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3419 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3420 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3421 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3422 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3423 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3424 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3425 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3426 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3427 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3428 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3429 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3430 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3431 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3432 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3433 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3434 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3435 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3436 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3437 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3438 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3439 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3440 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3441 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3442 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3443 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3444 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3445 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3446 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3447 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3448 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3449 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3450 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3451 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3452 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3453 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3454 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3455 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3456 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3457 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3458 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3459 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3460 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3461 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3462 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3463 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3464 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3465 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3466 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3467 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3468 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3469 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3470 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3471 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3472 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3473 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3474 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3475 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3476 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3477 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3478 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3479 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3480 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3481 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3482 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3483 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3484 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3485 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3486 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3487 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3488 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3489 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3490 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3491 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3492 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3493 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3494 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3495 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3496 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3497 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3498 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3499 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3500 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3501 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3502 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3503 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3504 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3505 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3508 /* AltiVec predicates. */
3510 struct builtin_description_predicates
3512 const unsigned int mask;
3513 const enum insn_code icode;
3515 const char *const name;
3516 const enum rs6000_builtins code;
3519 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3521 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3522 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3523 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3524 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3525 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3526 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3527 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3528 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3529 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3530 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3531 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3532 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3533 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3536 /* ABS* opreations. */
3538 static const struct builtin_description bdesc_abs[] =
3540 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
3541 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
3542 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
3543 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
3544 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
3545 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
3546 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
3549 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3552 static const struct builtin_description bdesc_1arg[] =
3554 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3555 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3556 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3557 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3558 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3559 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3560 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3561 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3562 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3563 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3564 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3565 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3566 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3567 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3568 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3569 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3570 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
3574 rs6000_expand_unop_builtin (icode, arglist, target)
3575 enum insn_code icode;
3580 tree arg0 = TREE_VALUE (arglist);
3581 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3582 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3583 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3585 /* If we got invalid arguments bail out before generating bad rtl. */
3586 if (arg0 == error_mark_node)
3591 /* Only allow 5-bit *signed* literals. */
3592 case CODE_FOR_altivec_vspltisb:
3593 case CODE_FOR_altivec_vspltish:
3594 case CODE_FOR_altivec_vspltisw:
3595 if (GET_CODE (op0) != CONST_INT
3596 || INTVAL (op0) > 0x1f
3597 || INTVAL (op0) < -0x1f)
3599 error ("argument 1 must be a 5-bit signed literal");
3608 || GET_MODE (target) != tmode
3609 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3610 target = gen_reg_rtx (tmode);
3612 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3613 op0 = copy_to_mode_reg (mode0, op0);
3615 pat = GEN_FCN (icode) (target, op0);
3624 altivec_expand_abs_builtin (icode, arglist, target)
3625 enum insn_code icode;
3629 rtx pat, scratch1, scratch2;
3630 tree arg0 = TREE_VALUE (arglist);
3631 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3632 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3633 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3635 /* If we have invalid arguments, bail out before generating bad rtl. */
3636 if (arg0 == error_mark_node)
3640 || GET_MODE (target) != tmode
3641 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3642 target = gen_reg_rtx (tmode);
3644 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3645 op0 = copy_to_mode_reg (mode0, op0);
3647 scratch1 = gen_reg_rtx (mode0);
3648 scratch2 = gen_reg_rtx (mode0);
3650 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3659 rs6000_expand_binop_builtin (icode, arglist, target)
3660 enum insn_code icode;
3665 tree arg0 = TREE_VALUE (arglist);
3666 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3667 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3668 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3669 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3670 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3671 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3673 /* If we got invalid arguments bail out before generating bad rtl. */
3674 if (arg0 == error_mark_node || arg1 == error_mark_node)
3679 /* Only allow 5-bit unsigned literals. */
3680 case CODE_FOR_altivec_vcfux:
3681 case CODE_FOR_altivec_vcfsx:
3682 case CODE_FOR_altivec_vctsxs:
3683 case CODE_FOR_altivec_vctuxs:
3684 case CODE_FOR_altivec_vspltb:
3685 case CODE_FOR_altivec_vsplth:
3686 case CODE_FOR_altivec_vspltw:
3687 if (TREE_CODE (arg1) != INTEGER_CST
3688 || TREE_INT_CST_LOW (arg1) & ~0x1f)
3690 error ("argument 2 must be a 5-bit unsigned literal");
3699 || GET_MODE (target) != tmode
3700 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3701 target = gen_reg_rtx (tmode);
3703 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3704 op0 = copy_to_mode_reg (mode0, op0);
3705 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3706 op1 = copy_to_mode_reg (mode1, op1);
3708 pat = GEN_FCN (icode) (target, op0, op1);
3717 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3718 enum insn_code icode;
3724 tree cr6_form = TREE_VALUE (arglist);
3725 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3726 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3727 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3728 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3729 enum machine_mode tmode = SImode;
3730 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3731 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3734 if (TREE_CODE (cr6_form) != INTEGER_CST)
3736 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3740 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3745 /* If we have invalid arguments, bail out before generating bad rtl. */
3746 if (arg0 == error_mark_node || arg1 == error_mark_node)
3750 || GET_MODE (target) != tmode
3751 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3752 target = gen_reg_rtx (tmode);
3754 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3755 op0 = copy_to_mode_reg (mode0, op0);
3756 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3757 op1 = copy_to_mode_reg (mode1, op1);
3759 scratch = gen_reg_rtx (mode0);
3761 pat = GEN_FCN (icode) (scratch, op0, op1,
3762 gen_rtx (SYMBOL_REF, Pmode, opcode));
3767 /* The vec_any* and vec_all* predicates use the same opcodes for two
3768 different operations, but the bits in CR6 will be different
3769 depending on what information we want. So we have to play tricks
3770 with CR6 to get the right bits out.
3772 If you think this is disgusting, look at the specs for the
3773 AltiVec predicates. */
3775 switch (cr6_form_int)
3778 emit_insn (gen_cr6_test_for_zero (target));
3781 emit_insn (gen_cr6_test_for_zero_reverse (target));
3784 emit_insn (gen_cr6_test_for_lt (target));
3787 emit_insn (gen_cr6_test_for_lt_reverse (target));
3790 error ("argument 1 of __builtin_altivec_predicate is out of range");
3798 altivec_expand_stv_builtin (icode, arglist)
3799 enum insn_code icode;
3802 tree arg0 = TREE_VALUE (arglist);
3803 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3804 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3805 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3806 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3807 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3809 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3810 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3811 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3813 /* Invalid arguments. Bail before doing anything stoopid! */
3814 if (arg0 == error_mark_node
3815 || arg1 == error_mark_node
3816 || arg2 == error_mark_node)
3819 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3820 op0 = copy_to_mode_reg (mode2, op0);
3821 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3822 op1 = copy_to_mode_reg (mode0, op1);
3823 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3824 op2 = copy_to_mode_reg (mode1, op2);
3826 pat = GEN_FCN (icode) (op1, op2, op0);
3833 rs6000_expand_ternop_builtin (icode, arglist, target)
3834 enum insn_code icode;
3839 tree arg0 = TREE_VALUE (arglist);
3840 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3841 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3842 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3843 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3844 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3845 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3846 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3847 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3848 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3850 /* If we got invalid arguments bail out before generating bad rtl. */
3851 if (arg0 == error_mark_node
3852 || arg1 == error_mark_node
3853 || arg2 == error_mark_node)
3858 /* Only allow 4-bit unsigned literals. */
3859 case CODE_FOR_altivec_vsldoi_4sf:
3860 case CODE_FOR_altivec_vsldoi_4si:
3861 case CODE_FOR_altivec_vsldoi_8hi:
3862 case CODE_FOR_altivec_vsldoi_16qi:
3863 if (TREE_CODE (arg2) != INTEGER_CST
3864 || TREE_INT_CST_LOW (arg2) & ~0xf)
3866 error ("argument 3 must be a 4-bit unsigned literal");
3875 || GET_MODE (target) != tmode
3876 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3877 target = gen_reg_rtx (tmode);
3879 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3880 op0 = copy_to_mode_reg (mode0, op0);
3881 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3882 op1 = copy_to_mode_reg (mode1, op1);
3883 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3884 op2 = copy_to_mode_reg (mode2, op2);
3886 pat = GEN_FCN (icode) (target, op0, op1, op2);
3894 /* Expand the lvx builtins. */
3896 altivec_expand_ld_builtin (exp, target, expandedp)
3901 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3902 tree arglist = TREE_OPERAND (exp, 1);
3903 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3905 enum machine_mode tmode, mode0;
3906 rtx pat, target, op0;
3907 enum insn_code icode;
3911 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3912 icode = CODE_FOR_altivec_lvx_16qi;
3914 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3915 icode = CODE_FOR_altivec_lvx_8hi;
3917 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3918 icode = CODE_FOR_altivec_lvx_4si;
3920 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3921 icode = CODE_FOR_altivec_lvx_4sf;
3930 arg0 = TREE_VALUE (arglist);
3931 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3932 tmode = insn_data[icode].operand[0].mode;
3933 mode0 = insn_data[icode].operand[1].mode;
3936 || GET_MODE (target) != tmode
3937 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3938 target = gen_reg_rtx (tmode);
3940 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3941 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3943 pat = GEN_FCN (icode) (target, op0);
3950 /* Expand the stvx builtins. */
3952 altivec_expand_st_builtin (exp, target, expandedp)
3957 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3958 tree arglist = TREE_OPERAND (exp, 1);
3959 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3961 enum machine_mode mode0, mode1;
3962 rtx pat, target, op0, op1;
3963 enum insn_code icode;
3967 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3968 icode = CODE_FOR_altivec_stvx_16qi;
3970 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3971 icode = CODE_FOR_altivec_stvx_8hi;
3973 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3974 icode = CODE_FOR_altivec_stvx_4si;
3976 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3977 icode = CODE_FOR_altivec_stvx_4sf;
3984 arg0 = TREE_VALUE (arglist);
3985 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3986 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3987 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3988 mode0 = insn_data[icode].operand[0].mode;
3989 mode1 = insn_data[icode].operand[1].mode;
3991 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3992 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3993 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3994 op1 = copy_to_mode_reg (mode1, op1);
3996 pat = GEN_FCN (icode) (op0, op1);
4004 /* Expand the dst builtins. */
4006 altivec_expand_dst_builtin (exp, target, expandedp)
4011 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4012 tree arglist = TREE_OPERAND (exp, 1);
4013 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4014 tree arg0, arg1, arg2;
4015 enum machine_mode mode0, mode1, mode2;
4016 rtx pat, target, op0, op1, op2;
4017 struct builtin_description *d;
4022 /* Handle DST variants. */
4023 d = (struct builtin_description *) bdesc_dst;
4024 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4025 if (d->code == fcode)
4027 arg0 = TREE_VALUE (arglist);
4028 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4029 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4030 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4031 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4032 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4033 mode0 = insn_data[d->icode].operand[0].mode;
4034 mode1 = insn_data[d->icode].operand[1].mode;
4035 mode2 = insn_data[d->icode].operand[2].mode;
4037 /* Invalid arguments, bail out before generating bad rtl. */
4038 if (arg0 == error_mark_node
4039 || arg1 == error_mark_node
4040 || arg2 == error_mark_node)
4043 if (TREE_CODE (arg2) != INTEGER_CST
4044 || TREE_INT_CST_LOW (arg2) & ~0x3)
4046 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4050 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4051 op0 = copy_to_mode_reg (mode0, op0);
4052 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4053 op1 = copy_to_mode_reg (mode1, op1);
4055 pat = GEN_FCN (d->icode) (op0, op1, op2);
4066 /* Expand the builtin in EXP and store the result in TARGET. Store
4067 true in *EXPANDEDP if we found a builtin to expand. */
4069 altivec_expand_builtin (exp, target, expandedp)
4074 struct builtin_description *d;
4075 struct builtin_description_predicates *dp;
4077 enum insn_code icode;
4078 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4079 tree arglist = TREE_OPERAND (exp, 1);
4080 tree arg0, arg1, arg2;
4081 rtx op0, op1, op2, pat;
4082 enum machine_mode tmode, mode0, mode1, mode2;
4083 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4085 target = altivec_expand_ld_builtin (exp, target, expandedp);
4089 target = altivec_expand_st_builtin (exp, target, expandedp);
4093 target = altivec_expand_dst_builtin (exp, target, expandedp);
4101 case ALTIVEC_BUILTIN_STVX:
4102 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4103 case ALTIVEC_BUILTIN_STVEBX:
4104 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4105 case ALTIVEC_BUILTIN_STVEHX:
4106 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4107 case ALTIVEC_BUILTIN_STVEWX:
4108 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4109 case ALTIVEC_BUILTIN_STVXL:
4110 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
4112 case ALTIVEC_BUILTIN_MFVSCR:
4113 icode = CODE_FOR_altivec_mfvscr;
4114 tmode = insn_data[icode].operand[0].mode;
4117 || GET_MODE (target) != tmode
4118 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4119 target = gen_reg_rtx (tmode);
4121 pat = GEN_FCN (icode) (target);
4127 case ALTIVEC_BUILTIN_MTVSCR:
4128 icode = CODE_FOR_altivec_mtvscr;
4129 arg0 = TREE_VALUE (arglist);
4130 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4131 mode0 = insn_data[icode].operand[0].mode;
4133 /* If we got invalid arguments bail out before generating bad rtl. */
4134 if (arg0 == error_mark_node)
4137 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4138 op0 = copy_to_mode_reg (mode0, op0);
4140 pat = GEN_FCN (icode) (op0);
4145 case ALTIVEC_BUILTIN_DSSALL:
4146 emit_insn (gen_altivec_dssall ());
4149 case ALTIVEC_BUILTIN_DSS:
4150 icode = CODE_FOR_altivec_dss;
4151 arg0 = TREE_VALUE (arglist);
4152 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4153 mode0 = insn_data[icode].operand[0].mode;
4155 /* If we got invalid arguments bail out before generating bad rtl. */
4156 if (arg0 == error_mark_node)
4159 if (TREE_CODE (arg0) != INTEGER_CST
4160 || TREE_INT_CST_LOW (arg0) & ~0x3)
4162 error ("argument to dss must be a 2-bit unsigned literal");
4166 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4167 op0 = copy_to_mode_reg (mode0, op0);
4169 emit_insn (gen_altivec_dss (op0));
4173 /* Expand abs* operations. */
4174 d = (struct builtin_description *) bdesc_abs;
4175 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4176 if (d->code == fcode)
4177 return altivec_expand_abs_builtin (d->icode, arglist, target);
4179 /* Expand the AltiVec predicates. */
4180 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4181 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4182 if (dp->code == fcode)
4183 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4185 /* LV* are funky. We initialized them differently. */
4188 case ALTIVEC_BUILTIN_LVSL:
4189 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4191 case ALTIVEC_BUILTIN_LVSR:
4192 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4194 case ALTIVEC_BUILTIN_LVEBX:
4195 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4197 case ALTIVEC_BUILTIN_LVEHX:
4198 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4200 case ALTIVEC_BUILTIN_LVEWX:
4201 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4203 case ALTIVEC_BUILTIN_LVXL:
4204 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4206 case ALTIVEC_BUILTIN_LVX:
4207 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
4218 /* Expand an expression EXP that calls a built-in function,
4219 with result going to TARGET if that's convenient
4220 (and in mode MODE if that's convenient).
4221 SUBTARGET may be used as the target for computing one of EXP's operands.
4222 IGNORE is nonzero if the value is to be ignored. */
4225 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4228 rtx subtarget ATTRIBUTE_UNUSED;
4229 enum machine_mode mode ATTRIBUTE_UNUSED;
4230 int ignore ATTRIBUTE_UNUSED;
4232 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4233 tree arglist = TREE_OPERAND (exp, 1);
4234 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4235 struct builtin_description *d;
4242 ret = altivec_expand_builtin (exp, target, &success);
4248 /* Handle simple unary operations. */
4249 d = (struct builtin_description *) bdesc_1arg;
4250 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4251 if (d->code == fcode)
4252 return rs6000_expand_unop_builtin (d->icode, arglist, target);
4254 /* Handle simple binary operations. */
4255 d = (struct builtin_description *) bdesc_2arg;
4256 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4257 if (d->code == fcode)
4258 return rs6000_expand_binop_builtin (d->icode, arglist, target);
4260 /* Handle simple ternary operations. */
4261 d = (struct builtin_description *) bdesc_3arg;
4262 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4263 if (d->code == fcode)
4264 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
4271 rs6000_init_builtins ()
4274 altivec_init_builtins ();
4278 altivec_init_builtins (void)
4280 struct builtin_description *d;
4281 struct builtin_description_predicates *dp;
4284 tree endlink = void_list_node;
4286 tree pint_type_node = build_pointer_type (integer_type_node);
4287 tree pvoid_type_node = build_pointer_type (void_type_node);
4288 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4289 tree pchar_type_node = build_pointer_type (char_type_node);
4290 tree pfloat_type_node = build_pointer_type (float_type_node);
4292 tree v4sf_ftype_v4sf_v4sf_v16qi
4293 = build_function_type_list (V4SF_type_node,
4294 V4SF_type_node, V4SF_type_node,
4295 V16QI_type_node, NULL_TREE);
4296 tree v4si_ftype_v4si_v4si_v16qi
4297 = build_function_type_list (V4SI_type_node,
4298 V4SI_type_node, V4SI_type_node,
4299 V16QI_type_node, NULL_TREE);
4300 tree v8hi_ftype_v8hi_v8hi_v16qi
4301 = build_function_type_list (V8HI_type_node,
4302 V8HI_type_node, V8HI_type_node,
4303 V16QI_type_node, NULL_TREE);
4304 tree v16qi_ftype_v16qi_v16qi_v16qi
4305 = build_function_type_list (V16QI_type_node,
4306 V16QI_type_node, V16QI_type_node,
4307 V16QI_type_node, NULL_TREE);
4308 tree v4si_ftype_char
4309 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
4310 tree v8hi_ftype_char
4311 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
4312 tree v16qi_ftype_char
4313 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
4314 tree v4sf_ftype_v4sf
4315 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
4316 tree v4si_ftype_pint
4317 = build_function_type_list (V4SI_type_node, pint_type_node, NULL_TREE);
4318 tree v8hi_ftype_pshort
4319 = build_function_type_list (V8HI_type_node, pshort_type_node, NULL_TREE);
4320 tree v16qi_ftype_pchar
4321 = build_function_type_list (V16QI_type_node, pchar_type_node, NULL_TREE);
4322 tree v4sf_ftype_pfloat
4323 = build_function_type_list (V4SF_type_node, pfloat_type_node, NULL_TREE);
4324 tree v8hi_ftype_v16qi
4325 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
4326 tree void_ftype_pvoid_int_char
4327 = build_function_type_list (void_type_node,
4328 pvoid_type_node, integer_type_node,
4329 char_type_node, NULL_TREE);
4330 tree void_ftype_pint_v4si
4331 = build_function_type_list (void_type_node,
4332 pint_type_node, V4SI_type_node, NULL_TREE);
4333 tree void_ftype_pshort_v8hi
4334 = build_function_type_list (void_type_node,
4335 pshort_type_node, V8HI_type_node, NULL_TREE);
4336 tree void_ftype_pchar_v16qi
4337 = build_function_type_list (void_type_node,
4338 pchar_type_node, V16QI_type_node, NULL_TREE);
4339 tree void_ftype_pfloat_v4sf
4340 = build_function_type_list (void_type_node,
4341 pfloat_type_node, V4SF_type_node, NULL_TREE);
4342 tree void_ftype_v4si
4343 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
4344 tree void_ftype_v4si_int_pvoid
4345 = build_function_type_list (void_type_node,
4346 V4SI_type_node, integer_type_node,
4347 pvoid_type_node, NULL_TREE);
4349 tree void_ftype_v16qi_int_pvoid
4350 = build_function_type_list (void_type_node,
4351 V16QI_type_node, integer_type_node,
4352 pvoid_type_node, NULL_TREE);
4353 tree void_ftype_v8hi_int_pvoid
4354 = build_function_type_list (void_type_node,
4355 V8HI_type_node, integer_type_node,
4356 pvoid_type_node, NULL_TREE);
4358 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
4359 tree void_ftype_void
4360 = build_function_type (void_type_node, void_list_node);
4361 tree v8hi_ftype_void
4362 = build_function_type (V8HI_type_node, void_list_node);
4364 tree v4si_ftype_v4si_v4si
4365 = build_function_type_list (V4SI_type_node,
4366 V4SI_type_node, V4SI_type_node, NULL_TREE);
4367 tree v4sf_ftype_v4si_char
4368 = build_function_type_list (V4SF_type_node,
4369 V4SI_type_node, char_type_node, NULL_TREE);
4370 tree v4si_ftype_v4sf_char
4371 = build_function_type_list (V4SI_type_node,
4372 V4SF_type_node, char_type_node, NULL_TREE);
4373 tree v4si_ftype_v4si_char
4374 = build_function_type_list (V4SI_type_node,
4375 V4SI_type_node, char_type_node, NULL_TREE);
4376 tree v8hi_ftype_v8hi_char
4377 = build_function_type_list (V8HI_type_node,
4378 V8HI_type_node, char_type_node, NULL_TREE);
4379 tree v16qi_ftype_v16qi_char
4380 = build_function_type_list (V16QI_type_node,
4381 V16QI_type_node, char_type_node, NULL_TREE);
4382 tree v16qi_ftype_v16qi_v16qi_char
4383 = build_function_type_list (V16QI_type_node,
4384 V16QI_type_node, V16QI_type_node,
4385 char_type_node, NULL_TREE);
4386 tree v8hi_ftype_v8hi_v8hi_char
4387 = build_function_type_list (V8HI_type_node,
4388 V8HI_type_node, V8HI_type_node,
4389 char_type_node, NULL_TREE);
4390 tree v4si_ftype_v4si_v4si_char
4391 = build_function_type_list (V4SI_type_node,
4392 V4SI_type_node, V4SI_type_node,
4393 char_type_node, NULL_TREE);
4394 tree v4sf_ftype_v4sf_v4sf_char
4395 = build_function_type_list (V4SF_type_node,
4396 V4SF_type_node, V4SF_type_node,
4397 char_type_node, NULL_TREE);
4398 tree v4sf_ftype_v4sf_v4sf
4399 = build_function_type_list (V4SF_type_node,
4400 V4SF_type_node, V4SF_type_node, NULL_TREE);
4401 tree v4sf_ftype_v4sf_v4sf_v4si
4402 = build_function_type_list (V4SF_type_node,
4403 V4SF_type_node, V4SF_type_node,
4404 V4SI_type_node, NULL_TREE);
4405 tree v4sf_ftype_v4sf_v4sf_v4sf
4406 = build_function_type_list (V4SF_type_node,
4407 V4SF_type_node, V4SF_type_node,
4408 V4SF_type_node, NULL_TREE);
4409 tree v4si_ftype_v4si_v4si_v4si
4410 = build_function_type_list (V4SI_type_node,
4411 V4SI_type_node, V4SI_type_node,
4412 V4SI_type_node, NULL_TREE);
4413 tree v8hi_ftype_v8hi_v8hi
4414 = build_function_type_list (V8HI_type_node,
4415 V8HI_type_node, V8HI_type_node, NULL_TREE);
4416 tree v8hi_ftype_v8hi_v8hi_v8hi
4417 = build_function_type_list (V8HI_type_node,
4418 V8HI_type_node, V8HI_type_node,
4419 V8HI_type_node, NULL_TREE);
4420 tree v4si_ftype_v8hi_v8hi_v4si
4421 = build_function_type_list (V4SI_type_node,
4422 V8HI_type_node, V8HI_type_node,
4423 V4SI_type_node, NULL_TREE);
4424 tree v4si_ftype_v16qi_v16qi_v4si
4425 = build_function_type_list (V4SI_type_node,
4426 V16QI_type_node, V16QI_type_node,
4427 V4SI_type_node, NULL_TREE);
4428 tree v16qi_ftype_v16qi_v16qi
4429 = build_function_type_list (V16QI_type_node,
4430 V16QI_type_node, V16QI_type_node, NULL_TREE);
4431 tree v4si_ftype_v4sf_v4sf
4432 = build_function_type_list (V4SI_type_node,
4433 V4SF_type_node, V4SF_type_node, NULL_TREE);
4434 tree v4si_ftype_v4si
4435 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
4436 tree v8hi_ftype_v8hi
4437 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
4438 tree v16qi_ftype_v16qi
4439 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
4440 tree v8hi_ftype_v16qi_v16qi
4441 = build_function_type_list (V8HI_type_node,
4442 V16QI_type_node V16QI_type_node, NULL_TREE);
4443 tree v4si_ftype_v8hi_v8hi
4444 = build_function_type_list (V4SI_type_node,
4445 V8HI_type_node, V8HI_type_node, NULL_TREE);
4446 tree v8hi_ftype_v4si_v4si
4447 = build_function_type_list (V8HI_type_node,
4448 V4SI_type_node, V4SI_type_node, NULL_TREE);
4449 tree v16qi_ftype_v8hi_v8hi
4450 = build_function_type_list (V16QI_type_node,
4451 V8HI_type_node, V8HI_type_node, NULL_TREE);
4452 tree v4si_ftype_v16qi_v4si
4453 = build_function_type_list (V4SI_type_node,
4454 V16QI_type_node, V4SI_type_node, NULL_TREE);
4455 tree v4si_ftype_v16qi_v16qi
4456 = build_function_type_list (V4SI_type_node,
4457 V16QI_type_node, V16QI_type_node, NULL_TREE);
4458 tree v4si_ftype_v8hi_v4si
4459 = build_function_type_list (V4SI_type_node,
4460 V8HI_type_node, V4SI_type_node, NULL_TREE);
4461 tree v4si_ftype_v8hi
4462 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
4463 tree int_ftype_v4si_v4si
4464 = build_function_type_list (integer_type_node,
4465 V4SI_type_node, V4SI_type_node, NULL_TREE);
4466 tree int_ftype_v4sf_v4sf
4467 = build_function_type_list (integer_type_node,
4468 V4SF_type_node, V4SF_type_node, NULL_TREE);
4469 tree int_ftype_v16qi_v16qi
4470 = build_function_type_list (integer_type_node,
4471 V16QI_type_node, V16QI_type_node, NULL_TREE);
4472 tree int_ftype_int_v4si_v4si
4473 = build_function_type_list (integer_type_node,
4474 integer_type_node, V4SI_type_node,
4475 V4SI_type_node, NULL_TREE);
4476 tree int_ftype_int_v4sf_v4sf
4477 = build_function_type_list (integer_type_node,
4478 integer_type_node, V4SF_type_node,
4479 V4SF_type_node, NULL_TREE);
4480 tree int_ftype_int_v8hi_v8hi
4481 = build_function_type_list (integer_type_node,
4482 integer_type_node, V8HI_type_node,
4483 V8HI_type_node, NULL_TREE);
4484 tree int_ftype_int_v16qi_v16qi
4485 = build_function_type_list (integer_type_node,
4486 integer_type_node, V16QI_type_node,
4487 V16QI_type_node, NULL_TREE);
4488 tree v16qi_ftype_int_pvoid
4489 = build_function_type_list (V16QI_type_node,
4490 integer_type_node, pvoid_type_node, NULL_TREE);
4491 tree v4si_ftype_int_pvoid
4492 = build_function_type_list (V4SI_type_node,
4493 integer_type_node, pvoid_type_node, NULL_TREE);
4494 tree v8hi_ftype_int_pvoid
4495 = build_function_type_list (V8HI_type_node,
4496 integer_type_node, pvoid_type_node, NULL_TREE);
4497 tree int_ftype_v8hi_v8hi
4498 = build_function_type_list (integer_type_node,
4499 V8HI_type_node, V8HI_type_node, NULL_TREE);
4501 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4502 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4503 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4504 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4505 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4506 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4507 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4508 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4509 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4510 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4511 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4512 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4513 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4514 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4515 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4516 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4517 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4518 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4519 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4520 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4521 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4522 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4523 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4524 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4526 /* Add the simple ternary operators. */
4527 d = (struct builtin_description *) bdesc_3arg;
4528 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4531 enum machine_mode mode0, mode1, mode2, mode3;
4537 mode0 = insn_data[d->icode].operand[0].mode;
4538 mode1 = insn_data[d->icode].operand[1].mode;
4539 mode2 = insn_data[d->icode].operand[2].mode;
4540 mode3 = insn_data[d->icode].operand[3].mode;
4542 /* When all four are of the same mode. */
4543 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4548 type = v4si_ftype_v4si_v4si_v4si;
4551 type = v4sf_ftype_v4sf_v4sf_v4sf;
4554 type = v8hi_ftype_v8hi_v8hi_v8hi;
4557 type = v16qi_ftype_v16qi_v16qi_v16qi;
4563 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4568 type = v4si_ftype_v4si_v4si_v16qi;
4571 type = v4sf_ftype_v4sf_v4sf_v16qi;
4574 type = v8hi_ftype_v8hi_v8hi_v16qi;
4577 type = v16qi_ftype_v16qi_v16qi_v16qi;
4583 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4584 && mode3 == V4SImode)
4585 type = v4si_ftype_v16qi_v16qi_v4si;
4586 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4587 && mode3 == V4SImode)
4588 type = v4si_ftype_v8hi_v8hi_v4si;
4589 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4590 && mode3 == V4SImode)
4591 type = v4sf_ftype_v4sf_v4sf_v4si;
4593 /* vchar, vchar, vchar, 4 bit literal. */
4594 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4596 type = v16qi_ftype_v16qi_v16qi_char;
4598 /* vshort, vshort, vshort, 4 bit literal. */
4599 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4601 type = v8hi_ftype_v8hi_v8hi_char;
4603 /* vint, vint, vint, 4 bit literal. */
4604 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4606 type = v4si_ftype_v4si_v4si_char;
4608 /* vfloat, vfloat, vfloat, 4 bit literal. */
4609 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4611 type = v4sf_ftype_v4sf_v4sf_char;
4616 def_builtin (d->mask, d->name, type, d->code);
4619 /* Add the DST variants. */
4620 d = (struct builtin_description *) bdesc_dst;
4621 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4622 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4624 /* Initialize the predicates. */
4625 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4626 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4628 enum machine_mode mode1;
4631 mode1 = insn_data[dp->icode].operand[1].mode;
4636 type = int_ftype_int_v4si_v4si;
4639 type = int_ftype_int_v8hi_v8hi;
4642 type = int_ftype_int_v16qi_v16qi;
4645 type = int_ftype_int_v4sf_v4sf;
4651 def_builtin (dp->mask, dp->name, type, dp->code);
4654 /* Add the simple binary operators. */
4655 d = (struct builtin_description *) bdesc_2arg;
4656 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4658 enum machine_mode mode0, mode1, mode2;
4664 mode0 = insn_data[d->icode].operand[0].mode;
4665 mode1 = insn_data[d->icode].operand[1].mode;
4666 mode2 = insn_data[d->icode].operand[2].mode;
4668 /* When all three operands are of the same mode. */
4669 if (mode0 == mode1 && mode1 == mode2)
4674 type = v4sf_ftype_v4sf_v4sf;
4677 type = v4si_ftype_v4si_v4si;
4680 type = v16qi_ftype_v16qi_v16qi;
4683 type = v8hi_ftype_v8hi_v8hi;
4690 /* A few other combos we really don't want to do manually. */
4692 /* vint, vfloat, vfloat. */
4693 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4694 type = v4si_ftype_v4sf_v4sf;
4696 /* vshort, vchar, vchar. */
4697 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4698 type = v8hi_ftype_v16qi_v16qi;
4700 /* vint, vshort, vshort. */
4701 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4702 type = v4si_ftype_v8hi_v8hi;
4704 /* vshort, vint, vint. */
4705 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4706 type = v8hi_ftype_v4si_v4si;
4708 /* vchar, vshort, vshort. */
4709 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4710 type = v16qi_ftype_v8hi_v8hi;
4712 /* vint, vchar, vint. */
4713 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4714 type = v4si_ftype_v16qi_v4si;
4716 /* vint, vchar, vchar. */
4717 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4718 type = v4si_ftype_v16qi_v16qi;
4720 /* vint, vshort, vint. */
4721 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4722 type = v4si_ftype_v8hi_v4si;
4724 /* vint, vint, 5 bit literal. */
4725 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4726 type = v4si_ftype_v4si_char;
4728 /* vshort, vshort, 5 bit literal. */
4729 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4730 type = v8hi_ftype_v8hi_char;
4732 /* vchar, vchar, 5 bit literal. */
4733 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4734 type = v16qi_ftype_v16qi_char;
4736 /* vfloat, vint, 5 bit literal. */
4737 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4738 type = v4sf_ftype_v4si_char;
4740 /* vint, vfloat, 5 bit literal. */
4741 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4742 type = v4si_ftype_v4sf_char;
4745 else if (mode0 == SImode)
4750 type = int_ftype_v4si_v4si;
4753 type = int_ftype_v4sf_v4sf;
4756 type = int_ftype_v16qi_v16qi;
4759 type = int_ftype_v8hi_v8hi;
4769 def_builtin (d->mask, d->name, type, d->code);
4772 /* Initialize the abs* operators. */
4773 d = (struct builtin_description *) bdesc_abs;
4774 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4776 enum machine_mode mode0;
4779 mode0 = insn_data[d->icode].operand[0].mode;
4784 type = v4si_ftype_v4si;
4787 type = v8hi_ftype_v8hi;
4790 type = v16qi_ftype_v16qi;
4793 type = v4sf_ftype_v4sf;
4799 def_builtin (d->mask, d->name, type, d->code);
4802 /* Add the simple unary operators. */
4803 d = (struct builtin_description *) bdesc_1arg;
4804 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4806 enum machine_mode mode0, mode1;
4812 mode0 = insn_data[d->icode].operand[0].mode;
4813 mode1 = insn_data[d->icode].operand[1].mode;
4815 if (mode0 == V4SImode && mode1 == QImode)
4816 type = v4si_ftype_char;
4817 else if (mode0 == V8HImode && mode1 == QImode)
4818 type = v8hi_ftype_char;
4819 else if (mode0 == V16QImode && mode1 == QImode)
4820 type = v16qi_ftype_char;
4821 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4822 type = v4sf_ftype_v4sf;
4823 else if (mode0 == V8HImode && mode1 == V16QImode)
4824 type = v8hi_ftype_v16qi;
4825 else if (mode0 == V4SImode && mode1 == V8HImode)
4826 type = v4si_ftype_v8hi;
4830 def_builtin (d->mask, d->name, type, d->code);
4835 /* Generate a memory reference for expand_block_move, copying volatile,
4836 and other bits from an original memory reference. */
4839 expand_block_move_mem (mode, addr, orig_mem)
4840 enum machine_mode mode;
4844 rtx mem = gen_rtx_MEM (mode, addr);
4846 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4850 /* Expand a block move operation, and return 1 if successful. Return 0
4851 if we should let the compiler generate normal code.
4853 operands[0] is the destination
4854 operands[1] is the source
4855 operands[2] is the length
4856 operands[3] is the alignment */
4858 #define MAX_MOVE_REG 4
4861 expand_block_move (operands)
4864 rtx orig_dest = operands[0];
4865 rtx orig_src = operands[1];
4866 rtx bytes_rtx = operands[2];
4867 rtx align_rtx = operands[3];
4868 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
4879 rtx stores[MAX_MOVE_REG];
4882 /* If this is not a fixed size move, just call memcpy */
4886 /* If this is not a fixed size alignment, abort */
4887 if (GET_CODE (align_rtx) != CONST_INT)
4889 align = INTVAL (align_rtx);
4891 /* Anything to move? */
4892 bytes = INTVAL (bytes_rtx);
4896 /* store_one_arg depends on expand_block_move to handle at least the size of
4897 reg_parm_stack_space. */
4898 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
4901 /* Move the address into scratch registers. */
4902 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
4903 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
4905 if (TARGET_STRING) /* string instructions are available */
4907 for ( ; bytes > 0; bytes -= move_bytes)
4909 if (bytes > 24 /* move up to 32 bytes at a time */
4917 && ! fixed_regs[12])
4919 move_bytes = (bytes > 32) ? 32 : bytes;
4920 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
4923 expand_block_move_mem (BLKmode,
4926 GEN_INT ((move_bytes == 32)
4930 else if (bytes > 16 /* move up to 24 bytes at a time */
4936 && ! fixed_regs[10])
4938 move_bytes = (bytes > 24) ? 24 : bytes;
4939 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
4942 expand_block_move_mem (BLKmode,
4945 GEN_INT (move_bytes),
4948 else if (bytes > 8 /* move up to 16 bytes at a time */
4954 move_bytes = (bytes > 16) ? 16 : bytes;
4955 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
4958 expand_block_move_mem (BLKmode,
4961 GEN_INT (move_bytes),
4964 else if (bytes >= 8 && TARGET_POWERPC64
4965 /* 64-bit loads and stores require word-aligned
4967 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4970 tmp_reg = gen_reg_rtx (DImode);
4971 emit_move_insn (tmp_reg,
4972 expand_block_move_mem (DImode,
4973 src_reg, orig_src));
4974 emit_move_insn (expand_block_move_mem (DImode,
4975 dest_reg, orig_dest),
4978 else if (bytes > 4 && !TARGET_POWERPC64)
4979 { /* move up to 8 bytes at a time */
4980 move_bytes = (bytes > 8) ? 8 : bytes;
4981 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
4984 expand_block_move_mem (BLKmode,
4987 GEN_INT (move_bytes),
4990 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
4991 { /* move 4 bytes */
4993 tmp_reg = gen_reg_rtx (SImode);
4994 emit_move_insn (tmp_reg,
4995 expand_block_move_mem (SImode,
4996 src_reg, orig_src));
4997 emit_move_insn (expand_block_move_mem (SImode,
4998 dest_reg, orig_dest),
5001 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5002 { /* move 2 bytes */
5004 tmp_reg = gen_reg_rtx (HImode);
5005 emit_move_insn (tmp_reg,
5006 expand_block_move_mem (HImode,
5007 src_reg, orig_src));
5008 emit_move_insn (expand_block_move_mem (HImode,
5009 dest_reg, orig_dest),
5012 else if (bytes == 1) /* move 1 byte */
5015 tmp_reg = gen_reg_rtx (QImode);
5016 emit_move_insn (tmp_reg,
5017 expand_block_move_mem (QImode,
5018 src_reg, orig_src));
5019 emit_move_insn (expand_block_move_mem (QImode,
5020 dest_reg, orig_dest),
5024 { /* move up to 4 bytes at a time */
5025 move_bytes = (bytes > 4) ? 4 : bytes;
5026 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
5029 expand_block_move_mem (BLKmode,
5032 GEN_INT (move_bytes),
5036 if (bytes > move_bytes)
5038 if (! TARGET_POWERPC64)
5040 emit_insn (gen_addsi3 (src_reg, src_reg,
5041 GEN_INT (move_bytes)));
5042 emit_insn (gen_addsi3 (dest_reg, dest_reg,
5043 GEN_INT (move_bytes)));
5047 emit_insn (gen_adddi3 (src_reg, src_reg,
5048 GEN_INT (move_bytes)));
5049 emit_insn (gen_adddi3 (dest_reg, dest_reg,
5050 GEN_INT (move_bytes)));
5056 else /* string instructions not available */
5058 num_reg = offset = 0;
5059 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
5061 /* Calculate the correct offset for src/dest */
5065 dest_addr = dest_reg;
5069 src_addr = plus_constant (src_reg, offset);
5070 dest_addr = plus_constant (dest_reg, offset);
5073 /* Generate the appropriate load and store, saving the stores
5075 if (bytes >= 8 && TARGET_POWERPC64
5076 /* 64-bit loads and stores require word-aligned
5078 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5081 tmp_reg = gen_reg_rtx (DImode);
5082 emit_insn (gen_movdi (tmp_reg,
5083 expand_block_move_mem (DImode,
5086 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
5091 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5094 tmp_reg = gen_reg_rtx (SImode);
5095 emit_insn (gen_movsi (tmp_reg,
5096 expand_block_move_mem (SImode,
5099 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5104 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5107 tmp_reg = gen_reg_rtx (HImode);
5108 emit_insn (gen_movhi (tmp_reg,
5109 expand_block_move_mem (HImode,
5112 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
5120 tmp_reg = gen_reg_rtx (QImode);
5121 emit_insn (gen_movqi (tmp_reg,
5122 expand_block_move_mem (QImode,
5125 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
5131 if (num_reg >= MAX_MOVE_REG)
5133 for (i = 0; i < num_reg; i++)
5134 emit_insn (stores[i]);
5139 for (i = 0; i < num_reg; i++)
5140 emit_insn (stores[i]);
5147 /* Return 1 if OP is a load multiple operation. It is known to be a
5148 PARALLEL and the first section will be tested. */
5151 load_multiple_operation (op, mode)
5153 enum machine_mode mode ATTRIBUTE_UNUSED;
5155 int count = XVECLEN (op, 0);
5156 unsigned int dest_regno;
5160 /* Perform a quick check so we don't blow up below. */
5162 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5163 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5164 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5167 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5168 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5170 for (i = 1; i < count; i++)
5172 rtx elt = XVECEXP (op, 0, i);
5174 if (GET_CODE (elt) != SET
5175 || GET_CODE (SET_DEST (elt)) != REG
5176 || GET_MODE (SET_DEST (elt)) != SImode
5177 || REGNO (SET_DEST (elt)) != dest_regno + i
5178 || GET_CODE (SET_SRC (elt)) != MEM
5179 || GET_MODE (SET_SRC (elt)) != SImode
5180 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5181 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5182 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5183 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5190 /* Similar, but tests for store multiple. Here, the second vector element
5191 is a CLOBBER. It will be tested later. */
5194 store_multiple_operation (op, mode)
5196 enum machine_mode mode ATTRIBUTE_UNUSED;
5198 int count = XVECLEN (op, 0) - 1;
5199 unsigned int src_regno;
5203 /* Perform a quick check so we don't blow up below. */
5205 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5206 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5207 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5210 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5211 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5213 for (i = 1; i < count; i++)
5215 rtx elt = XVECEXP (op, 0, i + 1);
5217 if (GET_CODE (elt) != SET
5218 || GET_CODE (SET_SRC (elt)) != REG
5219 || GET_MODE (SET_SRC (elt)) != SImode
5220 || REGNO (SET_SRC (elt)) != src_regno + i
5221 || GET_CODE (SET_DEST (elt)) != MEM
5222 || GET_MODE (SET_DEST (elt)) != SImode
5223 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5224 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5225 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5226 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5233 /* Return 1 for a parallel vrsave operation. */
5236 vrsave_operation (op, mode)
5238 enum machine_mode mode ATTRIBUTE_UNUSED;
5240 int count = XVECLEN (op, 0);
5241 unsigned int dest_regno, src_regno;
5245 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5246 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5247 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5250 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5251 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5253 if (dest_regno != VRSAVE_REGNO
5254 && src_regno != VRSAVE_REGNO)
5257 for (i = 1; i < count; i++)
5259 rtx elt = XVECEXP (op, 0, i);
5261 if (GET_CODE (elt) != CLOBBER
5262 && GET_CODE (elt) != SET)
5269 /* Return 1 for an PARALLEL suitable for mtcrf. */
5272 mtcrf_operation (op, mode)
5274 enum machine_mode mode ATTRIBUTE_UNUSED;
5276 int count = XVECLEN (op, 0);
5280 /* Perform a quick check so we don't blow up below. */
5282 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5283 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5284 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5286 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5288 if (GET_CODE (src_reg) != REG
5289 || GET_MODE (src_reg) != SImode
5290 || ! INT_REGNO_P (REGNO (src_reg)))
5293 for (i = 0; i < count; i++)
5295 rtx exp = XVECEXP (op, 0, i);
5299 if (GET_CODE (exp) != SET
5300 || GET_CODE (SET_DEST (exp)) != REG
5301 || GET_MODE (SET_DEST (exp)) != CCmode
5302 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5304 unspec = SET_SRC (exp);
5305 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5307 if (GET_CODE (unspec) != UNSPEC
5308 || XINT (unspec, 1) != 20
5309 || XVECLEN (unspec, 0) != 2
5310 || XVECEXP (unspec, 0, 0) != src_reg
5311 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5312 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5318 /* Return 1 for an PARALLEL suitable for lmw. */
5321 lmw_operation (op, mode)
5323 enum machine_mode mode ATTRIBUTE_UNUSED;
5325 int count = XVECLEN (op, 0);
5326 unsigned int dest_regno;
5328 unsigned int base_regno;
5329 HOST_WIDE_INT offset;
5332 /* Perform a quick check so we don't blow up below. */
5334 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5335 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5336 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5339 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5340 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5343 || count != 32 - (int) dest_regno)
5346 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5349 base_regno = REGNO (src_addr);
5350 if (base_regno == 0)
5353 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5355 offset = INTVAL (XEXP (src_addr, 1));
5356 base_regno = REGNO (XEXP (src_addr, 0));
5361 for (i = 0; i < count; i++)
5363 rtx elt = XVECEXP (op, 0, i);
5366 HOST_WIDE_INT newoffset;
5368 if (GET_CODE (elt) != SET
5369 || GET_CODE (SET_DEST (elt)) != REG
5370 || GET_MODE (SET_DEST (elt)) != SImode
5371 || REGNO (SET_DEST (elt)) != dest_regno + i
5372 || GET_CODE (SET_SRC (elt)) != MEM
5373 || GET_MODE (SET_SRC (elt)) != SImode)
5375 newaddr = XEXP (SET_SRC (elt), 0);
5376 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5381 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5383 addr_reg = XEXP (newaddr, 0);
5384 newoffset = INTVAL (XEXP (newaddr, 1));
5388 if (REGNO (addr_reg) != base_regno
5389 || newoffset != offset + 4 * i)
5396 /* Return 1 for an PARALLEL suitable for stmw. */
5399 stmw_operation (op, mode)
5401 enum machine_mode mode ATTRIBUTE_UNUSED;
5403 int count = XVECLEN (op, 0);
5404 unsigned int src_regno;
5406 unsigned int base_regno;
5407 HOST_WIDE_INT offset;
5410 /* Perform a quick check so we don't blow up below. */
5412 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5413 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5414 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5417 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5418 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5421 || count != 32 - (int) src_regno)
5424 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5427 base_regno = REGNO (dest_addr);
5428 if (base_regno == 0)
5431 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5433 offset = INTVAL (XEXP (dest_addr, 1));
5434 base_regno = REGNO (XEXP (dest_addr, 0));
5439 for (i = 0; i < count; i++)
5441 rtx elt = XVECEXP (op, 0, i);
5444 HOST_WIDE_INT newoffset;
5446 if (GET_CODE (elt) != SET
5447 || GET_CODE (SET_SRC (elt)) != REG
5448 || GET_MODE (SET_SRC (elt)) != SImode
5449 || REGNO (SET_SRC (elt)) != src_regno + i
5450 || GET_CODE (SET_DEST (elt)) != MEM
5451 || GET_MODE (SET_DEST (elt)) != SImode)
5453 newaddr = XEXP (SET_DEST (elt), 0);
5454 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5459 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5461 addr_reg = XEXP (newaddr, 0);
5462 newoffset = INTVAL (XEXP (newaddr, 1));
5466 if (REGNO (addr_reg) != base_regno
5467 || newoffset != offset + 4 * i)
5474 /* A validation routine: say whether CODE, a condition code, and MODE
5475 match. The other alternatives either don't make sense or should
5476 never be generated. */
5479 validate_condition_mode (code, mode)
5481 enum machine_mode mode;
5483 if (GET_RTX_CLASS (code) != '<'
5484 || GET_MODE_CLASS (mode) != MODE_CC)
5487 /* These don't make sense. */
5488 if ((code == GT || code == LT || code == GE || code == LE)
5489 && mode == CCUNSmode)
5492 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5493 && mode != CCUNSmode)
5496 if (mode != CCFPmode
5497 && (code == ORDERED || code == UNORDERED
5498 || code == UNEQ || code == LTGT
5499 || code == UNGT || code == UNLT
5500 || code == UNGE || code == UNLE))
5503 /* These should never be generated except for
5504 flag_unsafe_math_optimizations. */
5505 if (mode == CCFPmode
5506 && ! flag_unsafe_math_optimizations
5507 && (code == LE || code == GE
5508 || code == UNEQ || code == LTGT
5509 || code == UNGT || code == UNLT))
5512 /* These are invalid; the information is not there. */
5513 if (mode == CCEQmode
5514 && code != EQ && code != NE)
5518 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5519 We only check the opcode against the mode of the CC value here. */
5522 branch_comparison_operator (op, mode)
5524 enum machine_mode mode ATTRIBUTE_UNUSED;
5526 enum rtx_code code = GET_CODE (op);
5527 enum machine_mode cc_mode;
5529 if (GET_RTX_CLASS (code) != '<')
5532 cc_mode = GET_MODE (XEXP (op, 0));
5533 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5536 validate_condition_mode (code, cc_mode);
5541 /* Return 1 if OP is a comparison operation that is valid for a branch
5542 insn and which is true if the corresponding bit in the CC register
5546 branch_positive_comparison_operator (op, mode)
5548 enum machine_mode mode;
5552 if (! branch_comparison_operator (op, mode))
5555 code = GET_CODE (op);
5556 return (code == EQ || code == LT || code == GT
5557 || code == LTU || code == GTU
5558 || code == UNORDERED);
5561 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5562 We check the opcode against the mode of the CC value and disallow EQ or
5563 NE comparisons for integers. */
5566 scc_comparison_operator (op, mode)
5568 enum machine_mode mode;
5570 enum rtx_code code = GET_CODE (op);
5571 enum machine_mode cc_mode;
5573 if (GET_MODE (op) != mode && mode != VOIDmode)
5576 if (GET_RTX_CLASS (code) != '<')
5579 cc_mode = GET_MODE (XEXP (op, 0));
5580 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5583 validate_condition_mode (code, cc_mode);
5585 if (code == NE && cc_mode != CCFPmode)
5592 trap_comparison_operator (op, mode)
5594 enum machine_mode mode;
5596 if (mode != VOIDmode && mode != GET_MODE (op))
5598 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5602 boolean_operator (op, mode)
5604 enum machine_mode mode ATTRIBUTE_UNUSED;
5606 enum rtx_code code = GET_CODE (op);
5607 return (code == AND || code == IOR || code == XOR);
5611 boolean_or_operator (op, mode)
5613 enum machine_mode mode ATTRIBUTE_UNUSED;
5615 enum rtx_code code = GET_CODE (op);
5616 return (code == IOR || code == XOR);
5620 min_max_operator (op, mode)
5622 enum machine_mode mode ATTRIBUTE_UNUSED;
5624 enum rtx_code code = GET_CODE (op);
5625 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5628 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5629 mask required to convert the result of a rotate insn into a shift
5630 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
5633 includes_lshift_p (shiftop, andop)
5637 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5639 shift_mask <<= INTVAL (shiftop);
5641 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5644 /* Similar, but for right shift. */
5647 includes_rshift_p (shiftop, andop)
5651 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5653 shift_mask >>= INTVAL (shiftop);
5655 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5658 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5659 to perform a left shift. It must have exactly SHIFTOP least
5660 signifigant 0's, then one or more 1's, then zero or more 0's. */
5663 includes_rldic_lshift_p (shiftop, andop)
5667 if (GET_CODE (andop) == CONST_INT)
5669 HOST_WIDE_INT c, lsb, shift_mask;
5672 if (c == 0 || c == ~0)
5676 shift_mask <<= INTVAL (shiftop);
5678 /* Find the least signifigant one bit. */
5681 /* It must coincide with the LSB of the shift mask. */
5682 if (-lsb != shift_mask)
5685 /* Invert to look for the next transition (if any). */
5688 /* Remove the low group of ones (originally low group of zeros). */
5691 /* Again find the lsb, and check we have all 1's above. */
5695 else if (GET_CODE (andop) == CONST_DOUBLE
5696 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5698 HOST_WIDE_INT low, high, lsb;
5699 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5701 low = CONST_DOUBLE_LOW (andop);
5702 if (HOST_BITS_PER_WIDE_INT < 64)
5703 high = CONST_DOUBLE_HIGH (andop);
5705 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5706 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
5709 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5711 shift_mask_high = ~0;
5712 if (INTVAL (shiftop) > 32)
5713 shift_mask_high <<= INTVAL (shiftop) - 32;
5717 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5724 return high == -lsb;
5727 shift_mask_low = ~0;
5728 shift_mask_low <<= INTVAL (shiftop);
5732 if (-lsb != shift_mask_low)
5735 if (HOST_BITS_PER_WIDE_INT < 64)
5740 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5743 return high == -lsb;
5747 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5753 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5754 to perform a left shift. It must have SHIFTOP or more least
5755 signifigant 0's, with the remainder of the word 1's. */
5758 includes_rldicr_lshift_p (shiftop, andop)
5762 if (GET_CODE (andop) == CONST_INT)
5764 HOST_WIDE_INT c, lsb, shift_mask;
5767 shift_mask <<= INTVAL (shiftop);
5770 /* Find the least signifigant one bit. */
5773 /* It must be covered by the shift mask.
5774 This test also rejects c == 0. */
5775 if ((lsb & shift_mask) == 0)
5778 /* Check we have all 1's above the transition, and reject all 1's. */
5779 return c == -lsb && lsb != 1;
5781 else if (GET_CODE (andop) == CONST_DOUBLE
5782 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5784 HOST_WIDE_INT low, lsb, shift_mask_low;
5786 low = CONST_DOUBLE_LOW (andop);
5788 if (HOST_BITS_PER_WIDE_INT < 64)
5790 HOST_WIDE_INT high, shift_mask_high;
5792 high = CONST_DOUBLE_HIGH (andop);
5796 shift_mask_high = ~0;
5797 if (INTVAL (shiftop) > 32)
5798 shift_mask_high <<= INTVAL (shiftop) - 32;
5802 if ((lsb & shift_mask_high) == 0)
5805 return high == -lsb;
5811 shift_mask_low = ~0;
5812 shift_mask_low <<= INTVAL (shiftop);
5816 if ((lsb & shift_mask_low) == 0)
5819 return low == -lsb && lsb != 1;
5825 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5826 for lfq and stfq insns.
5828 Note reg1 and reg2 *must* be hard registers. To be sure we will
5829 abort if we are passed pseudo registers. */
5832 registers_ok_for_quad_peep (reg1, reg2)
5835 /* We might have been passed a SUBREG. */
5836 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5839 return (REGNO (reg1) == REGNO (reg2) - 1);
5842 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5843 addr1 and addr2 must be in consecutive memory locations
5844 (addr2 == addr1 + 8). */
5847 addrs_ok_for_quad_peep (addr1, addr2)
5854 /* Extract an offset (if used) from the first addr. */
5855 if (GET_CODE (addr1) == PLUS)
5857 /* If not a REG, return zero. */
5858 if (GET_CODE (XEXP (addr1, 0)) != REG)
5862 reg1 = REGNO (XEXP (addr1, 0));
5863 /* The offset must be constant! */
5864 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5866 offset1 = INTVAL (XEXP (addr1, 1));
5869 else if (GET_CODE (addr1) != REG)
5873 reg1 = REGNO (addr1);
5874 /* This was a simple (mem (reg)) expression. Offset is 0. */
5878 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5879 if (GET_CODE (addr2) != PLUS)
5882 if (GET_CODE (XEXP (addr2, 0)) != REG
5883 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5886 if (reg1 != REGNO (XEXP (addr2, 0)))
5889 /* The offset for the second addr must be 8 more than the first addr. */
5890 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5893 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5898 /* Return the register class of a scratch register needed to copy IN into
5899 or out of a register in CLASS in MODE. If it can be done directly,
5900 NO_REGS is returned. */
5903 secondary_reload_class (class, mode, in)
5904 enum reg_class class;
5905 enum machine_mode mode ATTRIBUTE_UNUSED;
5910 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
5912 /* We cannot copy a symbolic operand directly into anything
5913 other than BASE_REGS for TARGET_ELF. So indicate that a
5914 register from BASE_REGS is needed as an intermediate
5917 On Darwin, pic addresses require a load from memory, which
5918 needs a base register. */
5919 if (class != BASE_REGS
5920 && (GET_CODE (in) == SYMBOL_REF
5921 || GET_CODE (in) == HIGH
5922 || GET_CODE (in) == LABEL_REF
5923 || GET_CODE (in) == CONST))
5927 if (GET_CODE (in) == REG)
5930 if (regno >= FIRST_PSEUDO_REGISTER)
5932 regno = true_regnum (in);
5933 if (regno >= FIRST_PSEUDO_REGISTER)
5937 else if (GET_CODE (in) == SUBREG)
5939 regno = true_regnum (in);
5940 if (regno >= FIRST_PSEUDO_REGISTER)
5946 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
5948 if (class == GENERAL_REGS || class == BASE_REGS
5949 || (regno >= 0 && INT_REGNO_P (regno)))
5952 /* Constants, memory, and FP registers can go into FP registers. */
5953 if ((regno == -1 || FP_REGNO_P (regno))
5954 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
5957 /* Memory, and AltiVec registers can go into AltiVec registers. */
5958 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
5959 && class == ALTIVEC_REGS)
5962 /* We can copy among the CR registers. */
5963 if ((class == CR_REGS || class == CR0_REGS)
5964 && regno >= 0 && CR_REGNO_P (regno))
5967 /* Otherwise, we need GENERAL_REGS. */
5968 return GENERAL_REGS;
5971 /* Given a comparison operation, return the bit number in CCR to test. We
5972 know this is a valid comparison.
5974 SCC_P is 1 if this is for an scc. That means that %D will have been
5975 used instead of %C, so the bits will be in different places.
5977 Return -1 if OP isn't a valid comparison for some reason. */
5984 enum rtx_code code = GET_CODE (op);
5985 enum machine_mode cc_mode;
5990 if (GET_RTX_CLASS (code) != '<')
5995 if (GET_CODE (reg) != REG
5996 || ! CR_REGNO_P (REGNO (reg)))
5999 cc_mode = GET_MODE (reg);
6000 cc_regnum = REGNO (reg);
6001 base_bit = 4 * (cc_regnum - CR0_REGNO);
6003 validate_condition_mode (code, cc_mode);
6008 return scc_p ? base_bit + 3 : base_bit + 2;
6010 return base_bit + 2;
6011 case GT: case GTU: case UNLE:
6012 return base_bit + 1;
6013 case LT: case LTU: case UNGE:
6015 case ORDERED: case UNORDERED:
6016 return base_bit + 3;
6019 /* If scc, we will have done a cror to put the bit in the
6020 unordered position. So test that bit. For integer, this is ! LT
6021 unless this is an scc insn. */
6022 return scc_p ? base_bit + 3 : base_bit;
6025 return scc_p ? base_bit + 3 : base_bit + 1;
6032 /* Return the GOT register. */
6035 rs6000_got_register (value)
6036 rtx value ATTRIBUTE_UNUSED;
6038 /* The second flow pass currently (June 1999) can't update
6039 regs_ever_live without disturbing other parts of the compiler, so
6040 update it here to make the prolog/epilogue code happy. */
6041 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
6042 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
6044 current_function_uses_pic_offset_table = 1;
6046 return pic_offset_table_rtx;
6049 /* Function to init struct machine_function.
6050 This will be called, via a pointer variable,
6051 from push_function_context. */
6053 static struct machine_function *
6054 rs6000_init_machine_status ()
6056 return ggc_alloc_cleared (sizeof (machine_function));
6059 /* Print an operand. Recognize special options, documented below. */
6062 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6063 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6065 #define SMALL_DATA_RELOC "sda21"
6066 #define SMALL_DATA_REG 0
6070 print_operand (file, x, code)
6078 /* These macros test for integers and extract the low-order bits. */
6080 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6081 && GET_MODE (X) == VOIDmode)
6083 #define INT_LOWPART(X) \
6084 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6089 /* Write out an instruction after the call which may be replaced
6090 with glue code by the loader. This depends on the AIX version. */
6091 asm_fprintf (file, RS6000_CALL_GLUE);
6094 /* %a is output_address. */
6097 /* If X is a constant integer whose low-order 5 bits are zero,
6098 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6099 in the AIX assembler where "sri" with a zero shift count
6100 writes a trash instruction. */
6101 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6108 /* If constant, low-order 16 bits of constant, unsigned.
6109 Otherwise, write normally. */
6111 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6113 print_operand (file, x, 0);
6117 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6118 for 64-bit mask direction. */
6119 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6122 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6126 /* There used to be a comment for 'C' reading "This is an
6127 optional cror needed for certain floating-point
6128 comparisons. Otherwise write nothing." */
6130 /* Similar, except that this is for an scc, so we must be able to
6131 encode the test in a single bit that is one. We do the above
6132 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6133 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6134 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6136 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6138 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6140 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6143 else if (GET_CODE (x) == NE)
6145 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6147 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6148 base_bit + 2, base_bit + 2);
6153 /* X is a CR register. Print the number of the EQ bit of the CR */
6154 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6155 output_operand_lossage ("invalid %%E value");
6157 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6161 /* X is a CR register. Print the shift count needed to move it
6162 to the high-order four bits. */
6163 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6164 output_operand_lossage ("invalid %%f value");
6166 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6170 /* Similar, but print the count for the rotate in the opposite
6172 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6173 output_operand_lossage ("invalid %%F value");
6175 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6179 /* X is a constant integer. If it is negative, print "m",
6180 otherwise print "z". This is to make a aze or ame insn. */
6181 if (GET_CODE (x) != CONST_INT)
6182 output_operand_lossage ("invalid %%G value");
6183 else if (INTVAL (x) >= 0)
6190 /* If constant, output low-order five bits. Otherwise, write
6193 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6195 print_operand (file, x, 0);
6199 /* If constant, output low-order six bits. Otherwise, write
6202 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6204 print_operand (file, x, 0);
6208 /* Print `i' if this is a constant, else nothing. */
6214 /* Write the bit number in CCR for jump. */
6217 output_operand_lossage ("invalid %%j code");
6219 fprintf (file, "%d", i);
6223 /* Similar, but add one for shift count in rlinm for scc and pass
6224 scc flag to `ccr_bit'. */
6227 output_operand_lossage ("invalid %%J code");
6229 /* If we want bit 31, write a shift count of zero, not 32. */
6230 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6234 /* X must be a constant. Write the 1's complement of the
6237 output_operand_lossage ("invalid %%k value");
6239 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6243 /* X must be a symbolic constant on ELF. Write an
6244 expression suitable for an 'addi' that adds in the low 16
6246 if (GET_CODE (x) != CONST)
6248 print_operand_address (file, x);
6253 if (GET_CODE (XEXP (x, 0)) != PLUS
6254 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6255 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6256 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6257 output_operand_lossage ("invalid %%K value");
6258 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6260 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6264 /* %l is output_asm_label. */
6267 /* Write second word of DImode or DFmode reference. Works on register
6268 or non-indexed memory only. */
6269 if (GET_CODE (x) == REG)
6270 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6271 else if (GET_CODE (x) == MEM)
6273 /* Handle possible auto-increment. Since it is pre-increment and
6274 we have already done it, we can just use an offset of word. */
6275 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6276 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6277 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6280 output_address (XEXP (adjust_address_nv (x, SImode,
6284 if (small_data_operand (x, GET_MODE (x)))
6285 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6286 reg_names[SMALL_DATA_REG]);
6291 /* MB value for a mask operand. */
6292 if (! mask_operand (x, SImode))
6293 output_operand_lossage ("invalid %%m value");
6295 val = INT_LOWPART (x);
6297 /* If the high bit is set and the low bit is not, the value is zero.
6298 If the high bit is zero, the value is the first 1 bit we find from
6300 if ((val & 0x80000000) && ((val & 1) == 0))
6305 else if ((val & 0x80000000) == 0)
6307 for (i = 1; i < 32; i++)
6308 if ((val <<= 1) & 0x80000000)
6310 fprintf (file, "%d", i);
6314 /* Otherwise, look for the first 0 bit from the right. The result is its
6315 number plus 1. We know the low-order bit is one. */
6316 for (i = 0; i < 32; i++)
6317 if (((val >>= 1) & 1) == 0)
6320 /* If we ended in ...01, i would be 0. The correct value is 31, so
6322 fprintf (file, "%d", 31 - i);
6326 /* ME value for a mask operand. */
6327 if (! mask_operand (x, SImode))
6328 output_operand_lossage ("invalid %%M value");
6330 val = INT_LOWPART (x);
6332 /* If the low bit is set and the high bit is not, the value is 31.
6333 If the low bit is zero, the value is the first 1 bit we find from
6335 if ((val & 1) && ((val & 0x80000000) == 0))
6340 else if ((val & 1) == 0)
6342 for (i = 0; i < 32; i++)
6343 if ((val >>= 1) & 1)
6346 /* If we had ....10, i would be 0. The result should be
6347 30, so we need 30 - i. */
6348 fprintf (file, "%d", 30 - i);
6352 /* Otherwise, look for the first 0 bit from the left. The result is its
6353 number minus 1. We know the high-order bit is one. */
6354 for (i = 0; i < 32; i++)
6355 if (((val <<= 1) & 0x80000000) == 0)
6358 fprintf (file, "%d", i);
6361 /* %n outputs the negative of its operand. */
6364 /* Write the number of elements in the vector times 4. */
6365 if (GET_CODE (x) != PARALLEL)
6366 output_operand_lossage ("invalid %%N value");
6368 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6372 /* Similar, but subtract 1 first. */
6373 if (GET_CODE (x) != PARALLEL)
6374 output_operand_lossage ("invalid %%O value");
6376 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6380 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6382 || INT_LOWPART (x) < 0
6383 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6384 output_operand_lossage ("invalid %%p value");
6386 fprintf (file, "%d", i);
6390 /* The operand must be an indirect memory reference. The result
6391 is the register number. */
6392 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6393 || REGNO (XEXP (x, 0)) >= 32)
6394 output_operand_lossage ("invalid %%P value");
6396 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6400 /* This outputs the logical code corresponding to a boolean
6401 expression. The expression may have one or both operands
6402 negated (if one, only the first one). For condition register
6403 logical operations, it will also treat the negated
6404 CR codes as NOTs, but not handle NOTs of them. */
6406 const char *const *t = 0;
6408 enum rtx_code code = GET_CODE (x);
6409 static const char * const tbl[3][3] = {
6410 { "and", "andc", "nor" },
6411 { "or", "orc", "nand" },
6412 { "xor", "eqv", "xor" } };
6416 else if (code == IOR)
6418 else if (code == XOR)
6421 output_operand_lossage ("invalid %%q value");
6423 if (GET_CODE (XEXP (x, 0)) != NOT)
6427 if (GET_CODE (XEXP (x, 1)) == NOT)
6438 /* X is a CR register. Print the mask for `mtcrf'. */
6439 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6440 output_operand_lossage ("invalid %%R value");
6442 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6446 /* Low 5 bits of 32 - value */
6448 output_operand_lossage ("invalid %%s value");
6450 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6454 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6455 CONST_INT 32-bit mask is considered sign-extended so any
6456 transition must occur within the CONST_INT, not on the boundary. */
6457 if (! mask64_operand (x, DImode))
6458 output_operand_lossage ("invalid %%S value");
6460 val = INT_LOWPART (x);
6462 if (val & 1) /* Clear Left */
6464 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6465 if (!((val >>= 1) & 1))
6468 #if HOST_BITS_PER_WIDE_INT == 32
6469 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6471 val = CONST_DOUBLE_HIGH (x);
6476 for (i = 32; i < 64; i++)
6477 if (!((val >>= 1) & 1))
6481 /* i = index of last set bit from right
6482 mask begins at 63 - i from left */
6484 output_operand_lossage ("%%S computed all 1's mask");
6486 fprintf (file, "%d", 63 - i);
6489 else /* Clear Right */
6491 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6492 if ((val >>= 1) & 1)
6495 #if HOST_BITS_PER_WIDE_INT == 32
6496 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6498 val = CONST_DOUBLE_HIGH (x);
6500 if (val == (HOST_WIDE_INT) -1)
6503 for (i = 32; i < 64; i++)
6504 if ((val >>= 1) & 1)
6508 /* i = index of last clear bit from right
6509 mask ends at 62 - i from left */
6511 output_operand_lossage ("%%S computed all 0's mask");
6513 fprintf (file, "%d", 62 - i);
6518 /* Print the symbolic name of a branch target register. */
6519 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6520 && REGNO (x) != COUNT_REGISTER_REGNUM))
6521 output_operand_lossage ("invalid %%T value");
6522 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6523 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6525 fputs ("ctr", file);
6529 /* High-order 16 bits of constant for use in unsigned operand. */
6531 output_operand_lossage ("invalid %%u value");
6533 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6534 (INT_LOWPART (x) >> 16) & 0xffff);
6538 /* High-order 16 bits of constant for use in signed operand. */
6540 output_operand_lossage ("invalid %%v value");
6542 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6543 (INT_LOWPART (x) >> 16) & 0xffff);
6547 /* Print `u' if this has an auto-increment or auto-decrement. */
6548 if (GET_CODE (x) == MEM
6549 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6550 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6555 /* Print the trap code for this operand. */
6556 switch (GET_CODE (x))
6559 fputs ("eq", file); /* 4 */
6562 fputs ("ne", file); /* 24 */
6565 fputs ("lt", file); /* 16 */
6568 fputs ("le", file); /* 20 */
6571 fputs ("gt", file); /* 8 */
6574 fputs ("ge", file); /* 12 */
6577 fputs ("llt", file); /* 2 */
6580 fputs ("lle", file); /* 6 */
6583 fputs ("lgt", file); /* 1 */
6586 fputs ("lge", file); /* 5 */
6594 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6597 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6598 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6600 print_operand (file, x, 0);
6604 /* MB value for a PowerPC64 rldic operand. */
6605 val = (GET_CODE (x) == CONST_INT
6606 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6611 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6612 if ((val <<= 1) < 0)
6615 #if HOST_BITS_PER_WIDE_INT == 32
6616 if (GET_CODE (x) == CONST_INT && i >= 0)
6617 i += 32; /* zero-extend high-part was all 0's */
6618 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6620 val = CONST_DOUBLE_LOW (x);
6627 for ( ; i < 64; i++)
6628 if ((val <<= 1) < 0)
6633 fprintf (file, "%d", i + 1);
6637 if (GET_CODE (x) == MEM
6638 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6643 /* Like 'L', for third word of TImode */
6644 if (GET_CODE (x) == REG)
6645 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6646 else if (GET_CODE (x) == MEM)
6648 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6649 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6650 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6652 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6653 if (small_data_operand (x, GET_MODE (x)))
6654 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6655 reg_names[SMALL_DATA_REG]);
6660 /* X is a SYMBOL_REF. Write out the name preceded by a
6661 period and without any trailing data in brackets. Used for function
6662 names. If we are configured for System V (or the embedded ABI) on
6663 the PowerPC, do not emit the period, since those systems do not use
6664 TOCs and the like. */
6665 if (GET_CODE (x) != SYMBOL_REF)
6668 if (XSTR (x, 0)[0] != '.')
6670 switch (DEFAULT_ABI)
6680 case ABI_AIX_NODESC:
6686 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6688 assemble_name (file, XSTR (x, 0));
6693 /* Like 'L', for last word of TImode. */
6694 if (GET_CODE (x) == REG)
6695 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6696 else if (GET_CODE (x) == MEM)
6698 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6699 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6700 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6702 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6703 if (small_data_operand (x, GET_MODE (x)))
6704 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6705 reg_names[SMALL_DATA_REG]);
6709 /* Print AltiVec memory operand. */
6714 if (GET_CODE (x) != MEM)
6719 if (GET_CODE (tmp) == REG)
6720 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6721 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6723 if (REGNO (XEXP (tmp, 0)) == 0)
6724 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6725 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6727 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6728 reg_names[ REGNO (XEXP (tmp, 1)) ]);
6736 if (GET_CODE (x) == REG)
6737 fprintf (file, "%s", reg_names[REGNO (x)]);
6738 else if (GET_CODE (x) == MEM)
6740 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6741 know the width from the mode. */
6742 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6743 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6744 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6745 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6746 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6747 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6749 output_address (XEXP (x, 0));
6752 output_addr_const (file, x);
6756 output_operand_lossage ("invalid %%xn code");
6760 /* Print the address of an operand. */
6763 print_operand_address (file, x)
6767 if (GET_CODE (x) == REG)
6768 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6769 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6770 || GET_CODE (x) == LABEL_REF)
6772 output_addr_const (file, x);
6773 if (small_data_operand (x, GET_MODE (x)))
6774 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6775 reg_names[SMALL_DATA_REG]);
6776 else if (TARGET_TOC)
6779 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
6781 if (REGNO (XEXP (x, 0)) == 0)
6782 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6783 reg_names[ REGNO (XEXP (x, 0)) ]);
6785 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6786 reg_names[ REGNO (XEXP (x, 1)) ]);
6788 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
6790 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
6791 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6794 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6795 && CONSTANT_P (XEXP (x, 1)))
6797 output_addr_const (file, XEXP (x, 1));
6798 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6802 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6803 && CONSTANT_P (XEXP (x, 1)))
6805 fprintf (file, "lo16(");
6806 output_addr_const (file, XEXP (x, 1));
6807 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6810 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
6812 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
6814 rtx contains_minus = XEXP (x, 1);
6818 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6819 turn it into (sym) for output_addr_const. */
6820 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
6821 contains_minus = XEXP (contains_minus, 0);
6823 minus = XEXP (contains_minus, 0);
6824 symref = XEXP (minus, 0);
6825 XEXP (contains_minus, 0) = symref;
6830 name = XSTR (symref, 0);
6831 newname = alloca (strlen (name) + sizeof ("@toc"));
6832 strcpy (newname, name);
6833 strcat (newname, "@toc");
6834 XSTR (symref, 0) = newname;
6836 output_addr_const (file, XEXP (x, 1));
6838 XSTR (symref, 0) = name;
6839 XEXP (contains_minus, 0) = minus;
6842 output_addr_const (file, XEXP (x, 1));
6844 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6850 /* Target hook for assembling integer objects. The powerpc version has
6851 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6852 is defined. It also needs to handle DI-mode objects on 64-bit
6856 rs6000_assemble_integer (x, size, aligned_p)
6861 #ifdef RELOCATABLE_NEEDS_FIXUP
6862 /* Special handling for SI values. */
6863 if (size == 4 && aligned_p)
6865 extern int in_toc_section PARAMS ((void));
6866 static int recurse = 0;
6868 /* For -mrelocatable, we mark all addresses that need to be fixed up
6869 in the .fixup section. */
6870 if (TARGET_RELOCATABLE
6871 && !in_toc_section ()
6872 && !in_text_section ()
6874 && GET_CODE (x) != CONST_INT
6875 && GET_CODE (x) != CONST_DOUBLE
6881 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
6883 ASM_OUTPUT_LABEL (asm_out_file, buf);
6884 fprintf (asm_out_file, "\t.long\t(");
6885 output_addr_const (asm_out_file, x);
6886 fprintf (asm_out_file, ")@fixup\n");
6887 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
6888 ASM_OUTPUT_ALIGN (asm_out_file, 2);
6889 fprintf (asm_out_file, "\t.long\t");
6890 assemble_name (asm_out_file, buf);
6891 fprintf (asm_out_file, "\n\t.previous\n");
6895 /* Remove initial .'s to turn a -mcall-aixdesc function
6896 address into the address of the descriptor, not the function
6898 else if (GET_CODE (x) == SYMBOL_REF
6899 && XSTR (x, 0)[0] == '.'
6900 && DEFAULT_ABI == ABI_AIX)
6902 const char *name = XSTR (x, 0);
6903 while (*name == '.')
6906 fprintf (asm_out_file, "\t.long\t%s\n", name);
6910 #endif /* RELOCATABLE_NEEDS_FIXUP */
6911 return default_assemble_integer (x, size, aligned_p);
6915 rs6000_reverse_condition (mode, code)
6916 enum machine_mode mode;
6919 /* Reversal of FP compares takes care -- an ordered compare
6920 becomes an unordered compare and vice versa. */
6921 if (mode == CCFPmode)
6922 return reverse_condition_maybe_unordered (code);
6924 return reverse_condition (code);
6927 /* Generate a compare for CODE. Return a brand-new rtx that
6928 represents the result of the compare. */
6931 rs6000_generate_compare (code)
6934 enum machine_mode comp_mode;
6937 if (rs6000_compare_fp_p)
6938 comp_mode = CCFPmode;
6939 else if (code == GTU || code == LTU
6940 || code == GEU || code == LEU)
6941 comp_mode = CCUNSmode;
6945 /* First, the compare. */
6946 compare_result = gen_reg_rtx (comp_mode);
6947 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
6948 gen_rtx_COMPARE (comp_mode,
6950 rs6000_compare_op1)));
6952 /* Some kinds of FP comparisons need an OR operation;
6953 except for flag_unsafe_math_optimizations we don't bother. */
6954 if (rs6000_compare_fp_p
6955 && ! flag_unsafe_math_optimizations
6956 && (code == LE || code == GE
6957 || code == UNEQ || code == LTGT
6958 || code == UNGT || code == UNLT))
6960 enum rtx_code or1, or2;
6961 rtx or1_rtx, or2_rtx, compare2_rtx;
6962 rtx or_result = gen_reg_rtx (CCEQmode);
6966 case LE: or1 = LT; or2 = EQ; break;
6967 case GE: or1 = GT; or2 = EQ; break;
6968 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
6969 case LTGT: or1 = LT; or2 = GT; break;
6970 case UNGT: or1 = UNORDERED; or2 = GT; break;
6971 case UNLT: or1 = UNORDERED; or2 = LT; break;
6974 validate_condition_mode (or1, comp_mode);
6975 validate_condition_mode (or2, comp_mode);
6976 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
6977 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
6978 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
6979 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
6981 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
6983 compare_result = or_result;
6987 validate_condition_mode (code, GET_MODE (compare_result));
6989 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
6993 /* Emit the RTL for an sCOND pattern. */
6996 rs6000_emit_sCOND (code, result)
7001 enum machine_mode op_mode;
7003 condition_rtx = rs6000_generate_compare (code);
7005 op_mode = GET_MODE (rs6000_compare_op0);
7006 if (op_mode == VOIDmode)
7007 op_mode = GET_MODE (rs6000_compare_op1);
7009 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7011 PUT_MODE (condition_rtx, DImode);
7012 convert_move (result, condition_rtx, 0);
7016 PUT_MODE (condition_rtx, SImode);
7017 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7021 /* Emit a branch of kind CODE to location LOC. */
7024 rs6000_emit_cbranch (code, loc)
7028 rtx condition_rtx, loc_ref;
7030 condition_rtx = rs6000_generate_compare (code);
7031 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7032 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7033 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7037 /* Return the string to output a conditional branch to LABEL, which is
7038 the operand number of the label, or -1 if the branch is really a
7041 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7042 condition code register and its mode specifies what kind of
7045 REVERSED is non-zero if we should reverse the sense of the comparison.
7047 INSN is the insn. */
7050 output_cbranch (op, label, reversed, insn)
7056 static char string[64];
7057 enum rtx_code code = GET_CODE (op);
7058 rtx cc_reg = XEXP (op, 0);
7059 enum machine_mode mode = GET_MODE (cc_reg);
7060 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
7061 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
7062 int really_reversed = reversed ^ need_longbranch;
7068 validate_condition_mode (code, mode);
7070 /* Work out which way this really branches. We could use
7071 reverse_condition_maybe_unordered here always but this
7072 makes the resulting assembler clearer. */
7073 if (really_reversed)
7074 code = rs6000_reverse_condition (mode, code);
7078 /* Not all of these are actually distinct opcodes, but
7079 we distinguish them for clarity of the resulting assembler. */
7081 ccode = "ne"; break;
7083 ccode = "eq"; break;
7085 ccode = "ge"; break;
7086 case GT: case GTU: case UNGT:
7087 ccode = "gt"; break;
7089 ccode = "le"; break;
7090 case LT: case LTU: case UNLT:
7091 ccode = "lt"; break;
7092 case UNORDERED: ccode = "un"; break;
7093 case ORDERED: ccode = "nu"; break;
7094 case UNGE: ccode = "nl"; break;
7095 case UNLE: ccode = "ng"; break;
7100 /* Maybe we have a guess as to how likely the branch is.
7101 The old mnemonics don't have a way to specify this information. */
7102 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
7103 if (note != NULL_RTX)
7105 /* PROB is the difference from 50%. */
7106 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7108 /* For branches that are very close to 50%, assume not-taken. */
7109 if (abs (prob) > REG_BR_PROB_BASE / 20
7110 && ((prob > 0) ^ need_longbranch))
7119 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7121 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7123 /* We need to escape any '%' characters in the reg_names string.
7124 Assume they'd only be the first character... */
7125 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7127 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7131 /* If the branch distance was too far, we may have to use an
7132 unconditional branch to go the distance. */
7133 if (need_longbranch)
7134 s += sprintf (s, ",$+8\n\tb %s", label);
7136 s += sprintf (s, ",%s", label);
7142 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7143 operands of the last comparison is nonzero/true, FALSE_COND if it
7144 is zero/false. Return 0 if the hardware has no such operation. */
7147 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7153 enum rtx_code code = GET_CODE (op);
7154 rtx op0 = rs6000_compare_op0;
7155 rtx op1 = rs6000_compare_op1;
7157 enum machine_mode compare_mode = GET_MODE (op0);
7158 enum machine_mode result_mode = GET_MODE (dest);
7161 /* These modes should always match. */
7162 if (GET_MODE (op1) != compare_mode)
7164 if (GET_MODE (true_cond) != result_mode)
7166 if (GET_MODE (false_cond) != result_mode)
7169 /* First, work out if the hardware can do this at all, or
7170 if it's too slow... */
7171 /* If the comparison is an integer one, since we only have fsel
7172 it'll be cheaper to use a branch. */
7173 if (! rs6000_compare_fp_p)
7176 /* Eliminate half of the comparisons by switching operands, this
7177 makes the remaining code simpler. */
7178 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7179 || code == LTGT || code == LT)
7181 code = reverse_condition_maybe_unordered (code);
7183 true_cond = false_cond;
7187 /* UNEQ and LTGT take four instructions for a comparison with zero,
7188 it'll probably be faster to use a branch here too. */
7192 if (GET_CODE (op1) == CONST_DOUBLE)
7193 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7195 /* We're going to try to implement comparions by performing
7196 a subtract, then comparing against zero. Unfortunately,
7197 Inf - Inf is NaN which is not zero, and so if we don't
7198 know that the the operand is finite and the comparison
7199 would treat EQ different to UNORDERED, we can't do it. */
7200 if (! flag_unsafe_math_optimizations
7201 && code != GT && code != UNGE
7202 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7203 /* Constructs of the form (a OP b ? a : b) are safe. */
7204 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7205 || (! rtx_equal_p (op0, true_cond)
7206 && ! rtx_equal_p (op1, true_cond))))
7208 /* At this point we know we can use fsel. */
7210 /* Reduce the comparison to a comparison against zero. */
7211 temp = gen_reg_rtx (compare_mode);
7212 emit_insn (gen_rtx_SET (VOIDmode, temp,
7213 gen_rtx_MINUS (compare_mode, op0, op1)));
7215 op1 = CONST0_RTX (compare_mode);
7217 /* If we don't care about NaNs we can reduce some of the comparisons
7218 down to faster ones. */
7219 if (flag_unsafe_math_optimizations)
7225 true_cond = false_cond;
7238 /* Now, reduce everything down to a GE. */
7245 temp = gen_reg_rtx (compare_mode);
7246 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7251 temp = gen_reg_rtx (compare_mode);
7252 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
7257 temp = gen_reg_rtx (compare_mode);
7258 emit_insn (gen_rtx_SET (VOIDmode, temp,
7259 gen_rtx_NEG (compare_mode,
7260 gen_rtx_ABS (compare_mode, op0))));
7265 temp = gen_reg_rtx (result_mode);
7266 emit_insn (gen_rtx_SET (VOIDmode, temp,
7267 gen_rtx_IF_THEN_ELSE (result_mode,
7268 gen_rtx_GE (VOIDmode,
7270 true_cond, false_cond)));
7272 true_cond = false_cond;
7274 temp = gen_reg_rtx (compare_mode);
7275 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7280 temp = gen_reg_rtx (result_mode);
7281 emit_insn (gen_rtx_SET (VOIDmode, temp,
7282 gen_rtx_IF_THEN_ELSE (result_mode,
7283 gen_rtx_GE (VOIDmode,
7285 true_cond, false_cond)));
7287 false_cond = true_cond;
7289 temp = gen_reg_rtx (compare_mode);
7290 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7298 emit_insn (gen_rtx_SET (VOIDmode, dest,
7299 gen_rtx_IF_THEN_ELSE (result_mode,
7300 gen_rtx_GE (VOIDmode,
7302 true_cond, false_cond)));
7307 rs6000_emit_minmax (dest, code, op0, op1)
7313 enum machine_mode mode = GET_MODE (op0);
7317 if (code == SMAX || code == SMIN)
7322 if (code == SMAX || code == UMAX)
7323 target = emit_conditional_move (dest, c, op0, op1, mode,
7326 target = emit_conditional_move (dest, c, op0, op1, mode,
7328 if (target == NULL_RTX)
7331 emit_move_insn (dest, target);
7334 /* This page contains routines that are used to determine what the
7335 function prologue and epilogue code will do and write them out. */
7337 /* Return the first fixed-point register that is required to be
7338 saved. 32 if none. */
7341 first_reg_to_save ()
7345 /* Find lowest numbered live register. */
7346 for (first_reg = 13; first_reg <= 31; first_reg++)
7347 if (regs_ever_live[first_reg]
7348 && (! call_used_regs[first_reg]
7349 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
7350 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7351 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7354 if (current_function_profile)
7356 /* AIX must save/restore every register that contains a parameter
7357 before/after the .__mcount call plus an additional register
7358 for the static chain, if needed; use registers from 30 down to 22
7360 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7362 int last_parm_reg, profile_first_reg;
7364 /* Figure out last used parameter register. The proper thing
7365 to do is to walk incoming args of the function. A function
7366 might have live parameter registers even if it has no
7368 for (last_parm_reg = 10;
7369 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7373 /* Calculate first reg for saving parameter registers
7375 Skip reg 31 which may contain the frame pointer. */
7376 profile_first_reg = (33 - last_parm_reg
7377 - (current_function_needs_context ? 1 : 0));
7379 /* Need to skip another reg to account for R31 being PICBASE
7380 (when flag_pic is set) or R30 being used as the frame
7381 pointer (when flag_pic is not set). */
7382 --profile_first_reg;
7384 /* Do not save frame pointer if no parameters needs to be saved. */
7385 if (profile_first_reg == 31)
7386 profile_first_reg = 32;
7388 if (first_reg > profile_first_reg)
7389 first_reg = profile_first_reg;
7392 /* SVR4 may need one register to preserve the static chain. */
7393 else if (current_function_needs_context)
7395 /* Skip reg 31 which may contain the frame pointer. */
7402 if (flag_pic && current_function_uses_pic_offset_table &&
7403 (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
7404 return RS6000_PIC_OFFSET_TABLE_REGNUM;
7410 /* Similar, for FP regs. */
7413 first_fp_reg_to_save ()
7417 /* Find lowest numbered live register. */
7418 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7419 if (regs_ever_live[first_reg])
7425 /* Similar, for AltiVec regs. */
7428 first_altivec_reg_to_save ()
7432 /* Stack frame remains as is unless we are in AltiVec ABI. */
7433 if (! TARGET_ALTIVEC_ABI)
7434 return LAST_ALTIVEC_REGNO + 1;
7436 /* Find lowest numbered live register. */
7437 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7438 if (regs_ever_live[i])
7444 /* Return a 32-bit mask of the AltiVec registers we need to set in
7445 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7446 the 32-bit word is 0. */
7449 compute_vrsave_mask ()
7451 unsigned int i, mask = 0;
7453 /* First, find out if we use _any_ altivec registers. */
7454 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7455 if (regs_ever_live[i])
7456 mask |= ALTIVEC_REG_BIT (i);
7461 /* Next, add all registers that are call-clobbered. We do this
7462 because post-reload register optimizers such as regrename_optimize
7463 may choose to use them. They never change the register class
7464 chosen by reload, so cannot create new uses of altivec registers
7465 if there were none before, so the early exit above is safe. */
7466 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7467 altivec registers not saved in the mask, which might well make the
7468 adjustments below more effective in eliding the save/restore of
7469 VRSAVE in small functions. */
7470 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7471 if (call_used_regs[i])
7472 mask |= ALTIVEC_REG_BIT (i);
7474 /* Next, remove the argument registers from the set. These must
7475 be in the VRSAVE mask set by the caller, so we don't need to add
7476 them in again. More importantly, the mask we compute here is
7477 used to generate CLOBBERs in the set_vrsave insn, and we do not
7478 wish the argument registers to die. */
7479 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7480 mask &= ~ALTIVEC_REG_BIT (i);
7482 /* Similarly, remove the return value from the set. */
7485 diddle_return_value (is_altivec_return_reg, &yes);
7487 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7494 is_altivec_return_reg (reg, xyes)
7498 bool *yes = (bool *) xyes;
7499 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7504 /* Calculate the stack information for the current function. This is
7505 complicated by having two separate calling sequences, the AIX calling
7506 sequence and the V.4 calling sequence.
7508 AIX (and Darwin/Mac OS X) stack frames look like:
7510 SP----> +---------------------------------------+
7511 | back chain to caller | 0 0
7512 +---------------------------------------+
7513 | saved CR | 4 8 (8-11)
7514 +---------------------------------------+
7516 +---------------------------------------+
7517 | reserved for compilers | 12 24
7518 +---------------------------------------+
7519 | reserved for binders | 16 32
7520 +---------------------------------------+
7521 | saved TOC pointer | 20 40
7522 +---------------------------------------+
7523 | Parameter save area (P) | 24 48
7524 +---------------------------------------+
7525 | Alloca space (A) | 24+P etc.
7526 +---------------------------------------+
7527 | Local variable space (L) | 24+P+A
7528 +---------------------------------------+
7529 | Float/int conversion temporary (X) | 24+P+A+L
7530 +---------------------------------------+
7531 | Save area for AltiVec registers (W) | 24+P+A+L+X
7532 +---------------------------------------+
7533 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7534 +---------------------------------------+
7535 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7536 +---------------------------------------+
7537 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
7538 +---------------------------------------+
7539 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
7540 +---------------------------------------+
7541 old SP->| back chain to caller's caller |
7542 +---------------------------------------+
7544 The required alignment for AIX configurations is two words (i.e., 8
7548 V.4 stack frames look like:
7550 SP----> +---------------------------------------+
7551 | back chain to caller | 0
7552 +---------------------------------------+
7553 | caller's saved LR | 4
7554 +---------------------------------------+
7555 | Parameter save area (P) | 8
7556 +---------------------------------------+
7557 | Alloca space (A) | 8+P
7558 +---------------------------------------+
7559 | Varargs save area (V) | 8+P+A
7560 +---------------------------------------+
7561 | Local variable space (L) | 8+P+A+V
7562 +---------------------------------------+
7563 | Float/int conversion temporary (X) | 8+P+A+V+L
7564 +---------------------------------------+
7565 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7566 +---------------------------------------+
7567 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7568 +---------------------------------------+
7569 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7570 +---------------------------------------+
7571 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7572 +---------------------------------------+
7573 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7574 +---------------------------------------+
7575 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7576 +---------------------------------------+
7577 old SP->| back chain to caller's caller |
7578 +---------------------------------------+
7580 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7581 given. (But note below and in sysv4.h that we require only 8 and
7582 may round up the size of our stack frame anyways. The historical
7583 reason is early versions of powerpc-linux which didn't properly
7584 align the stack at program startup. A happy side-effect is that
7585 -mno-eabi libraries can be used with -meabi programs.)
7587 The EABI configuration defaults to the V.4 layout, unless
7588 -mcall-aix is used, in which case the AIX layout is used. However,
7589 the stack alignment requirements may differ. If -mno-eabi is not
7590 given, the required stack alignment is 8 bytes; if -mno-eabi is
7591 given, the required alignment is 16 bytes. (But see V.4 comment
7594 #ifndef ABI_STACK_BOUNDARY
7595 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7599 rs6000_stack_info ()
7601 static rs6000_stack_t info, zero_info;
7602 rs6000_stack_t *info_ptr = &info;
7603 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7604 enum rs6000_abi abi;
7608 /* Zero all fields portably. */
7611 /* Select which calling sequence. */
7612 info_ptr->abi = abi = DEFAULT_ABI;
7614 /* Calculate which registers need to be saved & save area size. */
7615 info_ptr->first_gp_reg_save = first_reg_to_save ();
7616 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
7617 even if it currently looks like we won't. */
7618 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7619 || (flag_pic == 1 && abi == ABI_V4)
7620 || (flag_pic && abi == ABI_DARWIN))
7621 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
7622 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
7624 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7626 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7627 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7629 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7630 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7631 - info_ptr->first_altivec_reg_save);
7633 /* Does this function call anything? */
7634 info_ptr->calls_p = (! current_function_is_leaf
7635 || cfun->machine->ra_needs_full_frame);
7637 /* Determine if we need to save the link register. */
7638 if (rs6000_ra_ever_killed ()
7639 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7640 #ifdef TARGET_RELOCATABLE
7641 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7643 || (info_ptr->first_fp_reg_save != 64
7644 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7645 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7646 || (abi == ABI_V4 && current_function_calls_alloca)
7647 || (DEFAULT_ABI == ABI_DARWIN
7649 && current_function_uses_pic_offset_table)
7650 || info_ptr->calls_p)
7652 info_ptr->lr_save_p = 1;
7653 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7656 /* Determine if we need to save the condition code registers. */
7657 if (regs_ever_live[CR2_REGNO]
7658 || regs_ever_live[CR3_REGNO]
7659 || regs_ever_live[CR4_REGNO])
7661 info_ptr->cr_save_p = 1;
7663 info_ptr->cr_size = reg_size;
7666 /* If the current function calls __builtin_eh_return, then we need
7667 to allocate stack space for registers that will hold data for
7668 the exception handler. */
7669 if (current_function_calls_eh_return)
7672 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7674 ehrd_size = i * UNITS_PER_WORD;
7679 /* Determine various sizes. */
7680 info_ptr->reg_size = reg_size;
7681 info_ptr->fixed_size = RS6000_SAVE_AREA;
7682 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7683 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7684 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7687 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
7689 info_ptr->vrsave_mask = compute_vrsave_mask ();
7690 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7694 info_ptr->vrsave_mask = 0;
7695 info_ptr->vrsave_size = 0;
7698 /* Calculate the offsets. */
7706 case ABI_AIX_NODESC:
7708 info_ptr->fp_save_offset = - info_ptr->fp_size;
7709 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7711 if (TARGET_ALTIVEC_ABI)
7713 info_ptr->vrsave_save_offset
7714 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7716 /* Align stack so vector save area is on a quadword boundary. */
7717 if (info_ptr->altivec_size != 0)
7718 info_ptr->altivec_padding_size
7719 = 16 - (-info_ptr->vrsave_save_offset % 16);
7721 info_ptr->altivec_padding_size = 0;
7723 info_ptr->altivec_save_offset
7724 = info_ptr->vrsave_save_offset
7725 - info_ptr->altivec_padding_size
7726 - info_ptr->altivec_size;
7728 /* Adjust for AltiVec case. */
7729 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7732 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7733 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7734 info_ptr->lr_save_offset = 2*reg_size;
7738 info_ptr->fp_save_offset = - info_ptr->fp_size;
7739 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7740 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7742 if (TARGET_ALTIVEC_ABI)
7744 info_ptr->vrsave_save_offset
7745 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7747 /* Align stack so vector save area is on a quadword boundary. */
7748 if (info_ptr->altivec_size != 0)
7749 info_ptr->altivec_padding_size
7750 = 16 - (-info_ptr->vrsave_save_offset % 16);
7752 info_ptr->altivec_padding_size = 0;
7754 info_ptr->altivec_save_offset
7755 = info_ptr->vrsave_save_offset
7756 - info_ptr->altivec_padding_size
7757 - info_ptr->altivec_size;
7759 /* Adjust for AltiVec case. */
7760 info_ptr->toc_save_offset
7761 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7764 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7765 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7766 info_ptr->lr_save_offset = reg_size;
7770 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7772 + info_ptr->altivec_size
7773 + info_ptr->altivec_padding_size
7774 + info_ptr->vrsave_size
7778 + info_ptr->vrsave_size
7779 + info_ptr->toc_size,
7780 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7783 total_raw_size = (info_ptr->vars_size
7784 + info_ptr->parm_size
7785 + info_ptr->save_size
7786 + info_ptr->varargs_size
7787 + info_ptr->fixed_size);
7789 info_ptr->total_size =
7790 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7792 /* Determine if we need to allocate any stack frame:
7794 For AIX we need to push the stack if a frame pointer is needed
7795 (because the stack might be dynamically adjusted), if we are
7796 debugging, if we make calls, or if the sum of fp_save, gp_save,
7797 and local variables are more than the space needed to save all
7798 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7799 + 18*8 = 288 (GPR13 reserved).
7801 For V.4 we don't have the stack cushion that AIX uses, but assume
7802 that the debugger can handle stackless frames. */
7804 if (info_ptr->calls_p)
7805 info_ptr->push_p = 1;
7807 else if (abi == ABI_V4)
7808 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
7811 info_ptr->push_p = (frame_pointer_needed
7812 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7813 || ((total_raw_size - info_ptr->fixed_size)
7814 > (TARGET_32BIT ? 220 : 288)));
7816 /* Zero offsets if we're not saving those registers. */
7817 if (info_ptr->fp_size == 0)
7818 info_ptr->fp_save_offset = 0;
7820 if (info_ptr->gp_size == 0)
7821 info_ptr->gp_save_offset = 0;
7823 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7824 info_ptr->altivec_save_offset = 0;
7826 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7827 info_ptr->vrsave_save_offset = 0;
7829 if (! info_ptr->lr_save_p)
7830 info_ptr->lr_save_offset = 0;
7832 if (! info_ptr->cr_save_p)
7833 info_ptr->cr_save_offset = 0;
7835 if (! info_ptr->toc_save_p)
7836 info_ptr->toc_save_offset = 0;
7842 debug_stack_info (info)
7843 rs6000_stack_t *info;
7845 const char *abi_string;
7848 info = rs6000_stack_info ();
7850 fprintf (stderr, "\nStack information for function %s:\n",
7851 ((current_function_decl && DECL_NAME (current_function_decl))
7852 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7857 default: abi_string = "Unknown"; break;
7858 case ABI_NONE: abi_string = "NONE"; break;
7860 case ABI_AIX_NODESC: abi_string = "AIX"; break;
7861 case ABI_DARWIN: abi_string = "Darwin"; break;
7862 case ABI_V4: abi_string = "V.4"; break;
7865 fprintf (stderr, "\tABI = %5s\n", abi_string);
7867 if (TARGET_ALTIVEC_ABI)
7868 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
7870 if (info->first_gp_reg_save != 32)
7871 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
7873 if (info->first_fp_reg_save != 64)
7874 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
7876 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7877 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7878 info->first_altivec_reg_save);
7880 if (info->lr_save_p)
7881 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
7883 if (info->cr_save_p)
7884 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
7886 if (info->toc_save_p)
7887 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
7889 if (info->vrsave_mask)
7890 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
7893 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
7896 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
7898 if (info->gp_save_offset)
7899 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
7901 if (info->fp_save_offset)
7902 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
7904 if (info->altivec_save_offset)
7905 fprintf (stderr, "\taltivec_save_offset = %5d\n",
7906 info->altivec_save_offset);
7908 if (info->vrsave_save_offset)
7909 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
7910 info->vrsave_save_offset);
7912 if (info->lr_save_offset)
7913 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
7915 if (info->cr_save_offset)
7916 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
7918 if (info->toc_save_offset)
7919 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
7921 if (info->varargs_save_offset)
7922 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
7924 if (info->total_size)
7925 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
7927 if (info->varargs_size)
7928 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
7930 if (info->vars_size)
7931 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
7933 if (info->parm_size)
7934 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
7936 if (info->fixed_size)
7937 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
7940 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
7943 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
7945 if (info->altivec_size)
7946 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
7948 if (info->vrsave_size)
7949 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
7951 if (info->altivec_padding_size)
7952 fprintf (stderr, "\taltivec_padding_size= %5d\n",
7953 info->altivec_padding_size);
7956 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
7959 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
7962 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
7964 if (info->save_size)
7965 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
7967 if (info->reg_size != 4)
7968 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
7970 fprintf (stderr, "\n");
7974 rs6000_return_addr (count, frame)
7978 /* Currently we don't optimize very well between prolog and body
7979 code and for PIC code the code can be actually quite bad, so
7980 don't try to be too clever here. */
7981 if (count != 0 || flag_pic != 0)
7983 cfun->machine->ra_needs_full_frame = 1;
7990 plus_constant (copy_to_reg
7991 (gen_rtx_MEM (Pmode,
7992 memory_address (Pmode, frame))),
7993 RETURN_ADDRESS_OFFSET)));
7996 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
8000 rs6000_ra_ever_killed ()
8004 #ifdef ASM_OUTPUT_MI_THUNK
8005 if (current_function_is_thunk)
8008 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8009 || cfun->machine->ra_needs_full_frame)
8010 return regs_ever_live[LINK_REGISTER_REGNUM];
8012 push_topmost_sequence ();
8014 pop_topmost_sequence ();
8016 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8020 /* Add a REG_MAYBE_DEAD note to the insn. */
8022 rs6000_maybe_dead (insn)
8025 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8030 /* Emit instructions needed to load the TOC register.
8031 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8032 a constant pool; or for SVR4 -fpic. */
8035 rs6000_emit_load_toc_table (fromprolog)
8039 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
8041 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
8043 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8045 rtx temp = (fromprolog
8046 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8047 : gen_reg_rtx (Pmode));
8048 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
8049 rs6000_maybe_dead (emit_move_insn (dest, temp));
8051 else if (flag_pic == 2)
8054 rtx tempLR = (fromprolog
8055 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8056 : gen_reg_rtx (Pmode));
8057 rtx temp0 = (fromprolog
8058 ? gen_rtx_REG (Pmode, 0)
8059 : gen_reg_rtx (Pmode));
8062 /* possibly create the toc section */
8063 if (! toc_initialized)
8066 function_section (current_function_decl);
8073 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
8074 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8076 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
8077 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8079 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
8081 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8082 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
8089 static int reload_toc_labelno = 0;
8091 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
8093 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
8094 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8096 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
8099 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8100 rs6000_maybe_dead (emit_move_insn (temp0,
8101 gen_rtx_MEM (Pmode, dest)));
8103 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8105 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8107 /* This is for AIX code running in non-PIC ELF. */
8110 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8111 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8113 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8114 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
8122 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8124 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
8129 get_TOC_alias_set ()
8131 static int set = -1;
8133 set = new_alias_set ();
8137 /* This retuns nonzero if the current function uses the TOC. This is
8138 determined by the presence of (unspec ... 7), which is generated by
8139 the various load_toc_* patterns. */
8146 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8149 rtx pat = PATTERN (insn);
8152 if (GET_CODE (pat) == PARALLEL)
8153 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8154 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8155 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8162 create_TOC_reference (symbol)
8165 return gen_rtx_PLUS (Pmode,
8166 gen_rtx_REG (Pmode, TOC_REGISTER),
8167 gen_rtx_CONST (Pmode,
8168 gen_rtx_MINUS (Pmode, symbol,
8169 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8173 /* __throw will restore its own return address to be the same as the
8174 return address of the function that the throw is being made to.
8175 This is unfortunate, because we want to check the original
8176 return address to see if we need to restore the TOC.
8177 So we have to squirrel it away here.
8178 This is used only in compiling __throw and __rethrow.
8180 Most of this code should be removed by CSE. */
8181 static rtx insn_after_throw;
8183 /* This does the saving... */
8185 rs6000_aix_emit_builtin_unwind_init ()
8188 rtx stack_top = gen_reg_rtx (Pmode);
8189 rtx opcode_addr = gen_reg_rtx (Pmode);
8191 insn_after_throw = gen_reg_rtx (SImode);
8193 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8194 emit_move_insn (stack_top, mem);
8196 mem = gen_rtx_MEM (Pmode,
8197 gen_rtx_PLUS (Pmode, stack_top,
8198 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8199 emit_move_insn (opcode_addr, mem);
8200 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8203 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8204 in _eh.o). Only used on AIX.
8206 The idea is that on AIX, function calls look like this:
8207 bl somefunction-trampoline
8211 somefunction-trampoline:
8213 ... load function address in the count register ...
8215 or like this, if the linker determines that this is not a cross-module call
8216 and so the TOC need not be restored:
8219 or like this, if the compiler could determine that this is not a
8222 now, the tricky bit here is that register 2 is saved and restored
8223 by the _linker_, so we can't readily generate debugging information
8224 for it. So we need to go back up the call chain looking at the
8225 insns at return addresses to see which calls saved the TOC register
8226 and so see where it gets restored from.
8228 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8229 just before the actual epilogue.
8231 On the bright side, this incurs no space or time overhead unless an
8232 exception is thrown, except for the extra code in libgcc.a.
8234 The parameter STACKSIZE is a register containing (at runtime)
8235 the amount to be popped off the stack in addition to the stack frame
8236 of this routine (which will be __throw or __rethrow, and so is
8237 guaranteed to have a stack frame). */
8240 rs6000_emit_eh_toc_restore (stacksize)
8244 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8245 rtx tocompare = gen_reg_rtx (SImode);
8246 rtx opcode = gen_reg_rtx (SImode);
8247 rtx opcode_addr = gen_reg_rtx (Pmode);
8249 rtx loop_start = gen_label_rtx ();
8250 rtx no_toc_restore_needed = gen_label_rtx ();
8251 rtx loop_exit = gen_label_rtx ();
8253 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8254 set_mem_alias_set (mem, rs6000_sr_alias_set);
8255 emit_move_insn (bottom_of_stack, mem);
8257 top_of_stack = expand_binop (Pmode, add_optab,
8258 bottom_of_stack, stacksize,
8259 NULL_RTX, 1, OPTAB_WIDEN);
8261 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
8262 : 0xE8410028, SImode));
8264 if (insn_after_throw == NULL_RTX)
8266 emit_move_insn (opcode, insn_after_throw);
8268 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8269 emit_label (loop_start);
8271 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8272 SImode, NULL_RTX, NULL_RTX,
8273 no_toc_restore_needed);
8275 mem = gen_rtx_MEM (Pmode,
8276 gen_rtx_PLUS (Pmode, bottom_of_stack,
8277 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8278 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8280 emit_label (no_toc_restore_needed);
8281 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8282 Pmode, NULL_RTX, NULL_RTX,
8285 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8286 set_mem_alias_set (mem, rs6000_sr_alias_set);
8287 emit_move_insn (bottom_of_stack, mem);
8289 mem = gen_rtx_MEM (Pmode,
8290 gen_rtx_PLUS (Pmode, bottom_of_stack,
8291 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8292 emit_move_insn (opcode_addr, mem);
8293 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8295 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8296 emit_jump (loop_start);
8297 emit_note (NULL, NOTE_INSN_LOOP_END);
8298 emit_label (loop_exit);
8300 #endif /* TARGET_AIX */
8302 /* This ties together stack memory (MEM with an alias set of
8303 rs6000_sr_alias_set) and the change to the stack pointer. */
8306 rs6000_emit_stack_tie ()
8308 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8310 set_mem_alias_set (mem, rs6000_sr_alias_set);
8311 emit_insn (gen_stack_tie (mem));
8314 /* Emit the correct code for allocating stack space, as insns.
8315 If COPY_R12, make sure a copy of the old frame is left in r12.
8316 The generated code may use hard register 0 as a temporary. */
8319 rs6000_emit_allocate_stack (size, copy_r12)
8324 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8325 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
8326 rtx todec = GEN_INT (-size);
8328 if (current_function_limit_stack)
8330 if (REG_P (stack_limit_rtx)
8331 && REGNO (stack_limit_rtx) > 1
8332 && REGNO (stack_limit_rtx) <= 31)
8334 emit_insn (Pmode == SImode
8335 ? gen_addsi3 (tmp_reg,
8338 : gen_adddi3 (tmp_reg,
8342 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8345 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8347 && DEFAULT_ABI == ABI_V4)
8349 rtx toload = gen_rtx_CONST (VOIDmode,
8350 gen_rtx_PLUS (Pmode,
8354 emit_insn (gen_elf_high (tmp_reg, toload));
8355 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8356 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8360 warning ("stack limit expression is not supported");
8363 if (copy_r12 || ! TARGET_UPDATE)
8364 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8370 /* Need a note here so that try_split doesn't get confused. */
8371 if (get_last_insn() == NULL_RTX)
8372 emit_note (0, NOTE_INSN_DELETED);
8373 insn = emit_move_insn (tmp_reg, todec);
8374 try_split (PATTERN (insn), insn, 0);
8378 if (Pmode == SImode)
8379 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8382 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
8387 if (Pmode == SImode)
8388 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8390 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8391 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8392 gen_rtx_REG (Pmode, 12));
8395 RTX_FRAME_RELATED_P (insn) = 1;
8397 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8398 gen_rtx_SET (VOIDmode, stack_reg,
8399 gen_rtx_PLUS (Pmode, stack_reg,
8404 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8407 (mem (plus (blah) (regXX)))
8411 (mem (plus (blah) (const VALUE_OF_REGXX))). */
8414 altivec_frame_fixup (insn, reg, val)
8420 real = copy_rtx (PATTERN (insn));
8422 real = replace_rtx (real, reg, GEN_INT (val));
8424 RTX_FRAME_RELATED_P (insn) = 1;
8425 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8430 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8431 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8432 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8433 deduce these equivalences by itself so it wasn't necessary to hold
8434 its hand so much. */
8437 rs6000_frame_related (insn, reg, val, reg2, rreg)
8446 /* copy_rtx will not make unique copies of registers, so we need to
8447 ensure we don't have unwanted sharing here. */
8449 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
8452 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
8454 real = copy_rtx (PATTERN (insn));
8456 real = replace_rtx (real, reg,
8457 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8458 STACK_POINTER_REGNUM),
8461 /* We expect that 'real' is either a SET or a PARALLEL containing
8462 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8463 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8465 if (GET_CODE (real) == SET)
8469 temp = simplify_rtx (SET_SRC (set));
8471 SET_SRC (set) = temp;
8472 temp = simplify_rtx (SET_DEST (set));
8474 SET_DEST (set) = temp;
8475 if (GET_CODE (SET_DEST (set)) == MEM)
8477 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8479 XEXP (SET_DEST (set), 0) = temp;
8482 else if (GET_CODE (real) == PARALLEL)
8485 for (i = 0; i < XVECLEN (real, 0); i++)
8486 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8488 rtx set = XVECEXP (real, 0, i);
8490 temp = simplify_rtx (SET_SRC (set));
8492 SET_SRC (set) = temp;
8493 temp = simplify_rtx (SET_DEST (set));
8495 SET_DEST (set) = temp;
8496 if (GET_CODE (SET_DEST (set)) == MEM)
8498 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8500 XEXP (SET_DEST (set), 0) = temp;
8502 RTX_FRAME_RELATED_P (set) = 1;
8508 if (reg2 != NULL_RTX)
8509 real = replace_rtx (real, reg2, rreg);
8511 RTX_FRAME_RELATED_P (insn) = 1;
8512 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8517 /* Returns an insn that has a vrsave set operation with the
8518 appropriate CLOBBERs. */
8521 generate_set_vrsave (reg, info, epiloguep)
8523 rs6000_stack_t *info;
8527 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8528 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8531 = gen_rtx_SET (VOIDmode,
8533 gen_rtx_UNSPEC_VOLATILE (SImode,
8534 gen_rtvec (2, reg, vrsave),
8539 /* We need to clobber the registers in the mask so the scheduler
8540 does not move sets to VRSAVE before sets of AltiVec registers.
8542 However, if the function receives nonlocal gotos, reload will set
8543 all call saved registers live. We will end up with:
8545 (set (reg 999) (mem))
8546 (parallel [ (set (reg vrsave) (unspec blah))
8547 (clobber (reg 999))])
8549 The clobber will cause the store into reg 999 to be dead, and
8550 flow will attempt to delete an epilogue insn. In this case, we
8551 need an unspec use/set of the register. */
8553 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8554 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8556 if (!epiloguep || call_used_regs [i])
8557 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8558 gen_rtx_REG (V4SImode, i));
8561 rtx reg = gen_rtx_REG (V4SImode, i);
8564 = gen_rtx_SET (VOIDmode,
8566 gen_rtx_UNSPEC (V4SImode,
8567 gen_rtvec (1, reg), 27));
8571 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8573 for (i = 0; i < nclobs; ++i)
8574 XVECEXP (insn, 0, i) = clobs[i];
8579 /* Emit function prologue as insns. */
8582 rs6000_emit_prologue ()
8584 rs6000_stack_t *info = rs6000_stack_info ();
8585 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8586 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8587 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8588 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8589 rtx frame_reg_rtx = sp_reg_rtx;
8590 rtx cr_save_rtx = NULL;
8592 int saving_FPRs_inline;
8593 int using_store_multiple;
8594 HOST_WIDE_INT sp_offset = 0;
8596 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8597 && info->first_gp_reg_save < 31);
8598 saving_FPRs_inline = (info->first_fp_reg_save == 64
8599 || FP_SAVE_INLINE (info->first_fp_reg_save));
8601 /* For V.4, update stack before we do any saving and set back pointer. */
8602 if (info->push_p && DEFAULT_ABI == ABI_V4)
8604 if (info->total_size < 32767)
8605 sp_offset = info->total_size;
8607 frame_reg_rtx = frame_ptr_rtx;
8608 rs6000_emit_allocate_stack (info->total_size,
8609 (frame_reg_rtx != sp_reg_rtx
8612 || info->first_fp_reg_save < 64
8613 || info->first_gp_reg_save < 32
8615 if (frame_reg_rtx != sp_reg_rtx)
8616 rs6000_emit_stack_tie ();
8619 /* Save AltiVec registers if needed. */
8620 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8624 /* There should be a non inline version of this, for when we
8625 are saving lots of vector registers. */
8626 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8627 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8629 rtx areg, savereg, mem;
8632 offset = info->altivec_save_offset + sp_offset
8633 + 16 * (i - info->first_altivec_reg_save);
8635 savereg = gen_rtx_REG (V4SImode, i);
8637 areg = gen_rtx_REG (Pmode, 0);
8638 emit_move_insn (areg, GEN_INT (offset));
8640 /* AltiVec addressing mode is [reg+reg]. */
8641 mem = gen_rtx_MEM (V4SImode,
8642 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8644 set_mem_alias_set (mem, rs6000_sr_alias_set);
8646 insn = emit_move_insn (mem, savereg);
8648 altivec_frame_fixup (insn, areg, offset);
8652 /* VRSAVE is a bit vector representing which AltiVec registers
8653 are used. The OS uses this to determine which vector
8654 registers to save on a context switch. We need to save
8655 VRSAVE on the stack frame, add whatever AltiVec registers we
8656 used in this function, and do the corresponding magic in the
8659 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8661 rtx reg, mem, vrsave;
8664 /* Get VRSAVE onto a GPR. */
8665 reg = gen_rtx_REG (SImode, 12);
8666 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8668 emit_insn (gen_get_vrsave_internal (reg));
8670 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
8673 offset = info->vrsave_save_offset + sp_offset;
8675 = gen_rtx_MEM (SImode,
8676 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8677 set_mem_alias_set (mem, rs6000_sr_alias_set);
8678 insn = emit_move_insn (mem, reg);
8680 /* Include the registers in the mask. */
8681 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8683 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8686 /* If we use the link register, get it into r0. */
8687 if (info->lr_save_p)
8688 emit_move_insn (gen_rtx_REG (Pmode, 0),
8689 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8691 /* If we need to save CR, put it into r12. */
8692 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8694 cr_save_rtx = gen_rtx_REG (SImode, 12);
8695 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8698 /* Do any required saving of fpr's. If only one or two to save, do
8699 it ourselves. Otherwise, call function. */
8700 if (saving_FPRs_inline)
8703 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8704 if ((regs_ever_live[info->first_fp_reg_save+i]
8705 && ! call_used_regs[info->first_fp_reg_save+i]))
8708 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8709 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8710 GEN_INT (info->fp_save_offset
8713 mem = gen_rtx_MEM (DFmode, addr);
8714 set_mem_alias_set (mem, rs6000_sr_alias_set);
8716 insn = emit_move_insn (mem, reg);
8717 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8718 NULL_RTX, NULL_RTX);
8721 else if (info->first_fp_reg_save != 64)
8725 const char *alloc_rname;
8727 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8729 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8731 LINK_REGISTER_REGNUM));
8732 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8733 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8734 alloc_rname = ggc_strdup (rname);
8735 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8736 gen_rtx_SYMBOL_REF (Pmode,
8738 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8741 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8742 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8743 GEN_INT (info->fp_save_offset
8744 + sp_offset + 8*i));
8745 mem = gen_rtx_MEM (DFmode, addr);
8746 set_mem_alias_set (mem, rs6000_sr_alias_set);
8748 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8750 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8751 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8752 NULL_RTX, NULL_RTX);
8755 /* Save GPRs. This is done as a PARALLEL if we are using
8756 the store-multiple instructions. */
8757 if (using_store_multiple)
8761 p = rtvec_alloc (32 - info->first_gp_reg_save);
8762 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8763 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8766 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8767 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8768 GEN_INT (info->gp_save_offset
8771 mem = gen_rtx_MEM (reg_mode, addr);
8772 set_mem_alias_set (mem, rs6000_sr_alias_set);
8774 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8776 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8777 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8778 NULL_RTX, NULL_RTX);
8783 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8784 if ((regs_ever_live[info->first_gp_reg_save+i]
8785 && ! call_used_regs[info->first_gp_reg_save+i])
8786 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
8787 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8788 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8791 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8792 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8793 GEN_INT (info->gp_save_offset
8796 mem = gen_rtx_MEM (reg_mode, addr);
8797 set_mem_alias_set (mem, rs6000_sr_alias_set);
8799 insn = emit_move_insn (mem, reg);
8800 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8801 NULL_RTX, NULL_RTX);
8805 /* ??? There's no need to emit actual instructions here, but it's the
8806 easiest way to get the frame unwind information emitted. */
8807 if (current_function_calls_eh_return)
8809 unsigned int i, regno;
8815 regno = EH_RETURN_DATA_REGNO (i);
8816 if (regno == INVALID_REGNUM)
8819 reg = gen_rtx_REG (reg_mode, regno);
8820 addr = plus_constant (frame_reg_rtx,
8821 info->ehrd_offset + sp_offset
8822 + reg_size * (int) i);
8823 mem = gen_rtx_MEM (reg_mode, addr);
8824 set_mem_alias_set (mem, rs6000_sr_alias_set);
8826 insn = emit_move_insn (mem, reg);
8827 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8828 NULL_RTX, NULL_RTX);
8832 /* Save lr if we used it. */
8833 if (info->lr_save_p)
8835 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8836 GEN_INT (info->lr_save_offset + sp_offset));
8837 rtx reg = gen_rtx_REG (Pmode, 0);
8838 rtx mem = gen_rtx_MEM (Pmode, addr);
8839 /* This should not be of rs6000_sr_alias_set, because of
8840 __builtin_return_address. */
8842 insn = emit_move_insn (mem, reg);
8843 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8844 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8847 /* Save CR if we use any that must be preserved. */
8848 if (info->cr_save_p)
8850 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8851 GEN_INT (info->cr_save_offset + sp_offset));
8852 rtx mem = gen_rtx_MEM (SImode, addr);
8854 set_mem_alias_set (mem, rs6000_sr_alias_set);
8856 /* If r12 was used to hold the original sp, copy cr into r0 now
8858 if (REGNO (frame_reg_rtx) == 12)
8860 cr_save_rtx = gen_rtx_REG (SImode, 0);
8861 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8863 insn = emit_move_insn (mem, cr_save_rtx);
8865 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8866 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8867 OK. All we have to do is specify that _one_ condition code
8868 register is saved in this stack slot. The thrower's epilogue
8869 will then restore all the call-saved registers.
8870 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8871 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8872 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8875 /* Update stack and set back pointer unless this is V.4,
8876 for which it was done previously. */
8877 if (info->push_p && DEFAULT_ABI != ABI_V4)
8878 rs6000_emit_allocate_stack (info->total_size, FALSE);
8880 /* Set frame pointer, if needed. */
8881 if (frame_pointer_needed)
8883 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8885 RTX_FRAME_RELATED_P (insn) = 1;
8888 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8889 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8890 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8891 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
8893 /* If emit_load_toc_table will use the link register, we need to save
8894 it. We use R11 for this purpose because emit_load_toc_table
8895 can use register 0. This allows us to use a plain 'blr' to return
8896 from the procedure more often. */
8897 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
8898 && ! info->lr_save_p
8899 && EXIT_BLOCK_PTR->pred != NULL);
8900 if (save_LR_around_toc_setup)
8901 emit_move_insn (gen_rtx_REG (Pmode, 11),
8902 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8904 rs6000_emit_load_toc_table (TRUE);
8906 if (save_LR_around_toc_setup)
8907 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8908 gen_rtx_REG (Pmode, 11));
8911 if (DEFAULT_ABI == ABI_DARWIN
8912 && flag_pic && current_function_uses_pic_offset_table)
8914 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
8916 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
8919 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
8920 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
8924 /* Write function prologue. */
8927 rs6000_output_function_prologue (file, size)
8929 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8931 rs6000_stack_t *info = rs6000_stack_info ();
8933 if (TARGET_DEBUG_STACK)
8934 debug_stack_info (info);
8936 /* Write .extern for any function we will call to save and restore
8938 if (info->first_fp_reg_save < 64
8939 && !FP_SAVE_INLINE (info->first_fp_reg_save))
8940 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
8941 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
8942 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
8945 /* Write .extern for AIX common mode routines, if needed. */
8946 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
8948 fputs ("\t.extern __mulh\n", file);
8949 fputs ("\t.extern __mull\n", file);
8950 fputs ("\t.extern __divss\n", file);
8951 fputs ("\t.extern __divus\n", file);
8952 fputs ("\t.extern __quoss\n", file);
8953 fputs ("\t.extern __quous\n", file);
8954 common_mode_defined = 1;
8957 if (! HAVE_prologue)
8961 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
8962 the "toplevel" insn chain. */
8963 emit_note (0, NOTE_INSN_DELETED);
8964 rs6000_emit_prologue ();
8965 emit_note (0, NOTE_INSN_DELETED);
8967 /* Expand INSN_ADDRESSES so final() doesn't crash. */
8971 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
8973 INSN_ADDRESSES_NEW (insn, addr);
8978 if (TARGET_DEBUG_STACK)
8979 debug_rtx_list (get_insns (), 100);
8980 final (get_insns (), file, FALSE, FALSE);
8984 rs6000_pic_labelno++;
8987 /* Emit function epilogue as insns.
8989 At present, dwarf2out_frame_debug_expr doesn't understand
8990 register restores, so we don't bother setting RTX_FRAME_RELATED_P
8991 anywhere in the epilogue. Most of the insns below would in any case
8992 need special notes to explain where r11 is in relation to the stack. */
8995 rs6000_emit_epilogue (sibcall)
8998 rs6000_stack_t *info;
8999 int restoring_FPRs_inline;
9000 int using_load_multiple;
9001 int using_mfcr_multiple;
9002 int use_backchain_to_restore_sp;
9004 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
9005 rtx frame_reg_rtx = sp_reg_rtx;
9006 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
9007 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9010 info = rs6000_stack_info ();
9011 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
9012 && info->first_gp_reg_save < 31);
9013 restoring_FPRs_inline = (sibcall
9014 || current_function_calls_eh_return
9015 || info->first_fp_reg_save == 64
9016 || FP_SAVE_INLINE (info->first_fp_reg_save));
9017 use_backchain_to_restore_sp = (frame_pointer_needed
9018 || current_function_calls_alloca
9019 || info->total_size > 32767);
9020 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9021 || rs6000_cpu == PROCESSOR_PPC603
9022 || rs6000_cpu == PROCESSOR_PPC750
9025 /* If we have a frame pointer, a call to alloca, or a large stack
9026 frame, restore the old stack pointer using the backchain. Otherwise,
9027 we know what size to update it with. */
9028 if (use_backchain_to_restore_sp)
9030 /* Under V.4, don't reset the stack pointer until after we're done
9031 loading the saved registers. */
9032 if (DEFAULT_ABI == ABI_V4)
9033 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
9035 emit_move_insn (frame_reg_rtx,
9036 gen_rtx_MEM (Pmode, sp_reg_rtx));
9039 else if (info->push_p)
9041 if (DEFAULT_ABI == ABI_V4)
9042 sp_offset = info->total_size;
9045 emit_insn (TARGET_32BIT
9046 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9047 GEN_INT (info->total_size))
9048 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9049 GEN_INT (info->total_size)));
9053 /* Restore AltiVec registers if needed. */
9054 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9058 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
9059 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9061 rtx addr, areg, mem;
9063 areg = gen_rtx_REG (Pmode, 0);
9065 (areg, GEN_INT (info->altivec_save_offset
9067 + 16 * (i - info->first_altivec_reg_save)));
9069 /* AltiVec addressing mode is [reg+reg]. */
9070 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
9071 mem = gen_rtx_MEM (V4SImode, addr);
9072 set_mem_alias_set (mem, rs6000_sr_alias_set);
9074 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
9078 /* Restore VRSAVE if needed. */
9079 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
9083 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9084 GEN_INT (info->vrsave_save_offset + sp_offset));
9085 mem = gen_rtx_MEM (SImode, addr);
9086 set_mem_alias_set (mem, rs6000_sr_alias_set);
9087 reg = gen_rtx_REG (SImode, 12);
9088 emit_move_insn (reg, mem);
9090 emit_insn (generate_set_vrsave (reg, info, 1));
9093 /* Get the old lr if we saved it. */
9094 if (info->lr_save_p)
9096 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9097 GEN_INT (info->lr_save_offset + sp_offset));
9098 rtx mem = gen_rtx_MEM (Pmode, addr);
9100 set_mem_alias_set (mem, rs6000_sr_alias_set);
9102 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9105 /* Get the old cr if we saved it. */
9106 if (info->cr_save_p)
9108 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9109 GEN_INT (info->cr_save_offset + sp_offset));
9110 rtx mem = gen_rtx_MEM (SImode, addr);
9112 set_mem_alias_set (mem, rs6000_sr_alias_set);
9114 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9117 /* Set LR here to try to overlap restores below. */
9118 if (info->lr_save_p)
9119 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9120 gen_rtx_REG (Pmode, 0));
9122 /* Load exception handler data registers, if needed. */
9123 if (current_function_calls_eh_return)
9125 unsigned int i, regno;
9131 regno = EH_RETURN_DATA_REGNO (i);
9132 if (regno == INVALID_REGNUM)
9135 addr = plus_constant (frame_reg_rtx,
9136 info->ehrd_offset + sp_offset
9137 + reg_size * (int) i);
9138 mem = gen_rtx_MEM (reg_mode, addr);
9139 set_mem_alias_set (mem, rs6000_sr_alias_set);
9141 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9145 /* Restore GPRs. This is done as a PARALLEL if we are using
9146 the load-multiple instructions. */
9147 if (using_load_multiple)
9150 p = rtvec_alloc (32 - info->first_gp_reg_save);
9151 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9153 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9154 GEN_INT (info->gp_save_offset
9157 rtx mem = gen_rtx_MEM (reg_mode, addr);
9159 set_mem_alias_set (mem, rs6000_sr_alias_set);
9162 gen_rtx_SET (VOIDmode,
9163 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9166 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9169 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9170 if ((regs_ever_live[info->first_gp_reg_save+i]
9171 && ! call_used_regs[info->first_gp_reg_save+i])
9172 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
9173 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9174 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9176 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9177 GEN_INT (info->gp_save_offset
9180 rtx mem = gen_rtx_MEM (reg_mode, addr);
9182 set_mem_alias_set (mem, rs6000_sr_alias_set);
9184 emit_move_insn (gen_rtx_REG (reg_mode,
9185 info->first_gp_reg_save + i),
9189 /* Restore fpr's if we need to do it without calling a function. */
9190 if (restoring_FPRs_inline)
9191 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9192 if ((regs_ever_live[info->first_fp_reg_save+i]
9193 && ! call_used_regs[info->first_fp_reg_save+i]))
9196 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9197 GEN_INT (info->fp_save_offset
9200 mem = gen_rtx_MEM (DFmode, addr);
9201 set_mem_alias_set (mem, rs6000_sr_alias_set);
9203 emit_move_insn (gen_rtx_REG (DFmode,
9204 info->first_fp_reg_save + i),
9208 /* If we saved cr, restore it here. Just those that were used. */
9209 if (info->cr_save_p)
9211 rtx r12_rtx = gen_rtx_REG (SImode, 12);
9214 if (using_mfcr_multiple)
9216 for (i = 0; i < 8; i++)
9217 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9223 if (using_mfcr_multiple && count > 1)
9228 p = rtvec_alloc (count);
9231 for (i = 0; i < 8; i++)
9232 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9234 rtvec r = rtvec_alloc (2);
9235 RTVEC_ELT (r, 0) = r12_rtx;
9236 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9237 RTVEC_ELT (p, ndx) =
9238 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9239 gen_rtx_UNSPEC (CCmode, r, 20));
9242 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9247 for (i = 0; i < 8; i++)
9248 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9250 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9256 /* If this is V.4, unwind the stack pointer after all of the loads
9257 have been done. We need to emit a block here so that sched
9258 doesn't decide to move the sp change before the register restores
9259 (which may not have any obvious dependency on the stack). This
9260 doesn't hurt performance, because there is no scheduling that can
9261 be done after this point. */
9262 if (DEFAULT_ABI == ABI_V4)
9264 if (frame_reg_rtx != sp_reg_rtx)
9265 rs6000_emit_stack_tie ();
9267 if (use_backchain_to_restore_sp)
9269 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9271 else if (sp_offset != 0)
9273 emit_insn (Pmode == SImode
9274 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9275 GEN_INT (sp_offset))
9276 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9277 GEN_INT (sp_offset)));
9281 if (current_function_calls_eh_return)
9283 rtx sa = EH_RETURN_STACKADJ_RTX;
9284 emit_insn (Pmode == SImode
9285 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9286 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
9292 if (! restoring_FPRs_inline)
9293 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9295 p = rtvec_alloc (2);
9297 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9298 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9300 LINK_REGISTER_REGNUM));
9302 /* If we have to restore more than two FP registers, branch to the
9303 restore function. It will return to our caller. */
9304 if (! restoring_FPRs_inline)
9308 const char *alloc_rname;
9310 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9311 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9312 alloc_rname = ggc_strdup (rname);
9313 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9314 gen_rtx_SYMBOL_REF (Pmode,
9317 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9320 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9321 GEN_INT (info->fp_save_offset + 8*i));
9322 mem = gen_rtx_MEM (DFmode, addr);
9323 set_mem_alias_set (mem, rs6000_sr_alias_set);
9325 RTVEC_ELT (p, i+3) =
9326 gen_rtx_SET (VOIDmode,
9327 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9332 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9336 /* Write function epilogue. */
9339 rs6000_output_function_epilogue (file, size)
9341 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9343 rs6000_stack_t *info = rs6000_stack_info ();
9344 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9346 if (! HAVE_epilogue)
9348 rtx insn = get_last_insn ();
9349 /* If the last insn was a BARRIER, we don't have to write anything except
9351 if (GET_CODE (insn) == NOTE)
9352 insn = prev_nonnote_insn (insn);
9353 if (insn == 0 || GET_CODE (insn) != BARRIER)
9355 /* This is slightly ugly, but at least we don't have two
9356 copies of the epilogue-emitting code. */
9359 /* A NOTE_INSN_DELETED is supposed to be at the start
9360 and end of the "toplevel" insn chain. */
9361 emit_note (0, NOTE_INSN_DELETED);
9362 rs6000_emit_epilogue (FALSE);
9363 emit_note (0, NOTE_INSN_DELETED);
9365 /* Expand INSN_ADDRESSES so final() doesn't crash. */
9369 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
9371 INSN_ADDRESSES_NEW (insn, addr);
9376 if (TARGET_DEBUG_STACK)
9377 debug_rtx_list (get_insns (), 100);
9378 final (get_insns (), file, FALSE, FALSE);
9383 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9386 We don't output a traceback table if -finhibit-size-directive was
9387 used. The documentation for -finhibit-size-directive reads
9388 ``don't output a @code{.size} assembler directive, or anything
9389 else that would cause trouble if the function is split in the
9390 middle, and the two halves are placed at locations far apart in
9391 memory.'' The traceback table has this property, since it
9392 includes the offset from the start of the function to the
9393 traceback table itself.
9395 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9396 different traceback table. */
9397 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9399 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9400 const char *language_string = lang_hooks.name;
9401 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9404 while (*fname == '.') /* V.4 encodes . in the name */
9407 /* Need label immediately before tbtab, so we can compute its offset
9408 from the function start. */
9411 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9412 ASM_OUTPUT_LABEL (file, fname);
9414 /* The .tbtab pseudo-op can only be used for the first eight
9415 expressions, since it can't handle the possibly variable
9416 length fields that follow. However, if you omit the optional
9417 fields, the assembler outputs zeros for all optional fields
9418 anyways, giving each variable length field is minimum length
9419 (as defined in sys/debug.h). Thus we can not use the .tbtab
9420 pseudo-op at all. */
9422 /* An all-zero word flags the start of the tbtab, for debuggers
9423 that have to find it by searching forward from the entry
9424 point or from the current pc. */
9425 fputs ("\t.long 0\n", file);
9427 /* Tbtab format type. Use format type 0. */
9428 fputs ("\t.byte 0,", file);
9430 /* Language type. Unfortunately, there doesn't seem to be any
9431 official way to get this info, so we use language_string. C
9432 is 0. C++ is 9. No number defined for Obj-C, so use the
9433 value for C for now. There is no official value for Java,
9434 although IBM appears to be using 13. There is no official value
9435 for Chill, so we've chosen 44 pseudo-randomly. */
9436 if (! strcmp (language_string, "GNU C")
9437 || ! strcmp (language_string, "GNU Objective-C"))
9439 else if (! strcmp (language_string, "GNU F77"))
9441 else if (! strcmp (language_string, "GNU Ada"))
9443 else if (! strcmp (language_string, "GNU Pascal"))
9445 else if (! strcmp (language_string, "GNU C++"))
9447 else if (! strcmp (language_string, "GNU Java"))
9449 else if (! strcmp (language_string, "GNU CHILL"))
9453 fprintf (file, "%d,", i);
9455 /* 8 single bit fields: global linkage (not set for C extern linkage,
9456 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9457 from start of procedure stored in tbtab, internal function, function
9458 has controlled storage, function has no toc, function uses fp,
9459 function logs/aborts fp operations. */
9460 /* Assume that fp operations are used if any fp reg must be saved. */
9461 fprintf (file, "%d,",
9462 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9464 /* 6 bitfields: function is interrupt handler, name present in
9465 proc table, function calls alloca, on condition directives
9466 (controls stack walks, 3 bits), saves condition reg, saves
9468 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9469 set up as a frame pointer, even when there is no alloca call. */
9470 fprintf (file, "%d,",
9471 ((optional_tbtab << 6)
9472 | ((optional_tbtab & frame_pointer_needed) << 5)
9473 | (info->cr_save_p << 1)
9474 | (info->lr_save_p)));
9476 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9478 fprintf (file, "%d,",
9479 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9481 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9482 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9486 /* Compute the parameter info from the function decl argument
9489 int next_parm_info_bit = 31;
9491 for (decl = DECL_ARGUMENTS (current_function_decl);
9492 decl; decl = TREE_CHAIN (decl))
9494 rtx parameter = DECL_INCOMING_RTL (decl);
9495 enum machine_mode mode = GET_MODE (parameter);
9497 if (GET_CODE (parameter) == REG)
9499 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9507 else if (mode == DFmode)
9512 /* If only one bit will fit, don't or in this entry. */
9513 if (next_parm_info_bit > 0)
9514 parm_info |= (bits << (next_parm_info_bit - 1));
9515 next_parm_info_bit -= 2;
9519 fixed_parms += ((GET_MODE_SIZE (mode)
9520 + (UNITS_PER_WORD - 1))
9522 next_parm_info_bit -= 1;
9528 /* Number of fixed point parameters. */
9529 /* This is actually the number of words of fixed point parameters; thus
9530 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9531 fprintf (file, "%d,", fixed_parms);
9533 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9535 /* This is actually the number of fp registers that hold parameters;
9536 and thus the maximum value is 13. */
9537 /* Set parameters on stack bit if parameters are not in their original
9538 registers, regardless of whether they are on the stack? Xlc
9539 seems to set the bit when not optimizing. */
9540 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9542 if (! optional_tbtab)
9545 /* Optional fields follow. Some are variable length. */
9547 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9549 /* There is an entry for each parameter in a register, in the order that
9550 they occur in the parameter list. Any intervening arguments on the
9551 stack are ignored. If the list overflows a long (max possible length
9552 34 bits) then completely leave off all elements that don't fit. */
9553 /* Only emit this long if there was at least one parameter. */
9554 if (fixed_parms || float_parms)
9555 fprintf (file, "\t.long %d\n", parm_info);
9557 /* Offset from start of code to tb table. */
9558 fputs ("\t.long ", file);
9559 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9561 RS6000_OUTPUT_BASENAME (file, fname);
9563 assemble_name (file, fname);
9567 RS6000_OUTPUT_BASENAME (file, fname);
9569 assemble_name (file, fname);
9573 /* Interrupt handler mask. */
9574 /* Omit this long, since we never set the interrupt handler bit
9577 /* Number of CTL (controlled storage) anchors. */
9578 /* Omit this long, since the has_ctl bit is never set above. */
9580 /* Displacement into stack of each CTL anchor. */
9581 /* Omit this list of longs, because there are no CTL anchors. */
9583 /* Length of function name. */
9584 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9586 /* Function name. */
9587 assemble_string (fname, strlen (fname));
9589 /* Register for alloca automatic storage; this is always reg 31.
9590 Only emit this if the alloca bit was set above. */
9591 if (frame_pointer_needed)
9592 fputs ("\t.byte 31\n", file);
9594 fputs ("\t.align 2\n", file);
9599 /* A C compound statement that outputs the assembler code for a thunk
9600 function, used to implement C++ virtual function calls with
9601 multiple inheritance. The thunk acts as a wrapper around a virtual
9602 function, adjusting the implicit object parameter before handing
9603 control off to the real function.
9605 First, emit code to add the integer DELTA to the location that
9606 contains the incoming first argument. Assume that this argument
9607 contains a pointer, and is the one used to pass the `this' pointer
9608 in C++. This is the incoming argument *before* the function
9609 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9610 values of all other incoming arguments.
9612 After the addition, emit code to jump to FUNCTION, which is a
9613 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9614 not touch the return address. Hence returning from FUNCTION will
9615 return to whoever called the current `thunk'.
9617 The effect must be as if FUNCTION had been called directly with the
9618 adjusted first argument. This macro is responsible for emitting
9619 all of the code for a thunk function; output_function_prologue()
9620 and output_function_epilogue() are not invoked.
9622 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9623 been extracted from it.) It might possibly be useful on some
9624 targets, but probably not.
9626 If you do not define this macro, the target-independent code in the
9627 C++ frontend will generate a less efficient heavyweight thunk that
9628 calls FUNCTION instead of jumping to it. The generic approach does
9629 not support varargs. */
9632 output_mi_thunk (file, thunk_fndecl, delta, function)
9634 tree thunk_fndecl ATTRIBUTE_UNUSED;
9638 const char *this_reg =
9639 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9642 const char *r0 = reg_names[0];
9643 const char *toc = reg_names[2];
9644 const char *schain = reg_names[11];
9645 const char *r12 = reg_names[12];
9647 static int labelno = 0;
9649 /* Small constants that can be done by one add instruction. */
9650 if (delta >= -32768 && delta <= 32767)
9652 if (! TARGET_NEW_MNEMONICS)
9653 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9655 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9658 /* Large constants that can be done by one addis instruction. */
9659 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9660 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9663 /* 32-bit constants that can be done by an add and addis instruction. */
9664 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9666 /* Break into two pieces, propagating the sign bit from the low
9667 word to the upper word. */
9668 int delta_high = delta >> 16;
9669 int delta_low = delta & 0xffff;
9670 if ((delta_low & 0x8000) != 0)
9673 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9676 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9679 if (! TARGET_NEW_MNEMONICS)
9680 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9682 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9685 /* 64-bit constants, fixme */
9689 /* Get the prefix in front of the names. */
9690 switch (DEFAULT_ABI)
9700 case ABI_AIX_NODESC:
9705 /* If the function is compiled in this module, jump to it directly.
9706 Otherwise, load up its address and jump to it. */
9708 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9710 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9711 && (! lookup_attribute ("longcall",
9712 TYPE_ATTRIBUTES (TREE_TYPE (function)))
9713 || lookup_attribute ("shortcall",
9714 TYPE_ATTRIBUTES (TREE_TYPE (function)))))
9717 fprintf (file, "\tb %s", prefix);
9718 assemble_name (file, fname);
9719 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9725 switch (DEFAULT_ABI)
9731 /* Set up a TOC entry for the function. */
9732 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9734 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9737 if (TARGET_MINIMAL_TOC)
9738 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9741 fputs ("\t.tc ", file);
9742 assemble_name (file, fname);
9743 fputs ("[TC],", file);
9745 assemble_name (file, fname);
9748 if (TARGET_MINIMAL_TOC)
9749 asm_fprintf (file, (TARGET_32BIT)
9750 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9751 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9752 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9753 assemble_name (file, buf);
9754 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9755 fputs ("-(.LCTOC1)", file);
9756 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9758 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9762 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9765 asm_fprintf (file, "\tmtctr %s\n", r0);
9767 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9770 asm_fprintf (file, "\tbctr\n");
9773 case ABI_AIX_NODESC:
9775 fprintf (file, "\tb %s", prefix);
9776 assemble_name (file, fname);
9777 if (flag_pic) fputs ("@plt", file);
9783 fprintf (file, "\tb %s", prefix);
9784 if (flag_pic && !machopic_name_defined_p (fname))
9785 assemble_name (file, machopic_stub_name (fname));
9787 assemble_name (file, fname);
9796 /* A quick summary of the various types of 'constant-pool tables'
9799 Target Flags Name One table per
9800 AIX (none) AIX TOC object file
9801 AIX -mfull-toc AIX TOC object file
9802 AIX -mminimal-toc AIX minimal TOC translation unit
9803 SVR4/EABI (none) SVR4 SDATA object file
9804 SVR4/EABI -fpic SVR4 pic object file
9805 SVR4/EABI -fPIC SVR4 PIC translation unit
9806 SVR4/EABI -mrelocatable EABI TOC function
9807 SVR4/EABI -maix AIX TOC object file
9808 SVR4/EABI -maix -mminimal-toc
9809 AIX minimal TOC translation unit
9811 Name Reg. Set by entries contains:
9812 made by addrs? fp? sum?
9814 AIX TOC 2 crt0 as Y option option
9815 AIX minimal TOC 30 prolog gcc Y Y option
9816 SVR4 SDATA 13 crt0 gcc N Y N
9817 SVR4 pic 30 prolog ld Y not yet N
9818 SVR4 PIC 30 prolog gcc Y option option
9819 EABI TOC 30 prolog gcc Y option option
9823 /* Hash table stuff for keeping track of TOC entries. */
9825 struct toc_hash_struct
9827 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9828 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
9830 enum machine_mode key_mode;
9834 static htab_t toc_hash_table;
9836 /* Hash functions for the hash table. */
9839 rs6000_hash_constant (k)
9842 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9843 const char *format = GET_RTX_FORMAT (GET_CODE (k));
9844 int flen = strlen (format);
9847 if (GET_CODE (k) == LABEL_REF)
9848 return result * 1231 + X0INT (XEXP (k, 0), 3);
9850 if (GET_CODE (k) == CODE_LABEL)
9855 for (; fidx < flen; fidx++)
9856 switch (format[fidx])
9861 const char *str = XSTR (k, fidx);
9863 result = result * 613 + len;
9864 for (i = 0; i < len; i++)
9865 result = result * 613 + (unsigned) str[i];
9870 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9874 result = result * 613 + (unsigned) XINT (k, fidx);
9877 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9878 result = result * 613 + (unsigned) XWINT (k, fidx);
9882 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9883 result = result * 613 + (unsigned) (XWINT (k, fidx)
9894 toc_hash_function (hash_entry)
9895 const void * hash_entry;
9897 const struct toc_hash_struct *thc =
9898 (const struct toc_hash_struct *) hash_entry;
9899 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9902 /* Compare H1 and H2 for equivalence. */
9905 toc_hash_eq (h1, h2)
9909 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9910 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9912 if (((const struct toc_hash_struct *) h1)->key_mode
9913 != ((const struct toc_hash_struct *) h2)->key_mode)
9916 return rtx_equal_p (r1, r2);
9919 /* Mark the hash table-entry HASH_ENTRY. */
9922 toc_hash_mark_entry (hash_slot, unused)
9924 void * unused ATTRIBUTE_UNUSED;
9926 const struct toc_hash_struct * hash_entry =
9927 *(const struct toc_hash_struct **) hash_slot;
9928 rtx r = hash_entry->key;
9929 ggc_set_mark (hash_entry);
9930 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
9931 if (GET_CODE (r) == LABEL_REF)
9934 ggc_set_mark (XEXP (r, 0));
9941 /* Mark all the elements of the TOC hash-table *HT. */
9944 toc_hash_mark_table (vht)
9949 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9952 /* These are the names given by the C++ front-end to vtables, and
9953 vtable-like objects. Ideally, this logic should not be here;
9954 instead, there should be some programmatic way of inquiring as
9955 to whether or not an object is a vtable. */
9957 #define VTABLE_NAME_P(NAME) \
9958 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
9959 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
9960 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
9961 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
9964 rs6000_output_symbol_ref (file, x)
9968 /* Currently C++ toc references to vtables can be emitted before it
9969 is decided whether the vtable is public or private. If this is
9970 the case, then the linker will eventually complain that there is
9971 a reference to an unknown section. Thus, for vtables only,
9972 we emit the TOC reference to reference the symbol and not the
9974 const char *name = XSTR (x, 0);
9976 if (VTABLE_NAME_P (name))
9978 RS6000_OUTPUT_BASENAME (file, name);
9981 assemble_name (file, name);
9984 /* Output a TOC entry. We derive the entry name from what is being
9988 output_toc (file, x, labelno, mode)
9992 enum machine_mode mode;
9995 const char *name = buf;
9996 const char *real_name;
10003 /* When the linker won't eliminate them, don't output duplicate
10004 TOC entries (this happens on AIX if there is any kind of TOC,
10005 and on SVR4 under -fPIC or -mrelocatable). */
10008 struct toc_hash_struct *h;
10011 h = ggc_alloc (sizeof (*h));
10013 h->key_mode = mode;
10014 h->labelno = labelno;
10016 found = htab_find_slot (toc_hash_table, h, 1);
10017 if (*found == NULL)
10019 else /* This is indeed a duplicate.
10020 Set this label equal to that label. */
10022 fputs ("\t.set ", file);
10023 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10024 fprintf (file, "%d,", labelno);
10025 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10026 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
10032 /* If we're going to put a double constant in the TOC, make sure it's
10033 aligned properly when strict alignment is on. */
10034 if (GET_CODE (x) == CONST_DOUBLE
10035 && STRICT_ALIGNMENT
10036 && GET_MODE_BITSIZE (mode) >= 64
10037 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
10038 ASM_OUTPUT_ALIGN (file, 3);
10041 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
10043 /* Handle FP constants specially. Note that if we have a minimal
10044 TOC, things we put here aren't actually in the TOC, so we can allow
10046 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
10048 REAL_VALUE_TYPE rv;
10051 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10052 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
10056 if (TARGET_MINIMAL_TOC)
10057 fputs (DOUBLE_INT_ASM_OP, file);
10059 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
10060 k[0] & 0xffffffff, k[1] & 0xffffffff);
10061 fprintf (file, "0x%lx%08lx\n",
10062 k[0] & 0xffffffff, k[1] & 0xffffffff);
10067 if (TARGET_MINIMAL_TOC)
10068 fputs ("\t.long ", file);
10070 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
10071 k[0] & 0xffffffff, k[1] & 0xffffffff);
10072 fprintf (file, "0x%lx,0x%lx\n",
10073 k[0] & 0xffffffff, k[1] & 0xffffffff);
10077 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
10079 REAL_VALUE_TYPE rv;
10082 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10083 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
10087 if (TARGET_MINIMAL_TOC)
10088 fputs (DOUBLE_INT_ASM_OP, file);
10090 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
10091 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
10096 if (TARGET_MINIMAL_TOC)
10097 fputs ("\t.long ", file);
10099 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
10100 fprintf (file, "0x%lx\n", l & 0xffffffff);
10104 else if (GET_MODE (x) == VOIDmode
10105 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10107 unsigned HOST_WIDE_INT low;
10108 HOST_WIDE_INT high;
10110 if (GET_CODE (x) == CONST_DOUBLE)
10112 low = CONST_DOUBLE_LOW (x);
10113 high = CONST_DOUBLE_HIGH (x);
10116 #if HOST_BITS_PER_WIDE_INT == 32
10119 high = (low & 0x80000000) ? ~0 : 0;
10123 low = INTVAL (x) & 0xffffffff;
10124 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10128 /* TOC entries are always Pmode-sized, but since this
10129 is a bigendian machine then if we're putting smaller
10130 integer constants in the TOC we have to pad them.
10131 (This is still a win over putting the constants in
10132 a separate constant pool, because then we'd have
10133 to have both a TOC entry _and_ the actual constant.)
10135 For a 32-bit target, CONST_INT values are loaded and shifted
10136 entirely within `low' and can be stored in one TOC entry. */
10138 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10139 abort ();/* It would be easy to make this work, but it doesn't now. */
10141 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10142 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10143 POINTER_SIZE, &low, &high, 0);
10147 if (TARGET_MINIMAL_TOC)
10148 fputs (DOUBLE_INT_ASM_OP, file);
10150 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10151 (long) high & 0xffffffff, (long) low & 0xffffffff);
10152 fprintf (file, "0x%lx%08lx\n",
10153 (long) high & 0xffffffff, (long) low & 0xffffffff);
10158 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10160 if (TARGET_MINIMAL_TOC)
10161 fputs ("\t.long ", file);
10163 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10164 (long) high & 0xffffffff, (long) low & 0xffffffff);
10165 fprintf (file, "0x%lx,0x%lx\n",
10166 (long) high & 0xffffffff, (long) low & 0xffffffff);
10170 if (TARGET_MINIMAL_TOC)
10171 fputs ("\t.long ", file);
10173 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
10174 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
10180 if (GET_CODE (x) == CONST)
10182 if (GET_CODE (XEXP (x, 0)) != PLUS)
10185 base = XEXP (XEXP (x, 0), 0);
10186 offset = INTVAL (XEXP (XEXP (x, 0), 1));
10189 if (GET_CODE (base) == SYMBOL_REF)
10190 name = XSTR (base, 0);
10191 else if (GET_CODE (base) == LABEL_REF)
10192 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10193 else if (GET_CODE (base) == CODE_LABEL)
10194 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10198 real_name = (*targetm.strip_name_encoding) (name);
10199 if (TARGET_MINIMAL_TOC)
10200 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10203 fprintf (file, "\t.tc %s", real_name);
10206 fprintf (file, ".N%d", - offset);
10208 fprintf (file, ".P%d", offset);
10210 fputs ("[TC],", file);
10213 /* Currently C++ toc references to vtables can be emitted before it
10214 is decided whether the vtable is public or private. If this is
10215 the case, then the linker will eventually complain that there is
10216 a TOC reference to an unknown section. Thus, for vtables only,
10217 we emit the TOC reference to reference the symbol and not the
10219 if (VTABLE_NAME_P (name))
10221 RS6000_OUTPUT_BASENAME (file, name);
10223 fprintf (file, "%d", offset);
10224 else if (offset > 0)
10225 fprintf (file, "+%d", offset);
10228 output_addr_const (file, x);
10232 /* Output an assembler pseudo-op to write an ASCII string of N characters
10233 starting at P to FILE.
10235 On the RS/6000, we have to do this using the .byte operation and
10236 write out special characters outside the quoted string.
10237 Also, the assembler is broken; very long strings are truncated,
10238 so we must artificially break them up early. */
10241 output_ascii (file, p, n)
10247 int i, count_string;
10248 const char *for_string = "\t.byte \"";
10249 const char *for_decimal = "\t.byte ";
10250 const char *to_close = NULL;
10253 for (i = 0; i < n; i++)
10256 if (c >= ' ' && c < 0177)
10259 fputs (for_string, file);
10262 /* Write two quotes to get one. */
10270 for_decimal = "\"\n\t.byte ";
10274 if (count_string >= 512)
10276 fputs (to_close, file);
10278 for_string = "\t.byte \"";
10279 for_decimal = "\t.byte ";
10287 fputs (for_decimal, file);
10288 fprintf (file, "%d", c);
10290 for_string = "\n\t.byte \"";
10291 for_decimal = ", ";
10297 /* Now close the string if we have written one. Then end the line. */
10299 fputs (to_close, file);
10302 /* Generate a unique section name for FILENAME for a section type
10303 represented by SECTION_DESC. Output goes into BUF.
10305 SECTION_DESC can be any string, as long as it is different for each
10306 possible section type.
10308 We name the section in the same manner as xlc. The name begins with an
10309 underscore followed by the filename (after stripping any leading directory
10310 names) with the last period replaced by the string SECTION_DESC. If
10311 FILENAME does not contain a period, SECTION_DESC is appended to the end of
10315 rs6000_gen_section_name (buf, filename, section_desc)
10317 const char *filename;
10318 const char *section_desc;
10320 const char *q, *after_last_slash, *last_period = 0;
10324 after_last_slash = filename;
10325 for (q = filename; *q; q++)
10328 after_last_slash = q + 1;
10329 else if (*q == '.')
10333 len = strlen (after_last_slash) + strlen (section_desc) + 2;
10334 *buf = (char *) permalloc (len);
10339 for (q = after_last_slash; *q; q++)
10341 if (q == last_period)
10343 strcpy (p, section_desc);
10344 p += strlen (section_desc);
10347 else if (ISALNUM (*q))
10351 if (last_period == 0)
10352 strcpy (p, section_desc);
10357 /* Emit profile function. */
10360 output_profile_hook (labelno)
10363 if (DEFAULT_ABI == ABI_AIX)
10366 const char *label_name;
10369 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10370 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
10371 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10373 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10376 else if (DEFAULT_ABI == ABI_DARWIN)
10378 const char *mcount_name = RS6000_MCOUNT;
10379 int caller_addr_regno = LINK_REGISTER_REGNUM;
10381 /* Be conservative and always set this, at least for now. */
10382 current_function_uses_pic_offset_table = 1;
10385 /* For PIC code, set up a stub and collect the caller's address
10386 from r0, which is where the prologue puts it. */
10389 mcount_name = machopic_stub_name (mcount_name);
10390 if (current_function_uses_pic_offset_table)
10391 caller_addr_regno = 0;
10394 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10396 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10400 /* Write function profiler code. */
10403 output_function_profiler (file, labelno)
10409 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10410 switch (DEFAULT_ABI)
10416 case ABI_AIX_NODESC:
10417 fprintf (file, "\tmflr %s\n", reg_names[0]);
10420 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10421 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10422 reg_names[0], reg_names[1]);
10423 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10424 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10425 assemble_name (file, buf);
10426 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
10428 else if (flag_pic > 1)
10430 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10431 reg_names[0], reg_names[1]);
10432 /* Now, we need to get the address of the label. */
10433 fputs ("\tbl 1f\n\t.long ", file);
10434 assemble_name (file, buf);
10435 fputs ("-.\n1:", file);
10436 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10437 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10438 reg_names[0], reg_names[11]);
10439 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10440 reg_names[0], reg_names[0], reg_names[11]);
10444 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10445 assemble_name (file, buf);
10446 fputs ("@ha\n", file);
10447 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10448 reg_names[0], reg_names[1]);
10449 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10450 assemble_name (file, buf);
10451 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
10454 if (current_function_needs_context)
10455 asm_fprintf (file, "\tmr %s,%s\n",
10456 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10457 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10458 if (current_function_needs_context)
10459 asm_fprintf (file, "\tmr %s,%s\n",
10460 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10465 /* Don't do anything, done in output_profile_hook (). */
10471 /* Adjust the cost of a scheduling dependency. Return the new cost of
10472 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
10475 rs6000_adjust_cost (insn, link, dep_insn, cost)
10478 rtx dep_insn ATTRIBUTE_UNUSED;
10481 if (! recog_memoized (insn))
10484 if (REG_NOTE_KIND (link) != 0)
10487 if (REG_NOTE_KIND (link) == 0)
10489 /* Data dependency; DEP_INSN writes a register that INSN reads
10490 some cycles later. */
10491 switch (get_attr_type (insn))
10494 /* Tell the first scheduling pass about the latency between
10495 a mtctr and bctr (and mtlr and br/blr). The first
10496 scheduling pass will not know about this latency since
10497 the mtctr instruction, which has the latency associated
10498 to it, will be generated by reload. */
10499 return TARGET_POWER ? 5 : 4;
10501 /* Leave some extra cycles between a compare and its
10502 dependent branch, to inhibit expensive mispredicts. */
10503 if ((rs6000_cpu_attr == CPU_PPC603
10504 || rs6000_cpu_attr == CPU_PPC604
10505 || rs6000_cpu_attr == CPU_PPC604E
10506 || rs6000_cpu_attr == CPU_PPC620
10507 || rs6000_cpu_attr == CPU_PPC630
10508 || rs6000_cpu_attr == CPU_PPC750
10509 || rs6000_cpu_attr == CPU_PPC7400
10510 || rs6000_cpu_attr == CPU_PPC7450
10511 || rs6000_cpu_attr == CPU_POWER4)
10512 && recog_memoized (dep_insn)
10513 && (INSN_CODE (dep_insn) >= 0)
10514 && (get_attr_type (dep_insn) == TYPE_COMPARE
10515 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10516 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10517 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10522 /* Fall out to return default cost. */
10528 /* A C statement (sans semicolon) to update the integer scheduling
10529 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10530 INSN earlier, increase the priority to execute INSN later. Do not
10531 define this macro if you do not need to adjust the scheduling
10532 priorities of insns. */
10535 rs6000_adjust_priority (insn, priority)
10536 rtx insn ATTRIBUTE_UNUSED;
10539 /* On machines (like the 750) which have asymmetric integer units,
10540 where one integer unit can do multiply and divides and the other
10541 can't, reduce the priority of multiply/divide so it is scheduled
10542 before other integer operations. */
10545 if (! INSN_P (insn))
10548 if (GET_CODE (PATTERN (insn)) == USE)
10551 switch (rs6000_cpu_attr) {
10553 switch (get_attr_type (insn))
10560 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10561 priority, priority);
10562 if (priority >= 0 && priority < 0x01000000)
10572 /* Return how many instructions the machine can issue per cycle. */
10575 rs6000_issue_rate ()
10577 switch (rs6000_cpu_attr) {
10578 case CPU_RIOS1: /* ? */
10580 case CPU_PPC601: /* ? */
10600 /* Length in units of the trampoline for entering a nested function. */
10603 rs6000_trampoline_size ()
10607 switch (DEFAULT_ABI)
10613 ret = (TARGET_32BIT) ? 12 : 24;
10618 case ABI_AIX_NODESC:
10619 ret = (TARGET_32BIT) ? 40 : 48;
10626 /* Emit RTL insns to initialize the variable parts of a trampoline.
10627 FNADDR is an RTX for the address of the function's pure code.
10628 CXT is an RTX for the static chain value for the function. */
10631 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10636 enum machine_mode pmode = Pmode;
10637 int regsize = (TARGET_32BIT) ? 4 : 8;
10638 rtx ctx_reg = force_reg (pmode, cxt);
10640 switch (DEFAULT_ABI)
10645 /* Macros to shorten the code expansions below. */
10646 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10647 #define MEM_PLUS(addr,offset) \
10648 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10650 /* Under AIX, just build the 3 word function descriptor */
10653 rtx fn_reg = gen_reg_rtx (pmode);
10654 rtx toc_reg = gen_reg_rtx (pmode);
10655 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10656 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10657 emit_move_insn (MEM_DEREF (addr), fn_reg);
10658 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10659 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10663 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10666 case ABI_AIX_NODESC:
10667 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10668 FALSE, VOIDmode, 4,
10670 GEN_INT (rs6000_trampoline_size ()), SImode,
10680 /* Table of valid machine attributes. */
10682 const struct attribute_spec rs6000_attribute_table[] =
10684 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10685 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10686 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10687 { NULL, 0, 0, false, false, false, NULL }
10690 /* Handle a "longcall" or "shortcall" attribute; arguments as in
10691 struct attribute_spec.handler. */
10694 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10697 tree args ATTRIBUTE_UNUSED;
10698 int flags ATTRIBUTE_UNUSED;
10699 bool *no_add_attrs;
10701 if (TREE_CODE (*node) != FUNCTION_TYPE
10702 && TREE_CODE (*node) != FIELD_DECL
10703 && TREE_CODE (*node) != TYPE_DECL)
10705 warning ("`%s' attribute only applies to functions",
10706 IDENTIFIER_POINTER (name));
10707 *no_add_attrs = true;
10713 /* Set longcall attributes on all functions declared when
10714 rs6000_default_long_calls is true. */
10716 rs6000_set_default_type_attributes (type)
10719 if (rs6000_default_long_calls
10720 && (TREE_CODE (type) == FUNCTION_TYPE
10721 || TREE_CODE (type) == METHOD_TYPE))
10722 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
10724 TYPE_ATTRIBUTES (type));
10727 /* Return a reference suitable for calling a function with the
10728 longcall attribute. */
10731 rs6000_longcall_ref (call_ref)
10734 const char *call_name;
10737 if (GET_CODE (call_ref) != SYMBOL_REF)
10740 /* System V adds '.' to the internal name, so skip them. */
10741 call_name = XSTR (call_ref, 0);
10742 if (*call_name == '.')
10744 while (*call_name == '.')
10747 node = get_identifier (call_name);
10748 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10751 return force_reg (Pmode, call_ref);
10755 #ifdef USING_ELFOS_H
10757 /* A C statement or statements to switch to the appropriate section
10758 for output of RTX in mode MODE. You can assume that RTX is some
10759 kind of constant in RTL. The argument MODE is redundant except in
10760 the case of a `const_int' rtx. Select the section by calling
10761 `text_section' or one of the alternatives for other sections.
10763 Do not define this macro if you put all constants in the read-only
10767 rs6000_elf_select_rtx_section (mode, x, align)
10768 enum machine_mode mode;
10770 unsigned HOST_WIDE_INT align;
10772 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10775 default_elf_select_rtx_section (mode, x, align);
10778 /* A C statement or statements to switch to the appropriate
10779 section for output of DECL. DECL is either a `VAR_DECL' node
10780 or a constant of some sort. RELOC indicates whether forming
10781 the initial value of DECL requires link-time relocations. */
10784 rs6000_elf_select_section (decl, reloc, align)
10787 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
10789 int size = int_size_in_bytes (TREE_TYPE (decl));
10792 static void (* const sec_funcs[4]) PARAMS ((void)) = {
10793 &readonly_data_section,
10799 needs_sdata = (size > 0
10800 && size <= g_switch_value
10801 && rs6000_sdata != SDATA_NONE
10802 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10804 if (TREE_CODE (decl) == STRING_CST)
10805 readonly = ! flag_writable_strings;
10806 else if (TREE_CODE (decl) == VAR_DECL)
10807 readonly = (! (flag_pic && reloc)
10808 && TREE_READONLY (decl)
10809 && ! TREE_SIDE_EFFECTS (decl)
10810 && DECL_INITIAL (decl)
10811 && DECL_INITIAL (decl) != error_mark_node
10812 && TREE_CONSTANT (DECL_INITIAL (decl)));
10813 else if (TREE_CODE (decl) == CONSTRUCTOR)
10814 readonly = (! (flag_pic && reloc)
10815 && ! TREE_SIDE_EFFECTS (decl)
10816 && TREE_CONSTANT (decl));
10819 if (needs_sdata && rs6000_sdata != SDATA_EABI)
10822 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10825 /* A C statement to build up a unique section name, expressed as a
10826 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10827 RELOC indicates whether the initial value of EXP requires
10828 link-time relocations. If you do not define this macro, GCC will use
10829 the symbol name prefixed by `.' as the section name. Note - this
10830 macro can now be called for uninitialized data items as well as
10831 initialised data and functions. */
10834 rs6000_elf_unique_section (decl, reloc)
10842 const char *prefix;
10844 static const char *const prefixes[7][2] =
10846 { ".rodata.", ".gnu.linkonce.r." },
10847 { ".sdata2.", ".gnu.linkonce.s2." },
10848 { ".data.", ".gnu.linkonce.d." },
10849 { ".sdata.", ".gnu.linkonce.s." },
10850 { ".bss.", ".gnu.linkonce.b." },
10851 { ".sbss.", ".gnu.linkonce.sb." },
10852 { ".text.", ".gnu.linkonce.t." }
10855 if (TREE_CODE (decl) == FUNCTION_DECL)
10864 if (TREE_CODE (decl) == STRING_CST)
10865 readonly = ! flag_writable_strings;
10866 else if (TREE_CODE (decl) == VAR_DECL)
10867 readonly = (! (flag_pic && reloc)
10868 && TREE_READONLY (decl)
10869 && ! TREE_SIDE_EFFECTS (decl)
10870 && TREE_CONSTANT (DECL_INITIAL (decl)));
10872 size = int_size_in_bytes (TREE_TYPE (decl));
10873 needs_sdata = (size > 0
10874 && size <= g_switch_value
10875 && rs6000_sdata != SDATA_NONE
10876 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10878 if (DECL_INITIAL (decl) == 0
10879 || DECL_INITIAL (decl) == error_mark_node)
10881 else if (! readonly)
10888 /* .sdata2 is only for EABI. */
10889 if (sec == 0 && rs6000_sdata != SDATA_EABI)
10895 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
10896 name = (*targetm.strip_name_encoding) (name);
10897 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10898 len = strlen (name) + strlen (prefix);
10899 string = alloca (len + 1);
10901 sprintf (string, "%s%s", prefix, name);
10903 DECL_SECTION_NAME (decl) = build_string (len, string);
10907 /* If we are referencing a function that is static or is known to be
10908 in this file, make the SYMBOL_REF special. We can use this to indicate
10909 that we can branch to this function without emitting a no-op after the
10910 call. For real AIX calling sequences, we also replace the
10911 function name with the real name (1 or 2 leading .'s), rather than
10912 the function descriptor name. This saves a lot of overriding code
10913 to read the prefixes. */
10916 rs6000_elf_encode_section_info (decl, first)
10923 if (TREE_CODE (decl) == FUNCTION_DECL)
10925 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10926 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
10927 && ! DECL_WEAK (decl))
10928 SYMBOL_REF_FLAG (sym_ref) = 1;
10930 if (DEFAULT_ABI == ABI_AIX)
10932 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
10933 size_t len2 = strlen (XSTR (sym_ref, 0));
10934 char *str = alloca (len1 + len2 + 1);
10937 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
10939 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
10942 else if (rs6000_sdata != SDATA_NONE
10943 && DEFAULT_ABI == ABI_V4
10944 && TREE_CODE (decl) == VAR_DECL)
10946 int size = int_size_in_bytes (TREE_TYPE (decl));
10947 tree section_name = DECL_SECTION_NAME (decl);
10948 const char *name = (char *)0;
10953 if (TREE_CODE (section_name) == STRING_CST)
10955 name = TREE_STRING_POINTER (section_name);
10956 len = TREE_STRING_LENGTH (section_name);
10962 if ((size > 0 && size <= g_switch_value)
10964 && ((len == sizeof (".sdata") - 1
10965 && strcmp (name, ".sdata") == 0)
10966 || (len == sizeof (".sdata2") - 1
10967 && strcmp (name, ".sdata2") == 0)
10968 || (len == sizeof (".sbss") - 1
10969 && strcmp (name, ".sbss") == 0)
10970 || (len == sizeof (".sbss2") - 1
10971 && strcmp (name, ".sbss2") == 0)
10972 || (len == sizeof (".PPC.EMB.sdata0") - 1
10973 && strcmp (name, ".PPC.EMB.sdata0") == 0)
10974 || (len == sizeof (".PPC.EMB.sbss0") - 1
10975 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
10977 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10978 size_t len = strlen (XSTR (sym_ref, 0));
10979 char *str = alloca (len + 2);
10982 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
10983 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
10988 static const char *
10989 rs6000_elf_strip_name_encoding (str)
10992 while (*str == '*' || *str == '@')
10997 #endif /* USING_ELFOS_H */
11000 /* Return a REG that occurs in ADDR with coefficient 1.
11001 ADDR can be effectively incremented by incrementing REG.
11003 r0 is special and we must not select it as an address
11004 register by this routine since our caller will try to
11005 increment the returned register via an "la" instruction. */
11008 find_addr_reg (addr)
11011 while (GET_CODE (addr) == PLUS)
11013 if (GET_CODE (XEXP (addr, 0)) == REG
11014 && REGNO (XEXP (addr, 0)) != 0)
11015 addr = XEXP (addr, 0);
11016 else if (GET_CODE (XEXP (addr, 1)) == REG
11017 && REGNO (XEXP (addr, 1)) != 0)
11018 addr = XEXP (addr, 1);
11019 else if (CONSTANT_P (XEXP (addr, 0)))
11020 addr = XEXP (addr, 1);
11021 else if (CONSTANT_P (XEXP (addr, 1)))
11022 addr = XEXP (addr, 0);
11026 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
11032 rs6000_fatal_bad_address (op)
11035 fatal_insn ("bad address", op);
11038 /* Called to register all of our global variables with the garbage
11042 rs6000_add_gc_roots ()
11044 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
11045 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
11046 toc_hash_mark_table);
11052 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
11053 reference and a constant. */
11056 symbolic_operand (op)
11059 switch (GET_CODE (op))
11066 return (GET_CODE (op) == SYMBOL_REF ||
11067 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
11068 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
11069 && GET_CODE (XEXP (op, 1)) == CONST_INT);
11076 #ifdef RS6000_LONG_BRANCH
11078 static tree stub_list = 0;
11080 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11081 procedure calls to the linked list. */
11084 add_compiler_stub (label_name, function_name, line_number)
11086 tree function_name;
11089 tree stub = build_tree_list (function_name, label_name);
11090 TREE_TYPE (stub) = build_int_2 (line_number, 0);
11091 TREE_CHAIN (stub) = stub_list;
11095 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
11096 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
11097 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
11099 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11100 handling procedure calls from the linked list and initializes the
11104 output_compiler_stub ()
11107 char label_buf[256];
11109 tree tmp_stub, stub;
11112 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11114 fprintf (asm_out_file,
11115 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11117 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11118 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11119 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11120 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11122 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11124 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11127 label_buf[0] = '_';
11128 strcpy (label_buf+1,
11129 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
11132 strcpy (tmp_buf, "lis r12,hi16(");
11133 strcat (tmp_buf, label_buf);
11134 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11135 strcat (tmp_buf, label_buf);
11136 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11137 output_asm_insn (tmp_buf, 0);
11139 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11140 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11141 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11142 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11148 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11149 already there or not. */
11152 no_previous_def (function_name)
11153 tree function_name;
11156 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11157 if (function_name == STUB_FUNCTION_NAME (stub))
11162 /* GET_PREV_LABEL gets the label name from the previous definition of
11166 get_prev_label (function_name)
11167 tree function_name;
11170 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11171 if (function_name == STUB_FUNCTION_NAME (stub))
11172 return STUB_LABEL_NAME (stub);
11176 /* INSN is either a function call or a millicode call. It may have an
11177 unconditional jump in its delay slot.
11179 CALL_DEST is the routine we are calling. */
11182 output_call (insn, call_dest, operand_number)
11185 int operand_number;
11187 static char buf[256];
11188 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11191 tree funname = get_identifier (XSTR (call_dest, 0));
11193 if (no_previous_def (funname))
11196 rtx label_rtx = gen_label_rtx ();
11197 char *label_buf, temp_buf[256];
11198 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11199 CODE_LABEL_NUMBER (label_rtx));
11200 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11201 labelname = get_identifier (label_buf);
11202 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11204 line_number = NOTE_LINE_NUMBER (insn);
11205 add_compiler_stub (labelname, funname, line_number);
11208 labelname = get_prev_label (funname);
11210 sprintf (buf, "jbsr %%z%d,%.246s",
11211 operand_number, IDENTIFIER_POINTER (labelname));
11216 sprintf (buf, "bl %%z%d", operand_number);
11221 #endif /* RS6000_LONG_BRANCH */
11223 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11225 const char *const symbol_ = (SYMBOL); \
11226 char *buffer_ = (BUF); \
11227 if (symbol_[0] == '"') \
11229 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11231 else if (name_needs_quotes(symbol_)) \
11233 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11237 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11242 /* Generate PIC and indirect symbol stubs. */
11245 machopic_output_stub (file, symb, stub)
11247 const char *symb, *stub;
11249 unsigned int length;
11250 char *symbol_name, *lazy_ptr_name;
11251 char *local_label_0;
11252 static int label = 0;
11254 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11255 symb = (*targetm.strip_name_encoding) (symb);
11259 length = strlen (symb);
11260 symbol_name = alloca (length + 32);
11261 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11263 lazy_ptr_name = alloca (length + 32);
11264 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11266 local_label_0 = alloca (length + 32);
11267 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11270 machopic_picsymbol_stub_section ();
11272 machopic_symbol_stub_section ();
11274 fprintf (file, "%s:\n", stub);
11275 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11279 fprintf (file, "\tmflr r0\n");
11280 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11281 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11282 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11283 lazy_ptr_name, local_label_0);
11284 fprintf (file, "\tmtlr r0\n");
11285 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11286 lazy_ptr_name, local_label_0);
11287 fprintf (file, "\tmtctr r12\n");
11288 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11289 lazy_ptr_name, local_label_0);
11290 fprintf (file, "\tbctr\n");
11293 fprintf (file, "non-pure not supported\n");
11295 machopic_lazy_symbol_ptr_section ();
11296 fprintf (file, "%s:\n", lazy_ptr_name);
11297 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11298 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11301 /* Legitimize PIC addresses. If the address is already
11302 position-independent, we return ORIG. Newly generated
11303 position-independent addresses go into a reg. This is REG if non
11304 zero, otherwise we allocate register(s) as necessary. */
11306 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11309 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11311 enum machine_mode mode;
11316 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11317 reg = gen_reg_rtx (Pmode);
11319 if (GET_CODE (orig) == CONST)
11321 if (GET_CODE (XEXP (orig, 0)) == PLUS
11322 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11325 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11328 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11331 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11337 if (GET_CODE (offset) == CONST_INT)
11339 if (SMALL_INT (offset))
11340 return plus_constant (base, INTVAL (offset));
11341 else if (! reload_in_progress && ! reload_completed)
11342 offset = force_reg (Pmode, offset);
11345 rtx mem = force_const_mem (Pmode, orig);
11346 return machopic_legitimize_pic_address (mem, Pmode, reg);
11349 return gen_rtx (PLUS, Pmode, base, offset);
11352 /* Fall back on generic machopic code. */
11353 return machopic_legitimize_pic_address (orig, mode, reg);
11356 /* This is just a placeholder to make linking work without having to
11357 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11358 ever needed for Darwin (not too likely!) this would have to get a
11359 real definition. */
11366 #endif /* TARGET_MACHO */
11369 static unsigned int
11370 rs6000_elf_section_type_flags (decl, name, reloc)
11375 unsigned int flags = default_section_type_flags (decl, name, reloc);
11377 if (TARGET_RELOCATABLE)
11378 flags |= SECTION_WRITE;
11383 /* Record an element in the table of global constructors. SYMBOL is
11384 a SYMBOL_REF of the function to be called; PRIORITY is a number
11385 between 0 and MAX_INIT_PRIORITY.
11387 This differs from default_named_section_asm_out_constructor in
11388 that we have special handling for -mrelocatable. */
11391 rs6000_elf_asm_out_constructor (symbol, priority)
11395 const char *section = ".ctors";
11398 if (priority != DEFAULT_INIT_PRIORITY)
11400 sprintf (buf, ".ctors.%.5u",
11401 /* Invert the numbering so the linker puts us in the proper
11402 order; constructors are run from right to left, and the
11403 linker sorts in increasing order. */
11404 MAX_INIT_PRIORITY - priority);
11408 named_section_flags (section, SECTION_WRITE);
11409 assemble_align (POINTER_SIZE);
11411 if (TARGET_RELOCATABLE)
11413 fputs ("\t.long (", asm_out_file);
11414 output_addr_const (asm_out_file, symbol);
11415 fputs (")@fixup\n", asm_out_file);
11418 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11422 rs6000_elf_asm_out_destructor (symbol, priority)
11426 const char *section = ".dtors";
11429 if (priority != DEFAULT_INIT_PRIORITY)
11431 sprintf (buf, ".dtors.%.5u",
11432 /* Invert the numbering so the linker puts us in the proper
11433 order; constructors are run from right to left, and the
11434 linker sorts in increasing order. */
11435 MAX_INIT_PRIORITY - priority);
11439 named_section_flags (section, SECTION_WRITE);
11440 assemble_align (POINTER_SIZE);
11442 if (TARGET_RELOCATABLE)
11444 fputs ("\t.long (", asm_out_file);
11445 output_addr_const (asm_out_file, symbol);
11446 fputs (")@fixup\n", asm_out_file);
11449 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11455 xcoff_asm_named_section (name, flags)
11457 unsigned int flags ATTRIBUTE_UNUSED;
11459 fprintf (asm_out_file, "\t.csect %s\n", name);
11463 rs6000_xcoff_select_section (exp, reloc, align)
11466 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
11468 if ((TREE_CODE (exp) == STRING_CST
11469 && ! flag_writable_strings)
11470 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
11471 && TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp)
11472 && DECL_INITIAL (exp)
11473 && (DECL_INITIAL (exp) == error_mark_node
11474 || TREE_CONSTANT (DECL_INITIAL (exp)))
11477 if (TREE_PUBLIC (exp))
11478 read_only_data_section ();
11480 read_only_private_data_section ();
11484 if (TREE_PUBLIC (exp))
11487 private_data_section ();
11492 rs6000_xcoff_unique_section (decl, reloc)
11494 int reloc ATTRIBUTE_UNUSED;
11500 if (TREE_CODE (decl) == FUNCTION_DECL)
11502 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
11503 len = strlen (name) + 5;
11504 string = alloca (len + 1);
11505 sprintf (string, ".%s[PR]", name);
11506 DECL_SECTION_NAME (decl) = build_string (len, string);
11510 /* Select section for constant in constant pool.
11512 On RS/6000, all constants are in the private read-only data area.
11513 However, if this is being placed in the TOC it must be output as a
11517 rs6000_xcoff_select_rtx_section (mode, x, align)
11518 enum machine_mode mode;
11520 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
11522 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
11525 read_only_private_data_section ();
11528 /* Remove any trailing [DS] or the like from the symbol name. */
11530 static const char *
11531 rs6000_xcoff_strip_name_encoding (name)
11537 len = strlen (name);
11538 if (name[len - 1] == ']')
11539 return ggc_alloc_string (name, len - 4);
11544 #endif /* TARGET_XCOFF */
11546 /* Note that this is also used for ELF64. */
11549 rs6000_xcoff_encode_section_info (decl, first)
11551 int first ATTRIBUTE_UNUSED;
11553 if (TREE_CODE (decl) == FUNCTION_DECL
11554 && (TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
11555 && ! DECL_WEAK (decl))
11556 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;