1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Whether VRSAVE instructions should be generated. */
78 int rs6000_altivec_vrsave;
80 /* String from -mvrsave= option. */
81 const char *rs6000_altivec_vrsave_string;
83 /* Set to non-zero once AIX common-mode calls have been defined. */
84 static int common_mode_defined;
86 /* Save information from a "cmpxx" operation until the branch or scc is
88 rtx rs6000_compare_op0, rs6000_compare_op1;
89 int rs6000_compare_fp_p;
91 /* Label number of label created for -mrelocatable, to call to so we can
92 get the address of the GOT section */
93 int rs6000_pic_labelno;
96 /* Which abi to adhere to */
97 const char *rs6000_abi_name = RS6000_ABI_NAME;
99 /* Semantics of the small data area */
100 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
102 /* Which small data model to use */
103 const char *rs6000_sdata_name = (char *)0;
105 /* Counter for labels which are to be placed in .fixup. */
106 int fixuplabelno = 0;
109 /* ABI enumeration available for subtarget to use. */
110 enum rs6000_abi rs6000_current_abi;
112 /* ABI string from -mabi= option. */
113 const char *rs6000_abi_string;
116 const char *rs6000_debug_name;
117 int rs6000_debug_stack; /* debug stack applications */
118 int rs6000_debug_arg; /* debug argument handling */
120 /* Flag to say the TOC is initialized */
122 char toc_label_name[10];
124 /* Alias set for saves and restores from the rs6000 stack. */
125 static int rs6000_sr_alias_set;
127 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
128 The only place that looks at this is rs6000_set_default_type_attributes;
129 everywhere else should rely on the presence or absence of a longcall
130 attribute on the function declaration. */
131 int rs6000_default_long_calls;
132 const char *rs6000_longcall_switch;
134 static void rs6000_add_gc_roots PARAMS ((void));
135 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
136 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
137 static void validate_condition_mode
138 PARAMS ((enum rtx_code, enum machine_mode));
139 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
140 static void rs6000_maybe_dead PARAMS ((rtx));
141 static void rs6000_emit_stack_tie PARAMS ((void));
142 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
143 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
144 static unsigned rs6000_hash_constant PARAMS ((rtx));
145 static unsigned toc_hash_function PARAMS ((const void *));
146 static int toc_hash_eq PARAMS ((const void *, const void *));
147 static int toc_hash_mark_entry PARAMS ((void **, void *));
148 static void toc_hash_mark_table PARAMS ((void *));
149 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
150 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
151 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
152 static int rs6000_ra_ever_killed PARAMS ((void));
153 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
154 const struct attribute_spec rs6000_attribute_table[];
155 static void rs6000_set_default_type_attributes PARAMS ((tree));
156 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
157 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
158 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
159 HOST_WIDE_INT, HOST_WIDE_INT));
161 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
163 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
164 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
165 static void rs6000_elf_select_section PARAMS ((tree, int,
166 unsigned HOST_WIDE_INT));
167 static void rs6000_elf_unique_section PARAMS ((tree, int));
168 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
169 unsigned HOST_WIDE_INT));
170 static void rs6000_elf_encode_section_info PARAMS ((tree, int));
171 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
174 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
175 static void rs6000_xcoff_select_section PARAMS ((tree, int,
176 unsigned HOST_WIDE_INT));
177 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
178 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
179 unsigned HOST_WIDE_INT));
180 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
182 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
184 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
185 static int rs6000_adjust_priority PARAMS ((rtx, int));
186 static int rs6000_issue_rate PARAMS ((void));
188 static void rs6000_init_builtins PARAMS ((void));
189 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
190 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
191 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
192 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
193 static void altivec_init_builtins PARAMS ((void));
194 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
195 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
196 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
197 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
198 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
199 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
200 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
201 static void rs6000_parse_abi_options PARAMS ((void));
202 static void rs6000_parse_vrsave_option PARAMS ((void));
203 static int first_altivec_reg_to_save PARAMS ((void));
204 static unsigned int compute_vrsave_mask PARAMS ((void));
205 static void is_altivec_return_reg PARAMS ((rtx, void *));
206 int vrsave_operation PARAMS ((rtx, enum machine_mode));
207 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
208 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
209 static int easy_vector_constant PARAMS ((rtx));
211 /* Default register names. */
212 char rs6000_reg_names[][8] =
214 "0", "1", "2", "3", "4", "5", "6", "7",
215 "8", "9", "10", "11", "12", "13", "14", "15",
216 "16", "17", "18", "19", "20", "21", "22", "23",
217 "24", "25", "26", "27", "28", "29", "30", "31",
218 "0", "1", "2", "3", "4", "5", "6", "7",
219 "8", "9", "10", "11", "12", "13", "14", "15",
220 "16", "17", "18", "19", "20", "21", "22", "23",
221 "24", "25", "26", "27", "28", "29", "30", "31",
222 "mq", "lr", "ctr","ap",
223 "0", "1", "2", "3", "4", "5", "6", "7",
225 /* AltiVec registers. */
226 "0", "1", "2", "3", "4", "5", "6", "7",
227 "8", "9", "10", "11", "12", "13", "14", "15",
228 "16", "17", "18", "19", "20", "21", "22", "23",
229 "24", "25", "26", "27", "28", "29", "30", "31",
233 #ifdef TARGET_REGNAMES
234 static const char alt_reg_names[][8] =
236 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
237 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
238 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
239 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
240 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
241 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
242 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
243 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
244 "mq", "lr", "ctr", "ap",
245 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
247 /* AltiVec registers. */
248 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
249 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
250 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
251 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
256 #ifndef MASK_STRICT_ALIGN
257 #define MASK_STRICT_ALIGN 0
260 /* Initialize the GCC target structure. */
261 #undef TARGET_ATTRIBUTE_TABLE
262 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
263 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
264 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
266 #undef TARGET_ASM_ALIGNED_DI_OP
267 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
269 /* Default unaligned ops are only provided for ELF. Find the ops needed
270 for non-ELF systems. */
271 #ifndef OBJECT_FORMAT_ELF
273 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
275 #undef TARGET_ASM_UNALIGNED_HI_OP
276 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
277 #undef TARGET_ASM_UNALIGNED_SI_OP
278 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
279 #undef TARGET_ASM_UNALIGNED_DI_OP
280 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
283 #undef TARGET_ASM_UNALIGNED_HI_OP
284 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
285 #undef TARGET_ASM_UNALIGNED_SI_OP
286 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
290 /* This hook deals with fixups for relocatable code and DI-mode objects
292 #undef TARGET_ASM_INTEGER
293 #define TARGET_ASM_INTEGER rs6000_assemble_integer
295 #undef TARGET_ASM_FUNCTION_PROLOGUE
296 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
297 #undef TARGET_ASM_FUNCTION_EPILOGUE
298 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
301 #undef TARGET_SECTION_TYPE_FLAGS
302 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
305 #undef TARGET_SCHED_ISSUE_RATE
306 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
307 #undef TARGET_SCHED_ADJUST_COST
308 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
309 #undef TARGET_SCHED_ADJUST_PRIORITY
310 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
312 #undef TARGET_INIT_BUILTINS
313 #define TARGET_INIT_BUILTINS rs6000_init_builtins
315 #undef TARGET_EXPAND_BUILTIN
316 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
318 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
319 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
321 struct gcc_target targetm = TARGET_INITIALIZER;
323 /* Override command line options. Mostly we process the processor
324 type and sometimes adjust other TARGET_ options. */
327 rs6000_override_options (default_cpu)
328 const char *default_cpu;
331 struct rs6000_cpu_select *ptr;
333 /* Simplify the entries below by making a mask for any POWER
334 variant and any PowerPC variant. */
336 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
337 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
338 | MASK_PPC_GFXOPT | MASK_POWERPC64)
339 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
343 const char *const name; /* Canonical processor name. */
344 const enum processor_type processor; /* Processor type enum value. */
345 const int target_enable; /* Target flags to enable. */
346 const int target_disable; /* Target flags to disable. */
347 } const processor_target_table[]
348 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
349 POWER_MASKS | POWERPC_MASKS},
350 {"power", PROCESSOR_POWER,
351 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
352 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
353 {"power2", PROCESSOR_POWER,
354 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
355 POWERPC_MASKS | MASK_NEW_MNEMONICS},
356 {"power3", PROCESSOR_PPC630,
357 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
358 POWER_MASKS | MASK_PPC_GPOPT},
359 {"power4", PROCESSOR_POWER4,
360 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
361 POWER_MASKS | MASK_PPC_GPOPT},
362 {"powerpc", PROCESSOR_POWERPC,
363 MASK_POWERPC | MASK_NEW_MNEMONICS,
364 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
365 {"powerpc64", PROCESSOR_POWERPC64,
366 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
367 POWER_MASKS | POWERPC_OPT_MASKS},
368 {"rios", PROCESSOR_RIOS1,
369 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
370 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
371 {"rios1", PROCESSOR_RIOS1,
372 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
373 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
374 {"rsc", PROCESSOR_PPC601,
375 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
376 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
377 {"rsc1", PROCESSOR_PPC601,
378 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
379 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
380 {"rios2", PROCESSOR_RIOS2,
381 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
382 POWERPC_MASKS | MASK_NEW_MNEMONICS},
383 {"rs64a", PROCESSOR_RS64A,
384 MASK_POWERPC | MASK_NEW_MNEMONICS,
385 POWER_MASKS | POWERPC_OPT_MASKS},
386 {"401", PROCESSOR_PPC403,
387 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
388 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
389 {"403", PROCESSOR_PPC403,
390 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
391 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
392 {"405", PROCESSOR_PPC405,
393 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
394 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
395 {"505", PROCESSOR_MPCCORE,
396 MASK_POWERPC | MASK_NEW_MNEMONICS,
397 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
398 {"601", PROCESSOR_PPC601,
399 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
400 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
401 {"602", PROCESSOR_PPC603,
402 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
403 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
404 {"603", PROCESSOR_PPC603,
405 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
406 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
407 {"603e", PROCESSOR_PPC603,
408 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
409 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
410 {"ec603e", PROCESSOR_PPC603,
411 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
412 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
413 {"604", PROCESSOR_PPC604,
414 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
415 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
416 {"604e", PROCESSOR_PPC604e,
417 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
418 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
419 {"620", PROCESSOR_PPC620,
420 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
421 POWER_MASKS | MASK_PPC_GPOPT},
422 {"630", PROCESSOR_PPC630,
423 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
424 POWER_MASKS | MASK_PPC_GPOPT},
425 {"740", PROCESSOR_PPC750,
426 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
427 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
428 {"750", PROCESSOR_PPC750,
429 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
430 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
431 {"7400", PROCESSOR_PPC7400,
432 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
433 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
434 {"7450", PROCESSOR_PPC7450,
435 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
436 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
437 {"801", PROCESSOR_MPCCORE,
438 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
439 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
440 {"821", PROCESSOR_MPCCORE,
441 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
442 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
443 {"823", PROCESSOR_MPCCORE,
444 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
445 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
446 {"860", PROCESSOR_MPCCORE,
447 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
448 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
450 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
452 /* Save current -mmultiple/-mno-multiple status. */
453 int multiple = TARGET_MULTIPLE;
454 /* Save current -mstring/-mno-string status. */
455 int string = TARGET_STRING;
457 /* Identify the processor type. */
458 rs6000_select[0].string = default_cpu;
459 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
461 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
463 ptr = &rs6000_select[i];
464 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
466 for (j = 0; j < ptt_size; j++)
467 if (! strcmp (ptr->string, processor_target_table[j].name))
470 rs6000_cpu = processor_target_table[j].processor;
474 target_flags |= processor_target_table[j].target_enable;
475 target_flags &= ~processor_target_table[j].target_disable;
481 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
485 /* If we are optimizing big endian systems for space, use the store
486 multiple instructions. */
487 if (BYTES_BIG_ENDIAN && optimize_size)
488 target_flags |= MASK_MULTIPLE;
490 /* If -mmultiple or -mno-multiple was explicitly used, don't
491 override with the processor default */
492 if (TARGET_MULTIPLE_SET)
493 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
495 /* If -mstring or -mno-string was explicitly used, don't override
496 with the processor default. */
497 if (TARGET_STRING_SET)
498 target_flags = (target_flags & ~MASK_STRING) | string;
500 /* Don't allow -mmultiple or -mstring on little endian systems
501 unless the cpu is a 750, because the hardware doesn't support the
502 instructions used in little endian mode, and causes an alignment
503 trap. The 750 does not cause an alignment trap (except when the
504 target is unaligned). */
506 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
510 target_flags &= ~MASK_MULTIPLE;
511 if (TARGET_MULTIPLE_SET)
512 warning ("-mmultiple is not supported on little endian systems");
517 target_flags &= ~MASK_STRING;
518 if (TARGET_STRING_SET)
519 warning ("-mstring is not supported on little endian systems");
523 if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
528 warning ("-f%s ignored (all code is position independent)",
529 (flag_pic > 1) ? "PIC" : "pic");
532 #ifdef XCOFF_DEBUGGING_INFO
533 if (flag_function_sections && (write_symbols != NO_DEBUG)
534 && DEFAULT_ABI == ABI_AIX)
536 warning ("-ffunction-sections disabled on AIX when debugging");
537 flag_function_sections = 0;
540 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
542 warning ("-fdata-sections not supported on AIX");
543 flag_data_sections = 0;
547 /* Set debug flags */
548 if (rs6000_debug_name)
550 if (! strcmp (rs6000_debug_name, "all"))
551 rs6000_debug_stack = rs6000_debug_arg = 1;
552 else if (! strcmp (rs6000_debug_name, "stack"))
553 rs6000_debug_stack = 1;
554 else if (! strcmp (rs6000_debug_name, "arg"))
555 rs6000_debug_arg = 1;
557 error ("unknown -mdebug-%s switch", rs6000_debug_name);
560 /* Set size of long double */
561 rs6000_long_double_type_size = 64;
562 if (rs6000_long_double_size_string)
565 int size = strtol (rs6000_long_double_size_string, &tail, 10);
566 if (*tail != '\0' || (size != 64 && size != 128))
567 error ("Unknown switch -mlong-double-%s",
568 rs6000_long_double_size_string);
570 rs6000_long_double_type_size = size;
573 /* Handle -mabi= options. */
574 rs6000_parse_abi_options ();
576 /* Handle -mvrsave= option. */
577 rs6000_parse_vrsave_option ();
579 #ifdef SUBTARGET_OVERRIDE_OPTIONS
580 SUBTARGET_OVERRIDE_OPTIONS;
582 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
583 SUBSUBTARGET_OVERRIDE_OPTIONS;
586 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
587 using TARGET_OPTIONS to handle a toggle switch, but we're out of
588 bits in target_flags so TARGET_SWITCHES cannot be used.
589 Assumption here is that rs6000_longcall_switch points into the
590 text of the complete option, rather than being a copy, so we can
591 scan back for the presence or absence of the no- modifier. */
592 if (rs6000_longcall_switch)
594 const char *base = rs6000_longcall_switch;
595 while (base[-1] != 'm') base--;
597 if (*rs6000_longcall_switch != '\0')
598 error ("invalid option `%s'", base);
599 rs6000_default_long_calls = (base[0] != 'n');
602 #ifdef TARGET_REGNAMES
603 /* If the user desires alternate register names, copy in the
604 alternate names now. */
606 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
609 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
610 If -maix-struct-return or -msvr4-struct-return was explicitly
611 used, don't override with the ABI default. */
612 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
614 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
615 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
617 target_flags |= MASK_AIX_STRUCT_RET;
620 /* Register global variables with the garbage collector. */
621 rs6000_add_gc_roots ();
623 /* Allocate an alias set for register saves & restores from stack. */
624 rs6000_sr_alias_set = new_alias_set ();
627 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
629 /* We can only guarantee the availability of DI pseudo-ops when
630 assembling for 64-bit targets. */
633 targetm.asm_out.aligned_op.di = NULL;
634 targetm.asm_out.unaligned_op.di = NULL;
637 /* Arrange to save and restore machine status around nested functions. */
638 init_machine_status = rs6000_init_machine_status;
641 /* Handle -mvrsave= options. */
643 rs6000_parse_vrsave_option ()
645 /* Generate VRSAVE instructions by default. */
646 if (rs6000_altivec_vrsave_string == 0
647 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
648 rs6000_altivec_vrsave = 1;
649 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
650 rs6000_altivec_vrsave = 0;
652 error ("unknown -mvrsave= option specified: '%s'",
653 rs6000_altivec_vrsave_string);
656 /* Handle -mabi= options. */
658 rs6000_parse_abi_options ()
660 if (rs6000_abi_string == 0)
662 else if (! strcmp (rs6000_abi_string, "altivec"))
663 rs6000_altivec_abi = 1;
664 else if (! strcmp (rs6000_abi_string, "no-altivec"))
665 rs6000_altivec_abi = 0;
667 error ("unknown ABI specified: '%s'", rs6000_abi_string);
671 optimization_options (level, size)
672 int level ATTRIBUTE_UNUSED;
673 int size ATTRIBUTE_UNUSED;
677 /* Do anything needed at the start of the asm file. */
680 rs6000_file_start (file, default_cpu)
682 const char *default_cpu;
686 const char *start = buffer;
687 struct rs6000_cpu_select *ptr;
689 if (flag_verbose_asm)
691 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
692 rs6000_select[0].string = default_cpu;
694 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
696 ptr = &rs6000_select[i];
697 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
699 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
705 switch (rs6000_sdata)
707 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
708 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
709 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
710 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
713 if (rs6000_sdata && g_switch_value)
715 fprintf (file, "%s -G %d", start, g_switch_value);
725 /* Return non-zero if this function is known to have a null epilogue. */
730 if (reload_completed)
732 rs6000_stack_t *info = rs6000_stack_info ();
734 if (info->first_gp_reg_save == 32
735 && info->first_fp_reg_save == 64
736 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
739 && info->vrsave_mask == 0
747 /* Returns 1 always. */
750 any_operand (op, mode)
751 rtx op ATTRIBUTE_UNUSED;
752 enum machine_mode mode ATTRIBUTE_UNUSED;
757 /* Returns 1 if op is the count register. */
759 count_register_operand (op, mode)
761 enum machine_mode mode ATTRIBUTE_UNUSED;
763 if (GET_CODE (op) != REG)
766 if (REGNO (op) == COUNT_REGISTER_REGNUM)
769 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
775 /* Returns 1 if op is an altivec register. */
777 altivec_register_operand (op, mode)
779 enum machine_mode mode ATTRIBUTE_UNUSED;
782 return (register_operand (op, mode)
783 && (GET_CODE (op) != REG
784 || REGNO (op) > FIRST_PSEUDO_REGISTER
785 || ALTIVEC_REGNO_P (REGNO (op))));
789 xer_operand (op, mode)
791 enum machine_mode mode ATTRIBUTE_UNUSED;
793 if (GET_CODE (op) != REG)
796 if (XER_REGNO_P (REGNO (op)))
802 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
803 by such constants completes more quickly. */
806 s8bit_cint_operand (op, mode)
808 enum machine_mode mode ATTRIBUTE_UNUSED;
810 return ( GET_CODE (op) == CONST_INT
811 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
814 /* Return 1 if OP is a constant that can fit in a D field. */
817 short_cint_operand (op, mode)
819 enum machine_mode mode ATTRIBUTE_UNUSED;
821 return (GET_CODE (op) == CONST_INT
822 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
825 /* Similar for an unsigned D field. */
828 u_short_cint_operand (op, mode)
830 enum machine_mode mode ATTRIBUTE_UNUSED;
832 return (GET_CODE (op) == CONST_INT
833 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
836 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
839 non_short_cint_operand (op, mode)
841 enum machine_mode mode ATTRIBUTE_UNUSED;
843 return (GET_CODE (op) == CONST_INT
844 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
847 /* Returns 1 if OP is a CONST_INT that is a positive value
848 and an exact power of 2. */
851 exact_log2_cint_operand (op, mode)
853 enum machine_mode mode ATTRIBUTE_UNUSED;
855 return (GET_CODE (op) == CONST_INT
857 && exact_log2 (INTVAL (op)) >= 0);
860 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
864 gpc_reg_operand (op, mode)
866 enum machine_mode mode;
868 return (register_operand (op, mode)
869 && (GET_CODE (op) != REG
870 || (REGNO (op) >= ARG_POINTER_REGNUM
871 && !XER_REGNO_P (REGNO (op)))
872 || REGNO (op) < MQ_REGNO));
875 /* Returns 1 if OP is either a pseudo-register or a register denoting a
879 cc_reg_operand (op, mode)
881 enum machine_mode mode;
883 return (register_operand (op, mode)
884 && (GET_CODE (op) != REG
885 || REGNO (op) >= FIRST_PSEUDO_REGISTER
886 || CR_REGNO_P (REGNO (op))));
889 /* Returns 1 if OP is either a pseudo-register or a register denoting a
890 CR field that isn't CR0. */
893 cc_reg_not_cr0_operand (op, mode)
895 enum machine_mode mode;
897 return (register_operand (op, mode)
898 && (GET_CODE (op) != REG
899 || REGNO (op) >= FIRST_PSEUDO_REGISTER
900 || CR_REGNO_NOT_CR0_P (REGNO (op))));
903 /* Returns 1 if OP is either a constant integer valid for a D-field or
904 a non-special register. If a register, it must be in the proper
905 mode unless MODE is VOIDmode. */
908 reg_or_short_operand (op, mode)
910 enum machine_mode mode;
912 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
915 /* Similar, except check if the negation of the constant would be
916 valid for a D-field. */
919 reg_or_neg_short_operand (op, mode)
921 enum machine_mode mode;
923 if (GET_CODE (op) == CONST_INT)
924 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
926 return gpc_reg_operand (op, mode);
929 /* Returns 1 if OP is either a constant integer valid for a DS-field or
930 a non-special register. If a register, it must be in the proper
931 mode unless MODE is VOIDmode. */
934 reg_or_aligned_short_operand (op, mode)
936 enum machine_mode mode;
938 if (gpc_reg_operand (op, mode))
940 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
947 /* Return 1 if the operand is either a register or an integer whose
948 high-order 16 bits are zero. */
951 reg_or_u_short_operand (op, mode)
953 enum machine_mode mode;
955 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
958 /* Return 1 is the operand is either a non-special register or ANY
962 reg_or_cint_operand (op, mode)
964 enum machine_mode mode;
966 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
969 /* Return 1 is the operand is either a non-special register or ANY
970 32-bit signed constant integer. */
973 reg_or_arith_cint_operand (op, mode)
975 enum machine_mode mode;
977 return (gpc_reg_operand (op, mode)
978 || (GET_CODE (op) == CONST_INT
979 #if HOST_BITS_PER_WIDE_INT != 32
980 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
981 < (unsigned HOST_WIDE_INT) 0x100000000ll)
986 /* Return 1 is the operand is either a non-special register or a 32-bit
987 signed constant integer valid for 64-bit addition. */
990 reg_or_add_cint64_operand (op, mode)
992 enum machine_mode mode;
994 return (gpc_reg_operand (op, mode)
995 || (GET_CODE (op) == CONST_INT
996 #if HOST_BITS_PER_WIDE_INT == 32
997 && INTVAL (op) < 0x7fff8000
999 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1005 /* Return 1 is the operand is either a non-special register or a 32-bit
1006 signed constant integer valid for 64-bit subtraction. */
1009 reg_or_sub_cint64_operand (op, mode)
1011 enum machine_mode mode;
1013 return (gpc_reg_operand (op, mode)
1014 || (GET_CODE (op) == CONST_INT
1015 #if HOST_BITS_PER_WIDE_INT == 32
1016 && (- INTVAL (op)) < 0x7fff8000
1018 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1024 /* Return 1 is the operand is either a non-special register or ANY
1025 32-bit unsigned constant integer. */
1028 reg_or_logical_cint_operand (op, mode)
1030 enum machine_mode mode;
1032 if (GET_CODE (op) == CONST_INT)
1034 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1036 if (GET_MODE_BITSIZE (mode) <= 32)
1039 if (INTVAL (op) < 0)
1043 return ((INTVAL (op) & GET_MODE_MASK (mode)
1044 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1046 else if (GET_CODE (op) == CONST_DOUBLE)
1048 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1052 return CONST_DOUBLE_HIGH (op) == 0;
1055 return gpc_reg_operand (op, mode);
1058 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1061 got_operand (op, mode)
1063 enum machine_mode mode ATTRIBUTE_UNUSED;
1065 return (GET_CODE (op) == SYMBOL_REF
1066 || GET_CODE (op) == CONST
1067 || GET_CODE (op) == LABEL_REF);
1070 /* Return 1 if the operand is a simple references that can be loaded via
1071 the GOT (labels involving addition aren't allowed). */
1074 got_no_const_operand (op, mode)
1076 enum machine_mode mode ATTRIBUTE_UNUSED;
1078 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1081 /* Return the number of instructions it takes to form a constant in an
1082 integer register. */
1085 num_insns_constant_wide (value)
1086 HOST_WIDE_INT value;
1088 /* signed constant loadable with {cal|addi} */
1089 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1092 /* constant loadable with {cau|addis} */
1093 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1096 #if HOST_BITS_PER_WIDE_INT == 64
1097 else if (TARGET_POWERPC64)
1099 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1100 HOST_WIDE_INT high = value >> 31;
1102 if (high == 0 || high == -1)
1108 return num_insns_constant_wide (high) + 1;
1110 return (num_insns_constant_wide (high)
1111 + num_insns_constant_wide (low) + 1);
1120 num_insns_constant (op, mode)
1122 enum machine_mode mode;
1124 if (GET_CODE (op) == CONST_INT)
1126 #if HOST_BITS_PER_WIDE_INT == 64
1127 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1128 && mask64_operand (op, mode))
1132 return num_insns_constant_wide (INTVAL (op));
1135 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1140 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1141 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1142 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1145 else if (GET_CODE (op) == CONST_DOUBLE)
1151 int endian = (WORDS_BIG_ENDIAN == 0);
1153 if (mode == VOIDmode || mode == DImode)
1155 high = CONST_DOUBLE_HIGH (op);
1156 low = CONST_DOUBLE_LOW (op);
1160 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1161 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1163 low = l[1 - endian];
1167 return (num_insns_constant_wide (low)
1168 + num_insns_constant_wide (high));
1172 if (high == 0 && low >= 0)
1173 return num_insns_constant_wide (low);
1175 else if (high == -1 && low < 0)
1176 return num_insns_constant_wide (low);
1178 else if (mask64_operand (op, mode))
1182 return num_insns_constant_wide (high) + 1;
1185 return (num_insns_constant_wide (high)
1186 + num_insns_constant_wide (low) + 1);
1194 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1195 register with one instruction per word. We only do this if we can
1196 safely read CONST_DOUBLE_{LOW,HIGH}. */
1199 easy_fp_constant (op, mode)
1201 enum machine_mode mode;
1203 if (GET_CODE (op) != CONST_DOUBLE
1204 || GET_MODE (op) != mode
1205 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1208 /* Consider all constants with -msoft-float to be easy. */
1209 if (TARGET_SOFT_FLOAT && mode != DImode)
1212 /* If we are using V.4 style PIC, consider all constants to be hard. */
1213 if (flag_pic && DEFAULT_ABI == ABI_V4)
1216 #ifdef TARGET_RELOCATABLE
1217 /* Similarly if we are using -mrelocatable, consider all constants
1219 if (TARGET_RELOCATABLE)
1228 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1229 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1231 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1232 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1235 else if (mode == SFmode)
1240 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1241 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1243 return num_insns_constant_wide (l) == 1;
1246 else if (mode == DImode)
1247 return ((TARGET_POWERPC64
1248 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1249 || (num_insns_constant (op, DImode) <= 2));
1251 else if (mode == SImode)
1257 /* Return 1 if the operand is a CONST_INT and can be put into a
1258 register with one instruction. */
1261 easy_vector_constant (op)
1267 if (GET_CODE (op) != CONST_VECTOR)
1270 units = CONST_VECTOR_NUNITS (op);
1272 /* We can generate 0 easily. Look for that. */
1273 for (i = 0; i < units; ++i)
1275 elt = CONST_VECTOR_ELT (op, i);
1277 /* We could probably simplify this by just checking for equality
1278 with CONST0_RTX for the current mode, but let's be safe
1281 switch (GET_CODE (elt))
1284 if (INTVAL (elt) != 0)
1288 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1296 /* We could probably generate a few other constants trivially, but
1297 gcc doesn't generate them yet. FIXME later. */
1301 /* Return 1 if the operand is the constant 0. This works for scalars
1302 as well as vectors. */
1304 zero_constant (op, mode)
1306 enum machine_mode mode;
1308 return op == CONST0_RTX (mode);
1311 /* Return 1 if the operand is 0.0. */
1313 zero_fp_constant (op, mode)
1315 enum machine_mode mode;
1317 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1320 /* Return 1 if the operand is in volatile memory. Note that during
1321 the RTL generation phase, memory_operand does not return TRUE for
1322 volatile memory references. So this function allows us to
1323 recognize volatile references where its safe. */
1326 volatile_mem_operand (op, mode)
1328 enum machine_mode mode;
1330 if (GET_CODE (op) != MEM)
1333 if (!MEM_VOLATILE_P (op))
1336 if (mode != GET_MODE (op))
1339 if (reload_completed)
1340 return memory_operand (op, mode);
1342 if (reload_in_progress)
1343 return strict_memory_address_p (mode, XEXP (op, 0));
1345 return memory_address_p (mode, XEXP (op, 0));
1348 /* Return 1 if the operand is an offsettable memory operand. */
1351 offsettable_mem_operand (op, mode)
1353 enum machine_mode mode;
1355 return ((GET_CODE (op) == MEM)
1356 && offsettable_address_p (reload_completed || reload_in_progress,
1357 mode, XEXP (op, 0)));
1360 /* Return 1 if the operand is either an easy FP constant (see above) or
1364 mem_or_easy_const_operand (op, mode)
1366 enum machine_mode mode;
1368 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1371 /* Return 1 if the operand is either a non-special register or an item
1372 that can be used as the operand of a `mode' add insn. */
1375 add_operand (op, mode)
1377 enum machine_mode mode;
1379 if (GET_CODE (op) == CONST_INT)
1380 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1381 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1383 return gpc_reg_operand (op, mode);
1386 /* Return 1 if OP is a constant but not a valid add_operand. */
1389 non_add_cint_operand (op, mode)
1391 enum machine_mode mode ATTRIBUTE_UNUSED;
1393 return (GET_CODE (op) == CONST_INT
1394 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1395 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1398 /* Return 1 if the operand is a non-special register or a constant that
1399 can be used as the operand of an OR or XOR insn on the RS/6000. */
1402 logical_operand (op, mode)
1404 enum machine_mode mode;
1406 HOST_WIDE_INT opl, oph;
1408 if (gpc_reg_operand (op, mode))
1411 if (GET_CODE (op) == CONST_INT)
1413 opl = INTVAL (op) & GET_MODE_MASK (mode);
1415 #if HOST_BITS_PER_WIDE_INT <= 32
1416 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1420 else if (GET_CODE (op) == CONST_DOUBLE)
1422 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1425 opl = CONST_DOUBLE_LOW (op);
1426 oph = CONST_DOUBLE_HIGH (op);
1433 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1434 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1437 /* Return 1 if C is a constant that is not a logical operand (as
1438 above), but could be split into one. */
1441 non_logical_cint_operand (op, mode)
1443 enum machine_mode mode;
1445 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1446 && ! logical_operand (op, mode)
1447 && reg_or_logical_cint_operand (op, mode));
1450 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1451 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1452 Reject all ones and all zeros, since these should have been optimized
1453 away and confuse the making of MB and ME. */
1456 mask_operand (op, mode)
1458 enum machine_mode mode ATTRIBUTE_UNUSED;
1460 HOST_WIDE_INT c, lsb;
1462 if (GET_CODE (op) != CONST_INT)
1467 /* Fail in 64-bit mode if the mask wraps around because the upper
1468 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1469 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1472 /* We don't change the number of transitions by inverting,
1473 so make sure we start with the LS bit zero. */
1477 /* Reject all zeros or all ones. */
1481 /* Find the first transition. */
1484 /* Invert to look for a second transition. */
1487 /* Erase first transition. */
1490 /* Find the second transition (if any). */
1493 /* Match if all the bits above are 1's (or c is zero). */
1497 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1498 It is if there are no more than one 1->0 or 0->1 transitions.
1499 Reject all ones and all zeros, since these should have been optimized
1500 away and confuse the making of MB and ME. */
1503 mask64_operand (op, mode)
1505 enum machine_mode mode;
1507 if (GET_CODE (op) == CONST_INT)
1509 HOST_WIDE_INT c, lsb;
1511 /* We don't change the number of transitions by inverting,
1512 so make sure we start with the LS bit zero. */
1517 /* Reject all zeros or all ones. */
1521 /* Find the transition, and check that all bits above are 1's. */
1525 else if (GET_CODE (op) == CONST_DOUBLE
1526 && (mode == VOIDmode || mode == DImode))
1528 HOST_WIDE_INT low, high, lsb;
1530 if (HOST_BITS_PER_WIDE_INT < 64)
1531 high = CONST_DOUBLE_HIGH (op);
1533 low = CONST_DOUBLE_LOW (op);
1536 if (HOST_BITS_PER_WIDE_INT < 64)
1543 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1547 return high == -lsb;
1551 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1557 /* Return 1 if the operand is either a non-special register or a constant
1558 that can be used as the operand of a PowerPC64 logical AND insn. */
1561 and64_operand (op, mode)
1563 enum machine_mode mode;
1565 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1566 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1568 return (logical_operand (op, mode) || mask64_operand (op, mode));
1571 /* Return 1 if the operand is either a non-special register or a
1572 constant that can be used as the operand of an RS/6000 logical AND insn. */
1575 and_operand (op, mode)
1577 enum machine_mode mode;
1579 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1580 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1582 return (logical_operand (op, mode) || mask_operand (op, mode));
1585 /* Return 1 if the operand is a general register or memory operand. */
1588 reg_or_mem_operand (op, mode)
1590 enum machine_mode mode;
1592 return (gpc_reg_operand (op, mode)
1593 || memory_operand (op, mode)
1594 || volatile_mem_operand (op, mode));
1597 /* Return 1 if the operand is a general register or memory operand without
1598 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1602 lwa_operand (op, mode)
1604 enum machine_mode mode;
1608 if (reload_completed && GET_CODE (inner) == SUBREG)
1609 inner = SUBREG_REG (inner);
1611 return gpc_reg_operand (inner, mode)
1612 || (memory_operand (inner, mode)
1613 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1614 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1615 && (GET_CODE (XEXP (inner, 0)) != PLUS
1616 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1617 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1620 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1623 symbol_ref_operand (op, mode)
1625 enum machine_mode mode;
1627 if (mode != VOIDmode && GET_MODE (op) != mode)
1630 return (GET_CODE (op) == SYMBOL_REF);
1633 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1634 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1637 call_operand (op, mode)
1639 enum machine_mode mode;
1641 if (mode != VOIDmode && GET_MODE (op) != mode)
1644 return (GET_CODE (op) == SYMBOL_REF
1645 || (GET_CODE (op) == REG
1646 && (REGNO (op) == LINK_REGISTER_REGNUM
1647 || REGNO (op) == COUNT_REGISTER_REGNUM
1648 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1651 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1652 this file and the function is not weakly defined. */
1655 current_file_function_operand (op, mode)
1657 enum machine_mode mode ATTRIBUTE_UNUSED;
1659 return (GET_CODE (op) == SYMBOL_REF
1660 && (SYMBOL_REF_FLAG (op)
1661 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1662 && ! DECL_WEAK (current_function_decl))));
1665 /* Return 1 if this operand is a valid input for a move insn. */
1668 input_operand (op, mode)
1670 enum machine_mode mode;
1672 /* Memory is always valid. */
1673 if (memory_operand (op, mode))
1676 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1677 if (GET_CODE (op) == CONSTANT_P_RTX)
1680 /* For floating-point, easy constants are valid. */
1681 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1683 && easy_fp_constant (op, mode))
1686 /* Allow any integer constant. */
1687 if (GET_MODE_CLASS (mode) == MODE_INT
1688 && (GET_CODE (op) == CONST_INT
1689 || GET_CODE (op) == CONST_DOUBLE))
1692 /* For floating-point or multi-word mode, the only remaining valid type
1694 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1695 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1696 return register_operand (op, mode);
1698 /* The only cases left are integral modes one word or smaller (we
1699 do not get called for MODE_CC values). These can be in any
1701 if (register_operand (op, mode))
1704 /* A SYMBOL_REF referring to the TOC is valid. */
1705 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1708 /* A constant pool expression (relative to the TOC) is valid */
1709 if (TOC_RELATIVE_EXPR_P (op))
1712 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1714 if (DEFAULT_ABI == ABI_V4
1715 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1716 && small_data_operand (op, Pmode))
1722 /* Return 1 for an operand in small memory on V.4/eabi. */
1725 small_data_operand (op, mode)
1726 rtx op ATTRIBUTE_UNUSED;
1727 enum machine_mode mode ATTRIBUTE_UNUSED;
1732 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1735 if (DEFAULT_ABI != ABI_V4)
1738 if (GET_CODE (op) == SYMBOL_REF)
1741 else if (GET_CODE (op) != CONST
1742 || GET_CODE (XEXP (op, 0)) != PLUS
1743 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1744 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1749 rtx sum = XEXP (op, 0);
1750 HOST_WIDE_INT summand;
1752 /* We have to be careful here, because it is the referenced address
1753 that must be 32k from _SDA_BASE_, not just the symbol. */
1754 summand = INTVAL (XEXP (sum, 1));
1755 if (summand < 0 || summand > g_switch_value)
1758 sym_ref = XEXP (sum, 0);
1761 if (*XSTR (sym_ref, 0) != '@')
1772 constant_pool_expr_1 (op, have_sym, have_toc)
1777 switch (GET_CODE(op))
1780 if (CONSTANT_POOL_ADDRESS_P (op))
1782 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1790 else if (! strcmp (XSTR (op, 0), toc_label_name))
1799 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
1800 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
1802 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1811 constant_pool_expr_p (op)
1816 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1820 toc_relative_expr_p (op)
1825 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1828 /* Try machine-dependent ways of modifying an illegitimate address
1829 to be legitimate. If we find one, return the new, valid address.
1830 This is used from only one place: `memory_address' in explow.c.
1832 OLDX is the address as it was before break_out_memory_refs was
1833 called. In some cases it is useful to look at this to decide what
1836 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1838 It is always safe for this function to do nothing. It exists to
1839 recognize opportunities to optimize the output.
1841 On RS/6000, first check for the sum of a register with a constant
1842 integer that is out of range. If so, generate code to add the
1843 constant with the low-order 16 bits masked to the register and force
1844 this result into another register (this can be done with `cau').
1845 Then generate an address of REG+(CONST&0xffff), allowing for the
1846 possibility of bit 16 being a one.
1848 Then check for the sum of a register and something not constant, try to
1849 load the other things into a register and return the sum. */
1851 rs6000_legitimize_address (x, oldx, mode)
1853 rtx oldx ATTRIBUTE_UNUSED;
1854 enum machine_mode mode;
1856 if (GET_CODE (x) == PLUS
1857 && GET_CODE (XEXP (x, 0)) == REG
1858 && GET_CODE (XEXP (x, 1)) == CONST_INT
1859 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1861 HOST_WIDE_INT high_int, low_int;
1863 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
1864 high_int = INTVAL (XEXP (x, 1)) - low_int;
1865 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1866 GEN_INT (high_int)), 0);
1867 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1869 else if (GET_CODE (x) == PLUS
1870 && GET_CODE (XEXP (x, 0)) == REG
1871 && GET_CODE (XEXP (x, 1)) != CONST_INT
1872 && GET_MODE_NUNITS (mode) == 1
1873 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1874 && (TARGET_POWERPC64 || mode != DImode)
1877 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1878 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1880 else if (ALTIVEC_VECTOR_MODE (mode))
1884 /* Make sure both operands are registers. */
1885 if (GET_CODE (x) == PLUS)
1886 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1887 force_reg (Pmode, XEXP (x, 1)));
1889 reg = force_reg (Pmode, x);
1892 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1893 && GET_CODE (x) != CONST_INT
1894 && GET_CODE (x) != CONST_DOUBLE
1896 && GET_MODE_NUNITS (mode) == 1
1897 && (GET_MODE_BITSIZE (mode) <= 32
1898 || (TARGET_HARD_FLOAT && mode == DFmode)))
1900 rtx reg = gen_reg_rtx (Pmode);
1901 emit_insn (gen_elf_high (reg, (x)));
1902 return gen_rtx_LO_SUM (Pmode, reg, (x));
1904 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1906 && GET_CODE (x) != CONST_INT
1907 && GET_CODE (x) != CONST_DOUBLE
1909 && (TARGET_HARD_FLOAT || mode != DFmode)
1913 rtx reg = gen_reg_rtx (Pmode);
1914 emit_insn (gen_macho_high (reg, (x)));
1915 return gen_rtx_LO_SUM (Pmode, reg, (x));
1918 && CONSTANT_POOL_EXPR_P (x)
1919 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1921 return create_TOC_reference (x);
1927 /* The convention appears to be to define this wherever it is used.
1928 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1929 is now used here. */
1930 #ifndef REG_MODE_OK_FOR_BASE_P
1931 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1934 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1935 replace the input X, or the original X if no replacement is called for.
1936 The output parameter *WIN is 1 if the calling macro should goto WIN,
1939 For RS/6000, we wish to handle large displacements off a base
1940 register by splitting the addend across an addiu/addis and the mem insn.
1941 This cuts number of extra insns needed from 3 to 1.
1943 On Darwin, we use this to generate code for floating point constants.
1944 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1945 The Darwin code is inside #if TARGET_MACHO because only then is
1946 machopic_function_base_name() defined. */
1948 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1950 enum machine_mode mode;
1953 int ind_levels ATTRIBUTE_UNUSED;
1956 /* We must recognize output that we have already generated ourselves. */
1957 if (GET_CODE (x) == PLUS
1958 && GET_CODE (XEXP (x, 0)) == PLUS
1959 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1960 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1961 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1963 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1964 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1965 opnum, (enum reload_type)type);
1971 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1972 && GET_CODE (x) == LO_SUM
1973 && GET_CODE (XEXP (x, 0)) == PLUS
1974 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1975 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1976 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1977 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1978 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1979 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1980 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1982 /* Result of previous invocation of this function on Darwin
1983 floating point constant. */
1984 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1985 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1986 opnum, (enum reload_type)type);
1991 if (GET_CODE (x) == PLUS
1992 && GET_CODE (XEXP (x, 0)) == REG
1993 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1994 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1995 && GET_CODE (XEXP (x, 1)) == CONST_INT
1996 && !ALTIVEC_VECTOR_MODE (mode))
1998 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1999 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2001 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2003 /* Check for 32-bit overflow. */
2004 if (high + low != val)
2010 /* Reload the high part into a base reg; leave the low part
2011 in the mem directly. */
2013 x = gen_rtx_PLUS (GET_MODE (x),
2014 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2018 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2019 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2020 opnum, (enum reload_type)type);
2025 if (GET_CODE (x) == SYMBOL_REF
2026 && DEFAULT_ABI == ABI_DARWIN
2027 && !ALTIVEC_VECTOR_MODE (mode)
2030 /* Darwin load of floating point constant. */
2031 rtx offset = gen_rtx (CONST, Pmode,
2032 gen_rtx (MINUS, Pmode, x,
2033 gen_rtx (SYMBOL_REF, Pmode,
2034 machopic_function_base_name ())));
2035 x = gen_rtx (LO_SUM, GET_MODE (x),
2036 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2037 gen_rtx (HIGH, Pmode, offset)), offset);
2038 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2039 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2040 opnum, (enum reload_type)type);
2046 && CONSTANT_POOL_EXPR_P (x)
2047 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2049 (x) = create_TOC_reference (x);
2057 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2058 that is a valid memory address for an instruction.
2059 The MODE argument is the machine mode for the MEM expression
2060 that wants to use this address.
2062 On the RS/6000, there are four valid address: a SYMBOL_REF that
2063 refers to a constant pool entry of an address (or the sum of it
2064 plus a constant), a short (16-bit signed) constant plus a register,
2065 the sum of two registers, or a register indirect, possibly with an
2066 auto-increment. For DFmode and DImode with an constant plus register,
2067 we must ensure that both words are addressable or PowerPC64 with offset
2070 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2071 32-bit DImode, TImode), indexed addressing cannot be used because
2072 adjacent memory cells are accessed by adding word-sized offsets
2073 during assembly output. */
2075 rs6000_legitimate_address (mode, x, reg_ok_strict)
2076 enum machine_mode mode;
2080 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2082 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2083 && !ALTIVEC_VECTOR_MODE (mode)
2085 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2087 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2089 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2091 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2093 && GET_CODE (x) == PLUS
2094 && GET_CODE (XEXP (x, 0)) == REG
2095 && XEXP (x, 0) == virtual_stack_vars_rtx
2096 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2098 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2101 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
2102 && (TARGET_POWERPC64 || mode != DImode)
2103 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2105 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2110 /* Try to output insns to set TARGET equal to the constant C if it can
2111 be done in less than N insns. Do all computations in MODE.
2112 Returns the place where the output has been placed if it can be
2113 done and the insns have been emitted. If it would take more than N
2114 insns, zero is returned and no insns and emitted. */
2117 rs6000_emit_set_const (dest, mode, source, n)
2119 enum machine_mode mode;
2120 int n ATTRIBUTE_UNUSED;
2122 HOST_WIDE_INT c0, c1;
2124 if (mode == QImode || mode == HImode || mode == SImode)
2127 dest = gen_reg_rtx (mode);
2128 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2132 if (GET_CODE (source) == CONST_INT)
2134 c0 = INTVAL (source);
2137 else if (GET_CODE (source) == CONST_DOUBLE)
2139 #if HOST_BITS_PER_WIDE_INT >= 64
2140 c0 = CONST_DOUBLE_LOW (source);
2143 c0 = CONST_DOUBLE_LOW (source);
2144 c1 = CONST_DOUBLE_HIGH (source);
2150 return rs6000_emit_set_long_const (dest, c0, c1);
2153 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2154 fall back to a straight forward decomposition. We do this to avoid
2155 exponential run times encountered when looking for longer sequences
2156 with rs6000_emit_set_const. */
2158 rs6000_emit_set_long_const (dest, c1, c2)
2160 HOST_WIDE_INT c1, c2;
2162 if (!TARGET_POWERPC64)
2164 rtx operand1, operand2;
2166 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2168 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2170 emit_move_insn (operand1, GEN_INT (c1));
2171 emit_move_insn (operand2, GEN_INT (c2));
2175 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2178 ud2 = (c1 & 0xffff0000) >> 16;
2179 #if HOST_BITS_PER_WIDE_INT >= 64
2183 ud4 = (c2 & 0xffff0000) >> 16;
2185 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2186 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2189 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2191 emit_move_insn (dest, GEN_INT (ud1));
2194 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2195 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2198 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2201 emit_move_insn (dest, GEN_INT (ud2 << 16));
2203 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2205 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2206 || (ud4 == 0 && ! (ud3 & 0x8000)))
2209 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2212 emit_move_insn (dest, GEN_INT (ud3 << 16));
2215 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2216 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2218 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2223 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2226 emit_move_insn (dest, GEN_INT (ud4 << 16));
2229 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2231 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2233 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2234 GEN_INT (ud2 << 16)));
2236 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2242 /* Emit a move from SOURCE to DEST in mode MODE. */
2244 rs6000_emit_move (dest, source, mode)
2247 enum machine_mode mode;
2251 operands[1] = source;
2253 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2254 if (GET_CODE (operands[1]) == CONST_DOUBLE
2255 && ! FLOAT_MODE_P (mode)
2256 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2258 /* FIXME. This should never happen. */
2259 /* Since it seems that it does, do the safe thing and convert
2261 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2263 if (GET_CODE (operands[1]) == CONST_DOUBLE
2264 && ! FLOAT_MODE_P (mode)
2265 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2266 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2267 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2268 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2271 /* Check if GCC is setting up a block move that will end up using FP
2272 registers as temporaries. We must make sure this is acceptable. */
2273 if (GET_CODE (operands[0]) == MEM
2274 && GET_CODE (operands[1]) == MEM
2276 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2277 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2278 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2279 ? 32 : MEM_ALIGN (operands[0])))
2280 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2282 : MEM_ALIGN (operands[1]))))
2283 && ! MEM_VOLATILE_P (operands [0])
2284 && ! MEM_VOLATILE_P (operands [1]))
2286 emit_move_insn (adjust_address (operands[0], SImode, 0),
2287 adjust_address (operands[1], SImode, 0));
2288 emit_move_insn (adjust_address (operands[0], SImode, 4),
2289 adjust_address (operands[1], SImode, 4));
2293 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2294 operands[1] = force_reg (mode, operands[1]);
2296 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2297 && GET_CODE (operands[0]) == MEM)
2301 if (reload_in_progress || reload_completed)
2302 regnum = true_regnum (operands[1]);
2303 else if (GET_CODE (operands[1]) == REG)
2304 regnum = REGNO (operands[1]);
2308 /* If operands[1] is a register, on POWER it may have
2309 double-precision data in it, so truncate it to single
2311 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2314 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2315 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2316 operands[1] = newreg;
2320 /* Handle the case where reload calls us with an invalid address;
2321 and the case of CONSTANT_P_RTX. */
2322 if (!ALTIVEC_VECTOR_MODE (mode)
2323 && (! general_operand (operands[1], mode)
2324 || ! nonimmediate_operand (operands[0], mode)
2325 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2327 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2331 /* FIXME: In the long term, this switch statement should go away
2332 and be replaced by a sequence of tests based on things like
2338 if (CONSTANT_P (operands[1])
2339 && GET_CODE (operands[1]) != CONST_INT)
2340 operands[1] = force_const_mem (mode, operands[1]);
2346 if (CONSTANT_P (operands[1])
2347 && ! easy_fp_constant (operands[1], mode))
2348 operands[1] = force_const_mem (mode, operands[1]);
2355 if (CONSTANT_P (operands[1])
2356 && !easy_vector_constant (operands[1]))
2357 operands[1] = force_const_mem (mode, operands[1]);
2362 /* Use default pattern for address of ELF small data */
2365 && DEFAULT_ABI == ABI_V4
2366 && (GET_CODE (operands[1]) == SYMBOL_REF
2367 || GET_CODE (operands[1]) == CONST)
2368 && small_data_operand (operands[1], mode))
2370 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2374 if (DEFAULT_ABI == ABI_V4
2375 && mode == Pmode && mode == SImode
2376 && flag_pic == 1 && got_operand (operands[1], mode))
2378 emit_insn (gen_movsi_got (operands[0], operands[1]));
2382 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2383 && TARGET_NO_TOC && ! flag_pic
2385 && CONSTANT_P (operands[1])
2386 && GET_CODE (operands[1]) != HIGH
2387 && GET_CODE (operands[1]) != CONST_INT)
2389 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2391 /* If this is a function address on -mcall-aixdesc,
2392 convert it to the address of the descriptor. */
2393 if (DEFAULT_ABI == ABI_AIX
2394 && GET_CODE (operands[1]) == SYMBOL_REF
2395 && XSTR (operands[1], 0)[0] == '.')
2397 const char *name = XSTR (operands[1], 0);
2399 while (*name == '.')
2401 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2402 CONSTANT_POOL_ADDRESS_P (new_ref)
2403 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2404 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2405 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2406 operands[1] = new_ref;
2409 if (DEFAULT_ABI == ABI_DARWIN)
2411 emit_insn (gen_macho_high (target, operands[1]));
2412 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2416 emit_insn (gen_elf_high (target, operands[1]));
2417 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2421 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2422 and we have put it in the TOC, we just need to make a TOC-relative
2425 && GET_CODE (operands[1]) == SYMBOL_REF
2426 && CONSTANT_POOL_EXPR_P (operands[1])
2427 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2428 get_pool_mode (operands[1])))
2430 operands[1] = create_TOC_reference (operands[1]);
2432 else if (mode == Pmode
2433 && CONSTANT_P (operands[1])
2434 && ((GET_CODE (operands[1]) != CONST_INT
2435 && ! easy_fp_constant (operands[1], mode))
2436 || (GET_CODE (operands[1]) == CONST_INT
2437 && num_insns_constant (operands[1], mode) > 2)
2438 || (GET_CODE (operands[0]) == REG
2439 && FP_REGNO_P (REGNO (operands[0]))))
2440 && GET_CODE (operands[1]) != HIGH
2441 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2442 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2444 /* Emit a USE operation so that the constant isn't deleted if
2445 expensive optimizations are turned on because nobody
2446 references it. This should only be done for operands that
2447 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2448 This should not be done for operands that contain LABEL_REFs.
2449 For now, we just handle the obvious case. */
2450 if (GET_CODE (operands[1]) != LABEL_REF)
2451 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2454 /* Darwin uses a special PIC legitimizer. */
2455 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2458 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2460 if (operands[0] != operands[1])
2461 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2466 /* If we are to limit the number of things we put in the TOC and
2467 this is a symbol plus a constant we can add in one insn,
2468 just put the symbol in the TOC and add the constant. Don't do
2469 this if reload is in progress. */
2470 if (GET_CODE (operands[1]) == CONST
2471 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2472 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2473 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2474 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2475 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2476 && ! side_effects_p (operands[0]))
2479 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2480 rtx other = XEXP (XEXP (operands[1], 0), 1);
2482 sym = force_reg (mode, sym);
2484 emit_insn (gen_addsi3 (operands[0], sym, other));
2486 emit_insn (gen_adddi3 (operands[0], sym, other));
2490 operands[1] = force_const_mem (mode, operands[1]);
2493 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2494 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2495 get_pool_constant (XEXP (operands[1], 0)),
2496 get_pool_mode (XEXP (operands[1], 0))))
2499 = gen_rtx_MEM (mode,
2500 create_TOC_reference (XEXP (operands[1], 0)));
2501 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2502 RTX_UNCHANGING_P (operands[1]) = 1;
2508 if (GET_CODE (operands[0]) == MEM
2509 && GET_CODE (XEXP (operands[0], 0)) != REG
2510 && ! reload_in_progress)
2512 = replace_equiv_address (operands[0],
2513 copy_addr_to_reg (XEXP (operands[0], 0)));
2515 if (GET_CODE (operands[1]) == MEM
2516 && GET_CODE (XEXP (operands[1], 0)) != REG
2517 && ! reload_in_progress)
2519 = replace_equiv_address (operands[1],
2520 copy_addr_to_reg (XEXP (operands[1], 0)));
2527 /* Above, we may have called force_const_mem which may have returned
2528 an invalid address. If we can, fix this up; otherwise, reload will
2529 have to deal with it. */
2530 if (GET_CODE (operands[1]) == MEM
2531 && ! memory_address_p (mode, XEXP (operands[1], 0))
2532 && ! reload_in_progress)
2533 operands[1] = adjust_address (operands[1], mode, 0);
2535 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2539 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2540 for a call to a function whose data type is FNTYPE.
2541 For a library call, FNTYPE is 0.
2543 For incoming args we set the number of arguments in the prototype large
2544 so we never return a PARALLEL. */
2547 init_cumulative_args (cum, fntype, libname, incoming)
2548 CUMULATIVE_ARGS *cum;
2550 rtx libname ATTRIBUTE_UNUSED;
2553 static CUMULATIVE_ARGS zero_cumulative;
2555 *cum = zero_cumulative;
2557 cum->fregno = FP_ARG_MIN_REG;
2558 cum->vregno = ALTIVEC_ARG_MIN_REG;
2559 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2560 cum->call_cookie = CALL_NORMAL;
2561 cum->sysv_gregno = GP_ARG_MIN_REG;
2564 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2566 else if (cum->prototype)
2567 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2568 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2569 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2572 cum->nargs_prototype = 0;
2574 cum->orig_nargs = cum->nargs_prototype;
2576 /* Check for a longcall attribute. */
2578 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2579 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2580 cum->call_cookie = CALL_LONG;
2582 if (TARGET_DEBUG_ARG)
2584 fprintf (stderr, "\ninit_cumulative_args:");
2587 tree ret_type = TREE_TYPE (fntype);
2588 fprintf (stderr, " ret code = %s,",
2589 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2592 if (cum->call_cookie & CALL_LONG)
2593 fprintf (stderr, " longcall,");
2595 fprintf (stderr, " proto = %d, nargs = %d\n",
2596 cum->prototype, cum->nargs_prototype);
2600 /* If defined, a C expression which determines whether, and in which
2601 direction, to pad out an argument with extra space. The value
2602 should be of type `enum direction': either `upward' to pad above
2603 the argument, `downward' to pad below, or `none' to inhibit
2606 For the AIX ABI structs are always stored left shifted in their
2610 function_arg_padding (mode, type)
2611 enum machine_mode mode;
2614 if (type != 0 && AGGREGATE_TYPE_P (type))
2617 /* This is the default definition. */
2618 return (! BYTES_BIG_ENDIAN
2621 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2622 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2623 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2624 ? downward : upward));
2627 /* If defined, a C expression that gives the alignment boundary, in bits,
2628 of an argument with the specified mode and type. If it is not defined,
2629 PARM_BOUNDARY is used for all arguments.
2631 V.4 wants long longs to be double word aligned. */
2634 function_arg_boundary (mode, type)
2635 enum machine_mode mode;
2636 tree type ATTRIBUTE_UNUSED;
2638 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2640 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2643 return PARM_BOUNDARY;
2646 /* Update the data in CUM to advance over an argument
2647 of mode MODE and data type TYPE.
2648 (TYPE is null for libcalls where that information may not be available.) */
2651 function_arg_advance (cum, mode, type, named)
2652 CUMULATIVE_ARGS *cum;
2653 enum machine_mode mode;
2657 cum->nargs_prototype--;
2659 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2661 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2664 cum->words += RS6000_ARG_SIZE (mode, type);
2666 else if (DEFAULT_ABI == ABI_V4)
2668 if (TARGET_HARD_FLOAT
2669 && (mode == SFmode || mode == DFmode))
2671 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2676 cum->words += cum->words & 1;
2677 cum->words += RS6000_ARG_SIZE (mode, type);
2683 int gregno = cum->sysv_gregno;
2685 /* Aggregates and IEEE quad get passed by reference. */
2686 if ((type && AGGREGATE_TYPE_P (type))
2690 n_words = RS6000_ARG_SIZE (mode, type);
2692 /* Long long is put in odd registers. */
2693 if (n_words == 2 && (gregno & 1) == 0)
2696 /* Long long is not split between registers and stack. */
2697 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2699 /* Long long is aligned on the stack. */
2701 cum->words += cum->words & 1;
2702 cum->words += n_words;
2705 /* Note: continuing to accumulate gregno past when we've started
2706 spilling to the stack indicates the fact that we've started
2707 spilling to the stack to expand_builtin_saveregs. */
2708 cum->sysv_gregno = gregno + n_words;
2711 if (TARGET_DEBUG_ARG)
2713 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2714 cum->words, cum->fregno);
2715 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2716 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2717 fprintf (stderr, "mode = %4s, named = %d\n",
2718 GET_MODE_NAME (mode), named);
2723 int align = (TARGET_32BIT && (cum->words & 1) != 0
2724 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2726 cum->words += align + RS6000_ARG_SIZE (mode, type);
2728 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2731 if (TARGET_DEBUG_ARG)
2733 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2734 cum->words, cum->fregno);
2735 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2736 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2737 fprintf (stderr, "named = %d, align = %d\n", named, align);
2742 /* Determine where to put an argument to a function.
2743 Value is zero to push the argument on the stack,
2744 or a hard register in which to store the argument.
2746 MODE is the argument's machine mode.
2747 TYPE is the data type of the argument (as a tree).
2748 This is null for libcalls where that information may
2750 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2751 the preceding args and about the function being called.
2752 NAMED is nonzero if this argument is a named parameter
2753 (otherwise it is an extra parameter matching an ellipsis).
2755 On RS/6000 the first eight words of non-FP are normally in registers
2756 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2757 Under V.4, the first 8 FP args are in registers.
2759 If this is floating-point and no prototype is specified, we use
2760 both an FP and integer register (or possibly FP reg and stack). Library
2761 functions (when TYPE is zero) always have the proper types for args,
2762 so we can pass the FP value just in one register. emit_library_function
2763 doesn't support PARALLEL anyway. */
2766 function_arg (cum, mode, type, named)
2767 CUMULATIVE_ARGS *cum;
2768 enum machine_mode mode;
2772 enum rs6000_abi abi = DEFAULT_ABI;
2774 /* Return a marker to indicate whether CR1 needs to set or clear the
2775 bit that V.4 uses to say fp args were passed in registers.
2776 Assume that we don't need the marker for software floating point,
2777 or compiler generated library calls. */
2778 if (mode == VOIDmode)
2781 && TARGET_HARD_FLOAT
2782 && cum->nargs_prototype < 0
2783 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2785 return GEN_INT (cum->call_cookie
2786 | ((cum->fregno == FP_ARG_MIN_REG)
2787 ? CALL_V4_SET_FP_ARGS
2788 : CALL_V4_CLEAR_FP_ARGS));
2791 return GEN_INT (cum->call_cookie);
2794 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2796 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2797 return gen_rtx_REG (mode, cum->vregno);
2801 else if (abi == ABI_V4)
2803 if (TARGET_HARD_FLOAT
2804 && (mode == SFmode || mode == DFmode))
2806 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2807 return gen_rtx_REG (mode, cum->fregno);
2814 int gregno = cum->sysv_gregno;
2816 /* Aggregates and IEEE quad get passed by reference. */
2817 if ((type && AGGREGATE_TYPE_P (type))
2821 n_words = RS6000_ARG_SIZE (mode, type);
2823 /* Long long is put in odd registers. */
2824 if (n_words == 2 && (gregno & 1) == 0)
2827 /* Long long is not split between registers and stack. */
2828 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2829 return gen_rtx_REG (mode, gregno);
2836 int align = (TARGET_32BIT && (cum->words & 1) != 0
2837 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2838 int align_words = cum->words + align;
2840 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2843 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2846 || ((cum->nargs_prototype > 0)
2847 /* IBM AIX extended its linkage convention definition always
2848 to require FP args after register save area hole on the
2850 && (DEFAULT_ABI != ABI_AIX
2852 || (align_words < GP_ARG_NUM_REG))))
2853 return gen_rtx_REG (mode, cum->fregno);
2855 return gen_rtx_PARALLEL (mode,
2857 gen_rtx_EXPR_LIST (VOIDmode,
2858 ((align_words >= GP_ARG_NUM_REG)
2861 + RS6000_ARG_SIZE (mode, type)
2863 /* If this is partially on the stack, then
2864 we only include the portion actually
2865 in registers here. */
2866 ? gen_rtx_REG (SImode,
2867 GP_ARG_MIN_REG + align_words)
2868 : gen_rtx_REG (mode,
2869 GP_ARG_MIN_REG + align_words))),
2871 gen_rtx_EXPR_LIST (VOIDmode,
2872 gen_rtx_REG (mode, cum->fregno),
2875 else if (align_words < GP_ARG_NUM_REG)
2876 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2882 /* For an arg passed partly in registers and partly in memory,
2883 this is the number of registers used.
2884 For args passed entirely in registers or entirely in memory, zero. */
2887 function_arg_partial_nregs (cum, mode, type, named)
2888 CUMULATIVE_ARGS *cum;
2889 enum machine_mode mode;
2891 int named ATTRIBUTE_UNUSED;
2893 if (DEFAULT_ABI == ABI_V4)
2896 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2897 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2899 if (cum->nargs_prototype >= 0)
2903 if (cum->words < GP_ARG_NUM_REG
2904 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2906 int ret = GP_ARG_NUM_REG - cum->words;
2907 if (ret && TARGET_DEBUG_ARG)
2908 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2916 /* A C expression that indicates when an argument must be passed by
2917 reference. If nonzero for an argument, a copy of that argument is
2918 made in memory and a pointer to the argument is passed instead of
2919 the argument itself. The pointer is passed in whatever way is
2920 appropriate for passing a pointer to that type.
2922 Under V.4, structures and unions are passed by reference. */
2925 function_arg_pass_by_reference (cum, mode, type, named)
2926 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2927 enum machine_mode mode ATTRIBUTE_UNUSED;
2929 int named ATTRIBUTE_UNUSED;
2931 if (DEFAULT_ABI == ABI_V4
2932 && ((type && AGGREGATE_TYPE_P (type))
2935 if (TARGET_DEBUG_ARG)
2936 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2944 /* Perform any needed actions needed for a function that is receiving a
2945 variable number of arguments.
2949 MODE and TYPE are the mode and type of the current parameter.
2951 PRETEND_SIZE is a variable that should be set to the amount of stack
2952 that must be pushed by the prolog to pretend that our caller pushed
2955 Normally, this macro will push all remaining incoming registers on the
2956 stack and set PRETEND_SIZE to the length of the registers pushed. */
2959 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2960 CUMULATIVE_ARGS *cum;
2961 enum machine_mode mode;
2967 CUMULATIVE_ARGS next_cum;
2968 int reg_size = TARGET_32BIT ? 4 : 8;
2969 rtx save_area = NULL_RTX, mem;
2970 int first_reg_offset, set;
2974 fntype = TREE_TYPE (current_function_decl);
2975 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2976 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2977 != void_type_node));
2979 /* For varargs, we do not want to skip the dummy va_dcl argument.
2980 For stdargs, we do want to skip the last named argument. */
2983 function_arg_advance (&next_cum, mode, type, 1);
2985 if (DEFAULT_ABI == ABI_V4)
2987 /* Indicate to allocate space on the stack for varargs save area. */
2988 cfun->machine->sysv_varargs_p = 1;
2990 save_area = plus_constant (virtual_stack_vars_rtx,
2991 - RS6000_VARARGS_SIZE);
2993 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2997 first_reg_offset = next_cum.words;
2998 save_area = virtual_incoming_args_rtx;
2999 cfun->machine->sysv_varargs_p = 0;
3001 if (MUST_PASS_IN_STACK (mode, type))
3002 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3005 set = get_varargs_alias_set ();
3006 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3008 mem = gen_rtx_MEM (BLKmode,
3009 plus_constant (save_area,
3010 first_reg_offset * reg_size)),
3011 set_mem_alias_set (mem, set);
3012 set_mem_align (mem, BITS_PER_WORD);
3015 (GP_ARG_MIN_REG + first_reg_offset, mem,
3016 GP_ARG_NUM_REG - first_reg_offset,
3017 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3019 /* ??? Does ABI_V4 need this at all? */
3020 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
3023 /* Save FP registers if needed. */
3024 if (DEFAULT_ABI == ABI_V4
3025 && TARGET_HARD_FLOAT && ! no_rtl
3026 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3028 int fregno = next_cum.fregno;
3029 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3030 rtx lab = gen_label_rtx ();
3031 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3033 emit_jump_insn (gen_rtx_SET (VOIDmode,
3035 gen_rtx_IF_THEN_ELSE (VOIDmode,
3036 gen_rtx_NE (VOIDmode, cr1,
3038 gen_rtx_LABEL_REF (VOIDmode, lab),
3041 while (fregno <= FP_ARG_V4_MAX_REG)
3043 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3044 set_mem_alias_set (mem, set);
3045 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3054 /* Create the va_list data type. */
3057 rs6000_build_va_list ()
3059 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3061 /* For AIX, prefer 'char *' because that's what the system
3062 header files like. */
3063 if (DEFAULT_ABI != ABI_V4)
3064 return build_pointer_type (char_type_node);
3066 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3067 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3069 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3070 unsigned_char_type_node);
3071 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3072 unsigned_char_type_node);
3073 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3075 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3078 DECL_FIELD_CONTEXT (f_gpr) = record;
3079 DECL_FIELD_CONTEXT (f_fpr) = record;
3080 DECL_FIELD_CONTEXT (f_ovf) = record;
3081 DECL_FIELD_CONTEXT (f_sav) = record;
3083 TREE_CHAIN (record) = type_decl;
3084 TYPE_NAME (record) = type_decl;
3085 TYPE_FIELDS (record) = f_gpr;
3086 TREE_CHAIN (f_gpr) = f_fpr;
3087 TREE_CHAIN (f_fpr) = f_ovf;
3088 TREE_CHAIN (f_ovf) = f_sav;
3090 layout_type (record);
3092 /* The correct type is an array type of one element. */
3093 return build_array_type (record, build_index_type (size_zero_node));
3096 /* Implement va_start. */
3099 rs6000_va_start (stdarg_p, valist, nextarg)
3104 HOST_WIDE_INT words, n_gpr, n_fpr;
3105 tree f_gpr, f_fpr, f_ovf, f_sav;
3106 tree gpr, fpr, ovf, sav, t;
3108 /* Only SVR4 needs something special. */
3109 if (DEFAULT_ABI != ABI_V4)
3111 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
3115 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3116 f_fpr = TREE_CHAIN (f_gpr);
3117 f_ovf = TREE_CHAIN (f_fpr);
3118 f_sav = TREE_CHAIN (f_ovf);
3120 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3121 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3122 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3123 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3124 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3126 /* Count number of gp and fp argument registers used. */
3127 words = current_function_args_info.words;
3128 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3129 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3131 if (TARGET_DEBUG_ARG)
3133 fputs ("va_start: words = ", stderr);
3134 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3135 fputs (", n_gpr = ", stderr);
3136 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3137 fputs (", n_fpr = ", stderr);
3138 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3139 putc ('\n', stderr);
3142 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3143 TREE_SIDE_EFFECTS (t) = 1;
3144 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3146 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3147 TREE_SIDE_EFFECTS (t) = 1;
3148 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3150 /* Find the overflow area. */
3151 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3153 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3154 build_int_2 (words * UNITS_PER_WORD, 0));
3155 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3156 TREE_SIDE_EFFECTS (t) = 1;
3157 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3159 /* Find the register save area. */
3160 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3161 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3162 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3163 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3164 TREE_SIDE_EFFECTS (t) = 1;
3165 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3168 /* Implement va_arg. */
3171 rs6000_va_arg (valist, type)
3174 tree f_gpr, f_fpr, f_ovf, f_sav;
3175 tree gpr, fpr, ovf, sav, reg, t, u;
3176 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3177 rtx lab_false, lab_over, addr_rtx, r;
3179 if (DEFAULT_ABI != ABI_V4)
3180 return std_expand_builtin_va_arg (valist, type);
3182 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3183 f_fpr = TREE_CHAIN (f_gpr);
3184 f_ovf = TREE_CHAIN (f_fpr);
3185 f_sav = TREE_CHAIN (f_ovf);
3187 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3188 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3189 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3190 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3191 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3193 size = int_size_in_bytes (type);
3194 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3196 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3198 /* Aggregates and long doubles are passed by reference. */
3204 size = UNITS_PER_WORD;
3207 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3209 /* FP args go in FP registers, if present. */
3218 /* Otherwise into GP registers. */
3226 /* Pull the value out of the saved registers ... */
3228 lab_false = gen_label_rtx ();
3229 lab_over = gen_label_rtx ();
3230 addr_rtx = gen_reg_rtx (Pmode);
3232 /* AltiVec vectors never go in registers. */
3233 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
3235 TREE_THIS_VOLATILE (reg) = 1;
3236 emit_cmp_and_jump_insns
3237 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3238 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3241 /* Long long is aligned in the registers. */
3244 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3245 build_int_2 (n_reg - 1, 0));
3246 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3247 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3248 TREE_SIDE_EFFECTS (u) = 1;
3249 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3253 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3257 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3258 build_int_2 (n_reg, 0));
3259 TREE_SIDE_EFFECTS (u) = 1;
3261 u = build1 (CONVERT_EXPR, integer_type_node, u);
3262 TREE_SIDE_EFFECTS (u) = 1;
3264 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3265 TREE_SIDE_EFFECTS (u) = 1;
3267 t = build (PLUS_EXPR, ptr_type_node, t, u);
3268 TREE_SIDE_EFFECTS (t) = 1;
3270 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3272 emit_move_insn (addr_rtx, r);
3274 emit_jump_insn (gen_jump (lab_over));
3278 emit_label (lab_false);
3280 /* ... otherwise out of the overflow area. */
3282 /* Make sure we don't find reg 7 for the next int arg.
3284 All AltiVec vectors go in the overflow area. So in the AltiVec
3285 case we need to get the vectors from the overflow area, but
3286 remember where the GPRs and FPRs are. */
3287 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
3288 || !TARGET_ALTIVEC))
3290 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3291 TREE_SIDE_EFFECTS (t) = 1;
3292 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3295 /* Care for on-stack alignment if needed. */
3302 /* AltiVec vectors are 16 byte aligned. */
3303 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
3308 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3309 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3313 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3315 emit_move_insn (addr_rtx, r);
3317 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3318 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3319 TREE_SIDE_EFFECTS (t) = 1;
3320 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3322 emit_label (lab_over);
3326 r = gen_rtx_MEM (Pmode, addr_rtx);
3327 set_mem_alias_set (r, get_varargs_alias_set ());
3328 emit_move_insn (addr_rtx, r);
3336 #define def_builtin(MASK, NAME, TYPE, CODE) \
3338 if ((MASK) & target_flags) \
3339 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3342 struct builtin_description
3344 const unsigned int mask;
3345 const enum insn_code icode;
3346 const char *const name;
3347 const enum rs6000_builtins code;
3350 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3352 static const struct builtin_description bdesc_3arg[] =
3354 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3356 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3357 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3358 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3359 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3360 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3361 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3362 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3363 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3364 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3365 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3366 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3367 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3368 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3369 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3370 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3371 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3372 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3373 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3374 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3375 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3376 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3379 /* DST operations: void foo (void *, const int, const char). */
3381 static const struct builtin_description bdesc_dst[] =
3383 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3384 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3385 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3386 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3389 /* Simple binary operations: VECc = foo (VECa, VECb). */
3391 static const struct builtin_description bdesc_2arg[] =
3393 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3394 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3395 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3396 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3397 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3398 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3399 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3400 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3401 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3403 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3404 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3405 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3406 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3407 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3408 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3409 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3410 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3411 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3412 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3413 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3415 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3416 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3417 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3418 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3419 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3420 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3421 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3422 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3423 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3424 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3425 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3426 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3427 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3428 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3429 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3430 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3431 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3432 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3433 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3434 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3435 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3436 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3437 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3438 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3439 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3440 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3441 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3442 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3443 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3444 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3445 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3446 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3447 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3448 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3449 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3450 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3451 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3452 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3453 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3454 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3455 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3456 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3457 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3458 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3459 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3460 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3461 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3462 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3463 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3464 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3465 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3466 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3467 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3468 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3469 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3470 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3471 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3472 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3473 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3474 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3475 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3476 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3477 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3478 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3479 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3480 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3481 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3482 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3483 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3484 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3485 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3486 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3487 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3488 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3489 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3490 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3491 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3492 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3493 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3494 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3495 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3496 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3497 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3498 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3499 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3500 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3501 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3502 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3503 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3504 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3505 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3508 /* AltiVec predicates. */
3510 struct builtin_description_predicates
3512 const unsigned int mask;
3513 const enum insn_code icode;
3515 const char *const name;
3516 const enum rs6000_builtins code;
3519 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3521 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3522 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3523 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3524 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3525 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3526 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3527 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3528 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3529 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3530 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3531 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3532 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3533 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3536 /* ABS* opreations. */
3538 static const struct builtin_description bdesc_abs[] =
3540 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
3541 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
3542 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
3543 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
3544 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
3545 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
3546 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
3549 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3552 static const struct builtin_description bdesc_1arg[] =
3554 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3555 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3556 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3557 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3558 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3559 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3560 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3561 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3562 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3563 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3564 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3565 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3566 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3567 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3568 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3569 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3570 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
3574 rs6000_expand_unop_builtin (icode, arglist, target)
3575 enum insn_code icode;
3580 tree arg0 = TREE_VALUE (arglist);
3581 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3582 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3583 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3585 /* If we got invalid arguments bail out before generating bad rtl. */
3586 if (arg0 == error_mark_node)
3591 /* Only allow 5-bit *signed* literals. */
3592 case CODE_FOR_altivec_vspltisb:
3593 case CODE_FOR_altivec_vspltish:
3594 case CODE_FOR_altivec_vspltisw:
3595 if (GET_CODE (op0) != CONST_INT
3596 || INTVAL (op0) > 0x1f
3597 || INTVAL (op0) < -0x1f)
3599 error ("argument 1 must be a 5-bit signed literal");
3608 || GET_MODE (target) != tmode
3609 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3610 target = gen_reg_rtx (tmode);
3612 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3613 op0 = copy_to_mode_reg (mode0, op0);
3615 pat = GEN_FCN (icode) (target, op0);
3624 altivec_expand_abs_builtin (icode, arglist, target)
3625 enum insn_code icode;
3629 rtx pat, scratch1, scratch2;
3630 tree arg0 = TREE_VALUE (arglist);
3631 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3632 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3633 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3635 /* If we have invalid arguments, bail out before generating bad rtl. */
3636 if (arg0 == error_mark_node)
3640 || GET_MODE (target) != tmode
3641 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3642 target = gen_reg_rtx (tmode);
3644 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3645 op0 = copy_to_mode_reg (mode0, op0);
3647 scratch1 = gen_reg_rtx (mode0);
3648 scratch2 = gen_reg_rtx (mode0);
3650 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3659 rs6000_expand_binop_builtin (icode, arglist, target)
3660 enum insn_code icode;
3665 tree arg0 = TREE_VALUE (arglist);
3666 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3667 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3668 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3669 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3670 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3671 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3673 /* If we got invalid arguments bail out before generating bad rtl. */
3674 if (arg0 == error_mark_node || arg1 == error_mark_node)
3679 /* Only allow 5-bit unsigned literals. */
3680 case CODE_FOR_altivec_vcfux:
3681 case CODE_FOR_altivec_vcfsx:
3682 case CODE_FOR_altivec_vctsxs:
3683 case CODE_FOR_altivec_vctuxs:
3684 case CODE_FOR_altivec_vspltb:
3685 case CODE_FOR_altivec_vsplth:
3686 case CODE_FOR_altivec_vspltw:
3687 if (TREE_CODE (arg1) != INTEGER_CST
3688 || TREE_INT_CST_LOW (arg1) & ~0x1f)
3690 error ("argument 2 must be a 5-bit unsigned literal");
3699 || GET_MODE (target) != tmode
3700 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3701 target = gen_reg_rtx (tmode);
3703 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3704 op0 = copy_to_mode_reg (mode0, op0);
3705 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3706 op1 = copy_to_mode_reg (mode1, op1);
3708 pat = GEN_FCN (icode) (target, op0, op1);
3717 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3718 enum insn_code icode;
3724 tree cr6_form = TREE_VALUE (arglist);
3725 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3726 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3727 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3728 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3729 enum machine_mode tmode = SImode;
3730 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3731 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3734 if (TREE_CODE (cr6_form) != INTEGER_CST)
3736 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3740 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3745 /* If we have invalid arguments, bail out before generating bad rtl. */
3746 if (arg0 == error_mark_node || arg1 == error_mark_node)
3750 || GET_MODE (target) != tmode
3751 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3752 target = gen_reg_rtx (tmode);
3754 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3755 op0 = copy_to_mode_reg (mode0, op0);
3756 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3757 op1 = copy_to_mode_reg (mode1, op1);
3759 scratch = gen_reg_rtx (mode0);
3761 pat = GEN_FCN (icode) (scratch, op0, op1,
3762 gen_rtx (SYMBOL_REF, Pmode, opcode));
3767 /* The vec_any* and vec_all* predicates use the same opcodes for two
3768 different operations, but the bits in CR6 will be different
3769 depending on what information we want. So we have to play tricks
3770 with CR6 to get the right bits out.
3772 If you think this is disgusting, look at the specs for the
3773 AltiVec predicates. */
3775 switch (cr6_form_int)
3778 emit_insn (gen_cr6_test_for_zero (target));
3781 emit_insn (gen_cr6_test_for_zero_reverse (target));
3784 emit_insn (gen_cr6_test_for_lt (target));
3787 emit_insn (gen_cr6_test_for_lt_reverse (target));
3790 error ("argument 1 of __builtin_altivec_predicate is out of range");
3798 altivec_expand_stv_builtin (icode, arglist)
3799 enum insn_code icode;
3802 tree arg0 = TREE_VALUE (arglist);
3803 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3804 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3805 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3806 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3807 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3809 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3810 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3811 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3813 /* Invalid arguments. Bail before doing anything stoopid! */
3814 if (arg0 == error_mark_node
3815 || arg1 == error_mark_node
3816 || arg2 == error_mark_node)
3819 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3820 op0 = copy_to_mode_reg (mode2, op0);
3821 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3822 op1 = copy_to_mode_reg (mode0, op1);
3823 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3824 op2 = copy_to_mode_reg (mode1, op2);
3826 pat = GEN_FCN (icode) (op1, op2, op0);
3833 rs6000_expand_ternop_builtin (icode, arglist, target)
3834 enum insn_code icode;
3839 tree arg0 = TREE_VALUE (arglist);
3840 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3841 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3842 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3843 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3844 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3845 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3846 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3847 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3848 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3850 /* If we got invalid arguments bail out before generating bad rtl. */
3851 if (arg0 == error_mark_node
3852 || arg1 == error_mark_node
3853 || arg2 == error_mark_node)
3858 /* Only allow 4-bit unsigned literals. */
3859 case CODE_FOR_altivec_vsldoi_4sf:
3860 case CODE_FOR_altivec_vsldoi_4si:
3861 case CODE_FOR_altivec_vsldoi_8hi:
3862 case CODE_FOR_altivec_vsldoi_16qi:
3863 if (TREE_CODE (arg2) != INTEGER_CST
3864 || TREE_INT_CST_LOW (arg2) & ~0xf)
3866 error ("argument 3 must be a 4-bit unsigned literal");
3875 || GET_MODE (target) != tmode
3876 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3877 target = gen_reg_rtx (tmode);
3879 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3880 op0 = copy_to_mode_reg (mode0, op0);
3881 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3882 op1 = copy_to_mode_reg (mode1, op1);
3883 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3884 op2 = copy_to_mode_reg (mode2, op2);
3886 pat = GEN_FCN (icode) (target, op0, op1, op2);
3894 /* Expand the lvx builtins. */
3896 altivec_expand_ld_builtin (exp, target, expandedp)
3901 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3902 tree arglist = TREE_OPERAND (exp, 1);
3903 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3905 enum machine_mode tmode, mode0;
3906 rtx pat, target, op0;
3907 enum insn_code icode;
3911 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3912 icode = CODE_FOR_altivec_lvx_16qi;
3914 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3915 icode = CODE_FOR_altivec_lvx_8hi;
3917 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3918 icode = CODE_FOR_altivec_lvx_4si;
3920 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3921 icode = CODE_FOR_altivec_lvx_4sf;
3930 arg0 = TREE_VALUE (arglist);
3931 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3932 tmode = insn_data[icode].operand[0].mode;
3933 mode0 = insn_data[icode].operand[1].mode;
3936 || GET_MODE (target) != tmode
3937 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3938 target = gen_reg_rtx (tmode);
3940 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3941 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3943 pat = GEN_FCN (icode) (target, op0);
3950 /* Expand the stvx builtins. */
3952 altivec_expand_st_builtin (exp, target, expandedp)
3957 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3958 tree arglist = TREE_OPERAND (exp, 1);
3959 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3961 enum machine_mode mode0, mode1;
3962 rtx pat, target, op0, op1;
3963 enum insn_code icode;
3967 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3968 icode = CODE_FOR_altivec_stvx_16qi;
3970 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3971 icode = CODE_FOR_altivec_stvx_8hi;
3973 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3974 icode = CODE_FOR_altivec_stvx_4si;
3976 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3977 icode = CODE_FOR_altivec_stvx_4sf;
3984 arg0 = TREE_VALUE (arglist);
3985 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3986 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3987 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3988 mode0 = insn_data[icode].operand[0].mode;
3989 mode1 = insn_data[icode].operand[1].mode;
3991 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3992 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3993 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3994 op1 = copy_to_mode_reg (mode1, op1);
3996 pat = GEN_FCN (icode) (op0, op1);
4004 /* Expand the dst builtins. */
4006 altivec_expand_dst_builtin (exp, target, expandedp)
4011 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4012 tree arglist = TREE_OPERAND (exp, 1);
4013 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4014 tree arg0, arg1, arg2;
4015 enum machine_mode mode0, mode1, mode2;
4016 rtx pat, target, op0, op1, op2;
4017 struct builtin_description *d;
4022 /* Handle DST variants. */
4023 d = (struct builtin_description *) bdesc_dst;
4024 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4025 if (d->code == fcode)
4027 arg0 = TREE_VALUE (arglist);
4028 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4029 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4030 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4031 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4032 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4033 mode0 = insn_data[d->icode].operand[0].mode;
4034 mode1 = insn_data[d->icode].operand[1].mode;
4035 mode2 = insn_data[d->icode].operand[2].mode;
4037 /* Invalid arguments, bail out before generating bad rtl. */
4038 if (arg0 == error_mark_node
4039 || arg1 == error_mark_node
4040 || arg2 == error_mark_node)
4043 if (TREE_CODE (arg2) != INTEGER_CST
4044 || TREE_INT_CST_LOW (arg2) & ~0x3)
4046 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4050 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4051 op0 = copy_to_mode_reg (mode0, op0);
4052 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4053 op1 = copy_to_mode_reg (mode1, op1);
4055 pat = GEN_FCN (d->icode) (op0, op1, op2);
4066 /* Expand the builtin in EXP and store the result in TARGET. Store
4067 true in *EXPANDEDP if we found a builtin to expand. */
4069 altivec_expand_builtin (exp, target, expandedp)
4074 struct builtin_description *d;
4075 struct builtin_description_predicates *dp;
4077 enum insn_code icode;
4078 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4079 tree arglist = TREE_OPERAND (exp, 1);
4080 tree arg0, arg1, arg2;
4081 rtx op0, op1, op2, pat;
4082 enum machine_mode tmode, mode0, mode1, mode2;
4083 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4085 target = altivec_expand_ld_builtin (exp, target, expandedp);
4089 target = altivec_expand_st_builtin (exp, target, expandedp);
4093 target = altivec_expand_dst_builtin (exp, target, expandedp);
4101 case ALTIVEC_BUILTIN_STVX:
4102 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4103 case ALTIVEC_BUILTIN_STVEBX:
4104 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4105 case ALTIVEC_BUILTIN_STVEHX:
4106 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4107 case ALTIVEC_BUILTIN_STVEWX:
4108 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4109 case ALTIVEC_BUILTIN_STVXL:
4110 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
4112 case ALTIVEC_BUILTIN_MFVSCR:
4113 icode = CODE_FOR_altivec_mfvscr;
4114 tmode = insn_data[icode].operand[0].mode;
4117 || GET_MODE (target) != tmode
4118 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4119 target = gen_reg_rtx (tmode);
4121 pat = GEN_FCN (icode) (target);
4127 case ALTIVEC_BUILTIN_MTVSCR:
4128 icode = CODE_FOR_altivec_mtvscr;
4129 arg0 = TREE_VALUE (arglist);
4130 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4131 mode0 = insn_data[icode].operand[0].mode;
4133 /* If we got invalid arguments bail out before generating bad rtl. */
4134 if (arg0 == error_mark_node)
4137 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4138 op0 = copy_to_mode_reg (mode0, op0);
4140 pat = GEN_FCN (icode) (op0);
4145 case ALTIVEC_BUILTIN_DSSALL:
4146 emit_insn (gen_altivec_dssall ());
4149 case ALTIVEC_BUILTIN_DSS:
4150 icode = CODE_FOR_altivec_dss;
4151 arg0 = TREE_VALUE (arglist);
4152 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4153 mode0 = insn_data[icode].operand[0].mode;
4155 /* If we got invalid arguments bail out before generating bad rtl. */
4156 if (arg0 == error_mark_node)
4159 if (TREE_CODE (arg0) != INTEGER_CST
4160 || TREE_INT_CST_LOW (arg0) & ~0x3)
4162 error ("argument to dss must be a 2-bit unsigned literal");
4166 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4167 op0 = copy_to_mode_reg (mode0, op0);
4169 emit_insn (gen_altivec_dss (op0));
4173 /* Expand abs* operations. */
4174 d = (struct builtin_description *) bdesc_abs;
4175 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4176 if (d->code == fcode)
4177 return altivec_expand_abs_builtin (d->icode, arglist, target);
4179 /* Expand the AltiVec predicates. */
4180 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4181 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4182 if (dp->code == fcode)
4183 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4185 /* LV* are funky. We initialized them differently. */
4188 case ALTIVEC_BUILTIN_LVSL:
4189 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4191 case ALTIVEC_BUILTIN_LVSR:
4192 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4194 case ALTIVEC_BUILTIN_LVEBX:
4195 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4197 case ALTIVEC_BUILTIN_LVEHX:
4198 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4200 case ALTIVEC_BUILTIN_LVEWX:
4201 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4203 case ALTIVEC_BUILTIN_LVXL:
4204 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4206 case ALTIVEC_BUILTIN_LVX:
4207 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
4218 /* Expand an expression EXP that calls a built-in function,
4219 with result going to TARGET if that's convenient
4220 (and in mode MODE if that's convenient).
4221 SUBTARGET may be used as the target for computing one of EXP's operands.
4222 IGNORE is nonzero if the value is to be ignored. */
4225 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4228 rtx subtarget ATTRIBUTE_UNUSED;
4229 enum machine_mode mode ATTRIBUTE_UNUSED;
4230 int ignore ATTRIBUTE_UNUSED;
4232 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4233 tree arglist = TREE_OPERAND (exp, 1);
4234 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4235 struct builtin_description *d;
4242 ret = altivec_expand_builtin (exp, target, &success);
4248 /* Handle simple unary operations. */
4249 d = (struct builtin_description *) bdesc_1arg;
4250 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4251 if (d->code == fcode)
4252 return rs6000_expand_unop_builtin (d->icode, arglist, target);
4254 /* Handle simple binary operations. */
4255 d = (struct builtin_description *) bdesc_2arg;
4256 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4257 if (d->code == fcode)
4258 return rs6000_expand_binop_builtin (d->icode, arglist, target);
4260 /* Handle simple ternary operations. */
4261 d = (struct builtin_description *) bdesc_3arg;
4262 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4263 if (d->code == fcode)
4264 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
4271 rs6000_init_builtins ()
4274 altivec_init_builtins ();
4278 altivec_init_builtins (void)
4280 struct builtin_description *d;
4281 struct builtin_description_predicates *dp;
4284 tree endlink = void_list_node;
4286 tree pint_type_node = build_pointer_type (integer_type_node);
4287 tree pvoid_type_node = build_pointer_type (void_type_node);
4288 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4289 tree pchar_type_node = build_pointer_type (char_type_node);
4290 tree pfloat_type_node = build_pointer_type (float_type_node);
4292 tree v4sf_ftype_v4sf_v4sf_v16qi
4293 = build_function_type (V4SF_type_node,
4294 tree_cons (NULL_TREE, V4SF_type_node,
4295 tree_cons (NULL_TREE, V4SF_type_node,
4296 tree_cons (NULL_TREE,
4299 tree v4si_ftype_v4si_v4si_v16qi
4300 = build_function_type (V4SI_type_node,
4301 tree_cons (NULL_TREE, V4SI_type_node,
4302 tree_cons (NULL_TREE, V4SI_type_node,
4303 tree_cons (NULL_TREE,
4306 tree v8hi_ftype_v8hi_v8hi_v16qi
4307 = build_function_type (V8HI_type_node,
4308 tree_cons (NULL_TREE, V8HI_type_node,
4309 tree_cons (NULL_TREE, V8HI_type_node,
4310 tree_cons (NULL_TREE,
4313 tree v16qi_ftype_v16qi_v16qi_v16qi
4314 = build_function_type (V16QI_type_node,
4315 tree_cons (NULL_TREE, V16QI_type_node,
4316 tree_cons (NULL_TREE, V16QI_type_node,
4317 tree_cons (NULL_TREE,
4321 /* V4SI foo (char). */
4322 tree v4si_ftype_char
4323 = build_function_type (V4SI_type_node,
4324 tree_cons (NULL_TREE, char_type_node, endlink));
4326 /* V8HI foo (char). */
4327 tree v8hi_ftype_char
4328 = build_function_type (V8HI_type_node,
4329 tree_cons (NULL_TREE, char_type_node, endlink));
4331 /* V16QI foo (char). */
4332 tree v16qi_ftype_char
4333 = build_function_type (V16QI_type_node,
4334 tree_cons (NULL_TREE, char_type_node, endlink));
4335 /* V4SF foo (V4SF). */
4336 tree v4sf_ftype_v4sf
4337 = build_function_type (V4SF_type_node,
4338 tree_cons (NULL_TREE, V4SF_type_node, endlink));
4340 /* V4SI foo (int *). */
4341 tree v4si_ftype_pint
4342 = build_function_type (V4SI_type_node,
4343 tree_cons (NULL_TREE, pint_type_node, endlink));
4344 /* V8HI foo (short *). */
4345 tree v8hi_ftype_pshort
4346 = build_function_type (V8HI_type_node,
4347 tree_cons (NULL_TREE, pshort_type_node, endlink));
4348 /* V16QI foo (char *). */
4349 tree v16qi_ftype_pchar
4350 = build_function_type (V16QI_type_node,
4351 tree_cons (NULL_TREE, pchar_type_node, endlink));
4352 /* V4SF foo (float *). */
4353 tree v4sf_ftype_pfloat
4354 = build_function_type (V4SF_type_node,
4355 tree_cons (NULL_TREE, pfloat_type_node, endlink));
4357 /* V8HI foo (V16QI). */
4358 tree v8hi_ftype_v16qi
4359 = build_function_type (V8HI_type_node,
4360 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4362 /* void foo (void *, int, char/literal). */
4363 tree void_ftype_pvoid_int_char
4364 = build_function_type (void_type_node,
4365 tree_cons (NULL_TREE, pvoid_type_node,
4366 tree_cons (NULL_TREE, integer_type_node,
4367 tree_cons (NULL_TREE,
4371 /* void foo (int *, V4SI). */
4372 tree void_ftype_pint_v4si
4373 = build_function_type (void_type_node,
4374 tree_cons (NULL_TREE, pint_type_node,
4375 tree_cons (NULL_TREE, V4SI_type_node,
4377 /* void foo (short *, V8HI). */
4378 tree void_ftype_pshort_v8hi
4379 = build_function_type (void_type_node,
4380 tree_cons (NULL_TREE, pshort_type_node,
4381 tree_cons (NULL_TREE, V8HI_type_node,
4383 /* void foo (char *, V16QI). */
4384 tree void_ftype_pchar_v16qi
4385 = build_function_type (void_type_node,
4386 tree_cons (NULL_TREE, pchar_type_node,
4387 tree_cons (NULL_TREE, V16QI_type_node,
4389 /* void foo (float *, V4SF). */
4390 tree void_ftype_pfloat_v4sf
4391 = build_function_type (void_type_node,
4392 tree_cons (NULL_TREE, pfloat_type_node,
4393 tree_cons (NULL_TREE, V4SF_type_node,
4396 /* void foo (V4SI). */
4397 tree void_ftype_v4si
4398 = build_function_type (void_type_node,
4399 tree_cons (NULL_TREE, V4SI_type_node,
4402 /* void foo (vint, int, void *). */
4403 tree void_ftype_v4si_int_pvoid
4404 = build_function_type (void_type_node,
4405 tree_cons (NULL_TREE, V4SI_type_node,
4406 tree_cons (NULL_TREE, integer_type_node,
4407 tree_cons (NULL_TREE,
4411 /* void foo (vchar, int, void *). */
4412 tree void_ftype_v16qi_int_pvoid
4413 = build_function_type (void_type_node,
4414 tree_cons (NULL_TREE, V16QI_type_node,
4415 tree_cons (NULL_TREE, integer_type_node,
4416 tree_cons (NULL_TREE,
4420 /* void foo (vshort, int, void *). */
4421 tree void_ftype_v8hi_int_pvoid
4422 = build_function_type (void_type_node,
4423 tree_cons (NULL_TREE, V8HI_type_node,
4424 tree_cons (NULL_TREE, integer_type_node,
4425 tree_cons (NULL_TREE,
4429 /* void foo (char). */
4431 = build_function_type (void_type_node,
4432 tree_cons (NULL_TREE, char_type_node,
4435 /* void foo (void). */
4436 tree void_ftype_void
4437 = build_function_type (void_type_node, void_list_node);
4439 /* vshort foo (void). */
4440 tree v8hi_ftype_void
4441 = build_function_type (V8HI_type_node, void_list_node);
4443 tree v4si_ftype_v4si_v4si
4444 = build_function_type (V4SI_type_node,
4445 tree_cons (NULL_TREE, V4SI_type_node,
4446 tree_cons (NULL_TREE, V4SI_type_node,
4449 /* These are for the unsigned 5 bit literals. */
4451 tree v4sf_ftype_v4si_char
4452 = build_function_type (V4SF_type_node,
4453 tree_cons (NULL_TREE, V4SI_type_node,
4454 tree_cons (NULL_TREE, char_type_node,
4456 tree v4si_ftype_v4sf_char
4457 = build_function_type (V4SI_type_node,
4458 tree_cons (NULL_TREE, V4SF_type_node,
4459 tree_cons (NULL_TREE, char_type_node,
4461 tree v4si_ftype_v4si_char
4462 = build_function_type (V4SI_type_node,
4463 tree_cons (NULL_TREE, V4SI_type_node,
4464 tree_cons (NULL_TREE, char_type_node,
4466 tree v8hi_ftype_v8hi_char
4467 = build_function_type (V8HI_type_node,
4468 tree_cons (NULL_TREE, V8HI_type_node,
4469 tree_cons (NULL_TREE, char_type_node,
4471 tree v16qi_ftype_v16qi_char
4472 = build_function_type (V16QI_type_node,
4473 tree_cons (NULL_TREE, V16QI_type_node,
4474 tree_cons (NULL_TREE, char_type_node,
4477 /* These are for the unsigned 4 bit literals. */
4479 tree v16qi_ftype_v16qi_v16qi_char
4480 = build_function_type (V16QI_type_node,
4481 tree_cons (NULL_TREE, V16QI_type_node,
4482 tree_cons (NULL_TREE, V16QI_type_node,
4483 tree_cons (NULL_TREE,
4487 tree v8hi_ftype_v8hi_v8hi_char
4488 = build_function_type (V8HI_type_node,
4489 tree_cons (NULL_TREE, V8HI_type_node,
4490 tree_cons (NULL_TREE, V8HI_type_node,
4491 tree_cons (NULL_TREE,
4495 tree v4si_ftype_v4si_v4si_char
4496 = build_function_type (V4SI_type_node,
4497 tree_cons (NULL_TREE, V4SI_type_node,
4498 tree_cons (NULL_TREE, V4SI_type_node,
4499 tree_cons (NULL_TREE,
4503 tree v4sf_ftype_v4sf_v4sf_char
4504 = build_function_type (V4SF_type_node,
4505 tree_cons (NULL_TREE, V4SF_type_node,
4506 tree_cons (NULL_TREE, V4SF_type_node,
4507 tree_cons (NULL_TREE,
4511 /* End of 4 bit literals. */
4513 tree v4sf_ftype_v4sf_v4sf
4514 = build_function_type (V4SF_type_node,
4515 tree_cons (NULL_TREE, V4SF_type_node,
4516 tree_cons (NULL_TREE, V4SF_type_node,
4518 tree v4sf_ftype_v4sf_v4sf_v4si
4519 = build_function_type (V4SF_type_node,
4520 tree_cons (NULL_TREE, V4SF_type_node,
4521 tree_cons (NULL_TREE, V4SF_type_node,
4522 tree_cons (NULL_TREE,
4525 tree v4sf_ftype_v4sf_v4sf_v4sf
4526 = build_function_type (V4SF_type_node,
4527 tree_cons (NULL_TREE, V4SF_type_node,
4528 tree_cons (NULL_TREE, V4SF_type_node,
4529 tree_cons (NULL_TREE,
4532 tree v4si_ftype_v4si_v4si_v4si
4533 = build_function_type (V4SI_type_node,
4534 tree_cons (NULL_TREE, V4SI_type_node,
4535 tree_cons (NULL_TREE, V4SI_type_node,
4536 tree_cons (NULL_TREE,
4540 tree v8hi_ftype_v8hi_v8hi
4541 = build_function_type (V8HI_type_node,
4542 tree_cons (NULL_TREE, V8HI_type_node,
4543 tree_cons (NULL_TREE, V8HI_type_node,
4545 tree v8hi_ftype_v8hi_v8hi_v8hi
4546 = build_function_type (V8HI_type_node,
4547 tree_cons (NULL_TREE, V8HI_type_node,
4548 tree_cons (NULL_TREE, V8HI_type_node,
4549 tree_cons (NULL_TREE,
4552 tree v4si_ftype_v8hi_v8hi_v4si
4553 = build_function_type (V4SI_type_node,
4554 tree_cons (NULL_TREE, V8HI_type_node,
4555 tree_cons (NULL_TREE, V8HI_type_node,
4556 tree_cons (NULL_TREE,
4559 tree v4si_ftype_v16qi_v16qi_v4si
4560 = build_function_type (V4SI_type_node,
4561 tree_cons (NULL_TREE, V16QI_type_node,
4562 tree_cons (NULL_TREE, V16QI_type_node,
4563 tree_cons (NULL_TREE,
4567 tree v16qi_ftype_v16qi_v16qi
4568 = build_function_type (V16QI_type_node,
4569 tree_cons (NULL_TREE, V16QI_type_node,
4570 tree_cons (NULL_TREE, V16QI_type_node,
4573 tree v4si_ftype_v4sf_v4sf
4574 = build_function_type (V4SI_type_node,
4575 tree_cons (NULL_TREE, V4SF_type_node,
4576 tree_cons (NULL_TREE, V4SF_type_node,
4579 tree v4si_ftype_v4si
4580 = build_function_type (V4SI_type_node,
4581 tree_cons (NULL_TREE, V4SI_type_node, endlink));
4583 tree v8hi_ftype_v8hi
4584 = build_function_type (V8HI_type_node,
4585 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4587 tree v16qi_ftype_v16qi
4588 = build_function_type (V16QI_type_node,
4589 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4591 tree v8hi_ftype_v16qi_v16qi
4592 = build_function_type (V8HI_type_node,
4593 tree_cons (NULL_TREE, V16QI_type_node,
4594 tree_cons (NULL_TREE, V16QI_type_node,
4597 tree v4si_ftype_v8hi_v8hi
4598 = build_function_type (V4SI_type_node,
4599 tree_cons (NULL_TREE, V8HI_type_node,
4600 tree_cons (NULL_TREE, V8HI_type_node,
4603 tree v8hi_ftype_v4si_v4si
4604 = build_function_type (V8HI_type_node,
4605 tree_cons (NULL_TREE, V4SI_type_node,
4606 tree_cons (NULL_TREE, V4SI_type_node,
4609 tree v16qi_ftype_v8hi_v8hi
4610 = build_function_type (V16QI_type_node,
4611 tree_cons (NULL_TREE, V8HI_type_node,
4612 tree_cons (NULL_TREE, V8HI_type_node,
4615 tree v4si_ftype_v16qi_v4si
4616 = build_function_type (V4SI_type_node,
4617 tree_cons (NULL_TREE, V16QI_type_node,
4618 tree_cons (NULL_TREE, V4SI_type_node,
4621 tree v4si_ftype_v16qi_v16qi
4622 = build_function_type (V4SI_type_node,
4623 tree_cons (NULL_TREE, V16QI_type_node,
4624 tree_cons (NULL_TREE, V16QI_type_node,
4627 tree v4si_ftype_v8hi_v4si
4628 = build_function_type (V4SI_type_node,
4629 tree_cons (NULL_TREE, V8HI_type_node,
4630 tree_cons (NULL_TREE, V4SI_type_node,
4633 tree v4si_ftype_v8hi
4634 = build_function_type (V4SI_type_node,
4635 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4637 tree int_ftype_v4si_v4si
4638 = build_function_type (integer_type_node,
4639 tree_cons (NULL_TREE, V4SI_type_node,
4640 tree_cons (NULL_TREE, V4SI_type_node,
4643 tree int_ftype_v4sf_v4sf
4644 = build_function_type (integer_type_node,
4645 tree_cons (NULL_TREE, V4SF_type_node,
4646 tree_cons (NULL_TREE, V4SF_type_node,
4649 tree int_ftype_v16qi_v16qi
4650 = build_function_type (integer_type_node,
4651 tree_cons (NULL_TREE, V16QI_type_node,
4652 tree_cons (NULL_TREE, V16QI_type_node,
4655 tree int_ftype_int_v4si_v4si
4656 = build_function_type
4658 tree_cons (NULL_TREE, integer_type_node,
4659 tree_cons (NULL_TREE, V4SI_type_node,
4660 tree_cons (NULL_TREE, V4SI_type_node,
4663 tree int_ftype_int_v4sf_v4sf
4664 = build_function_type
4666 tree_cons (NULL_TREE, integer_type_node,
4667 tree_cons (NULL_TREE, V4SF_type_node,
4668 tree_cons (NULL_TREE, V4SF_type_node,
4671 tree int_ftype_int_v8hi_v8hi
4672 = build_function_type
4674 tree_cons (NULL_TREE, integer_type_node,
4675 tree_cons (NULL_TREE, V8HI_type_node,
4676 tree_cons (NULL_TREE, V8HI_type_node,
4679 tree int_ftype_int_v16qi_v16qi
4680 = build_function_type
4682 tree_cons (NULL_TREE, integer_type_node,
4683 tree_cons (NULL_TREE, V16QI_type_node,
4684 tree_cons (NULL_TREE, V16QI_type_node,
4687 tree v16qi_ftype_int_pvoid
4688 = build_function_type (V16QI_type_node,
4689 tree_cons (NULL_TREE, integer_type_node,
4690 tree_cons (NULL_TREE, pvoid_type_node,
4693 tree v4si_ftype_int_pvoid
4694 = build_function_type (V4SI_type_node,
4695 tree_cons (NULL_TREE, integer_type_node,
4696 tree_cons (NULL_TREE, pvoid_type_node,
4699 tree v8hi_ftype_int_pvoid
4700 = build_function_type (V8HI_type_node,
4701 tree_cons (NULL_TREE, integer_type_node,
4702 tree_cons (NULL_TREE, pvoid_type_node,
4705 tree int_ftype_v8hi_v8hi
4706 = build_function_type (integer_type_node,
4707 tree_cons (NULL_TREE, V8HI_type_node,
4708 tree_cons (NULL_TREE, V8HI_type_node,
4711 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4712 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4713 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4714 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4715 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4716 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4717 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4718 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4719 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4720 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4721 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4722 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4723 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4724 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4725 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4726 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4727 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4728 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4729 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4730 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4731 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4732 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4733 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4734 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4736 /* Add the simple ternary operators. */
4737 d = (struct builtin_description *) bdesc_3arg;
4738 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4741 enum machine_mode mode0, mode1, mode2, mode3;
4747 mode0 = insn_data[d->icode].operand[0].mode;
4748 mode1 = insn_data[d->icode].operand[1].mode;
4749 mode2 = insn_data[d->icode].operand[2].mode;
4750 mode3 = insn_data[d->icode].operand[3].mode;
4752 /* When all four are of the same mode. */
4753 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4758 type = v4si_ftype_v4si_v4si_v4si;
4761 type = v4sf_ftype_v4sf_v4sf_v4sf;
4764 type = v8hi_ftype_v8hi_v8hi_v8hi;
4767 type = v16qi_ftype_v16qi_v16qi_v16qi;
4773 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4778 type = v4si_ftype_v4si_v4si_v16qi;
4781 type = v4sf_ftype_v4sf_v4sf_v16qi;
4784 type = v8hi_ftype_v8hi_v8hi_v16qi;
4787 type = v16qi_ftype_v16qi_v16qi_v16qi;
4793 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4794 && mode3 == V4SImode)
4795 type = v4si_ftype_v16qi_v16qi_v4si;
4796 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4797 && mode3 == V4SImode)
4798 type = v4si_ftype_v8hi_v8hi_v4si;
4799 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4800 && mode3 == V4SImode)
4801 type = v4sf_ftype_v4sf_v4sf_v4si;
4803 /* vchar, vchar, vchar, 4 bit literal. */
4804 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4806 type = v16qi_ftype_v16qi_v16qi_char;
4808 /* vshort, vshort, vshort, 4 bit literal. */
4809 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4811 type = v8hi_ftype_v8hi_v8hi_char;
4813 /* vint, vint, vint, 4 bit literal. */
4814 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4816 type = v4si_ftype_v4si_v4si_char;
4818 /* vfloat, vfloat, vfloat, 4 bit literal. */
4819 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4821 type = v4sf_ftype_v4sf_v4sf_char;
4826 def_builtin (d->mask, d->name, type, d->code);
4829 /* Add the DST variants. */
4830 d = (struct builtin_description *) bdesc_dst;
4831 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4832 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4834 /* Initialize the predicates. */
4835 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4836 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4838 enum machine_mode mode1;
4841 mode1 = insn_data[dp->icode].operand[1].mode;
4846 type = int_ftype_int_v4si_v4si;
4849 type = int_ftype_int_v8hi_v8hi;
4852 type = int_ftype_int_v16qi_v16qi;
4855 type = int_ftype_int_v4sf_v4sf;
4861 def_builtin (dp->mask, dp->name, type, dp->code);
4864 /* Add the simple binary operators. */
4865 d = (struct builtin_description *) bdesc_2arg;
4866 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4868 enum machine_mode mode0, mode1, mode2;
4874 mode0 = insn_data[d->icode].operand[0].mode;
4875 mode1 = insn_data[d->icode].operand[1].mode;
4876 mode2 = insn_data[d->icode].operand[2].mode;
4878 /* When all three operands are of the same mode. */
4879 if (mode0 == mode1 && mode1 == mode2)
4884 type = v4sf_ftype_v4sf_v4sf;
4887 type = v4si_ftype_v4si_v4si;
4890 type = v16qi_ftype_v16qi_v16qi;
4893 type = v8hi_ftype_v8hi_v8hi;
4900 /* A few other combos we really don't want to do manually. */
4902 /* vint, vfloat, vfloat. */
4903 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4904 type = v4si_ftype_v4sf_v4sf;
4906 /* vshort, vchar, vchar. */
4907 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4908 type = v8hi_ftype_v16qi_v16qi;
4910 /* vint, vshort, vshort. */
4911 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4912 type = v4si_ftype_v8hi_v8hi;
4914 /* vshort, vint, vint. */
4915 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4916 type = v8hi_ftype_v4si_v4si;
4918 /* vchar, vshort, vshort. */
4919 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4920 type = v16qi_ftype_v8hi_v8hi;
4922 /* vint, vchar, vint. */
4923 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4924 type = v4si_ftype_v16qi_v4si;
4926 /* vint, vchar, vchar. */
4927 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4928 type = v4si_ftype_v16qi_v16qi;
4930 /* vint, vshort, vint. */
4931 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4932 type = v4si_ftype_v8hi_v4si;
4934 /* vint, vint, 5 bit literal. */
4935 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4936 type = v4si_ftype_v4si_char;
4938 /* vshort, vshort, 5 bit literal. */
4939 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4940 type = v8hi_ftype_v8hi_char;
4942 /* vchar, vchar, 5 bit literal. */
4943 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4944 type = v16qi_ftype_v16qi_char;
4946 /* vfloat, vint, 5 bit literal. */
4947 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4948 type = v4sf_ftype_v4si_char;
4950 /* vint, vfloat, 5 bit literal. */
4951 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4952 type = v4si_ftype_v4sf_char;
4955 else if (mode0 == SImode)
4960 type = int_ftype_v4si_v4si;
4963 type = int_ftype_v4sf_v4sf;
4966 type = int_ftype_v16qi_v16qi;
4969 type = int_ftype_v8hi_v8hi;
4979 def_builtin (d->mask, d->name, type, d->code);
4982 /* Initialize the abs* operators. */
4983 d = (struct builtin_description *) bdesc_abs;
4984 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4986 enum machine_mode mode0;
4989 mode0 = insn_data[d->icode].operand[0].mode;
4994 type = v4si_ftype_v4si;
4997 type = v8hi_ftype_v8hi;
5000 type = v16qi_ftype_v16qi;
5003 type = v4sf_ftype_v4sf;
5009 def_builtin (d->mask, d->name, type, d->code);
5012 /* Add the simple unary operators. */
5013 d = (struct builtin_description *) bdesc_1arg;
5014 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5016 enum machine_mode mode0, mode1;
5022 mode0 = insn_data[d->icode].operand[0].mode;
5023 mode1 = insn_data[d->icode].operand[1].mode;
5025 if (mode0 == V4SImode && mode1 == QImode)
5026 type = v4si_ftype_char;
5027 else if (mode0 == V8HImode && mode1 == QImode)
5028 type = v8hi_ftype_char;
5029 else if (mode0 == V16QImode && mode1 == QImode)
5030 type = v16qi_ftype_char;
5031 else if (mode0 == V4SFmode && mode1 == V4SFmode)
5032 type = v4sf_ftype_v4sf;
5033 else if (mode0 == V8HImode && mode1 == V16QImode)
5034 type = v8hi_ftype_v16qi;
5035 else if (mode0 == V4SImode && mode1 == V8HImode)
5036 type = v4si_ftype_v8hi;
5040 def_builtin (d->mask, d->name, type, d->code);
5045 /* Generate a memory reference for expand_block_move, copying volatile,
5046 and other bits from an original memory reference. */
5049 expand_block_move_mem (mode, addr, orig_mem)
5050 enum machine_mode mode;
5054 rtx mem = gen_rtx_MEM (mode, addr);
5056 MEM_COPY_ATTRIBUTES (mem, orig_mem);
5060 /* Expand a block move operation, and return 1 if successful. Return 0
5061 if we should let the compiler generate normal code.
5063 operands[0] is the destination
5064 operands[1] is the source
5065 operands[2] is the length
5066 operands[3] is the alignment */
5068 #define MAX_MOVE_REG 4
5071 expand_block_move (operands)
5074 rtx orig_dest = operands[0];
5075 rtx orig_src = operands[1];
5076 rtx bytes_rtx = operands[2];
5077 rtx align_rtx = operands[3];
5078 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5089 rtx stores[MAX_MOVE_REG];
5092 /* If this is not a fixed size move, just call memcpy */
5096 /* If this is not a fixed size alignment, abort */
5097 if (GET_CODE (align_rtx) != CONST_INT)
5099 align = INTVAL (align_rtx);
5101 /* Anything to move? */
5102 bytes = INTVAL (bytes_rtx);
5106 /* store_one_arg depends on expand_block_move to handle at least the size of
5107 reg_parm_stack_space. */
5108 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
5111 /* Move the address into scratch registers. */
5112 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
5113 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
5115 if (TARGET_STRING) /* string instructions are available */
5117 for ( ; bytes > 0; bytes -= move_bytes)
5119 if (bytes > 24 /* move up to 32 bytes at a time */
5127 && ! fixed_regs[12])
5129 move_bytes = (bytes > 32) ? 32 : bytes;
5130 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
5133 expand_block_move_mem (BLKmode,
5136 GEN_INT ((move_bytes == 32)
5140 else if (bytes > 16 /* move up to 24 bytes at a time */
5146 && ! fixed_regs[10])
5148 move_bytes = (bytes > 24) ? 24 : bytes;
5149 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
5152 expand_block_move_mem (BLKmode,
5155 GEN_INT (move_bytes),
5158 else if (bytes > 8 /* move up to 16 bytes at a time */
5164 move_bytes = (bytes > 16) ? 16 : bytes;
5165 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
5168 expand_block_move_mem (BLKmode,
5171 GEN_INT (move_bytes),
5174 else if (bytes >= 8 && TARGET_POWERPC64
5175 /* 64-bit loads and stores require word-aligned
5177 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5180 tmp_reg = gen_reg_rtx (DImode);
5181 emit_move_insn (tmp_reg,
5182 expand_block_move_mem (DImode,
5183 src_reg, orig_src));
5184 emit_move_insn (expand_block_move_mem (DImode,
5185 dest_reg, orig_dest),
5188 else if (bytes > 4 && !TARGET_POWERPC64)
5189 { /* move up to 8 bytes at a time */
5190 move_bytes = (bytes > 8) ? 8 : bytes;
5191 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
5194 expand_block_move_mem (BLKmode,
5197 GEN_INT (move_bytes),
5200 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5201 { /* move 4 bytes */
5203 tmp_reg = gen_reg_rtx (SImode);
5204 emit_move_insn (tmp_reg,
5205 expand_block_move_mem (SImode,
5206 src_reg, orig_src));
5207 emit_move_insn (expand_block_move_mem (SImode,
5208 dest_reg, orig_dest),
5211 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5212 { /* move 2 bytes */
5214 tmp_reg = gen_reg_rtx (HImode);
5215 emit_move_insn (tmp_reg,
5216 expand_block_move_mem (HImode,
5217 src_reg, orig_src));
5218 emit_move_insn (expand_block_move_mem (HImode,
5219 dest_reg, orig_dest),
5222 else if (bytes == 1) /* move 1 byte */
5225 tmp_reg = gen_reg_rtx (QImode);
5226 emit_move_insn (tmp_reg,
5227 expand_block_move_mem (QImode,
5228 src_reg, orig_src));
5229 emit_move_insn (expand_block_move_mem (QImode,
5230 dest_reg, orig_dest),
5234 { /* move up to 4 bytes at a time */
5235 move_bytes = (bytes > 4) ? 4 : bytes;
5236 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
5239 expand_block_move_mem (BLKmode,
5242 GEN_INT (move_bytes),
5246 if (bytes > move_bytes)
5248 if (! TARGET_POWERPC64)
5250 emit_insn (gen_addsi3 (src_reg, src_reg,
5251 GEN_INT (move_bytes)));
5252 emit_insn (gen_addsi3 (dest_reg, dest_reg,
5253 GEN_INT (move_bytes)));
5257 emit_insn (gen_adddi3 (src_reg, src_reg,
5258 GEN_INT (move_bytes)));
5259 emit_insn (gen_adddi3 (dest_reg, dest_reg,
5260 GEN_INT (move_bytes)));
5266 else /* string instructions not available */
5268 num_reg = offset = 0;
5269 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
5271 /* Calculate the correct offset for src/dest */
5275 dest_addr = dest_reg;
5279 src_addr = plus_constant (src_reg, offset);
5280 dest_addr = plus_constant (dest_reg, offset);
5283 /* Generate the appropriate load and store, saving the stores
5285 if (bytes >= 8 && TARGET_POWERPC64
5286 /* 64-bit loads and stores require word-aligned
5288 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5291 tmp_reg = gen_reg_rtx (DImode);
5292 emit_insn (gen_movdi (tmp_reg,
5293 expand_block_move_mem (DImode,
5296 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
5301 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5304 tmp_reg = gen_reg_rtx (SImode);
5305 emit_insn (gen_movsi (tmp_reg,
5306 expand_block_move_mem (SImode,
5309 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5314 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5317 tmp_reg = gen_reg_rtx (HImode);
5318 emit_insn (gen_movhi (tmp_reg,
5319 expand_block_move_mem (HImode,
5322 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
5330 tmp_reg = gen_reg_rtx (QImode);
5331 emit_insn (gen_movqi (tmp_reg,
5332 expand_block_move_mem (QImode,
5335 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
5341 if (num_reg >= MAX_MOVE_REG)
5343 for (i = 0; i < num_reg; i++)
5344 emit_insn (stores[i]);
5349 for (i = 0; i < num_reg; i++)
5350 emit_insn (stores[i]);
5357 /* Return 1 if OP is a load multiple operation. It is known to be a
5358 PARALLEL and the first section will be tested. */
5361 load_multiple_operation (op, mode)
5363 enum machine_mode mode ATTRIBUTE_UNUSED;
5365 int count = XVECLEN (op, 0);
5366 unsigned int dest_regno;
5370 /* Perform a quick check so we don't blow up below. */
5372 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5373 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5374 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5377 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5378 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5380 for (i = 1; i < count; i++)
5382 rtx elt = XVECEXP (op, 0, i);
5384 if (GET_CODE (elt) != SET
5385 || GET_CODE (SET_DEST (elt)) != REG
5386 || GET_MODE (SET_DEST (elt)) != SImode
5387 || REGNO (SET_DEST (elt)) != dest_regno + i
5388 || GET_CODE (SET_SRC (elt)) != MEM
5389 || GET_MODE (SET_SRC (elt)) != SImode
5390 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5391 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5392 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5393 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5400 /* Similar, but tests for store multiple. Here, the second vector element
5401 is a CLOBBER. It will be tested later. */
5404 store_multiple_operation (op, mode)
5406 enum machine_mode mode ATTRIBUTE_UNUSED;
5408 int count = XVECLEN (op, 0) - 1;
5409 unsigned int src_regno;
5413 /* Perform a quick check so we don't blow up below. */
5415 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5416 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5417 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5420 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5421 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5423 for (i = 1; i < count; i++)
5425 rtx elt = XVECEXP (op, 0, i + 1);
5427 if (GET_CODE (elt) != SET
5428 || GET_CODE (SET_SRC (elt)) != REG
5429 || GET_MODE (SET_SRC (elt)) != SImode
5430 || REGNO (SET_SRC (elt)) != src_regno + i
5431 || GET_CODE (SET_DEST (elt)) != MEM
5432 || GET_MODE (SET_DEST (elt)) != SImode
5433 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5434 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5435 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5436 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5443 /* Return 1 for a parallel vrsave operation. */
5446 vrsave_operation (op, mode)
5448 enum machine_mode mode ATTRIBUTE_UNUSED;
5450 int count = XVECLEN (op, 0);
5451 unsigned int dest_regno, src_regno;
5455 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5456 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5457 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5460 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5461 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5463 if (dest_regno != VRSAVE_REGNO
5464 && src_regno != VRSAVE_REGNO)
5467 for (i = 1; i < count; i++)
5469 rtx elt = XVECEXP (op, 0, i);
5471 if (GET_CODE (elt) != CLOBBER
5472 && GET_CODE (elt) != SET)
5479 /* Return 1 for an PARALLEL suitable for mtcrf. */
5482 mtcrf_operation (op, mode)
5484 enum machine_mode mode ATTRIBUTE_UNUSED;
5486 int count = XVECLEN (op, 0);
5490 /* Perform a quick check so we don't blow up below. */
5492 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5493 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5494 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5496 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5498 if (GET_CODE (src_reg) != REG
5499 || GET_MODE (src_reg) != SImode
5500 || ! INT_REGNO_P (REGNO (src_reg)))
5503 for (i = 0; i < count; i++)
5505 rtx exp = XVECEXP (op, 0, i);
5509 if (GET_CODE (exp) != SET
5510 || GET_CODE (SET_DEST (exp)) != REG
5511 || GET_MODE (SET_DEST (exp)) != CCmode
5512 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5514 unspec = SET_SRC (exp);
5515 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5517 if (GET_CODE (unspec) != UNSPEC
5518 || XINT (unspec, 1) != 20
5519 || XVECLEN (unspec, 0) != 2
5520 || XVECEXP (unspec, 0, 0) != src_reg
5521 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5522 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5528 /* Return 1 for an PARALLEL suitable for lmw. */
5531 lmw_operation (op, mode)
5533 enum machine_mode mode ATTRIBUTE_UNUSED;
5535 int count = XVECLEN (op, 0);
5536 unsigned int dest_regno;
5538 unsigned int base_regno;
5539 HOST_WIDE_INT offset;
5542 /* Perform a quick check so we don't blow up below. */
5544 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5545 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5546 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5549 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5550 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5553 || count != 32 - (int) dest_regno)
5556 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5559 base_regno = REGNO (src_addr);
5560 if (base_regno == 0)
5563 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5565 offset = INTVAL (XEXP (src_addr, 1));
5566 base_regno = REGNO (XEXP (src_addr, 0));
5571 for (i = 0; i < count; i++)
5573 rtx elt = XVECEXP (op, 0, i);
5576 HOST_WIDE_INT newoffset;
5578 if (GET_CODE (elt) != SET
5579 || GET_CODE (SET_DEST (elt)) != REG
5580 || GET_MODE (SET_DEST (elt)) != SImode
5581 || REGNO (SET_DEST (elt)) != dest_regno + i
5582 || GET_CODE (SET_SRC (elt)) != MEM
5583 || GET_MODE (SET_SRC (elt)) != SImode)
5585 newaddr = XEXP (SET_SRC (elt), 0);
5586 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5591 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5593 addr_reg = XEXP (newaddr, 0);
5594 newoffset = INTVAL (XEXP (newaddr, 1));
5598 if (REGNO (addr_reg) != base_regno
5599 || newoffset != offset + 4 * i)
5606 /* Return 1 for an PARALLEL suitable for stmw. */
5609 stmw_operation (op, mode)
5611 enum machine_mode mode ATTRIBUTE_UNUSED;
5613 int count = XVECLEN (op, 0);
5614 unsigned int src_regno;
5616 unsigned int base_regno;
5617 HOST_WIDE_INT offset;
5620 /* Perform a quick check so we don't blow up below. */
5622 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5623 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5624 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5627 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5628 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5631 || count != 32 - (int) src_regno)
5634 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5637 base_regno = REGNO (dest_addr);
5638 if (base_regno == 0)
5641 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5643 offset = INTVAL (XEXP (dest_addr, 1));
5644 base_regno = REGNO (XEXP (dest_addr, 0));
5649 for (i = 0; i < count; i++)
5651 rtx elt = XVECEXP (op, 0, i);
5654 HOST_WIDE_INT newoffset;
5656 if (GET_CODE (elt) != SET
5657 || GET_CODE (SET_SRC (elt)) != REG
5658 || GET_MODE (SET_SRC (elt)) != SImode
5659 || REGNO (SET_SRC (elt)) != src_regno + i
5660 || GET_CODE (SET_DEST (elt)) != MEM
5661 || GET_MODE (SET_DEST (elt)) != SImode)
5663 newaddr = XEXP (SET_DEST (elt), 0);
5664 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5669 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5671 addr_reg = XEXP (newaddr, 0);
5672 newoffset = INTVAL (XEXP (newaddr, 1));
5676 if (REGNO (addr_reg) != base_regno
5677 || newoffset != offset + 4 * i)
5684 /* A validation routine: say whether CODE, a condition code, and MODE
5685 match. The other alternatives either don't make sense or should
5686 never be generated. */
5689 validate_condition_mode (code, mode)
5691 enum machine_mode mode;
5693 if (GET_RTX_CLASS (code) != '<'
5694 || GET_MODE_CLASS (mode) != MODE_CC)
5697 /* These don't make sense. */
5698 if ((code == GT || code == LT || code == GE || code == LE)
5699 && mode == CCUNSmode)
5702 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5703 && mode != CCUNSmode)
5706 if (mode != CCFPmode
5707 && (code == ORDERED || code == UNORDERED
5708 || code == UNEQ || code == LTGT
5709 || code == UNGT || code == UNLT
5710 || code == UNGE || code == UNLE))
5713 /* These should never be generated except for
5714 flag_unsafe_math_optimizations. */
5715 if (mode == CCFPmode
5716 && ! flag_unsafe_math_optimizations
5717 && (code == LE || code == GE
5718 || code == UNEQ || code == LTGT
5719 || code == UNGT || code == UNLT))
5722 /* These are invalid; the information is not there. */
5723 if (mode == CCEQmode
5724 && code != EQ && code != NE)
5728 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5729 We only check the opcode against the mode of the CC value here. */
5732 branch_comparison_operator (op, mode)
5734 enum machine_mode mode ATTRIBUTE_UNUSED;
5736 enum rtx_code code = GET_CODE (op);
5737 enum machine_mode cc_mode;
5739 if (GET_RTX_CLASS (code) != '<')
5742 cc_mode = GET_MODE (XEXP (op, 0));
5743 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5746 validate_condition_mode (code, cc_mode);
5751 /* Return 1 if OP is a comparison operation that is valid for a branch
5752 insn and which is true if the corresponding bit in the CC register
5756 branch_positive_comparison_operator (op, mode)
5758 enum machine_mode mode;
5762 if (! branch_comparison_operator (op, mode))
5765 code = GET_CODE (op);
5766 return (code == EQ || code == LT || code == GT
5767 || code == LTU || code == GTU
5768 || code == UNORDERED);
5771 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5772 We check the opcode against the mode of the CC value and disallow EQ or
5773 NE comparisons for integers. */
5776 scc_comparison_operator (op, mode)
5778 enum machine_mode mode;
5780 enum rtx_code code = GET_CODE (op);
5781 enum machine_mode cc_mode;
5783 if (GET_MODE (op) != mode && mode != VOIDmode)
5786 if (GET_RTX_CLASS (code) != '<')
5789 cc_mode = GET_MODE (XEXP (op, 0));
5790 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5793 validate_condition_mode (code, cc_mode);
5795 if (code == NE && cc_mode != CCFPmode)
5802 trap_comparison_operator (op, mode)
5804 enum machine_mode mode;
5806 if (mode != VOIDmode && mode != GET_MODE (op))
5808 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5812 boolean_operator (op, mode)
5814 enum machine_mode mode ATTRIBUTE_UNUSED;
5816 enum rtx_code code = GET_CODE (op);
5817 return (code == AND || code == IOR || code == XOR);
5821 boolean_or_operator (op, mode)
5823 enum machine_mode mode ATTRIBUTE_UNUSED;
5825 enum rtx_code code = GET_CODE (op);
5826 return (code == IOR || code == XOR);
5830 min_max_operator (op, mode)
5832 enum machine_mode mode ATTRIBUTE_UNUSED;
5834 enum rtx_code code = GET_CODE (op);
5835 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5838 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5839 mask required to convert the result of a rotate insn into a shift
5840 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
5843 includes_lshift_p (shiftop, andop)
5847 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5849 shift_mask <<= INTVAL (shiftop);
5851 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5854 /* Similar, but for right shift. */
5857 includes_rshift_p (shiftop, andop)
5861 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5863 shift_mask >>= INTVAL (shiftop);
5865 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5868 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5869 to perform a left shift. It must have exactly SHIFTOP least
5870 signifigant 0's, then one or more 1's, then zero or more 0's. */
5873 includes_rldic_lshift_p (shiftop, andop)
5877 if (GET_CODE (andop) == CONST_INT)
5879 HOST_WIDE_INT c, lsb, shift_mask;
5882 if (c == 0 || c == ~0)
5886 shift_mask <<= INTVAL (shiftop);
5888 /* Find the least signifigant one bit. */
5891 /* It must coincide with the LSB of the shift mask. */
5892 if (-lsb != shift_mask)
5895 /* Invert to look for the next transition (if any). */
5898 /* Remove the low group of ones (originally low group of zeros). */
5901 /* Again find the lsb, and check we have all 1's above. */
5905 else if (GET_CODE (andop) == CONST_DOUBLE
5906 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5908 HOST_WIDE_INT low, high, lsb;
5909 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5911 low = CONST_DOUBLE_LOW (andop);
5912 if (HOST_BITS_PER_WIDE_INT < 64)
5913 high = CONST_DOUBLE_HIGH (andop);
5915 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5916 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
5919 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5921 shift_mask_high = ~0;
5922 if (INTVAL (shiftop) > 32)
5923 shift_mask_high <<= INTVAL (shiftop) - 32;
5927 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5934 return high == -lsb;
5937 shift_mask_low = ~0;
5938 shift_mask_low <<= INTVAL (shiftop);
5942 if (-lsb != shift_mask_low)
5945 if (HOST_BITS_PER_WIDE_INT < 64)
5950 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5953 return high == -lsb;
5957 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5963 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5964 to perform a left shift. It must have SHIFTOP or more least
5965 signifigant 0's, with the remainder of the word 1's. */
5968 includes_rldicr_lshift_p (shiftop, andop)
5972 if (GET_CODE (andop) == CONST_INT)
5974 HOST_WIDE_INT c, lsb, shift_mask;
5977 shift_mask <<= INTVAL (shiftop);
5980 /* Find the least signifigant one bit. */
5983 /* It must be covered by the shift mask.
5984 This test also rejects c == 0. */
5985 if ((lsb & shift_mask) == 0)
5988 /* Check we have all 1's above the transition, and reject all 1's. */
5989 return c == -lsb && lsb != 1;
5991 else if (GET_CODE (andop) == CONST_DOUBLE
5992 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5994 HOST_WIDE_INT low, lsb, shift_mask_low;
5996 low = CONST_DOUBLE_LOW (andop);
5998 if (HOST_BITS_PER_WIDE_INT < 64)
6000 HOST_WIDE_INT high, shift_mask_high;
6002 high = CONST_DOUBLE_HIGH (andop);
6006 shift_mask_high = ~0;
6007 if (INTVAL (shiftop) > 32)
6008 shift_mask_high <<= INTVAL (shiftop) - 32;
6012 if ((lsb & shift_mask_high) == 0)
6015 return high == -lsb;
6021 shift_mask_low = ~0;
6022 shift_mask_low <<= INTVAL (shiftop);
6026 if ((lsb & shift_mask_low) == 0)
6029 return low == -lsb && lsb != 1;
6035 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
6036 for lfq and stfq insns.
6038 Note reg1 and reg2 *must* be hard registers. To be sure we will
6039 abort if we are passed pseudo registers. */
6042 registers_ok_for_quad_peep (reg1, reg2)
6045 /* We might have been passed a SUBREG. */
6046 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
6049 return (REGNO (reg1) == REGNO (reg2) - 1);
6052 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
6053 addr1 and addr2 must be in consecutive memory locations
6054 (addr2 == addr1 + 8). */
6057 addrs_ok_for_quad_peep (addr1, addr2)
6064 /* Extract an offset (if used) from the first addr. */
6065 if (GET_CODE (addr1) == PLUS)
6067 /* If not a REG, return zero. */
6068 if (GET_CODE (XEXP (addr1, 0)) != REG)
6072 reg1 = REGNO (XEXP (addr1, 0));
6073 /* The offset must be constant! */
6074 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
6076 offset1 = INTVAL (XEXP (addr1, 1));
6079 else if (GET_CODE (addr1) != REG)
6083 reg1 = REGNO (addr1);
6084 /* This was a simple (mem (reg)) expression. Offset is 0. */
6088 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
6089 if (GET_CODE (addr2) != PLUS)
6092 if (GET_CODE (XEXP (addr2, 0)) != REG
6093 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
6096 if (reg1 != REGNO (XEXP (addr2, 0)))
6099 /* The offset for the second addr must be 8 more than the first addr. */
6100 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
6103 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
6108 /* Return the register class of a scratch register needed to copy IN into
6109 or out of a register in CLASS in MODE. If it can be done directly,
6110 NO_REGS is returned. */
6113 secondary_reload_class (class, mode, in)
6114 enum reg_class class;
6115 enum machine_mode mode ATTRIBUTE_UNUSED;
6120 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
6122 /* We cannot copy a symbolic operand directly into anything
6123 other than BASE_REGS for TARGET_ELF. So indicate that a
6124 register from BASE_REGS is needed as an intermediate
6127 On Darwin, pic addresses require a load from memory, which
6128 needs a base register. */
6129 if (class != BASE_REGS
6130 && (GET_CODE (in) == SYMBOL_REF
6131 || GET_CODE (in) == HIGH
6132 || GET_CODE (in) == LABEL_REF
6133 || GET_CODE (in) == CONST))
6137 if (GET_CODE (in) == REG)
6140 if (regno >= FIRST_PSEUDO_REGISTER)
6142 regno = true_regnum (in);
6143 if (regno >= FIRST_PSEUDO_REGISTER)
6147 else if (GET_CODE (in) == SUBREG)
6149 regno = true_regnum (in);
6150 if (regno >= FIRST_PSEUDO_REGISTER)
6156 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
6158 if (class == GENERAL_REGS || class == BASE_REGS
6159 || (regno >= 0 && INT_REGNO_P (regno)))
6162 /* Constants, memory, and FP registers can go into FP registers. */
6163 if ((regno == -1 || FP_REGNO_P (regno))
6164 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
6167 /* Memory, and AltiVec registers can go into AltiVec registers. */
6168 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
6169 && class == ALTIVEC_REGS)
6172 /* We can copy among the CR registers. */
6173 if ((class == CR_REGS || class == CR0_REGS)
6174 && regno >= 0 && CR_REGNO_P (regno))
6177 /* Otherwise, we need GENERAL_REGS. */
6178 return GENERAL_REGS;
6181 /* Given a comparison operation, return the bit number in CCR to test. We
6182 know this is a valid comparison.
6184 SCC_P is 1 if this is for an scc. That means that %D will have been
6185 used instead of %C, so the bits will be in different places.
6187 Return -1 if OP isn't a valid comparison for some reason. */
6194 enum rtx_code code = GET_CODE (op);
6195 enum machine_mode cc_mode;
6200 if (GET_RTX_CLASS (code) != '<')
6205 if (GET_CODE (reg) != REG
6206 || ! CR_REGNO_P (REGNO (reg)))
6209 cc_mode = GET_MODE (reg);
6210 cc_regnum = REGNO (reg);
6211 base_bit = 4 * (cc_regnum - CR0_REGNO);
6213 validate_condition_mode (code, cc_mode);
6218 return scc_p ? base_bit + 3 : base_bit + 2;
6220 return base_bit + 2;
6221 case GT: case GTU: case UNLE:
6222 return base_bit + 1;
6223 case LT: case LTU: case UNGE:
6225 case ORDERED: case UNORDERED:
6226 return base_bit + 3;
6229 /* If scc, we will have done a cror to put the bit in the
6230 unordered position. So test that bit. For integer, this is ! LT
6231 unless this is an scc insn. */
6232 return scc_p ? base_bit + 3 : base_bit;
6235 return scc_p ? base_bit + 3 : base_bit + 1;
6242 /* Return the GOT register. */
6245 rs6000_got_register (value)
6246 rtx value ATTRIBUTE_UNUSED;
6248 /* The second flow pass currently (June 1999) can't update
6249 regs_ever_live without disturbing other parts of the compiler, so
6250 update it here to make the prolog/epilogue code happy. */
6251 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
6252 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
6254 current_function_uses_pic_offset_table = 1;
6256 return pic_offset_table_rtx;
6259 /* Function to init struct machine_function.
6260 This will be called, via a pointer variable,
6261 from push_function_context. */
6263 static struct machine_function *
6264 rs6000_init_machine_status ()
6266 return ggc_alloc_cleared (sizeof (machine_function));
6269 /* Print an operand. Recognize special options, documented below. */
6272 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6273 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6275 #define SMALL_DATA_RELOC "sda21"
6276 #define SMALL_DATA_REG 0
6280 print_operand (file, x, code)
6288 /* These macros test for integers and extract the low-order bits. */
6290 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6291 && GET_MODE (X) == VOIDmode)
6293 #define INT_LOWPART(X) \
6294 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6299 /* Write out an instruction after the call which may be replaced
6300 with glue code by the loader. This depends on the AIX version. */
6301 asm_fprintf (file, RS6000_CALL_GLUE);
6304 /* %a is output_address. */
6307 /* If X is a constant integer whose low-order 5 bits are zero,
6308 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6309 in the AIX assembler where "sri" with a zero shift count
6310 writes a trash instruction. */
6311 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6318 /* If constant, low-order 16 bits of constant, unsigned.
6319 Otherwise, write normally. */
6321 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6323 print_operand (file, x, 0);
6327 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6328 for 64-bit mask direction. */
6329 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6332 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6336 /* There used to be a comment for 'C' reading "This is an
6337 optional cror needed for certain floating-point
6338 comparisons. Otherwise write nothing." */
6340 /* Similar, except that this is for an scc, so we must be able to
6341 encode the test in a single bit that is one. We do the above
6342 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6343 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6344 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6346 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6348 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6350 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6353 else if (GET_CODE (x) == NE)
6355 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6357 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6358 base_bit + 2, base_bit + 2);
6363 /* X is a CR register. Print the number of the EQ bit of the CR */
6364 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6365 output_operand_lossage ("invalid %%E value");
6367 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6371 /* X is a CR register. Print the shift count needed to move it
6372 to the high-order four bits. */
6373 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6374 output_operand_lossage ("invalid %%f value");
6376 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6380 /* Similar, but print the count for the rotate in the opposite
6382 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6383 output_operand_lossage ("invalid %%F value");
6385 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6389 /* X is a constant integer. If it is negative, print "m",
6390 otherwise print "z". This is to make a aze or ame insn. */
6391 if (GET_CODE (x) != CONST_INT)
6392 output_operand_lossage ("invalid %%G value");
6393 else if (INTVAL (x) >= 0)
6400 /* If constant, output low-order five bits. Otherwise, write
6403 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6405 print_operand (file, x, 0);
6409 /* If constant, output low-order six bits. Otherwise, write
6412 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6414 print_operand (file, x, 0);
6418 /* Print `i' if this is a constant, else nothing. */
6424 /* Write the bit number in CCR for jump. */
6427 output_operand_lossage ("invalid %%j code");
6429 fprintf (file, "%d", i);
6433 /* Similar, but add one for shift count in rlinm for scc and pass
6434 scc flag to `ccr_bit'. */
6437 output_operand_lossage ("invalid %%J code");
6439 /* If we want bit 31, write a shift count of zero, not 32. */
6440 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6444 /* X must be a constant. Write the 1's complement of the
6447 output_operand_lossage ("invalid %%k value");
6449 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6453 /* X must be a symbolic constant on ELF. Write an
6454 expression suitable for an 'addi' that adds in the low 16
6456 if (GET_CODE (x) != CONST)
6458 print_operand_address (file, x);
6463 if (GET_CODE (XEXP (x, 0)) != PLUS
6464 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6465 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6466 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6467 output_operand_lossage ("invalid %%K value");
6468 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6470 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6474 /* %l is output_asm_label. */
6477 /* Write second word of DImode or DFmode reference. Works on register
6478 or non-indexed memory only. */
6479 if (GET_CODE (x) == REG)
6480 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6481 else if (GET_CODE (x) == MEM)
6483 /* Handle possible auto-increment. Since it is pre-increment and
6484 we have already done it, we can just use an offset of word. */
6485 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6486 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6487 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6490 output_address (XEXP (adjust_address_nv (x, SImode,
6494 if (small_data_operand (x, GET_MODE (x)))
6495 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6496 reg_names[SMALL_DATA_REG]);
6501 /* MB value for a mask operand. */
6502 if (! mask_operand (x, SImode))
6503 output_operand_lossage ("invalid %%m value");
6505 val = INT_LOWPART (x);
6507 /* If the high bit is set and the low bit is not, the value is zero.
6508 If the high bit is zero, the value is the first 1 bit we find from
6510 if ((val & 0x80000000) && ((val & 1) == 0))
6515 else if ((val & 0x80000000) == 0)
6517 for (i = 1; i < 32; i++)
6518 if ((val <<= 1) & 0x80000000)
6520 fprintf (file, "%d", i);
6524 /* Otherwise, look for the first 0 bit from the right. The result is its
6525 number plus 1. We know the low-order bit is one. */
6526 for (i = 0; i < 32; i++)
6527 if (((val >>= 1) & 1) == 0)
6530 /* If we ended in ...01, i would be 0. The correct value is 31, so
6532 fprintf (file, "%d", 31 - i);
6536 /* ME value for a mask operand. */
6537 if (! mask_operand (x, SImode))
6538 output_operand_lossage ("invalid %%M value");
6540 val = INT_LOWPART (x);
6542 /* If the low bit is set and the high bit is not, the value is 31.
6543 If the low bit is zero, the value is the first 1 bit we find from
6545 if ((val & 1) && ((val & 0x80000000) == 0))
6550 else if ((val & 1) == 0)
6552 for (i = 0; i < 32; i++)
6553 if ((val >>= 1) & 1)
6556 /* If we had ....10, i would be 0. The result should be
6557 30, so we need 30 - i. */
6558 fprintf (file, "%d", 30 - i);
6562 /* Otherwise, look for the first 0 bit from the left. The result is its
6563 number minus 1. We know the high-order bit is one. */
6564 for (i = 0; i < 32; i++)
6565 if (((val <<= 1) & 0x80000000) == 0)
6568 fprintf (file, "%d", i);
6571 /* %n outputs the negative of its operand. */
6574 /* Write the number of elements in the vector times 4. */
6575 if (GET_CODE (x) != PARALLEL)
6576 output_operand_lossage ("invalid %%N value");
6578 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6582 /* Similar, but subtract 1 first. */
6583 if (GET_CODE (x) != PARALLEL)
6584 output_operand_lossage ("invalid %%O value");
6586 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6590 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6592 || INT_LOWPART (x) < 0
6593 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6594 output_operand_lossage ("invalid %%p value");
6596 fprintf (file, "%d", i);
6600 /* The operand must be an indirect memory reference. The result
6601 is the register number. */
6602 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6603 || REGNO (XEXP (x, 0)) >= 32)
6604 output_operand_lossage ("invalid %%P value");
6606 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6610 /* This outputs the logical code corresponding to a boolean
6611 expression. The expression may have one or both operands
6612 negated (if one, only the first one). For condition register
6613 logical operations, it will also treat the negated
6614 CR codes as NOTs, but not handle NOTs of them. */
6616 const char *const *t = 0;
6618 enum rtx_code code = GET_CODE (x);
6619 static const char * const tbl[3][3] = {
6620 { "and", "andc", "nor" },
6621 { "or", "orc", "nand" },
6622 { "xor", "eqv", "xor" } };
6626 else if (code == IOR)
6628 else if (code == XOR)
6631 output_operand_lossage ("invalid %%q value");
6633 if (GET_CODE (XEXP (x, 0)) != NOT)
6637 if (GET_CODE (XEXP (x, 1)) == NOT)
6648 /* X is a CR register. Print the mask for `mtcrf'. */
6649 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6650 output_operand_lossage ("invalid %%R value");
6652 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6656 /* Low 5 bits of 32 - value */
6658 output_operand_lossage ("invalid %%s value");
6660 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6664 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6665 CONST_INT 32-bit mask is considered sign-extended so any
6666 transition must occur within the CONST_INT, not on the boundary. */
6667 if (! mask64_operand (x, DImode))
6668 output_operand_lossage ("invalid %%S value");
6670 val = INT_LOWPART (x);
6672 if (val & 1) /* Clear Left */
6674 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6675 if (!((val >>= 1) & 1))
6678 #if HOST_BITS_PER_WIDE_INT == 32
6679 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6681 val = CONST_DOUBLE_HIGH (x);
6686 for (i = 32; i < 64; i++)
6687 if (!((val >>= 1) & 1))
6691 /* i = index of last set bit from right
6692 mask begins at 63 - i from left */
6694 output_operand_lossage ("%%S computed all 1's mask");
6696 fprintf (file, "%d", 63 - i);
6699 else /* Clear Right */
6701 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6702 if ((val >>= 1) & 1)
6705 #if HOST_BITS_PER_WIDE_INT == 32
6706 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6708 val = CONST_DOUBLE_HIGH (x);
6710 if (val == (HOST_WIDE_INT) -1)
6713 for (i = 32; i < 64; i++)
6714 if ((val >>= 1) & 1)
6718 /* i = index of last clear bit from right
6719 mask ends at 62 - i from left */
6721 output_operand_lossage ("%%S computed all 0's mask");
6723 fprintf (file, "%d", 62 - i);
6728 /* Print the symbolic name of a branch target register. */
6729 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6730 && REGNO (x) != COUNT_REGISTER_REGNUM))
6731 output_operand_lossage ("invalid %%T value");
6732 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6733 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6735 fputs ("ctr", file);
6739 /* High-order 16 bits of constant for use in unsigned operand. */
6741 output_operand_lossage ("invalid %%u value");
6743 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6744 (INT_LOWPART (x) >> 16) & 0xffff);
6748 /* High-order 16 bits of constant for use in signed operand. */
6750 output_operand_lossage ("invalid %%v value");
6752 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6753 (INT_LOWPART (x) >> 16) & 0xffff);
6757 /* Print `u' if this has an auto-increment or auto-decrement. */
6758 if (GET_CODE (x) == MEM
6759 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6760 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6765 /* Print the trap code for this operand. */
6766 switch (GET_CODE (x))
6769 fputs ("eq", file); /* 4 */
6772 fputs ("ne", file); /* 24 */
6775 fputs ("lt", file); /* 16 */
6778 fputs ("le", file); /* 20 */
6781 fputs ("gt", file); /* 8 */
6784 fputs ("ge", file); /* 12 */
6787 fputs ("llt", file); /* 2 */
6790 fputs ("lle", file); /* 6 */
6793 fputs ("lgt", file); /* 1 */
6796 fputs ("lge", file); /* 5 */
6804 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6807 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6808 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6810 print_operand (file, x, 0);
6814 /* MB value for a PowerPC64 rldic operand. */
6815 val = (GET_CODE (x) == CONST_INT
6816 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6821 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6822 if ((val <<= 1) < 0)
6825 #if HOST_BITS_PER_WIDE_INT == 32
6826 if (GET_CODE (x) == CONST_INT && i >= 0)
6827 i += 32; /* zero-extend high-part was all 0's */
6828 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6830 val = CONST_DOUBLE_LOW (x);
6837 for ( ; i < 64; i++)
6838 if ((val <<= 1) < 0)
6843 fprintf (file, "%d", i + 1);
6847 if (GET_CODE (x) == MEM
6848 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6853 /* Like 'L', for third word of TImode */
6854 if (GET_CODE (x) == REG)
6855 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6856 else if (GET_CODE (x) == MEM)
6858 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6859 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6860 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6862 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6863 if (small_data_operand (x, GET_MODE (x)))
6864 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6865 reg_names[SMALL_DATA_REG]);
6870 /* X is a SYMBOL_REF. Write out the name preceded by a
6871 period and without any trailing data in brackets. Used for function
6872 names. If we are configured for System V (or the embedded ABI) on
6873 the PowerPC, do not emit the period, since those systems do not use
6874 TOCs and the like. */
6875 if (GET_CODE (x) != SYMBOL_REF)
6878 if (XSTR (x, 0)[0] != '.')
6880 switch (DEFAULT_ABI)
6890 case ABI_AIX_NODESC:
6896 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6898 assemble_name (file, XSTR (x, 0));
6903 /* Like 'L', for last word of TImode. */
6904 if (GET_CODE (x) == REG)
6905 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6906 else if (GET_CODE (x) == MEM)
6908 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6909 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6910 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6912 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6913 if (small_data_operand (x, GET_MODE (x)))
6914 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6915 reg_names[SMALL_DATA_REG]);
6919 /* Print AltiVec memory operand. */
6924 if (GET_CODE (x) != MEM)
6929 if (GET_CODE (tmp) == REG)
6930 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6931 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6933 if (REGNO (XEXP (tmp, 0)) == 0)
6934 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6935 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6937 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6938 reg_names[ REGNO (XEXP (tmp, 1)) ]);
6946 if (GET_CODE (x) == REG)
6947 fprintf (file, "%s", reg_names[REGNO (x)]);
6948 else if (GET_CODE (x) == MEM)
6950 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6951 know the width from the mode. */
6952 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6953 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6954 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6955 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6956 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6957 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6959 output_address (XEXP (x, 0));
6962 output_addr_const (file, x);
6966 output_operand_lossage ("invalid %%xn code");
6970 /* Print the address of an operand. */
6973 print_operand_address (file, x)
6977 if (GET_CODE (x) == REG)
6978 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6979 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6980 || GET_CODE (x) == LABEL_REF)
6982 output_addr_const (file, x);
6983 if (small_data_operand (x, GET_MODE (x)))
6984 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6985 reg_names[SMALL_DATA_REG]);
6986 else if (TARGET_TOC)
6989 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
6991 if (REGNO (XEXP (x, 0)) == 0)
6992 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6993 reg_names[ REGNO (XEXP (x, 0)) ]);
6995 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6996 reg_names[ REGNO (XEXP (x, 1)) ]);
6998 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
7000 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
7001 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
7004 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
7005 && CONSTANT_P (XEXP (x, 1)))
7007 output_addr_const (file, XEXP (x, 1));
7008 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
7012 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
7013 && CONSTANT_P (XEXP (x, 1)))
7015 fprintf (file, "lo16(");
7016 output_addr_const (file, XEXP (x, 1));
7017 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
7020 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
7022 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
7024 rtx contains_minus = XEXP (x, 1);
7028 /* Find the (minus (sym) (toc)) buried in X, and temporarily
7029 turn it into (sym) for output_addr_const. */
7030 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
7031 contains_minus = XEXP (contains_minus, 0);
7033 minus = XEXP (contains_minus, 0);
7034 symref = XEXP (minus, 0);
7035 XEXP (contains_minus, 0) = symref;
7040 name = XSTR (symref, 0);
7041 newname = alloca (strlen (name) + sizeof ("@toc"));
7042 strcpy (newname, name);
7043 strcat (newname, "@toc");
7044 XSTR (symref, 0) = newname;
7046 output_addr_const (file, XEXP (x, 1));
7048 XSTR (symref, 0) = name;
7049 XEXP (contains_minus, 0) = minus;
7052 output_addr_const (file, XEXP (x, 1));
7054 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
7060 /* Target hook for assembling integer objects. The powerpc version has
7061 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
7062 is defined. It also needs to handle DI-mode objects on 64-bit
7066 rs6000_assemble_integer (x, size, aligned_p)
7071 #ifdef RELOCATABLE_NEEDS_FIXUP
7072 /* Special handling for SI values. */
7073 if (size == 4 && aligned_p)
7075 extern int in_toc_section PARAMS ((void));
7076 static int recurse = 0;
7078 /* For -mrelocatable, we mark all addresses that need to be fixed up
7079 in the .fixup section. */
7080 if (TARGET_RELOCATABLE
7081 && !in_toc_section ()
7082 && !in_text_section ()
7084 && GET_CODE (x) != CONST_INT
7085 && GET_CODE (x) != CONST_DOUBLE
7091 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
7093 ASM_OUTPUT_LABEL (asm_out_file, buf);
7094 fprintf (asm_out_file, "\t.long\t(");
7095 output_addr_const (asm_out_file, x);
7096 fprintf (asm_out_file, ")@fixup\n");
7097 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
7098 ASM_OUTPUT_ALIGN (asm_out_file, 2);
7099 fprintf (asm_out_file, "\t.long\t");
7100 assemble_name (asm_out_file, buf);
7101 fprintf (asm_out_file, "\n\t.previous\n");
7105 /* Remove initial .'s to turn a -mcall-aixdesc function
7106 address into the address of the descriptor, not the function
7108 else if (GET_CODE (x) == SYMBOL_REF
7109 && XSTR (x, 0)[0] == '.'
7110 && DEFAULT_ABI == ABI_AIX)
7112 const char *name = XSTR (x, 0);
7113 while (*name == '.')
7116 fprintf (asm_out_file, "\t.long\t%s\n", name);
7120 #endif /* RELOCATABLE_NEEDS_FIXUP */
7121 return default_assemble_integer (x, size, aligned_p);
7125 rs6000_reverse_condition (mode, code)
7126 enum machine_mode mode;
7129 /* Reversal of FP compares takes care -- an ordered compare
7130 becomes an unordered compare and vice versa. */
7131 if (mode == CCFPmode)
7132 return reverse_condition_maybe_unordered (code);
7134 return reverse_condition (code);
7137 /* Generate a compare for CODE. Return a brand-new rtx that
7138 represents the result of the compare. */
7141 rs6000_generate_compare (code)
7144 enum machine_mode comp_mode;
7147 if (rs6000_compare_fp_p)
7148 comp_mode = CCFPmode;
7149 else if (code == GTU || code == LTU
7150 || code == GEU || code == LEU)
7151 comp_mode = CCUNSmode;
7155 /* First, the compare. */
7156 compare_result = gen_reg_rtx (comp_mode);
7157 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
7158 gen_rtx_COMPARE (comp_mode,
7160 rs6000_compare_op1)));
7162 /* Some kinds of FP comparisons need an OR operation;
7163 except for flag_unsafe_math_optimizations we don't bother. */
7164 if (rs6000_compare_fp_p
7165 && ! flag_unsafe_math_optimizations
7166 && (code == LE || code == GE
7167 || code == UNEQ || code == LTGT
7168 || code == UNGT || code == UNLT))
7170 enum rtx_code or1, or2;
7171 rtx or1_rtx, or2_rtx, compare2_rtx;
7172 rtx or_result = gen_reg_rtx (CCEQmode);
7176 case LE: or1 = LT; or2 = EQ; break;
7177 case GE: or1 = GT; or2 = EQ; break;
7178 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
7179 case LTGT: or1 = LT; or2 = GT; break;
7180 case UNGT: or1 = UNORDERED; or2 = GT; break;
7181 case UNLT: or1 = UNORDERED; or2 = LT; break;
7184 validate_condition_mode (or1, comp_mode);
7185 validate_condition_mode (or2, comp_mode);
7186 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
7187 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
7188 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
7189 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
7191 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
7193 compare_result = or_result;
7197 validate_condition_mode (code, GET_MODE (compare_result));
7199 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
7203 /* Emit the RTL for an sCOND pattern. */
7206 rs6000_emit_sCOND (code, result)
7211 enum machine_mode op_mode;
7213 condition_rtx = rs6000_generate_compare (code);
7215 op_mode = GET_MODE (rs6000_compare_op0);
7216 if (op_mode == VOIDmode)
7217 op_mode = GET_MODE (rs6000_compare_op1);
7219 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7221 PUT_MODE (condition_rtx, DImode);
7222 convert_move (result, condition_rtx, 0);
7226 PUT_MODE (condition_rtx, SImode);
7227 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7231 /* Emit a branch of kind CODE to location LOC. */
7234 rs6000_emit_cbranch (code, loc)
7238 rtx condition_rtx, loc_ref;
7240 condition_rtx = rs6000_generate_compare (code);
7241 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7242 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7243 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7247 /* Return the string to output a conditional branch to LABEL, which is
7248 the operand number of the label, or -1 if the branch is really a
7251 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7252 condition code register and its mode specifies what kind of
7255 REVERSED is non-zero if we should reverse the sense of the comparison.
7257 INSN is the insn. */
7260 output_cbranch (op, label, reversed, insn)
7266 static char string[64];
7267 enum rtx_code code = GET_CODE (op);
7268 rtx cc_reg = XEXP (op, 0);
7269 enum machine_mode mode = GET_MODE (cc_reg);
7270 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
7271 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
7272 int really_reversed = reversed ^ need_longbranch;
7278 validate_condition_mode (code, mode);
7280 /* Work out which way this really branches. We could use
7281 reverse_condition_maybe_unordered here always but this
7282 makes the resulting assembler clearer. */
7283 if (really_reversed)
7284 code = rs6000_reverse_condition (mode, code);
7288 /* Not all of these are actually distinct opcodes, but
7289 we distinguish them for clarity of the resulting assembler. */
7291 ccode = "ne"; break;
7293 ccode = "eq"; break;
7295 ccode = "ge"; break;
7296 case GT: case GTU: case UNGT:
7297 ccode = "gt"; break;
7299 ccode = "le"; break;
7300 case LT: case LTU: case UNLT:
7301 ccode = "lt"; break;
7302 case UNORDERED: ccode = "un"; break;
7303 case ORDERED: ccode = "nu"; break;
7304 case UNGE: ccode = "nl"; break;
7305 case UNLE: ccode = "ng"; break;
7310 /* Maybe we have a guess as to how likely the branch is.
7311 The old mnemonics don't have a way to specify this information. */
7312 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
7313 if (note != NULL_RTX)
7315 /* PROB is the difference from 50%. */
7316 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7318 /* For branches that are very close to 50%, assume not-taken. */
7319 if (abs (prob) > REG_BR_PROB_BASE / 20
7320 && ((prob > 0) ^ need_longbranch))
7329 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7331 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7333 /* We need to escape any '%' characters in the reg_names string.
7334 Assume they'd only be the first character... */
7335 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7337 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7341 /* If the branch distance was too far, we may have to use an
7342 unconditional branch to go the distance. */
7343 if (need_longbranch)
7344 s += sprintf (s, ",$+8\n\tb %s", label);
7346 s += sprintf (s, ",%s", label);
7352 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7353 operands of the last comparison is nonzero/true, FALSE_COND if it
7354 is zero/false. Return 0 if the hardware has no such operation. */
7357 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7363 enum rtx_code code = GET_CODE (op);
7364 rtx op0 = rs6000_compare_op0;
7365 rtx op1 = rs6000_compare_op1;
7367 enum machine_mode compare_mode = GET_MODE (op0);
7368 enum machine_mode result_mode = GET_MODE (dest);
7371 /* These modes should always match. */
7372 if (GET_MODE (op1) != compare_mode)
7374 if (GET_MODE (true_cond) != result_mode)
7376 if (GET_MODE (false_cond) != result_mode)
7379 /* First, work out if the hardware can do this at all, or
7380 if it's too slow... */
7381 /* If the comparison is an integer one, since we only have fsel
7382 it'll be cheaper to use a branch. */
7383 if (! rs6000_compare_fp_p)
7386 /* Eliminate half of the comparisons by switching operands, this
7387 makes the remaining code simpler. */
7388 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7389 || code == LTGT || code == LT)
7391 code = reverse_condition_maybe_unordered (code);
7393 true_cond = false_cond;
7397 /* UNEQ and LTGT take four instructions for a comparison with zero,
7398 it'll probably be faster to use a branch here too. */
7402 if (GET_CODE (op1) == CONST_DOUBLE)
7403 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7405 /* We're going to try to implement comparions by performing
7406 a subtract, then comparing against zero. Unfortunately,
7407 Inf - Inf is NaN which is not zero, and so if we don't
7408 know that the the operand is finite and the comparison
7409 would treat EQ different to UNORDERED, we can't do it. */
7410 if (! flag_unsafe_math_optimizations
7411 && code != GT && code != UNGE
7412 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7413 /* Constructs of the form (a OP b ? a : b) are safe. */
7414 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7415 || (! rtx_equal_p (op0, true_cond)
7416 && ! rtx_equal_p (op1, true_cond))))
7418 /* At this point we know we can use fsel. */
7420 /* Reduce the comparison to a comparison against zero. */
7421 temp = gen_reg_rtx (compare_mode);
7422 emit_insn (gen_rtx_SET (VOIDmode, temp,
7423 gen_rtx_MINUS (compare_mode, op0, op1)));
7425 op1 = CONST0_RTX (compare_mode);
7427 /* If we don't care about NaNs we can reduce some of the comparisons
7428 down to faster ones. */
7429 if (flag_unsafe_math_optimizations)
7435 true_cond = false_cond;
7448 /* Now, reduce everything down to a GE. */
7455 temp = gen_reg_rtx (compare_mode);
7456 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7461 temp = gen_reg_rtx (compare_mode);
7462 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
7467 temp = gen_reg_rtx (compare_mode);
7468 emit_insn (gen_rtx_SET (VOIDmode, temp,
7469 gen_rtx_NEG (compare_mode,
7470 gen_rtx_ABS (compare_mode, op0))));
7475 temp = gen_reg_rtx (result_mode);
7476 emit_insn (gen_rtx_SET (VOIDmode, temp,
7477 gen_rtx_IF_THEN_ELSE (result_mode,
7478 gen_rtx_GE (VOIDmode,
7480 true_cond, false_cond)));
7482 true_cond = false_cond;
7484 temp = gen_reg_rtx (compare_mode);
7485 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7490 temp = gen_reg_rtx (result_mode);
7491 emit_insn (gen_rtx_SET (VOIDmode, temp,
7492 gen_rtx_IF_THEN_ELSE (result_mode,
7493 gen_rtx_GE (VOIDmode,
7495 true_cond, false_cond)));
7497 false_cond = true_cond;
7499 temp = gen_reg_rtx (compare_mode);
7500 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7508 emit_insn (gen_rtx_SET (VOIDmode, dest,
7509 gen_rtx_IF_THEN_ELSE (result_mode,
7510 gen_rtx_GE (VOIDmode,
7512 true_cond, false_cond)));
7517 rs6000_emit_minmax (dest, code, op0, op1)
7523 enum machine_mode mode = GET_MODE (op0);
7527 if (code == SMAX || code == SMIN)
7532 if (code == SMAX || code == UMAX)
7533 target = emit_conditional_move (dest, c, op0, op1, mode,
7536 target = emit_conditional_move (dest, c, op0, op1, mode,
7538 if (target == NULL_RTX)
7541 emit_move_insn (dest, target);
7544 /* This page contains routines that are used to determine what the
7545 function prologue and epilogue code will do and write them out. */
7547 /* Return the first fixed-point register that is required to be
7548 saved. 32 if none. */
7551 first_reg_to_save ()
7555 /* Find lowest numbered live register. */
7556 for (first_reg = 13; first_reg <= 31; first_reg++)
7557 if (regs_ever_live[first_reg]
7558 && (! call_used_regs[first_reg]
7559 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
7560 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7561 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7564 if (current_function_profile)
7566 /* AIX must save/restore every register that contains a parameter
7567 before/after the .__mcount call plus an additional register
7568 for the static chain, if needed; use registers from 30 down to 22
7570 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7572 int last_parm_reg, profile_first_reg;
7574 /* Figure out last used parameter register. The proper thing
7575 to do is to walk incoming args of the function. A function
7576 might have live parameter registers even if it has no
7578 for (last_parm_reg = 10;
7579 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7583 /* Calculate first reg for saving parameter registers
7585 Skip reg 31 which may contain the frame pointer. */
7586 profile_first_reg = (33 - last_parm_reg
7587 - (current_function_needs_context ? 1 : 0));
7589 /* Need to skip another reg to account for R31 being PICBASE
7590 (when flag_pic is set) or R30 being used as the frame
7591 pointer (when flag_pic is not set). */
7592 --profile_first_reg;
7594 /* Do not save frame pointer if no parameters needs to be saved. */
7595 if (profile_first_reg == 31)
7596 profile_first_reg = 32;
7598 if (first_reg > profile_first_reg)
7599 first_reg = profile_first_reg;
7602 /* SVR4 may need one register to preserve the static chain. */
7603 else if (current_function_needs_context)
7605 /* Skip reg 31 which may contain the frame pointer. */
7612 if (flag_pic && current_function_uses_pic_offset_table &&
7613 (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
7614 return RS6000_PIC_OFFSET_TABLE_REGNUM;
7620 /* Similar, for FP regs. */
7623 first_fp_reg_to_save ()
7627 /* Find lowest numbered live register. */
7628 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7629 if (regs_ever_live[first_reg])
7635 /* Similar, for AltiVec regs. */
7638 first_altivec_reg_to_save ()
7642 /* Stack frame remains as is unless we are in AltiVec ABI. */
7643 if (! TARGET_ALTIVEC_ABI)
7644 return LAST_ALTIVEC_REGNO + 1;
7646 /* Find lowest numbered live register. */
7647 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7648 if (regs_ever_live[i])
7654 /* Return a 32-bit mask of the AltiVec registers we need to set in
7655 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7656 the 32-bit word is 0. */
7659 compute_vrsave_mask ()
7661 unsigned int i, mask = 0;
7663 /* First, find out if we use _any_ altivec registers. */
7664 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7665 if (regs_ever_live[i])
7666 mask |= ALTIVEC_REG_BIT (i);
7671 /* Next, add all registers that are call-clobbered. We do this
7672 because post-reload register optimizers such as regrename_optimize
7673 may choose to use them. They never change the register class
7674 chosen by reload, so cannot create new uses of altivec registers
7675 if there were none before, so the early exit above is safe. */
7676 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7677 altivec registers not saved in the mask, which might well make the
7678 adjustments below more effective in eliding the save/restore of
7679 VRSAVE in small functions. */
7680 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7681 if (call_used_regs[i])
7682 mask |= ALTIVEC_REG_BIT (i);
7684 /* Next, remove the argument registers from the set. These must
7685 be in the VRSAVE mask set by the caller, so we don't need to add
7686 them in again. More importantly, the mask we compute here is
7687 used to generate CLOBBERs in the set_vrsave insn, and we do not
7688 wish the argument registers to die. */
7689 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7690 mask &= ~ALTIVEC_REG_BIT (i);
7692 /* Similarly, remove the return value from the set. */
7695 diddle_return_value (is_altivec_return_reg, &yes);
7697 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7704 is_altivec_return_reg (reg, xyes)
7708 bool *yes = (bool *) xyes;
7709 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7714 /* Calculate the stack information for the current function. This is
7715 complicated by having two separate calling sequences, the AIX calling
7716 sequence and the V.4 calling sequence.
7718 AIX (and Darwin/Mac OS X) stack frames look like:
7720 SP----> +---------------------------------------+
7721 | back chain to caller | 0 0
7722 +---------------------------------------+
7723 | saved CR | 4 8 (8-11)
7724 +---------------------------------------+
7726 +---------------------------------------+
7727 | reserved for compilers | 12 24
7728 +---------------------------------------+
7729 | reserved for binders | 16 32
7730 +---------------------------------------+
7731 | saved TOC pointer | 20 40
7732 +---------------------------------------+
7733 | Parameter save area (P) | 24 48
7734 +---------------------------------------+
7735 | Alloca space (A) | 24+P etc.
7736 +---------------------------------------+
7737 | Local variable space (L) | 24+P+A
7738 +---------------------------------------+
7739 | Float/int conversion temporary (X) | 24+P+A+L
7740 +---------------------------------------+
7741 | Save area for AltiVec registers (W) | 24+P+A+L+X
7742 +---------------------------------------+
7743 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7744 +---------------------------------------+
7745 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7746 +---------------------------------------+
7747 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
7748 +---------------------------------------+
7749 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
7750 +---------------------------------------+
7751 old SP->| back chain to caller's caller |
7752 +---------------------------------------+
7754 The required alignment for AIX configurations is two words (i.e., 8
7758 V.4 stack frames look like:
7760 SP----> +---------------------------------------+
7761 | back chain to caller | 0
7762 +---------------------------------------+
7763 | caller's saved LR | 4
7764 +---------------------------------------+
7765 | Parameter save area (P) | 8
7766 +---------------------------------------+
7767 | Alloca space (A) | 8+P
7768 +---------------------------------------+
7769 | Varargs save area (V) | 8+P+A
7770 +---------------------------------------+
7771 | Local variable space (L) | 8+P+A+V
7772 +---------------------------------------+
7773 | Float/int conversion temporary (X) | 8+P+A+V+L
7774 +---------------------------------------+
7775 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7776 +---------------------------------------+
7777 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7778 +---------------------------------------+
7779 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7780 +---------------------------------------+
7781 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7782 +---------------------------------------+
7783 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7784 +---------------------------------------+
7785 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7786 +---------------------------------------+
7787 old SP->| back chain to caller's caller |
7788 +---------------------------------------+
7790 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7791 given. (But note below and in sysv4.h that we require only 8 and
7792 may round up the size of our stack frame anyways. The historical
7793 reason is early versions of powerpc-linux which didn't properly
7794 align the stack at program startup. A happy side-effect is that
7795 -mno-eabi libraries can be used with -meabi programs.)
7797 The EABI configuration defaults to the V.4 layout, unless
7798 -mcall-aix is used, in which case the AIX layout is used. However,
7799 the stack alignment requirements may differ. If -mno-eabi is not
7800 given, the required stack alignment is 8 bytes; if -mno-eabi is
7801 given, the required alignment is 16 bytes. (But see V.4 comment
7804 #ifndef ABI_STACK_BOUNDARY
7805 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7809 rs6000_stack_info ()
7811 static rs6000_stack_t info, zero_info;
7812 rs6000_stack_t *info_ptr = &info;
7813 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7814 enum rs6000_abi abi;
7818 /* Zero all fields portably. */
7821 /* Select which calling sequence. */
7822 info_ptr->abi = abi = DEFAULT_ABI;
7824 /* Calculate which registers need to be saved & save area size. */
7825 info_ptr->first_gp_reg_save = first_reg_to_save ();
7826 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
7827 even if it currently looks like we won't. */
7828 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7829 || (flag_pic == 1 && abi == ABI_V4)
7830 || (flag_pic && abi == ABI_DARWIN))
7831 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
7832 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
7834 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7836 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7837 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7839 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7840 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7841 - info_ptr->first_altivec_reg_save);
7843 /* Does this function call anything? */
7844 info_ptr->calls_p = (! current_function_is_leaf
7845 || cfun->machine->ra_needs_full_frame);
7847 /* Determine if we need to save the link register. */
7848 if (rs6000_ra_ever_killed ()
7849 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7850 #ifdef TARGET_RELOCATABLE
7851 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7853 || (info_ptr->first_fp_reg_save != 64
7854 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7855 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7856 || (abi == ABI_V4 && current_function_calls_alloca)
7857 || (DEFAULT_ABI == ABI_DARWIN
7859 && current_function_uses_pic_offset_table)
7860 || info_ptr->calls_p)
7862 info_ptr->lr_save_p = 1;
7863 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7866 /* Determine if we need to save the condition code registers. */
7867 if (regs_ever_live[CR2_REGNO]
7868 || regs_ever_live[CR3_REGNO]
7869 || regs_ever_live[CR4_REGNO])
7871 info_ptr->cr_save_p = 1;
7873 info_ptr->cr_size = reg_size;
7876 /* If the current function calls __builtin_eh_return, then we need
7877 to allocate stack space for registers that will hold data for
7878 the exception handler. */
7879 if (current_function_calls_eh_return)
7882 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7884 ehrd_size = i * UNITS_PER_WORD;
7889 /* Determine various sizes. */
7890 info_ptr->reg_size = reg_size;
7891 info_ptr->fixed_size = RS6000_SAVE_AREA;
7892 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7893 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7894 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7897 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
7899 info_ptr->vrsave_mask = compute_vrsave_mask ();
7900 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7904 info_ptr->vrsave_mask = 0;
7905 info_ptr->vrsave_size = 0;
7908 /* Calculate the offsets. */
7916 case ABI_AIX_NODESC:
7918 info_ptr->fp_save_offset = - info_ptr->fp_size;
7919 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7921 if (TARGET_ALTIVEC_ABI)
7923 info_ptr->vrsave_save_offset
7924 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7926 /* Align stack so vector save area is on a quadword boundary. */
7927 if (info_ptr->altivec_size != 0)
7928 info_ptr->altivec_padding_size
7929 = 16 - (-info_ptr->vrsave_save_offset % 16);
7931 info_ptr->altivec_padding_size = 0;
7933 info_ptr->altivec_save_offset
7934 = info_ptr->vrsave_save_offset
7935 - info_ptr->altivec_padding_size
7936 - info_ptr->altivec_size;
7938 /* Adjust for AltiVec case. */
7939 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7942 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7943 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7944 info_ptr->lr_save_offset = 2*reg_size;
7948 info_ptr->fp_save_offset = - info_ptr->fp_size;
7949 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7950 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7952 if (TARGET_ALTIVEC_ABI)
7954 info_ptr->vrsave_save_offset
7955 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7957 /* Align stack so vector save area is on a quadword boundary. */
7958 if (info_ptr->altivec_size != 0)
7959 info_ptr->altivec_padding_size
7960 = 16 - (-info_ptr->vrsave_save_offset % 16);
7962 info_ptr->altivec_padding_size = 0;
7964 info_ptr->altivec_save_offset
7965 = info_ptr->vrsave_save_offset
7966 - info_ptr->altivec_padding_size
7967 - info_ptr->altivec_size;
7969 /* Adjust for AltiVec case. */
7970 info_ptr->toc_save_offset
7971 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7974 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7975 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7976 info_ptr->lr_save_offset = reg_size;
7980 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7982 + info_ptr->altivec_size
7983 + info_ptr->altivec_padding_size
7984 + info_ptr->vrsave_size
7988 + info_ptr->vrsave_size
7989 + info_ptr->toc_size,
7990 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7993 total_raw_size = (info_ptr->vars_size
7994 + info_ptr->parm_size
7995 + info_ptr->save_size
7996 + info_ptr->varargs_size
7997 + info_ptr->fixed_size);
7999 info_ptr->total_size =
8000 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
8002 /* Determine if we need to allocate any stack frame:
8004 For AIX we need to push the stack if a frame pointer is needed
8005 (because the stack might be dynamically adjusted), if we are
8006 debugging, if we make calls, or if the sum of fp_save, gp_save,
8007 and local variables are more than the space needed to save all
8008 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
8009 + 18*8 = 288 (GPR13 reserved).
8011 For V.4 we don't have the stack cushion that AIX uses, but assume
8012 that the debugger can handle stackless frames. */
8014 if (info_ptr->calls_p)
8015 info_ptr->push_p = 1;
8017 else if (abi == ABI_V4)
8018 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
8021 info_ptr->push_p = (frame_pointer_needed
8022 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
8023 || ((total_raw_size - info_ptr->fixed_size)
8024 > (TARGET_32BIT ? 220 : 288)));
8026 /* Zero offsets if we're not saving those registers. */
8027 if (info_ptr->fp_size == 0)
8028 info_ptr->fp_save_offset = 0;
8030 if (info_ptr->gp_size == 0)
8031 info_ptr->gp_save_offset = 0;
8033 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
8034 info_ptr->altivec_save_offset = 0;
8036 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
8037 info_ptr->vrsave_save_offset = 0;
8039 if (! info_ptr->lr_save_p)
8040 info_ptr->lr_save_offset = 0;
8042 if (! info_ptr->cr_save_p)
8043 info_ptr->cr_save_offset = 0;
8045 if (! info_ptr->toc_save_p)
8046 info_ptr->toc_save_offset = 0;
8052 debug_stack_info (info)
8053 rs6000_stack_t *info;
8055 const char *abi_string;
8058 info = rs6000_stack_info ();
8060 fprintf (stderr, "\nStack information for function %s:\n",
8061 ((current_function_decl && DECL_NAME (current_function_decl))
8062 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
8067 default: abi_string = "Unknown"; break;
8068 case ABI_NONE: abi_string = "NONE"; break;
8070 case ABI_AIX_NODESC: abi_string = "AIX"; break;
8071 case ABI_DARWIN: abi_string = "Darwin"; break;
8072 case ABI_V4: abi_string = "V.4"; break;
8075 fprintf (stderr, "\tABI = %5s\n", abi_string);
8077 if (TARGET_ALTIVEC_ABI)
8078 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
8080 if (info->first_gp_reg_save != 32)
8081 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
8083 if (info->first_fp_reg_save != 64)
8084 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
8086 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
8087 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
8088 info->first_altivec_reg_save);
8090 if (info->lr_save_p)
8091 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
8093 if (info->cr_save_p)
8094 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
8096 if (info->toc_save_p)
8097 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
8099 if (info->vrsave_mask)
8100 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
8103 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
8106 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
8108 if (info->gp_save_offset)
8109 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
8111 if (info->fp_save_offset)
8112 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
8114 if (info->altivec_save_offset)
8115 fprintf (stderr, "\taltivec_save_offset = %5d\n",
8116 info->altivec_save_offset);
8118 if (info->vrsave_save_offset)
8119 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
8120 info->vrsave_save_offset);
8122 if (info->lr_save_offset)
8123 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
8125 if (info->cr_save_offset)
8126 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
8128 if (info->toc_save_offset)
8129 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
8131 if (info->varargs_save_offset)
8132 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
8134 if (info->total_size)
8135 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
8137 if (info->varargs_size)
8138 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
8140 if (info->vars_size)
8141 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
8143 if (info->parm_size)
8144 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
8146 if (info->fixed_size)
8147 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
8150 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
8153 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
8155 if (info->altivec_size)
8156 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
8158 if (info->vrsave_size)
8159 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
8161 if (info->altivec_padding_size)
8162 fprintf (stderr, "\taltivec_padding_size= %5d\n",
8163 info->altivec_padding_size);
8166 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
8169 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
8172 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
8174 if (info->save_size)
8175 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
8177 if (info->reg_size != 4)
8178 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
8180 fprintf (stderr, "\n");
8184 rs6000_return_addr (count, frame)
8188 /* Currently we don't optimize very well between prolog and body
8189 code and for PIC code the code can be actually quite bad, so
8190 don't try to be too clever here. */
8191 if (count != 0 || flag_pic != 0)
8193 cfun->machine->ra_needs_full_frame = 1;
8200 plus_constant (copy_to_reg
8201 (gen_rtx_MEM (Pmode,
8202 memory_address (Pmode, frame))),
8203 RETURN_ADDRESS_OFFSET)));
8206 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
8210 rs6000_ra_ever_killed ()
8214 #ifdef ASM_OUTPUT_MI_THUNK
8215 if (current_function_is_thunk)
8218 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8219 || cfun->machine->ra_needs_full_frame)
8220 return regs_ever_live[LINK_REGISTER_REGNUM];
8222 push_topmost_sequence ();
8224 pop_topmost_sequence ();
8226 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8230 /* Add a REG_MAYBE_DEAD note to the insn. */
8232 rs6000_maybe_dead (insn)
8235 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8240 /* Emit instructions needed to load the TOC register.
8241 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8242 a constant pool; or for SVR4 -fpic. */
8245 rs6000_emit_load_toc_table (fromprolog)
8249 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
8251 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
8253 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8255 rtx temp = (fromprolog
8256 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8257 : gen_reg_rtx (Pmode));
8258 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
8259 rs6000_maybe_dead (emit_move_insn (dest, temp));
8261 else if (flag_pic == 2)
8264 rtx tempLR = (fromprolog
8265 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8266 : gen_reg_rtx (Pmode));
8267 rtx temp0 = (fromprolog
8268 ? gen_rtx_REG (Pmode, 0)
8269 : gen_reg_rtx (Pmode));
8272 /* possibly create the toc section */
8273 if (! toc_initialized)
8276 function_section (current_function_decl);
8283 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
8284 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8286 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
8287 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8289 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
8291 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8292 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
8299 static int reload_toc_labelno = 0;
8301 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
8303 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
8304 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8306 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
8309 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8310 rs6000_maybe_dead (emit_move_insn (temp0,
8311 gen_rtx_MEM (Pmode, dest)));
8313 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8315 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8317 /* This is for AIX code running in non-PIC ELF. */
8320 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8321 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8323 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8324 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
8332 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8334 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
8339 get_TOC_alias_set ()
8341 static int set = -1;
8343 set = new_alias_set ();
8347 /* This retuns nonzero if the current function uses the TOC. This is
8348 determined by the presence of (unspec ... 7), which is generated by
8349 the various load_toc_* patterns. */
8356 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8359 rtx pat = PATTERN (insn);
8362 if (GET_CODE (pat) == PARALLEL)
8363 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8364 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8365 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8372 create_TOC_reference (symbol)
8375 return gen_rtx_PLUS (Pmode,
8376 gen_rtx_REG (Pmode, TOC_REGISTER),
8377 gen_rtx_CONST (Pmode,
8378 gen_rtx_MINUS (Pmode, symbol,
8379 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8383 /* __throw will restore its own return address to be the same as the
8384 return address of the function that the throw is being made to.
8385 This is unfortunate, because we want to check the original
8386 return address to see if we need to restore the TOC.
8387 So we have to squirrel it away here.
8388 This is used only in compiling __throw and __rethrow.
8390 Most of this code should be removed by CSE. */
8391 static rtx insn_after_throw;
8393 /* This does the saving... */
8395 rs6000_aix_emit_builtin_unwind_init ()
8398 rtx stack_top = gen_reg_rtx (Pmode);
8399 rtx opcode_addr = gen_reg_rtx (Pmode);
8401 insn_after_throw = gen_reg_rtx (SImode);
8403 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8404 emit_move_insn (stack_top, mem);
8406 mem = gen_rtx_MEM (Pmode,
8407 gen_rtx_PLUS (Pmode, stack_top,
8408 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8409 emit_move_insn (opcode_addr, mem);
8410 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8413 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8414 in _eh.o). Only used on AIX.
8416 The idea is that on AIX, function calls look like this:
8417 bl somefunction-trampoline
8421 somefunction-trampoline:
8423 ... load function address in the count register ...
8425 or like this, if the linker determines that this is not a cross-module call
8426 and so the TOC need not be restored:
8429 or like this, if the compiler could determine that this is not a
8432 now, the tricky bit here is that register 2 is saved and restored
8433 by the _linker_, so we can't readily generate debugging information
8434 for it. So we need to go back up the call chain looking at the
8435 insns at return addresses to see which calls saved the TOC register
8436 and so see where it gets restored from.
8438 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8439 just before the actual epilogue.
8441 On the bright side, this incurs no space or time overhead unless an
8442 exception is thrown, except for the extra code in libgcc.a.
8444 The parameter STACKSIZE is a register containing (at runtime)
8445 the amount to be popped off the stack in addition to the stack frame
8446 of this routine (which will be __throw or __rethrow, and so is
8447 guaranteed to have a stack frame). */
8450 rs6000_emit_eh_toc_restore (stacksize)
8454 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8455 rtx tocompare = gen_reg_rtx (SImode);
8456 rtx opcode = gen_reg_rtx (SImode);
8457 rtx opcode_addr = gen_reg_rtx (Pmode);
8459 rtx loop_start = gen_label_rtx ();
8460 rtx no_toc_restore_needed = gen_label_rtx ();
8461 rtx loop_exit = gen_label_rtx ();
8463 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8464 set_mem_alias_set (mem, rs6000_sr_alias_set);
8465 emit_move_insn (bottom_of_stack, mem);
8467 top_of_stack = expand_binop (Pmode, add_optab,
8468 bottom_of_stack, stacksize,
8469 NULL_RTX, 1, OPTAB_WIDEN);
8471 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
8472 : 0xE8410028, SImode));
8474 if (insn_after_throw == NULL_RTX)
8476 emit_move_insn (opcode, insn_after_throw);
8478 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8479 emit_label (loop_start);
8481 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8482 SImode, NULL_RTX, NULL_RTX,
8483 no_toc_restore_needed);
8485 mem = gen_rtx_MEM (Pmode,
8486 gen_rtx_PLUS (Pmode, bottom_of_stack,
8487 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8488 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8490 emit_label (no_toc_restore_needed);
8491 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8492 Pmode, NULL_RTX, NULL_RTX,
8495 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8496 set_mem_alias_set (mem, rs6000_sr_alias_set);
8497 emit_move_insn (bottom_of_stack, mem);
8499 mem = gen_rtx_MEM (Pmode,
8500 gen_rtx_PLUS (Pmode, bottom_of_stack,
8501 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8502 emit_move_insn (opcode_addr, mem);
8503 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8505 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8506 emit_jump (loop_start);
8507 emit_note (NULL, NOTE_INSN_LOOP_END);
8508 emit_label (loop_exit);
8510 #endif /* TARGET_AIX */
8512 /* This ties together stack memory (MEM with an alias set of
8513 rs6000_sr_alias_set) and the change to the stack pointer. */
8516 rs6000_emit_stack_tie ()
8518 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8520 set_mem_alias_set (mem, rs6000_sr_alias_set);
8521 emit_insn (gen_stack_tie (mem));
8524 /* Emit the correct code for allocating stack space, as insns.
8525 If COPY_R12, make sure a copy of the old frame is left in r12.
8526 The generated code may use hard register 0 as a temporary. */
8529 rs6000_emit_allocate_stack (size, copy_r12)
8534 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8535 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
8536 rtx todec = GEN_INT (-size);
8538 if (current_function_limit_stack)
8540 if (REG_P (stack_limit_rtx)
8541 && REGNO (stack_limit_rtx) > 1
8542 && REGNO (stack_limit_rtx) <= 31)
8544 emit_insn (Pmode == SImode
8545 ? gen_addsi3 (tmp_reg,
8548 : gen_adddi3 (tmp_reg,
8552 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8555 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8557 && DEFAULT_ABI == ABI_V4)
8559 rtx toload = gen_rtx_CONST (VOIDmode,
8560 gen_rtx_PLUS (Pmode,
8564 emit_insn (gen_elf_high (tmp_reg, toload));
8565 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8566 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8570 warning ("stack limit expression is not supported");
8573 if (copy_r12 || ! TARGET_UPDATE)
8574 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8580 /* Need a note here so that try_split doesn't get confused. */
8581 if (get_last_insn() == NULL_RTX)
8582 emit_note (0, NOTE_INSN_DELETED);
8583 insn = emit_move_insn (tmp_reg, todec);
8584 try_split (PATTERN (insn), insn, 0);
8588 if (Pmode == SImode)
8589 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8592 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
8597 if (Pmode == SImode)
8598 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8600 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8601 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8602 gen_rtx_REG (Pmode, 12));
8605 RTX_FRAME_RELATED_P (insn) = 1;
8607 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8608 gen_rtx_SET (VOIDmode, stack_reg,
8609 gen_rtx_PLUS (Pmode, stack_reg,
8614 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8617 (mem (plus (blah) (regXX)))
8621 (mem (plus (blah) (const VALUE_OF_REGXX))). */
8624 altivec_frame_fixup (insn, reg, val)
8630 real = copy_rtx (PATTERN (insn));
8632 real = replace_rtx (real, reg, GEN_INT (val));
8634 RTX_FRAME_RELATED_P (insn) = 1;
8635 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8640 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8641 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8642 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8643 deduce these equivalences by itself so it wasn't necessary to hold
8644 its hand so much. */
8647 rs6000_frame_related (insn, reg, val, reg2, rreg)
8656 /* copy_rtx will not make unique copies of registers, so we need to
8657 ensure we don't have unwanted sharing here. */
8659 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
8662 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
8664 real = copy_rtx (PATTERN (insn));
8666 real = replace_rtx (real, reg,
8667 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8668 STACK_POINTER_REGNUM),
8671 /* We expect that 'real' is either a SET or a PARALLEL containing
8672 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8673 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8675 if (GET_CODE (real) == SET)
8679 temp = simplify_rtx (SET_SRC (set));
8681 SET_SRC (set) = temp;
8682 temp = simplify_rtx (SET_DEST (set));
8684 SET_DEST (set) = temp;
8685 if (GET_CODE (SET_DEST (set)) == MEM)
8687 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8689 XEXP (SET_DEST (set), 0) = temp;
8692 else if (GET_CODE (real) == PARALLEL)
8695 for (i = 0; i < XVECLEN (real, 0); i++)
8696 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8698 rtx set = XVECEXP (real, 0, i);
8700 temp = simplify_rtx (SET_SRC (set));
8702 SET_SRC (set) = temp;
8703 temp = simplify_rtx (SET_DEST (set));
8705 SET_DEST (set) = temp;
8706 if (GET_CODE (SET_DEST (set)) == MEM)
8708 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8710 XEXP (SET_DEST (set), 0) = temp;
8712 RTX_FRAME_RELATED_P (set) = 1;
8718 if (reg2 != NULL_RTX)
8719 real = replace_rtx (real, reg2, rreg);
8721 RTX_FRAME_RELATED_P (insn) = 1;
8722 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8727 /* Returns an insn that has a vrsave set operation with the
8728 appropriate CLOBBERs. */
8731 generate_set_vrsave (reg, info, epiloguep)
8733 rs6000_stack_t *info;
8737 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8738 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8741 = gen_rtx_SET (VOIDmode,
8743 gen_rtx_UNSPEC_VOLATILE (SImode,
8744 gen_rtvec (2, reg, vrsave),
8749 /* We need to clobber the registers in the mask so the scheduler
8750 does not move sets to VRSAVE before sets of AltiVec registers.
8752 However, if the function receives nonlocal gotos, reload will set
8753 all call saved registers live. We will end up with:
8755 (set (reg 999) (mem))
8756 (parallel [ (set (reg vrsave) (unspec blah))
8757 (clobber (reg 999))])
8759 The clobber will cause the store into reg 999 to be dead, and
8760 flow will attempt to delete an epilogue insn. In this case, we
8761 need an unspec use/set of the register. */
8763 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8764 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8766 if (!epiloguep || call_used_regs [i])
8767 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8768 gen_rtx_REG (V4SImode, i));
8771 rtx reg = gen_rtx_REG (V4SImode, i);
8774 = gen_rtx_SET (VOIDmode,
8776 gen_rtx_UNSPEC (V4SImode,
8777 gen_rtvec (1, reg), 27));
8781 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8783 for (i = 0; i < nclobs; ++i)
8784 XVECEXP (insn, 0, i) = clobs[i];
8789 /* Emit function prologue as insns. */
8792 rs6000_emit_prologue ()
8794 rs6000_stack_t *info = rs6000_stack_info ();
8795 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8796 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8797 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8798 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8799 rtx frame_reg_rtx = sp_reg_rtx;
8800 rtx cr_save_rtx = NULL;
8802 int saving_FPRs_inline;
8803 int using_store_multiple;
8804 HOST_WIDE_INT sp_offset = 0;
8806 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8807 && info->first_gp_reg_save < 31);
8808 saving_FPRs_inline = (info->first_fp_reg_save == 64
8809 || FP_SAVE_INLINE (info->first_fp_reg_save));
8811 /* For V.4, update stack before we do any saving and set back pointer. */
8812 if (info->push_p && DEFAULT_ABI == ABI_V4)
8814 if (info->total_size < 32767)
8815 sp_offset = info->total_size;
8817 frame_reg_rtx = frame_ptr_rtx;
8818 rs6000_emit_allocate_stack (info->total_size,
8819 (frame_reg_rtx != sp_reg_rtx
8822 || info->first_fp_reg_save < 64
8823 || info->first_gp_reg_save < 32
8825 if (frame_reg_rtx != sp_reg_rtx)
8826 rs6000_emit_stack_tie ();
8829 /* Save AltiVec registers if needed. */
8830 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8834 /* There should be a non inline version of this, for when we
8835 are saving lots of vector registers. */
8836 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8837 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8839 rtx areg, savereg, mem;
8842 offset = info->altivec_save_offset + sp_offset
8843 + 16 * (i - info->first_altivec_reg_save);
8845 savereg = gen_rtx_REG (V4SImode, i);
8847 areg = gen_rtx_REG (Pmode, 0);
8848 emit_move_insn (areg, GEN_INT (offset));
8850 /* AltiVec addressing mode is [reg+reg]. */
8851 mem = gen_rtx_MEM (V4SImode,
8852 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8854 set_mem_alias_set (mem, rs6000_sr_alias_set);
8856 insn = emit_move_insn (mem, savereg);
8858 altivec_frame_fixup (insn, areg, offset);
8862 /* VRSAVE is a bit vector representing which AltiVec registers
8863 are used. The OS uses this to determine which vector
8864 registers to save on a context switch. We need to save
8865 VRSAVE on the stack frame, add whatever AltiVec registers we
8866 used in this function, and do the corresponding magic in the
8869 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8871 rtx reg, mem, vrsave;
8874 /* Get VRSAVE onto a GPR. */
8875 reg = gen_rtx_REG (SImode, 12);
8876 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8878 emit_insn (gen_get_vrsave_internal (reg));
8880 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
8883 offset = info->vrsave_save_offset + sp_offset;
8885 = gen_rtx_MEM (SImode,
8886 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8887 set_mem_alias_set (mem, rs6000_sr_alias_set);
8888 insn = emit_move_insn (mem, reg);
8890 /* Include the registers in the mask. */
8891 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8893 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8896 /* If we use the link register, get it into r0. */
8897 if (info->lr_save_p)
8898 emit_move_insn (gen_rtx_REG (Pmode, 0),
8899 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8901 /* If we need to save CR, put it into r12. */
8902 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8904 cr_save_rtx = gen_rtx_REG (SImode, 12);
8905 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8908 /* Do any required saving of fpr's. If only one or two to save, do
8909 it ourselves. Otherwise, call function. */
8910 if (saving_FPRs_inline)
8913 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8914 if ((regs_ever_live[info->first_fp_reg_save+i]
8915 && ! call_used_regs[info->first_fp_reg_save+i]))
8918 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8919 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8920 GEN_INT (info->fp_save_offset
8923 mem = gen_rtx_MEM (DFmode, addr);
8924 set_mem_alias_set (mem, rs6000_sr_alias_set);
8926 insn = emit_move_insn (mem, reg);
8927 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8928 NULL_RTX, NULL_RTX);
8931 else if (info->first_fp_reg_save != 64)
8935 const char *alloc_rname;
8937 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8939 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8941 LINK_REGISTER_REGNUM));
8942 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8943 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8944 alloc_rname = ggc_strdup (rname);
8945 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8946 gen_rtx_SYMBOL_REF (Pmode,
8948 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8951 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8952 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8953 GEN_INT (info->fp_save_offset
8954 + sp_offset + 8*i));
8955 mem = gen_rtx_MEM (DFmode, addr);
8956 set_mem_alias_set (mem, rs6000_sr_alias_set);
8958 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8960 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8961 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8962 NULL_RTX, NULL_RTX);
8965 /* Save GPRs. This is done as a PARALLEL if we are using
8966 the store-multiple instructions. */
8967 if (using_store_multiple)
8971 p = rtvec_alloc (32 - info->first_gp_reg_save);
8972 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8973 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8976 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8977 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8978 GEN_INT (info->gp_save_offset
8981 mem = gen_rtx_MEM (reg_mode, addr);
8982 set_mem_alias_set (mem, rs6000_sr_alias_set);
8984 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8986 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8987 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8988 NULL_RTX, NULL_RTX);
8993 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8994 if ((regs_ever_live[info->first_gp_reg_save+i]
8995 && ! call_used_regs[info->first_gp_reg_save+i])
8996 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
8997 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8998 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9001 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
9002 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9003 GEN_INT (info->gp_save_offset
9006 mem = gen_rtx_MEM (reg_mode, addr);
9007 set_mem_alias_set (mem, rs6000_sr_alias_set);
9009 insn = emit_move_insn (mem, reg);
9010 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9011 NULL_RTX, NULL_RTX);
9015 /* ??? There's no need to emit actual instructions here, but it's the
9016 easiest way to get the frame unwind information emitted. */
9017 if (current_function_calls_eh_return)
9019 unsigned int i, regno;
9025 regno = EH_RETURN_DATA_REGNO (i);
9026 if (regno == INVALID_REGNUM)
9029 reg = gen_rtx_REG (reg_mode, regno);
9030 addr = plus_constant (frame_reg_rtx,
9031 info->ehrd_offset + sp_offset
9032 + reg_size * (int) i);
9033 mem = gen_rtx_MEM (reg_mode, addr);
9034 set_mem_alias_set (mem, rs6000_sr_alias_set);
9036 insn = emit_move_insn (mem, reg);
9037 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9038 NULL_RTX, NULL_RTX);
9042 /* Save lr if we used it. */
9043 if (info->lr_save_p)
9045 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9046 GEN_INT (info->lr_save_offset + sp_offset));
9047 rtx reg = gen_rtx_REG (Pmode, 0);
9048 rtx mem = gen_rtx_MEM (Pmode, addr);
9049 /* This should not be of rs6000_sr_alias_set, because of
9050 __builtin_return_address. */
9052 insn = emit_move_insn (mem, reg);
9053 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9054 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
9057 /* Save CR if we use any that must be preserved. */
9058 if (info->cr_save_p)
9060 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9061 GEN_INT (info->cr_save_offset + sp_offset));
9062 rtx mem = gen_rtx_MEM (SImode, addr);
9064 set_mem_alias_set (mem, rs6000_sr_alias_set);
9066 /* If r12 was used to hold the original sp, copy cr into r0 now
9068 if (REGNO (frame_reg_rtx) == 12)
9070 cr_save_rtx = gen_rtx_REG (SImode, 0);
9071 emit_insn (gen_movesi_from_cr (cr_save_rtx));
9073 insn = emit_move_insn (mem, cr_save_rtx);
9075 /* Now, there's no way that dwarf2out_frame_debug_expr is going
9076 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
9077 OK. All we have to do is specify that _one_ condition code
9078 register is saved in this stack slot. The thrower's epilogue
9079 will then restore all the call-saved registers.
9080 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
9081 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9082 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
9085 /* Update stack and set back pointer unless this is V.4,
9086 for which it was done previously. */
9087 if (info->push_p && DEFAULT_ABI != ABI_V4)
9088 rs6000_emit_allocate_stack (info->total_size, FALSE);
9090 /* Set frame pointer, if needed. */
9091 if (frame_pointer_needed)
9093 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
9095 RTX_FRAME_RELATED_P (insn) = 1;
9098 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9099 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
9100 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
9101 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
9103 /* If emit_load_toc_table will use the link register, we need to save
9104 it. We use R11 for this purpose because emit_load_toc_table
9105 can use register 0. This allows us to use a plain 'blr' to return
9106 from the procedure more often. */
9107 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
9108 && ! info->lr_save_p
9109 && EXIT_BLOCK_PTR->pred != NULL);
9110 if (save_LR_around_toc_setup)
9111 emit_move_insn (gen_rtx_REG (Pmode, 11),
9112 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
9114 rs6000_emit_load_toc_table (TRUE);
9116 if (save_LR_around_toc_setup)
9117 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9118 gen_rtx_REG (Pmode, 11));
9121 if (DEFAULT_ABI == ABI_DARWIN
9122 && flag_pic && current_function_uses_pic_offset_table)
9124 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9126 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
9129 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
9130 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
9134 /* Write function prologue. */
9137 rs6000_output_function_prologue (file, size)
9139 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9141 rs6000_stack_t *info = rs6000_stack_info ();
9143 if (TARGET_DEBUG_STACK)
9144 debug_stack_info (info);
9146 /* Write .extern for any function we will call to save and restore
9148 if (info->first_fp_reg_save < 64
9149 && !FP_SAVE_INLINE (info->first_fp_reg_save))
9150 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
9151 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
9152 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
9155 /* Write .extern for AIX common mode routines, if needed. */
9156 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
9158 fputs ("\t.extern __mulh\n", file);
9159 fputs ("\t.extern __mull\n", file);
9160 fputs ("\t.extern __divss\n", file);
9161 fputs ("\t.extern __divus\n", file);
9162 fputs ("\t.extern __quoss\n", file);
9163 fputs ("\t.extern __quous\n", file);
9164 common_mode_defined = 1;
9167 if (! HAVE_prologue)
9171 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
9172 the "toplevel" insn chain. */
9173 emit_note (0, NOTE_INSN_DELETED);
9174 rs6000_emit_prologue ();
9175 emit_note (0, NOTE_INSN_DELETED);
9177 /* Expand INSN_ADDRESSES so final() doesn't crash. */
9181 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
9183 INSN_ADDRESSES_NEW (insn, addr);
9188 if (TARGET_DEBUG_STACK)
9189 debug_rtx_list (get_insns (), 100);
9190 final (get_insns (), file, FALSE, FALSE);
9194 rs6000_pic_labelno++;
9197 /* Emit function epilogue as insns.
9199 At present, dwarf2out_frame_debug_expr doesn't understand
9200 register restores, so we don't bother setting RTX_FRAME_RELATED_P
9201 anywhere in the epilogue. Most of the insns below would in any case
9202 need special notes to explain where r11 is in relation to the stack. */
9205 rs6000_emit_epilogue (sibcall)
9208 rs6000_stack_t *info;
9209 int restoring_FPRs_inline;
9210 int using_load_multiple;
9211 int using_mfcr_multiple;
9212 int use_backchain_to_restore_sp;
9214 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
9215 rtx frame_reg_rtx = sp_reg_rtx;
9216 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
9217 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9220 info = rs6000_stack_info ();
9221 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
9222 && info->first_gp_reg_save < 31);
9223 restoring_FPRs_inline = (sibcall
9224 || current_function_calls_eh_return
9225 || info->first_fp_reg_save == 64
9226 || FP_SAVE_INLINE (info->first_fp_reg_save));
9227 use_backchain_to_restore_sp = (frame_pointer_needed
9228 || current_function_calls_alloca
9229 || info->total_size > 32767);
9230 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9231 || rs6000_cpu == PROCESSOR_PPC603
9232 || rs6000_cpu == PROCESSOR_PPC750
9235 /* If we have a frame pointer, a call to alloca, or a large stack
9236 frame, restore the old stack pointer using the backchain. Otherwise,
9237 we know what size to update it with. */
9238 if (use_backchain_to_restore_sp)
9240 /* Under V.4, don't reset the stack pointer until after we're done
9241 loading the saved registers. */
9242 if (DEFAULT_ABI == ABI_V4)
9243 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
9245 emit_move_insn (frame_reg_rtx,
9246 gen_rtx_MEM (Pmode, sp_reg_rtx));
9249 else if (info->push_p)
9251 if (DEFAULT_ABI == ABI_V4)
9252 sp_offset = info->total_size;
9255 emit_insn (TARGET_32BIT
9256 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9257 GEN_INT (info->total_size))
9258 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9259 GEN_INT (info->total_size)));
9263 /* Restore AltiVec registers if needed. */
9264 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9268 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
9269 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9271 rtx addr, areg, mem;
9273 areg = gen_rtx_REG (Pmode, 0);
9275 (areg, GEN_INT (info->altivec_save_offset
9277 + 16 * (i - info->first_altivec_reg_save)));
9279 /* AltiVec addressing mode is [reg+reg]. */
9280 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
9281 mem = gen_rtx_MEM (V4SImode, addr);
9282 set_mem_alias_set (mem, rs6000_sr_alias_set);
9284 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
9288 /* Restore VRSAVE if needed. */
9289 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
9293 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9294 GEN_INT (info->vrsave_save_offset + sp_offset));
9295 mem = gen_rtx_MEM (SImode, addr);
9296 set_mem_alias_set (mem, rs6000_sr_alias_set);
9297 reg = gen_rtx_REG (SImode, 12);
9298 emit_move_insn (reg, mem);
9300 emit_insn (generate_set_vrsave (reg, info, 1));
9303 /* Get the old lr if we saved it. */
9304 if (info->lr_save_p)
9306 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9307 GEN_INT (info->lr_save_offset + sp_offset));
9308 rtx mem = gen_rtx_MEM (Pmode, addr);
9310 set_mem_alias_set (mem, rs6000_sr_alias_set);
9312 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9315 /* Get the old cr if we saved it. */
9316 if (info->cr_save_p)
9318 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9319 GEN_INT (info->cr_save_offset + sp_offset));
9320 rtx mem = gen_rtx_MEM (SImode, addr);
9322 set_mem_alias_set (mem, rs6000_sr_alias_set);
9324 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9327 /* Set LR here to try to overlap restores below. */
9328 if (info->lr_save_p)
9329 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9330 gen_rtx_REG (Pmode, 0));
9332 /* Load exception handler data registers, if needed. */
9333 if (current_function_calls_eh_return)
9335 unsigned int i, regno;
9341 regno = EH_RETURN_DATA_REGNO (i);
9342 if (regno == INVALID_REGNUM)
9345 addr = plus_constant (frame_reg_rtx,
9346 info->ehrd_offset + sp_offset
9347 + reg_size * (int) i);
9348 mem = gen_rtx_MEM (reg_mode, addr);
9349 set_mem_alias_set (mem, rs6000_sr_alias_set);
9351 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9355 /* Restore GPRs. This is done as a PARALLEL if we are using
9356 the load-multiple instructions. */
9357 if (using_load_multiple)
9360 p = rtvec_alloc (32 - info->first_gp_reg_save);
9361 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9363 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9364 GEN_INT (info->gp_save_offset
9367 rtx mem = gen_rtx_MEM (reg_mode, addr);
9369 set_mem_alias_set (mem, rs6000_sr_alias_set);
9372 gen_rtx_SET (VOIDmode,
9373 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9376 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9379 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9380 if ((regs_ever_live[info->first_gp_reg_save+i]
9381 && ! call_used_regs[info->first_gp_reg_save+i])
9382 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
9383 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9384 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9386 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9387 GEN_INT (info->gp_save_offset
9390 rtx mem = gen_rtx_MEM (reg_mode, addr);
9392 set_mem_alias_set (mem, rs6000_sr_alias_set);
9394 emit_move_insn (gen_rtx_REG (reg_mode,
9395 info->first_gp_reg_save + i),
9399 /* Restore fpr's if we need to do it without calling a function. */
9400 if (restoring_FPRs_inline)
9401 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9402 if ((regs_ever_live[info->first_fp_reg_save+i]
9403 && ! call_used_regs[info->first_fp_reg_save+i]))
9406 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9407 GEN_INT (info->fp_save_offset
9410 mem = gen_rtx_MEM (DFmode, addr);
9411 set_mem_alias_set (mem, rs6000_sr_alias_set);
9413 emit_move_insn (gen_rtx_REG (DFmode,
9414 info->first_fp_reg_save + i),
9418 /* If we saved cr, restore it here. Just those that were used. */
9419 if (info->cr_save_p)
9421 rtx r12_rtx = gen_rtx_REG (SImode, 12);
9424 if (using_mfcr_multiple)
9426 for (i = 0; i < 8; i++)
9427 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9433 if (using_mfcr_multiple && count > 1)
9438 p = rtvec_alloc (count);
9441 for (i = 0; i < 8; i++)
9442 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9444 rtvec r = rtvec_alloc (2);
9445 RTVEC_ELT (r, 0) = r12_rtx;
9446 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9447 RTVEC_ELT (p, ndx) =
9448 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9449 gen_rtx_UNSPEC (CCmode, r, 20));
9452 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9457 for (i = 0; i < 8; i++)
9458 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9460 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9466 /* If this is V.4, unwind the stack pointer after all of the loads
9467 have been done. We need to emit a block here so that sched
9468 doesn't decide to move the sp change before the register restores
9469 (which may not have any obvious dependency on the stack). This
9470 doesn't hurt performance, because there is no scheduling that can
9471 be done after this point. */
9472 if (DEFAULT_ABI == ABI_V4)
9474 if (frame_reg_rtx != sp_reg_rtx)
9475 rs6000_emit_stack_tie ();
9477 if (use_backchain_to_restore_sp)
9479 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9481 else if (sp_offset != 0)
9483 emit_insn (Pmode == SImode
9484 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9485 GEN_INT (sp_offset))
9486 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9487 GEN_INT (sp_offset)));
9491 if (current_function_calls_eh_return)
9493 rtx sa = EH_RETURN_STACKADJ_RTX;
9494 emit_insn (Pmode == SImode
9495 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9496 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
9502 if (! restoring_FPRs_inline)
9503 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9505 p = rtvec_alloc (2);
9507 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9508 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9510 LINK_REGISTER_REGNUM));
9512 /* If we have to restore more than two FP registers, branch to the
9513 restore function. It will return to our caller. */
9514 if (! restoring_FPRs_inline)
9518 const char *alloc_rname;
9520 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9521 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9522 alloc_rname = ggc_strdup (rname);
9523 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9524 gen_rtx_SYMBOL_REF (Pmode,
9527 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9530 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9531 GEN_INT (info->fp_save_offset + 8*i));
9532 mem = gen_rtx_MEM (DFmode, addr);
9533 set_mem_alias_set (mem, rs6000_sr_alias_set);
9535 RTVEC_ELT (p, i+3) =
9536 gen_rtx_SET (VOIDmode,
9537 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9542 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9546 /* Write function epilogue. */
9549 rs6000_output_function_epilogue (file, size)
9551 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9553 rs6000_stack_t *info = rs6000_stack_info ();
9554 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9556 if (! HAVE_epilogue)
9558 rtx insn = get_last_insn ();
9559 /* If the last insn was a BARRIER, we don't have to write anything except
9561 if (GET_CODE (insn) == NOTE)
9562 insn = prev_nonnote_insn (insn);
9563 if (insn == 0 || GET_CODE (insn) != BARRIER)
9565 /* This is slightly ugly, but at least we don't have two
9566 copies of the epilogue-emitting code. */
9569 /* A NOTE_INSN_DELETED is supposed to be at the start
9570 and end of the "toplevel" insn chain. */
9571 emit_note (0, NOTE_INSN_DELETED);
9572 rs6000_emit_epilogue (FALSE);
9573 emit_note (0, NOTE_INSN_DELETED);
9575 /* Expand INSN_ADDRESSES so final() doesn't crash. */
9579 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
9581 INSN_ADDRESSES_NEW (insn, addr);
9586 if (TARGET_DEBUG_STACK)
9587 debug_rtx_list (get_insns (), 100);
9588 final (get_insns (), file, FALSE, FALSE);
9593 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9596 We don't output a traceback table if -finhibit-size-directive was
9597 used. The documentation for -finhibit-size-directive reads
9598 ``don't output a @code{.size} assembler directive, or anything
9599 else that would cause trouble if the function is split in the
9600 middle, and the two halves are placed at locations far apart in
9601 memory.'' The traceback table has this property, since it
9602 includes the offset from the start of the function to the
9603 traceback table itself.
9605 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9606 different traceback table. */
9607 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9609 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9610 const char *language_string = lang_hooks.name;
9611 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9614 while (*fname == '.') /* V.4 encodes . in the name */
9617 /* Need label immediately before tbtab, so we can compute its offset
9618 from the function start. */
9621 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9622 ASM_OUTPUT_LABEL (file, fname);
9624 /* The .tbtab pseudo-op can only be used for the first eight
9625 expressions, since it can't handle the possibly variable
9626 length fields that follow. However, if you omit the optional
9627 fields, the assembler outputs zeros for all optional fields
9628 anyways, giving each variable length field is minimum length
9629 (as defined in sys/debug.h). Thus we can not use the .tbtab
9630 pseudo-op at all. */
9632 /* An all-zero word flags the start of the tbtab, for debuggers
9633 that have to find it by searching forward from the entry
9634 point or from the current pc. */
9635 fputs ("\t.long 0\n", file);
9637 /* Tbtab format type. Use format type 0. */
9638 fputs ("\t.byte 0,", file);
9640 /* Language type. Unfortunately, there doesn't seem to be any
9641 official way to get this info, so we use language_string. C
9642 is 0. C++ is 9. No number defined for Obj-C, so use the
9643 value for C for now. There is no official value for Java,
9644 although IBM appears to be using 13. There is no official value
9645 for Chill, so we've chosen 44 pseudo-randomly. */
9646 if (! strcmp (language_string, "GNU C")
9647 || ! strcmp (language_string, "GNU Objective-C"))
9649 else if (! strcmp (language_string, "GNU F77"))
9651 else if (! strcmp (language_string, "GNU Ada"))
9653 else if (! strcmp (language_string, "GNU Pascal"))
9655 else if (! strcmp (language_string, "GNU C++"))
9657 else if (! strcmp (language_string, "GNU Java"))
9659 else if (! strcmp (language_string, "GNU CHILL"))
9663 fprintf (file, "%d,", i);
9665 /* 8 single bit fields: global linkage (not set for C extern linkage,
9666 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9667 from start of procedure stored in tbtab, internal function, function
9668 has controlled storage, function has no toc, function uses fp,
9669 function logs/aborts fp operations. */
9670 /* Assume that fp operations are used if any fp reg must be saved. */
9671 fprintf (file, "%d,",
9672 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9674 /* 6 bitfields: function is interrupt handler, name present in
9675 proc table, function calls alloca, on condition directives
9676 (controls stack walks, 3 bits), saves condition reg, saves
9678 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9679 set up as a frame pointer, even when there is no alloca call. */
9680 fprintf (file, "%d,",
9681 ((optional_tbtab << 6)
9682 | ((optional_tbtab & frame_pointer_needed) << 5)
9683 | (info->cr_save_p << 1)
9684 | (info->lr_save_p)));
9686 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9688 fprintf (file, "%d,",
9689 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9691 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9692 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9696 /* Compute the parameter info from the function decl argument
9699 int next_parm_info_bit = 31;
9701 for (decl = DECL_ARGUMENTS (current_function_decl);
9702 decl; decl = TREE_CHAIN (decl))
9704 rtx parameter = DECL_INCOMING_RTL (decl);
9705 enum machine_mode mode = GET_MODE (parameter);
9707 if (GET_CODE (parameter) == REG)
9709 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9717 else if (mode == DFmode)
9722 /* If only one bit will fit, don't or in this entry. */
9723 if (next_parm_info_bit > 0)
9724 parm_info |= (bits << (next_parm_info_bit - 1));
9725 next_parm_info_bit -= 2;
9729 fixed_parms += ((GET_MODE_SIZE (mode)
9730 + (UNITS_PER_WORD - 1))
9732 next_parm_info_bit -= 1;
9738 /* Number of fixed point parameters. */
9739 /* This is actually the number of words of fixed point parameters; thus
9740 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9741 fprintf (file, "%d,", fixed_parms);
9743 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9745 /* This is actually the number of fp registers that hold parameters;
9746 and thus the maximum value is 13. */
9747 /* Set parameters on stack bit if parameters are not in their original
9748 registers, regardless of whether they are on the stack? Xlc
9749 seems to set the bit when not optimizing. */
9750 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9752 if (! optional_tbtab)
9755 /* Optional fields follow. Some are variable length. */
9757 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9759 /* There is an entry for each parameter in a register, in the order that
9760 they occur in the parameter list. Any intervening arguments on the
9761 stack are ignored. If the list overflows a long (max possible length
9762 34 bits) then completely leave off all elements that don't fit. */
9763 /* Only emit this long if there was at least one parameter. */
9764 if (fixed_parms || float_parms)
9765 fprintf (file, "\t.long %d\n", parm_info);
9767 /* Offset from start of code to tb table. */
9768 fputs ("\t.long ", file);
9769 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9771 RS6000_OUTPUT_BASENAME (file, fname);
9773 assemble_name (file, fname);
9777 RS6000_OUTPUT_BASENAME (file, fname);
9779 assemble_name (file, fname);
9783 /* Interrupt handler mask. */
9784 /* Omit this long, since we never set the interrupt handler bit
9787 /* Number of CTL (controlled storage) anchors. */
9788 /* Omit this long, since the has_ctl bit is never set above. */
9790 /* Displacement into stack of each CTL anchor. */
9791 /* Omit this list of longs, because there are no CTL anchors. */
9793 /* Length of function name. */
9794 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9796 /* Function name. */
9797 assemble_string (fname, strlen (fname));
9799 /* Register for alloca automatic storage; this is always reg 31.
9800 Only emit this if the alloca bit was set above. */
9801 if (frame_pointer_needed)
9802 fputs ("\t.byte 31\n", file);
9804 fputs ("\t.align 2\n", file);
9809 /* A C compound statement that outputs the assembler code for a thunk
9810 function, used to implement C++ virtual function calls with
9811 multiple inheritance. The thunk acts as a wrapper around a virtual
9812 function, adjusting the implicit object parameter before handing
9813 control off to the real function.
9815 First, emit code to add the integer DELTA to the location that
9816 contains the incoming first argument. Assume that this argument
9817 contains a pointer, and is the one used to pass the `this' pointer
9818 in C++. This is the incoming argument *before* the function
9819 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9820 values of all other incoming arguments.
9822 After the addition, emit code to jump to FUNCTION, which is a
9823 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9824 not touch the return address. Hence returning from FUNCTION will
9825 return to whoever called the current `thunk'.
9827 The effect must be as if FUNCTION had been called directly with the
9828 adjusted first argument. This macro is responsible for emitting
9829 all of the code for a thunk function; output_function_prologue()
9830 and output_function_epilogue() are not invoked.
9832 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9833 been extracted from it.) It might possibly be useful on some
9834 targets, but probably not.
9836 If you do not define this macro, the target-independent code in the
9837 C++ frontend will generate a less efficient heavyweight thunk that
9838 calls FUNCTION instead of jumping to it. The generic approach does
9839 not support varargs. */
9842 output_mi_thunk (file, thunk_fndecl, delta, function)
9844 tree thunk_fndecl ATTRIBUTE_UNUSED;
9848 const char *this_reg =
9849 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9852 const char *r0 = reg_names[0];
9853 const char *toc = reg_names[2];
9854 const char *schain = reg_names[11];
9855 const char *r12 = reg_names[12];
9857 static int labelno = 0;
9859 /* Small constants that can be done by one add instruction. */
9860 if (delta >= -32768 && delta <= 32767)
9862 if (! TARGET_NEW_MNEMONICS)
9863 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9865 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9868 /* Large constants that can be done by one addis instruction. */
9869 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9870 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9873 /* 32-bit constants that can be done by an add and addis instruction. */
9874 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9876 /* Break into two pieces, propagating the sign bit from the low
9877 word to the upper word. */
9878 int delta_high = delta >> 16;
9879 int delta_low = delta & 0xffff;
9880 if ((delta_low & 0x8000) != 0)
9883 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9886 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9889 if (! TARGET_NEW_MNEMONICS)
9890 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9892 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9895 /* 64-bit constants, fixme */
9899 /* Get the prefix in front of the names. */
9900 switch (DEFAULT_ABI)
9910 case ABI_AIX_NODESC:
9915 /* If the function is compiled in this module, jump to it directly.
9916 Otherwise, load up its address and jump to it. */
9918 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9920 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9921 && (! lookup_attribute ("longcall",
9922 TYPE_ATTRIBUTES (TREE_TYPE (function)))
9923 || lookup_attribute ("shortcall",
9924 TYPE_ATTRIBUTES (TREE_TYPE (function)))))
9927 fprintf (file, "\tb %s", prefix);
9928 assemble_name (file, fname);
9929 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9935 switch (DEFAULT_ABI)
9941 /* Set up a TOC entry for the function. */
9942 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9944 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9947 if (TARGET_MINIMAL_TOC)
9948 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9951 fputs ("\t.tc ", file);
9952 assemble_name (file, fname);
9953 fputs ("[TC],", file);
9955 assemble_name (file, fname);
9958 if (TARGET_MINIMAL_TOC)
9959 asm_fprintf (file, (TARGET_32BIT)
9960 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9961 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9962 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9963 assemble_name (file, buf);
9964 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9965 fputs ("-(.LCTOC1)", file);
9966 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9968 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9972 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9975 asm_fprintf (file, "\tmtctr %s\n", r0);
9977 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9980 asm_fprintf (file, "\tbctr\n");
9983 case ABI_AIX_NODESC:
9985 fprintf (file, "\tb %s", prefix);
9986 assemble_name (file, fname);
9987 if (flag_pic) fputs ("@plt", file);
9993 fprintf (file, "\tb %s", prefix);
9994 if (flag_pic && !machopic_name_defined_p (fname))
9995 assemble_name (file, machopic_stub_name (fname));
9997 assemble_name (file, fname);
10006 /* A quick summary of the various types of 'constant-pool tables'
10009 Target Flags Name One table per
10010 AIX (none) AIX TOC object file
10011 AIX -mfull-toc AIX TOC object file
10012 AIX -mminimal-toc AIX minimal TOC translation unit
10013 SVR4/EABI (none) SVR4 SDATA object file
10014 SVR4/EABI -fpic SVR4 pic object file
10015 SVR4/EABI -fPIC SVR4 PIC translation unit
10016 SVR4/EABI -mrelocatable EABI TOC function
10017 SVR4/EABI -maix AIX TOC object file
10018 SVR4/EABI -maix -mminimal-toc
10019 AIX minimal TOC translation unit
10021 Name Reg. Set by entries contains:
10022 made by addrs? fp? sum?
10024 AIX TOC 2 crt0 as Y option option
10025 AIX minimal TOC 30 prolog gcc Y Y option
10026 SVR4 SDATA 13 crt0 gcc N Y N
10027 SVR4 pic 30 prolog ld Y not yet N
10028 SVR4 PIC 30 prolog gcc Y option option
10029 EABI TOC 30 prolog gcc Y option option
10033 /* Hash table stuff for keeping track of TOC entries. */
10035 struct toc_hash_struct
10037 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
10038 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
10040 enum machine_mode key_mode;
10044 static htab_t toc_hash_table;
10046 /* Hash functions for the hash table. */
10049 rs6000_hash_constant (k)
10052 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
10053 const char *format = GET_RTX_FORMAT (GET_CODE (k));
10054 int flen = strlen (format);
10057 if (GET_CODE (k) == LABEL_REF)
10058 return result * 1231 + X0INT (XEXP (k, 0), 3);
10060 if (GET_CODE (k) == CODE_LABEL)
10065 for (; fidx < flen; fidx++)
10066 switch (format[fidx])
10071 const char *str = XSTR (k, fidx);
10072 len = strlen (str);
10073 result = result * 613 + len;
10074 for (i = 0; i < len; i++)
10075 result = result * 613 + (unsigned) str[i];
10080 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
10084 result = result * 613 + (unsigned) XINT (k, fidx);
10087 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
10088 result = result * 613 + (unsigned) XWINT (k, fidx);
10092 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
10093 result = result * 613 + (unsigned) (XWINT (k, fidx)
10104 toc_hash_function (hash_entry)
10105 const void * hash_entry;
10107 const struct toc_hash_struct *thc =
10108 (const struct toc_hash_struct *) hash_entry;
10109 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
10112 /* Compare H1 and H2 for equivalence. */
10115 toc_hash_eq (h1, h2)
10119 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
10120 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
10122 if (((const struct toc_hash_struct *) h1)->key_mode
10123 != ((const struct toc_hash_struct *) h2)->key_mode)
10126 return rtx_equal_p (r1, r2);
10129 /* Mark the hash table-entry HASH_ENTRY. */
10132 toc_hash_mark_entry (hash_slot, unused)
10134 void * unused ATTRIBUTE_UNUSED;
10136 const struct toc_hash_struct * hash_entry =
10137 *(const struct toc_hash_struct **) hash_slot;
10138 rtx r = hash_entry->key;
10139 ggc_set_mark (hash_entry);
10140 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
10141 if (GET_CODE (r) == LABEL_REF)
10144 ggc_set_mark (XEXP (r, 0));
10151 /* Mark all the elements of the TOC hash-table *HT. */
10154 toc_hash_mark_table (vht)
10159 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
10162 /* These are the names given by the C++ front-end to vtables, and
10163 vtable-like objects. Ideally, this logic should not be here;
10164 instead, there should be some programmatic way of inquiring as
10165 to whether or not an object is a vtable. */
10167 #define VTABLE_NAME_P(NAME) \
10168 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
10169 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
10170 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
10171 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
10174 rs6000_output_symbol_ref (file, x)
10178 /* Currently C++ toc references to vtables can be emitted before it
10179 is decided whether the vtable is public or private. If this is
10180 the case, then the linker will eventually complain that there is
10181 a reference to an unknown section. Thus, for vtables only,
10182 we emit the TOC reference to reference the symbol and not the
10184 const char *name = XSTR (x, 0);
10186 if (VTABLE_NAME_P (name))
10188 RS6000_OUTPUT_BASENAME (file, name);
10191 assemble_name (file, name);
10194 /* Output a TOC entry. We derive the entry name from what is being
10198 output_toc (file, x, labelno, mode)
10202 enum machine_mode mode;
10205 const char *name = buf;
10206 const char *real_name;
10213 /* When the linker won't eliminate them, don't output duplicate
10214 TOC entries (this happens on AIX if there is any kind of TOC,
10215 and on SVR4 under -fPIC or -mrelocatable). */
10218 struct toc_hash_struct *h;
10221 h = ggc_alloc (sizeof (*h));
10223 h->key_mode = mode;
10224 h->labelno = labelno;
10226 found = htab_find_slot (toc_hash_table, h, 1);
10227 if (*found == NULL)
10229 else /* This is indeed a duplicate.
10230 Set this label equal to that label. */
10232 fputs ("\t.set ", file);
10233 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10234 fprintf (file, "%d,", labelno);
10235 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10236 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
10242 /* If we're going to put a double constant in the TOC, make sure it's
10243 aligned properly when strict alignment is on. */
10244 if (GET_CODE (x) == CONST_DOUBLE
10245 && STRICT_ALIGNMENT
10246 && GET_MODE_BITSIZE (mode) >= 64
10247 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
10248 ASM_OUTPUT_ALIGN (file, 3);
10251 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
10253 /* Handle FP constants specially. Note that if we have a minimal
10254 TOC, things we put here aren't actually in the TOC, so we can allow
10256 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
10258 REAL_VALUE_TYPE rv;
10261 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10262 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
10266 if (TARGET_MINIMAL_TOC)
10267 fputs (DOUBLE_INT_ASM_OP, file);
10269 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
10270 k[0] & 0xffffffff, k[1] & 0xffffffff);
10271 fprintf (file, "0x%lx%08lx\n",
10272 k[0] & 0xffffffff, k[1] & 0xffffffff);
10277 if (TARGET_MINIMAL_TOC)
10278 fputs ("\t.long ", file);
10280 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
10281 k[0] & 0xffffffff, k[1] & 0xffffffff);
10282 fprintf (file, "0x%lx,0x%lx\n",
10283 k[0] & 0xffffffff, k[1] & 0xffffffff);
10287 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
10289 REAL_VALUE_TYPE rv;
10292 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10293 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
10297 if (TARGET_MINIMAL_TOC)
10298 fputs (DOUBLE_INT_ASM_OP, file);
10300 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
10301 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
10306 if (TARGET_MINIMAL_TOC)
10307 fputs ("\t.long ", file);
10309 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
10310 fprintf (file, "0x%lx\n", l & 0xffffffff);
10314 else if (GET_MODE (x) == VOIDmode
10315 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10317 unsigned HOST_WIDE_INT low;
10318 HOST_WIDE_INT high;
10320 if (GET_CODE (x) == CONST_DOUBLE)
10322 low = CONST_DOUBLE_LOW (x);
10323 high = CONST_DOUBLE_HIGH (x);
10326 #if HOST_BITS_PER_WIDE_INT == 32
10329 high = (low & 0x80000000) ? ~0 : 0;
10333 low = INTVAL (x) & 0xffffffff;
10334 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10338 /* TOC entries are always Pmode-sized, but since this
10339 is a bigendian machine then if we're putting smaller
10340 integer constants in the TOC we have to pad them.
10341 (This is still a win over putting the constants in
10342 a separate constant pool, because then we'd have
10343 to have both a TOC entry _and_ the actual constant.)
10345 For a 32-bit target, CONST_INT values are loaded and shifted
10346 entirely within `low' and can be stored in one TOC entry. */
10348 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10349 abort ();/* It would be easy to make this work, but it doesn't now. */
10351 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10352 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10353 POINTER_SIZE, &low, &high, 0);
10357 if (TARGET_MINIMAL_TOC)
10358 fputs (DOUBLE_INT_ASM_OP, file);
10360 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10361 (long) high & 0xffffffff, (long) low & 0xffffffff);
10362 fprintf (file, "0x%lx%08lx\n",
10363 (long) high & 0xffffffff, (long) low & 0xffffffff);
10368 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10370 if (TARGET_MINIMAL_TOC)
10371 fputs ("\t.long ", file);
10373 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10374 (long) high & 0xffffffff, (long) low & 0xffffffff);
10375 fprintf (file, "0x%lx,0x%lx\n",
10376 (long) high & 0xffffffff, (long) low & 0xffffffff);
10380 if (TARGET_MINIMAL_TOC)
10381 fputs ("\t.long ", file);
10383 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
10384 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
10390 if (GET_CODE (x) == CONST)
10392 if (GET_CODE (XEXP (x, 0)) != PLUS)
10395 base = XEXP (XEXP (x, 0), 0);
10396 offset = INTVAL (XEXP (XEXP (x, 0), 1));
10399 if (GET_CODE (base) == SYMBOL_REF)
10400 name = XSTR (base, 0);
10401 else if (GET_CODE (base) == LABEL_REF)
10402 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10403 else if (GET_CODE (base) == CODE_LABEL)
10404 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10408 real_name = (*targetm.strip_name_encoding) (name);
10409 if (TARGET_MINIMAL_TOC)
10410 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10413 fprintf (file, "\t.tc %s", real_name);
10416 fprintf (file, ".N%d", - offset);
10418 fprintf (file, ".P%d", offset);
10420 fputs ("[TC],", file);
10423 /* Currently C++ toc references to vtables can be emitted before it
10424 is decided whether the vtable is public or private. If this is
10425 the case, then the linker will eventually complain that there is
10426 a TOC reference to an unknown section. Thus, for vtables only,
10427 we emit the TOC reference to reference the symbol and not the
10429 if (VTABLE_NAME_P (name))
10431 RS6000_OUTPUT_BASENAME (file, name);
10433 fprintf (file, "%d", offset);
10434 else if (offset > 0)
10435 fprintf (file, "+%d", offset);
10438 output_addr_const (file, x);
10442 /* Output an assembler pseudo-op to write an ASCII string of N characters
10443 starting at P to FILE.
10445 On the RS/6000, we have to do this using the .byte operation and
10446 write out special characters outside the quoted string.
10447 Also, the assembler is broken; very long strings are truncated,
10448 so we must artificially break them up early. */
10451 output_ascii (file, p, n)
10457 int i, count_string;
10458 const char *for_string = "\t.byte \"";
10459 const char *for_decimal = "\t.byte ";
10460 const char *to_close = NULL;
10463 for (i = 0; i < n; i++)
10466 if (c >= ' ' && c < 0177)
10469 fputs (for_string, file);
10472 /* Write two quotes to get one. */
10480 for_decimal = "\"\n\t.byte ";
10484 if (count_string >= 512)
10486 fputs (to_close, file);
10488 for_string = "\t.byte \"";
10489 for_decimal = "\t.byte ";
10497 fputs (for_decimal, file);
10498 fprintf (file, "%d", c);
10500 for_string = "\n\t.byte \"";
10501 for_decimal = ", ";
10507 /* Now close the string if we have written one. Then end the line. */
10509 fputs (to_close, file);
10512 /* Generate a unique section name for FILENAME for a section type
10513 represented by SECTION_DESC. Output goes into BUF.
10515 SECTION_DESC can be any string, as long as it is different for each
10516 possible section type.
10518 We name the section in the same manner as xlc. The name begins with an
10519 underscore followed by the filename (after stripping any leading directory
10520 names) with the last period replaced by the string SECTION_DESC. If
10521 FILENAME does not contain a period, SECTION_DESC is appended to the end of
10525 rs6000_gen_section_name (buf, filename, section_desc)
10527 const char *filename;
10528 const char *section_desc;
10530 const char *q, *after_last_slash, *last_period = 0;
10534 after_last_slash = filename;
10535 for (q = filename; *q; q++)
10538 after_last_slash = q + 1;
10539 else if (*q == '.')
10543 len = strlen (after_last_slash) + strlen (section_desc) + 2;
10544 *buf = (char *) permalloc (len);
10549 for (q = after_last_slash; *q; q++)
10551 if (q == last_period)
10553 strcpy (p, section_desc);
10554 p += strlen (section_desc);
10557 else if (ISALNUM (*q))
10561 if (last_period == 0)
10562 strcpy (p, section_desc);
10567 /* Emit profile function. */
10570 output_profile_hook (labelno)
10573 if (DEFAULT_ABI == ABI_AIX)
10576 const char *label_name;
10579 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10580 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
10581 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10583 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10586 else if (DEFAULT_ABI == ABI_DARWIN)
10588 const char *mcount_name = RS6000_MCOUNT;
10589 int caller_addr_regno = LINK_REGISTER_REGNUM;
10591 /* Be conservative and always set this, at least for now. */
10592 current_function_uses_pic_offset_table = 1;
10595 /* For PIC code, set up a stub and collect the caller's address
10596 from r0, which is where the prologue puts it. */
10599 mcount_name = machopic_stub_name (mcount_name);
10600 if (current_function_uses_pic_offset_table)
10601 caller_addr_regno = 0;
10604 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10606 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10610 /* Write function profiler code. */
10613 output_function_profiler (file, labelno)
10619 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10620 switch (DEFAULT_ABI)
10626 case ABI_AIX_NODESC:
10627 fprintf (file, "\tmflr %s\n", reg_names[0]);
10630 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10631 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10632 reg_names[0], reg_names[1]);
10633 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10634 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10635 assemble_name (file, buf);
10636 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
10638 else if (flag_pic > 1)
10640 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10641 reg_names[0], reg_names[1]);
10642 /* Now, we need to get the address of the label. */
10643 fputs ("\tbl 1f\n\t.long ", file);
10644 assemble_name (file, buf);
10645 fputs ("-.\n1:", file);
10646 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10647 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10648 reg_names[0], reg_names[11]);
10649 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10650 reg_names[0], reg_names[0], reg_names[11]);
10654 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10655 assemble_name (file, buf);
10656 fputs ("@ha\n", file);
10657 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10658 reg_names[0], reg_names[1]);
10659 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10660 assemble_name (file, buf);
10661 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
10664 if (current_function_needs_context)
10665 asm_fprintf (file, "\tmr %s,%s\n",
10666 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10667 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10668 if (current_function_needs_context)
10669 asm_fprintf (file, "\tmr %s,%s\n",
10670 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10675 /* Don't do anything, done in output_profile_hook (). */
10681 /* Adjust the cost of a scheduling dependency. Return the new cost of
10682 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
10685 rs6000_adjust_cost (insn, link, dep_insn, cost)
10688 rtx dep_insn ATTRIBUTE_UNUSED;
10691 if (! recog_memoized (insn))
10694 if (REG_NOTE_KIND (link) != 0)
10697 if (REG_NOTE_KIND (link) == 0)
10699 /* Data dependency; DEP_INSN writes a register that INSN reads
10700 some cycles later. */
10701 switch (get_attr_type (insn))
10704 /* Tell the first scheduling pass about the latency between
10705 a mtctr and bctr (and mtlr and br/blr). The first
10706 scheduling pass will not know about this latency since
10707 the mtctr instruction, which has the latency associated
10708 to it, will be generated by reload. */
10709 return TARGET_POWER ? 5 : 4;
10711 /* Leave some extra cycles between a compare and its
10712 dependent branch, to inhibit expensive mispredicts. */
10713 if ((rs6000_cpu_attr == CPU_PPC603
10714 || rs6000_cpu_attr == CPU_PPC604
10715 || rs6000_cpu_attr == CPU_PPC604E
10716 || rs6000_cpu_attr == CPU_PPC620
10717 || rs6000_cpu_attr == CPU_PPC630
10718 || rs6000_cpu_attr == CPU_PPC750
10719 || rs6000_cpu_attr == CPU_PPC7400
10720 || rs6000_cpu_attr == CPU_PPC7450
10721 || rs6000_cpu_attr == CPU_POWER4)
10722 && recog_memoized (dep_insn)
10723 && (INSN_CODE (dep_insn) >= 0)
10724 && (get_attr_type (dep_insn) == TYPE_COMPARE
10725 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10726 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10727 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10732 /* Fall out to return default cost. */
10738 /* A C statement (sans semicolon) to update the integer scheduling
10739 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10740 INSN earlier, increase the priority to execute INSN later. Do not
10741 define this macro if you do not need to adjust the scheduling
10742 priorities of insns. */
10745 rs6000_adjust_priority (insn, priority)
10746 rtx insn ATTRIBUTE_UNUSED;
10749 /* On machines (like the 750) which have asymmetric integer units,
10750 where one integer unit can do multiply and divides and the other
10751 can't, reduce the priority of multiply/divide so it is scheduled
10752 before other integer operations. */
10755 if (! INSN_P (insn))
10758 if (GET_CODE (PATTERN (insn)) == USE)
10761 switch (rs6000_cpu_attr) {
10763 switch (get_attr_type (insn))
10770 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10771 priority, priority);
10772 if (priority >= 0 && priority < 0x01000000)
10782 /* Return how many instructions the machine can issue per cycle. */
10785 rs6000_issue_rate ()
10787 switch (rs6000_cpu_attr) {
10788 case CPU_RIOS1: /* ? */
10790 case CPU_PPC601: /* ? */
10810 /* Length in units of the trampoline for entering a nested function. */
10813 rs6000_trampoline_size ()
10817 switch (DEFAULT_ABI)
10823 ret = (TARGET_32BIT) ? 12 : 24;
10828 case ABI_AIX_NODESC:
10829 ret = (TARGET_32BIT) ? 40 : 48;
10836 /* Emit RTL insns to initialize the variable parts of a trampoline.
10837 FNADDR is an RTX for the address of the function's pure code.
10838 CXT is an RTX for the static chain value for the function. */
10841 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10846 enum machine_mode pmode = Pmode;
10847 int regsize = (TARGET_32BIT) ? 4 : 8;
10848 rtx ctx_reg = force_reg (pmode, cxt);
10850 switch (DEFAULT_ABI)
10855 /* Macros to shorten the code expansions below. */
10856 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10857 #define MEM_PLUS(addr,offset) \
10858 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10860 /* Under AIX, just build the 3 word function descriptor */
10863 rtx fn_reg = gen_reg_rtx (pmode);
10864 rtx toc_reg = gen_reg_rtx (pmode);
10865 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10866 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10867 emit_move_insn (MEM_DEREF (addr), fn_reg);
10868 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10869 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10873 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10876 case ABI_AIX_NODESC:
10877 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10878 FALSE, VOIDmode, 4,
10880 GEN_INT (rs6000_trampoline_size ()), SImode,
10890 /* Table of valid machine attributes. */
10892 const struct attribute_spec rs6000_attribute_table[] =
10894 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10895 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10896 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10897 { NULL, 0, 0, false, false, false, NULL }
10900 /* Handle a "longcall" or "shortcall" attribute; arguments as in
10901 struct attribute_spec.handler. */
10904 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10907 tree args ATTRIBUTE_UNUSED;
10908 int flags ATTRIBUTE_UNUSED;
10909 bool *no_add_attrs;
10911 if (TREE_CODE (*node) != FUNCTION_TYPE
10912 && TREE_CODE (*node) != FIELD_DECL
10913 && TREE_CODE (*node) != TYPE_DECL)
10915 warning ("`%s' attribute only applies to functions",
10916 IDENTIFIER_POINTER (name));
10917 *no_add_attrs = true;
10923 /* Set longcall attributes on all functions declared when
10924 rs6000_default_long_calls is true. */
10926 rs6000_set_default_type_attributes (type)
10929 if (rs6000_default_long_calls
10930 && (TREE_CODE (type) == FUNCTION_TYPE
10931 || TREE_CODE (type) == METHOD_TYPE))
10932 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
10934 TYPE_ATTRIBUTES (type));
10937 /* Return a reference suitable for calling a function with the
10938 longcall attribute. */
10941 rs6000_longcall_ref (call_ref)
10944 const char *call_name;
10947 if (GET_CODE (call_ref) != SYMBOL_REF)
10950 /* System V adds '.' to the internal name, so skip them. */
10951 call_name = XSTR (call_ref, 0);
10952 if (*call_name == '.')
10954 while (*call_name == '.')
10957 node = get_identifier (call_name);
10958 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10961 return force_reg (Pmode, call_ref);
10965 #ifdef USING_ELFOS_H
10967 /* A C statement or statements to switch to the appropriate section
10968 for output of RTX in mode MODE. You can assume that RTX is some
10969 kind of constant in RTL. The argument MODE is redundant except in
10970 the case of a `const_int' rtx. Select the section by calling
10971 `text_section' or one of the alternatives for other sections.
10973 Do not define this macro if you put all constants in the read-only
10977 rs6000_elf_select_rtx_section (mode, x, align)
10978 enum machine_mode mode;
10980 unsigned HOST_WIDE_INT align;
10982 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10985 default_elf_select_rtx_section (mode, x, align);
10988 /* A C statement or statements to switch to the appropriate
10989 section for output of DECL. DECL is either a `VAR_DECL' node
10990 or a constant of some sort. RELOC indicates whether forming
10991 the initial value of DECL requires link-time relocations. */
10994 rs6000_elf_select_section (decl, reloc, align)
10997 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
10999 int size = int_size_in_bytes (TREE_TYPE (decl));
11002 static void (* const sec_funcs[4]) PARAMS ((void)) = {
11003 &readonly_data_section,
11009 needs_sdata = (size > 0
11010 && size <= g_switch_value
11011 && rs6000_sdata != SDATA_NONE
11012 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
11014 if (TREE_CODE (decl) == STRING_CST)
11015 readonly = ! flag_writable_strings;
11016 else if (TREE_CODE (decl) == VAR_DECL)
11017 readonly = (! (flag_pic && reloc)
11018 && TREE_READONLY (decl)
11019 && ! TREE_SIDE_EFFECTS (decl)
11020 && DECL_INITIAL (decl)
11021 && DECL_INITIAL (decl) != error_mark_node
11022 && TREE_CONSTANT (DECL_INITIAL (decl)));
11023 else if (TREE_CODE (decl) == CONSTRUCTOR)
11024 readonly = (! (flag_pic && reloc)
11025 && ! TREE_SIDE_EFFECTS (decl)
11026 && TREE_CONSTANT (decl));
11029 if (needs_sdata && rs6000_sdata != SDATA_EABI)
11032 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
11035 /* A C statement to build up a unique section name, expressed as a
11036 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
11037 RELOC indicates whether the initial value of EXP requires
11038 link-time relocations. If you do not define this macro, GCC will use
11039 the symbol name prefixed by `.' as the section name. Note - this
11040 macro can now be called for uninitialized data items as well as
11041 initialised data and functions. */
11044 rs6000_elf_unique_section (decl, reloc)
11052 const char *prefix;
11054 static const char *const prefixes[7][2] =
11056 { ".rodata.", ".gnu.linkonce.r." },
11057 { ".sdata2.", ".gnu.linkonce.s2." },
11058 { ".data.", ".gnu.linkonce.d." },
11059 { ".sdata.", ".gnu.linkonce.s." },
11060 { ".bss.", ".gnu.linkonce.b." },
11061 { ".sbss.", ".gnu.linkonce.sb." },
11062 { ".text.", ".gnu.linkonce.t." }
11065 if (TREE_CODE (decl) == FUNCTION_DECL)
11074 if (TREE_CODE (decl) == STRING_CST)
11075 readonly = ! flag_writable_strings;
11076 else if (TREE_CODE (decl) == VAR_DECL)
11077 readonly = (! (flag_pic && reloc)
11078 && TREE_READONLY (decl)
11079 && ! TREE_SIDE_EFFECTS (decl)
11080 && TREE_CONSTANT (DECL_INITIAL (decl)));
11082 size = int_size_in_bytes (TREE_TYPE (decl));
11083 needs_sdata = (size > 0
11084 && size <= g_switch_value
11085 && rs6000_sdata != SDATA_NONE
11086 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
11088 if (DECL_INITIAL (decl) == 0
11089 || DECL_INITIAL (decl) == error_mark_node)
11091 else if (! readonly)
11098 /* .sdata2 is only for EABI. */
11099 if (sec == 0 && rs6000_sdata != SDATA_EABI)
11105 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
11106 name = (*targetm.strip_name_encoding) (name);
11107 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
11108 len = strlen (name) + strlen (prefix);
11109 string = alloca (len + 1);
11111 sprintf (string, "%s%s", prefix, name);
11113 DECL_SECTION_NAME (decl) = build_string (len, string);
11117 /* If we are referencing a function that is static or is known to be
11118 in this file, make the SYMBOL_REF special. We can use this to indicate
11119 that we can branch to this function without emitting a no-op after the
11120 call. For real AIX calling sequences, we also replace the
11121 function name with the real name (1 or 2 leading .'s), rather than
11122 the function descriptor name. This saves a lot of overriding code
11123 to read the prefixes. */
11126 rs6000_elf_encode_section_info (decl, first)
11133 if (TREE_CODE (decl) == FUNCTION_DECL)
11135 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11136 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
11137 && ! DECL_WEAK (decl))
11138 SYMBOL_REF_FLAG (sym_ref) = 1;
11140 if (DEFAULT_ABI == ABI_AIX)
11142 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
11143 size_t len2 = strlen (XSTR (sym_ref, 0));
11144 char *str = alloca (len1 + len2 + 1);
11147 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
11149 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
11152 else if (rs6000_sdata != SDATA_NONE
11153 && DEFAULT_ABI == ABI_V4
11154 && TREE_CODE (decl) == VAR_DECL)
11156 int size = int_size_in_bytes (TREE_TYPE (decl));
11157 tree section_name = DECL_SECTION_NAME (decl);
11158 const char *name = (char *)0;
11163 if (TREE_CODE (section_name) == STRING_CST)
11165 name = TREE_STRING_POINTER (section_name);
11166 len = TREE_STRING_LENGTH (section_name);
11172 if ((size > 0 && size <= g_switch_value)
11174 && ((len == sizeof (".sdata") - 1
11175 && strcmp (name, ".sdata") == 0)
11176 || (len == sizeof (".sdata2") - 1
11177 && strcmp (name, ".sdata2") == 0)
11178 || (len == sizeof (".sbss") - 1
11179 && strcmp (name, ".sbss") == 0)
11180 || (len == sizeof (".sbss2") - 1
11181 && strcmp (name, ".sbss2") == 0)
11182 || (len == sizeof (".PPC.EMB.sdata0") - 1
11183 && strcmp (name, ".PPC.EMB.sdata0") == 0)
11184 || (len == sizeof (".PPC.EMB.sbss0") - 1
11185 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
11187 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11188 size_t len = strlen (XSTR (sym_ref, 0));
11189 char *str = alloca (len + 2);
11192 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
11193 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
11198 static const char *
11199 rs6000_elf_strip_name_encoding (str)
11202 while (*str == '*' || *str == '@')
11207 #endif /* USING_ELFOS_H */
11210 /* Return a REG that occurs in ADDR with coefficient 1.
11211 ADDR can be effectively incremented by incrementing REG.
11213 r0 is special and we must not select it as an address
11214 register by this routine since our caller will try to
11215 increment the returned register via an "la" instruction. */
11218 find_addr_reg (addr)
11221 while (GET_CODE (addr) == PLUS)
11223 if (GET_CODE (XEXP (addr, 0)) == REG
11224 && REGNO (XEXP (addr, 0)) != 0)
11225 addr = XEXP (addr, 0);
11226 else if (GET_CODE (XEXP (addr, 1)) == REG
11227 && REGNO (XEXP (addr, 1)) != 0)
11228 addr = XEXP (addr, 1);
11229 else if (CONSTANT_P (XEXP (addr, 0)))
11230 addr = XEXP (addr, 1);
11231 else if (CONSTANT_P (XEXP (addr, 1)))
11232 addr = XEXP (addr, 0);
11236 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
11242 rs6000_fatal_bad_address (op)
11245 fatal_insn ("bad address", op);
11248 /* Called to register all of our global variables with the garbage
11252 rs6000_add_gc_roots ()
11254 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
11255 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
11256 toc_hash_mark_table);
11262 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
11263 reference and a constant. */
11266 symbolic_operand (op)
11269 switch (GET_CODE (op))
11276 return (GET_CODE (op) == SYMBOL_REF ||
11277 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
11278 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
11279 && GET_CODE (XEXP (op, 1)) == CONST_INT);
11286 #ifdef RS6000_LONG_BRANCH
11288 static tree stub_list = 0;
11290 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11291 procedure calls to the linked list. */
11294 add_compiler_stub (label_name, function_name, line_number)
11296 tree function_name;
11299 tree stub = build_tree_list (function_name, label_name);
11300 TREE_TYPE (stub) = build_int_2 (line_number, 0);
11301 TREE_CHAIN (stub) = stub_list;
11305 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
11306 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
11307 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
11309 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11310 handling procedure calls from the linked list and initializes the
11314 output_compiler_stub ()
11317 char label_buf[256];
11319 tree tmp_stub, stub;
11322 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11324 fprintf (asm_out_file,
11325 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11327 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11328 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11329 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11330 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11332 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11334 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11337 label_buf[0] = '_';
11338 strcpy (label_buf+1,
11339 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
11342 strcpy (tmp_buf, "lis r12,hi16(");
11343 strcat (tmp_buf, label_buf);
11344 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11345 strcat (tmp_buf, label_buf);
11346 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11347 output_asm_insn (tmp_buf, 0);
11349 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11350 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11351 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11352 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11358 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11359 already there or not. */
11362 no_previous_def (function_name)
11363 tree function_name;
11366 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11367 if (function_name == STUB_FUNCTION_NAME (stub))
11372 /* GET_PREV_LABEL gets the label name from the previous definition of
11376 get_prev_label (function_name)
11377 tree function_name;
11380 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11381 if (function_name == STUB_FUNCTION_NAME (stub))
11382 return STUB_LABEL_NAME (stub);
11386 /* INSN is either a function call or a millicode call. It may have an
11387 unconditional jump in its delay slot.
11389 CALL_DEST is the routine we are calling. */
11392 output_call (insn, call_dest, operand_number)
11395 int operand_number;
11397 static char buf[256];
11398 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11401 tree funname = get_identifier (XSTR (call_dest, 0));
11403 if (no_previous_def (funname))
11406 rtx label_rtx = gen_label_rtx ();
11407 char *label_buf, temp_buf[256];
11408 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11409 CODE_LABEL_NUMBER (label_rtx));
11410 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11411 labelname = get_identifier (label_buf);
11412 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11414 line_number = NOTE_LINE_NUMBER (insn);
11415 add_compiler_stub (labelname, funname, line_number);
11418 labelname = get_prev_label (funname);
11420 sprintf (buf, "jbsr %%z%d,%.246s",
11421 operand_number, IDENTIFIER_POINTER (labelname));
11426 sprintf (buf, "bl %%z%d", operand_number);
11431 #endif /* RS6000_LONG_BRANCH */
11433 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11435 const char *const symbol_ = (SYMBOL); \
11436 char *buffer_ = (BUF); \
11437 if (symbol_[0] == '"') \
11439 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11441 else if (name_needs_quotes(symbol_)) \
11443 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11447 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11452 /* Generate PIC and indirect symbol stubs. */
11455 machopic_output_stub (file, symb, stub)
11457 const char *symb, *stub;
11459 unsigned int length;
11460 char *symbol_name, *lazy_ptr_name;
11461 char *local_label_0;
11462 static int label = 0;
11464 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11465 symb = (*targetm.strip_name_encoding) (symb);
11469 length = strlen (symb);
11470 symbol_name = alloca (length + 32);
11471 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11473 lazy_ptr_name = alloca (length + 32);
11474 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11476 local_label_0 = alloca (length + 32);
11477 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11480 machopic_picsymbol_stub_section ();
11482 machopic_symbol_stub_section ();
11484 fprintf (file, "%s:\n", stub);
11485 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11489 fprintf (file, "\tmflr r0\n");
11490 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11491 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11492 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11493 lazy_ptr_name, local_label_0);
11494 fprintf (file, "\tmtlr r0\n");
11495 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11496 lazy_ptr_name, local_label_0);
11497 fprintf (file, "\tmtctr r12\n");
11498 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11499 lazy_ptr_name, local_label_0);
11500 fprintf (file, "\tbctr\n");
11503 fprintf (file, "non-pure not supported\n");
11505 machopic_lazy_symbol_ptr_section ();
11506 fprintf (file, "%s:\n", lazy_ptr_name);
11507 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11508 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11511 /* Legitimize PIC addresses. If the address is already
11512 position-independent, we return ORIG. Newly generated
11513 position-independent addresses go into a reg. This is REG if non
11514 zero, otherwise we allocate register(s) as necessary. */
11516 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11519 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11521 enum machine_mode mode;
11526 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11527 reg = gen_reg_rtx (Pmode);
11529 if (GET_CODE (orig) == CONST)
11531 if (GET_CODE (XEXP (orig, 0)) == PLUS
11532 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11535 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11538 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11541 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11547 if (GET_CODE (offset) == CONST_INT)
11549 if (SMALL_INT (offset))
11550 return plus_constant (base, INTVAL (offset));
11551 else if (! reload_in_progress && ! reload_completed)
11552 offset = force_reg (Pmode, offset);
11555 rtx mem = force_const_mem (Pmode, orig);
11556 return machopic_legitimize_pic_address (mem, Pmode, reg);
11559 return gen_rtx (PLUS, Pmode, base, offset);
11562 /* Fall back on generic machopic code. */
11563 return machopic_legitimize_pic_address (orig, mode, reg);
11566 /* This is just a placeholder to make linking work without having to
11567 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11568 ever needed for Darwin (not too likely!) this would have to get a
11569 real definition. */
11576 #endif /* TARGET_MACHO */
11579 static unsigned int
11580 rs6000_elf_section_type_flags (decl, name, reloc)
11585 unsigned int flags = default_section_type_flags (decl, name, reloc);
11587 if (TARGET_RELOCATABLE)
11588 flags |= SECTION_WRITE;
11593 /* Record an element in the table of global constructors. SYMBOL is
11594 a SYMBOL_REF of the function to be called; PRIORITY is a number
11595 between 0 and MAX_INIT_PRIORITY.
11597 This differs from default_named_section_asm_out_constructor in
11598 that we have special handling for -mrelocatable. */
11601 rs6000_elf_asm_out_constructor (symbol, priority)
11605 const char *section = ".ctors";
11608 if (priority != DEFAULT_INIT_PRIORITY)
11610 sprintf (buf, ".ctors.%.5u",
11611 /* Invert the numbering so the linker puts us in the proper
11612 order; constructors are run from right to left, and the
11613 linker sorts in increasing order. */
11614 MAX_INIT_PRIORITY - priority);
11618 named_section_flags (section, SECTION_WRITE);
11619 assemble_align (POINTER_SIZE);
11621 if (TARGET_RELOCATABLE)
11623 fputs ("\t.long (", asm_out_file);
11624 output_addr_const (asm_out_file, symbol);
11625 fputs (")@fixup\n", asm_out_file);
11628 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11632 rs6000_elf_asm_out_destructor (symbol, priority)
11636 const char *section = ".dtors";
11639 if (priority != DEFAULT_INIT_PRIORITY)
11641 sprintf (buf, ".dtors.%.5u",
11642 /* Invert the numbering so the linker puts us in the proper
11643 order; constructors are run from right to left, and the
11644 linker sorts in increasing order. */
11645 MAX_INIT_PRIORITY - priority);
11649 named_section_flags (section, SECTION_WRITE);
11650 assemble_align (POINTER_SIZE);
11652 if (TARGET_RELOCATABLE)
11654 fputs ("\t.long (", asm_out_file);
11655 output_addr_const (asm_out_file, symbol);
11656 fputs (")@fixup\n", asm_out_file);
11659 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11665 xcoff_asm_named_section (name, flags)
11667 unsigned int flags ATTRIBUTE_UNUSED;
11669 fprintf (asm_out_file, "\t.csect %s\n", name);
11673 rs6000_xcoff_select_section (exp, reloc, align)
11676 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
11678 if ((TREE_CODE (exp) == STRING_CST
11679 && ! flag_writable_strings)
11680 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
11681 && TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp)
11682 && DECL_INITIAL (exp)
11683 && (DECL_INITIAL (exp) == error_mark_node
11684 || TREE_CONSTANT (DECL_INITIAL (exp)))
11687 if (TREE_PUBLIC (exp))
11688 read_only_data_section ();
11690 read_only_private_data_section ();
11694 if (TREE_PUBLIC (exp))
11697 private_data_section ();
11702 rs6000_xcoff_unique_section (decl, reloc)
11704 int reloc ATTRIBUTE_UNUSED;
11710 if (TREE_CODE (decl) == FUNCTION_DECL)
11712 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
11713 len = strlen (name) + 5;
11714 string = alloca (len + 1);
11715 sprintf (string, ".%s[PR]", name);
11716 DECL_SECTION_NAME (decl) = build_string (len, string);
11720 /* Select section for constant in constant pool.
11722 On RS/6000, all constants are in the private read-only data area.
11723 However, if this is being placed in the TOC it must be output as a
11727 rs6000_xcoff_select_rtx_section (mode, x, align)
11728 enum machine_mode mode;
11730 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
11732 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
11735 read_only_private_data_section ();
11738 /* Remove any trailing [DS] or the like from the symbol name. */
11740 static const char *
11741 rs6000_xcoff_strip_name_encoding (name)
11747 len = strlen (name);
11748 if (name[len - 1] == ']')
11749 return ggc_alloc_string (name, len - 4);
11754 #endif /* TARGET_XCOFF */
11756 /* Note that this is also used for ELF64. */
11759 rs6000_xcoff_encode_section_info (decl, first)
11761 int first ATTRIBUTE_UNUSED;
11763 if (TREE_CODE (decl) == FUNCTION_DECL
11764 && (TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
11765 && ! DECL_WEAK (decl))
11766 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;