1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Whether VRSAVE instructions should be generated. */
78 int rs6000_altivec_vrsave;
80 /* String from -mvrsave= option. */
81 const char *rs6000_altivec_vrsave_string;
83 /* Set to non-zero once AIX common-mode calls have been defined. */
84 static int common_mode_defined;
86 /* Save information from a "cmpxx" operation until the branch or scc is
88 rtx rs6000_compare_op0, rs6000_compare_op1;
89 int rs6000_compare_fp_p;
91 /* Label number of label created for -mrelocatable, to call to so we can
92 get the address of the GOT section */
93 int rs6000_pic_labelno;
96 /* Which abi to adhere to */
97 const char *rs6000_abi_name = RS6000_ABI_NAME;
99 /* Semantics of the small data area */
100 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
102 /* Which small data model to use */
103 const char *rs6000_sdata_name = (char *)0;
105 /* Counter for labels which are to be placed in .fixup. */
106 int fixuplabelno = 0;
109 /* ABI enumeration available for subtarget to use. */
110 enum rs6000_abi rs6000_current_abi;
112 /* ABI string from -mabi= option. */
113 const char *rs6000_abi_string;
116 const char *rs6000_debug_name;
117 int rs6000_debug_stack; /* debug stack applications */
118 int rs6000_debug_arg; /* debug argument handling */
120 /* Flag to say the TOC is initialized */
122 char toc_label_name[10];
124 /* Alias set for saves and restores from the rs6000 stack. */
125 static int rs6000_sr_alias_set;
127 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
128 The only place that looks at this is rs6000_set_default_type_attributes;
129 everywhere else should rely on the presence or absence of a longcall
130 attribute on the function declaration. */
131 int rs6000_default_long_calls;
132 const char *rs6000_longcall_switch;
134 static void rs6000_add_gc_roots PARAMS ((void));
135 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
136 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
137 static void validate_condition_mode
138 PARAMS ((enum rtx_code, enum machine_mode));
139 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
140 static void rs6000_maybe_dead PARAMS ((rtx));
141 static void rs6000_emit_stack_tie PARAMS ((void));
142 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
143 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
144 static unsigned rs6000_hash_constant PARAMS ((rtx));
145 static unsigned toc_hash_function PARAMS ((const void *));
146 static int toc_hash_eq PARAMS ((const void *, const void *));
147 static int toc_hash_mark_entry PARAMS ((void **, void *));
148 static void toc_hash_mark_table PARAMS ((void *));
149 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
150 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
151 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
152 static int rs6000_ra_ever_killed PARAMS ((void));
153 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
154 const struct attribute_spec rs6000_attribute_table[];
155 static void rs6000_set_default_type_attributes PARAMS ((tree));
156 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
157 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
158 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
159 HOST_WIDE_INT, HOST_WIDE_INT));
161 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
163 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
164 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
165 static void rs6000_elf_select_section PARAMS ((tree, int,
166 unsigned HOST_WIDE_INT));
167 static void rs6000_elf_unique_section PARAMS ((tree, int));
168 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
169 unsigned HOST_WIDE_INT));
170 static void rs6000_elf_encode_section_info PARAMS ((tree, int));
171 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
174 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
175 static void rs6000_xcoff_select_section PARAMS ((tree, int,
176 unsigned HOST_WIDE_INT));
177 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
178 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
179 unsigned HOST_WIDE_INT));
180 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
182 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
184 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
185 static int rs6000_adjust_priority PARAMS ((rtx, int));
186 static int rs6000_issue_rate PARAMS ((void));
188 static void rs6000_init_builtins PARAMS ((void));
189 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
190 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
191 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
192 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
193 static void altivec_init_builtins PARAMS ((void));
194 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
195 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
196 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
197 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
198 static void rs6000_parse_abi_options PARAMS ((void));
199 static void rs6000_parse_vrsave_option PARAMS ((void));
200 static int first_altivec_reg_to_save PARAMS ((void));
201 static unsigned int compute_vrsave_mask PARAMS ((void));
202 static void is_altivec_return_reg PARAMS ((rtx, void *));
203 int vrsave_operation PARAMS ((rtx, enum machine_mode));
204 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
205 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
206 static int easy_vector_constant PARAMS ((rtx));
208 /* Default register names. */
209 char rs6000_reg_names[][8] =
211 "0", "1", "2", "3", "4", "5", "6", "7",
212 "8", "9", "10", "11", "12", "13", "14", "15",
213 "16", "17", "18", "19", "20", "21", "22", "23",
214 "24", "25", "26", "27", "28", "29", "30", "31",
215 "0", "1", "2", "3", "4", "5", "6", "7",
216 "8", "9", "10", "11", "12", "13", "14", "15",
217 "16", "17", "18", "19", "20", "21", "22", "23",
218 "24", "25", "26", "27", "28", "29", "30", "31",
219 "mq", "lr", "ctr","ap",
220 "0", "1", "2", "3", "4", "5", "6", "7",
222 /* AltiVec registers. */
223 "0", "1", "2", "3", "4", "5", "6", "7",
224 "8", "9", "10", "11", "12", "13", "14", "15",
225 "16", "17", "18", "19", "20", "21", "22", "23",
226 "24", "25", "26", "27", "28", "29", "30", "31",
230 #ifdef TARGET_REGNAMES
231 static const char alt_reg_names[][8] =
233 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
234 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
235 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
236 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
237 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
238 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
239 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
240 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
241 "mq", "lr", "ctr", "ap",
242 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
244 /* AltiVec registers. */
245 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
246 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
247 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
248 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
253 #ifndef MASK_STRICT_ALIGN
254 #define MASK_STRICT_ALIGN 0
257 /* Initialize the GCC target structure. */
258 #undef TARGET_ATTRIBUTE_TABLE
259 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
260 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
261 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
263 #undef TARGET_ASM_ALIGNED_DI_OP
264 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
266 /* Default unaligned ops are only provided for ELF. Find the ops needed
267 for non-ELF systems. */
268 #ifndef OBJECT_FORMAT_ELF
270 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
272 #undef TARGET_ASM_UNALIGNED_HI_OP
273 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
274 #undef TARGET_ASM_UNALIGNED_SI_OP
275 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
276 #undef TARGET_ASM_UNALIGNED_DI_OP
277 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
280 #undef TARGET_ASM_UNALIGNED_HI_OP
281 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
282 #undef TARGET_ASM_UNALIGNED_SI_OP
283 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
287 /* This hook deals with fixups for relocatable code and DI-mode objects
289 #undef TARGET_ASM_INTEGER
290 #define TARGET_ASM_INTEGER rs6000_assemble_integer
292 #undef TARGET_ASM_FUNCTION_PROLOGUE
293 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
294 #undef TARGET_ASM_FUNCTION_EPILOGUE
295 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
298 #undef TARGET_SECTION_TYPE_FLAGS
299 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
302 #undef TARGET_SCHED_ISSUE_RATE
303 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
304 #undef TARGET_SCHED_ADJUST_COST
305 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
306 #undef TARGET_SCHED_ADJUST_PRIORITY
307 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
309 #undef TARGET_INIT_BUILTINS
310 #define TARGET_INIT_BUILTINS rs6000_init_builtins
312 #undef TARGET_EXPAND_BUILTIN
313 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
315 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
316 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
318 struct gcc_target targetm = TARGET_INITIALIZER;
320 /* Override command line options. Mostly we process the processor
321 type and sometimes adjust other TARGET_ options. */
324 rs6000_override_options (default_cpu)
325 const char *default_cpu;
328 struct rs6000_cpu_select *ptr;
330 /* Simplify the entries below by making a mask for any POWER
331 variant and any PowerPC variant. */
333 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
334 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
335 | MASK_PPC_GFXOPT | MASK_POWERPC64)
336 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
340 const char *const name; /* Canonical processor name. */
341 const enum processor_type processor; /* Processor type enum value. */
342 const int target_enable; /* Target flags to enable. */
343 const int target_disable; /* Target flags to disable. */
344 } const processor_target_table[]
345 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
346 POWER_MASKS | POWERPC_MASKS},
347 {"power", PROCESSOR_POWER,
348 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
349 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
350 {"power2", PROCESSOR_POWER,
351 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
352 POWERPC_MASKS | MASK_NEW_MNEMONICS},
353 {"power3", PROCESSOR_PPC630,
354 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
355 POWER_MASKS | MASK_PPC_GPOPT},
356 {"power4", PROCESSOR_POWER4,
357 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
358 POWER_MASKS | MASK_PPC_GPOPT},
359 {"powerpc", PROCESSOR_POWERPC,
360 MASK_POWERPC | MASK_NEW_MNEMONICS,
361 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
362 {"powerpc64", PROCESSOR_POWERPC64,
363 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
364 POWER_MASKS | POWERPC_OPT_MASKS},
365 {"rios", PROCESSOR_RIOS1,
366 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
367 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
368 {"rios1", PROCESSOR_RIOS1,
369 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
370 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
371 {"rsc", PROCESSOR_PPC601,
372 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
373 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
374 {"rsc1", PROCESSOR_PPC601,
375 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
376 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
377 {"rios2", PROCESSOR_RIOS2,
378 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
379 POWERPC_MASKS | MASK_NEW_MNEMONICS},
380 {"rs64a", PROCESSOR_RS64A,
381 MASK_POWERPC | MASK_NEW_MNEMONICS,
382 POWER_MASKS | POWERPC_OPT_MASKS},
383 {"401", PROCESSOR_PPC403,
384 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
385 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
386 {"403", PROCESSOR_PPC403,
387 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
388 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
389 {"405", PROCESSOR_PPC405,
390 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
391 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
392 {"505", PROCESSOR_MPCCORE,
393 MASK_POWERPC | MASK_NEW_MNEMONICS,
394 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
395 {"601", PROCESSOR_PPC601,
396 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
397 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
398 {"602", PROCESSOR_PPC603,
399 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
400 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
401 {"603", PROCESSOR_PPC603,
402 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
403 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
404 {"603e", PROCESSOR_PPC603,
405 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
406 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
407 {"ec603e", PROCESSOR_PPC603,
408 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
409 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
410 {"604", PROCESSOR_PPC604,
411 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
412 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
413 {"604e", PROCESSOR_PPC604e,
414 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
415 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
416 {"620", PROCESSOR_PPC620,
417 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
418 POWER_MASKS | MASK_PPC_GPOPT},
419 {"630", PROCESSOR_PPC630,
420 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
421 POWER_MASKS | MASK_PPC_GPOPT},
422 {"740", PROCESSOR_PPC750,
423 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
424 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
425 {"750", PROCESSOR_PPC750,
426 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
427 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
428 {"7400", PROCESSOR_PPC7400,
429 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
430 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
431 {"7450", PROCESSOR_PPC7450,
432 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
433 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
434 {"801", PROCESSOR_MPCCORE,
435 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
436 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
437 {"821", PROCESSOR_MPCCORE,
438 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
439 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
440 {"823", PROCESSOR_MPCCORE,
441 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
442 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
443 {"860", PROCESSOR_MPCCORE,
444 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
445 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
447 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
449 /* Save current -mmultiple/-mno-multiple status. */
450 int multiple = TARGET_MULTIPLE;
451 /* Save current -mstring/-mno-string status. */
452 int string = TARGET_STRING;
454 /* Identify the processor type. */
455 rs6000_select[0].string = default_cpu;
456 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
458 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
460 ptr = &rs6000_select[i];
461 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
463 for (j = 0; j < ptt_size; j++)
464 if (! strcmp (ptr->string, processor_target_table[j].name))
467 rs6000_cpu = processor_target_table[j].processor;
471 target_flags |= processor_target_table[j].target_enable;
472 target_flags &= ~processor_target_table[j].target_disable;
478 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
482 /* If we are optimizing big endian systems for space, use the store
483 multiple instructions. */
484 if (BYTES_BIG_ENDIAN && optimize_size)
485 target_flags |= MASK_MULTIPLE;
487 /* If -mmultiple or -mno-multiple was explicitly used, don't
488 override with the processor default */
489 if (TARGET_MULTIPLE_SET)
490 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
492 /* If -mstring or -mno-string was explicitly used, don't override
493 with the processor default. */
494 if (TARGET_STRING_SET)
495 target_flags = (target_flags & ~MASK_STRING) | string;
497 /* Don't allow -mmultiple or -mstring on little endian systems
498 unless the cpu is a 750, because the hardware doesn't support the
499 instructions used in little endian mode, and causes an alignment
500 trap. The 750 does not cause an alignment trap (except when the
501 target is unaligned). */
503 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
507 target_flags &= ~MASK_MULTIPLE;
508 if (TARGET_MULTIPLE_SET)
509 warning ("-mmultiple is not supported on little endian systems");
514 target_flags &= ~MASK_STRING;
515 if (TARGET_STRING_SET)
516 warning ("-mstring is not supported on little endian systems");
520 if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
525 warning ("-f%s ignored (all code is position independent)",
526 (flag_pic > 1) ? "PIC" : "pic");
529 #ifdef XCOFF_DEBUGGING_INFO
530 if (flag_function_sections && (write_symbols != NO_DEBUG)
531 && DEFAULT_ABI == ABI_AIX)
533 warning ("-ffunction-sections disabled on AIX when debugging");
534 flag_function_sections = 0;
537 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
539 warning ("-fdata-sections not supported on AIX");
540 flag_data_sections = 0;
544 /* Set debug flags */
545 if (rs6000_debug_name)
547 if (! strcmp (rs6000_debug_name, "all"))
548 rs6000_debug_stack = rs6000_debug_arg = 1;
549 else if (! strcmp (rs6000_debug_name, "stack"))
550 rs6000_debug_stack = 1;
551 else if (! strcmp (rs6000_debug_name, "arg"))
552 rs6000_debug_arg = 1;
554 error ("unknown -mdebug-%s switch", rs6000_debug_name);
557 /* Set size of long double */
558 rs6000_long_double_type_size = 64;
559 if (rs6000_long_double_size_string)
562 int size = strtol (rs6000_long_double_size_string, &tail, 10);
563 if (*tail != '\0' || (size != 64 && size != 128))
564 error ("Unknown switch -mlong-double-%s",
565 rs6000_long_double_size_string);
567 rs6000_long_double_type_size = size;
570 /* Handle -mabi= options. */
571 rs6000_parse_abi_options ();
573 /* Handle -mvrsave= option. */
574 rs6000_parse_vrsave_option ();
576 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
577 using TARGET_OPTIONS to handle a toggle switch, but we're out of
578 bits in target_flags so TARGET_SWITCHES cannot be used.
579 Assumption here is that rs6000_longcall_switch points into the
580 text of the complete option, rather than being a copy, so we can
581 scan back for the presence or absence of the no- modifier. */
582 if (rs6000_longcall_switch)
584 const char *base = rs6000_longcall_switch;
585 while (base[-1] != 'm') base--;
587 if (*rs6000_longcall_switch != '\0')
588 error ("invalid option `%s'", base);
589 rs6000_default_long_calls = (base[0] != 'n');
592 #ifdef TARGET_REGNAMES
593 /* If the user desires alternate register names, copy in the
594 alternate names now. */
596 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
599 #ifdef SUBTARGET_OVERRIDE_OPTIONS
600 SUBTARGET_OVERRIDE_OPTIONS;
602 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
603 SUBSUBTARGET_OVERRIDE_OPTIONS;
606 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
607 If -maix-struct-return or -msvr4-struct-return was explicitly
608 used, don't override with the ABI default. */
609 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
611 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
612 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
614 target_flags |= MASK_AIX_STRUCT_RET;
617 /* Register global variables with the garbage collector. */
618 rs6000_add_gc_roots ();
620 /* Allocate an alias set for register saves & restores from stack. */
621 rs6000_sr_alias_set = new_alias_set ();
624 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
626 /* We can only guarantee the availability of DI pseudo-ops when
627 assembling for 64-bit targets. */
630 targetm.asm_out.aligned_op.di = NULL;
631 targetm.asm_out.unaligned_op.di = NULL;
634 /* Arrange to save and restore machine status around nested functions. */
635 init_machine_status = rs6000_init_machine_status;
638 /* Handle -mvrsave= options. */
640 rs6000_parse_vrsave_option ()
642 /* Generate VRSAVE instructions by default. */
643 if (rs6000_altivec_vrsave_string == 0
644 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
645 rs6000_altivec_vrsave = 1;
646 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
647 rs6000_altivec_vrsave = 0;
649 error ("unknown -mvrsave= option specified: '%s'",
650 rs6000_altivec_vrsave_string);
653 /* Handle -mabi= options. */
655 rs6000_parse_abi_options ()
657 if (rs6000_abi_string == 0)
659 else if (! strcmp (rs6000_abi_string, "altivec"))
660 rs6000_altivec_abi = 1;
661 else if (! strcmp (rs6000_abi_string, "no-altivec"))
662 rs6000_altivec_abi = 0;
664 error ("unknown ABI specified: '%s'", rs6000_abi_string);
668 optimization_options (level, size)
669 int level ATTRIBUTE_UNUSED;
670 int size ATTRIBUTE_UNUSED;
674 /* Do anything needed at the start of the asm file. */
677 rs6000_file_start (file, default_cpu)
679 const char *default_cpu;
683 const char *start = buffer;
684 struct rs6000_cpu_select *ptr;
686 if (flag_verbose_asm)
688 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
689 rs6000_select[0].string = default_cpu;
691 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
693 ptr = &rs6000_select[i];
694 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
696 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
702 switch (rs6000_sdata)
704 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
705 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
706 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
707 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
710 if (rs6000_sdata && g_switch_value)
712 fprintf (file, "%s -G %d", start, g_switch_value);
722 /* Return non-zero if this function is known to have a null epilogue. */
727 if (reload_completed)
729 rs6000_stack_t *info = rs6000_stack_info ();
731 if (info->first_gp_reg_save == 32
732 && info->first_fp_reg_save == 64
733 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
736 && info->vrsave_mask == 0
744 /* Returns 1 always. */
747 any_operand (op, mode)
748 rtx op ATTRIBUTE_UNUSED;
749 enum machine_mode mode ATTRIBUTE_UNUSED;
754 /* Returns 1 if op is the count register. */
756 count_register_operand (op, mode)
758 enum machine_mode mode ATTRIBUTE_UNUSED;
760 if (GET_CODE (op) != REG)
763 if (REGNO (op) == COUNT_REGISTER_REGNUM)
766 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
772 /* Returns 1 if op is an altivec register. */
774 altivec_register_operand (op, mode)
776 enum machine_mode mode ATTRIBUTE_UNUSED;
779 return (register_operand (op, mode)
780 && (GET_CODE (op) != REG
781 || REGNO (op) > FIRST_PSEUDO_REGISTER
782 || ALTIVEC_REGNO_P (REGNO (op))));
786 xer_operand (op, mode)
788 enum machine_mode mode ATTRIBUTE_UNUSED;
790 if (GET_CODE (op) != REG)
793 if (XER_REGNO_P (REGNO (op)))
799 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
800 by such constants completes more quickly. */
803 s8bit_cint_operand (op, mode)
805 enum machine_mode mode ATTRIBUTE_UNUSED;
807 return ( GET_CODE (op) == CONST_INT
808 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
811 /* Return 1 if OP is a constant that can fit in a D field. */
814 short_cint_operand (op, mode)
816 enum machine_mode mode ATTRIBUTE_UNUSED;
818 return (GET_CODE (op) == CONST_INT
819 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
822 /* Similar for an unsigned D field. */
825 u_short_cint_operand (op, mode)
827 enum machine_mode mode ATTRIBUTE_UNUSED;
829 return (GET_CODE (op) == CONST_INT
830 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
833 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
836 non_short_cint_operand (op, mode)
838 enum machine_mode mode ATTRIBUTE_UNUSED;
840 return (GET_CODE (op) == CONST_INT
841 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
844 /* Returns 1 if OP is a CONST_INT that is a positive value
845 and an exact power of 2. */
848 exact_log2_cint_operand (op, mode)
850 enum machine_mode mode ATTRIBUTE_UNUSED;
852 return (GET_CODE (op) == CONST_INT
854 && exact_log2 (INTVAL (op)) >= 0);
857 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
861 gpc_reg_operand (op, mode)
863 enum machine_mode mode;
865 return (register_operand (op, mode)
866 && (GET_CODE (op) != REG
867 || (REGNO (op) >= ARG_POINTER_REGNUM
868 && !XER_REGNO_P (REGNO (op)))
869 || REGNO (op) < MQ_REGNO));
872 /* Returns 1 if OP is either a pseudo-register or a register denoting a
876 cc_reg_operand (op, mode)
878 enum machine_mode mode;
880 return (register_operand (op, mode)
881 && (GET_CODE (op) != REG
882 || REGNO (op) >= FIRST_PSEUDO_REGISTER
883 || CR_REGNO_P (REGNO (op))));
886 /* Returns 1 if OP is either a pseudo-register or a register denoting a
887 CR field that isn't CR0. */
890 cc_reg_not_cr0_operand (op, mode)
892 enum machine_mode mode;
894 return (register_operand (op, mode)
895 && (GET_CODE (op) != REG
896 || REGNO (op) >= FIRST_PSEUDO_REGISTER
897 || CR_REGNO_NOT_CR0_P (REGNO (op))));
900 /* Returns 1 if OP is either a constant integer valid for a D-field or
901 a non-special register. If a register, it must be in the proper
902 mode unless MODE is VOIDmode. */
905 reg_or_short_operand (op, mode)
907 enum machine_mode mode;
909 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
912 /* Similar, except check if the negation of the constant would be
913 valid for a D-field. */
916 reg_or_neg_short_operand (op, mode)
918 enum machine_mode mode;
920 if (GET_CODE (op) == CONST_INT)
921 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
923 return gpc_reg_operand (op, mode);
926 /* Returns 1 if OP is either a constant integer valid for a DS-field or
927 a non-special register. If a register, it must be in the proper
928 mode unless MODE is VOIDmode. */
931 reg_or_aligned_short_operand (op, mode)
933 enum machine_mode mode;
935 if (gpc_reg_operand (op, mode))
937 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
944 /* Return 1 if the operand is either a register or an integer whose
945 high-order 16 bits are zero. */
948 reg_or_u_short_operand (op, mode)
950 enum machine_mode mode;
952 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
955 /* Return 1 is the operand is either a non-special register or ANY
959 reg_or_cint_operand (op, mode)
961 enum machine_mode mode;
963 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
966 /* Return 1 is the operand is either a non-special register or ANY
967 32-bit signed constant integer. */
970 reg_or_arith_cint_operand (op, mode)
972 enum machine_mode mode;
974 return (gpc_reg_operand (op, mode)
975 || (GET_CODE (op) == CONST_INT
976 #if HOST_BITS_PER_WIDE_INT != 32
977 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
978 < (unsigned HOST_WIDE_INT) 0x100000000ll)
983 /* Return 1 is the operand is either a non-special register or a 32-bit
984 signed constant integer valid for 64-bit addition. */
987 reg_or_add_cint64_operand (op, mode)
989 enum machine_mode mode;
991 return (gpc_reg_operand (op, mode)
992 || (GET_CODE (op) == CONST_INT
993 #if HOST_BITS_PER_WIDE_INT == 32
994 && INTVAL (op) < 0x7fff8000
996 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1002 /* Return 1 is the operand is either a non-special register or a 32-bit
1003 signed constant integer valid for 64-bit subtraction. */
1006 reg_or_sub_cint64_operand (op, mode)
1008 enum machine_mode mode;
1010 return (gpc_reg_operand (op, mode)
1011 || (GET_CODE (op) == CONST_INT
1012 #if HOST_BITS_PER_WIDE_INT == 32
1013 && (- INTVAL (op)) < 0x7fff8000
1015 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1021 /* Return 1 is the operand is either a non-special register or ANY
1022 32-bit unsigned constant integer. */
1025 reg_or_logical_cint_operand (op, mode)
1027 enum machine_mode mode;
1029 if (GET_CODE (op) == CONST_INT)
1031 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1033 if (GET_MODE_BITSIZE (mode) <= 32)
1036 if (INTVAL (op) < 0)
1040 return ((INTVAL (op) & GET_MODE_MASK (mode)
1041 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1043 else if (GET_CODE (op) == CONST_DOUBLE)
1045 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1049 return CONST_DOUBLE_HIGH (op) == 0;
1052 return gpc_reg_operand (op, mode);
1055 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1058 got_operand (op, mode)
1060 enum machine_mode mode ATTRIBUTE_UNUSED;
1062 return (GET_CODE (op) == SYMBOL_REF
1063 || GET_CODE (op) == CONST
1064 || GET_CODE (op) == LABEL_REF);
1067 /* Return 1 if the operand is a simple references that can be loaded via
1068 the GOT (labels involving addition aren't allowed). */
1071 got_no_const_operand (op, mode)
1073 enum machine_mode mode ATTRIBUTE_UNUSED;
1075 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1078 /* Return the number of instructions it takes to form a constant in an
1079 integer register. */
1082 num_insns_constant_wide (value)
1083 HOST_WIDE_INT value;
1085 /* signed constant loadable with {cal|addi} */
1086 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1089 /* constant loadable with {cau|addis} */
1090 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1093 #if HOST_BITS_PER_WIDE_INT == 64
1094 else if (TARGET_POWERPC64)
1096 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1097 HOST_WIDE_INT high = value >> 31;
1099 if (high == 0 || high == -1)
1105 return num_insns_constant_wide (high) + 1;
1107 return (num_insns_constant_wide (high)
1108 + num_insns_constant_wide (low) + 1);
1117 num_insns_constant (op, mode)
1119 enum machine_mode mode;
1121 if (GET_CODE (op) == CONST_INT)
1123 #if HOST_BITS_PER_WIDE_INT == 64
1124 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1125 && mask64_operand (op, mode))
1129 return num_insns_constant_wide (INTVAL (op));
1132 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1137 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1138 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1139 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1142 else if (GET_CODE (op) == CONST_DOUBLE)
1148 int endian = (WORDS_BIG_ENDIAN == 0);
1150 if (mode == VOIDmode || mode == DImode)
1152 high = CONST_DOUBLE_HIGH (op);
1153 low = CONST_DOUBLE_LOW (op);
1157 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1158 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1160 low = l[1 - endian];
1164 return (num_insns_constant_wide (low)
1165 + num_insns_constant_wide (high));
1169 if (high == 0 && low >= 0)
1170 return num_insns_constant_wide (low);
1172 else if (high == -1 && low < 0)
1173 return num_insns_constant_wide (low);
1175 else if (mask64_operand (op, mode))
1179 return num_insns_constant_wide (high) + 1;
1182 return (num_insns_constant_wide (high)
1183 + num_insns_constant_wide (low) + 1);
1191 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1192 register with one instruction per word. We only do this if we can
1193 safely read CONST_DOUBLE_{LOW,HIGH}. */
1196 easy_fp_constant (op, mode)
1198 enum machine_mode mode;
1200 if (GET_CODE (op) != CONST_DOUBLE
1201 || GET_MODE (op) != mode
1202 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1205 /* Consider all constants with -msoft-float to be easy. */
1206 if (TARGET_SOFT_FLOAT && mode != DImode)
1209 /* If we are using V.4 style PIC, consider all constants to be hard. */
1210 if (flag_pic && DEFAULT_ABI == ABI_V4)
1213 #ifdef TARGET_RELOCATABLE
1214 /* Similarly if we are using -mrelocatable, consider all constants
1216 if (TARGET_RELOCATABLE)
1225 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1226 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1228 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1229 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1232 else if (mode == SFmode)
1237 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1238 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1240 return num_insns_constant_wide (l) == 1;
1243 else if (mode == DImode)
1244 return ((TARGET_POWERPC64
1245 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1246 || (num_insns_constant (op, DImode) <= 2));
1248 else if (mode == SImode)
1254 /* Return 1 if the operand is a CONST_INT and can be put into a
1255 register with one instruction. */
1258 easy_vector_constant (op)
1264 if (GET_CODE (op) != CONST_VECTOR)
1267 units = CONST_VECTOR_NUNITS (op);
1269 /* We can generate 0 easily. Look for that. */
1270 for (i = 0; i < units; ++i)
1272 elt = CONST_VECTOR_ELT (op, i);
1274 /* We could probably simplify this by just checking for equality
1275 with CONST0_RTX for the current mode, but let's be safe
1278 switch (GET_CODE (elt))
1281 if (INTVAL (elt) != 0)
1285 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1293 /* We could probably generate a few other constants trivially, but
1294 gcc doesn't generate them yet. FIXME later. */
1298 /* Return 1 if the operand is the constant 0. This works for scalars
1299 as well as vectors. */
1301 zero_constant (op, mode)
1303 enum machine_mode mode;
1305 return op == CONST0_RTX (mode);
1308 /* Return 1 if the operand is 0.0. */
1310 zero_fp_constant (op, mode)
1312 enum machine_mode mode;
1314 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1317 /* Return 1 if the operand is in volatile memory. Note that during
1318 the RTL generation phase, memory_operand does not return TRUE for
1319 volatile memory references. So this function allows us to
1320 recognize volatile references where its safe. */
1323 volatile_mem_operand (op, mode)
1325 enum machine_mode mode;
1327 if (GET_CODE (op) != MEM)
1330 if (!MEM_VOLATILE_P (op))
1333 if (mode != GET_MODE (op))
1336 if (reload_completed)
1337 return memory_operand (op, mode);
1339 if (reload_in_progress)
1340 return strict_memory_address_p (mode, XEXP (op, 0));
1342 return memory_address_p (mode, XEXP (op, 0));
1345 /* Return 1 if the operand is an offsettable memory operand. */
1348 offsettable_mem_operand (op, mode)
1350 enum machine_mode mode;
1352 return ((GET_CODE (op) == MEM)
1353 && offsettable_address_p (reload_completed || reload_in_progress,
1354 mode, XEXP (op, 0)));
1357 /* Return 1 if the operand is either an easy FP constant (see above) or
1361 mem_or_easy_const_operand (op, mode)
1363 enum machine_mode mode;
1365 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1368 /* Return 1 if the operand is either a non-special register or an item
1369 that can be used as the operand of a `mode' add insn. */
1372 add_operand (op, mode)
1374 enum machine_mode mode;
1376 if (GET_CODE (op) == CONST_INT)
1377 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1378 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1380 return gpc_reg_operand (op, mode);
1383 /* Return 1 if OP is a constant but not a valid add_operand. */
1386 non_add_cint_operand (op, mode)
1388 enum machine_mode mode ATTRIBUTE_UNUSED;
1390 return (GET_CODE (op) == CONST_INT
1391 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1392 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1395 /* Return 1 if the operand is a non-special register or a constant that
1396 can be used as the operand of an OR or XOR insn on the RS/6000. */
1399 logical_operand (op, mode)
1401 enum machine_mode mode;
1403 HOST_WIDE_INT opl, oph;
1405 if (gpc_reg_operand (op, mode))
1408 if (GET_CODE (op) == CONST_INT)
1410 opl = INTVAL (op) & GET_MODE_MASK (mode);
1412 #if HOST_BITS_PER_WIDE_INT <= 32
1413 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1417 else if (GET_CODE (op) == CONST_DOUBLE)
1419 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1422 opl = CONST_DOUBLE_LOW (op);
1423 oph = CONST_DOUBLE_HIGH (op);
1430 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1431 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1434 /* Return 1 if C is a constant that is not a logical operand (as
1435 above), but could be split into one. */
1438 non_logical_cint_operand (op, mode)
1440 enum machine_mode mode;
1442 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1443 && ! logical_operand (op, mode)
1444 && reg_or_logical_cint_operand (op, mode));
1447 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1448 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1449 Reject all ones and all zeros, since these should have been optimized
1450 away and confuse the making of MB and ME. */
1453 mask_operand (op, mode)
1455 enum machine_mode mode ATTRIBUTE_UNUSED;
1457 HOST_WIDE_INT c, lsb;
1459 if (GET_CODE (op) != CONST_INT)
1464 /* Fail in 64-bit mode if the mask wraps around because the upper
1465 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1466 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1469 /* We don't change the number of transitions by inverting,
1470 so make sure we start with the LS bit zero. */
1474 /* Reject all zeros or all ones. */
1478 /* Find the first transition. */
1481 /* Invert to look for a second transition. */
1484 /* Erase first transition. */
1487 /* Find the second transition (if any). */
1490 /* Match if all the bits above are 1's (or c is zero). */
1494 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1495 It is if there are no more than one 1->0 or 0->1 transitions.
1496 Reject all ones and all zeros, since these should have been optimized
1497 away and confuse the making of MB and ME. */
1500 mask64_operand (op, mode)
1502 enum machine_mode mode;
1504 if (GET_CODE (op) == CONST_INT)
1506 HOST_WIDE_INT c, lsb;
1508 /* We don't change the number of transitions by inverting,
1509 so make sure we start with the LS bit zero. */
1514 /* Reject all zeros or all ones. */
1518 /* Find the transition, and check that all bits above are 1's. */
1522 else if (GET_CODE (op) == CONST_DOUBLE
1523 && (mode == VOIDmode || mode == DImode))
1525 HOST_WIDE_INT low, high, lsb;
1527 if (HOST_BITS_PER_WIDE_INT < 64)
1528 high = CONST_DOUBLE_HIGH (op);
1530 low = CONST_DOUBLE_LOW (op);
1533 if (HOST_BITS_PER_WIDE_INT < 64)
1540 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1544 return high == -lsb;
1548 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1554 /* Return 1 if the operand is either a non-special register or a constant
1555 that can be used as the operand of a PowerPC64 logical AND insn. */
1558 and64_operand (op, mode)
1560 enum machine_mode mode;
1562 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1563 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1565 return (logical_operand (op, mode) || mask64_operand (op, mode));
1568 /* Return 1 if the operand is either a non-special register or a
1569 constant that can be used as the operand of an RS/6000 logical AND insn. */
1572 and_operand (op, mode)
1574 enum machine_mode mode;
1576 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1577 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1579 return (logical_operand (op, mode) || mask_operand (op, mode));
1582 /* Return 1 if the operand is a general register or memory operand. */
1585 reg_or_mem_operand (op, mode)
1587 enum machine_mode mode;
1589 return (gpc_reg_operand (op, mode)
1590 || memory_operand (op, mode)
1591 || volatile_mem_operand (op, mode));
1594 /* Return 1 if the operand is a general register or memory operand without
1595 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1599 lwa_operand (op, mode)
1601 enum machine_mode mode;
1605 if (reload_completed && GET_CODE (inner) == SUBREG)
1606 inner = SUBREG_REG (inner);
1608 return gpc_reg_operand (inner, mode)
1609 || (memory_operand (inner, mode)
1610 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1611 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1612 && (GET_CODE (XEXP (inner, 0)) != PLUS
1613 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1614 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1617 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1620 symbol_ref_operand (op, mode)
1622 enum machine_mode mode;
1624 if (mode != VOIDmode && GET_MODE (op) != mode)
1627 return (GET_CODE (op) == SYMBOL_REF);
1630 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1631 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1634 call_operand (op, mode)
1636 enum machine_mode mode;
1638 if (mode != VOIDmode && GET_MODE (op) != mode)
1641 return (GET_CODE (op) == SYMBOL_REF
1642 || (GET_CODE (op) == REG
1643 && (REGNO (op) == LINK_REGISTER_REGNUM
1644 || REGNO (op) == COUNT_REGISTER_REGNUM
1645 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1648 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1649 this file and the function is not weakly defined. */
1652 current_file_function_operand (op, mode)
1654 enum machine_mode mode ATTRIBUTE_UNUSED;
1656 return (GET_CODE (op) == SYMBOL_REF
1657 && (SYMBOL_REF_FLAG (op)
1658 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1659 && ! DECL_WEAK (current_function_decl))));
1662 /* Return 1 if this operand is a valid input for a move insn. */
1665 input_operand (op, mode)
1667 enum machine_mode mode;
1669 /* Memory is always valid. */
1670 if (memory_operand (op, mode))
1673 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1674 if (GET_CODE (op) == CONSTANT_P_RTX)
1677 /* For floating-point, easy constants are valid. */
1678 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1680 && easy_fp_constant (op, mode))
1683 /* Allow any integer constant. */
1684 if (GET_MODE_CLASS (mode) == MODE_INT
1685 && (GET_CODE (op) == CONST_INT
1686 || GET_CODE (op) == CONST_DOUBLE))
1689 /* For floating-point or multi-word mode, the only remaining valid type
1691 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1692 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1693 return register_operand (op, mode);
1695 /* The only cases left are integral modes one word or smaller (we
1696 do not get called for MODE_CC values). These can be in any
1698 if (register_operand (op, mode))
1701 /* A SYMBOL_REF referring to the TOC is valid. */
1702 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1705 /* A constant pool expression (relative to the TOC) is valid */
1706 if (TOC_RELATIVE_EXPR_P (op))
1709 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1711 if (DEFAULT_ABI == ABI_V4
1712 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1713 && small_data_operand (op, Pmode))
1719 /* Return 1 for an operand in small memory on V.4/eabi. */
1722 small_data_operand (op, mode)
1723 rtx op ATTRIBUTE_UNUSED;
1724 enum machine_mode mode ATTRIBUTE_UNUSED;
1729 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1732 if (DEFAULT_ABI != ABI_V4)
1735 if (GET_CODE (op) == SYMBOL_REF)
1738 else if (GET_CODE (op) != CONST
1739 || GET_CODE (XEXP (op, 0)) != PLUS
1740 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1741 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1746 rtx sum = XEXP (op, 0);
1747 HOST_WIDE_INT summand;
1749 /* We have to be careful here, because it is the referenced address
1750 that must be 32k from _SDA_BASE_, not just the symbol. */
1751 summand = INTVAL (XEXP (sum, 1));
1752 if (summand < 0 || summand > g_switch_value)
1755 sym_ref = XEXP (sum, 0);
1758 if (*XSTR (sym_ref, 0) != '@')
1769 constant_pool_expr_1 (op, have_sym, have_toc)
1774 switch (GET_CODE(op))
1777 if (CONSTANT_POOL_ADDRESS_P (op))
1779 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1787 else if (! strcmp (XSTR (op, 0), toc_label_name))
1796 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
1797 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
1799 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1808 constant_pool_expr_p (op)
1813 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1817 toc_relative_expr_p (op)
1822 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1825 /* Try machine-dependent ways of modifying an illegitimate address
1826 to be legitimate. If we find one, return the new, valid address.
1827 This is used from only one place: `memory_address' in explow.c.
1829 OLDX is the address as it was before break_out_memory_refs was
1830 called. In some cases it is useful to look at this to decide what
1833 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1835 It is always safe for this function to do nothing. It exists to
1836 recognize opportunities to optimize the output.
1838 On RS/6000, first check for the sum of a register with a constant
1839 integer that is out of range. If so, generate code to add the
1840 constant with the low-order 16 bits masked to the register and force
1841 this result into another register (this can be done with `cau').
1842 Then generate an address of REG+(CONST&0xffff), allowing for the
1843 possibility of bit 16 being a one.
1845 Then check for the sum of a register and something not constant, try to
1846 load the other things into a register and return the sum. */
1848 rs6000_legitimize_address (x, oldx, mode)
1850 rtx oldx ATTRIBUTE_UNUSED;
1851 enum machine_mode mode;
1853 if (GET_CODE (x) == PLUS
1854 && GET_CODE (XEXP (x, 0)) == REG
1855 && GET_CODE (XEXP (x, 1)) == CONST_INT
1856 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1858 HOST_WIDE_INT high_int, low_int;
1860 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
1861 high_int = INTVAL (XEXP (x, 1)) - low_int;
1862 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1863 GEN_INT (high_int)), 0);
1864 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1866 else if (GET_CODE (x) == PLUS
1867 && GET_CODE (XEXP (x, 0)) == REG
1868 && GET_CODE (XEXP (x, 1)) != CONST_INT
1869 && GET_MODE_NUNITS (mode) == 1
1870 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1871 && (TARGET_POWERPC64 || mode != DImode)
1874 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1875 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1877 else if (ALTIVEC_VECTOR_MODE (mode))
1881 /* Make sure both operands are registers. */
1882 if (GET_CODE (x) == PLUS)
1883 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1884 force_reg (Pmode, XEXP (x, 1)));
1886 reg = force_reg (Pmode, x);
1889 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1890 && GET_CODE (x) != CONST_INT
1891 && GET_CODE (x) != CONST_DOUBLE
1893 && GET_MODE_NUNITS (mode) == 1
1894 && (GET_MODE_BITSIZE (mode) <= 32
1895 || (TARGET_HARD_FLOAT && mode == DFmode)))
1897 rtx reg = gen_reg_rtx (Pmode);
1898 emit_insn (gen_elf_high (reg, (x)));
1899 return gen_rtx_LO_SUM (Pmode, reg, (x));
1901 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1903 && GET_CODE (x) != CONST_INT
1904 && GET_CODE (x) != CONST_DOUBLE
1906 && (TARGET_HARD_FLOAT || mode != DFmode)
1910 rtx reg = gen_reg_rtx (Pmode);
1911 emit_insn (gen_macho_high (reg, (x)));
1912 return gen_rtx_LO_SUM (Pmode, reg, (x));
1915 && CONSTANT_POOL_EXPR_P (x)
1916 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1918 return create_TOC_reference (x);
1924 /* The convention appears to be to define this wherever it is used.
1925 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1926 is now used here. */
1927 #ifndef REG_MODE_OK_FOR_BASE_P
1928 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1931 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1932 replace the input X, or the original X if no replacement is called for.
1933 The output parameter *WIN is 1 if the calling macro should goto WIN,
1936 For RS/6000, we wish to handle large displacements off a base
1937 register by splitting the addend across an addiu/addis and the mem insn.
1938 This cuts number of extra insns needed from 3 to 1.
1940 On Darwin, we use this to generate code for floating point constants.
1941 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1942 The Darwin code is inside #if TARGET_MACHO because only then is
1943 machopic_function_base_name() defined. */
1945 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1947 enum machine_mode mode;
1950 int ind_levels ATTRIBUTE_UNUSED;
1953 /* We must recognize output that we have already generated ourselves. */
1954 if (GET_CODE (x) == PLUS
1955 && GET_CODE (XEXP (x, 0)) == PLUS
1956 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1957 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1958 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1960 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1961 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1962 opnum, (enum reload_type)type);
1968 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1969 && GET_CODE (x) == LO_SUM
1970 && GET_CODE (XEXP (x, 0)) == PLUS
1971 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1972 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1973 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1974 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1975 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1976 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1977 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1979 /* Result of previous invocation of this function on Darwin
1980 floating point constant. */
1981 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1982 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1983 opnum, (enum reload_type)type);
1988 if (GET_CODE (x) == PLUS
1989 && GET_CODE (XEXP (x, 0)) == REG
1990 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1991 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1992 && GET_CODE (XEXP (x, 1)) == CONST_INT
1993 && !ALTIVEC_VECTOR_MODE (mode))
1995 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1996 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
1998 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2000 /* Check for 32-bit overflow. */
2001 if (high + low != val)
2007 /* Reload the high part into a base reg; leave the low part
2008 in the mem directly. */
2010 x = gen_rtx_PLUS (GET_MODE (x),
2011 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2015 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2016 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2017 opnum, (enum reload_type)type);
2022 if (GET_CODE (x) == SYMBOL_REF
2023 && DEFAULT_ABI == ABI_DARWIN
2024 && !ALTIVEC_VECTOR_MODE (mode)
2027 /* Darwin load of floating point constant. */
2028 rtx offset = gen_rtx (CONST, Pmode,
2029 gen_rtx (MINUS, Pmode, x,
2030 gen_rtx (SYMBOL_REF, Pmode,
2031 machopic_function_base_name ())));
2032 x = gen_rtx (LO_SUM, GET_MODE (x),
2033 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2034 gen_rtx (HIGH, Pmode, offset)), offset);
2035 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2036 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2037 opnum, (enum reload_type)type);
2043 && CONSTANT_POOL_EXPR_P (x)
2044 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2046 (x) = create_TOC_reference (x);
2054 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2055 that is a valid memory address for an instruction.
2056 The MODE argument is the machine mode for the MEM expression
2057 that wants to use this address.
2059 On the RS/6000, there are four valid address: a SYMBOL_REF that
2060 refers to a constant pool entry of an address (or the sum of it
2061 plus a constant), a short (16-bit signed) constant plus a register,
2062 the sum of two registers, or a register indirect, possibly with an
2063 auto-increment. For DFmode and DImode with an constant plus register,
2064 we must ensure that both words are addressable or PowerPC64 with offset
2067 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2068 32-bit DImode, TImode), indexed addressing cannot be used because
2069 adjacent memory cells are accessed by adding word-sized offsets
2070 during assembly output. */
2072 rs6000_legitimate_address (mode, x, reg_ok_strict)
2073 enum machine_mode mode;
2077 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2079 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2080 && !ALTIVEC_VECTOR_MODE (mode)
2082 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2084 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2086 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2088 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2090 && GET_CODE (x) == PLUS
2091 && GET_CODE (XEXP (x, 0)) == REG
2092 && XEXP (x, 0) == virtual_stack_vars_rtx
2093 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2095 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2098 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
2099 && (TARGET_POWERPC64 || mode != DImode)
2100 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2102 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2107 /* Try to output insns to set TARGET equal to the constant C if it can
2108 be done in less than N insns. Do all computations in MODE.
2109 Returns the place where the output has been placed if it can be
2110 done and the insns have been emitted. If it would take more than N
2111 insns, zero is returned and no insns and emitted. */
2114 rs6000_emit_set_const (dest, mode, source, n)
2116 enum machine_mode mode;
2117 int n ATTRIBUTE_UNUSED;
2119 HOST_WIDE_INT c0, c1;
2121 if (mode == QImode || mode == HImode || mode == SImode)
2124 dest = gen_reg_rtx (mode);
2125 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2129 if (GET_CODE (source) == CONST_INT)
2131 c0 = INTVAL (source);
2134 else if (GET_CODE (source) == CONST_DOUBLE)
2136 #if HOST_BITS_PER_WIDE_INT >= 64
2137 c0 = CONST_DOUBLE_LOW (source);
2140 c0 = CONST_DOUBLE_LOW (source);
2141 c1 = CONST_DOUBLE_HIGH (source);
2147 return rs6000_emit_set_long_const (dest, c0, c1);
2150 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2151 fall back to a straight forward decomposition. We do this to avoid
2152 exponential run times encountered when looking for longer sequences
2153 with rs6000_emit_set_const. */
2155 rs6000_emit_set_long_const (dest, c1, c2)
2157 HOST_WIDE_INT c1, c2;
2159 if (!TARGET_POWERPC64)
2161 rtx operand1, operand2;
2163 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2165 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2167 emit_move_insn (operand1, GEN_INT (c1));
2168 emit_move_insn (operand2, GEN_INT (c2));
2172 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2175 ud2 = (c1 & 0xffff0000) >> 16;
2176 #if HOST_BITS_PER_WIDE_INT >= 64
2180 ud4 = (c2 & 0xffff0000) >> 16;
2182 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2183 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2186 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2188 emit_move_insn (dest, GEN_INT (ud1));
2191 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2192 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2195 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2198 emit_move_insn (dest, GEN_INT (ud2 << 16));
2200 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2202 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2203 || (ud4 == 0 && ! (ud3 & 0x8000)))
2206 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2209 emit_move_insn (dest, GEN_INT (ud3 << 16));
2212 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2213 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2215 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2220 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2223 emit_move_insn (dest, GEN_INT (ud4 << 16));
2226 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2228 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2230 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2231 GEN_INT (ud2 << 16)));
2233 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2239 /* Emit a move from SOURCE to DEST in mode MODE. */
2241 rs6000_emit_move (dest, source, mode)
2244 enum machine_mode mode;
2248 operands[1] = source;
2250 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2251 if (GET_CODE (operands[1]) == CONST_DOUBLE
2252 && ! FLOAT_MODE_P (mode)
2253 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2255 /* FIXME. This should never happen. */
2256 /* Since it seems that it does, do the safe thing and convert
2258 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2260 if (GET_CODE (operands[1]) == CONST_DOUBLE
2261 && ! FLOAT_MODE_P (mode)
2262 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2263 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2264 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2265 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2268 /* Check if GCC is setting up a block move that will end up using FP
2269 registers as temporaries. We must make sure this is acceptable. */
2270 if (GET_CODE (operands[0]) == MEM
2271 && GET_CODE (operands[1]) == MEM
2273 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2274 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2275 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2276 ? 32 : MEM_ALIGN (operands[0])))
2277 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2279 : MEM_ALIGN (operands[1]))))
2280 && ! MEM_VOLATILE_P (operands [0])
2281 && ! MEM_VOLATILE_P (operands [1]))
2283 emit_move_insn (adjust_address (operands[0], SImode, 0),
2284 adjust_address (operands[1], SImode, 0));
2285 emit_move_insn (adjust_address (operands[0], SImode, 4),
2286 adjust_address (operands[1], SImode, 4));
2290 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2291 operands[1] = force_reg (mode, operands[1]);
2293 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2294 && GET_CODE (operands[0]) == MEM)
2298 if (reload_in_progress || reload_completed)
2299 regnum = true_regnum (operands[1]);
2300 else if (GET_CODE (operands[1]) == REG)
2301 regnum = REGNO (operands[1]);
2305 /* If operands[1] is a register, on POWER it may have
2306 double-precision data in it, so truncate it to single
2308 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2311 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2312 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2313 operands[1] = newreg;
2317 /* Handle the case where reload calls us with an invalid address;
2318 and the case of CONSTANT_P_RTX. */
2319 if (!ALTIVEC_VECTOR_MODE (mode)
2320 && (! general_operand (operands[1], mode)
2321 || ! nonimmediate_operand (operands[0], mode)
2322 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2324 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2328 /* FIXME: In the long term, this switch statement should go away
2329 and be replaced by a sequence of tests based on things like
2335 if (CONSTANT_P (operands[1])
2336 && GET_CODE (operands[1]) != CONST_INT)
2337 operands[1] = force_const_mem (mode, operands[1]);
2343 if (CONSTANT_P (operands[1])
2344 && ! easy_fp_constant (operands[1], mode))
2345 operands[1] = force_const_mem (mode, operands[1]);
2352 if (CONSTANT_P (operands[1])
2353 && !easy_vector_constant (operands[1]))
2354 operands[1] = force_const_mem (mode, operands[1]);
2359 /* Use default pattern for address of ELF small data */
2362 && DEFAULT_ABI == ABI_V4
2363 && (GET_CODE (operands[1]) == SYMBOL_REF
2364 || GET_CODE (operands[1]) == CONST)
2365 && small_data_operand (operands[1], mode))
2367 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2371 if (DEFAULT_ABI == ABI_V4
2372 && mode == Pmode && mode == SImode
2373 && flag_pic == 1 && got_operand (operands[1], mode))
2375 emit_insn (gen_movsi_got (operands[0], operands[1]));
2379 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2380 && TARGET_NO_TOC && ! flag_pic
2382 && CONSTANT_P (operands[1])
2383 && GET_CODE (operands[1]) != HIGH
2384 && GET_CODE (operands[1]) != CONST_INT)
2386 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2388 /* If this is a function address on -mcall-aixdesc,
2389 convert it to the address of the descriptor. */
2390 if (DEFAULT_ABI == ABI_AIX
2391 && GET_CODE (operands[1]) == SYMBOL_REF
2392 && XSTR (operands[1], 0)[0] == '.')
2394 const char *name = XSTR (operands[1], 0);
2396 while (*name == '.')
2398 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2399 CONSTANT_POOL_ADDRESS_P (new_ref)
2400 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2401 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2402 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2403 operands[1] = new_ref;
2406 if (DEFAULT_ABI == ABI_DARWIN)
2408 emit_insn (gen_macho_high (target, operands[1]));
2409 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2413 emit_insn (gen_elf_high (target, operands[1]));
2414 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2418 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2419 and we have put it in the TOC, we just need to make a TOC-relative
2422 && GET_CODE (operands[1]) == SYMBOL_REF
2423 && CONSTANT_POOL_EXPR_P (operands[1])
2424 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2425 get_pool_mode (operands[1])))
2427 operands[1] = create_TOC_reference (operands[1]);
2429 else if (mode == Pmode
2430 && CONSTANT_P (operands[1])
2431 && ((GET_CODE (operands[1]) != CONST_INT
2432 && ! easy_fp_constant (operands[1], mode))
2433 || (GET_CODE (operands[1]) == CONST_INT
2434 && num_insns_constant (operands[1], mode) > 2)
2435 || (GET_CODE (operands[0]) == REG
2436 && FP_REGNO_P (REGNO (operands[0]))))
2437 && GET_CODE (operands[1]) != HIGH
2438 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2439 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2441 /* Emit a USE operation so that the constant isn't deleted if
2442 expensive optimizations are turned on because nobody
2443 references it. This should only be done for operands that
2444 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2445 This should not be done for operands that contain LABEL_REFs.
2446 For now, we just handle the obvious case. */
2447 if (GET_CODE (operands[1]) != LABEL_REF)
2448 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2451 /* Darwin uses a special PIC legitimizer. */
2452 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2455 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2457 if (operands[0] != operands[1])
2458 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2463 /* If we are to limit the number of things we put in the TOC and
2464 this is a symbol plus a constant we can add in one insn,
2465 just put the symbol in the TOC and add the constant. Don't do
2466 this if reload is in progress. */
2467 if (GET_CODE (operands[1]) == CONST
2468 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2469 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2470 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2471 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2472 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2473 && ! side_effects_p (operands[0]))
2476 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2477 rtx other = XEXP (XEXP (operands[1], 0), 1);
2479 sym = force_reg (mode, sym);
2481 emit_insn (gen_addsi3 (operands[0], sym, other));
2483 emit_insn (gen_adddi3 (operands[0], sym, other));
2487 operands[1] = force_const_mem (mode, operands[1]);
2490 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2491 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2492 get_pool_constant (XEXP (operands[1], 0)),
2493 get_pool_mode (XEXP (operands[1], 0))))
2496 = gen_rtx_MEM (mode,
2497 create_TOC_reference (XEXP (operands[1], 0)));
2498 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2499 RTX_UNCHANGING_P (operands[1]) = 1;
2505 if (GET_CODE (operands[0]) == MEM
2506 && GET_CODE (XEXP (operands[0], 0)) != REG
2507 && ! reload_in_progress)
2509 = replace_equiv_address (operands[0],
2510 copy_addr_to_reg (XEXP (operands[0], 0)));
2512 if (GET_CODE (operands[1]) == MEM
2513 && GET_CODE (XEXP (operands[1], 0)) != REG
2514 && ! reload_in_progress)
2516 = replace_equiv_address (operands[1],
2517 copy_addr_to_reg (XEXP (operands[1], 0)));
2524 /* Above, we may have called force_const_mem which may have returned
2525 an invalid address. If we can, fix this up; otherwise, reload will
2526 have to deal with it. */
2527 if (GET_CODE (operands[1]) == MEM
2528 && ! memory_address_p (mode, XEXP (operands[1], 0))
2529 && ! reload_in_progress)
2530 operands[1] = adjust_address (operands[1], mode, 0);
2532 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2536 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2537 for a call to a function whose data type is FNTYPE.
2538 For a library call, FNTYPE is 0.
2540 For incoming args we set the number of arguments in the prototype large
2541 so we never return a PARALLEL. */
2544 init_cumulative_args (cum, fntype, libname, incoming)
2545 CUMULATIVE_ARGS *cum;
2547 rtx libname ATTRIBUTE_UNUSED;
2550 static CUMULATIVE_ARGS zero_cumulative;
2552 *cum = zero_cumulative;
2554 cum->fregno = FP_ARG_MIN_REG;
2555 cum->vregno = ALTIVEC_ARG_MIN_REG;
2556 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2557 cum->call_cookie = CALL_NORMAL;
2558 cum->sysv_gregno = GP_ARG_MIN_REG;
2561 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2563 else if (cum->prototype)
2564 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2565 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2566 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2569 cum->nargs_prototype = 0;
2571 cum->orig_nargs = cum->nargs_prototype;
2573 /* Check for a longcall attribute. */
2575 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2576 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2577 cum->call_cookie = CALL_LONG;
2579 if (TARGET_DEBUG_ARG)
2581 fprintf (stderr, "\ninit_cumulative_args:");
2584 tree ret_type = TREE_TYPE (fntype);
2585 fprintf (stderr, " ret code = %s,",
2586 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2589 if (cum->call_cookie & CALL_LONG)
2590 fprintf (stderr, " longcall,");
2592 fprintf (stderr, " proto = %d, nargs = %d\n",
2593 cum->prototype, cum->nargs_prototype);
2597 /* If defined, a C expression which determines whether, and in which
2598 direction, to pad out an argument with extra space. The value
2599 should be of type `enum direction': either `upward' to pad above
2600 the argument, `downward' to pad below, or `none' to inhibit
2603 For the AIX ABI structs are always stored left shifted in their
2607 function_arg_padding (mode, type)
2608 enum machine_mode mode;
2611 if (type != 0 && AGGREGATE_TYPE_P (type))
2614 /* This is the default definition. */
2615 return (! BYTES_BIG_ENDIAN
2618 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2619 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2620 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2621 ? downward : upward));
2624 /* If defined, a C expression that gives the alignment boundary, in bits,
2625 of an argument with the specified mode and type. If it is not defined,
2626 PARM_BOUNDARY is used for all arguments.
2628 V.4 wants long longs to be double word aligned. */
2631 function_arg_boundary (mode, type)
2632 enum machine_mode mode;
2633 tree type ATTRIBUTE_UNUSED;
2635 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2637 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2640 return PARM_BOUNDARY;
2643 /* Update the data in CUM to advance over an argument
2644 of mode MODE and data type TYPE.
2645 (TYPE is null for libcalls where that information may not be available.) */
2648 function_arg_advance (cum, mode, type, named)
2649 CUMULATIVE_ARGS *cum;
2650 enum machine_mode mode;
2654 cum->nargs_prototype--;
2656 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2658 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2661 cum->words += RS6000_ARG_SIZE (mode, type);
2663 else if (DEFAULT_ABI == ABI_V4)
2665 if (TARGET_HARD_FLOAT
2666 && (mode == SFmode || mode == DFmode))
2668 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2673 cum->words += cum->words & 1;
2674 cum->words += RS6000_ARG_SIZE (mode, type);
2680 int gregno = cum->sysv_gregno;
2682 /* Aggregates and IEEE quad get passed by reference. */
2683 if ((type && AGGREGATE_TYPE_P (type))
2687 n_words = RS6000_ARG_SIZE (mode, type);
2689 /* Long long is put in odd registers. */
2690 if (n_words == 2 && (gregno & 1) == 0)
2693 /* Long long is not split between registers and stack. */
2694 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2696 /* Long long is aligned on the stack. */
2698 cum->words += cum->words & 1;
2699 cum->words += n_words;
2702 /* Note: continuing to accumulate gregno past when we've started
2703 spilling to the stack indicates the fact that we've started
2704 spilling to the stack to expand_builtin_saveregs. */
2705 cum->sysv_gregno = gregno + n_words;
2708 if (TARGET_DEBUG_ARG)
2710 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2711 cum->words, cum->fregno);
2712 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2713 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2714 fprintf (stderr, "mode = %4s, named = %d\n",
2715 GET_MODE_NAME (mode), named);
2720 int align = (TARGET_32BIT && (cum->words & 1) != 0
2721 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2723 cum->words += align + RS6000_ARG_SIZE (mode, type);
2725 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2728 if (TARGET_DEBUG_ARG)
2730 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2731 cum->words, cum->fregno);
2732 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2733 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2734 fprintf (stderr, "named = %d, align = %d\n", named, align);
2739 /* Determine where to put an argument to a function.
2740 Value is zero to push the argument on the stack,
2741 or a hard register in which to store the argument.
2743 MODE is the argument's machine mode.
2744 TYPE is the data type of the argument (as a tree).
2745 This is null for libcalls where that information may
2747 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2748 the preceding args and about the function being called.
2749 NAMED is nonzero if this argument is a named parameter
2750 (otherwise it is an extra parameter matching an ellipsis).
2752 On RS/6000 the first eight words of non-FP are normally in registers
2753 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2754 Under V.4, the first 8 FP args are in registers.
2756 If this is floating-point and no prototype is specified, we use
2757 both an FP and integer register (or possibly FP reg and stack). Library
2758 functions (when TYPE is zero) always have the proper types for args,
2759 so we can pass the FP value just in one register. emit_library_function
2760 doesn't support PARALLEL anyway. */
2763 function_arg (cum, mode, type, named)
2764 CUMULATIVE_ARGS *cum;
2765 enum machine_mode mode;
2769 enum rs6000_abi abi = DEFAULT_ABI;
2771 /* Return a marker to indicate whether CR1 needs to set or clear the
2772 bit that V.4 uses to say fp args were passed in registers.
2773 Assume that we don't need the marker for software floating point,
2774 or compiler generated library calls. */
2775 if (mode == VOIDmode)
2778 && TARGET_HARD_FLOAT
2779 && cum->nargs_prototype < 0
2780 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2782 return GEN_INT (cum->call_cookie
2783 | ((cum->fregno == FP_ARG_MIN_REG)
2784 ? CALL_V4_SET_FP_ARGS
2785 : CALL_V4_CLEAR_FP_ARGS));
2788 return GEN_INT (cum->call_cookie);
2791 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2793 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2794 return gen_rtx_REG (mode, cum->vregno);
2798 else if (abi == ABI_V4)
2800 if (TARGET_HARD_FLOAT
2801 && (mode == SFmode || mode == DFmode))
2803 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2804 return gen_rtx_REG (mode, cum->fregno);
2811 int gregno = cum->sysv_gregno;
2813 /* Aggregates and IEEE quad get passed by reference. */
2814 if ((type && AGGREGATE_TYPE_P (type))
2818 n_words = RS6000_ARG_SIZE (mode, type);
2820 /* Long long is put in odd registers. */
2821 if (n_words == 2 && (gregno & 1) == 0)
2824 /* Long long is not split between registers and stack. */
2825 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2826 return gen_rtx_REG (mode, gregno);
2833 int align = (TARGET_32BIT && (cum->words & 1) != 0
2834 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2835 int align_words = cum->words + align;
2837 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2840 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2843 || ((cum->nargs_prototype > 0)
2844 /* IBM AIX extended its linkage convention definition always
2845 to require FP args after register save area hole on the
2847 && (DEFAULT_ABI != ABI_AIX
2849 || (align_words < GP_ARG_NUM_REG))))
2850 return gen_rtx_REG (mode, cum->fregno);
2852 return gen_rtx_PARALLEL (mode,
2854 gen_rtx_EXPR_LIST (VOIDmode,
2855 ((align_words >= GP_ARG_NUM_REG)
2858 + RS6000_ARG_SIZE (mode, type)
2860 /* If this is partially on the stack, then
2861 we only include the portion actually
2862 in registers here. */
2863 ? gen_rtx_REG (SImode,
2864 GP_ARG_MIN_REG + align_words)
2865 : gen_rtx_REG (mode,
2866 GP_ARG_MIN_REG + align_words))),
2868 gen_rtx_EXPR_LIST (VOIDmode,
2869 gen_rtx_REG (mode, cum->fregno),
2872 else if (align_words < GP_ARG_NUM_REG)
2873 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2879 /* For an arg passed partly in registers and partly in memory,
2880 this is the number of registers used.
2881 For args passed entirely in registers or entirely in memory, zero. */
2884 function_arg_partial_nregs (cum, mode, type, named)
2885 CUMULATIVE_ARGS *cum;
2886 enum machine_mode mode;
2888 int named ATTRIBUTE_UNUSED;
2890 if (DEFAULT_ABI == ABI_V4)
2893 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2894 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2896 if (cum->nargs_prototype >= 0)
2900 if (cum->words < GP_ARG_NUM_REG
2901 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2903 int ret = GP_ARG_NUM_REG - cum->words;
2904 if (ret && TARGET_DEBUG_ARG)
2905 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2913 /* A C expression that indicates when an argument must be passed by
2914 reference. If nonzero for an argument, a copy of that argument is
2915 made in memory and a pointer to the argument is passed instead of
2916 the argument itself. The pointer is passed in whatever way is
2917 appropriate for passing a pointer to that type.
2919 Under V.4, structures and unions are passed by reference. */
2922 function_arg_pass_by_reference (cum, mode, type, named)
2923 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2924 enum machine_mode mode ATTRIBUTE_UNUSED;
2926 int named ATTRIBUTE_UNUSED;
2928 if (DEFAULT_ABI == ABI_V4
2929 && ((type && AGGREGATE_TYPE_P (type))
2932 if (TARGET_DEBUG_ARG)
2933 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2941 /* Perform any needed actions needed for a function that is receiving a
2942 variable number of arguments.
2946 MODE and TYPE are the mode and type of the current parameter.
2948 PRETEND_SIZE is a variable that should be set to the amount of stack
2949 that must be pushed by the prolog to pretend that our caller pushed
2952 Normally, this macro will push all remaining incoming registers on the
2953 stack and set PRETEND_SIZE to the length of the registers pushed. */
2956 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2957 CUMULATIVE_ARGS *cum;
2958 enum machine_mode mode;
2964 CUMULATIVE_ARGS next_cum;
2965 int reg_size = TARGET_32BIT ? 4 : 8;
2966 rtx save_area = NULL_RTX, mem;
2967 int first_reg_offset, set;
2971 fntype = TREE_TYPE (current_function_decl);
2972 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2973 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2974 != void_type_node));
2976 /* For varargs, we do not want to skip the dummy va_dcl argument.
2977 For stdargs, we do want to skip the last named argument. */
2980 function_arg_advance (&next_cum, mode, type, 1);
2982 if (DEFAULT_ABI == ABI_V4)
2984 /* Indicate to allocate space on the stack for varargs save area. */
2985 cfun->machine->sysv_varargs_p = 1;
2987 save_area = plus_constant (virtual_stack_vars_rtx,
2988 - RS6000_VARARGS_SIZE);
2990 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2994 first_reg_offset = next_cum.words;
2995 save_area = virtual_incoming_args_rtx;
2996 cfun->machine->sysv_varargs_p = 0;
2998 if (MUST_PASS_IN_STACK (mode, type))
2999 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3002 set = get_varargs_alias_set ();
3003 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3005 mem = gen_rtx_MEM (BLKmode,
3006 plus_constant (save_area,
3007 first_reg_offset * reg_size)),
3008 set_mem_alias_set (mem, set);
3009 set_mem_align (mem, BITS_PER_WORD);
3012 (GP_ARG_MIN_REG + first_reg_offset, mem,
3013 GP_ARG_NUM_REG - first_reg_offset,
3014 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3016 /* ??? Does ABI_V4 need this at all? */
3017 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
3020 /* Save FP registers if needed. */
3021 if (DEFAULT_ABI == ABI_V4
3022 && TARGET_HARD_FLOAT && ! no_rtl
3023 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3025 int fregno = next_cum.fregno;
3026 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3027 rtx lab = gen_label_rtx ();
3028 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3030 emit_jump_insn (gen_rtx_SET (VOIDmode,
3032 gen_rtx_IF_THEN_ELSE (VOIDmode,
3033 gen_rtx_NE (VOIDmode, cr1,
3035 gen_rtx_LABEL_REF (VOIDmode, lab),
3038 while (fregno <= FP_ARG_V4_MAX_REG)
3040 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3041 set_mem_alias_set (mem, set);
3042 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3051 /* Create the va_list data type. */
3054 rs6000_build_va_list ()
3056 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3058 /* For AIX, prefer 'char *' because that's what the system
3059 header files like. */
3060 if (DEFAULT_ABI != ABI_V4)
3061 return build_pointer_type (char_type_node);
3063 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3064 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3066 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3067 unsigned_char_type_node);
3068 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3069 unsigned_char_type_node);
3070 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3072 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3075 DECL_FIELD_CONTEXT (f_gpr) = record;
3076 DECL_FIELD_CONTEXT (f_fpr) = record;
3077 DECL_FIELD_CONTEXT (f_ovf) = record;
3078 DECL_FIELD_CONTEXT (f_sav) = record;
3080 TREE_CHAIN (record) = type_decl;
3081 TYPE_NAME (record) = type_decl;
3082 TYPE_FIELDS (record) = f_gpr;
3083 TREE_CHAIN (f_gpr) = f_fpr;
3084 TREE_CHAIN (f_fpr) = f_ovf;
3085 TREE_CHAIN (f_ovf) = f_sav;
3087 layout_type (record);
3089 /* The correct type is an array type of one element. */
3090 return build_array_type (record, build_index_type (size_zero_node));
3093 /* Implement va_start. */
3096 rs6000_va_start (stdarg_p, valist, nextarg)
3101 HOST_WIDE_INT words, n_gpr, n_fpr;
3102 tree f_gpr, f_fpr, f_ovf, f_sav;
3103 tree gpr, fpr, ovf, sav, t;
3105 /* Only SVR4 needs something special. */
3106 if (DEFAULT_ABI != ABI_V4)
3108 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
3112 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3113 f_fpr = TREE_CHAIN (f_gpr);
3114 f_ovf = TREE_CHAIN (f_fpr);
3115 f_sav = TREE_CHAIN (f_ovf);
3117 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3118 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3119 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3120 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3121 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3123 /* Count number of gp and fp argument registers used. */
3124 words = current_function_args_info.words;
3125 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3126 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3128 if (TARGET_DEBUG_ARG)
3130 fputs ("va_start: words = ", stderr);
3131 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3132 fputs (", n_gpr = ", stderr);
3133 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3134 fputs (", n_fpr = ", stderr);
3135 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3136 putc ('\n', stderr);
3139 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3140 TREE_SIDE_EFFECTS (t) = 1;
3141 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3143 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3144 TREE_SIDE_EFFECTS (t) = 1;
3145 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3147 /* Find the overflow area. */
3148 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3150 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3151 build_int_2 (words * UNITS_PER_WORD, 0));
3152 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3153 TREE_SIDE_EFFECTS (t) = 1;
3154 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3156 /* Find the register save area. */
3157 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3158 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3159 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3160 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3161 TREE_SIDE_EFFECTS (t) = 1;
3162 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3165 /* Implement va_arg. */
3168 rs6000_va_arg (valist, type)
3171 tree f_gpr, f_fpr, f_ovf, f_sav;
3172 tree gpr, fpr, ovf, sav, reg, t, u;
3173 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3174 rtx lab_false, lab_over, addr_rtx, r;
3176 if (DEFAULT_ABI != ABI_V4)
3177 return std_expand_builtin_va_arg (valist, type);
3179 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3180 f_fpr = TREE_CHAIN (f_gpr);
3181 f_ovf = TREE_CHAIN (f_fpr);
3182 f_sav = TREE_CHAIN (f_ovf);
3184 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3185 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3186 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3187 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3188 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3190 size = int_size_in_bytes (type);
3191 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3193 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3195 /* Aggregates and long doubles are passed by reference. */
3201 size = UNITS_PER_WORD;
3204 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3206 /* FP args go in FP registers, if present. */
3215 /* Otherwise into GP registers. */
3223 /* Pull the value out of the saved registers ... */
3225 lab_false = gen_label_rtx ();
3226 lab_over = gen_label_rtx ();
3227 addr_rtx = gen_reg_rtx (Pmode);
3229 /* AltiVec vectors never go in registers. */
3230 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
3232 TREE_THIS_VOLATILE (reg) = 1;
3233 emit_cmp_and_jump_insns
3234 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3235 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3238 /* Long long is aligned in the registers. */
3241 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3242 build_int_2 (n_reg - 1, 0));
3243 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3244 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3245 TREE_SIDE_EFFECTS (u) = 1;
3246 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3250 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3254 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3255 build_int_2 (n_reg, 0));
3256 TREE_SIDE_EFFECTS (u) = 1;
3258 u = build1 (CONVERT_EXPR, integer_type_node, u);
3259 TREE_SIDE_EFFECTS (u) = 1;
3261 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3262 TREE_SIDE_EFFECTS (u) = 1;
3264 t = build (PLUS_EXPR, ptr_type_node, t, u);
3265 TREE_SIDE_EFFECTS (t) = 1;
3267 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3269 emit_move_insn (addr_rtx, r);
3271 emit_jump_insn (gen_jump (lab_over));
3275 emit_label (lab_false);
3277 /* ... otherwise out of the overflow area. */
3279 /* Make sure we don't find reg 7 for the next int arg.
3281 All AltiVec vectors go in the overflow area. So in the AltiVec
3282 case we need to get the vectors from the overflow area, but
3283 remember where the GPRs and FPRs are. */
3284 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
3285 || !TARGET_ALTIVEC))
3287 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3288 TREE_SIDE_EFFECTS (t) = 1;
3289 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3292 /* Care for on-stack alignment if needed. */
3299 /* AltiVec vectors are 16 byte aligned. */
3300 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
3305 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3306 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3310 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3312 emit_move_insn (addr_rtx, r);
3314 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3315 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3316 TREE_SIDE_EFFECTS (t) = 1;
3317 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3319 emit_label (lab_over);
3323 r = gen_rtx_MEM (Pmode, addr_rtx);
3324 set_mem_alias_set (r, get_varargs_alias_set ());
3325 emit_move_insn (addr_rtx, r);
3333 #define def_builtin(MASK, NAME, TYPE, CODE) \
3335 if ((MASK) & target_flags) \
3336 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3339 struct builtin_description
3341 const unsigned int mask;
3342 const enum insn_code icode;
3343 const char *const name;
3344 const enum rs6000_builtins code;
3347 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3349 static const struct builtin_description bdesc_3arg[] =
3351 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3352 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3353 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3354 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3356 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3357 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3358 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3359 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3360 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3361 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3362 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3363 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3364 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3365 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3366 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3367 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3368 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3369 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3370 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3371 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3372 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3373 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3376 /* DST operations: void foo (void *, const int, const char). */
3378 static const struct builtin_description bdesc_dst[] =
3380 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3381 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3382 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3383 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3386 /* Simple binary operations: VECc = foo (VECa, VECb). */
3388 static const struct builtin_description bdesc_2arg[] =
3390 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3391 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3392 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3393 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3394 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3395 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3396 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3397 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3398 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3399 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3400 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3401 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3403 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3404 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3405 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3406 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3407 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3408 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3409 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3410 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3411 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3412 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3413 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3415 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3416 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3417 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3418 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3419 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3420 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3421 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3422 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3423 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3424 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3425 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3426 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3427 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3428 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3429 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3430 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3431 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3432 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3433 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3434 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3435 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3436 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3437 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3438 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3439 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3440 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3441 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3442 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3443 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3444 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3445 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3446 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3447 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3448 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3449 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3450 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3451 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3452 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3453 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3454 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3455 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3456 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3457 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3458 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3459 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3460 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3461 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3462 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3463 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3464 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3465 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3466 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3467 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3468 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3469 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3470 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3471 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3472 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3473 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3474 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3475 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3476 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3477 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3478 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3479 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3480 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3481 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3482 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3483 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3484 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3485 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3486 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3487 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3488 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3489 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3490 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3491 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3492 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3493 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3494 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3495 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3496 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3497 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3498 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3499 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3500 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3501 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3502 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3505 /* AltiVec predicates. */
3507 struct builtin_description_predicates
3509 const unsigned int mask;
3510 const enum insn_code icode;
3512 const char *const name;
3513 const enum rs6000_builtins code;
3516 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3518 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3519 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3520 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3521 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3522 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3523 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3524 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3525 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3526 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3527 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3528 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3529 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3530 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3533 /* ABS* opreations. */
3535 static const struct builtin_description bdesc_abs[] =
3537 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
3538 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
3539 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
3540 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
3541 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
3542 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
3543 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
3546 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3549 static const struct builtin_description bdesc_1arg[] =
3551 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3552 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3553 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3554 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3555 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3556 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3557 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3558 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3559 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3560 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3561 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3562 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3563 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3564 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3565 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3566 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3567 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
3571 rs6000_expand_unop_builtin (icode, arglist, target)
3572 enum insn_code icode;
3577 tree arg0 = TREE_VALUE (arglist);
3578 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3579 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3580 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3582 /* If we got invalid arguments bail out before generating bad rtl. */
3583 if (arg0 == error_mark_node)
3588 /* Only allow 5-bit *signed* literals. */
3589 case CODE_FOR_altivec_vspltisb:
3590 case CODE_FOR_altivec_vspltish:
3591 case CODE_FOR_altivec_vspltisw:
3592 if (GET_CODE (op0) != CONST_INT
3593 || INTVAL (op0) > 0x1f
3594 || INTVAL (op0) < -0x1f)
3596 error ("argument 1 must be a 5-bit signed literal");
3605 || GET_MODE (target) != tmode
3606 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3607 target = gen_reg_rtx (tmode);
3609 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3610 op0 = copy_to_mode_reg (mode0, op0);
3612 pat = GEN_FCN (icode) (target, op0);
3621 altivec_expand_abs_builtin (icode, arglist, target)
3622 enum insn_code icode;
3626 rtx pat, scratch1, scratch2;
3627 tree arg0 = TREE_VALUE (arglist);
3628 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3629 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3630 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3632 /* If we have invalid arguments, bail out before generating bad rtl. */
3633 if (arg0 == error_mark_node)
3637 || GET_MODE (target) != tmode
3638 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3639 target = gen_reg_rtx (tmode);
3641 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3642 op0 = copy_to_mode_reg (mode0, op0);
3644 scratch1 = gen_reg_rtx (mode0);
3645 scratch2 = gen_reg_rtx (mode0);
3647 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3656 rs6000_expand_binop_builtin (icode, arglist, target)
3657 enum insn_code icode;
3662 tree arg0 = TREE_VALUE (arglist);
3663 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3664 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3665 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3666 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3667 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3668 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3670 /* If we got invalid arguments bail out before generating bad rtl. */
3671 if (arg0 == error_mark_node || arg1 == error_mark_node)
3676 /* Only allow 5-bit unsigned literals. */
3677 case CODE_FOR_altivec_vcfux:
3678 case CODE_FOR_altivec_vcfsx:
3679 case CODE_FOR_altivec_vctsxs:
3680 case CODE_FOR_altivec_vctuxs:
3681 case CODE_FOR_altivec_vspltb:
3682 case CODE_FOR_altivec_vsplth:
3683 case CODE_FOR_altivec_vspltw:
3684 if (TREE_CODE (arg1) != INTEGER_CST
3685 || TREE_INT_CST_LOW (arg1) & ~0x1f)
3687 error ("argument 2 must be a 5-bit unsigned literal");
3696 || GET_MODE (target) != tmode
3697 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3698 target = gen_reg_rtx (tmode);
3700 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3701 op0 = copy_to_mode_reg (mode0, op0);
3702 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3703 op1 = copy_to_mode_reg (mode1, op1);
3705 pat = GEN_FCN (icode) (target, op0, op1);
3714 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3715 enum insn_code icode;
3721 tree cr6_form = TREE_VALUE (arglist);
3722 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3723 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3724 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3725 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3726 enum machine_mode tmode = SImode;
3727 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3728 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3731 if (TREE_CODE (cr6_form) != INTEGER_CST)
3733 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3737 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3742 /* If we have invalid arguments, bail out before generating bad rtl. */
3743 if (arg0 == error_mark_node || arg1 == error_mark_node)
3747 || GET_MODE (target) != tmode
3748 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3749 target = gen_reg_rtx (tmode);
3751 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3752 op0 = copy_to_mode_reg (mode0, op0);
3753 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3754 op1 = copy_to_mode_reg (mode1, op1);
3756 scratch = gen_reg_rtx (mode0);
3758 pat = GEN_FCN (icode) (scratch, op0, op1,
3759 gen_rtx (SYMBOL_REF, Pmode, opcode));
3764 /* The vec_any* and vec_all* predicates use the same opcodes for two
3765 different operations, but the bits in CR6 will be different
3766 depending on what information we want. So we have to play tricks
3767 with CR6 to get the right bits out.
3769 If you think this is disgusting, look at the specs for the
3770 AltiVec predicates. */
3772 switch (cr6_form_int)
3775 emit_insn (gen_cr6_test_for_zero (target));
3778 emit_insn (gen_cr6_test_for_zero_reverse (target));
3781 emit_insn (gen_cr6_test_for_lt (target));
3784 emit_insn (gen_cr6_test_for_lt_reverse (target));
3787 error ("argument 1 of __builtin_altivec_predicate is out of range");
3795 altivec_expand_stv_builtin (icode, arglist)
3796 enum insn_code icode;
3799 tree arg0 = TREE_VALUE (arglist);
3800 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3801 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3802 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3803 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3804 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3806 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3807 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3808 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3810 /* Invalid arguments. Bail before doing anything stoopid! */
3811 if (arg0 == error_mark_node
3812 || arg1 == error_mark_node
3813 || arg2 == error_mark_node)
3816 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3817 op0 = copy_to_mode_reg (mode2, op0);
3818 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3819 op1 = copy_to_mode_reg (mode0, op1);
3820 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3821 op2 = copy_to_mode_reg (mode1, op2);
3823 pat = GEN_FCN (icode) (op1, op2, op0);
3830 rs6000_expand_ternop_builtin (icode, arglist, target)
3831 enum insn_code icode;
3836 tree arg0 = TREE_VALUE (arglist);
3837 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3838 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3839 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3840 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3841 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3842 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3843 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3844 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3845 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3847 /* If we got invalid arguments bail out before generating bad rtl. */
3848 if (arg0 == error_mark_node
3849 || arg1 == error_mark_node
3850 || arg2 == error_mark_node)
3855 /* Only allow 4-bit unsigned literals. */
3856 case CODE_FOR_altivec_vsldoi_4sf:
3857 case CODE_FOR_altivec_vsldoi_4si:
3858 case CODE_FOR_altivec_vsldoi_8hi:
3859 case CODE_FOR_altivec_vsldoi_16qi:
3860 if (TREE_CODE (arg2) != INTEGER_CST
3861 || TREE_INT_CST_LOW (arg2) & ~0xf)
3863 error ("argument 3 must be a 4-bit unsigned literal");
3872 || GET_MODE (target) != tmode
3873 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3874 target = gen_reg_rtx (tmode);
3876 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3877 op0 = copy_to_mode_reg (mode0, op0);
3878 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3879 op1 = copy_to_mode_reg (mode1, op1);
3880 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3881 op2 = copy_to_mode_reg (mode2, op2);
3883 pat = GEN_FCN (icode) (target, op0, op1, op2);
3891 /* Expand the builtin in EXP and store the result in TARGET. Store
3892 true in *EXPANDEDP if we found a builtin to expand. */
3894 altivec_expand_builtin (exp, target, expandedp)
3899 struct builtin_description *d;
3900 struct builtin_description_predicates *dp;
3902 enum insn_code icode;
3903 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3904 tree arglist = TREE_OPERAND (exp, 1);
3905 tree arg0, arg1, arg2;
3906 rtx op0, op1, op2, pat;
3907 enum machine_mode tmode, mode0, mode1, mode2;
3908 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3914 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3915 icode = CODE_FOR_altivec_lvx_16qi;
3916 arg0 = TREE_VALUE (arglist);
3917 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3918 tmode = insn_data[icode].operand[0].mode;
3919 mode0 = insn_data[icode].operand[1].mode;
3922 || GET_MODE (target) != tmode
3923 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3924 target = gen_reg_rtx (tmode);
3926 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3927 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3929 pat = GEN_FCN (icode) (target, op0);
3935 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3936 icode = CODE_FOR_altivec_lvx_8hi;
3937 arg0 = TREE_VALUE (arglist);
3938 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3939 tmode = insn_data[icode].operand[0].mode;
3940 mode0 = insn_data[icode].operand[1].mode;
3943 || GET_MODE (target) != tmode
3944 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3945 target = gen_reg_rtx (tmode);
3947 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3948 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3950 pat = GEN_FCN (icode) (target, op0);
3956 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3957 icode = CODE_FOR_altivec_lvx_4si;
3958 arg0 = TREE_VALUE (arglist);
3959 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3960 tmode = insn_data[icode].operand[0].mode;
3961 mode0 = insn_data[icode].operand[1].mode;
3964 || GET_MODE (target) != tmode
3965 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3966 target = gen_reg_rtx (tmode);
3968 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3969 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3971 pat = GEN_FCN (icode) (target, op0);
3977 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3978 icode = CODE_FOR_altivec_lvx_4sf;
3979 arg0 = TREE_VALUE (arglist);
3980 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3981 tmode = insn_data[icode].operand[0].mode;
3982 mode0 = insn_data[icode].operand[1].mode;
3985 || GET_MODE (target) != tmode
3986 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3987 target = gen_reg_rtx (tmode);
3989 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3990 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3992 pat = GEN_FCN (icode) (target, op0);
3998 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3999 icode = CODE_FOR_altivec_stvx_16qi;
4000 arg0 = TREE_VALUE (arglist);
4001 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4002 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4003 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4004 mode0 = insn_data[icode].operand[0].mode;
4005 mode1 = insn_data[icode].operand[1].mode;
4007 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4008 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4009 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4010 op1 = copy_to_mode_reg (mode1, op1);
4012 pat = GEN_FCN (icode) (op0, op1);
4017 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4018 icode = CODE_FOR_altivec_stvx_8hi;
4019 arg0 = TREE_VALUE (arglist);
4020 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4021 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4022 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4023 mode0 = insn_data[icode].operand[0].mode;
4024 mode1 = insn_data[icode].operand[1].mode;
4026 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4027 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4028 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4029 op1 = copy_to_mode_reg (mode1, op1);
4031 pat = GEN_FCN (icode) (op0, op1);
4036 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4037 icode = CODE_FOR_altivec_stvx_4si;
4038 arg0 = TREE_VALUE (arglist);
4039 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4040 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4041 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4042 mode0 = insn_data[icode].operand[0].mode;
4043 mode1 = insn_data[icode].operand[1].mode;
4045 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4046 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4047 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4048 op1 = copy_to_mode_reg (mode1, op1);
4050 pat = GEN_FCN (icode) (op0, op1);
4055 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4056 icode = CODE_FOR_altivec_stvx_4sf;
4057 arg0 = TREE_VALUE (arglist);
4058 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4059 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4060 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4061 mode0 = insn_data[icode].operand[0].mode;
4062 mode1 = insn_data[icode].operand[1].mode;
4064 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4065 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4066 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4067 op1 = copy_to_mode_reg (mode1, op1);
4069 pat = GEN_FCN (icode) (op0, op1);
4074 case ALTIVEC_BUILTIN_STVX:
4075 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4076 case ALTIVEC_BUILTIN_STVEBX:
4077 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4078 case ALTIVEC_BUILTIN_STVEHX:
4079 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4080 case ALTIVEC_BUILTIN_STVEWX:
4081 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4082 case ALTIVEC_BUILTIN_STVXL:
4083 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
4085 case ALTIVEC_BUILTIN_MFVSCR:
4086 icode = CODE_FOR_altivec_mfvscr;
4087 tmode = insn_data[icode].operand[0].mode;
4090 || GET_MODE (target) != tmode
4091 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4092 target = gen_reg_rtx (tmode);
4094 pat = GEN_FCN (icode) (target);
4100 case ALTIVEC_BUILTIN_MTVSCR:
4101 icode = CODE_FOR_altivec_mtvscr;
4102 arg0 = TREE_VALUE (arglist);
4103 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4104 mode0 = insn_data[icode].operand[0].mode;
4106 /* If we got invalid arguments bail out before generating bad rtl. */
4107 if (arg0 == error_mark_node)
4110 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4111 op0 = copy_to_mode_reg (mode0, op0);
4113 pat = GEN_FCN (icode) (op0);
4118 case ALTIVEC_BUILTIN_DSSALL:
4119 emit_insn (gen_altivec_dssall ());
4122 case ALTIVEC_BUILTIN_DSS:
4123 icode = CODE_FOR_altivec_dss;
4124 arg0 = TREE_VALUE (arglist);
4125 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4126 mode0 = insn_data[icode].operand[0].mode;
4128 /* If we got invalid arguments bail out before generating bad rtl. */
4129 if (arg0 == error_mark_node)
4132 if (TREE_CODE (arg0) != INTEGER_CST
4133 || TREE_INT_CST_LOW (arg0) & ~0x3)
4135 error ("argument to dss must be a 2-bit unsigned literal");
4139 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4140 op0 = copy_to_mode_reg (mode0, op0);
4142 emit_insn (gen_altivec_dss (op0));
4146 /* Handle DST variants. */
4147 d = (struct builtin_description *) bdesc_dst;
4148 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4149 if (d->code == fcode)
4151 arg0 = TREE_VALUE (arglist);
4152 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4153 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4154 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4155 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4156 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4157 mode0 = insn_data[d->icode].operand[0].mode;
4158 mode1 = insn_data[d->icode].operand[1].mode;
4159 mode2 = insn_data[d->icode].operand[2].mode;
4161 /* Invalid arguments, bail out before generating bad rtl. */
4162 if (arg0 == error_mark_node
4163 || arg1 == error_mark_node
4164 || arg2 == error_mark_node)
4167 if (TREE_CODE (arg2) != INTEGER_CST
4168 || TREE_INT_CST_LOW (arg2) & ~0x3)
4170 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4174 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4175 op0 = copy_to_mode_reg (mode0, op0);
4176 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4177 op1 = copy_to_mode_reg (mode1, op1);
4179 pat = GEN_FCN (d->icode) (op0, op1, op2);
4186 /* Expand abs* operations. */
4187 d = (struct builtin_description *) bdesc_abs;
4188 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4189 if (d->code == fcode)
4190 return altivec_expand_abs_builtin (d->icode, arglist, target);
4192 /* Expand the AltiVec predicates. */
4193 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4194 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4195 if (dp->code == fcode)
4196 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4198 /* LV* are funky. We initialized them differently. */
4201 case ALTIVEC_BUILTIN_LVSL:
4202 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4204 case ALTIVEC_BUILTIN_LVSR:
4205 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4207 case ALTIVEC_BUILTIN_LVEBX:
4208 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4210 case ALTIVEC_BUILTIN_LVEHX:
4211 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4213 case ALTIVEC_BUILTIN_LVEWX:
4214 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4216 case ALTIVEC_BUILTIN_LVXL:
4217 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4219 case ALTIVEC_BUILTIN_LVX:
4220 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
4231 /* Expand an expression EXP that calls a built-in function,
4232 with result going to TARGET if that's convenient
4233 (and in mode MODE if that's convenient).
4234 SUBTARGET may be used as the target for computing one of EXP's operands.
4235 IGNORE is nonzero if the value is to be ignored. */
4238 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4241 rtx subtarget ATTRIBUTE_UNUSED;
4242 enum machine_mode mode ATTRIBUTE_UNUSED;
4243 int ignore ATTRIBUTE_UNUSED;
4245 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4246 tree arglist = TREE_OPERAND (exp, 1);
4247 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4248 struct builtin_description *d;
4255 ret = altivec_expand_builtin (exp, target, &success);
4261 /* Handle simple unary operations. */
4262 d = (struct builtin_description *) bdesc_1arg;
4263 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4264 if (d->code == fcode)
4265 return rs6000_expand_unop_builtin (d->icode, arglist, target);
4267 /* Handle simple binary operations. */
4268 d = (struct builtin_description *) bdesc_2arg;
4269 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4270 if (d->code == fcode)
4271 return rs6000_expand_binop_builtin (d->icode, arglist, target);
4273 /* Handle simple ternary operations. */
4274 d = (struct builtin_description *) bdesc_3arg;
4275 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4276 if (d->code == fcode)
4277 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
4284 rs6000_init_builtins ()
4287 altivec_init_builtins ();
4291 altivec_init_builtins (void)
4293 struct builtin_description *d;
4294 struct builtin_description_predicates *dp;
4297 tree endlink = void_list_node;
4299 tree pint_type_node = build_pointer_type (integer_type_node);
4300 tree pvoid_type_node = build_pointer_type (void_type_node);
4301 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4302 tree pchar_type_node = build_pointer_type (char_type_node);
4303 tree pfloat_type_node = build_pointer_type (float_type_node);
4305 tree v4sf_ftype_v4sf_v4sf_v16qi
4306 = build_function_type (V4SF_type_node,
4307 tree_cons (NULL_TREE, V4SF_type_node,
4308 tree_cons (NULL_TREE, V4SF_type_node,
4309 tree_cons (NULL_TREE,
4312 tree v4si_ftype_v4si_v4si_v16qi
4313 = build_function_type (V4SI_type_node,
4314 tree_cons (NULL_TREE, V4SI_type_node,
4315 tree_cons (NULL_TREE, V4SI_type_node,
4316 tree_cons (NULL_TREE,
4319 tree v8hi_ftype_v8hi_v8hi_v16qi
4320 = build_function_type (V8HI_type_node,
4321 tree_cons (NULL_TREE, V8HI_type_node,
4322 tree_cons (NULL_TREE, V8HI_type_node,
4323 tree_cons (NULL_TREE,
4326 tree v16qi_ftype_v16qi_v16qi_v16qi
4327 = build_function_type (V16QI_type_node,
4328 tree_cons (NULL_TREE, V16QI_type_node,
4329 tree_cons (NULL_TREE, V16QI_type_node,
4330 tree_cons (NULL_TREE,
4334 /* V4SI foo (char). */
4335 tree v4si_ftype_char
4336 = build_function_type (V4SI_type_node,
4337 tree_cons (NULL_TREE, char_type_node, endlink));
4339 /* V8HI foo (char). */
4340 tree v8hi_ftype_char
4341 = build_function_type (V8HI_type_node,
4342 tree_cons (NULL_TREE, char_type_node, endlink));
4344 /* V16QI foo (char). */
4345 tree v16qi_ftype_char
4346 = build_function_type (V16QI_type_node,
4347 tree_cons (NULL_TREE, char_type_node, endlink));
4348 /* V4SF foo (V4SF). */
4349 tree v4sf_ftype_v4sf
4350 = build_function_type (V4SF_type_node,
4351 tree_cons (NULL_TREE, V4SF_type_node, endlink));
4353 /* V4SI foo (int *). */
4354 tree v4si_ftype_pint
4355 = build_function_type (V4SI_type_node,
4356 tree_cons (NULL_TREE, pint_type_node, endlink));
4357 /* V8HI foo (short *). */
4358 tree v8hi_ftype_pshort
4359 = build_function_type (V8HI_type_node,
4360 tree_cons (NULL_TREE, pshort_type_node, endlink));
4361 /* V16QI foo (char *). */
4362 tree v16qi_ftype_pchar
4363 = build_function_type (V16QI_type_node,
4364 tree_cons (NULL_TREE, pchar_type_node, endlink));
4365 /* V4SF foo (float *). */
4366 tree v4sf_ftype_pfloat
4367 = build_function_type (V4SF_type_node,
4368 tree_cons (NULL_TREE, pfloat_type_node, endlink));
4370 /* V8HI foo (V16QI). */
4371 tree v8hi_ftype_v16qi
4372 = build_function_type (V8HI_type_node,
4373 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4375 /* void foo (void *, int, char/literal). */
4376 tree void_ftype_pvoid_int_char
4377 = build_function_type (void_type_node,
4378 tree_cons (NULL_TREE, pvoid_type_node,
4379 tree_cons (NULL_TREE, integer_type_node,
4380 tree_cons (NULL_TREE,
4384 /* void foo (int *, V4SI). */
4385 tree void_ftype_pint_v4si
4386 = build_function_type (void_type_node,
4387 tree_cons (NULL_TREE, pint_type_node,
4388 tree_cons (NULL_TREE, V4SI_type_node,
4390 /* void foo (short *, V8HI). */
4391 tree void_ftype_pshort_v8hi
4392 = build_function_type (void_type_node,
4393 tree_cons (NULL_TREE, pshort_type_node,
4394 tree_cons (NULL_TREE, V8HI_type_node,
4396 /* void foo (char *, V16QI). */
4397 tree void_ftype_pchar_v16qi
4398 = build_function_type (void_type_node,
4399 tree_cons (NULL_TREE, pchar_type_node,
4400 tree_cons (NULL_TREE, V16QI_type_node,
4402 /* void foo (float *, V4SF). */
4403 tree void_ftype_pfloat_v4sf
4404 = build_function_type (void_type_node,
4405 tree_cons (NULL_TREE, pfloat_type_node,
4406 tree_cons (NULL_TREE, V4SF_type_node,
4409 /* void foo (V4SI). */
4410 tree void_ftype_v4si
4411 = build_function_type (void_type_node,
4412 tree_cons (NULL_TREE, V4SI_type_node,
4415 /* void foo (vint, int, void *). */
4416 tree void_ftype_v4si_int_pvoid
4417 = build_function_type (void_type_node,
4418 tree_cons (NULL_TREE, V4SI_type_node,
4419 tree_cons (NULL_TREE, integer_type_node,
4420 tree_cons (NULL_TREE,
4424 /* void foo (vchar, int, void *). */
4425 tree void_ftype_v16qi_int_pvoid
4426 = build_function_type (void_type_node,
4427 tree_cons (NULL_TREE, V16QI_type_node,
4428 tree_cons (NULL_TREE, integer_type_node,
4429 tree_cons (NULL_TREE,
4433 /* void foo (vshort, int, void *). */
4434 tree void_ftype_v8hi_int_pvoid
4435 = build_function_type (void_type_node,
4436 tree_cons (NULL_TREE, V8HI_type_node,
4437 tree_cons (NULL_TREE, integer_type_node,
4438 tree_cons (NULL_TREE,
4442 /* void foo (char). */
4444 = build_function_type (void_type_node,
4445 tree_cons (NULL_TREE, char_type_node,
4448 /* void foo (void). */
4449 tree void_ftype_void
4450 = build_function_type (void_type_node, void_list_node);
4452 /* vshort foo (void). */
4453 tree v8hi_ftype_void
4454 = build_function_type (V8HI_type_node, void_list_node);
4456 tree v4si_ftype_v4si_v4si
4457 = build_function_type (V4SI_type_node,
4458 tree_cons (NULL_TREE, V4SI_type_node,
4459 tree_cons (NULL_TREE, V4SI_type_node,
4462 /* These are for the unsigned 5 bit literals. */
4464 tree v4sf_ftype_v4si_char
4465 = build_function_type (V4SF_type_node,
4466 tree_cons (NULL_TREE, V4SI_type_node,
4467 tree_cons (NULL_TREE, char_type_node,
4469 tree v4si_ftype_v4sf_char
4470 = build_function_type (V4SI_type_node,
4471 tree_cons (NULL_TREE, V4SF_type_node,
4472 tree_cons (NULL_TREE, char_type_node,
4474 tree v4si_ftype_v4si_char
4475 = build_function_type (V4SI_type_node,
4476 tree_cons (NULL_TREE, V4SI_type_node,
4477 tree_cons (NULL_TREE, char_type_node,
4479 tree v8hi_ftype_v8hi_char
4480 = build_function_type (V8HI_type_node,
4481 tree_cons (NULL_TREE, V8HI_type_node,
4482 tree_cons (NULL_TREE, char_type_node,
4484 tree v16qi_ftype_v16qi_char
4485 = build_function_type (V16QI_type_node,
4486 tree_cons (NULL_TREE, V16QI_type_node,
4487 tree_cons (NULL_TREE, char_type_node,
4490 /* These are for the unsigned 4 bit literals. */
4492 tree v16qi_ftype_v16qi_v16qi_char
4493 = build_function_type (V16QI_type_node,
4494 tree_cons (NULL_TREE, V16QI_type_node,
4495 tree_cons (NULL_TREE, V16QI_type_node,
4496 tree_cons (NULL_TREE,
4500 tree v8hi_ftype_v8hi_v8hi_char
4501 = build_function_type (V8HI_type_node,
4502 tree_cons (NULL_TREE, V8HI_type_node,
4503 tree_cons (NULL_TREE, V8HI_type_node,
4504 tree_cons (NULL_TREE,
4508 tree v4si_ftype_v4si_v4si_char
4509 = build_function_type (V4SI_type_node,
4510 tree_cons (NULL_TREE, V4SI_type_node,
4511 tree_cons (NULL_TREE, V4SI_type_node,
4512 tree_cons (NULL_TREE,
4516 tree v4sf_ftype_v4sf_v4sf_char
4517 = build_function_type (V4SF_type_node,
4518 tree_cons (NULL_TREE, V4SF_type_node,
4519 tree_cons (NULL_TREE, V4SF_type_node,
4520 tree_cons (NULL_TREE,
4524 /* End of 4 bit literals. */
4526 tree v4sf_ftype_v4sf_v4sf
4527 = build_function_type (V4SF_type_node,
4528 tree_cons (NULL_TREE, V4SF_type_node,
4529 tree_cons (NULL_TREE, V4SF_type_node,
4531 tree v4sf_ftype_v4sf_v4sf_v4si
4532 = build_function_type (V4SF_type_node,
4533 tree_cons (NULL_TREE, V4SF_type_node,
4534 tree_cons (NULL_TREE, V4SF_type_node,
4535 tree_cons (NULL_TREE,
4538 tree v4sf_ftype_v4sf_v4sf_v4sf
4539 = build_function_type (V4SF_type_node,
4540 tree_cons (NULL_TREE, V4SF_type_node,
4541 tree_cons (NULL_TREE, V4SF_type_node,
4542 tree_cons (NULL_TREE,
4545 tree v4si_ftype_v4si_v4si_v4si
4546 = build_function_type (V4SI_type_node,
4547 tree_cons (NULL_TREE, V4SI_type_node,
4548 tree_cons (NULL_TREE, V4SI_type_node,
4549 tree_cons (NULL_TREE,
4553 tree v8hi_ftype_v8hi_v8hi
4554 = build_function_type (V8HI_type_node,
4555 tree_cons (NULL_TREE, V8HI_type_node,
4556 tree_cons (NULL_TREE, V8HI_type_node,
4558 tree v8hi_ftype_v8hi_v8hi_v8hi
4559 = build_function_type (V8HI_type_node,
4560 tree_cons (NULL_TREE, V8HI_type_node,
4561 tree_cons (NULL_TREE, V8HI_type_node,
4562 tree_cons (NULL_TREE,
4565 tree v4si_ftype_v8hi_v8hi_v4si
4566 = build_function_type (V4SI_type_node,
4567 tree_cons (NULL_TREE, V8HI_type_node,
4568 tree_cons (NULL_TREE, V8HI_type_node,
4569 tree_cons (NULL_TREE,
4572 tree v4si_ftype_v16qi_v16qi_v4si
4573 = build_function_type (V4SI_type_node,
4574 tree_cons (NULL_TREE, V16QI_type_node,
4575 tree_cons (NULL_TREE, V16QI_type_node,
4576 tree_cons (NULL_TREE,
4580 tree v16qi_ftype_v16qi_v16qi
4581 = build_function_type (V16QI_type_node,
4582 tree_cons (NULL_TREE, V16QI_type_node,
4583 tree_cons (NULL_TREE, V16QI_type_node,
4586 tree v4si_ftype_v4sf_v4sf
4587 = build_function_type (V4SI_type_node,
4588 tree_cons (NULL_TREE, V4SF_type_node,
4589 tree_cons (NULL_TREE, V4SF_type_node,
4592 tree v4si_ftype_v4si
4593 = build_function_type (V4SI_type_node,
4594 tree_cons (NULL_TREE, V4SI_type_node, endlink));
4596 tree v8hi_ftype_v8hi
4597 = build_function_type (V8HI_type_node,
4598 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4600 tree v16qi_ftype_v16qi
4601 = build_function_type (V16QI_type_node,
4602 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4604 tree v8hi_ftype_v16qi_v16qi
4605 = build_function_type (V8HI_type_node,
4606 tree_cons (NULL_TREE, V16QI_type_node,
4607 tree_cons (NULL_TREE, V16QI_type_node,
4610 tree v4si_ftype_v8hi_v8hi
4611 = build_function_type (V4SI_type_node,
4612 tree_cons (NULL_TREE, V8HI_type_node,
4613 tree_cons (NULL_TREE, V8HI_type_node,
4616 tree v8hi_ftype_v4si_v4si
4617 = build_function_type (V8HI_type_node,
4618 tree_cons (NULL_TREE, V4SI_type_node,
4619 tree_cons (NULL_TREE, V4SI_type_node,
4622 tree v16qi_ftype_v8hi_v8hi
4623 = build_function_type (V16QI_type_node,
4624 tree_cons (NULL_TREE, V8HI_type_node,
4625 tree_cons (NULL_TREE, V8HI_type_node,
4628 tree v4si_ftype_v16qi_v4si
4629 = build_function_type (V4SI_type_node,
4630 tree_cons (NULL_TREE, V16QI_type_node,
4631 tree_cons (NULL_TREE, V4SI_type_node,
4634 tree v4si_ftype_v16qi_v16qi
4635 = build_function_type (V4SI_type_node,
4636 tree_cons (NULL_TREE, V16QI_type_node,
4637 tree_cons (NULL_TREE, V16QI_type_node,
4640 tree v4si_ftype_v8hi_v4si
4641 = build_function_type (V4SI_type_node,
4642 tree_cons (NULL_TREE, V8HI_type_node,
4643 tree_cons (NULL_TREE, V4SI_type_node,
4646 tree v4si_ftype_v8hi
4647 = build_function_type (V4SI_type_node,
4648 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4650 tree int_ftype_v4si_v4si
4651 = build_function_type (integer_type_node,
4652 tree_cons (NULL_TREE, V4SI_type_node,
4653 tree_cons (NULL_TREE, V4SI_type_node,
4656 tree int_ftype_v4sf_v4sf
4657 = build_function_type (integer_type_node,
4658 tree_cons (NULL_TREE, V4SF_type_node,
4659 tree_cons (NULL_TREE, V4SF_type_node,
4662 tree int_ftype_v16qi_v16qi
4663 = build_function_type (integer_type_node,
4664 tree_cons (NULL_TREE, V16QI_type_node,
4665 tree_cons (NULL_TREE, V16QI_type_node,
4668 tree int_ftype_int_v4si_v4si
4669 = build_function_type
4671 tree_cons (NULL_TREE, integer_type_node,
4672 tree_cons (NULL_TREE, V4SI_type_node,
4673 tree_cons (NULL_TREE, V4SI_type_node,
4676 tree int_ftype_int_v4sf_v4sf
4677 = build_function_type
4679 tree_cons (NULL_TREE, integer_type_node,
4680 tree_cons (NULL_TREE, V4SF_type_node,
4681 tree_cons (NULL_TREE, V4SF_type_node,
4684 tree int_ftype_int_v8hi_v8hi
4685 = build_function_type
4687 tree_cons (NULL_TREE, integer_type_node,
4688 tree_cons (NULL_TREE, V8HI_type_node,
4689 tree_cons (NULL_TREE, V8HI_type_node,
4692 tree int_ftype_int_v16qi_v16qi
4693 = build_function_type
4695 tree_cons (NULL_TREE, integer_type_node,
4696 tree_cons (NULL_TREE, V16QI_type_node,
4697 tree_cons (NULL_TREE, V16QI_type_node,
4700 tree v16qi_ftype_int_pvoid
4701 = build_function_type (V16QI_type_node,
4702 tree_cons (NULL_TREE, integer_type_node,
4703 tree_cons (NULL_TREE, pvoid_type_node,
4706 tree v4si_ftype_int_pvoid
4707 = build_function_type (V4SI_type_node,
4708 tree_cons (NULL_TREE, integer_type_node,
4709 tree_cons (NULL_TREE, pvoid_type_node,
4712 tree v8hi_ftype_int_pvoid
4713 = build_function_type (V8HI_type_node,
4714 tree_cons (NULL_TREE, integer_type_node,
4715 tree_cons (NULL_TREE, pvoid_type_node,
4718 tree int_ftype_v8hi_v8hi
4719 = build_function_type (integer_type_node,
4720 tree_cons (NULL_TREE, V8HI_type_node,
4721 tree_cons (NULL_TREE, V8HI_type_node,
4724 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4725 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4726 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4727 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4728 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4729 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4730 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4731 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4732 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4733 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4734 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4735 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4736 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4737 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4738 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4739 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4740 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4741 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4742 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4743 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4744 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4745 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4746 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4747 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4749 /* Add the simple ternary operators. */
4750 d = (struct builtin_description *) bdesc_3arg;
4751 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4754 enum machine_mode mode0, mode1, mode2, mode3;
4760 mode0 = insn_data[d->icode].operand[0].mode;
4761 mode1 = insn_data[d->icode].operand[1].mode;
4762 mode2 = insn_data[d->icode].operand[2].mode;
4763 mode3 = insn_data[d->icode].operand[3].mode;
4765 /* When all four are of the same mode. */
4766 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4771 type = v4si_ftype_v4si_v4si_v4si;
4774 type = v4sf_ftype_v4sf_v4sf_v4sf;
4777 type = v8hi_ftype_v8hi_v8hi_v8hi;
4780 type = v16qi_ftype_v16qi_v16qi_v16qi;
4786 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4791 type = v4si_ftype_v4si_v4si_v16qi;
4794 type = v4sf_ftype_v4sf_v4sf_v16qi;
4797 type = v8hi_ftype_v8hi_v8hi_v16qi;
4800 type = v16qi_ftype_v16qi_v16qi_v16qi;
4806 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4807 && mode3 == V4SImode)
4808 type = v4si_ftype_v16qi_v16qi_v4si;
4809 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4810 && mode3 == V4SImode)
4811 type = v4si_ftype_v8hi_v8hi_v4si;
4812 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4813 && mode3 == V4SImode)
4814 type = v4sf_ftype_v4sf_v4sf_v4si;
4816 /* vchar, vchar, vchar, 4 bit literal. */
4817 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4819 type = v16qi_ftype_v16qi_v16qi_char;
4821 /* vshort, vshort, vshort, 4 bit literal. */
4822 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4824 type = v8hi_ftype_v8hi_v8hi_char;
4826 /* vint, vint, vint, 4 bit literal. */
4827 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4829 type = v4si_ftype_v4si_v4si_char;
4831 /* vfloat, vfloat, vfloat, 4 bit literal. */
4832 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4834 type = v4sf_ftype_v4sf_v4sf_char;
4839 def_builtin (d->mask, d->name, type, d->code);
4842 /* Add the DST variants. */
4843 d = (struct builtin_description *) bdesc_dst;
4844 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4845 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4847 /* Initialize the predicates. */
4848 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4849 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4851 enum machine_mode mode1;
4854 mode1 = insn_data[dp->icode].operand[1].mode;
4859 type = int_ftype_int_v4si_v4si;
4862 type = int_ftype_int_v8hi_v8hi;
4865 type = int_ftype_int_v16qi_v16qi;
4868 type = int_ftype_int_v4sf_v4sf;
4874 def_builtin (dp->mask, dp->name, type, dp->code);
4877 /* Add the simple binary operators. */
4878 d = (struct builtin_description *) bdesc_2arg;
4879 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4881 enum machine_mode mode0, mode1, mode2;
4887 mode0 = insn_data[d->icode].operand[0].mode;
4888 mode1 = insn_data[d->icode].operand[1].mode;
4889 mode2 = insn_data[d->icode].operand[2].mode;
4891 /* When all three operands are of the same mode. */
4892 if (mode0 == mode1 && mode1 == mode2)
4897 type = v4sf_ftype_v4sf_v4sf;
4900 type = v4si_ftype_v4si_v4si;
4903 type = v16qi_ftype_v16qi_v16qi;
4906 type = v8hi_ftype_v8hi_v8hi;
4913 /* A few other combos we really don't want to do manually. */
4915 /* vint, vfloat, vfloat. */
4916 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4917 type = v4si_ftype_v4sf_v4sf;
4919 /* vshort, vchar, vchar. */
4920 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4921 type = v8hi_ftype_v16qi_v16qi;
4923 /* vint, vshort, vshort. */
4924 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4925 type = v4si_ftype_v8hi_v8hi;
4927 /* vshort, vint, vint. */
4928 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4929 type = v8hi_ftype_v4si_v4si;
4931 /* vchar, vshort, vshort. */
4932 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4933 type = v16qi_ftype_v8hi_v8hi;
4935 /* vint, vchar, vint. */
4936 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4937 type = v4si_ftype_v16qi_v4si;
4939 /* vint, vchar, vchar. */
4940 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4941 type = v4si_ftype_v16qi_v16qi;
4943 /* vint, vshort, vint. */
4944 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4945 type = v4si_ftype_v8hi_v4si;
4947 /* vint, vint, 5 bit literal. */
4948 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4949 type = v4si_ftype_v4si_char;
4951 /* vshort, vshort, 5 bit literal. */
4952 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4953 type = v8hi_ftype_v8hi_char;
4955 /* vchar, vchar, 5 bit literal. */
4956 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4957 type = v16qi_ftype_v16qi_char;
4959 /* vfloat, vint, 5 bit literal. */
4960 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4961 type = v4sf_ftype_v4si_char;
4963 /* vint, vfloat, 5 bit literal. */
4964 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4965 type = v4si_ftype_v4sf_char;
4968 else if (mode0 == SImode)
4973 type = int_ftype_v4si_v4si;
4976 type = int_ftype_v4sf_v4sf;
4979 type = int_ftype_v16qi_v16qi;
4982 type = int_ftype_v8hi_v8hi;
4992 def_builtin (d->mask, d->name, type, d->code);
4995 /* Initialize the abs* operators. */
4996 d = (struct builtin_description *) bdesc_abs;
4997 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4999 enum machine_mode mode0;
5002 mode0 = insn_data[d->icode].operand[0].mode;
5007 type = v4si_ftype_v4si;
5010 type = v8hi_ftype_v8hi;
5013 type = v16qi_ftype_v16qi;
5016 type = v4sf_ftype_v4sf;
5022 def_builtin (d->mask, d->name, type, d->code);
5025 /* Add the simple unary operators. */
5026 d = (struct builtin_description *) bdesc_1arg;
5027 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5029 enum machine_mode mode0, mode1;
5035 mode0 = insn_data[d->icode].operand[0].mode;
5036 mode1 = insn_data[d->icode].operand[1].mode;
5038 if (mode0 == V4SImode && mode1 == QImode)
5039 type = v4si_ftype_char;
5040 else if (mode0 == V8HImode && mode1 == QImode)
5041 type = v8hi_ftype_char;
5042 else if (mode0 == V16QImode && mode1 == QImode)
5043 type = v16qi_ftype_char;
5044 else if (mode0 == V4SFmode && mode1 == V4SFmode)
5045 type = v4sf_ftype_v4sf;
5046 else if (mode0 == V8HImode && mode1 == V16QImode)
5047 type = v8hi_ftype_v16qi;
5048 else if (mode0 == V4SImode && mode1 == V8HImode)
5049 type = v4si_ftype_v8hi;
5053 def_builtin (d->mask, d->name, type, d->code);
5058 /* Generate a memory reference for expand_block_move, copying volatile,
5059 and other bits from an original memory reference. */
5062 expand_block_move_mem (mode, addr, orig_mem)
5063 enum machine_mode mode;
5067 rtx mem = gen_rtx_MEM (mode, addr);
5069 MEM_COPY_ATTRIBUTES (mem, orig_mem);
5073 /* Expand a block move operation, and return 1 if successful. Return 0
5074 if we should let the compiler generate normal code.
5076 operands[0] is the destination
5077 operands[1] is the source
5078 operands[2] is the length
5079 operands[3] is the alignment */
5081 #define MAX_MOVE_REG 4
5084 expand_block_move (operands)
5087 rtx orig_dest = operands[0];
5088 rtx orig_src = operands[1];
5089 rtx bytes_rtx = operands[2];
5090 rtx align_rtx = operands[3];
5091 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5102 rtx stores[MAX_MOVE_REG];
5105 /* If this is not a fixed size move, just call memcpy */
5109 /* If this is not a fixed size alignment, abort */
5110 if (GET_CODE (align_rtx) != CONST_INT)
5112 align = INTVAL (align_rtx);
5114 /* Anything to move? */
5115 bytes = INTVAL (bytes_rtx);
5119 /* store_one_arg depends on expand_block_move to handle at least the size of
5120 reg_parm_stack_space. */
5121 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
5124 /* Move the address into scratch registers. */
5125 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
5126 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
5128 if (TARGET_STRING) /* string instructions are available */
5130 for ( ; bytes > 0; bytes -= move_bytes)
5132 if (bytes > 24 /* move up to 32 bytes at a time */
5140 && ! fixed_regs[12])
5142 move_bytes = (bytes > 32) ? 32 : bytes;
5143 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
5146 expand_block_move_mem (BLKmode,
5149 GEN_INT ((move_bytes == 32)
5153 else if (bytes > 16 /* move up to 24 bytes at a time */
5159 && ! fixed_regs[10])
5161 move_bytes = (bytes > 24) ? 24 : bytes;
5162 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
5165 expand_block_move_mem (BLKmode,
5168 GEN_INT (move_bytes),
5171 else if (bytes > 8 /* move up to 16 bytes at a time */
5177 move_bytes = (bytes > 16) ? 16 : bytes;
5178 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
5181 expand_block_move_mem (BLKmode,
5184 GEN_INT (move_bytes),
5187 else if (bytes >= 8 && TARGET_POWERPC64
5188 /* 64-bit loads and stores require word-aligned
5190 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5193 tmp_reg = gen_reg_rtx (DImode);
5194 emit_move_insn (tmp_reg,
5195 expand_block_move_mem (DImode,
5196 src_reg, orig_src));
5197 emit_move_insn (expand_block_move_mem (DImode,
5198 dest_reg, orig_dest),
5201 else if (bytes > 4 && !TARGET_POWERPC64)
5202 { /* move up to 8 bytes at a time */
5203 move_bytes = (bytes > 8) ? 8 : bytes;
5204 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
5207 expand_block_move_mem (BLKmode,
5210 GEN_INT (move_bytes),
5213 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5214 { /* move 4 bytes */
5216 tmp_reg = gen_reg_rtx (SImode);
5217 emit_move_insn (tmp_reg,
5218 expand_block_move_mem (SImode,
5219 src_reg, orig_src));
5220 emit_move_insn (expand_block_move_mem (SImode,
5221 dest_reg, orig_dest),
5224 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5225 { /* move 2 bytes */
5227 tmp_reg = gen_reg_rtx (HImode);
5228 emit_move_insn (tmp_reg,
5229 expand_block_move_mem (HImode,
5230 src_reg, orig_src));
5231 emit_move_insn (expand_block_move_mem (HImode,
5232 dest_reg, orig_dest),
5235 else if (bytes == 1) /* move 1 byte */
5238 tmp_reg = gen_reg_rtx (QImode);
5239 emit_move_insn (tmp_reg,
5240 expand_block_move_mem (QImode,
5241 src_reg, orig_src));
5242 emit_move_insn (expand_block_move_mem (QImode,
5243 dest_reg, orig_dest),
5247 { /* move up to 4 bytes at a time */
5248 move_bytes = (bytes > 4) ? 4 : bytes;
5249 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
5252 expand_block_move_mem (BLKmode,
5255 GEN_INT (move_bytes),
5259 if (bytes > move_bytes)
5261 if (! TARGET_POWERPC64)
5263 emit_insn (gen_addsi3 (src_reg, src_reg,
5264 GEN_INT (move_bytes)));
5265 emit_insn (gen_addsi3 (dest_reg, dest_reg,
5266 GEN_INT (move_bytes)));
5270 emit_insn (gen_adddi3 (src_reg, src_reg,
5271 GEN_INT (move_bytes)));
5272 emit_insn (gen_adddi3 (dest_reg, dest_reg,
5273 GEN_INT (move_bytes)));
5279 else /* string instructions not available */
5281 num_reg = offset = 0;
5282 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
5284 /* Calculate the correct offset for src/dest */
5288 dest_addr = dest_reg;
5292 src_addr = plus_constant (src_reg, offset);
5293 dest_addr = plus_constant (dest_reg, offset);
5296 /* Generate the appropriate load and store, saving the stores
5298 if (bytes >= 8 && TARGET_POWERPC64
5299 /* 64-bit loads and stores require word-aligned
5301 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5304 tmp_reg = gen_reg_rtx (DImode);
5305 emit_insn (gen_movdi (tmp_reg,
5306 expand_block_move_mem (DImode,
5309 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
5314 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5317 tmp_reg = gen_reg_rtx (SImode);
5318 emit_insn (gen_movsi (tmp_reg,
5319 expand_block_move_mem (SImode,
5322 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5327 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5330 tmp_reg = gen_reg_rtx (HImode);
5331 emit_insn (gen_movhi (tmp_reg,
5332 expand_block_move_mem (HImode,
5335 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
5343 tmp_reg = gen_reg_rtx (QImode);
5344 emit_insn (gen_movqi (tmp_reg,
5345 expand_block_move_mem (QImode,
5348 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
5354 if (num_reg >= MAX_MOVE_REG)
5356 for (i = 0; i < num_reg; i++)
5357 emit_insn (stores[i]);
5362 for (i = 0; i < num_reg; i++)
5363 emit_insn (stores[i]);
5370 /* Return 1 if OP is a load multiple operation. It is known to be a
5371 PARALLEL and the first section will be tested. */
5374 load_multiple_operation (op, mode)
5376 enum machine_mode mode ATTRIBUTE_UNUSED;
5378 int count = XVECLEN (op, 0);
5379 unsigned int dest_regno;
5383 /* Perform a quick check so we don't blow up below. */
5385 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5386 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5387 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5390 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5391 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5393 for (i = 1; i < count; i++)
5395 rtx elt = XVECEXP (op, 0, i);
5397 if (GET_CODE (elt) != SET
5398 || GET_CODE (SET_DEST (elt)) != REG
5399 || GET_MODE (SET_DEST (elt)) != SImode
5400 || REGNO (SET_DEST (elt)) != dest_regno + i
5401 || GET_CODE (SET_SRC (elt)) != MEM
5402 || GET_MODE (SET_SRC (elt)) != SImode
5403 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5404 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5405 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5406 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5413 /* Similar, but tests for store multiple. Here, the second vector element
5414 is a CLOBBER. It will be tested later. */
5417 store_multiple_operation (op, mode)
5419 enum machine_mode mode ATTRIBUTE_UNUSED;
5421 int count = XVECLEN (op, 0) - 1;
5422 unsigned int src_regno;
5426 /* Perform a quick check so we don't blow up below. */
5428 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5429 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5430 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5433 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5434 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5436 for (i = 1; i < count; i++)
5438 rtx elt = XVECEXP (op, 0, i + 1);
5440 if (GET_CODE (elt) != SET
5441 || GET_CODE (SET_SRC (elt)) != REG
5442 || GET_MODE (SET_SRC (elt)) != SImode
5443 || REGNO (SET_SRC (elt)) != src_regno + i
5444 || GET_CODE (SET_DEST (elt)) != MEM
5445 || GET_MODE (SET_DEST (elt)) != SImode
5446 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5447 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5448 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5449 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5456 /* Return 1 for a parallel vrsave operation. */
5459 vrsave_operation (op, mode)
5461 enum machine_mode mode ATTRIBUTE_UNUSED;
5463 int count = XVECLEN (op, 0);
5464 unsigned int dest_regno, src_regno;
5468 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5469 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5470 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5473 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5474 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5476 if (dest_regno != VRSAVE_REGNO
5477 && src_regno != VRSAVE_REGNO)
5480 for (i = 1; i < count; i++)
5482 rtx elt = XVECEXP (op, 0, i);
5484 if (GET_CODE (elt) != CLOBBER
5485 && GET_CODE (elt) != SET)
5492 /* Return 1 for an PARALLEL suitable for mtcrf. */
5495 mtcrf_operation (op, mode)
5497 enum machine_mode mode ATTRIBUTE_UNUSED;
5499 int count = XVECLEN (op, 0);
5503 /* Perform a quick check so we don't blow up below. */
5505 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5506 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5507 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5509 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5511 if (GET_CODE (src_reg) != REG
5512 || GET_MODE (src_reg) != SImode
5513 || ! INT_REGNO_P (REGNO (src_reg)))
5516 for (i = 0; i < count; i++)
5518 rtx exp = XVECEXP (op, 0, i);
5522 if (GET_CODE (exp) != SET
5523 || GET_CODE (SET_DEST (exp)) != REG
5524 || GET_MODE (SET_DEST (exp)) != CCmode
5525 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5527 unspec = SET_SRC (exp);
5528 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5530 if (GET_CODE (unspec) != UNSPEC
5531 || XINT (unspec, 1) != 20
5532 || XVECLEN (unspec, 0) != 2
5533 || XVECEXP (unspec, 0, 0) != src_reg
5534 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5535 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5541 /* Return 1 for an PARALLEL suitable for lmw. */
5544 lmw_operation (op, mode)
5546 enum machine_mode mode ATTRIBUTE_UNUSED;
5548 int count = XVECLEN (op, 0);
5549 unsigned int dest_regno;
5551 unsigned int base_regno;
5552 HOST_WIDE_INT offset;
5555 /* Perform a quick check so we don't blow up below. */
5557 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5558 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5559 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5562 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5563 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5566 || count != 32 - (int) dest_regno)
5569 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5572 base_regno = REGNO (src_addr);
5573 if (base_regno == 0)
5576 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5578 offset = INTVAL (XEXP (src_addr, 1));
5579 base_regno = REGNO (XEXP (src_addr, 0));
5584 for (i = 0; i < count; i++)
5586 rtx elt = XVECEXP (op, 0, i);
5589 HOST_WIDE_INT newoffset;
5591 if (GET_CODE (elt) != SET
5592 || GET_CODE (SET_DEST (elt)) != REG
5593 || GET_MODE (SET_DEST (elt)) != SImode
5594 || REGNO (SET_DEST (elt)) != dest_regno + i
5595 || GET_CODE (SET_SRC (elt)) != MEM
5596 || GET_MODE (SET_SRC (elt)) != SImode)
5598 newaddr = XEXP (SET_SRC (elt), 0);
5599 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5604 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5606 addr_reg = XEXP (newaddr, 0);
5607 newoffset = INTVAL (XEXP (newaddr, 1));
5611 if (REGNO (addr_reg) != base_regno
5612 || newoffset != offset + 4 * i)
5619 /* Return 1 for an PARALLEL suitable for stmw. */
5622 stmw_operation (op, mode)
5624 enum machine_mode mode ATTRIBUTE_UNUSED;
5626 int count = XVECLEN (op, 0);
5627 unsigned int src_regno;
5629 unsigned int base_regno;
5630 HOST_WIDE_INT offset;
5633 /* Perform a quick check so we don't blow up below. */
5635 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5636 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5637 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5640 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5641 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5644 || count != 32 - (int) src_regno)
5647 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5650 base_regno = REGNO (dest_addr);
5651 if (base_regno == 0)
5654 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5656 offset = INTVAL (XEXP (dest_addr, 1));
5657 base_regno = REGNO (XEXP (dest_addr, 0));
5662 for (i = 0; i < count; i++)
5664 rtx elt = XVECEXP (op, 0, i);
5667 HOST_WIDE_INT newoffset;
5669 if (GET_CODE (elt) != SET
5670 || GET_CODE (SET_SRC (elt)) != REG
5671 || GET_MODE (SET_SRC (elt)) != SImode
5672 || REGNO (SET_SRC (elt)) != src_regno + i
5673 || GET_CODE (SET_DEST (elt)) != MEM
5674 || GET_MODE (SET_DEST (elt)) != SImode)
5676 newaddr = XEXP (SET_DEST (elt), 0);
5677 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5682 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5684 addr_reg = XEXP (newaddr, 0);
5685 newoffset = INTVAL (XEXP (newaddr, 1));
5689 if (REGNO (addr_reg) != base_regno
5690 || newoffset != offset + 4 * i)
5697 /* A validation routine: say whether CODE, a condition code, and MODE
5698 match. The other alternatives either don't make sense or should
5699 never be generated. */
5702 validate_condition_mode (code, mode)
5704 enum machine_mode mode;
5706 if (GET_RTX_CLASS (code) != '<'
5707 || GET_MODE_CLASS (mode) != MODE_CC)
5710 /* These don't make sense. */
5711 if ((code == GT || code == LT || code == GE || code == LE)
5712 && mode == CCUNSmode)
5715 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5716 && mode != CCUNSmode)
5719 if (mode != CCFPmode
5720 && (code == ORDERED || code == UNORDERED
5721 || code == UNEQ || code == LTGT
5722 || code == UNGT || code == UNLT
5723 || code == UNGE || code == UNLE))
5726 /* These should never be generated except for
5727 flag_unsafe_math_optimizations. */
5728 if (mode == CCFPmode
5729 && ! flag_unsafe_math_optimizations
5730 && (code == LE || code == GE
5731 || code == UNEQ || code == LTGT
5732 || code == UNGT || code == UNLT))
5735 /* These are invalid; the information is not there. */
5736 if (mode == CCEQmode
5737 && code != EQ && code != NE)
5741 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5742 We only check the opcode against the mode of the CC value here. */
5745 branch_comparison_operator (op, mode)
5747 enum machine_mode mode ATTRIBUTE_UNUSED;
5749 enum rtx_code code = GET_CODE (op);
5750 enum machine_mode cc_mode;
5752 if (GET_RTX_CLASS (code) != '<')
5755 cc_mode = GET_MODE (XEXP (op, 0));
5756 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5759 validate_condition_mode (code, cc_mode);
5764 /* Return 1 if OP is a comparison operation that is valid for a branch
5765 insn and which is true if the corresponding bit in the CC register
5769 branch_positive_comparison_operator (op, mode)
5771 enum machine_mode mode;
5775 if (! branch_comparison_operator (op, mode))
5778 code = GET_CODE (op);
5779 return (code == EQ || code == LT || code == GT
5780 || code == LTU || code == GTU
5781 || code == UNORDERED);
5784 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5785 We check the opcode against the mode of the CC value and disallow EQ or
5786 NE comparisons for integers. */
5789 scc_comparison_operator (op, mode)
5791 enum machine_mode mode;
5793 enum rtx_code code = GET_CODE (op);
5794 enum machine_mode cc_mode;
5796 if (GET_MODE (op) != mode && mode != VOIDmode)
5799 if (GET_RTX_CLASS (code) != '<')
5802 cc_mode = GET_MODE (XEXP (op, 0));
5803 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5806 validate_condition_mode (code, cc_mode);
5808 if (code == NE && cc_mode != CCFPmode)
5815 trap_comparison_operator (op, mode)
5817 enum machine_mode mode;
5819 if (mode != VOIDmode && mode != GET_MODE (op))
5821 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5825 boolean_operator (op, mode)
5827 enum machine_mode mode ATTRIBUTE_UNUSED;
5829 enum rtx_code code = GET_CODE (op);
5830 return (code == AND || code == IOR || code == XOR);
5834 boolean_or_operator (op, mode)
5836 enum machine_mode mode ATTRIBUTE_UNUSED;
5838 enum rtx_code code = GET_CODE (op);
5839 return (code == IOR || code == XOR);
5843 min_max_operator (op, mode)
5845 enum machine_mode mode ATTRIBUTE_UNUSED;
5847 enum rtx_code code = GET_CODE (op);
5848 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5851 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5852 mask required to convert the result of a rotate insn into a shift
5853 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
5856 includes_lshift_p (shiftop, andop)
5860 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5862 shift_mask <<= INTVAL (shiftop);
5864 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5867 /* Similar, but for right shift. */
5870 includes_rshift_p (shiftop, andop)
5874 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5876 shift_mask >>= INTVAL (shiftop);
5878 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5881 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5882 to perform a left shift. It must have exactly SHIFTOP least
5883 signifigant 0's, then one or more 1's, then zero or more 0's. */
5886 includes_rldic_lshift_p (shiftop, andop)
5890 if (GET_CODE (andop) == CONST_INT)
5892 HOST_WIDE_INT c, lsb, shift_mask;
5895 if (c == 0 || c == ~0)
5899 shift_mask <<= INTVAL (shiftop);
5901 /* Find the least signifigant one bit. */
5904 /* It must coincide with the LSB of the shift mask. */
5905 if (-lsb != shift_mask)
5908 /* Invert to look for the next transition (if any). */
5911 /* Remove the low group of ones (originally low group of zeros). */
5914 /* Again find the lsb, and check we have all 1's above. */
5918 else if (GET_CODE (andop) == CONST_DOUBLE
5919 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5921 HOST_WIDE_INT low, high, lsb;
5922 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5924 low = CONST_DOUBLE_LOW (andop);
5925 if (HOST_BITS_PER_WIDE_INT < 64)
5926 high = CONST_DOUBLE_HIGH (andop);
5928 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5929 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
5932 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5934 shift_mask_high = ~0;
5935 if (INTVAL (shiftop) > 32)
5936 shift_mask_high <<= INTVAL (shiftop) - 32;
5940 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5947 return high == -lsb;
5950 shift_mask_low = ~0;
5951 shift_mask_low <<= INTVAL (shiftop);
5955 if (-lsb != shift_mask_low)
5958 if (HOST_BITS_PER_WIDE_INT < 64)
5963 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5966 return high == -lsb;
5970 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5976 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5977 to perform a left shift. It must have SHIFTOP or more least
5978 signifigant 0's, with the remainder of the word 1's. */
5981 includes_rldicr_lshift_p (shiftop, andop)
5985 if (GET_CODE (andop) == CONST_INT)
5987 HOST_WIDE_INT c, lsb, shift_mask;
5990 shift_mask <<= INTVAL (shiftop);
5993 /* Find the least signifigant one bit. */
5996 /* It must be covered by the shift mask.
5997 This test also rejects c == 0. */
5998 if ((lsb & shift_mask) == 0)
6001 /* Check we have all 1's above the transition, and reject all 1's. */
6002 return c == -lsb && lsb != 1;
6004 else if (GET_CODE (andop) == CONST_DOUBLE
6005 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
6007 HOST_WIDE_INT low, lsb, shift_mask_low;
6009 low = CONST_DOUBLE_LOW (andop);
6011 if (HOST_BITS_PER_WIDE_INT < 64)
6013 HOST_WIDE_INT high, shift_mask_high;
6015 high = CONST_DOUBLE_HIGH (andop);
6019 shift_mask_high = ~0;
6020 if (INTVAL (shiftop) > 32)
6021 shift_mask_high <<= INTVAL (shiftop) - 32;
6025 if ((lsb & shift_mask_high) == 0)
6028 return high == -lsb;
6034 shift_mask_low = ~0;
6035 shift_mask_low <<= INTVAL (shiftop);
6039 if ((lsb & shift_mask_low) == 0)
6042 return low == -lsb && lsb != 1;
6048 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
6049 for lfq and stfq insns.
6051 Note reg1 and reg2 *must* be hard registers. To be sure we will
6052 abort if we are passed pseudo registers. */
6055 registers_ok_for_quad_peep (reg1, reg2)
6058 /* We might have been passed a SUBREG. */
6059 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
6062 return (REGNO (reg1) == REGNO (reg2) - 1);
6065 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
6066 addr1 and addr2 must be in consecutive memory locations
6067 (addr2 == addr1 + 8). */
6070 addrs_ok_for_quad_peep (addr1, addr2)
6077 /* Extract an offset (if used) from the first addr. */
6078 if (GET_CODE (addr1) == PLUS)
6080 /* If not a REG, return zero. */
6081 if (GET_CODE (XEXP (addr1, 0)) != REG)
6085 reg1 = REGNO (XEXP (addr1, 0));
6086 /* The offset must be constant! */
6087 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
6089 offset1 = INTVAL (XEXP (addr1, 1));
6092 else if (GET_CODE (addr1) != REG)
6096 reg1 = REGNO (addr1);
6097 /* This was a simple (mem (reg)) expression. Offset is 0. */
6101 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
6102 if (GET_CODE (addr2) != PLUS)
6105 if (GET_CODE (XEXP (addr2, 0)) != REG
6106 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
6109 if (reg1 != REGNO (XEXP (addr2, 0)))
6112 /* The offset for the second addr must be 8 more than the first addr. */
6113 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
6116 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
6121 /* Return the register class of a scratch register needed to copy IN into
6122 or out of a register in CLASS in MODE. If it can be done directly,
6123 NO_REGS is returned. */
6126 secondary_reload_class (class, mode, in)
6127 enum reg_class class;
6128 enum machine_mode mode ATTRIBUTE_UNUSED;
6133 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
6135 /* We cannot copy a symbolic operand directly into anything
6136 other than BASE_REGS for TARGET_ELF. So indicate that a
6137 register from BASE_REGS is needed as an intermediate
6140 On Darwin, pic addresses require a load from memory, which
6141 needs a base register. */
6142 if (class != BASE_REGS
6143 && (GET_CODE (in) == SYMBOL_REF
6144 || GET_CODE (in) == HIGH
6145 || GET_CODE (in) == LABEL_REF
6146 || GET_CODE (in) == CONST))
6150 if (GET_CODE (in) == REG)
6153 if (regno >= FIRST_PSEUDO_REGISTER)
6155 regno = true_regnum (in);
6156 if (regno >= FIRST_PSEUDO_REGISTER)
6160 else if (GET_CODE (in) == SUBREG)
6162 regno = true_regnum (in);
6163 if (regno >= FIRST_PSEUDO_REGISTER)
6169 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
6171 if (class == GENERAL_REGS || class == BASE_REGS
6172 || (regno >= 0 && INT_REGNO_P (regno)))
6175 /* Constants, memory, and FP registers can go into FP registers. */
6176 if ((regno == -1 || FP_REGNO_P (regno))
6177 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
6180 /* Memory, and AltiVec registers can go into AltiVec registers. */
6181 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
6182 && class == ALTIVEC_REGS)
6185 /* We can copy among the CR registers. */
6186 if ((class == CR_REGS || class == CR0_REGS)
6187 && regno >= 0 && CR_REGNO_P (regno))
6190 /* Otherwise, we need GENERAL_REGS. */
6191 return GENERAL_REGS;
6194 /* Given a comparison operation, return the bit number in CCR to test. We
6195 know this is a valid comparison.
6197 SCC_P is 1 if this is for an scc. That means that %D will have been
6198 used instead of %C, so the bits will be in different places.
6200 Return -1 if OP isn't a valid comparison for some reason. */
6207 enum rtx_code code = GET_CODE (op);
6208 enum machine_mode cc_mode;
6213 if (GET_RTX_CLASS (code) != '<')
6218 if (GET_CODE (reg) != REG
6219 || ! CR_REGNO_P (REGNO (reg)))
6222 cc_mode = GET_MODE (reg);
6223 cc_regnum = REGNO (reg);
6224 base_bit = 4 * (cc_regnum - CR0_REGNO);
6226 validate_condition_mode (code, cc_mode);
6231 return scc_p ? base_bit + 3 : base_bit + 2;
6233 return base_bit + 2;
6234 case GT: case GTU: case UNLE:
6235 return base_bit + 1;
6236 case LT: case LTU: case UNGE:
6238 case ORDERED: case UNORDERED:
6239 return base_bit + 3;
6242 /* If scc, we will have done a cror to put the bit in the
6243 unordered position. So test that bit. For integer, this is ! LT
6244 unless this is an scc insn. */
6245 return scc_p ? base_bit + 3 : base_bit;
6248 return scc_p ? base_bit + 3 : base_bit + 1;
6255 /* Return the GOT register. */
6258 rs6000_got_register (value)
6259 rtx value ATTRIBUTE_UNUSED;
6261 /* The second flow pass currently (June 1999) can't update
6262 regs_ever_live without disturbing other parts of the compiler, so
6263 update it here to make the prolog/epilogue code happy. */
6264 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
6265 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
6267 current_function_uses_pic_offset_table = 1;
6269 return pic_offset_table_rtx;
6272 /* Function to init struct machine_function.
6273 This will be called, via a pointer variable,
6274 from push_function_context. */
6276 static struct machine_function *
6277 rs6000_init_machine_status ()
6279 return ggc_alloc_cleared (sizeof (machine_function));
6282 /* Print an operand. Recognize special options, documented below. */
6285 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6286 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6288 #define SMALL_DATA_RELOC "sda21"
6289 #define SMALL_DATA_REG 0
6293 print_operand (file, x, code)
6301 /* These macros test for integers and extract the low-order bits. */
6303 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6304 && GET_MODE (X) == VOIDmode)
6306 #define INT_LOWPART(X) \
6307 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6312 /* Write out an instruction after the call which may be replaced
6313 with glue code by the loader. This depends on the AIX version. */
6314 asm_fprintf (file, RS6000_CALL_GLUE);
6317 /* %a is output_address. */
6320 /* If X is a constant integer whose low-order 5 bits are zero,
6321 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6322 in the AIX assembler where "sri" with a zero shift count
6323 writes a trash instruction. */
6324 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6331 /* If constant, low-order 16 bits of constant, unsigned.
6332 Otherwise, write normally. */
6334 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6336 print_operand (file, x, 0);
6340 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6341 for 64-bit mask direction. */
6342 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6345 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6349 /* There used to be a comment for 'C' reading "This is an
6350 optional cror needed for certain floating-point
6351 comparisons. Otherwise write nothing." */
6353 /* Similar, except that this is for an scc, so we must be able to
6354 encode the test in a single bit that is one. We do the above
6355 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6356 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6357 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6359 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6361 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6363 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6366 else if (GET_CODE (x) == NE)
6368 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6370 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6371 base_bit + 2, base_bit + 2);
6376 /* X is a CR register. Print the number of the EQ bit of the CR */
6377 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6378 output_operand_lossage ("invalid %%E value");
6380 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6384 /* X is a CR register. Print the shift count needed to move it
6385 to the high-order four bits. */
6386 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6387 output_operand_lossage ("invalid %%f value");
6389 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6393 /* Similar, but print the count for the rotate in the opposite
6395 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6396 output_operand_lossage ("invalid %%F value");
6398 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6402 /* X is a constant integer. If it is negative, print "m",
6403 otherwise print "z". This is to make a aze or ame insn. */
6404 if (GET_CODE (x) != CONST_INT)
6405 output_operand_lossage ("invalid %%G value");
6406 else if (INTVAL (x) >= 0)
6413 /* If constant, output low-order five bits. Otherwise, write
6416 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6418 print_operand (file, x, 0);
6422 /* If constant, output low-order six bits. Otherwise, write
6425 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6427 print_operand (file, x, 0);
6431 /* Print `i' if this is a constant, else nothing. */
6437 /* Write the bit number in CCR for jump. */
6440 output_operand_lossage ("invalid %%j code");
6442 fprintf (file, "%d", i);
6446 /* Similar, but add one for shift count in rlinm for scc and pass
6447 scc flag to `ccr_bit'. */
6450 output_operand_lossage ("invalid %%J code");
6452 /* If we want bit 31, write a shift count of zero, not 32. */
6453 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6457 /* X must be a constant. Write the 1's complement of the
6460 output_operand_lossage ("invalid %%k value");
6462 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6466 /* X must be a symbolic constant on ELF. Write an
6467 expression suitable for an 'addi' that adds in the low 16
6469 if (GET_CODE (x) != CONST)
6471 print_operand_address (file, x);
6476 if (GET_CODE (XEXP (x, 0)) != PLUS
6477 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6478 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6479 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6480 output_operand_lossage ("invalid %%K value");
6481 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6483 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6487 /* %l is output_asm_label. */
6490 /* Write second word of DImode or DFmode reference. Works on register
6491 or non-indexed memory only. */
6492 if (GET_CODE (x) == REG)
6493 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6494 else if (GET_CODE (x) == MEM)
6496 /* Handle possible auto-increment. Since it is pre-increment and
6497 we have already done it, we can just use an offset of word. */
6498 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6499 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6500 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6503 output_address (XEXP (adjust_address_nv (x, SImode,
6507 if (small_data_operand (x, GET_MODE (x)))
6508 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6509 reg_names[SMALL_DATA_REG]);
6514 /* MB value for a mask operand. */
6515 if (! mask_operand (x, SImode))
6516 output_operand_lossage ("invalid %%m value");
6518 val = INT_LOWPART (x);
6520 /* If the high bit is set and the low bit is not, the value is zero.
6521 If the high bit is zero, the value is the first 1 bit we find from
6523 if ((val & 0x80000000) && ((val & 1) == 0))
6528 else if ((val & 0x80000000) == 0)
6530 for (i = 1; i < 32; i++)
6531 if ((val <<= 1) & 0x80000000)
6533 fprintf (file, "%d", i);
6537 /* Otherwise, look for the first 0 bit from the right. The result is its
6538 number plus 1. We know the low-order bit is one. */
6539 for (i = 0; i < 32; i++)
6540 if (((val >>= 1) & 1) == 0)
6543 /* If we ended in ...01, i would be 0. The correct value is 31, so
6545 fprintf (file, "%d", 31 - i);
6549 /* ME value for a mask operand. */
6550 if (! mask_operand (x, SImode))
6551 output_operand_lossage ("invalid %%M value");
6553 val = INT_LOWPART (x);
6555 /* If the low bit is set and the high bit is not, the value is 31.
6556 If the low bit is zero, the value is the first 1 bit we find from
6558 if ((val & 1) && ((val & 0x80000000) == 0))
6563 else if ((val & 1) == 0)
6565 for (i = 0; i < 32; i++)
6566 if ((val >>= 1) & 1)
6569 /* If we had ....10, i would be 0. The result should be
6570 30, so we need 30 - i. */
6571 fprintf (file, "%d", 30 - i);
6575 /* Otherwise, look for the first 0 bit from the left. The result is its
6576 number minus 1. We know the high-order bit is one. */
6577 for (i = 0; i < 32; i++)
6578 if (((val <<= 1) & 0x80000000) == 0)
6581 fprintf (file, "%d", i);
6584 /* %n outputs the negative of its operand. */
6587 /* Write the number of elements in the vector times 4. */
6588 if (GET_CODE (x) != PARALLEL)
6589 output_operand_lossage ("invalid %%N value");
6591 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6595 /* Similar, but subtract 1 first. */
6596 if (GET_CODE (x) != PARALLEL)
6597 output_operand_lossage ("invalid %%O value");
6599 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6603 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6605 || INT_LOWPART (x) < 0
6606 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6607 output_operand_lossage ("invalid %%p value");
6609 fprintf (file, "%d", i);
6613 /* The operand must be an indirect memory reference. The result
6614 is the register number. */
6615 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6616 || REGNO (XEXP (x, 0)) >= 32)
6617 output_operand_lossage ("invalid %%P value");
6619 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6623 /* This outputs the logical code corresponding to a boolean
6624 expression. The expression may have one or both operands
6625 negated (if one, only the first one). For condition register
6626 logical operations, it will also treat the negated
6627 CR codes as NOTs, but not handle NOTs of them. */
6629 const char *const *t = 0;
6631 enum rtx_code code = GET_CODE (x);
6632 static const char * const tbl[3][3] = {
6633 { "and", "andc", "nor" },
6634 { "or", "orc", "nand" },
6635 { "xor", "eqv", "xor" } };
6639 else if (code == IOR)
6641 else if (code == XOR)
6644 output_operand_lossage ("invalid %%q value");
6646 if (GET_CODE (XEXP (x, 0)) != NOT)
6650 if (GET_CODE (XEXP (x, 1)) == NOT)
6661 /* X is a CR register. Print the mask for `mtcrf'. */
6662 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6663 output_operand_lossage ("invalid %%R value");
6665 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6669 /* Low 5 bits of 32 - value */
6671 output_operand_lossage ("invalid %%s value");
6673 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6677 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6678 CONST_INT 32-bit mask is considered sign-extended so any
6679 transition must occur within the CONST_INT, not on the boundary. */
6680 if (! mask64_operand (x, DImode))
6681 output_operand_lossage ("invalid %%S value");
6683 val = INT_LOWPART (x);
6685 if (val & 1) /* Clear Left */
6687 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6688 if (!((val >>= 1) & 1))
6691 #if HOST_BITS_PER_WIDE_INT == 32
6692 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6694 val = CONST_DOUBLE_HIGH (x);
6699 for (i = 32; i < 64; i++)
6700 if (!((val >>= 1) & 1))
6704 /* i = index of last set bit from right
6705 mask begins at 63 - i from left */
6707 output_operand_lossage ("%%S computed all 1's mask");
6709 fprintf (file, "%d", 63 - i);
6712 else /* Clear Right */
6714 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6715 if ((val >>= 1) & 1)
6718 #if HOST_BITS_PER_WIDE_INT == 32
6719 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6721 val = CONST_DOUBLE_HIGH (x);
6723 if (val == (HOST_WIDE_INT) -1)
6726 for (i = 32; i < 64; i++)
6727 if ((val >>= 1) & 1)
6731 /* i = index of last clear bit from right
6732 mask ends at 62 - i from left */
6734 output_operand_lossage ("%%S computed all 0's mask");
6736 fprintf (file, "%d", 62 - i);
6741 /* Print the symbolic name of a branch target register. */
6742 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6743 && REGNO (x) != COUNT_REGISTER_REGNUM))
6744 output_operand_lossage ("invalid %%T value");
6745 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6746 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6748 fputs ("ctr", file);
6752 /* High-order 16 bits of constant for use in unsigned operand. */
6754 output_operand_lossage ("invalid %%u value");
6756 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6757 (INT_LOWPART (x) >> 16) & 0xffff);
6761 /* High-order 16 bits of constant for use in signed operand. */
6763 output_operand_lossage ("invalid %%v value");
6765 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6766 (INT_LOWPART (x) >> 16) & 0xffff);
6770 /* Print `u' if this has an auto-increment or auto-decrement. */
6771 if (GET_CODE (x) == MEM
6772 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6773 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6778 /* Print the trap code for this operand. */
6779 switch (GET_CODE (x))
6782 fputs ("eq", file); /* 4 */
6785 fputs ("ne", file); /* 24 */
6788 fputs ("lt", file); /* 16 */
6791 fputs ("le", file); /* 20 */
6794 fputs ("gt", file); /* 8 */
6797 fputs ("ge", file); /* 12 */
6800 fputs ("llt", file); /* 2 */
6803 fputs ("lle", file); /* 6 */
6806 fputs ("lgt", file); /* 1 */
6809 fputs ("lge", file); /* 5 */
6817 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6820 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6821 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6823 print_operand (file, x, 0);
6827 /* MB value for a PowerPC64 rldic operand. */
6828 val = (GET_CODE (x) == CONST_INT
6829 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6834 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6835 if ((val <<= 1) < 0)
6838 #if HOST_BITS_PER_WIDE_INT == 32
6839 if (GET_CODE (x) == CONST_INT && i >= 0)
6840 i += 32; /* zero-extend high-part was all 0's */
6841 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6843 val = CONST_DOUBLE_LOW (x);
6850 for ( ; i < 64; i++)
6851 if ((val <<= 1) < 0)
6856 fprintf (file, "%d", i + 1);
6860 if (GET_CODE (x) == MEM
6861 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6866 /* Like 'L', for third word of TImode */
6867 if (GET_CODE (x) == REG)
6868 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6869 else if (GET_CODE (x) == MEM)
6871 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6872 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6873 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6875 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6876 if (small_data_operand (x, GET_MODE (x)))
6877 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6878 reg_names[SMALL_DATA_REG]);
6883 /* X is a SYMBOL_REF. Write out the name preceded by a
6884 period and without any trailing data in brackets. Used for function
6885 names. If we are configured for System V (or the embedded ABI) on
6886 the PowerPC, do not emit the period, since those systems do not use
6887 TOCs and the like. */
6888 if (GET_CODE (x) != SYMBOL_REF)
6891 if (XSTR (x, 0)[0] != '.')
6893 switch (DEFAULT_ABI)
6903 case ABI_AIX_NODESC:
6909 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6911 assemble_name (file, XSTR (x, 0));
6916 /* Like 'L', for last word of TImode. */
6917 if (GET_CODE (x) == REG)
6918 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6919 else if (GET_CODE (x) == MEM)
6921 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6922 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6923 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6925 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6926 if (small_data_operand (x, GET_MODE (x)))
6927 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6928 reg_names[SMALL_DATA_REG]);
6932 /* Print AltiVec memory operand. */
6937 if (GET_CODE (x) != MEM)
6942 if (GET_CODE (tmp) == REG)
6943 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6944 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6946 if (REGNO (XEXP (tmp, 0)) == 0)
6947 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6948 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6950 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6951 reg_names[ REGNO (XEXP (tmp, 1)) ]);
6959 if (GET_CODE (x) == REG)
6960 fprintf (file, "%s", reg_names[REGNO (x)]);
6961 else if (GET_CODE (x) == MEM)
6963 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6964 know the width from the mode. */
6965 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6966 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6967 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6968 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6969 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6970 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6972 output_address (XEXP (x, 0));
6975 output_addr_const (file, x);
6979 output_operand_lossage ("invalid %%xn code");
6983 /* Print the address of an operand. */
6986 print_operand_address (file, x)
6990 if (GET_CODE (x) == REG)
6991 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6992 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6993 || GET_CODE (x) == LABEL_REF)
6995 output_addr_const (file, x);
6996 if (small_data_operand (x, GET_MODE (x)))
6997 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6998 reg_names[SMALL_DATA_REG]);
6999 else if (TARGET_TOC)
7002 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
7004 if (REGNO (XEXP (x, 0)) == 0)
7005 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
7006 reg_names[ REGNO (XEXP (x, 0)) ]);
7008 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
7009 reg_names[ REGNO (XEXP (x, 1)) ]);
7011 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
7013 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
7014 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
7017 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
7018 && CONSTANT_P (XEXP (x, 1)))
7020 output_addr_const (file, XEXP (x, 1));
7021 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
7025 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
7026 && CONSTANT_P (XEXP (x, 1)))
7028 fprintf (file, "lo16(");
7029 output_addr_const (file, XEXP (x, 1));
7030 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
7033 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
7035 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
7037 rtx contains_minus = XEXP (x, 1);
7041 /* Find the (minus (sym) (toc)) buried in X, and temporarily
7042 turn it into (sym) for output_addr_const. */
7043 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
7044 contains_minus = XEXP (contains_minus, 0);
7046 minus = XEXP (contains_minus, 0);
7047 symref = XEXP (minus, 0);
7048 XEXP (contains_minus, 0) = symref;
7053 name = XSTR (symref, 0);
7054 newname = alloca (strlen (name) + sizeof ("@toc"));
7055 strcpy (newname, name);
7056 strcat (newname, "@toc");
7057 XSTR (symref, 0) = newname;
7059 output_addr_const (file, XEXP (x, 1));
7061 XSTR (symref, 0) = name;
7062 XEXP (contains_minus, 0) = minus;
7065 output_addr_const (file, XEXP (x, 1));
7067 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
7073 /* Target hook for assembling integer objects. The powerpc version has
7074 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
7075 is defined. It also needs to handle DI-mode objects on 64-bit
7079 rs6000_assemble_integer (x, size, aligned_p)
7084 #ifdef RELOCATABLE_NEEDS_FIXUP
7085 /* Special handling for SI values. */
7086 if (size == 4 && aligned_p)
7088 extern int in_toc_section PARAMS ((void));
7089 static int recurse = 0;
7091 /* For -mrelocatable, we mark all addresses that need to be fixed up
7092 in the .fixup section. */
7093 if (TARGET_RELOCATABLE
7094 && !in_toc_section ()
7095 && !in_text_section ()
7097 && GET_CODE (x) != CONST_INT
7098 && GET_CODE (x) != CONST_DOUBLE
7104 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
7106 ASM_OUTPUT_LABEL (asm_out_file, buf);
7107 fprintf (asm_out_file, "\t.long\t(");
7108 output_addr_const (asm_out_file, x);
7109 fprintf (asm_out_file, ")@fixup\n");
7110 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
7111 ASM_OUTPUT_ALIGN (asm_out_file, 2);
7112 fprintf (asm_out_file, "\t.long\t");
7113 assemble_name (asm_out_file, buf);
7114 fprintf (asm_out_file, "\n\t.previous\n");
7118 /* Remove initial .'s to turn a -mcall-aixdesc function
7119 address into the address of the descriptor, not the function
7121 else if (GET_CODE (x) == SYMBOL_REF
7122 && XSTR (x, 0)[0] == '.'
7123 && DEFAULT_ABI == ABI_AIX)
7125 const char *name = XSTR (x, 0);
7126 while (*name == '.')
7129 fprintf (asm_out_file, "\t.long\t%s\n", name);
7133 #endif /* RELOCATABLE_NEEDS_FIXUP */
7134 return default_assemble_integer (x, size, aligned_p);
7138 rs6000_reverse_condition (mode, code)
7139 enum machine_mode mode;
7142 /* Reversal of FP compares takes care -- an ordered compare
7143 becomes an unordered compare and vice versa. */
7144 if (mode == CCFPmode)
7145 return reverse_condition_maybe_unordered (code);
7147 return reverse_condition (code);
7150 /* Generate a compare for CODE. Return a brand-new rtx that
7151 represents the result of the compare. */
7154 rs6000_generate_compare (code)
7157 enum machine_mode comp_mode;
7160 if (rs6000_compare_fp_p)
7161 comp_mode = CCFPmode;
7162 else if (code == GTU || code == LTU
7163 || code == GEU || code == LEU)
7164 comp_mode = CCUNSmode;
7168 /* First, the compare. */
7169 compare_result = gen_reg_rtx (comp_mode);
7170 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
7171 gen_rtx_COMPARE (comp_mode,
7173 rs6000_compare_op1)));
7175 /* Some kinds of FP comparisons need an OR operation;
7176 except for flag_unsafe_math_optimizations we don't bother. */
7177 if (rs6000_compare_fp_p
7178 && ! flag_unsafe_math_optimizations
7179 && (code == LE || code == GE
7180 || code == UNEQ || code == LTGT
7181 || code == UNGT || code == UNLT))
7183 enum rtx_code or1, or2;
7184 rtx or1_rtx, or2_rtx, compare2_rtx;
7185 rtx or_result = gen_reg_rtx (CCEQmode);
7189 case LE: or1 = LT; or2 = EQ; break;
7190 case GE: or1 = GT; or2 = EQ; break;
7191 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
7192 case LTGT: or1 = LT; or2 = GT; break;
7193 case UNGT: or1 = UNORDERED; or2 = GT; break;
7194 case UNLT: or1 = UNORDERED; or2 = LT; break;
7197 validate_condition_mode (or1, comp_mode);
7198 validate_condition_mode (or2, comp_mode);
7199 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
7200 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
7201 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
7202 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
7204 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
7206 compare_result = or_result;
7210 validate_condition_mode (code, GET_MODE (compare_result));
7212 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
7216 /* Emit the RTL for an sCOND pattern. */
7219 rs6000_emit_sCOND (code, result)
7224 enum machine_mode op_mode;
7226 condition_rtx = rs6000_generate_compare (code);
7228 op_mode = GET_MODE (rs6000_compare_op0);
7229 if (op_mode == VOIDmode)
7230 op_mode = GET_MODE (rs6000_compare_op1);
7232 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7234 PUT_MODE (condition_rtx, DImode);
7235 convert_move (result, condition_rtx, 0);
7239 PUT_MODE (condition_rtx, SImode);
7240 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7244 /* Emit a branch of kind CODE to location LOC. */
7247 rs6000_emit_cbranch (code, loc)
7251 rtx condition_rtx, loc_ref;
7253 condition_rtx = rs6000_generate_compare (code);
7254 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7255 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7256 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7260 /* Return the string to output a conditional branch to LABEL, which is
7261 the operand number of the label, or -1 if the branch is really a
7264 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7265 condition code register and its mode specifies what kind of
7268 REVERSED is non-zero if we should reverse the sense of the comparison.
7270 INSN is the insn. */
7273 output_cbranch (op, label, reversed, insn)
7279 static char string[64];
7280 enum rtx_code code = GET_CODE (op);
7281 rtx cc_reg = XEXP (op, 0);
7282 enum machine_mode mode = GET_MODE (cc_reg);
7283 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
7284 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
7285 int really_reversed = reversed ^ need_longbranch;
7291 validate_condition_mode (code, mode);
7293 /* Work out which way this really branches. We could use
7294 reverse_condition_maybe_unordered here always but this
7295 makes the resulting assembler clearer. */
7296 if (really_reversed)
7297 code = rs6000_reverse_condition (mode, code);
7301 /* Not all of these are actually distinct opcodes, but
7302 we distinguish them for clarity of the resulting assembler. */
7304 ccode = "ne"; break;
7306 ccode = "eq"; break;
7308 ccode = "ge"; break;
7309 case GT: case GTU: case UNGT:
7310 ccode = "gt"; break;
7312 ccode = "le"; break;
7313 case LT: case LTU: case UNLT:
7314 ccode = "lt"; break;
7315 case UNORDERED: ccode = "un"; break;
7316 case ORDERED: ccode = "nu"; break;
7317 case UNGE: ccode = "nl"; break;
7318 case UNLE: ccode = "ng"; break;
7323 /* Maybe we have a guess as to how likely the branch is.
7324 The old mnemonics don't have a way to specify this information. */
7325 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
7326 if (note != NULL_RTX)
7328 /* PROB is the difference from 50%. */
7329 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7331 /* For branches that are very close to 50%, assume not-taken. */
7332 if (abs (prob) > REG_BR_PROB_BASE / 20
7333 && ((prob > 0) ^ need_longbranch))
7342 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7344 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7346 /* We need to escape any '%' characters in the reg_names string.
7347 Assume they'd only be the first character... */
7348 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7350 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7354 /* If the branch distance was too far, we may have to use an
7355 unconditional branch to go the distance. */
7356 if (need_longbranch)
7357 s += sprintf (s, ",$+8\n\tb %s", label);
7359 s += sprintf (s, ",%s", label);
7365 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7366 operands of the last comparison is nonzero/true, FALSE_COND if it
7367 is zero/false. Return 0 if the hardware has no such operation. */
7370 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7376 enum rtx_code code = GET_CODE (op);
7377 rtx op0 = rs6000_compare_op0;
7378 rtx op1 = rs6000_compare_op1;
7380 enum machine_mode compare_mode = GET_MODE (op0);
7381 enum machine_mode result_mode = GET_MODE (dest);
7384 /* These modes should always match. */
7385 if (GET_MODE (op1) != compare_mode)
7387 if (GET_MODE (true_cond) != result_mode)
7389 if (GET_MODE (false_cond) != result_mode)
7392 /* First, work out if the hardware can do this at all, or
7393 if it's too slow... */
7394 /* If the comparison is an integer one, since we only have fsel
7395 it'll be cheaper to use a branch. */
7396 if (! rs6000_compare_fp_p)
7399 /* Eliminate half of the comparisons by switching operands, this
7400 makes the remaining code simpler. */
7401 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7402 || code == LTGT || code == LT)
7404 code = reverse_condition_maybe_unordered (code);
7406 true_cond = false_cond;
7410 /* UNEQ and LTGT take four instructions for a comparison with zero,
7411 it'll probably be faster to use a branch here too. */
7415 if (GET_CODE (op1) == CONST_DOUBLE)
7416 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7418 /* We're going to try to implement comparions by performing
7419 a subtract, then comparing against zero. Unfortunately,
7420 Inf - Inf is NaN which is not zero, and so if we don't
7421 know that the the operand is finite and the comparison
7422 would treat EQ different to UNORDERED, we can't do it. */
7423 if (! flag_unsafe_math_optimizations
7424 && code != GT && code != UNGE
7425 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7426 /* Constructs of the form (a OP b ? a : b) are safe. */
7427 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7428 || (! rtx_equal_p (op0, true_cond)
7429 && ! rtx_equal_p (op1, true_cond))))
7431 /* At this point we know we can use fsel. */
7433 /* Reduce the comparison to a comparison against zero. */
7434 temp = gen_reg_rtx (compare_mode);
7435 emit_insn (gen_rtx_SET (VOIDmode, temp,
7436 gen_rtx_MINUS (compare_mode, op0, op1)));
7438 op1 = CONST0_RTX (compare_mode);
7440 /* If we don't care about NaNs we can reduce some of the comparisons
7441 down to faster ones. */
7442 if (flag_unsafe_math_optimizations)
7448 true_cond = false_cond;
7461 /* Now, reduce everything down to a GE. */
7468 temp = gen_reg_rtx (compare_mode);
7469 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7474 temp = gen_reg_rtx (compare_mode);
7475 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
7480 temp = gen_reg_rtx (compare_mode);
7481 emit_insn (gen_rtx_SET (VOIDmode, temp,
7482 gen_rtx_NEG (compare_mode,
7483 gen_rtx_ABS (compare_mode, op0))));
7488 temp = gen_reg_rtx (result_mode);
7489 emit_insn (gen_rtx_SET (VOIDmode, temp,
7490 gen_rtx_IF_THEN_ELSE (result_mode,
7491 gen_rtx_GE (VOIDmode,
7493 true_cond, false_cond)));
7495 true_cond = false_cond;
7497 temp = gen_reg_rtx (compare_mode);
7498 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7503 temp = gen_reg_rtx (result_mode);
7504 emit_insn (gen_rtx_SET (VOIDmode, temp,
7505 gen_rtx_IF_THEN_ELSE (result_mode,
7506 gen_rtx_GE (VOIDmode,
7508 true_cond, false_cond)));
7510 false_cond = true_cond;
7512 temp = gen_reg_rtx (compare_mode);
7513 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7521 emit_insn (gen_rtx_SET (VOIDmode, dest,
7522 gen_rtx_IF_THEN_ELSE (result_mode,
7523 gen_rtx_GE (VOIDmode,
7525 true_cond, false_cond)));
7530 rs6000_emit_minmax (dest, code, op0, op1)
7536 enum machine_mode mode = GET_MODE (op0);
7540 if (code == SMAX || code == SMIN)
7545 if (code == SMAX || code == UMAX)
7546 target = emit_conditional_move (dest, c, op0, op1, mode,
7549 target = emit_conditional_move (dest, c, op0, op1, mode,
7551 if (target == NULL_RTX)
7554 emit_move_insn (dest, target);
7557 /* This page contains routines that are used to determine what the
7558 function prologue and epilogue code will do and write them out. */
7560 /* Return the first fixed-point register that is required to be
7561 saved. 32 if none. */
7564 first_reg_to_save ()
7568 /* Find lowest numbered live register. */
7569 for (first_reg = 13; first_reg <= 31; first_reg++)
7570 if (regs_ever_live[first_reg]
7571 && (! call_used_regs[first_reg]
7572 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
7573 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7574 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7577 if (current_function_profile)
7579 /* AIX must save/restore every register that contains a parameter
7580 before/after the .__mcount call plus an additional register
7581 for the static chain, if needed; use registers from 30 down to 22
7583 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7585 int last_parm_reg, profile_first_reg;
7587 /* Figure out last used parameter register. The proper thing
7588 to do is to walk incoming args of the function. A function
7589 might have live parameter registers even if it has no
7591 for (last_parm_reg = 10;
7592 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7596 /* Calculate first reg for saving parameter registers
7598 Skip reg 31 which may contain the frame pointer. */
7599 profile_first_reg = (33 - last_parm_reg
7600 - (current_function_needs_context ? 1 : 0));
7602 /* Need to skip another reg to account for R31 being PICBASE
7603 (when flag_pic is set) or R30 being used as the frame
7604 pointer (when flag_pic is not set). */
7605 --profile_first_reg;
7607 /* Do not save frame pointer if no parameters needs to be saved. */
7608 if (profile_first_reg == 31)
7609 profile_first_reg = 32;
7611 if (first_reg > profile_first_reg)
7612 first_reg = profile_first_reg;
7615 /* SVR4 may need one register to preserve the static chain. */
7616 else if (current_function_needs_context)
7618 /* Skip reg 31 which may contain the frame pointer. */
7625 if (flag_pic && current_function_uses_pic_offset_table &&
7626 (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
7627 return RS6000_PIC_OFFSET_TABLE_REGNUM;
7633 /* Similar, for FP regs. */
7636 first_fp_reg_to_save ()
7640 /* Find lowest numbered live register. */
7641 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7642 if (regs_ever_live[first_reg])
7648 /* Similar, for AltiVec regs. */
7651 first_altivec_reg_to_save ()
7655 /* Stack frame remains as is unless we are in AltiVec ABI. */
7656 if (! TARGET_ALTIVEC_ABI)
7657 return LAST_ALTIVEC_REGNO + 1;
7659 /* Find lowest numbered live register. */
7660 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7661 if (regs_ever_live[i])
7667 /* Return a 32-bit mask of the AltiVec registers we need to set in
7668 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7669 the 32-bit word is 0. */
7672 compute_vrsave_mask ()
7674 unsigned int i, mask = 0;
7676 /* First, find out if we use _any_ altivec registers. */
7677 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7678 if (regs_ever_live[i])
7679 mask |= ALTIVEC_REG_BIT (i);
7684 /* Next, add all registers that are call-clobbered. We do this
7685 because post-reload register optimizers such as regrename_optimize
7686 may choose to use them. They never change the register class
7687 chosen by reload, so cannot create new uses of altivec registers
7688 if there were none before, so the early exit above is safe. */
7689 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7690 altivec registers not saved in the mask, which might well make the
7691 adjustments below more effective in eliding the save/restore of
7692 VRSAVE in small functions. */
7693 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7694 if (call_used_regs[i])
7695 mask |= ALTIVEC_REG_BIT (i);
7697 /* Next, remove the argument registers from the set. These must
7698 be in the VRSAVE mask set by the caller, so we don't need to add
7699 them in again. More importantly, the mask we compute here is
7700 used to generate CLOBBERs in the set_vrsave insn, and we do not
7701 wish the argument registers to die. */
7702 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7703 mask &= ~ALTIVEC_REG_BIT (i);
7705 /* Similarly, remove the return value from the set. */
7708 diddle_return_value (is_altivec_return_reg, &yes);
7710 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7717 is_altivec_return_reg (reg, xyes)
7721 bool *yes = (bool *) xyes;
7722 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7727 /* Calculate the stack information for the current function. This is
7728 complicated by having two separate calling sequences, the AIX calling
7729 sequence and the V.4 calling sequence.
7731 AIX (and Darwin/Mac OS X) stack frames look like:
7733 SP----> +---------------------------------------+
7734 | back chain to caller | 0 0
7735 +---------------------------------------+
7736 | saved CR | 4 8 (8-11)
7737 +---------------------------------------+
7739 +---------------------------------------+
7740 | reserved for compilers | 12 24
7741 +---------------------------------------+
7742 | reserved for binders | 16 32
7743 +---------------------------------------+
7744 | saved TOC pointer | 20 40
7745 +---------------------------------------+
7746 | Parameter save area (P) | 24 48
7747 +---------------------------------------+
7748 | Alloca space (A) | 24+P etc.
7749 +---------------------------------------+
7750 | Local variable space (L) | 24+P+A
7751 +---------------------------------------+
7752 | Float/int conversion temporary (X) | 24+P+A+L
7753 +---------------------------------------+
7754 | Save area for AltiVec registers (W) | 24+P+A+L+X
7755 +---------------------------------------+
7756 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7757 +---------------------------------------+
7758 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7759 +---------------------------------------+
7760 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
7761 +---------------------------------------+
7762 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
7763 +---------------------------------------+
7764 old SP->| back chain to caller's caller |
7765 +---------------------------------------+
7767 The required alignment for AIX configurations is two words (i.e., 8
7771 V.4 stack frames look like:
7773 SP----> +---------------------------------------+
7774 | back chain to caller | 0
7775 +---------------------------------------+
7776 | caller's saved LR | 4
7777 +---------------------------------------+
7778 | Parameter save area (P) | 8
7779 +---------------------------------------+
7780 | Alloca space (A) | 8+P
7781 +---------------------------------------+
7782 | Varargs save area (V) | 8+P+A
7783 +---------------------------------------+
7784 | Local variable space (L) | 8+P+A+V
7785 +---------------------------------------+
7786 | Float/int conversion temporary (X) | 8+P+A+V+L
7787 +---------------------------------------+
7788 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7789 +---------------------------------------+
7790 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7791 +---------------------------------------+
7792 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7793 +---------------------------------------+
7794 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7795 +---------------------------------------+
7796 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7797 +---------------------------------------+
7798 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7799 +---------------------------------------+
7800 old SP->| back chain to caller's caller |
7801 +---------------------------------------+
7803 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7804 given. (But note below and in sysv4.h that we require only 8 and
7805 may round up the size of our stack frame anyways. The historical
7806 reason is early versions of powerpc-linux which didn't properly
7807 align the stack at program startup. A happy side-effect is that
7808 -mno-eabi libraries can be used with -meabi programs.)
7810 The EABI configuration defaults to the V.4 layout, unless
7811 -mcall-aix is used, in which case the AIX layout is used. However,
7812 the stack alignment requirements may differ. If -mno-eabi is not
7813 given, the required stack alignment is 8 bytes; if -mno-eabi is
7814 given, the required alignment is 16 bytes. (But see V.4 comment
7817 #ifndef ABI_STACK_BOUNDARY
7818 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7822 rs6000_stack_info ()
7824 static rs6000_stack_t info, zero_info;
7825 rs6000_stack_t *info_ptr = &info;
7826 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7827 enum rs6000_abi abi;
7831 /* Zero all fields portably. */
7834 /* Select which calling sequence. */
7835 info_ptr->abi = abi = DEFAULT_ABI;
7837 /* Calculate which registers need to be saved & save area size. */
7838 info_ptr->first_gp_reg_save = first_reg_to_save ();
7839 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
7840 even if it currently looks like we won't. */
7841 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7842 || (flag_pic == 1 && abi == ABI_V4)
7843 || (flag_pic && abi == ABI_DARWIN))
7844 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
7845 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
7847 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7849 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7850 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7852 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7853 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7854 - info_ptr->first_altivec_reg_save);
7856 /* Does this function call anything? */
7857 info_ptr->calls_p = (! current_function_is_leaf
7858 || cfun->machine->ra_needs_full_frame);
7860 /* Determine if we need to save the link register. */
7861 if (rs6000_ra_ever_killed ()
7862 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7863 #ifdef TARGET_RELOCATABLE
7864 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7866 || (info_ptr->first_fp_reg_save != 64
7867 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7868 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7869 || (abi == ABI_V4 && current_function_calls_alloca)
7870 || (DEFAULT_ABI == ABI_DARWIN
7872 && current_function_uses_pic_offset_table)
7873 || info_ptr->calls_p)
7875 info_ptr->lr_save_p = 1;
7876 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7879 /* Determine if we need to save the condition code registers. */
7880 if (regs_ever_live[CR2_REGNO]
7881 || regs_ever_live[CR3_REGNO]
7882 || regs_ever_live[CR4_REGNO])
7884 info_ptr->cr_save_p = 1;
7886 info_ptr->cr_size = reg_size;
7889 /* If the current function calls __builtin_eh_return, then we need
7890 to allocate stack space for registers that will hold data for
7891 the exception handler. */
7892 if (current_function_calls_eh_return)
7895 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7897 ehrd_size = i * UNITS_PER_WORD;
7902 /* Determine various sizes. */
7903 info_ptr->reg_size = reg_size;
7904 info_ptr->fixed_size = RS6000_SAVE_AREA;
7905 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7906 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7907 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7910 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
7912 info_ptr->vrsave_mask = compute_vrsave_mask ();
7913 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7917 info_ptr->vrsave_mask = 0;
7918 info_ptr->vrsave_size = 0;
7921 /* Calculate the offsets. */
7929 case ABI_AIX_NODESC:
7931 info_ptr->fp_save_offset = - info_ptr->fp_size;
7932 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7934 if (TARGET_ALTIVEC_ABI)
7936 info_ptr->vrsave_save_offset
7937 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7939 /* Align stack so vector save area is on a quadword boundary. */
7940 if (info_ptr->altivec_size != 0)
7941 info_ptr->altivec_padding_size
7942 = 16 - (-info_ptr->vrsave_save_offset % 16);
7944 info_ptr->altivec_padding_size = 0;
7946 info_ptr->altivec_save_offset
7947 = info_ptr->vrsave_save_offset
7948 - info_ptr->altivec_padding_size
7949 - info_ptr->altivec_size;
7951 /* Adjust for AltiVec case. */
7952 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7955 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7956 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7957 info_ptr->lr_save_offset = 2*reg_size;
7961 info_ptr->fp_save_offset = - info_ptr->fp_size;
7962 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7963 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7965 if (TARGET_ALTIVEC_ABI)
7967 info_ptr->vrsave_save_offset
7968 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7970 /* Align stack so vector save area is on a quadword boundary. */
7971 if (info_ptr->altivec_size != 0)
7972 info_ptr->altivec_padding_size
7973 = 16 - (-info_ptr->vrsave_save_offset % 16);
7975 info_ptr->altivec_padding_size = 0;
7977 info_ptr->altivec_save_offset
7978 = info_ptr->vrsave_save_offset
7979 - info_ptr->altivec_padding_size
7980 - info_ptr->altivec_size;
7982 /* Adjust for AltiVec case. */
7983 info_ptr->toc_save_offset
7984 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7987 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7988 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7989 info_ptr->lr_save_offset = reg_size;
7993 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7995 + info_ptr->altivec_size
7996 + info_ptr->altivec_padding_size
7997 + info_ptr->vrsave_size
8001 + info_ptr->vrsave_size
8002 + info_ptr->toc_size,
8003 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
8006 total_raw_size = (info_ptr->vars_size
8007 + info_ptr->parm_size
8008 + info_ptr->save_size
8009 + info_ptr->varargs_size
8010 + info_ptr->fixed_size);
8012 info_ptr->total_size =
8013 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
8015 /* Determine if we need to allocate any stack frame:
8017 For AIX we need to push the stack if a frame pointer is needed
8018 (because the stack might be dynamically adjusted), if we are
8019 debugging, if we make calls, or if the sum of fp_save, gp_save,
8020 and local variables are more than the space needed to save all
8021 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
8022 + 18*8 = 288 (GPR13 reserved).
8024 For V.4 we don't have the stack cushion that AIX uses, but assume
8025 that the debugger can handle stackless frames. */
8027 if (info_ptr->calls_p)
8028 info_ptr->push_p = 1;
8030 else if (abi == ABI_V4)
8031 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
8034 info_ptr->push_p = (frame_pointer_needed
8035 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
8036 || ((total_raw_size - info_ptr->fixed_size)
8037 > (TARGET_32BIT ? 220 : 288)));
8039 /* Zero offsets if we're not saving those registers. */
8040 if (info_ptr->fp_size == 0)
8041 info_ptr->fp_save_offset = 0;
8043 if (info_ptr->gp_size == 0)
8044 info_ptr->gp_save_offset = 0;
8046 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
8047 info_ptr->altivec_save_offset = 0;
8049 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
8050 info_ptr->vrsave_save_offset = 0;
8052 if (! info_ptr->lr_save_p)
8053 info_ptr->lr_save_offset = 0;
8055 if (! info_ptr->cr_save_p)
8056 info_ptr->cr_save_offset = 0;
8058 if (! info_ptr->toc_save_p)
8059 info_ptr->toc_save_offset = 0;
8065 debug_stack_info (info)
8066 rs6000_stack_t *info;
8068 const char *abi_string;
8071 info = rs6000_stack_info ();
8073 fprintf (stderr, "\nStack information for function %s:\n",
8074 ((current_function_decl && DECL_NAME (current_function_decl))
8075 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
8080 default: abi_string = "Unknown"; break;
8081 case ABI_NONE: abi_string = "NONE"; break;
8083 case ABI_AIX_NODESC: abi_string = "AIX"; break;
8084 case ABI_DARWIN: abi_string = "Darwin"; break;
8085 case ABI_V4: abi_string = "V.4"; break;
8088 fprintf (stderr, "\tABI = %5s\n", abi_string);
8090 if (TARGET_ALTIVEC_ABI)
8091 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
8093 if (info->first_gp_reg_save != 32)
8094 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
8096 if (info->first_fp_reg_save != 64)
8097 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
8099 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
8100 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
8101 info->first_altivec_reg_save);
8103 if (info->lr_save_p)
8104 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
8106 if (info->cr_save_p)
8107 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
8109 if (info->toc_save_p)
8110 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
8112 if (info->vrsave_mask)
8113 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
8116 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
8119 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
8121 if (info->gp_save_offset)
8122 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
8124 if (info->fp_save_offset)
8125 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
8127 if (info->altivec_save_offset)
8128 fprintf (stderr, "\taltivec_save_offset = %5d\n",
8129 info->altivec_save_offset);
8131 if (info->vrsave_save_offset)
8132 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
8133 info->vrsave_save_offset);
8135 if (info->lr_save_offset)
8136 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
8138 if (info->cr_save_offset)
8139 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
8141 if (info->toc_save_offset)
8142 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
8144 if (info->varargs_save_offset)
8145 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
8147 if (info->total_size)
8148 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
8150 if (info->varargs_size)
8151 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
8153 if (info->vars_size)
8154 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
8156 if (info->parm_size)
8157 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
8159 if (info->fixed_size)
8160 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
8163 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
8166 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
8168 if (info->altivec_size)
8169 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
8171 if (info->vrsave_size)
8172 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
8174 if (info->altivec_padding_size)
8175 fprintf (stderr, "\taltivec_padding_size= %5d\n",
8176 info->altivec_padding_size);
8179 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
8182 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
8185 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
8187 if (info->save_size)
8188 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
8190 if (info->reg_size != 4)
8191 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
8193 fprintf (stderr, "\n");
8197 rs6000_return_addr (count, frame)
8201 /* Currently we don't optimize very well between prolog and body
8202 code and for PIC code the code can be actually quite bad, so
8203 don't try to be too clever here. */
8204 if (count != 0 || flag_pic != 0)
8206 cfun->machine->ra_needs_full_frame = 1;
8213 plus_constant (copy_to_reg
8214 (gen_rtx_MEM (Pmode,
8215 memory_address (Pmode, frame))),
8216 RETURN_ADDRESS_OFFSET)));
8219 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
8223 rs6000_ra_ever_killed ()
8227 #ifdef ASM_OUTPUT_MI_THUNK
8228 if (current_function_is_thunk)
8231 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8232 || cfun->machine->ra_needs_full_frame)
8233 return regs_ever_live[LINK_REGISTER_REGNUM];
8235 push_topmost_sequence ();
8237 pop_topmost_sequence ();
8239 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8243 /* Add a REG_MAYBE_DEAD note to the insn. */
8245 rs6000_maybe_dead (insn)
8248 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8253 /* Emit instructions needed to load the TOC register.
8254 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8255 a constant pool; or for SVR4 -fpic. */
8258 rs6000_emit_load_toc_table (fromprolog)
8262 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
8264 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
8266 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8268 rtx temp = (fromprolog
8269 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8270 : gen_reg_rtx (Pmode));
8271 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
8272 rs6000_maybe_dead (emit_move_insn (dest, temp));
8274 else if (flag_pic == 2)
8277 rtx tempLR = (fromprolog
8278 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8279 : gen_reg_rtx (Pmode));
8280 rtx temp0 = (fromprolog
8281 ? gen_rtx_REG (Pmode, 0)
8282 : gen_reg_rtx (Pmode));
8285 /* possibly create the toc section */
8286 if (! toc_initialized)
8289 function_section (current_function_decl);
8296 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
8297 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8299 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
8300 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8302 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
8304 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8305 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
8312 static int reload_toc_labelno = 0;
8314 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
8316 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
8317 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8319 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
8322 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8323 rs6000_maybe_dead (emit_move_insn (temp0,
8324 gen_rtx_MEM (Pmode, dest)));
8326 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8328 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8330 /* This is for AIX code running in non-PIC ELF. */
8333 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8334 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8336 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8337 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
8345 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8347 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
8352 get_TOC_alias_set ()
8354 static int set = -1;
8356 set = new_alias_set ();
8360 /* This retuns nonzero if the current function uses the TOC. This is
8361 determined by the presence of (unspec ... 7), which is generated by
8362 the various load_toc_* patterns. */
8369 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8372 rtx pat = PATTERN (insn);
8375 if (GET_CODE (pat) == PARALLEL)
8376 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8377 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8378 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8385 create_TOC_reference (symbol)
8388 return gen_rtx_PLUS (Pmode,
8389 gen_rtx_REG (Pmode, TOC_REGISTER),
8390 gen_rtx_CONST (Pmode,
8391 gen_rtx_MINUS (Pmode, symbol,
8392 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8396 /* __throw will restore its own return address to be the same as the
8397 return address of the function that the throw is being made to.
8398 This is unfortunate, because we want to check the original
8399 return address to see if we need to restore the TOC.
8400 So we have to squirrel it away here.
8401 This is used only in compiling __throw and __rethrow.
8403 Most of this code should be removed by CSE. */
8404 static rtx insn_after_throw;
8406 /* This does the saving... */
8408 rs6000_aix_emit_builtin_unwind_init ()
8411 rtx stack_top = gen_reg_rtx (Pmode);
8412 rtx opcode_addr = gen_reg_rtx (Pmode);
8414 insn_after_throw = gen_reg_rtx (SImode);
8416 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8417 emit_move_insn (stack_top, mem);
8419 mem = gen_rtx_MEM (Pmode,
8420 gen_rtx_PLUS (Pmode, stack_top,
8421 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8422 emit_move_insn (opcode_addr, mem);
8423 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8426 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8427 in _eh.o). Only used on AIX.
8429 The idea is that on AIX, function calls look like this:
8430 bl somefunction-trampoline
8434 somefunction-trampoline:
8436 ... load function address in the count register ...
8438 or like this, if the linker determines that this is not a cross-module call
8439 and so the TOC need not be restored:
8442 or like this, if the compiler could determine that this is not a
8445 now, the tricky bit here is that register 2 is saved and restored
8446 by the _linker_, so we can't readily generate debugging information
8447 for it. So we need to go back up the call chain looking at the
8448 insns at return addresses to see which calls saved the TOC register
8449 and so see where it gets restored from.
8451 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8452 just before the actual epilogue.
8454 On the bright side, this incurs no space or time overhead unless an
8455 exception is thrown, except for the extra code in libgcc.a.
8457 The parameter STACKSIZE is a register containing (at runtime)
8458 the amount to be popped off the stack in addition to the stack frame
8459 of this routine (which will be __throw or __rethrow, and so is
8460 guaranteed to have a stack frame). */
8463 rs6000_emit_eh_toc_restore (stacksize)
8467 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8468 rtx tocompare = gen_reg_rtx (SImode);
8469 rtx opcode = gen_reg_rtx (SImode);
8470 rtx opcode_addr = gen_reg_rtx (Pmode);
8472 rtx loop_start = gen_label_rtx ();
8473 rtx no_toc_restore_needed = gen_label_rtx ();
8474 rtx loop_exit = gen_label_rtx ();
8476 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8477 set_mem_alias_set (mem, rs6000_sr_alias_set);
8478 emit_move_insn (bottom_of_stack, mem);
8480 top_of_stack = expand_binop (Pmode, add_optab,
8481 bottom_of_stack, stacksize,
8482 NULL_RTX, 1, OPTAB_WIDEN);
8484 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
8485 : 0xE8410028, SImode));
8487 if (insn_after_throw == NULL_RTX)
8489 emit_move_insn (opcode, insn_after_throw);
8491 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8492 emit_label (loop_start);
8494 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8495 SImode, NULL_RTX, NULL_RTX,
8496 no_toc_restore_needed);
8498 mem = gen_rtx_MEM (Pmode,
8499 gen_rtx_PLUS (Pmode, bottom_of_stack,
8500 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8501 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8503 emit_label (no_toc_restore_needed);
8504 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8505 Pmode, NULL_RTX, NULL_RTX,
8508 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8509 set_mem_alias_set (mem, rs6000_sr_alias_set);
8510 emit_move_insn (bottom_of_stack, mem);
8512 mem = gen_rtx_MEM (Pmode,
8513 gen_rtx_PLUS (Pmode, bottom_of_stack,
8514 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8515 emit_move_insn (opcode_addr, mem);
8516 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8518 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8519 emit_jump (loop_start);
8520 emit_note (NULL, NOTE_INSN_LOOP_END);
8521 emit_label (loop_exit);
8523 #endif /* TARGET_AIX */
8525 /* This ties together stack memory (MEM with an alias set of
8526 rs6000_sr_alias_set) and the change to the stack pointer. */
8529 rs6000_emit_stack_tie ()
8531 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8533 set_mem_alias_set (mem, rs6000_sr_alias_set);
8534 emit_insn (gen_stack_tie (mem));
8537 /* Emit the correct code for allocating stack space, as insns.
8538 If COPY_R12, make sure a copy of the old frame is left in r12.
8539 The generated code may use hard register 0 as a temporary. */
8542 rs6000_emit_allocate_stack (size, copy_r12)
8547 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8548 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
8549 rtx todec = GEN_INT (-size);
8551 if (current_function_limit_stack)
8553 if (REG_P (stack_limit_rtx)
8554 && REGNO (stack_limit_rtx) > 1
8555 && REGNO (stack_limit_rtx) <= 31)
8557 emit_insn (Pmode == SImode
8558 ? gen_addsi3 (tmp_reg,
8561 : gen_adddi3 (tmp_reg,
8565 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8568 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8570 && DEFAULT_ABI == ABI_V4)
8572 rtx toload = gen_rtx_CONST (VOIDmode,
8573 gen_rtx_PLUS (Pmode,
8577 emit_insn (gen_elf_high (tmp_reg, toload));
8578 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8579 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8583 warning ("stack limit expression is not supported");
8586 if (copy_r12 || ! TARGET_UPDATE)
8587 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8593 /* Need a note here so that try_split doesn't get confused. */
8594 if (get_last_insn() == NULL_RTX)
8595 emit_note (0, NOTE_INSN_DELETED);
8596 insn = emit_move_insn (tmp_reg, todec);
8597 try_split (PATTERN (insn), insn, 0);
8601 if (Pmode == SImode)
8602 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8605 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
8610 if (Pmode == SImode)
8611 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8613 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8614 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8615 gen_rtx_REG (Pmode, 12));
8618 RTX_FRAME_RELATED_P (insn) = 1;
8620 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8621 gen_rtx_SET (VOIDmode, stack_reg,
8622 gen_rtx_PLUS (Pmode, stack_reg,
8627 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8630 (mem (plus (blah) (regXX)))
8634 (mem (plus (blah) (const VALUE_OF_REGXX))). */
8637 altivec_frame_fixup (insn, reg, val)
8643 real = copy_rtx (PATTERN (insn));
8645 real = replace_rtx (real, reg, GEN_INT (val));
8647 RTX_FRAME_RELATED_P (insn) = 1;
8648 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8653 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8654 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8655 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8656 deduce these equivalences by itself so it wasn't necessary to hold
8657 its hand so much. */
8660 rs6000_frame_related (insn, reg, val, reg2, rreg)
8669 /* copy_rtx will not make unique copies of registers, so we need to
8670 ensure we don't have unwanted sharing here. */
8672 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
8675 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
8677 real = copy_rtx (PATTERN (insn));
8679 real = replace_rtx (real, reg,
8680 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8681 STACK_POINTER_REGNUM),
8684 /* We expect that 'real' is either a SET or a PARALLEL containing
8685 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8686 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8688 if (GET_CODE (real) == SET)
8692 temp = simplify_rtx (SET_SRC (set));
8694 SET_SRC (set) = temp;
8695 temp = simplify_rtx (SET_DEST (set));
8697 SET_DEST (set) = temp;
8698 if (GET_CODE (SET_DEST (set)) == MEM)
8700 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8702 XEXP (SET_DEST (set), 0) = temp;
8705 else if (GET_CODE (real) == PARALLEL)
8708 for (i = 0; i < XVECLEN (real, 0); i++)
8709 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8711 rtx set = XVECEXP (real, 0, i);
8713 temp = simplify_rtx (SET_SRC (set));
8715 SET_SRC (set) = temp;
8716 temp = simplify_rtx (SET_DEST (set));
8718 SET_DEST (set) = temp;
8719 if (GET_CODE (SET_DEST (set)) == MEM)
8721 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8723 XEXP (SET_DEST (set), 0) = temp;
8725 RTX_FRAME_RELATED_P (set) = 1;
8731 if (reg2 != NULL_RTX)
8732 real = replace_rtx (real, reg2, rreg);
8734 RTX_FRAME_RELATED_P (insn) = 1;
8735 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8740 /* Returns an insn that has a vrsave set operation with the
8741 appropriate CLOBBERs. */
8744 generate_set_vrsave (reg, info, epiloguep)
8746 rs6000_stack_t *info;
8750 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8751 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8754 = gen_rtx_SET (VOIDmode,
8756 gen_rtx_UNSPEC_VOLATILE (SImode,
8757 gen_rtvec (2, reg, vrsave),
8762 /* We need to clobber the registers in the mask so the scheduler
8763 does not move sets to VRSAVE before sets of AltiVec registers.
8765 However, if the function receives nonlocal gotos, reload will set
8766 all call saved registers live. We will end up with:
8768 (set (reg 999) (mem))
8769 (parallel [ (set (reg vrsave) (unspec blah))
8770 (clobber (reg 999))])
8772 The clobber will cause the store into reg 999 to be dead, and
8773 flow will attempt to delete an epilogue insn. In this case, we
8774 need an unspec use/set of the register. */
8776 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8777 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8779 if (!epiloguep || call_used_regs [i])
8780 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8781 gen_rtx_REG (V4SImode, i));
8784 rtx reg = gen_rtx_REG (V4SImode, i);
8787 = gen_rtx_SET (VOIDmode,
8789 gen_rtx_UNSPEC (V4SImode,
8790 gen_rtvec (1, reg), 27));
8794 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8796 for (i = 0; i < nclobs; ++i)
8797 XVECEXP (insn, 0, i) = clobs[i];
8802 /* Emit function prologue as insns. */
8805 rs6000_emit_prologue ()
8807 rs6000_stack_t *info = rs6000_stack_info ();
8808 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8809 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8810 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8811 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8812 rtx frame_reg_rtx = sp_reg_rtx;
8813 rtx cr_save_rtx = NULL;
8815 int saving_FPRs_inline;
8816 int using_store_multiple;
8817 HOST_WIDE_INT sp_offset = 0;
8819 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8820 && info->first_gp_reg_save < 31);
8821 saving_FPRs_inline = (info->first_fp_reg_save == 64
8822 || FP_SAVE_INLINE (info->first_fp_reg_save));
8824 /* For V.4, update stack before we do any saving and set back pointer. */
8825 if (info->push_p && DEFAULT_ABI == ABI_V4)
8827 if (info->total_size < 32767)
8828 sp_offset = info->total_size;
8830 frame_reg_rtx = frame_ptr_rtx;
8831 rs6000_emit_allocate_stack (info->total_size,
8832 (frame_reg_rtx != sp_reg_rtx
8835 || info->first_fp_reg_save < 64
8836 || info->first_gp_reg_save < 32
8838 if (frame_reg_rtx != sp_reg_rtx)
8839 rs6000_emit_stack_tie ();
8842 /* Save AltiVec registers if needed. */
8843 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8847 /* There should be a non inline version of this, for when we
8848 are saving lots of vector registers. */
8849 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8850 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8852 rtx areg, savereg, mem;
8855 offset = info->altivec_save_offset + sp_offset
8856 + 16 * (i - info->first_altivec_reg_save);
8858 savereg = gen_rtx_REG (V4SImode, i);
8860 areg = gen_rtx_REG (Pmode, 0);
8861 emit_move_insn (areg, GEN_INT (offset));
8863 /* AltiVec addressing mode is [reg+reg]. */
8864 mem = gen_rtx_MEM (V4SImode,
8865 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8867 set_mem_alias_set (mem, rs6000_sr_alias_set);
8869 insn = emit_move_insn (mem, savereg);
8871 altivec_frame_fixup (insn, areg, offset);
8875 /* VRSAVE is a bit vector representing which AltiVec registers
8876 are used. The OS uses this to determine which vector
8877 registers to save on a context switch. We need to save
8878 VRSAVE on the stack frame, add whatever AltiVec registers we
8879 used in this function, and do the corresponding magic in the
8882 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8884 rtx reg, mem, vrsave;
8887 /* Get VRSAVE onto a GPR. */
8888 reg = gen_rtx_REG (SImode, 12);
8889 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8891 emit_insn (gen_get_vrsave_internal (reg));
8893 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
8896 offset = info->vrsave_save_offset + sp_offset;
8898 = gen_rtx_MEM (SImode,
8899 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8900 set_mem_alias_set (mem, rs6000_sr_alias_set);
8901 insn = emit_move_insn (mem, reg);
8903 /* Include the registers in the mask. */
8904 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8906 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8909 /* If we use the link register, get it into r0. */
8910 if (info->lr_save_p)
8911 emit_move_insn (gen_rtx_REG (Pmode, 0),
8912 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8914 /* If we need to save CR, put it into r12. */
8915 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8917 cr_save_rtx = gen_rtx_REG (SImode, 12);
8918 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8921 /* Do any required saving of fpr's. If only one or two to save, do
8922 it ourselves. Otherwise, call function. */
8923 if (saving_FPRs_inline)
8926 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8927 if ((regs_ever_live[info->first_fp_reg_save+i]
8928 && ! call_used_regs[info->first_fp_reg_save+i]))
8931 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8932 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8933 GEN_INT (info->fp_save_offset
8936 mem = gen_rtx_MEM (DFmode, addr);
8937 set_mem_alias_set (mem, rs6000_sr_alias_set);
8939 insn = emit_move_insn (mem, reg);
8940 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8941 NULL_RTX, NULL_RTX);
8944 else if (info->first_fp_reg_save != 64)
8948 const char *alloc_rname;
8950 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8952 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8954 LINK_REGISTER_REGNUM));
8955 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8956 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8957 alloc_rname = ggc_strdup (rname);
8958 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8959 gen_rtx_SYMBOL_REF (Pmode,
8961 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8964 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8965 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8966 GEN_INT (info->fp_save_offset
8967 + sp_offset + 8*i));
8968 mem = gen_rtx_MEM (DFmode, addr);
8969 set_mem_alias_set (mem, rs6000_sr_alias_set);
8971 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8973 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8974 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8975 NULL_RTX, NULL_RTX);
8978 /* Save GPRs. This is done as a PARALLEL if we are using
8979 the store-multiple instructions. */
8980 if (using_store_multiple)
8984 p = rtvec_alloc (32 - info->first_gp_reg_save);
8985 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8986 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8989 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8990 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8991 GEN_INT (info->gp_save_offset
8994 mem = gen_rtx_MEM (reg_mode, addr);
8995 set_mem_alias_set (mem, rs6000_sr_alias_set);
8997 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8999 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9000 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9001 NULL_RTX, NULL_RTX);
9006 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9007 if ((regs_ever_live[info->first_gp_reg_save+i]
9008 && ! call_used_regs[info->first_gp_reg_save+i])
9009 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
9010 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9011 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9014 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
9015 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9016 GEN_INT (info->gp_save_offset
9019 mem = gen_rtx_MEM (reg_mode, addr);
9020 set_mem_alias_set (mem, rs6000_sr_alias_set);
9022 insn = emit_move_insn (mem, reg);
9023 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9024 NULL_RTX, NULL_RTX);
9028 /* ??? There's no need to emit actual instructions here, but it's the
9029 easiest way to get the frame unwind information emitted. */
9030 if (current_function_calls_eh_return)
9032 unsigned int i, regno;
9038 regno = EH_RETURN_DATA_REGNO (i);
9039 if (regno == INVALID_REGNUM)
9042 reg = gen_rtx_REG (reg_mode, regno);
9043 addr = plus_constant (frame_reg_rtx,
9044 info->ehrd_offset + sp_offset
9045 + reg_size * (int) i);
9046 mem = gen_rtx_MEM (reg_mode, addr);
9047 set_mem_alias_set (mem, rs6000_sr_alias_set);
9049 insn = emit_move_insn (mem, reg);
9050 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9051 NULL_RTX, NULL_RTX);
9055 /* Save lr if we used it. */
9056 if (info->lr_save_p)
9058 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9059 GEN_INT (info->lr_save_offset + sp_offset));
9060 rtx reg = gen_rtx_REG (Pmode, 0);
9061 rtx mem = gen_rtx_MEM (Pmode, addr);
9062 /* This should not be of rs6000_sr_alias_set, because of
9063 __builtin_return_address. */
9065 insn = emit_move_insn (mem, reg);
9066 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9067 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
9070 /* Save CR if we use any that must be preserved. */
9071 if (info->cr_save_p)
9073 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9074 GEN_INT (info->cr_save_offset + sp_offset));
9075 rtx mem = gen_rtx_MEM (SImode, addr);
9077 set_mem_alias_set (mem, rs6000_sr_alias_set);
9079 /* If r12 was used to hold the original sp, copy cr into r0 now
9081 if (REGNO (frame_reg_rtx) == 12)
9083 cr_save_rtx = gen_rtx_REG (SImode, 0);
9084 emit_insn (gen_movesi_from_cr (cr_save_rtx));
9086 insn = emit_move_insn (mem, cr_save_rtx);
9088 /* Now, there's no way that dwarf2out_frame_debug_expr is going
9089 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
9090 OK. All we have to do is specify that _one_ condition code
9091 register is saved in this stack slot. The thrower's epilogue
9092 will then restore all the call-saved registers.
9093 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
9094 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9095 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
9098 /* Update stack and set back pointer unless this is V.4,
9099 for which it was done previously. */
9100 if (info->push_p && DEFAULT_ABI != ABI_V4)
9101 rs6000_emit_allocate_stack (info->total_size, FALSE);
9103 /* Set frame pointer, if needed. */
9104 if (frame_pointer_needed)
9106 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
9108 RTX_FRAME_RELATED_P (insn) = 1;
9111 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9112 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
9113 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
9114 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
9116 /* If emit_load_toc_table will use the link register, we need to save
9117 it. We use R11 for this purpose because emit_load_toc_table
9118 can use register 0. This allows us to use a plain 'blr' to return
9119 from the procedure more often. */
9120 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
9121 && ! info->lr_save_p
9122 && EXIT_BLOCK_PTR->pred != NULL);
9123 if (save_LR_around_toc_setup)
9124 emit_move_insn (gen_rtx_REG (Pmode, 11),
9125 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
9127 rs6000_emit_load_toc_table (TRUE);
9129 if (save_LR_around_toc_setup)
9130 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9131 gen_rtx_REG (Pmode, 11));
9134 if (DEFAULT_ABI == ABI_DARWIN
9135 && flag_pic && current_function_uses_pic_offset_table)
9137 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9139 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
9142 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
9143 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
9147 /* Write function prologue. */
9150 rs6000_output_function_prologue (file, size)
9152 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9154 rs6000_stack_t *info = rs6000_stack_info ();
9156 if (TARGET_DEBUG_STACK)
9157 debug_stack_info (info);
9159 /* Write .extern for any function we will call to save and restore
9161 if (info->first_fp_reg_save < 64
9162 && !FP_SAVE_INLINE (info->first_fp_reg_save))
9163 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
9164 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
9165 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
9168 /* Write .extern for AIX common mode routines, if needed. */
9169 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
9171 fputs ("\t.extern __mulh\n", file);
9172 fputs ("\t.extern __mull\n", file);
9173 fputs ("\t.extern __divss\n", file);
9174 fputs ("\t.extern __divus\n", file);
9175 fputs ("\t.extern __quoss\n", file);
9176 fputs ("\t.extern __quous\n", file);
9177 common_mode_defined = 1;
9180 if (! HAVE_prologue)
9184 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
9185 the "toplevel" insn chain. */
9186 emit_note (0, NOTE_INSN_DELETED);
9187 rs6000_emit_prologue ();
9188 emit_note (0, NOTE_INSN_DELETED);
9190 /* Expand INSN_ADDRESSES so final() doesn't crash. */
9194 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
9196 INSN_ADDRESSES_NEW (insn, addr);
9201 if (TARGET_DEBUG_STACK)
9202 debug_rtx_list (get_insns (), 100);
9203 final (get_insns (), file, FALSE, FALSE);
9207 rs6000_pic_labelno++;
9210 /* Emit function epilogue as insns.
9212 At present, dwarf2out_frame_debug_expr doesn't understand
9213 register restores, so we don't bother setting RTX_FRAME_RELATED_P
9214 anywhere in the epilogue. Most of the insns below would in any case
9215 need special notes to explain where r11 is in relation to the stack. */
9218 rs6000_emit_epilogue (sibcall)
9221 rs6000_stack_t *info;
9222 int restoring_FPRs_inline;
9223 int using_load_multiple;
9224 int using_mfcr_multiple;
9225 int use_backchain_to_restore_sp;
9227 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
9228 rtx frame_reg_rtx = sp_reg_rtx;
9229 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
9230 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9233 info = rs6000_stack_info ();
9234 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
9235 && info->first_gp_reg_save < 31);
9236 restoring_FPRs_inline = (sibcall
9237 || current_function_calls_eh_return
9238 || info->first_fp_reg_save == 64
9239 || FP_SAVE_INLINE (info->first_fp_reg_save));
9240 use_backchain_to_restore_sp = (frame_pointer_needed
9241 || current_function_calls_alloca
9242 || info->total_size > 32767);
9243 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9244 || rs6000_cpu == PROCESSOR_PPC603
9245 || rs6000_cpu == PROCESSOR_PPC750
9248 /* If we have a frame pointer, a call to alloca, or a large stack
9249 frame, restore the old stack pointer using the backchain. Otherwise,
9250 we know what size to update it with. */
9251 if (use_backchain_to_restore_sp)
9253 /* Under V.4, don't reset the stack pointer until after we're done
9254 loading the saved registers. */
9255 if (DEFAULT_ABI == ABI_V4)
9256 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
9258 emit_move_insn (frame_reg_rtx,
9259 gen_rtx_MEM (Pmode, sp_reg_rtx));
9262 else if (info->push_p)
9264 if (DEFAULT_ABI == ABI_V4)
9265 sp_offset = info->total_size;
9268 emit_insn (TARGET_32BIT
9269 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9270 GEN_INT (info->total_size))
9271 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9272 GEN_INT (info->total_size)));
9276 /* Restore AltiVec registers if needed. */
9277 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9281 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
9282 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9284 rtx addr, areg, mem;
9286 areg = gen_rtx_REG (Pmode, 0);
9288 (areg, GEN_INT (info->altivec_save_offset
9290 + 16 * (i - info->first_altivec_reg_save)));
9292 /* AltiVec addressing mode is [reg+reg]. */
9293 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
9294 mem = gen_rtx_MEM (V4SImode, addr);
9295 set_mem_alias_set (mem, rs6000_sr_alias_set);
9297 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
9301 /* Restore VRSAVE if needed. */
9302 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
9306 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9307 GEN_INT (info->vrsave_save_offset + sp_offset));
9308 mem = gen_rtx_MEM (SImode, addr);
9309 set_mem_alias_set (mem, rs6000_sr_alias_set);
9310 reg = gen_rtx_REG (SImode, 12);
9311 emit_move_insn (reg, mem);
9313 emit_insn (generate_set_vrsave (reg, info, 1));
9316 /* Get the old lr if we saved it. */
9317 if (info->lr_save_p)
9319 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9320 GEN_INT (info->lr_save_offset + sp_offset));
9321 rtx mem = gen_rtx_MEM (Pmode, addr);
9323 set_mem_alias_set (mem, rs6000_sr_alias_set);
9325 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9328 /* Get the old cr if we saved it. */
9329 if (info->cr_save_p)
9331 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9332 GEN_INT (info->cr_save_offset + sp_offset));
9333 rtx mem = gen_rtx_MEM (SImode, addr);
9335 set_mem_alias_set (mem, rs6000_sr_alias_set);
9337 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9340 /* Set LR here to try to overlap restores below. */
9341 if (info->lr_save_p)
9342 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9343 gen_rtx_REG (Pmode, 0));
9345 /* Load exception handler data registers, if needed. */
9346 if (current_function_calls_eh_return)
9348 unsigned int i, regno;
9354 regno = EH_RETURN_DATA_REGNO (i);
9355 if (regno == INVALID_REGNUM)
9358 addr = plus_constant (frame_reg_rtx,
9359 info->ehrd_offset + sp_offset
9360 + reg_size * (int) i);
9361 mem = gen_rtx_MEM (reg_mode, addr);
9362 set_mem_alias_set (mem, rs6000_sr_alias_set);
9364 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9368 /* Restore GPRs. This is done as a PARALLEL if we are using
9369 the load-multiple instructions. */
9370 if (using_load_multiple)
9373 p = rtvec_alloc (32 - info->first_gp_reg_save);
9374 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9376 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9377 GEN_INT (info->gp_save_offset
9380 rtx mem = gen_rtx_MEM (reg_mode, addr);
9382 set_mem_alias_set (mem, rs6000_sr_alias_set);
9385 gen_rtx_SET (VOIDmode,
9386 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9389 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9392 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9393 if ((regs_ever_live[info->first_gp_reg_save+i]
9394 && ! call_used_regs[info->first_gp_reg_save+i])
9395 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
9396 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9397 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9399 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9400 GEN_INT (info->gp_save_offset
9403 rtx mem = gen_rtx_MEM (reg_mode, addr);
9405 set_mem_alias_set (mem, rs6000_sr_alias_set);
9407 emit_move_insn (gen_rtx_REG (reg_mode,
9408 info->first_gp_reg_save + i),
9412 /* Restore fpr's if we need to do it without calling a function. */
9413 if (restoring_FPRs_inline)
9414 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9415 if ((regs_ever_live[info->first_fp_reg_save+i]
9416 && ! call_used_regs[info->first_fp_reg_save+i]))
9419 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9420 GEN_INT (info->fp_save_offset
9423 mem = gen_rtx_MEM (DFmode, addr);
9424 set_mem_alias_set (mem, rs6000_sr_alias_set);
9426 emit_move_insn (gen_rtx_REG (DFmode,
9427 info->first_fp_reg_save + i),
9431 /* If we saved cr, restore it here. Just those that were used. */
9432 if (info->cr_save_p)
9434 rtx r12_rtx = gen_rtx_REG (SImode, 12);
9437 if (using_mfcr_multiple)
9439 for (i = 0; i < 8; i++)
9440 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9446 if (using_mfcr_multiple && count > 1)
9451 p = rtvec_alloc (count);
9454 for (i = 0; i < 8; i++)
9455 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9457 rtvec r = rtvec_alloc (2);
9458 RTVEC_ELT (r, 0) = r12_rtx;
9459 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9460 RTVEC_ELT (p, ndx) =
9461 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9462 gen_rtx_UNSPEC (CCmode, r, 20));
9465 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9470 for (i = 0; i < 8; i++)
9471 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9473 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9479 /* If this is V.4, unwind the stack pointer after all of the loads
9480 have been done. We need to emit a block here so that sched
9481 doesn't decide to move the sp change before the register restores
9482 (which may not have any obvious dependency on the stack). This
9483 doesn't hurt performance, because there is no scheduling that can
9484 be done after this point. */
9485 if (DEFAULT_ABI == ABI_V4)
9487 if (frame_reg_rtx != sp_reg_rtx)
9488 rs6000_emit_stack_tie ();
9490 if (use_backchain_to_restore_sp)
9492 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9494 else if (sp_offset != 0)
9496 emit_insn (Pmode == SImode
9497 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9498 GEN_INT (sp_offset))
9499 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9500 GEN_INT (sp_offset)));
9504 if (current_function_calls_eh_return)
9506 rtx sa = EH_RETURN_STACKADJ_RTX;
9507 emit_insn (Pmode == SImode
9508 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9509 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
9515 if (! restoring_FPRs_inline)
9516 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9518 p = rtvec_alloc (2);
9520 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9521 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9523 LINK_REGISTER_REGNUM));
9525 /* If we have to restore more than two FP registers, branch to the
9526 restore function. It will return to our caller. */
9527 if (! restoring_FPRs_inline)
9531 const char *alloc_rname;
9533 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9534 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9535 alloc_rname = ggc_strdup (rname);
9536 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9537 gen_rtx_SYMBOL_REF (Pmode,
9540 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9543 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9544 GEN_INT (info->fp_save_offset + 8*i));
9545 mem = gen_rtx_MEM (DFmode, addr);
9546 set_mem_alias_set (mem, rs6000_sr_alias_set);
9548 RTVEC_ELT (p, i+3) =
9549 gen_rtx_SET (VOIDmode,
9550 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9555 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9559 /* Write function epilogue. */
9562 rs6000_output_function_epilogue (file, size)
9564 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9566 rs6000_stack_t *info = rs6000_stack_info ();
9567 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9569 if (! HAVE_epilogue)
9571 rtx insn = get_last_insn ();
9572 /* If the last insn was a BARRIER, we don't have to write anything except
9574 if (GET_CODE (insn) == NOTE)
9575 insn = prev_nonnote_insn (insn);
9576 if (insn == 0 || GET_CODE (insn) != BARRIER)
9578 /* This is slightly ugly, but at least we don't have two
9579 copies of the epilogue-emitting code. */
9582 /* A NOTE_INSN_DELETED is supposed to be at the start
9583 and end of the "toplevel" insn chain. */
9584 emit_note (0, NOTE_INSN_DELETED);
9585 rs6000_emit_epilogue (FALSE);
9586 emit_note (0, NOTE_INSN_DELETED);
9588 /* Expand INSN_ADDRESSES so final() doesn't crash. */
9592 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
9594 INSN_ADDRESSES_NEW (insn, addr);
9599 if (TARGET_DEBUG_STACK)
9600 debug_rtx_list (get_insns (), 100);
9601 final (get_insns (), file, FALSE, FALSE);
9606 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9609 We don't output a traceback table if -finhibit-size-directive was
9610 used. The documentation for -finhibit-size-directive reads
9611 ``don't output a @code{.size} assembler directive, or anything
9612 else that would cause trouble if the function is split in the
9613 middle, and the two halves are placed at locations far apart in
9614 memory.'' The traceback table has this property, since it
9615 includes the offset from the start of the function to the
9616 traceback table itself.
9618 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9619 different traceback table. */
9620 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9622 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9623 const char *language_string = lang_hooks.name;
9624 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9627 while (*fname == '.') /* V.4 encodes . in the name */
9630 /* Need label immediately before tbtab, so we can compute its offset
9631 from the function start. */
9634 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9635 ASM_OUTPUT_LABEL (file, fname);
9637 /* The .tbtab pseudo-op can only be used for the first eight
9638 expressions, since it can't handle the possibly variable
9639 length fields that follow. However, if you omit the optional
9640 fields, the assembler outputs zeros for all optional fields
9641 anyways, giving each variable length field is minimum length
9642 (as defined in sys/debug.h). Thus we can not use the .tbtab
9643 pseudo-op at all. */
9645 /* An all-zero word flags the start of the tbtab, for debuggers
9646 that have to find it by searching forward from the entry
9647 point or from the current pc. */
9648 fputs ("\t.long 0\n", file);
9650 /* Tbtab format type. Use format type 0. */
9651 fputs ("\t.byte 0,", file);
9653 /* Language type. Unfortunately, there doesn't seem to be any
9654 official way to get this info, so we use language_string. C
9655 is 0. C++ is 9. No number defined for Obj-C, so use the
9656 value for C for now. There is no official value for Java,
9657 although IBM appears to be using 13. There is no official value
9658 for Chill, so we've chosen 44 pseudo-randomly. */
9659 if (! strcmp (language_string, "GNU C")
9660 || ! strcmp (language_string, "GNU Objective-C"))
9662 else if (! strcmp (language_string, "GNU F77"))
9664 else if (! strcmp (language_string, "GNU Ada"))
9666 else if (! strcmp (language_string, "GNU Pascal"))
9668 else if (! strcmp (language_string, "GNU C++"))
9670 else if (! strcmp (language_string, "GNU Java"))
9672 else if (! strcmp (language_string, "GNU CHILL"))
9676 fprintf (file, "%d,", i);
9678 /* 8 single bit fields: global linkage (not set for C extern linkage,
9679 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9680 from start of procedure stored in tbtab, internal function, function
9681 has controlled storage, function has no toc, function uses fp,
9682 function logs/aborts fp operations. */
9683 /* Assume that fp operations are used if any fp reg must be saved. */
9684 fprintf (file, "%d,",
9685 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9687 /* 6 bitfields: function is interrupt handler, name present in
9688 proc table, function calls alloca, on condition directives
9689 (controls stack walks, 3 bits), saves condition reg, saves
9691 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9692 set up as a frame pointer, even when there is no alloca call. */
9693 fprintf (file, "%d,",
9694 ((optional_tbtab << 6)
9695 | ((optional_tbtab & frame_pointer_needed) << 5)
9696 | (info->cr_save_p << 1)
9697 | (info->lr_save_p)));
9699 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9701 fprintf (file, "%d,",
9702 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9704 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9705 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9709 /* Compute the parameter info from the function decl argument
9712 int next_parm_info_bit = 31;
9714 for (decl = DECL_ARGUMENTS (current_function_decl);
9715 decl; decl = TREE_CHAIN (decl))
9717 rtx parameter = DECL_INCOMING_RTL (decl);
9718 enum machine_mode mode = GET_MODE (parameter);
9720 if (GET_CODE (parameter) == REG)
9722 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9730 else if (mode == DFmode)
9735 /* If only one bit will fit, don't or in this entry. */
9736 if (next_parm_info_bit > 0)
9737 parm_info |= (bits << (next_parm_info_bit - 1));
9738 next_parm_info_bit -= 2;
9742 fixed_parms += ((GET_MODE_SIZE (mode)
9743 + (UNITS_PER_WORD - 1))
9745 next_parm_info_bit -= 1;
9751 /* Number of fixed point parameters. */
9752 /* This is actually the number of words of fixed point parameters; thus
9753 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9754 fprintf (file, "%d,", fixed_parms);
9756 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9758 /* This is actually the number of fp registers that hold parameters;
9759 and thus the maximum value is 13. */
9760 /* Set parameters on stack bit if parameters are not in their original
9761 registers, regardless of whether they are on the stack? Xlc
9762 seems to set the bit when not optimizing. */
9763 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9765 if (! optional_tbtab)
9768 /* Optional fields follow. Some are variable length. */
9770 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9772 /* There is an entry for each parameter in a register, in the order that
9773 they occur in the parameter list. Any intervening arguments on the
9774 stack are ignored. If the list overflows a long (max possible length
9775 34 bits) then completely leave off all elements that don't fit. */
9776 /* Only emit this long if there was at least one parameter. */
9777 if (fixed_parms || float_parms)
9778 fprintf (file, "\t.long %d\n", parm_info);
9780 /* Offset from start of code to tb table. */
9781 fputs ("\t.long ", file);
9782 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9784 RS6000_OUTPUT_BASENAME (file, fname);
9786 assemble_name (file, fname);
9790 RS6000_OUTPUT_BASENAME (file, fname);
9792 assemble_name (file, fname);
9796 /* Interrupt handler mask. */
9797 /* Omit this long, since we never set the interrupt handler bit
9800 /* Number of CTL (controlled storage) anchors. */
9801 /* Omit this long, since the has_ctl bit is never set above. */
9803 /* Displacement into stack of each CTL anchor. */
9804 /* Omit this list of longs, because there are no CTL anchors. */
9806 /* Length of function name. */
9807 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9809 /* Function name. */
9810 assemble_string (fname, strlen (fname));
9812 /* Register for alloca automatic storage; this is always reg 31.
9813 Only emit this if the alloca bit was set above. */
9814 if (frame_pointer_needed)
9815 fputs ("\t.byte 31\n", file);
9817 fputs ("\t.align 2\n", file);
9822 /* A C compound statement that outputs the assembler code for a thunk
9823 function, used to implement C++ virtual function calls with
9824 multiple inheritance. The thunk acts as a wrapper around a virtual
9825 function, adjusting the implicit object parameter before handing
9826 control off to the real function.
9828 First, emit code to add the integer DELTA to the location that
9829 contains the incoming first argument. Assume that this argument
9830 contains a pointer, and is the one used to pass the `this' pointer
9831 in C++. This is the incoming argument *before* the function
9832 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9833 values of all other incoming arguments.
9835 After the addition, emit code to jump to FUNCTION, which is a
9836 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9837 not touch the return address. Hence returning from FUNCTION will
9838 return to whoever called the current `thunk'.
9840 The effect must be as if FUNCTION had been called directly with the
9841 adjusted first argument. This macro is responsible for emitting
9842 all of the code for a thunk function; output_function_prologue()
9843 and output_function_epilogue() are not invoked.
9845 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9846 been extracted from it.) It might possibly be useful on some
9847 targets, but probably not.
9849 If you do not define this macro, the target-independent code in the
9850 C++ frontend will generate a less efficient heavyweight thunk that
9851 calls FUNCTION instead of jumping to it. The generic approach does
9852 not support varargs. */
9855 output_mi_thunk (file, thunk_fndecl, delta, function)
9857 tree thunk_fndecl ATTRIBUTE_UNUSED;
9861 const char *this_reg =
9862 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9865 const char *r0 = reg_names[0];
9866 const char *toc = reg_names[2];
9867 const char *schain = reg_names[11];
9868 const char *r12 = reg_names[12];
9870 static int labelno = 0;
9872 /* Small constants that can be done by one add instruction. */
9873 if (delta >= -32768 && delta <= 32767)
9875 if (! TARGET_NEW_MNEMONICS)
9876 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9878 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9881 /* Large constants that can be done by one addis instruction. */
9882 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9883 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9886 /* 32-bit constants that can be done by an add and addis instruction. */
9887 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9889 /* Break into two pieces, propagating the sign bit from the low
9890 word to the upper word. */
9891 int delta_high = delta >> 16;
9892 int delta_low = delta & 0xffff;
9893 if ((delta_low & 0x8000) != 0)
9896 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9899 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9902 if (! TARGET_NEW_MNEMONICS)
9903 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9905 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9908 /* 64-bit constants, fixme */
9912 /* Get the prefix in front of the names. */
9913 switch (DEFAULT_ABI)
9923 case ABI_AIX_NODESC:
9928 /* If the function is compiled in this module, jump to it directly.
9929 Otherwise, load up its address and jump to it. */
9931 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9933 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9934 && (! lookup_attribute ("longcall",
9935 TYPE_ATTRIBUTES (TREE_TYPE (function)))
9936 || lookup_attribute ("shortcall",
9937 TYPE_ATTRIBUTES (TREE_TYPE (function)))))
9940 fprintf (file, "\tb %s", prefix);
9941 assemble_name (file, fname);
9942 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9948 switch (DEFAULT_ABI)
9954 /* Set up a TOC entry for the function. */
9955 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9957 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9960 if (TARGET_MINIMAL_TOC)
9961 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9964 fputs ("\t.tc ", file);
9965 assemble_name (file, fname);
9966 fputs ("[TC],", file);
9968 assemble_name (file, fname);
9971 if (TARGET_MINIMAL_TOC)
9972 asm_fprintf (file, (TARGET_32BIT)
9973 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9974 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9975 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9976 assemble_name (file, buf);
9977 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9978 fputs ("-(.LCTOC1)", file);
9979 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9981 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9985 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9988 asm_fprintf (file, "\tmtctr %s\n", r0);
9990 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9993 asm_fprintf (file, "\tbctr\n");
9996 case ABI_AIX_NODESC:
9998 fprintf (file, "\tb %s", prefix);
9999 assemble_name (file, fname);
10000 if (flag_pic) fputs ("@plt", file);
10006 fprintf (file, "\tb %s", prefix);
10007 if (flag_pic && !machopic_name_defined_p (fname))
10008 assemble_name (file, machopic_stub_name (fname));
10010 assemble_name (file, fname);
10019 /* A quick summary of the various types of 'constant-pool tables'
10022 Target Flags Name One table per
10023 AIX (none) AIX TOC object file
10024 AIX -mfull-toc AIX TOC object file
10025 AIX -mminimal-toc AIX minimal TOC translation unit
10026 SVR4/EABI (none) SVR4 SDATA object file
10027 SVR4/EABI -fpic SVR4 pic object file
10028 SVR4/EABI -fPIC SVR4 PIC translation unit
10029 SVR4/EABI -mrelocatable EABI TOC function
10030 SVR4/EABI -maix AIX TOC object file
10031 SVR4/EABI -maix -mminimal-toc
10032 AIX minimal TOC translation unit
10034 Name Reg. Set by entries contains:
10035 made by addrs? fp? sum?
10037 AIX TOC 2 crt0 as Y option option
10038 AIX minimal TOC 30 prolog gcc Y Y option
10039 SVR4 SDATA 13 crt0 gcc N Y N
10040 SVR4 pic 30 prolog ld Y not yet N
10041 SVR4 PIC 30 prolog gcc Y option option
10042 EABI TOC 30 prolog gcc Y option option
10046 /* Hash table stuff for keeping track of TOC entries. */
10048 struct toc_hash_struct
10050 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
10051 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
10053 enum machine_mode key_mode;
10057 static htab_t toc_hash_table;
10059 /* Hash functions for the hash table. */
10062 rs6000_hash_constant (k)
10065 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
10066 const char *format = GET_RTX_FORMAT (GET_CODE (k));
10067 int flen = strlen (format);
10070 if (GET_CODE (k) == LABEL_REF)
10071 return result * 1231 + X0INT (XEXP (k, 0), 3);
10073 if (GET_CODE (k) == CODE_LABEL)
10078 for (; fidx < flen; fidx++)
10079 switch (format[fidx])
10084 const char *str = XSTR (k, fidx);
10085 len = strlen (str);
10086 result = result * 613 + len;
10087 for (i = 0; i < len; i++)
10088 result = result * 613 + (unsigned) str[i];
10093 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
10097 result = result * 613 + (unsigned) XINT (k, fidx);
10100 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
10101 result = result * 613 + (unsigned) XWINT (k, fidx);
10105 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
10106 result = result * 613 + (unsigned) (XWINT (k, fidx)
10117 toc_hash_function (hash_entry)
10118 const void * hash_entry;
10120 const struct toc_hash_struct *thc =
10121 (const struct toc_hash_struct *) hash_entry;
10122 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
10125 /* Compare H1 and H2 for equivalence. */
10128 toc_hash_eq (h1, h2)
10132 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
10133 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
10135 if (((const struct toc_hash_struct *) h1)->key_mode
10136 != ((const struct toc_hash_struct *) h2)->key_mode)
10139 return rtx_equal_p (r1, r2);
10142 /* Mark the hash table-entry HASH_ENTRY. */
10145 toc_hash_mark_entry (hash_slot, unused)
10147 void * unused ATTRIBUTE_UNUSED;
10149 const struct toc_hash_struct * hash_entry =
10150 *(const struct toc_hash_struct **) hash_slot;
10151 rtx r = hash_entry->key;
10152 ggc_set_mark (hash_entry);
10153 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
10154 if (GET_CODE (r) == LABEL_REF)
10157 ggc_set_mark (XEXP (r, 0));
10164 /* Mark all the elements of the TOC hash-table *HT. */
10167 toc_hash_mark_table (vht)
10172 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
10175 /* These are the names given by the C++ front-end to vtables, and
10176 vtable-like objects. Ideally, this logic should not be here;
10177 instead, there should be some programmatic way of inquiring as
10178 to whether or not an object is a vtable. */
10180 #define VTABLE_NAME_P(NAME) \
10181 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
10182 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
10183 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
10184 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
10187 rs6000_output_symbol_ref (file, x)
10191 /* Currently C++ toc references to vtables can be emitted before it
10192 is decided whether the vtable is public or private. If this is
10193 the case, then the linker will eventually complain that there is
10194 a reference to an unknown section. Thus, for vtables only,
10195 we emit the TOC reference to reference the symbol and not the
10197 const char *name = XSTR (x, 0);
10199 if (VTABLE_NAME_P (name))
10201 RS6000_OUTPUT_BASENAME (file, name);
10204 assemble_name (file, name);
10207 /* Output a TOC entry. We derive the entry name from what is being
10211 output_toc (file, x, labelno, mode)
10215 enum machine_mode mode;
10218 const char *name = buf;
10219 const char *real_name;
10226 /* When the linker won't eliminate them, don't output duplicate
10227 TOC entries (this happens on AIX if there is any kind of TOC,
10228 and on SVR4 under -fPIC or -mrelocatable). */
10231 struct toc_hash_struct *h;
10234 h = ggc_alloc (sizeof (*h));
10236 h->key_mode = mode;
10237 h->labelno = labelno;
10239 found = htab_find_slot (toc_hash_table, h, 1);
10240 if (*found == NULL)
10242 else /* This is indeed a duplicate.
10243 Set this label equal to that label. */
10245 fputs ("\t.set ", file);
10246 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10247 fprintf (file, "%d,", labelno);
10248 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10249 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
10255 /* If we're going to put a double constant in the TOC, make sure it's
10256 aligned properly when strict alignment is on. */
10257 if (GET_CODE (x) == CONST_DOUBLE
10258 && STRICT_ALIGNMENT
10259 && GET_MODE_BITSIZE (mode) >= 64
10260 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
10261 ASM_OUTPUT_ALIGN (file, 3);
10264 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
10266 /* Handle FP constants specially. Note that if we have a minimal
10267 TOC, things we put here aren't actually in the TOC, so we can allow
10269 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
10271 REAL_VALUE_TYPE rv;
10274 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10275 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
10279 if (TARGET_MINIMAL_TOC)
10280 fputs (DOUBLE_INT_ASM_OP, file);
10282 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
10283 k[0] & 0xffffffff, k[1] & 0xffffffff);
10284 fprintf (file, "0x%lx%08lx\n",
10285 k[0] & 0xffffffff, k[1] & 0xffffffff);
10290 if (TARGET_MINIMAL_TOC)
10291 fputs ("\t.long ", file);
10293 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
10294 k[0] & 0xffffffff, k[1] & 0xffffffff);
10295 fprintf (file, "0x%lx,0x%lx\n",
10296 k[0] & 0xffffffff, k[1] & 0xffffffff);
10300 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
10302 REAL_VALUE_TYPE rv;
10305 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10306 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
10310 if (TARGET_MINIMAL_TOC)
10311 fputs (DOUBLE_INT_ASM_OP, file);
10313 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
10314 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
10319 if (TARGET_MINIMAL_TOC)
10320 fputs ("\t.long ", file);
10322 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
10323 fprintf (file, "0x%lx\n", l & 0xffffffff);
10327 else if (GET_MODE (x) == VOIDmode
10328 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10330 unsigned HOST_WIDE_INT low;
10331 HOST_WIDE_INT high;
10333 if (GET_CODE (x) == CONST_DOUBLE)
10335 low = CONST_DOUBLE_LOW (x);
10336 high = CONST_DOUBLE_HIGH (x);
10339 #if HOST_BITS_PER_WIDE_INT == 32
10342 high = (low & 0x80000000) ? ~0 : 0;
10346 low = INTVAL (x) & 0xffffffff;
10347 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10351 /* TOC entries are always Pmode-sized, but since this
10352 is a bigendian machine then if we're putting smaller
10353 integer constants in the TOC we have to pad them.
10354 (This is still a win over putting the constants in
10355 a separate constant pool, because then we'd have
10356 to have both a TOC entry _and_ the actual constant.)
10358 For a 32-bit target, CONST_INT values are loaded and shifted
10359 entirely within `low' and can be stored in one TOC entry. */
10361 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10362 abort ();/* It would be easy to make this work, but it doesn't now. */
10364 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10365 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10366 POINTER_SIZE, &low, &high, 0);
10370 if (TARGET_MINIMAL_TOC)
10371 fputs (DOUBLE_INT_ASM_OP, file);
10373 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10374 (long) high & 0xffffffff, (long) low & 0xffffffff);
10375 fprintf (file, "0x%lx%08lx\n",
10376 (long) high & 0xffffffff, (long) low & 0xffffffff);
10381 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10383 if (TARGET_MINIMAL_TOC)
10384 fputs ("\t.long ", file);
10386 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10387 (long) high & 0xffffffff, (long) low & 0xffffffff);
10388 fprintf (file, "0x%lx,0x%lx\n",
10389 (long) high & 0xffffffff, (long) low & 0xffffffff);
10393 if (TARGET_MINIMAL_TOC)
10394 fputs ("\t.long ", file);
10396 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
10397 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
10403 if (GET_CODE (x) == CONST)
10405 if (GET_CODE (XEXP (x, 0)) != PLUS)
10408 base = XEXP (XEXP (x, 0), 0);
10409 offset = INTVAL (XEXP (XEXP (x, 0), 1));
10412 if (GET_CODE (base) == SYMBOL_REF)
10413 name = XSTR (base, 0);
10414 else if (GET_CODE (base) == LABEL_REF)
10415 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10416 else if (GET_CODE (base) == CODE_LABEL)
10417 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10421 real_name = (*targetm.strip_name_encoding) (name);
10422 if (TARGET_MINIMAL_TOC)
10423 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10426 fprintf (file, "\t.tc %s", real_name);
10429 fprintf (file, ".N%d", - offset);
10431 fprintf (file, ".P%d", offset);
10433 fputs ("[TC],", file);
10436 /* Currently C++ toc references to vtables can be emitted before it
10437 is decided whether the vtable is public or private. If this is
10438 the case, then the linker will eventually complain that there is
10439 a TOC reference to an unknown section. Thus, for vtables only,
10440 we emit the TOC reference to reference the symbol and not the
10442 if (VTABLE_NAME_P (name))
10444 RS6000_OUTPUT_BASENAME (file, name);
10446 fprintf (file, "%d", offset);
10447 else if (offset > 0)
10448 fprintf (file, "+%d", offset);
10451 output_addr_const (file, x);
10455 /* Output an assembler pseudo-op to write an ASCII string of N characters
10456 starting at P to FILE.
10458 On the RS/6000, we have to do this using the .byte operation and
10459 write out special characters outside the quoted string.
10460 Also, the assembler is broken; very long strings are truncated,
10461 so we must artificially break them up early. */
10464 output_ascii (file, p, n)
10470 int i, count_string;
10471 const char *for_string = "\t.byte \"";
10472 const char *for_decimal = "\t.byte ";
10473 const char *to_close = NULL;
10476 for (i = 0; i < n; i++)
10479 if (c >= ' ' && c < 0177)
10482 fputs (for_string, file);
10485 /* Write two quotes to get one. */
10493 for_decimal = "\"\n\t.byte ";
10497 if (count_string >= 512)
10499 fputs (to_close, file);
10501 for_string = "\t.byte \"";
10502 for_decimal = "\t.byte ";
10510 fputs (for_decimal, file);
10511 fprintf (file, "%d", c);
10513 for_string = "\n\t.byte \"";
10514 for_decimal = ", ";
10520 /* Now close the string if we have written one. Then end the line. */
10522 fputs (to_close, file);
10525 /* Generate a unique section name for FILENAME for a section type
10526 represented by SECTION_DESC. Output goes into BUF.
10528 SECTION_DESC can be any string, as long as it is different for each
10529 possible section type.
10531 We name the section in the same manner as xlc. The name begins with an
10532 underscore followed by the filename (after stripping any leading directory
10533 names) with the last period replaced by the string SECTION_DESC. If
10534 FILENAME does not contain a period, SECTION_DESC is appended to the end of
10538 rs6000_gen_section_name (buf, filename, section_desc)
10540 const char *filename;
10541 const char *section_desc;
10543 const char *q, *after_last_slash, *last_period = 0;
10547 after_last_slash = filename;
10548 for (q = filename; *q; q++)
10551 after_last_slash = q + 1;
10552 else if (*q == '.')
10556 len = strlen (after_last_slash) + strlen (section_desc) + 2;
10557 *buf = (char *) permalloc (len);
10562 for (q = after_last_slash; *q; q++)
10564 if (q == last_period)
10566 strcpy (p, section_desc);
10567 p += strlen (section_desc);
10570 else if (ISALNUM (*q))
10574 if (last_period == 0)
10575 strcpy (p, section_desc);
10580 /* Emit profile function. */
10583 output_profile_hook (labelno)
10586 if (DEFAULT_ABI == ABI_AIX)
10589 const char *label_name;
10592 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10593 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
10594 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10596 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10599 else if (DEFAULT_ABI == ABI_DARWIN)
10601 const char *mcount_name = RS6000_MCOUNT;
10602 int caller_addr_regno = LINK_REGISTER_REGNUM;
10604 /* Be conservative and always set this, at least for now. */
10605 current_function_uses_pic_offset_table = 1;
10608 /* For PIC code, set up a stub and collect the caller's address
10609 from r0, which is where the prologue puts it. */
10612 mcount_name = machopic_stub_name (mcount_name);
10613 if (current_function_uses_pic_offset_table)
10614 caller_addr_regno = 0;
10617 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10619 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10623 /* Write function profiler code. */
10626 output_function_profiler (file, labelno)
10632 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10633 switch (DEFAULT_ABI)
10639 case ABI_AIX_NODESC:
10640 fprintf (file, "\tmflr %s\n", reg_names[0]);
10643 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10644 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10645 reg_names[0], reg_names[1]);
10646 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10647 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10648 assemble_name (file, buf);
10649 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
10651 else if (flag_pic > 1)
10653 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10654 reg_names[0], reg_names[1]);
10655 /* Now, we need to get the address of the label. */
10656 fputs ("\tbl 1f\n\t.long ", file);
10657 assemble_name (file, buf);
10658 fputs ("-.\n1:", file);
10659 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10660 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10661 reg_names[0], reg_names[11]);
10662 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10663 reg_names[0], reg_names[0], reg_names[11]);
10667 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10668 assemble_name (file, buf);
10669 fputs ("@ha\n", file);
10670 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10671 reg_names[0], reg_names[1]);
10672 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10673 assemble_name (file, buf);
10674 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
10677 if (current_function_needs_context)
10678 asm_fprintf (file, "\tmr %s,%s\n",
10679 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10680 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10681 if (current_function_needs_context)
10682 asm_fprintf (file, "\tmr %s,%s\n",
10683 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10688 /* Don't do anything, done in output_profile_hook (). */
10694 /* Adjust the cost of a scheduling dependency. Return the new cost of
10695 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
10698 rs6000_adjust_cost (insn, link, dep_insn, cost)
10701 rtx dep_insn ATTRIBUTE_UNUSED;
10704 if (! recog_memoized (insn))
10707 if (REG_NOTE_KIND (link) != 0)
10710 if (REG_NOTE_KIND (link) == 0)
10712 /* Data dependency; DEP_INSN writes a register that INSN reads
10713 some cycles later. */
10714 switch (get_attr_type (insn))
10717 /* Tell the first scheduling pass about the latency between
10718 a mtctr and bctr (and mtlr and br/blr). The first
10719 scheduling pass will not know about this latency since
10720 the mtctr instruction, which has the latency associated
10721 to it, will be generated by reload. */
10722 return TARGET_POWER ? 5 : 4;
10724 /* Leave some extra cycles between a compare and its
10725 dependent branch, to inhibit expensive mispredicts. */
10726 if ((rs6000_cpu_attr == CPU_PPC603
10727 || rs6000_cpu_attr == CPU_PPC604
10728 || rs6000_cpu_attr == CPU_PPC604E
10729 || rs6000_cpu_attr == CPU_PPC620
10730 || rs6000_cpu_attr == CPU_PPC630
10731 || rs6000_cpu_attr == CPU_PPC750
10732 || rs6000_cpu_attr == CPU_PPC7400
10733 || rs6000_cpu_attr == CPU_PPC7450
10734 || rs6000_cpu_attr == CPU_POWER4)
10735 && recog_memoized (dep_insn)
10736 && (INSN_CODE (dep_insn) >= 0)
10737 && (get_attr_type (dep_insn) == TYPE_COMPARE
10738 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10739 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10740 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10745 /* Fall out to return default cost. */
10751 /* A C statement (sans semicolon) to update the integer scheduling
10752 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10753 INSN earlier, increase the priority to execute INSN later. Do not
10754 define this macro if you do not need to adjust the scheduling
10755 priorities of insns. */
10758 rs6000_adjust_priority (insn, priority)
10759 rtx insn ATTRIBUTE_UNUSED;
10762 /* On machines (like the 750) which have asymmetric integer units,
10763 where one integer unit can do multiply and divides and the other
10764 can't, reduce the priority of multiply/divide so it is scheduled
10765 before other integer operations. */
10768 if (! INSN_P (insn))
10771 if (GET_CODE (PATTERN (insn)) == USE)
10774 switch (rs6000_cpu_attr) {
10776 switch (get_attr_type (insn))
10783 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10784 priority, priority);
10785 if (priority >= 0 && priority < 0x01000000)
10795 /* Return how many instructions the machine can issue per cycle. */
10798 rs6000_issue_rate ()
10800 switch (rs6000_cpu_attr) {
10801 case CPU_RIOS1: /* ? */
10803 case CPU_PPC601: /* ? */
10823 /* Length in units of the trampoline for entering a nested function. */
10826 rs6000_trampoline_size ()
10830 switch (DEFAULT_ABI)
10836 ret = (TARGET_32BIT) ? 12 : 24;
10841 case ABI_AIX_NODESC:
10842 ret = (TARGET_32BIT) ? 40 : 48;
10849 /* Emit RTL insns to initialize the variable parts of a trampoline.
10850 FNADDR is an RTX for the address of the function's pure code.
10851 CXT is an RTX for the static chain value for the function. */
10854 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10859 enum machine_mode pmode = Pmode;
10860 int regsize = (TARGET_32BIT) ? 4 : 8;
10861 rtx ctx_reg = force_reg (pmode, cxt);
10863 switch (DEFAULT_ABI)
10868 /* Macros to shorten the code expansions below. */
10869 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10870 #define MEM_PLUS(addr,offset) \
10871 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10873 /* Under AIX, just build the 3 word function descriptor */
10876 rtx fn_reg = gen_reg_rtx (pmode);
10877 rtx toc_reg = gen_reg_rtx (pmode);
10878 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10879 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10880 emit_move_insn (MEM_DEREF (addr), fn_reg);
10881 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10882 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10886 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10889 case ABI_AIX_NODESC:
10890 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10891 FALSE, VOIDmode, 4,
10893 GEN_INT (rs6000_trampoline_size ()), SImode,
10903 /* Table of valid machine attributes. */
10905 const struct attribute_spec rs6000_attribute_table[] =
10907 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10908 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10909 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10910 { NULL, 0, 0, false, false, false, NULL }
10913 /* Handle a "longcall" or "shortcall" attribute; arguments as in
10914 struct attribute_spec.handler. */
10917 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10920 tree args ATTRIBUTE_UNUSED;
10921 int flags ATTRIBUTE_UNUSED;
10922 bool *no_add_attrs;
10924 if (TREE_CODE (*node) != FUNCTION_TYPE
10925 && TREE_CODE (*node) != FIELD_DECL
10926 && TREE_CODE (*node) != TYPE_DECL)
10928 warning ("`%s' attribute only applies to functions",
10929 IDENTIFIER_POINTER (name));
10930 *no_add_attrs = true;
10936 /* Set longcall attributes on all functions declared when
10937 rs6000_default_long_calls is true. */
10939 rs6000_set_default_type_attributes (type)
10942 if (rs6000_default_long_calls
10943 && (TREE_CODE (type) == FUNCTION_TYPE
10944 || TREE_CODE (type) == METHOD_TYPE))
10945 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
10947 TYPE_ATTRIBUTES (type));
10950 /* Return a reference suitable for calling a function with the
10951 longcall attribute. */
10954 rs6000_longcall_ref (call_ref)
10957 const char *call_name;
10960 if (GET_CODE (call_ref) != SYMBOL_REF)
10963 /* System V adds '.' to the internal name, so skip them. */
10964 call_name = XSTR (call_ref, 0);
10965 if (*call_name == '.')
10967 while (*call_name == '.')
10970 node = get_identifier (call_name);
10971 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10974 return force_reg (Pmode, call_ref);
10978 #ifdef USING_ELFOS_H
10980 /* A C statement or statements to switch to the appropriate section
10981 for output of RTX in mode MODE. You can assume that RTX is some
10982 kind of constant in RTL. The argument MODE is redundant except in
10983 the case of a `const_int' rtx. Select the section by calling
10984 `text_section' or one of the alternatives for other sections.
10986 Do not define this macro if you put all constants in the read-only
10990 rs6000_elf_select_rtx_section (mode, x, align)
10991 enum machine_mode mode;
10993 unsigned HOST_WIDE_INT align;
10995 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10998 default_elf_select_rtx_section (mode, x, align);
11001 /* A C statement or statements to switch to the appropriate
11002 section for output of DECL. DECL is either a `VAR_DECL' node
11003 or a constant of some sort. RELOC indicates whether forming
11004 the initial value of DECL requires link-time relocations. */
11007 rs6000_elf_select_section (decl, reloc, align)
11010 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
11012 int size = int_size_in_bytes (TREE_TYPE (decl));
11015 static void (* const sec_funcs[4]) PARAMS ((void)) = {
11016 &readonly_data_section,
11022 needs_sdata = (size > 0
11023 && size <= g_switch_value
11024 && rs6000_sdata != SDATA_NONE
11025 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
11027 if (TREE_CODE (decl) == STRING_CST)
11028 readonly = ! flag_writable_strings;
11029 else if (TREE_CODE (decl) == VAR_DECL)
11030 readonly = (! (flag_pic && reloc)
11031 && TREE_READONLY (decl)
11032 && ! TREE_SIDE_EFFECTS (decl)
11033 && DECL_INITIAL (decl)
11034 && DECL_INITIAL (decl) != error_mark_node
11035 && TREE_CONSTANT (DECL_INITIAL (decl)));
11036 else if (TREE_CODE (decl) == CONSTRUCTOR)
11037 readonly = (! (flag_pic && reloc)
11038 && ! TREE_SIDE_EFFECTS (decl)
11039 && TREE_CONSTANT (decl));
11042 if (needs_sdata && rs6000_sdata != SDATA_EABI)
11045 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
11048 /* A C statement to build up a unique section name, expressed as a
11049 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
11050 RELOC indicates whether the initial value of EXP requires
11051 link-time relocations. If you do not define this macro, GCC will use
11052 the symbol name prefixed by `.' as the section name. Note - this
11053 macro can now be called for uninitialized data items as well as
11054 initialised data and functions. */
11057 rs6000_elf_unique_section (decl, reloc)
11065 const char *prefix;
11067 static const char *const prefixes[7][2] =
11069 { ".rodata.", ".gnu.linkonce.r." },
11070 { ".sdata2.", ".gnu.linkonce.s2." },
11071 { ".data.", ".gnu.linkonce.d." },
11072 { ".sdata.", ".gnu.linkonce.s." },
11073 { ".bss.", ".gnu.linkonce.b." },
11074 { ".sbss.", ".gnu.linkonce.sb." },
11075 { ".text.", ".gnu.linkonce.t." }
11078 if (TREE_CODE (decl) == FUNCTION_DECL)
11087 if (TREE_CODE (decl) == STRING_CST)
11088 readonly = ! flag_writable_strings;
11089 else if (TREE_CODE (decl) == VAR_DECL)
11090 readonly = (! (flag_pic && reloc)
11091 && TREE_READONLY (decl)
11092 && ! TREE_SIDE_EFFECTS (decl)
11093 && TREE_CONSTANT (DECL_INITIAL (decl)));
11095 size = int_size_in_bytes (TREE_TYPE (decl));
11096 needs_sdata = (size > 0
11097 && size <= g_switch_value
11098 && rs6000_sdata != SDATA_NONE
11099 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
11101 if (DECL_INITIAL (decl) == 0
11102 || DECL_INITIAL (decl) == error_mark_node)
11104 else if (! readonly)
11111 /* .sdata2 is only for EABI. */
11112 if (sec == 0 && rs6000_sdata != SDATA_EABI)
11118 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
11119 name = (*targetm.strip_name_encoding) (name);
11120 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
11121 len = strlen (name) + strlen (prefix);
11122 string = alloca (len + 1);
11124 sprintf (string, "%s%s", prefix, name);
11126 DECL_SECTION_NAME (decl) = build_string (len, string);
11130 /* If we are referencing a function that is static or is known to be
11131 in this file, make the SYMBOL_REF special. We can use this to indicate
11132 that we can branch to this function without emitting a no-op after the
11133 call. For real AIX calling sequences, we also replace the
11134 function name with the real name (1 or 2 leading .'s), rather than
11135 the function descriptor name. This saves a lot of overriding code
11136 to read the prefixes. */
11139 rs6000_elf_encode_section_info (decl, first)
11146 if (TREE_CODE (decl) == FUNCTION_DECL)
11148 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11149 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
11150 && ! DECL_WEAK (decl))
11151 SYMBOL_REF_FLAG (sym_ref) = 1;
11153 if (DEFAULT_ABI == ABI_AIX)
11155 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
11156 size_t len2 = strlen (XSTR (sym_ref, 0));
11157 char *str = alloca (len1 + len2 + 1);
11160 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
11162 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
11165 else if (rs6000_sdata != SDATA_NONE
11166 && DEFAULT_ABI == ABI_V4
11167 && TREE_CODE (decl) == VAR_DECL)
11169 int size = int_size_in_bytes (TREE_TYPE (decl));
11170 tree section_name = DECL_SECTION_NAME (decl);
11171 const char *name = (char *)0;
11176 if (TREE_CODE (section_name) == STRING_CST)
11178 name = TREE_STRING_POINTER (section_name);
11179 len = TREE_STRING_LENGTH (section_name);
11185 if ((size > 0 && size <= g_switch_value)
11187 && ((len == sizeof (".sdata") - 1
11188 && strcmp (name, ".sdata") == 0)
11189 || (len == sizeof (".sdata2") - 1
11190 && strcmp (name, ".sdata2") == 0)
11191 || (len == sizeof (".sbss") - 1
11192 && strcmp (name, ".sbss") == 0)
11193 || (len == sizeof (".sbss2") - 1
11194 && strcmp (name, ".sbss2") == 0)
11195 || (len == sizeof (".PPC.EMB.sdata0") - 1
11196 && strcmp (name, ".PPC.EMB.sdata0") == 0)
11197 || (len == sizeof (".PPC.EMB.sbss0") - 1
11198 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
11200 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11201 size_t len = strlen (XSTR (sym_ref, 0));
11202 char *str = alloca (len + 2);
11205 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
11206 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
11211 static const char *
11212 rs6000_elf_strip_name_encoding (str)
11215 while (*str == '*' || *str == '@')
11220 #endif /* USING_ELFOS_H */
11223 /* Return a REG that occurs in ADDR with coefficient 1.
11224 ADDR can be effectively incremented by incrementing REG.
11226 r0 is special and we must not select it as an address
11227 register by this routine since our caller will try to
11228 increment the returned register via an "la" instruction. */
11231 find_addr_reg (addr)
11234 while (GET_CODE (addr) == PLUS)
11236 if (GET_CODE (XEXP (addr, 0)) == REG
11237 && REGNO (XEXP (addr, 0)) != 0)
11238 addr = XEXP (addr, 0);
11239 else if (GET_CODE (XEXP (addr, 1)) == REG
11240 && REGNO (XEXP (addr, 1)) != 0)
11241 addr = XEXP (addr, 1);
11242 else if (CONSTANT_P (XEXP (addr, 0)))
11243 addr = XEXP (addr, 1);
11244 else if (CONSTANT_P (XEXP (addr, 1)))
11245 addr = XEXP (addr, 0);
11249 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
11255 rs6000_fatal_bad_address (op)
11258 fatal_insn ("bad address", op);
11261 /* Called to register all of our global variables with the garbage
11265 rs6000_add_gc_roots ()
11267 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
11268 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
11269 toc_hash_mark_table);
11275 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
11276 reference and a constant. */
11279 symbolic_operand (op)
11282 switch (GET_CODE (op))
11289 return (GET_CODE (op) == SYMBOL_REF ||
11290 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
11291 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
11292 && GET_CODE (XEXP (op, 1)) == CONST_INT);
11299 #ifdef RS6000_LONG_BRANCH
11301 static tree stub_list = 0;
11303 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11304 procedure calls to the linked list. */
11307 add_compiler_stub (label_name, function_name, line_number)
11309 tree function_name;
11312 tree stub = build_tree_list (function_name, label_name);
11313 TREE_TYPE (stub) = build_int_2 (line_number, 0);
11314 TREE_CHAIN (stub) = stub_list;
11318 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
11319 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
11320 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
11322 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11323 handling procedure calls from the linked list and initializes the
11327 output_compiler_stub ()
11330 char label_buf[256];
11332 tree tmp_stub, stub;
11335 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11337 fprintf (asm_out_file,
11338 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11340 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11341 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11342 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11343 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11345 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11347 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11350 label_buf[0] = '_';
11351 strcpy (label_buf+1,
11352 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
11355 strcpy (tmp_buf, "lis r12,hi16(");
11356 strcat (tmp_buf, label_buf);
11357 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11358 strcat (tmp_buf, label_buf);
11359 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11360 output_asm_insn (tmp_buf, 0);
11362 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11363 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11364 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11365 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11371 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11372 already there or not. */
11375 no_previous_def (function_name)
11376 tree function_name;
11379 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11380 if (function_name == STUB_FUNCTION_NAME (stub))
11385 /* GET_PREV_LABEL gets the label name from the previous definition of
11389 get_prev_label (function_name)
11390 tree function_name;
11393 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11394 if (function_name == STUB_FUNCTION_NAME (stub))
11395 return STUB_LABEL_NAME (stub);
11399 /* INSN is either a function call or a millicode call. It may have an
11400 unconditional jump in its delay slot.
11402 CALL_DEST is the routine we are calling. */
11405 output_call (insn, call_dest, operand_number)
11408 int operand_number;
11410 static char buf[256];
11411 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11414 tree funname = get_identifier (XSTR (call_dest, 0));
11416 if (no_previous_def (funname))
11419 rtx label_rtx = gen_label_rtx ();
11420 char *label_buf, temp_buf[256];
11421 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11422 CODE_LABEL_NUMBER (label_rtx));
11423 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11424 labelname = get_identifier (label_buf);
11425 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11427 line_number = NOTE_LINE_NUMBER (insn);
11428 add_compiler_stub (labelname, funname, line_number);
11431 labelname = get_prev_label (funname);
11433 sprintf (buf, "jbsr %%z%d,%.246s",
11434 operand_number, IDENTIFIER_POINTER (labelname));
11439 sprintf (buf, "bl %%z%d", operand_number);
11444 #endif /* RS6000_LONG_BRANCH */
11446 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11448 const char *const symbol_ = (SYMBOL); \
11449 char *buffer_ = (BUF); \
11450 if (symbol_[0] == '"') \
11452 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11454 else if (name_needs_quotes(symbol_)) \
11456 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11460 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11465 /* Generate PIC and indirect symbol stubs. */
11468 machopic_output_stub (file, symb, stub)
11470 const char *symb, *stub;
11472 unsigned int length;
11473 char *symbol_name, *lazy_ptr_name;
11474 char *local_label_0;
11475 static int label = 0;
11477 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11478 symb = (*targetm.strip_name_encoding) (symb);
11482 length = strlen (symb);
11483 symbol_name = alloca (length + 32);
11484 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11486 lazy_ptr_name = alloca (length + 32);
11487 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11489 local_label_0 = alloca (length + 32);
11490 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11493 machopic_picsymbol_stub_section ();
11495 machopic_symbol_stub_section ();
11497 fprintf (file, "%s:\n", stub);
11498 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11502 fprintf (file, "\tmflr r0\n");
11503 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11504 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11505 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11506 lazy_ptr_name, local_label_0);
11507 fprintf (file, "\tmtlr r0\n");
11508 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11509 lazy_ptr_name, local_label_0);
11510 fprintf (file, "\tmtctr r12\n");
11511 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11512 lazy_ptr_name, local_label_0);
11513 fprintf (file, "\tbctr\n");
11516 fprintf (file, "non-pure not supported\n");
11518 machopic_lazy_symbol_ptr_section ();
11519 fprintf (file, "%s:\n", lazy_ptr_name);
11520 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11521 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11524 /* Legitimize PIC addresses. If the address is already
11525 position-independent, we return ORIG. Newly generated
11526 position-independent addresses go into a reg. This is REG if non
11527 zero, otherwise we allocate register(s) as necessary. */
11529 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11532 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11534 enum machine_mode mode;
11539 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11540 reg = gen_reg_rtx (Pmode);
11542 if (GET_CODE (orig) == CONST)
11544 if (GET_CODE (XEXP (orig, 0)) == PLUS
11545 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11548 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11551 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11554 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11560 if (GET_CODE (offset) == CONST_INT)
11562 if (SMALL_INT (offset))
11563 return plus_constant (base, INTVAL (offset));
11564 else if (! reload_in_progress && ! reload_completed)
11565 offset = force_reg (Pmode, offset);
11568 rtx mem = force_const_mem (Pmode, orig);
11569 return machopic_legitimize_pic_address (mem, Pmode, reg);
11572 return gen_rtx (PLUS, Pmode, base, offset);
11575 /* Fall back on generic machopic code. */
11576 return machopic_legitimize_pic_address (orig, mode, reg);
11579 /* This is just a placeholder to make linking work without having to
11580 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11581 ever needed for Darwin (not too likely!) this would have to get a
11582 real definition. */
11589 #endif /* TARGET_MACHO */
11592 static unsigned int
11593 rs6000_elf_section_type_flags (decl, name, reloc)
11598 unsigned int flags = default_section_type_flags (decl, name, reloc);
11600 if (TARGET_RELOCATABLE)
11601 flags |= SECTION_WRITE;
11606 /* Record an element in the table of global constructors. SYMBOL is
11607 a SYMBOL_REF of the function to be called; PRIORITY is a number
11608 between 0 and MAX_INIT_PRIORITY.
11610 This differs from default_named_section_asm_out_constructor in
11611 that we have special handling for -mrelocatable. */
11614 rs6000_elf_asm_out_constructor (symbol, priority)
11618 const char *section = ".ctors";
11621 if (priority != DEFAULT_INIT_PRIORITY)
11623 sprintf (buf, ".ctors.%.5u",
11624 /* Invert the numbering so the linker puts us in the proper
11625 order; constructors are run from right to left, and the
11626 linker sorts in increasing order. */
11627 MAX_INIT_PRIORITY - priority);
11631 named_section_flags (section, SECTION_WRITE);
11632 assemble_align (POINTER_SIZE);
11634 if (TARGET_RELOCATABLE)
11636 fputs ("\t.long (", asm_out_file);
11637 output_addr_const (asm_out_file, symbol);
11638 fputs (")@fixup\n", asm_out_file);
11641 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11645 rs6000_elf_asm_out_destructor (symbol, priority)
11649 const char *section = ".dtors";
11652 if (priority != DEFAULT_INIT_PRIORITY)
11654 sprintf (buf, ".dtors.%.5u",
11655 /* Invert the numbering so the linker puts us in the proper
11656 order; constructors are run from right to left, and the
11657 linker sorts in increasing order. */
11658 MAX_INIT_PRIORITY - priority);
11662 named_section_flags (section, SECTION_WRITE);
11663 assemble_align (POINTER_SIZE);
11665 if (TARGET_RELOCATABLE)
11667 fputs ("\t.long (", asm_out_file);
11668 output_addr_const (asm_out_file, symbol);
11669 fputs (")@fixup\n", asm_out_file);
11672 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11678 xcoff_asm_named_section (name, flags)
11680 unsigned int flags ATTRIBUTE_UNUSED;
11682 fprintf (asm_out_file, "\t.csect %s\n", name);
11686 rs6000_xcoff_select_section (exp, reloc, align)
11689 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
11691 if ((TREE_CODE (exp) == STRING_CST
11692 && ! flag_writable_strings)
11693 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
11694 && TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp)
11695 && DECL_INITIAL (exp)
11696 && (DECL_INITIAL (exp) == error_mark_node
11697 || TREE_CONSTANT (DECL_INITIAL (exp)))
11700 if (TREE_PUBLIC (exp))
11701 read_only_data_section ();
11703 read_only_private_data_section ();
11707 if (TREE_PUBLIC (exp))
11710 private_data_section ();
11715 rs6000_xcoff_unique_section (decl, reloc)
11717 int reloc ATTRIBUTE_UNUSED;
11723 if (TREE_CODE (decl) == FUNCTION_DECL)
11725 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
11726 len = strlen (name) + 5;
11727 string = alloca (len + 1);
11728 sprintf (string, ".%s[PR]", name);
11729 DECL_SECTION_NAME (decl) = build_string (len, string);
11733 /* Select section for constant in constant pool.
11735 On RS/6000, all constants are in the private read-only data area.
11736 However, if this is being placed in the TOC it must be output as a
11740 rs6000_xcoff_select_rtx_section (mode, x, align)
11741 enum machine_mode mode;
11743 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
11745 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
11748 read_only_private_data_section ();
11751 /* Remove any trailing [DS] or the like from the symbol name. */
11753 static const char *
11754 rs6000_xcoff_strip_name_encoding (name)
11760 len = strlen (name);
11761 if (name[len - 1] == ']')
11762 return ggc_alloc_string (name, len - 4);
11767 #endif /* TARGET_XCOFF */
11769 /* Note that this is also used for ELF64. */
11772 rs6000_xcoff_encode_section_info (decl, first)
11774 int first ATTRIBUTE_UNUSED;
11776 if (TREE_CODE (decl) == FUNCTION_DECL
11777 && (TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
11778 && ! DECL_WEAK (decl))
11779 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;