1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Whether VRSAVE instructions should be generated. */
78 int rs6000_altivec_vrsave;
80 /* String from -mvrsave= option. */
81 const char *rs6000_altivec_vrsave_string;
83 /* Set to non-zero once AIX common-mode calls have been defined. */
84 static int common_mode_defined;
86 /* Save information from a "cmpxx" operation until the branch or scc is
88 rtx rs6000_compare_op0, rs6000_compare_op1;
89 int rs6000_compare_fp_p;
91 /* Label number of label created for -mrelocatable, to call to so we can
92 get the address of the GOT section */
93 int rs6000_pic_labelno;
96 /* Which abi to adhere to */
97 const char *rs6000_abi_name = RS6000_ABI_NAME;
99 /* Semantics of the small data area */
100 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
102 /* Which small data model to use */
103 const char *rs6000_sdata_name = (char *)0;
105 /* Counter for labels which are to be placed in .fixup. */
106 int fixuplabelno = 0;
109 /* ABI enumeration available for subtarget to use. */
110 enum rs6000_abi rs6000_current_abi;
112 /* ABI string from -mabi= option. */
113 const char *rs6000_abi_string;
116 const char *rs6000_debug_name;
117 int rs6000_debug_stack; /* debug stack applications */
118 int rs6000_debug_arg; /* debug argument handling */
120 /* Flag to say the TOC is initialized */
122 char toc_label_name[10];
124 /* Alias set for saves and restores from the rs6000 stack. */
125 static int rs6000_sr_alias_set;
127 static void rs6000_add_gc_roots PARAMS ((void));
128 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
129 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
130 static void validate_condition_mode
131 PARAMS ((enum rtx_code, enum machine_mode));
132 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
133 static void rs6000_maybe_dead PARAMS ((rtx));
134 static void rs6000_emit_stack_tie PARAMS ((void));
135 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
136 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
137 static unsigned rs6000_hash_constant PARAMS ((rtx));
138 static unsigned toc_hash_function PARAMS ((const void *));
139 static int toc_hash_eq PARAMS ((const void *, const void *));
140 static int toc_hash_mark_entry PARAMS ((void **, void *));
141 static void toc_hash_mark_table PARAMS ((void *));
142 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
143 static void rs6000_free_machine_status PARAMS ((struct function *));
144 static void rs6000_init_machine_status PARAMS ((struct function *));
145 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
146 static int rs6000_ra_ever_killed PARAMS ((void));
147 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
148 const struct attribute_spec rs6000_attribute_table[];
149 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
150 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
151 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
152 HOST_WIDE_INT, HOST_WIDE_INT));
154 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
156 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
157 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
159 #ifdef OBJECT_FORMAT_COFF
160 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
162 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
163 static int rs6000_adjust_priority PARAMS ((rtx, int));
164 static int rs6000_issue_rate PARAMS ((void));
166 static void rs6000_init_builtins PARAMS ((void));
167 static void altivec_init_builtins PARAMS ((void));
168 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
169 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
170 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
171 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
172 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
173 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
174 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
175 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
176 static void rs6000_parse_abi_options PARAMS ((void));
177 static void rs6000_parse_vrsave_option PARAMS ((void));
178 static int first_altivec_reg_to_save PARAMS ((void));
179 static unsigned int compute_vrsave_mask PARAMS ((void));
180 static void is_altivec_return_reg PARAMS ((rtx, void *));
181 int vrsave_operation PARAMS ((rtx, enum machine_mode));
182 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
183 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
184 static int easy_vector_constant PARAMS ((rtx));
186 /* Default register names. */
187 char rs6000_reg_names[][8] =
189 "0", "1", "2", "3", "4", "5", "6", "7",
190 "8", "9", "10", "11", "12", "13", "14", "15",
191 "16", "17", "18", "19", "20", "21", "22", "23",
192 "24", "25", "26", "27", "28", "29", "30", "31",
193 "0", "1", "2", "3", "4", "5", "6", "7",
194 "8", "9", "10", "11", "12", "13", "14", "15",
195 "16", "17", "18", "19", "20", "21", "22", "23",
196 "24", "25", "26", "27", "28", "29", "30", "31",
197 "mq", "lr", "ctr","ap",
198 "0", "1", "2", "3", "4", "5", "6", "7",
200 /* AltiVec registers. */
201 "0", "1", "2", "3", "4", "5", "6", "7",
202 "8", "9", "10", "11", "12", "13", "14", "15",
203 "16", "17", "18", "19", "20", "21", "22", "23",
204 "24", "25", "26", "27", "28", "29", "30", "31",
208 #ifdef TARGET_REGNAMES
209 static const char alt_reg_names[][8] =
211 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
212 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
213 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
214 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
215 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
216 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
217 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
218 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
219 "mq", "lr", "ctr", "ap",
220 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
222 /* AltiVec registers. */
223 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
224 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
225 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
226 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
231 #ifndef MASK_STRICT_ALIGN
232 #define MASK_STRICT_ALIGN 0
235 /* Initialize the GCC target structure. */
236 #undef TARGET_ATTRIBUTE_TABLE
237 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
239 #undef TARGET_ASM_ALIGNED_DI_OP
240 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
242 /* Default unaligned ops are only provided for ELF. Find the ops needed
243 for non-ELF systems. */
244 #ifndef OBJECT_FORMAT_ELF
245 #ifdef OBJECT_FORMAT_COFF
246 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
248 #undef TARGET_ASM_UNALIGNED_HI_OP
249 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
250 #undef TARGET_ASM_UNALIGNED_SI_OP
251 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
252 #undef TARGET_ASM_UNALIGNED_DI_OP
253 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
256 #undef TARGET_ASM_UNALIGNED_HI_OP
257 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
258 #undef TARGET_ASM_UNALIGNED_SI_OP
259 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
263 /* This hook deals with fixups for relocatable code and DI-mode objects
265 #undef TARGET_ASM_INTEGER
266 #define TARGET_ASM_INTEGER rs6000_assemble_integer
268 #undef TARGET_ASM_FUNCTION_PROLOGUE
269 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
270 #undef TARGET_ASM_FUNCTION_EPILOGUE
271 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
274 #undef TARGET_SECTION_TYPE_FLAGS
275 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
278 #undef TARGET_SCHED_ISSUE_RATE
279 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
280 #undef TARGET_SCHED_ADJUST_COST
281 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
282 #undef TARGET_SCHED_ADJUST_PRIORITY
283 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
285 #undef TARGET_INIT_BUILTINS
286 #define TARGET_INIT_BUILTINS rs6000_init_builtins
288 #undef TARGET_EXPAND_BUILTIN
289 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
291 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
292 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
294 struct gcc_target targetm = TARGET_INITIALIZER;
296 /* Override command line options. Mostly we process the processor
297 type and sometimes adjust other TARGET_ options. */
300 rs6000_override_options (default_cpu)
301 const char *default_cpu;
304 struct rs6000_cpu_select *ptr;
306 /* Simplify the entries below by making a mask for any POWER
307 variant and any PowerPC variant. */
309 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
310 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
311 | MASK_PPC_GFXOPT | MASK_POWERPC64)
312 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
316 const char *const name; /* Canonical processor name. */
317 const enum processor_type processor; /* Processor type enum value. */
318 const int target_enable; /* Target flags to enable. */
319 const int target_disable; /* Target flags to disable. */
320 } const processor_target_table[]
321 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
322 POWER_MASKS | POWERPC_MASKS},
323 {"power", PROCESSOR_POWER,
324 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
325 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
326 {"power2", PROCESSOR_POWER,
327 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
328 POWERPC_MASKS | MASK_NEW_MNEMONICS},
329 {"power3", PROCESSOR_PPC630,
330 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
331 POWER_MASKS | MASK_PPC_GPOPT},
332 {"powerpc", PROCESSOR_POWERPC,
333 MASK_POWERPC | MASK_NEW_MNEMONICS,
334 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
335 {"powerpc64", PROCESSOR_POWERPC64,
336 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
337 POWER_MASKS | POWERPC_OPT_MASKS},
338 {"rios", PROCESSOR_RIOS1,
339 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
340 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
341 {"rios1", PROCESSOR_RIOS1,
342 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
343 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
344 {"rsc", PROCESSOR_PPC601,
345 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
346 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
347 {"rsc1", PROCESSOR_PPC601,
348 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
349 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
350 {"rios2", PROCESSOR_RIOS2,
351 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
352 POWERPC_MASKS | MASK_NEW_MNEMONICS},
353 {"rs64a", PROCESSOR_RS64A,
354 MASK_POWERPC | MASK_NEW_MNEMONICS,
355 POWER_MASKS | POWERPC_OPT_MASKS},
356 {"401", PROCESSOR_PPC403,
357 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
358 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
359 {"403", PROCESSOR_PPC403,
360 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
361 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
362 {"405", PROCESSOR_PPC405,
363 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
364 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
365 {"505", PROCESSOR_MPCCORE,
366 MASK_POWERPC | MASK_NEW_MNEMONICS,
367 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
368 {"601", PROCESSOR_PPC601,
369 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
370 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
371 {"602", PROCESSOR_PPC603,
372 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
373 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
374 {"603", PROCESSOR_PPC603,
375 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
376 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
377 {"603e", PROCESSOR_PPC603,
378 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
379 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
380 {"ec603e", PROCESSOR_PPC603,
381 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
382 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
383 {"604", PROCESSOR_PPC604,
384 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
385 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
386 {"604e", PROCESSOR_PPC604e,
387 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
388 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
389 {"620", PROCESSOR_PPC620,
390 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
391 POWER_MASKS | MASK_PPC_GPOPT},
392 {"630", PROCESSOR_PPC630,
393 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
394 POWER_MASKS | MASK_PPC_GPOPT},
395 {"740", PROCESSOR_PPC750,
396 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
397 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
398 {"750", PROCESSOR_PPC750,
399 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
400 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
401 {"7400", PROCESSOR_PPC7400,
402 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
403 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
404 {"7450", PROCESSOR_PPC7450,
405 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
406 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
407 {"801", PROCESSOR_MPCCORE,
408 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
409 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
410 {"821", PROCESSOR_MPCCORE,
411 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
412 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
413 {"823", PROCESSOR_MPCCORE,
414 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
415 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
416 {"860", PROCESSOR_MPCCORE,
417 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
418 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
420 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
422 /* Save current -mmultiple/-mno-multiple status. */
423 int multiple = TARGET_MULTIPLE;
424 /* Save current -mstring/-mno-string status. */
425 int string = TARGET_STRING;
427 /* Identify the processor type. */
428 rs6000_select[0].string = default_cpu;
429 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
431 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
433 ptr = &rs6000_select[i];
434 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
436 for (j = 0; j < ptt_size; j++)
437 if (! strcmp (ptr->string, processor_target_table[j].name))
440 rs6000_cpu = processor_target_table[j].processor;
444 target_flags |= processor_target_table[j].target_enable;
445 target_flags &= ~processor_target_table[j].target_disable;
451 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
455 /* If we are optimizing big endian systems for space, use the store
456 multiple instructions. */
457 if (BYTES_BIG_ENDIAN && optimize_size)
458 target_flags |= MASK_MULTIPLE;
460 /* If -mmultiple or -mno-multiple was explicitly used, don't
461 override with the processor default */
462 if (TARGET_MULTIPLE_SET)
463 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
465 /* If -mstring or -mno-string was explicitly used, don't override
466 with the processor default. */
467 if (TARGET_STRING_SET)
468 target_flags = (target_flags & ~MASK_STRING) | string;
470 /* Don't allow -mmultiple or -mstring on little endian systems
471 unless the cpu is a 750, because the hardware doesn't support the
472 instructions used in little endian mode, and causes an alignment
473 trap. The 750 does not cause an alignment trap (except when the
474 target is unaligned). */
476 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
480 target_flags &= ~MASK_MULTIPLE;
481 if (TARGET_MULTIPLE_SET)
482 warning ("-mmultiple is not supported on little endian systems");
487 target_flags &= ~MASK_STRING;
488 if (TARGET_STRING_SET)
489 warning ("-mstring is not supported on little endian systems");
493 if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
498 warning ("-f%s ignored (all code is position independent)",
499 (flag_pic > 1) ? "PIC" : "pic");
502 #ifdef XCOFF_DEBUGGING_INFO
503 if (flag_function_sections && (write_symbols != NO_DEBUG)
504 && DEFAULT_ABI == ABI_AIX)
506 warning ("-ffunction-sections disabled on AIX when debugging");
507 flag_function_sections = 0;
510 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
512 warning ("-fdata-sections not supported on AIX");
513 flag_data_sections = 0;
517 /* Set debug flags */
518 if (rs6000_debug_name)
520 if (! strcmp (rs6000_debug_name, "all"))
521 rs6000_debug_stack = rs6000_debug_arg = 1;
522 else if (! strcmp (rs6000_debug_name, "stack"))
523 rs6000_debug_stack = 1;
524 else if (! strcmp (rs6000_debug_name, "arg"))
525 rs6000_debug_arg = 1;
527 error ("unknown -mdebug-%s switch", rs6000_debug_name);
530 /* Set size of long double */
531 rs6000_long_double_type_size = 64;
532 if (rs6000_long_double_size_string)
535 int size = strtol (rs6000_long_double_size_string, &tail, 10);
536 if (*tail != '\0' || (size != 64 && size != 128))
537 error ("Unknown switch -mlong-double-%s",
538 rs6000_long_double_size_string);
540 rs6000_long_double_type_size = size;
543 /* Handle -mabi= options. */
544 rs6000_parse_abi_options ();
546 /* Handle -mvrsave= option. */
547 rs6000_parse_vrsave_option ();
549 #ifdef TARGET_REGNAMES
550 /* If the user desires alternate register names, copy in the
551 alternate names now. */
553 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
556 #ifdef SUBTARGET_OVERRIDE_OPTIONS
557 SUBTARGET_OVERRIDE_OPTIONS;
559 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
560 SUBSUBTARGET_OVERRIDE_OPTIONS;
563 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
564 If -maix-struct-return or -msvr4-struct-return was explicitly
565 used, don't override with the ABI default. */
566 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
568 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
569 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
571 target_flags |= MASK_AIX_STRUCT_RET;
574 /* Register global variables with the garbage collector. */
575 rs6000_add_gc_roots ();
577 /* Allocate an alias set for register saves & restores from stack. */
578 rs6000_sr_alias_set = new_alias_set ();
581 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
583 /* We can only guarantee the availability of DI pseudo-ops when
584 assembling for 64-bit targets. */
587 targetm.asm_out.aligned_op.di = NULL;
588 targetm.asm_out.unaligned_op.di = NULL;
591 /* Arrange to save and restore machine status around nested functions. */
592 init_machine_status = rs6000_init_machine_status;
593 free_machine_status = rs6000_free_machine_status;
596 /* Handle -mvrsave= options. */
598 rs6000_parse_vrsave_option ()
600 /* Generate VRSAVE instructions by default. */
601 if (rs6000_altivec_vrsave_string == 0
602 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
603 rs6000_altivec_vrsave = 1;
604 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
605 rs6000_altivec_vrsave = 0;
607 error ("unknown -mvrsave= option specified: '%s'",
608 rs6000_altivec_vrsave_string);
611 /* Handle -mabi= options. */
613 rs6000_parse_abi_options ()
615 if (rs6000_abi_string == 0)
617 else if (! strcmp (rs6000_abi_string, "altivec"))
618 rs6000_altivec_abi = 1;
619 else if (! strcmp (rs6000_abi_string, "no-altivec"))
620 rs6000_altivec_abi = 0;
622 error ("unknown ABI specified: '%s'", rs6000_abi_string);
626 optimization_options (level, size)
627 int level ATTRIBUTE_UNUSED;
628 int size ATTRIBUTE_UNUSED;
632 /* Do anything needed at the start of the asm file. */
635 rs6000_file_start (file, default_cpu)
637 const char *default_cpu;
641 const char *start = buffer;
642 struct rs6000_cpu_select *ptr;
644 if (flag_verbose_asm)
646 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
647 rs6000_select[0].string = default_cpu;
649 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
651 ptr = &rs6000_select[i];
652 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
654 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
660 switch (rs6000_sdata)
662 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
663 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
664 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
665 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
668 if (rs6000_sdata && g_switch_value)
670 fprintf (file, "%s -G %d", start, g_switch_value);
681 /* Create a CONST_DOUBLE from a string. */
684 rs6000_float_const (string, mode)
686 enum machine_mode mode;
688 REAL_VALUE_TYPE value;
689 value = REAL_VALUE_ATOF (string, mode);
690 return immed_real_const_1 (value, mode);
693 /* Return non-zero if this function is known to have a null epilogue. */
698 if (reload_completed)
700 rs6000_stack_t *info = rs6000_stack_info ();
702 if (info->first_gp_reg_save == 32
703 && info->first_fp_reg_save == 64
704 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
707 && info->vrsave_mask == 0
715 /* Returns 1 always. */
718 any_operand (op, mode)
719 rtx op ATTRIBUTE_UNUSED;
720 enum machine_mode mode ATTRIBUTE_UNUSED;
725 /* Returns 1 if op is the count register. */
727 count_register_operand (op, mode)
729 enum machine_mode mode ATTRIBUTE_UNUSED;
731 if (GET_CODE (op) != REG)
734 if (REGNO (op) == COUNT_REGISTER_REGNUM)
737 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
743 /* Returns 1 if op is an altivec register. */
745 altivec_register_operand (op, mode)
747 enum machine_mode mode ATTRIBUTE_UNUSED;
750 return (register_operand (op, mode)
751 && (GET_CODE (op) != REG
752 || REGNO (op) > FIRST_PSEUDO_REGISTER
753 || ALTIVEC_REGNO_P (REGNO (op))));
757 xer_operand (op, mode)
759 enum machine_mode mode ATTRIBUTE_UNUSED;
761 if (GET_CODE (op) != REG)
764 if (XER_REGNO_P (REGNO (op)))
770 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
771 by such constants completes more quickly. */
774 s8bit_cint_operand (op, mode)
776 enum machine_mode mode ATTRIBUTE_UNUSED;
778 return ( GET_CODE (op) == CONST_INT
779 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
782 /* Return 1 if OP is a constant that can fit in a D field. */
785 short_cint_operand (op, mode)
787 enum machine_mode mode ATTRIBUTE_UNUSED;
789 return (GET_CODE (op) == CONST_INT
790 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
793 /* Similar for an unsigned D field. */
796 u_short_cint_operand (op, mode)
798 enum machine_mode mode ATTRIBUTE_UNUSED;
800 return (GET_CODE (op) == CONST_INT
801 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
804 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
807 non_short_cint_operand (op, mode)
809 enum machine_mode mode ATTRIBUTE_UNUSED;
811 return (GET_CODE (op) == CONST_INT
812 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
815 /* Returns 1 if OP is a CONST_INT that is a positive value
816 and an exact power of 2. */
819 exact_log2_cint_operand (op, mode)
821 enum machine_mode mode ATTRIBUTE_UNUSED;
823 return (GET_CODE (op) == CONST_INT
825 && exact_log2 (INTVAL (op)) >= 0);
828 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
832 gpc_reg_operand (op, mode)
834 enum machine_mode mode;
836 return (register_operand (op, mode)
837 && (GET_CODE (op) != REG
838 || (REGNO (op) >= ARG_POINTER_REGNUM
839 && !XER_REGNO_P (REGNO (op)))
840 || REGNO (op) < MQ_REGNO));
843 /* Returns 1 if OP is either a pseudo-register or a register denoting a
847 cc_reg_operand (op, mode)
849 enum machine_mode mode;
851 return (register_operand (op, mode)
852 && (GET_CODE (op) != REG
853 || REGNO (op) >= FIRST_PSEUDO_REGISTER
854 || CR_REGNO_P (REGNO (op))));
857 /* Returns 1 if OP is either a pseudo-register or a register denoting a
858 CR field that isn't CR0. */
861 cc_reg_not_cr0_operand (op, mode)
863 enum machine_mode mode;
865 return (register_operand (op, mode)
866 && (GET_CODE (op) != REG
867 || REGNO (op) >= FIRST_PSEUDO_REGISTER
868 || CR_REGNO_NOT_CR0_P (REGNO (op))));
871 /* Returns 1 if OP is either a constant integer valid for a D-field or
872 a non-special register. If a register, it must be in the proper
873 mode unless MODE is VOIDmode. */
876 reg_or_short_operand (op, mode)
878 enum machine_mode mode;
880 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
883 /* Similar, except check if the negation of the constant would be
884 valid for a D-field. */
887 reg_or_neg_short_operand (op, mode)
889 enum machine_mode mode;
891 if (GET_CODE (op) == CONST_INT)
892 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
894 return gpc_reg_operand (op, mode);
897 /* Returns 1 if OP is either a constant integer valid for a DS-field or
898 a non-special register. If a register, it must be in the proper
899 mode unless MODE is VOIDmode. */
902 reg_or_aligned_short_operand (op, mode)
904 enum machine_mode mode;
906 if (gpc_reg_operand (op, mode))
908 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
915 /* Return 1 if the operand is either a register or an integer whose
916 high-order 16 bits are zero. */
919 reg_or_u_short_operand (op, mode)
921 enum machine_mode mode;
923 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
926 /* Return 1 is the operand is either a non-special register or ANY
930 reg_or_cint_operand (op, mode)
932 enum machine_mode mode;
934 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
937 /* Return 1 is the operand is either a non-special register or ANY
938 32-bit signed constant integer. */
941 reg_or_arith_cint_operand (op, mode)
943 enum machine_mode mode;
945 return (gpc_reg_operand (op, mode)
946 || (GET_CODE (op) == CONST_INT
947 #if HOST_BITS_PER_WIDE_INT != 32
948 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
949 < (unsigned HOST_WIDE_INT) 0x100000000ll)
954 /* Return 1 is the operand is either a non-special register or a 32-bit
955 signed constant integer valid for 64-bit addition. */
958 reg_or_add_cint64_operand (op, mode)
960 enum machine_mode mode;
962 return (gpc_reg_operand (op, mode)
963 || (GET_CODE (op) == CONST_INT
964 #if HOST_BITS_PER_WIDE_INT == 32
965 && INTVAL (op) < 0x7fff8000
967 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
973 /* Return 1 is the operand is either a non-special register or a 32-bit
974 signed constant integer valid for 64-bit subtraction. */
977 reg_or_sub_cint64_operand (op, mode)
979 enum machine_mode mode;
981 return (gpc_reg_operand (op, mode)
982 || (GET_CODE (op) == CONST_INT
983 #if HOST_BITS_PER_WIDE_INT == 32
984 && (- INTVAL (op)) < 0x7fff8000
986 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
992 /* Return 1 is the operand is either a non-special register or ANY
993 32-bit unsigned constant integer. */
996 reg_or_logical_cint_operand (op, mode)
998 enum machine_mode mode;
1000 if (GET_CODE (op) == CONST_INT)
1002 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1004 if (GET_MODE_BITSIZE (mode) <= 32)
1007 if (INTVAL (op) < 0)
1011 return ((INTVAL (op) & GET_MODE_MASK (mode)
1012 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1014 else if (GET_CODE (op) == CONST_DOUBLE)
1016 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1020 return CONST_DOUBLE_HIGH (op) == 0;
1023 return gpc_reg_operand (op, mode);
1026 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1029 got_operand (op, mode)
1031 enum machine_mode mode ATTRIBUTE_UNUSED;
1033 return (GET_CODE (op) == SYMBOL_REF
1034 || GET_CODE (op) == CONST
1035 || GET_CODE (op) == LABEL_REF);
1038 /* Return 1 if the operand is a simple references that can be loaded via
1039 the GOT (labels involving addition aren't allowed). */
1042 got_no_const_operand (op, mode)
1044 enum machine_mode mode ATTRIBUTE_UNUSED;
1046 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1049 /* Return the number of instructions it takes to form a constant in an
1050 integer register. */
1053 num_insns_constant_wide (value)
1054 HOST_WIDE_INT value;
1056 /* signed constant loadable with {cal|addi} */
1057 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1060 /* constant loadable with {cau|addis} */
1061 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1064 #if HOST_BITS_PER_WIDE_INT == 64
1065 else if (TARGET_POWERPC64)
1067 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1068 HOST_WIDE_INT high = value >> 31;
1070 if (high == 0 || high == -1)
1076 return num_insns_constant_wide (high) + 1;
1078 return (num_insns_constant_wide (high)
1079 + num_insns_constant_wide (low) + 1);
1088 num_insns_constant (op, mode)
1090 enum machine_mode mode;
1092 if (GET_CODE (op) == CONST_INT)
1094 #if HOST_BITS_PER_WIDE_INT == 64
1095 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1096 && mask64_operand (op, mode))
1100 return num_insns_constant_wide (INTVAL (op));
1103 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1108 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1109 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1110 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1113 else if (GET_CODE (op) == CONST_DOUBLE)
1119 int endian = (WORDS_BIG_ENDIAN == 0);
1121 if (mode == VOIDmode || mode == DImode)
1123 high = CONST_DOUBLE_HIGH (op);
1124 low = CONST_DOUBLE_LOW (op);
1128 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1129 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1131 low = l[1 - endian];
1135 return (num_insns_constant_wide (low)
1136 + num_insns_constant_wide (high));
1140 if (high == 0 && low >= 0)
1141 return num_insns_constant_wide (low);
1143 else if (high == -1 && low < 0)
1144 return num_insns_constant_wide (low);
1146 else if (mask64_operand (op, mode))
1150 return num_insns_constant_wide (high) + 1;
1153 return (num_insns_constant_wide (high)
1154 + num_insns_constant_wide (low) + 1);
1162 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1163 register with one instruction per word. We only do this if we can
1164 safely read CONST_DOUBLE_{LOW,HIGH}. */
1167 easy_fp_constant (op, mode)
1169 enum machine_mode mode;
1171 if (GET_CODE (op) != CONST_DOUBLE
1172 || GET_MODE (op) != mode
1173 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1176 /* Consider all constants with -msoft-float to be easy. */
1177 if (TARGET_SOFT_FLOAT && mode != DImode)
1180 /* If we are using V.4 style PIC, consider all constants to be hard. */
1181 if (flag_pic && DEFAULT_ABI == ABI_V4)
1184 #ifdef TARGET_RELOCATABLE
1185 /* Similarly if we are using -mrelocatable, consider all constants
1187 if (TARGET_RELOCATABLE)
1196 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1197 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1199 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1200 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1203 else if (mode == SFmode)
1208 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1209 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1211 return num_insns_constant_wide (l) == 1;
1214 else if (mode == DImode)
1215 return ((TARGET_POWERPC64
1216 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1217 || (num_insns_constant (op, DImode) <= 2));
1219 else if (mode == SImode)
1225 /* Return 1 if the operand is a CONST_INT and can be put into a
1226 register with one instruction. */
1229 easy_vector_constant (op)
1235 if (GET_CODE (op) != CONST_VECTOR)
1238 units = CONST_VECTOR_NUNITS (op);
1240 /* We can generate 0 easily. Look for that. */
1241 for (i = 0; i < units; ++i)
1243 elt = CONST_VECTOR_ELT (op, i);
1245 /* We could probably simplify this by just checking for equality
1246 with CONST0_RTX for the current mode, but let's be safe
1249 switch (GET_CODE (elt))
1252 if (INTVAL (elt) != 0)
1256 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1264 /* We could probably generate a few other constants trivially, but
1265 gcc doesn't generate them yet. FIXME later. */
1269 /* Return 1 if the operand is the constant 0. This works for scalars
1270 as well as vectors. */
1272 zero_constant (op, mode)
1274 enum machine_mode mode;
1276 return op == CONST0_RTX (mode);
1279 /* Return 1 if the operand is 0.0. */
1281 zero_fp_constant (op, mode)
1283 enum machine_mode mode;
1285 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1288 /* Return 1 if the operand is in volatile memory. Note that during
1289 the RTL generation phase, memory_operand does not return TRUE for
1290 volatile memory references. So this function allows us to
1291 recognize volatile references where its safe. */
1294 volatile_mem_operand (op, mode)
1296 enum machine_mode mode;
1298 if (GET_CODE (op) != MEM)
1301 if (!MEM_VOLATILE_P (op))
1304 if (mode != GET_MODE (op))
1307 if (reload_completed)
1308 return memory_operand (op, mode);
1310 if (reload_in_progress)
1311 return strict_memory_address_p (mode, XEXP (op, 0));
1313 return memory_address_p (mode, XEXP (op, 0));
1316 /* Return 1 if the operand is an offsettable memory operand. */
1319 offsettable_mem_operand (op, mode)
1321 enum machine_mode mode;
1323 return ((GET_CODE (op) == MEM)
1324 && offsettable_address_p (reload_completed || reload_in_progress,
1325 mode, XEXP (op, 0)));
1328 /* Return 1 if the operand is either an easy FP constant (see above) or
1332 mem_or_easy_const_operand (op, mode)
1334 enum machine_mode mode;
1336 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1339 /* Return 1 if the operand is either a non-special register or an item
1340 that can be used as the operand of a `mode' add insn. */
1343 add_operand (op, mode)
1345 enum machine_mode mode;
1347 if (GET_CODE (op) == CONST_INT)
1348 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1349 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1351 return gpc_reg_operand (op, mode);
1354 /* Return 1 if OP is a constant but not a valid add_operand. */
1357 non_add_cint_operand (op, mode)
1359 enum machine_mode mode ATTRIBUTE_UNUSED;
1361 return (GET_CODE (op) == CONST_INT
1362 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1363 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1366 /* Return 1 if the operand is a non-special register or a constant that
1367 can be used as the operand of an OR or XOR insn on the RS/6000. */
1370 logical_operand (op, mode)
1372 enum machine_mode mode;
1374 HOST_WIDE_INT opl, oph;
1376 if (gpc_reg_operand (op, mode))
1379 if (GET_CODE (op) == CONST_INT)
1381 opl = INTVAL (op) & GET_MODE_MASK (mode);
1383 #if HOST_BITS_PER_WIDE_INT <= 32
1384 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1388 else if (GET_CODE (op) == CONST_DOUBLE)
1390 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1393 opl = CONST_DOUBLE_LOW (op);
1394 oph = CONST_DOUBLE_HIGH (op);
1401 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1402 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1405 /* Return 1 if C is a constant that is not a logical operand (as
1406 above), but could be split into one. */
1409 non_logical_cint_operand (op, mode)
1411 enum machine_mode mode;
1413 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1414 && ! logical_operand (op, mode)
1415 && reg_or_logical_cint_operand (op, mode));
1418 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1419 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1420 Reject all ones and all zeros, since these should have been optimized
1421 away and confuse the making of MB and ME. */
1424 mask_operand (op, mode)
1426 enum machine_mode mode ATTRIBUTE_UNUSED;
1428 HOST_WIDE_INT c, lsb;
1430 if (GET_CODE (op) != CONST_INT)
1435 /* Fail in 64-bit mode if the mask wraps around because the upper
1436 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1437 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1440 /* We don't change the number of transitions by inverting,
1441 so make sure we start with the LS bit zero. */
1445 /* Reject all zeros or all ones. */
1449 /* Find the first transition. */
1452 /* Invert to look for a second transition. */
1455 /* Erase first transition. */
1458 /* Find the second transition (if any). */
1461 /* Match if all the bits above are 1's (or c is zero). */
1465 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1466 It is if there are no more than one 1->0 or 0->1 transitions.
1467 Reject all ones and all zeros, since these should have been optimized
1468 away and confuse the making of MB and ME. */
1471 mask64_operand (op, mode)
1473 enum machine_mode mode;
1475 if (GET_CODE (op) == CONST_INT)
1477 HOST_WIDE_INT c, lsb;
1479 /* We don't change the number of transitions by inverting,
1480 so make sure we start with the LS bit zero. */
1485 /* Reject all zeros or all ones. */
1489 /* Find the transition, and check that all bits above are 1's. */
1493 else if (GET_CODE (op) == CONST_DOUBLE
1494 && (mode == VOIDmode || mode == DImode))
1496 HOST_WIDE_INT low, high, lsb;
1498 if (HOST_BITS_PER_WIDE_INT < 64)
1499 high = CONST_DOUBLE_HIGH (op);
1501 low = CONST_DOUBLE_LOW (op);
1504 if (HOST_BITS_PER_WIDE_INT < 64)
1511 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1515 return high == -lsb;
1519 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1525 /* Return 1 if the operand is either a non-special register or a constant
1526 that can be used as the operand of a PowerPC64 logical AND insn. */
1529 and64_operand (op, mode)
1531 enum machine_mode mode;
1533 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1534 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1536 return (logical_operand (op, mode) || mask64_operand (op, mode));
1539 /* Return 1 if the operand is either a non-special register or a
1540 constant that can be used as the operand of an RS/6000 logical AND insn. */
1543 and_operand (op, mode)
1545 enum machine_mode mode;
1547 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1548 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1550 return (logical_operand (op, mode) || mask_operand (op, mode));
1553 /* Return 1 if the operand is a general register or memory operand. */
1556 reg_or_mem_operand (op, mode)
1558 enum machine_mode mode;
1560 return (gpc_reg_operand (op, mode)
1561 || memory_operand (op, mode)
1562 || volatile_mem_operand (op, mode));
1565 /* Return 1 if the operand is a general register or memory operand without
1566 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1570 lwa_operand (op, mode)
1572 enum machine_mode mode;
1576 if (reload_completed && GET_CODE (inner) == SUBREG)
1577 inner = SUBREG_REG (inner);
1579 return gpc_reg_operand (inner, mode)
1580 || (memory_operand (inner, mode)
1581 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1582 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1583 && (GET_CODE (XEXP (inner, 0)) != PLUS
1584 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1585 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1588 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1591 symbol_ref_operand (op, mode)
1593 enum machine_mode mode;
1595 if (mode != VOIDmode && GET_MODE (op) != mode)
1598 return (GET_CODE (op) == SYMBOL_REF);
1601 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1602 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1605 call_operand (op, mode)
1607 enum machine_mode mode;
1609 if (mode != VOIDmode && GET_MODE (op) != mode)
1612 return (GET_CODE (op) == SYMBOL_REF
1613 || (GET_CODE (op) == REG
1614 && (REGNO (op) == LINK_REGISTER_REGNUM
1615 || REGNO (op) == COUNT_REGISTER_REGNUM
1616 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1619 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1620 this file and the function is not weakly defined. */
1623 current_file_function_operand (op, mode)
1625 enum machine_mode mode ATTRIBUTE_UNUSED;
1627 return (GET_CODE (op) == SYMBOL_REF
1628 && (SYMBOL_REF_FLAG (op)
1629 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1630 && ! DECL_WEAK (current_function_decl))));
1633 /* Return 1 if this operand is a valid input for a move insn. */
1636 input_operand (op, mode)
1638 enum machine_mode mode;
1640 /* Memory is always valid. */
1641 if (memory_operand (op, mode))
1644 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1645 if (GET_CODE (op) == CONSTANT_P_RTX)
1648 /* For floating-point, easy constants are valid. */
1649 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1651 && easy_fp_constant (op, mode))
1654 /* Allow any integer constant. */
1655 if (GET_MODE_CLASS (mode) == MODE_INT
1656 && (GET_CODE (op) == CONST_INT
1657 || GET_CODE (op) == CONST_DOUBLE))
1660 /* For floating-point or multi-word mode, the only remaining valid type
1662 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1663 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1664 return register_operand (op, mode);
1666 /* The only cases left are integral modes one word or smaller (we
1667 do not get called for MODE_CC values). These can be in any
1669 if (register_operand (op, mode))
1672 /* A SYMBOL_REF referring to the TOC is valid. */
1673 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1676 /* A constant pool expression (relative to the TOC) is valid */
1677 if (TOC_RELATIVE_EXPR_P (op))
1680 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1682 if (DEFAULT_ABI == ABI_V4
1683 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1684 && small_data_operand (op, Pmode))
1690 /* Return 1 for an operand in small memory on V.4/eabi. */
1693 small_data_operand (op, mode)
1694 rtx op ATTRIBUTE_UNUSED;
1695 enum machine_mode mode ATTRIBUTE_UNUSED;
1700 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1703 if (DEFAULT_ABI != ABI_V4)
1706 if (GET_CODE (op) == SYMBOL_REF)
1709 else if (GET_CODE (op) != CONST
1710 || GET_CODE (XEXP (op, 0)) != PLUS
1711 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1712 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1717 rtx sum = XEXP (op, 0);
1718 HOST_WIDE_INT summand;
1720 /* We have to be careful here, because it is the referenced address
1721 that must be 32k from _SDA_BASE_, not just the symbol. */
1722 summand = INTVAL (XEXP (sum, 1));
1723 if (summand < 0 || summand > g_switch_value)
1726 sym_ref = XEXP (sum, 0);
1729 if (*XSTR (sym_ref, 0) != '@')
1740 constant_pool_expr_1 (op, have_sym, have_toc)
1745 switch (GET_CODE(op))
1748 if (CONSTANT_POOL_ADDRESS_P (op))
1750 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1758 else if (! strcmp (XSTR (op, 0), toc_label_name))
1767 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
1768 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
1770 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1779 constant_pool_expr_p (op)
1784 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1788 toc_relative_expr_p (op)
1793 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1796 /* Try machine-dependent ways of modifying an illegitimate address
1797 to be legitimate. If we find one, return the new, valid address.
1798 This is used from only one place: `memory_address' in explow.c.
1800 OLDX is the address as it was before break_out_memory_refs was
1801 called. In some cases it is useful to look at this to decide what
1804 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1806 It is always safe for this function to do nothing. It exists to
1807 recognize opportunities to optimize the output.
1809 On RS/6000, first check for the sum of a register with a constant
1810 integer that is out of range. If so, generate code to add the
1811 constant with the low-order 16 bits masked to the register and force
1812 this result into another register (this can be done with `cau').
1813 Then generate an address of REG+(CONST&0xffff), allowing for the
1814 possibility of bit 16 being a one.
1816 Then check for the sum of a register and something not constant, try to
1817 load the other things into a register and return the sum. */
1819 rs6000_legitimize_address (x, oldx, mode)
1821 rtx oldx ATTRIBUTE_UNUSED;
1822 enum machine_mode mode;
1824 if (GET_CODE (x) == PLUS
1825 && GET_CODE (XEXP (x, 0)) == REG
1826 && GET_CODE (XEXP (x, 1)) == CONST_INT
1827 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1829 HOST_WIDE_INT high_int, low_int;
1831 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
1832 high_int = INTVAL (XEXP (x, 1)) - low_int;
1833 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1834 GEN_INT (high_int)), 0);
1835 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1837 else if (GET_CODE (x) == PLUS
1838 && GET_CODE (XEXP (x, 0)) == REG
1839 && GET_CODE (XEXP (x, 1)) != CONST_INT
1840 && GET_MODE_NUNITS (mode) == 1
1841 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1842 && (TARGET_POWERPC64 || mode != DImode)
1845 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1846 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1848 else if (ALTIVEC_VECTOR_MODE (mode))
1852 /* Make sure both operands are registers. */
1853 if (GET_CODE (x) == PLUS)
1854 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1855 force_reg (Pmode, XEXP (x, 1)));
1857 reg = force_reg (Pmode, x);
1860 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1861 && GET_CODE (x) != CONST_INT
1862 && GET_CODE (x) != CONST_DOUBLE
1864 && GET_MODE_NUNITS (mode) == 1
1865 && (GET_MODE_BITSIZE (mode) <= 32
1866 || (TARGET_HARD_FLOAT && mode == DFmode)))
1868 rtx reg = gen_reg_rtx (Pmode);
1869 emit_insn (gen_elf_high (reg, (x)));
1870 return gen_rtx_LO_SUM (Pmode, reg, (x));
1872 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1874 && GET_CODE (x) != CONST_INT
1875 && GET_CODE (x) != CONST_DOUBLE
1877 && (TARGET_HARD_FLOAT || mode != DFmode)
1881 rtx reg = gen_reg_rtx (Pmode);
1882 emit_insn (gen_macho_high (reg, (x)));
1883 return gen_rtx_LO_SUM (Pmode, reg, (x));
1886 && CONSTANT_POOL_EXPR_P (x)
1887 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1889 return create_TOC_reference (x);
1895 /* The convention appears to be to define this wherever it is used.
1896 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1897 is now used here. */
1898 #ifndef REG_MODE_OK_FOR_BASE_P
1899 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1902 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1903 replace the input X, or the original X if no replacement is called for.
1904 The output parameter *WIN is 1 if the calling macro should goto WIN,
1907 For RS/6000, we wish to handle large displacements off a base
1908 register by splitting the addend across an addiu/addis and the mem insn.
1909 This cuts number of extra insns needed from 3 to 1.
1911 On Darwin, we use this to generate code for floating point constants.
1912 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1913 The Darwin code is inside #if TARGET_MACHO because only then is
1914 machopic_function_base_name() defined. */
1916 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1918 enum machine_mode mode;
1921 int ind_levels ATTRIBUTE_UNUSED;
1924 /* We must recognize output that we have already generated ourselves. */
1925 if (GET_CODE (x) == PLUS
1926 && GET_CODE (XEXP (x, 0)) == PLUS
1927 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1928 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1929 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1931 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1932 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1933 opnum, (enum reload_type)type);
1939 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1940 && GET_CODE (x) == LO_SUM
1941 && GET_CODE (XEXP (x, 0)) == PLUS
1942 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1943 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1944 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1945 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1946 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1947 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1948 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1950 /* Result of previous invocation of this function on Darwin
1951 floating point constant. */
1952 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1953 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1954 opnum, (enum reload_type)type);
1959 if (GET_CODE (x) == PLUS
1960 && GET_CODE (XEXP (x, 0)) == REG
1961 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1962 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1963 && GET_CODE (XEXP (x, 1)) == CONST_INT
1964 && !ALTIVEC_VECTOR_MODE (mode))
1966 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1967 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
1969 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
1971 /* Check for 32-bit overflow. */
1972 if (high + low != val)
1978 /* Reload the high part into a base reg; leave the low part
1979 in the mem directly. */
1981 x = gen_rtx_PLUS (GET_MODE (x),
1982 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
1986 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1987 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1988 opnum, (enum reload_type)type);
1993 if (GET_CODE (x) == SYMBOL_REF
1994 && DEFAULT_ABI == ABI_DARWIN
1995 && !ALTIVEC_VECTOR_MODE (mode)
1998 /* Darwin load of floating point constant. */
1999 rtx offset = gen_rtx (CONST, Pmode,
2000 gen_rtx (MINUS, Pmode, x,
2001 gen_rtx (SYMBOL_REF, Pmode,
2002 machopic_function_base_name ())));
2003 x = gen_rtx (LO_SUM, GET_MODE (x),
2004 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2005 gen_rtx (HIGH, Pmode, offset)), offset);
2006 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2007 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2008 opnum, (enum reload_type)type);
2014 && CONSTANT_POOL_EXPR_P (x)
2015 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2017 (x) = create_TOC_reference (x);
2025 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2026 that is a valid memory address for an instruction.
2027 The MODE argument is the machine mode for the MEM expression
2028 that wants to use this address.
2030 On the RS/6000, there are four valid address: a SYMBOL_REF that
2031 refers to a constant pool entry of an address (or the sum of it
2032 plus a constant), a short (16-bit signed) constant plus a register,
2033 the sum of two registers, or a register indirect, possibly with an
2034 auto-increment. For DFmode and DImode with an constant plus register,
2035 we must ensure that both words are addressable or PowerPC64 with offset
2038 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2039 32-bit DImode, TImode), indexed addressing cannot be used because
2040 adjacent memory cells are accessed by adding word-sized offsets
2041 during assembly output. */
2043 rs6000_legitimate_address (mode, x, reg_ok_strict)
2044 enum machine_mode mode;
2048 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2050 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2051 && !ALTIVEC_VECTOR_MODE (mode)
2053 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2055 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2057 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2059 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2061 && GET_CODE (x) == PLUS
2062 && GET_CODE (XEXP (x, 0)) == REG
2063 && XEXP (x, 0) == virtual_stack_vars_rtx
2064 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2066 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2069 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
2070 && (TARGET_POWERPC64 || mode != DImode)
2071 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2073 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2078 /* Try to output insns to set TARGET equal to the constant C if it can
2079 be done in less than N insns. Do all computations in MODE.
2080 Returns the place where the output has been placed if it can be
2081 done and the insns have been emitted. If it would take more than N
2082 insns, zero is returned and no insns and emitted. */
2085 rs6000_emit_set_const (dest, mode, source, n)
2087 enum machine_mode mode;
2088 int n ATTRIBUTE_UNUSED;
2090 HOST_WIDE_INT c0, c1;
2092 if (mode == QImode || mode == HImode || mode == SImode)
2095 dest = gen_reg_rtx (mode);
2096 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2100 if (GET_CODE (source) == CONST_INT)
2102 c0 = INTVAL (source);
2105 else if (GET_CODE (source) == CONST_DOUBLE)
2107 #if HOST_BITS_PER_WIDE_INT >= 64
2108 c0 = CONST_DOUBLE_LOW (source);
2111 c0 = CONST_DOUBLE_LOW (source);
2112 c1 = CONST_DOUBLE_HIGH (source);
2118 return rs6000_emit_set_long_const (dest, c0, c1);
2121 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2122 fall back to a straight forward decomposition. We do this to avoid
2123 exponential run times encountered when looking for longer sequences
2124 with rs6000_emit_set_const. */
2126 rs6000_emit_set_long_const (dest, c1, c2)
2128 HOST_WIDE_INT c1, c2;
2130 if (!TARGET_POWERPC64)
2132 rtx operand1, operand2;
2134 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2136 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2138 emit_move_insn (operand1, GEN_INT (c1));
2139 emit_move_insn (operand2, GEN_INT (c2));
2143 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2146 ud2 = (c1 & 0xffff0000) >> 16;
2147 #if HOST_BITS_PER_WIDE_INT >= 64
2151 ud4 = (c2 & 0xffff0000) >> 16;
2153 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2154 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2157 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2159 emit_move_insn (dest, GEN_INT (ud1));
2162 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2163 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2166 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2169 emit_move_insn (dest, GEN_INT (ud2 << 16));
2171 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2173 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2174 || (ud4 == 0 && ! (ud3 & 0x8000)))
2177 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2180 emit_move_insn (dest, GEN_INT (ud3 << 16));
2183 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2184 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2186 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2191 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2194 emit_move_insn (dest, GEN_INT (ud4 << 16));
2197 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2199 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2201 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2202 GEN_INT (ud2 << 16)));
2204 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2210 /* Emit a move from SOURCE to DEST in mode MODE. */
2212 rs6000_emit_move (dest, source, mode)
2215 enum machine_mode mode;
2219 operands[1] = source;
2221 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2222 if (GET_CODE (operands[1]) == CONST_DOUBLE
2223 && ! FLOAT_MODE_P (mode)
2224 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2226 /* FIXME. This should never happen. */
2227 /* Since it seems that it does, do the safe thing and convert
2229 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2231 if (GET_CODE (operands[1]) == CONST_DOUBLE
2232 && ! FLOAT_MODE_P (mode)
2233 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2234 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2235 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2236 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2239 /* Check if GCC is setting up a block move that will end up using FP
2240 registers as temporaries. We must make sure this is acceptable. */
2241 if (GET_CODE (operands[0]) == MEM
2242 && GET_CODE (operands[1]) == MEM
2244 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2245 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2246 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2247 ? 32 : MEM_ALIGN (operands[0])))
2248 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2250 : MEM_ALIGN (operands[1]))))
2251 && ! MEM_VOLATILE_P (operands [0])
2252 && ! MEM_VOLATILE_P (operands [1]))
2254 emit_move_insn (adjust_address (operands[0], SImode, 0),
2255 adjust_address (operands[1], SImode, 0));
2256 emit_move_insn (adjust_address (operands[0], SImode, 4),
2257 adjust_address (operands[1], SImode, 4));
2261 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2262 operands[1] = force_reg (mode, operands[1]);
2264 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2265 && GET_CODE (operands[0]) == MEM)
2269 if (reload_in_progress || reload_completed)
2270 regnum = true_regnum (operands[1]);
2271 else if (GET_CODE (operands[1]) == REG)
2272 regnum = REGNO (operands[1]);
2276 /* If operands[1] is a register, on POWER it may have
2277 double-precision data in it, so truncate it to single
2279 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2282 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2283 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2284 operands[1] = newreg;
2288 /* Handle the case where reload calls us with an invalid address;
2289 and the case of CONSTANT_P_RTX. */
2290 if (!VECTOR_MODE_P (mode)
2291 && (! general_operand (operands[1], mode)
2292 || ! nonimmediate_operand (operands[0], mode)
2293 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2295 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2299 /* FIXME: In the long term, this switch statement should go away
2300 and be replaced by a sequence of tests based on things like
2306 if (CONSTANT_P (operands[1])
2307 && GET_CODE (operands[1]) != CONST_INT)
2308 operands[1] = force_const_mem (mode, operands[1]);
2314 if (CONSTANT_P (operands[1])
2315 && ! easy_fp_constant (operands[1], mode))
2316 operands[1] = force_const_mem (mode, operands[1]);
2323 if (CONSTANT_P (operands[1])
2324 && !easy_vector_constant (operands[1]))
2325 operands[1] = force_const_mem (mode, operands[1]);
2330 /* Use default pattern for address of ELF small data */
2333 && DEFAULT_ABI == ABI_V4
2334 && (GET_CODE (operands[1]) == SYMBOL_REF
2335 || GET_CODE (operands[1]) == CONST)
2336 && small_data_operand (operands[1], mode))
2338 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2342 if (DEFAULT_ABI == ABI_V4
2343 && mode == Pmode && mode == SImode
2344 && flag_pic == 1 && got_operand (operands[1], mode))
2346 emit_insn (gen_movsi_got (operands[0], operands[1]));
2350 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2351 && TARGET_NO_TOC && ! flag_pic
2353 && CONSTANT_P (operands[1])
2354 && GET_CODE (operands[1]) != HIGH
2355 && GET_CODE (operands[1]) != CONST_INT)
2357 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2359 /* If this is a function address on -mcall-aixdesc,
2360 convert it to the address of the descriptor. */
2361 if (DEFAULT_ABI == ABI_AIX
2362 && GET_CODE (operands[1]) == SYMBOL_REF
2363 && XSTR (operands[1], 0)[0] == '.')
2365 const char *name = XSTR (operands[1], 0);
2367 while (*name == '.')
2369 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2370 CONSTANT_POOL_ADDRESS_P (new_ref)
2371 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2372 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2373 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2374 operands[1] = new_ref;
2377 if (DEFAULT_ABI == ABI_DARWIN)
2379 emit_insn (gen_macho_high (target, operands[1]));
2380 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2384 emit_insn (gen_elf_high (target, operands[1]));
2385 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2389 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2390 and we have put it in the TOC, we just need to make a TOC-relative
2393 && GET_CODE (operands[1]) == SYMBOL_REF
2394 && CONSTANT_POOL_EXPR_P (operands[1])
2395 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2396 get_pool_mode (operands[1])))
2398 operands[1] = create_TOC_reference (operands[1]);
2400 else if (mode == Pmode
2401 && CONSTANT_P (operands[1])
2402 && ((GET_CODE (operands[1]) != CONST_INT
2403 && ! easy_fp_constant (operands[1], mode))
2404 || (GET_CODE (operands[1]) == CONST_INT
2405 && num_insns_constant (operands[1], mode) > 2)
2406 || (GET_CODE (operands[0]) == REG
2407 && FP_REGNO_P (REGNO (operands[0]))))
2408 && GET_CODE (operands[1]) != HIGH
2409 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2410 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2412 /* Emit a USE operation so that the constant isn't deleted if
2413 expensive optimizations are turned on because nobody
2414 references it. This should only be done for operands that
2415 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2416 This should not be done for operands that contain LABEL_REFs.
2417 For now, we just handle the obvious case. */
2418 if (GET_CODE (operands[1]) != LABEL_REF)
2419 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2422 /* Darwin uses a special PIC legitimizer. */
2423 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2426 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2428 if (operands[0] != operands[1])
2429 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2434 /* If we are to limit the number of things we put in the TOC and
2435 this is a symbol plus a constant we can add in one insn,
2436 just put the symbol in the TOC and add the constant. Don't do
2437 this if reload is in progress. */
2438 if (GET_CODE (operands[1]) == CONST
2439 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2440 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2441 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2442 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2443 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2444 && ! side_effects_p (operands[0]))
2447 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2448 rtx other = XEXP (XEXP (operands[1], 0), 1);
2450 sym = force_reg (mode, sym);
2452 emit_insn (gen_addsi3 (operands[0], sym, other));
2454 emit_insn (gen_adddi3 (operands[0], sym, other));
2458 operands[1] = force_const_mem (mode, operands[1]);
2461 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2462 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2463 get_pool_constant (XEXP (operands[1], 0)),
2464 get_pool_mode (XEXP (operands[1], 0))))
2467 = gen_rtx_MEM (mode,
2468 create_TOC_reference (XEXP (operands[1], 0)));
2469 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2470 RTX_UNCHANGING_P (operands[1]) = 1;
2476 if (GET_CODE (operands[0]) == MEM
2477 && GET_CODE (XEXP (operands[0], 0)) != REG
2478 && ! reload_in_progress)
2480 = replace_equiv_address (operands[0],
2481 copy_addr_to_reg (XEXP (operands[0], 0)));
2483 if (GET_CODE (operands[1]) == MEM
2484 && GET_CODE (XEXP (operands[1], 0)) != REG
2485 && ! reload_in_progress)
2487 = replace_equiv_address (operands[1],
2488 copy_addr_to_reg (XEXP (operands[1], 0)));
2495 /* Above, we may have called force_const_mem which may have returned
2496 an invalid address. If we can, fix this up; otherwise, reload will
2497 have to deal with it. */
2498 if (GET_CODE (operands[1]) == MEM
2499 && ! memory_address_p (mode, XEXP (operands[1], 0))
2500 && ! reload_in_progress)
2501 operands[1] = adjust_address (operands[1], mode, 0);
2503 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2507 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2508 for a call to a function whose data type is FNTYPE.
2509 For a library call, FNTYPE is 0.
2511 For incoming args we set the number of arguments in the prototype large
2512 so we never return a PARALLEL. */
2515 init_cumulative_args (cum, fntype, libname, incoming)
2516 CUMULATIVE_ARGS *cum;
2518 rtx libname ATTRIBUTE_UNUSED;
2521 static CUMULATIVE_ARGS zero_cumulative;
2523 *cum = zero_cumulative;
2525 cum->fregno = FP_ARG_MIN_REG;
2526 cum->vregno = ALTIVEC_ARG_MIN_REG;
2527 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2528 cum->call_cookie = CALL_NORMAL;
2529 cum->sysv_gregno = GP_ARG_MIN_REG;
2532 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2534 else if (cum->prototype)
2535 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2536 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2537 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2540 cum->nargs_prototype = 0;
2542 cum->orig_nargs = cum->nargs_prototype;
2544 /* Check for longcall's */
2545 if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2546 cum->call_cookie = CALL_LONG;
2548 if (TARGET_DEBUG_ARG)
2550 fprintf (stderr, "\ninit_cumulative_args:");
2553 tree ret_type = TREE_TYPE (fntype);
2554 fprintf (stderr, " ret code = %s,",
2555 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2558 if (cum->call_cookie & CALL_LONG)
2559 fprintf (stderr, " longcall,");
2561 fprintf (stderr, " proto = %d, nargs = %d\n",
2562 cum->prototype, cum->nargs_prototype);
2566 /* If defined, a C expression which determines whether, and in which
2567 direction, to pad out an argument with extra space. The value
2568 should be of type `enum direction': either `upward' to pad above
2569 the argument, `downward' to pad below, or `none' to inhibit
2572 For the AIX ABI structs are always stored left shifted in their
2576 function_arg_padding (mode, type)
2577 enum machine_mode mode;
2580 if (type != 0 && AGGREGATE_TYPE_P (type))
2583 /* This is the default definition. */
2584 return (! BYTES_BIG_ENDIAN
2587 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2588 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2589 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2590 ? downward : upward));
2593 /* If defined, a C expression that gives the alignment boundary, in bits,
2594 of an argument with the specified mode and type. If it is not defined,
2595 PARM_BOUNDARY is used for all arguments.
2597 V.4 wants long longs to be double word aligned. */
2600 function_arg_boundary (mode, type)
2601 enum machine_mode mode;
2602 tree type ATTRIBUTE_UNUSED;
2604 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2606 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2609 return PARM_BOUNDARY;
2612 /* Update the data in CUM to advance over an argument
2613 of mode MODE and data type TYPE.
2614 (TYPE is null for libcalls where that information may not be available.) */
2617 function_arg_advance (cum, mode, type, named)
2618 CUMULATIVE_ARGS *cum;
2619 enum machine_mode mode;
2623 cum->nargs_prototype--;
2625 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2627 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2630 cum->words += RS6000_ARG_SIZE (mode, type);
2632 else if (DEFAULT_ABI == ABI_V4)
2634 if (TARGET_HARD_FLOAT
2635 && (mode == SFmode || mode == DFmode))
2637 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2642 cum->words += cum->words & 1;
2643 cum->words += RS6000_ARG_SIZE (mode, type);
2649 int gregno = cum->sysv_gregno;
2651 /* Aggregates and IEEE quad get passed by reference. */
2652 if ((type && AGGREGATE_TYPE_P (type))
2656 n_words = RS6000_ARG_SIZE (mode, type);
2658 /* Long long is put in odd registers. */
2659 if (n_words == 2 && (gregno & 1) == 0)
2662 /* Long long is not split between registers and stack. */
2663 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2665 /* Long long is aligned on the stack. */
2667 cum->words += cum->words & 1;
2668 cum->words += n_words;
2671 /* Note: continuing to accumulate gregno past when we've started
2672 spilling to the stack indicates the fact that we've started
2673 spilling to the stack to expand_builtin_saveregs. */
2674 cum->sysv_gregno = gregno + n_words;
2677 if (TARGET_DEBUG_ARG)
2679 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2680 cum->words, cum->fregno);
2681 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2682 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2683 fprintf (stderr, "mode = %4s, named = %d\n",
2684 GET_MODE_NAME (mode), named);
2689 int align = (TARGET_32BIT && (cum->words & 1) != 0
2690 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2692 cum->words += align + RS6000_ARG_SIZE (mode, type);
2694 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2697 if (TARGET_DEBUG_ARG)
2699 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2700 cum->words, cum->fregno);
2701 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2702 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2703 fprintf (stderr, "named = %d, align = %d\n", named, align);
2708 /* Determine where to put an argument to a function.
2709 Value is zero to push the argument on the stack,
2710 or a hard register in which to store the argument.
2712 MODE is the argument's machine mode.
2713 TYPE is the data type of the argument (as a tree).
2714 This is null for libcalls where that information may
2716 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2717 the preceding args and about the function being called.
2718 NAMED is nonzero if this argument is a named parameter
2719 (otherwise it is an extra parameter matching an ellipsis).
2721 On RS/6000 the first eight words of non-FP are normally in registers
2722 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2723 Under V.4, the first 8 FP args are in registers.
2725 If this is floating-point and no prototype is specified, we use
2726 both an FP and integer register (or possibly FP reg and stack). Library
2727 functions (when TYPE is zero) always have the proper types for args,
2728 so we can pass the FP value just in one register. emit_library_function
2729 doesn't support PARALLEL anyway. */
2732 function_arg (cum, mode, type, named)
2733 CUMULATIVE_ARGS *cum;
2734 enum machine_mode mode;
2738 enum rs6000_abi abi = DEFAULT_ABI;
2740 /* Return a marker to indicate whether CR1 needs to set or clear the
2741 bit that V.4 uses to say fp args were passed in registers.
2742 Assume that we don't need the marker for software floating point,
2743 or compiler generated library calls. */
2744 if (mode == VOIDmode)
2747 && TARGET_HARD_FLOAT
2748 && cum->nargs_prototype < 0
2749 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2751 return GEN_INT (cum->call_cookie
2752 | ((cum->fregno == FP_ARG_MIN_REG)
2753 ? CALL_V4_SET_FP_ARGS
2754 : CALL_V4_CLEAR_FP_ARGS));
2757 return GEN_INT (cum->call_cookie);
2760 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2762 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2763 return gen_rtx_REG (mode, cum->vregno);
2767 else if (abi == ABI_V4)
2769 if (TARGET_HARD_FLOAT
2770 && (mode == SFmode || mode == DFmode))
2772 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2773 return gen_rtx_REG (mode, cum->fregno);
2780 int gregno = cum->sysv_gregno;
2782 /* Aggregates and IEEE quad get passed by reference. */
2783 if ((type && AGGREGATE_TYPE_P (type))
2787 n_words = RS6000_ARG_SIZE (mode, type);
2789 /* Long long is put in odd registers. */
2790 if (n_words == 2 && (gregno & 1) == 0)
2793 /* Long long is not split between registers and stack. */
2794 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2795 return gen_rtx_REG (mode, gregno);
2802 int align = (TARGET_32BIT && (cum->words & 1) != 0
2803 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2804 int align_words = cum->words + align;
2806 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2809 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2812 || ((cum->nargs_prototype > 0)
2813 /* IBM AIX extended its linkage convention definition always
2814 to require FP args after register save area hole on the
2816 && (DEFAULT_ABI != ABI_AIX
2818 || (align_words < GP_ARG_NUM_REG))))
2819 return gen_rtx_REG (mode, cum->fregno);
2821 return gen_rtx_PARALLEL (mode,
2823 gen_rtx_EXPR_LIST (VOIDmode,
2824 ((align_words >= GP_ARG_NUM_REG)
2827 + RS6000_ARG_SIZE (mode, type)
2829 /* If this is partially on the stack, then
2830 we only include the portion actually
2831 in registers here. */
2832 ? gen_rtx_REG (SImode,
2833 GP_ARG_MIN_REG + align_words)
2834 : gen_rtx_REG (mode,
2835 GP_ARG_MIN_REG + align_words))),
2837 gen_rtx_EXPR_LIST (VOIDmode,
2838 gen_rtx_REG (mode, cum->fregno),
2841 else if (align_words < GP_ARG_NUM_REG)
2842 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2848 /* For an arg passed partly in registers and partly in memory,
2849 this is the number of registers used.
2850 For args passed entirely in registers or entirely in memory, zero. */
2853 function_arg_partial_nregs (cum, mode, type, named)
2854 CUMULATIVE_ARGS *cum;
2855 enum machine_mode mode;
2857 int named ATTRIBUTE_UNUSED;
2859 if (DEFAULT_ABI == ABI_V4)
2862 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2863 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2865 if (cum->nargs_prototype >= 0)
2869 if (cum->words < GP_ARG_NUM_REG
2870 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2872 int ret = GP_ARG_NUM_REG - cum->words;
2873 if (ret && TARGET_DEBUG_ARG)
2874 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2882 /* A C expression that indicates when an argument must be passed by
2883 reference. If nonzero for an argument, a copy of that argument is
2884 made in memory and a pointer to the argument is passed instead of
2885 the argument itself. The pointer is passed in whatever way is
2886 appropriate for passing a pointer to that type.
2888 Under V.4, structures and unions are passed by reference. */
2891 function_arg_pass_by_reference (cum, mode, type, named)
2892 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2893 enum machine_mode mode ATTRIBUTE_UNUSED;
2895 int named ATTRIBUTE_UNUSED;
2897 if (DEFAULT_ABI == ABI_V4
2898 && ((type && AGGREGATE_TYPE_P (type))
2901 if (TARGET_DEBUG_ARG)
2902 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2910 /* Perform any needed actions needed for a function that is receiving a
2911 variable number of arguments.
2915 MODE and TYPE are the mode and type of the current parameter.
2917 PRETEND_SIZE is a variable that should be set to the amount of stack
2918 that must be pushed by the prolog to pretend that our caller pushed
2921 Normally, this macro will push all remaining incoming registers on the
2922 stack and set PRETEND_SIZE to the length of the registers pushed. */
2925 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2926 CUMULATIVE_ARGS *cum;
2927 enum machine_mode mode;
2933 CUMULATIVE_ARGS next_cum;
2934 int reg_size = TARGET_32BIT ? 4 : 8;
2935 rtx save_area = NULL_RTX, mem;
2936 int first_reg_offset, set;
2940 fntype = TREE_TYPE (current_function_decl);
2941 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2942 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2943 != void_type_node));
2945 /* For varargs, we do not want to skip the dummy va_dcl argument.
2946 For stdargs, we do want to skip the last named argument. */
2949 function_arg_advance (&next_cum, mode, type, 1);
2951 if (DEFAULT_ABI == ABI_V4)
2953 /* Indicate to allocate space on the stack for varargs save area. */
2954 cfun->machine->sysv_varargs_p = 1;
2956 save_area = plus_constant (virtual_stack_vars_rtx,
2957 - RS6000_VARARGS_SIZE);
2959 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2963 first_reg_offset = next_cum.words;
2964 save_area = virtual_incoming_args_rtx;
2965 cfun->machine->sysv_varargs_p = 0;
2967 if (MUST_PASS_IN_STACK (mode, type))
2968 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2971 set = get_varargs_alias_set ();
2972 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2974 mem = gen_rtx_MEM (BLKmode,
2975 plus_constant (save_area,
2976 first_reg_offset * reg_size)),
2977 set_mem_alias_set (mem, set);
2978 set_mem_align (mem, BITS_PER_WORD);
2981 (GP_ARG_MIN_REG + first_reg_offset, mem,
2982 GP_ARG_NUM_REG - first_reg_offset,
2983 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2985 /* ??? Does ABI_V4 need this at all? */
2986 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2989 /* Save FP registers if needed. */
2990 if (DEFAULT_ABI == ABI_V4
2991 && TARGET_HARD_FLOAT && ! no_rtl
2992 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2994 int fregno = next_cum.fregno;
2995 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2996 rtx lab = gen_label_rtx ();
2997 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2999 emit_jump_insn (gen_rtx_SET (VOIDmode,
3001 gen_rtx_IF_THEN_ELSE (VOIDmode,
3002 gen_rtx_NE (VOIDmode, cr1,
3004 gen_rtx_LABEL_REF (VOIDmode, lab),
3007 while (fregno <= FP_ARG_V4_MAX_REG)
3009 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3010 set_mem_alias_set (mem, set);
3011 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3020 /* Create the va_list data type. */
3023 rs6000_build_va_list ()
3025 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3027 /* For AIX, prefer 'char *' because that's what the system
3028 header files like. */
3029 if (DEFAULT_ABI != ABI_V4)
3030 return build_pointer_type (char_type_node);
3032 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3033 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3035 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3036 unsigned_char_type_node);
3037 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3038 unsigned_char_type_node);
3039 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3041 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3044 DECL_FIELD_CONTEXT (f_gpr) = record;
3045 DECL_FIELD_CONTEXT (f_fpr) = record;
3046 DECL_FIELD_CONTEXT (f_ovf) = record;
3047 DECL_FIELD_CONTEXT (f_sav) = record;
3049 TREE_CHAIN (record) = type_decl;
3050 TYPE_NAME (record) = type_decl;
3051 TYPE_FIELDS (record) = f_gpr;
3052 TREE_CHAIN (f_gpr) = f_fpr;
3053 TREE_CHAIN (f_fpr) = f_ovf;
3054 TREE_CHAIN (f_ovf) = f_sav;
3056 layout_type (record);
3058 /* The correct type is an array type of one element. */
3059 return build_array_type (record, build_index_type (size_zero_node));
3062 /* Implement va_start. */
3065 rs6000_va_start (stdarg_p, valist, nextarg)
3070 HOST_WIDE_INT words, n_gpr, n_fpr;
3071 tree f_gpr, f_fpr, f_ovf, f_sav;
3072 tree gpr, fpr, ovf, sav, t;
3074 /* Only SVR4 needs something special. */
3075 if (DEFAULT_ABI != ABI_V4)
3077 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
3081 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3082 f_fpr = TREE_CHAIN (f_gpr);
3083 f_ovf = TREE_CHAIN (f_fpr);
3084 f_sav = TREE_CHAIN (f_ovf);
3086 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3087 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3088 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3089 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3090 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3092 /* Count number of gp and fp argument registers used. */
3093 words = current_function_args_info.words;
3094 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3095 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3097 if (TARGET_DEBUG_ARG)
3099 fputs ("va_start: words = ", stderr);
3100 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3101 fputs (", n_gpr = ", stderr);
3102 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3103 fputs (", n_fpr = ", stderr);
3104 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3105 putc ('\n', stderr);
3108 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3109 TREE_SIDE_EFFECTS (t) = 1;
3110 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3112 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3113 TREE_SIDE_EFFECTS (t) = 1;
3114 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3116 /* Find the overflow area. */
3117 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3119 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3120 build_int_2 (words * UNITS_PER_WORD, 0));
3121 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3122 TREE_SIDE_EFFECTS (t) = 1;
3123 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3125 /* Find the register save area. */
3126 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3127 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3128 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3129 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3130 TREE_SIDE_EFFECTS (t) = 1;
3131 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3134 /* Implement va_arg. */
3137 rs6000_va_arg (valist, type)
3140 tree f_gpr, f_fpr, f_ovf, f_sav;
3141 tree gpr, fpr, ovf, sav, reg, t, u;
3142 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3143 rtx lab_false, lab_over, addr_rtx, r;
3145 if (DEFAULT_ABI != ABI_V4)
3146 return std_expand_builtin_va_arg (valist, type);
3148 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3149 f_fpr = TREE_CHAIN (f_gpr);
3150 f_ovf = TREE_CHAIN (f_fpr);
3151 f_sav = TREE_CHAIN (f_ovf);
3153 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3154 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3155 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3156 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3157 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3159 size = int_size_in_bytes (type);
3160 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3162 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3164 /* Aggregates and long doubles are passed by reference. */
3170 size = UNITS_PER_WORD;
3173 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3175 /* FP args go in FP registers, if present. */
3184 /* Otherwise into GP registers. */
3192 /* Pull the value out of the saved registers ... */
3194 lab_false = gen_label_rtx ();
3195 lab_over = gen_label_rtx ();
3196 addr_rtx = gen_reg_rtx (Pmode);
3198 /* Vectors never go in registers. */
3199 if (TREE_CODE (type) != VECTOR_TYPE)
3201 TREE_THIS_VOLATILE (reg) = 1;
3202 emit_cmp_and_jump_insns
3203 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3204 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3207 /* Long long is aligned in the registers. */
3210 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3211 build_int_2 (n_reg - 1, 0));
3212 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3213 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3214 TREE_SIDE_EFFECTS (u) = 1;
3215 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3219 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3223 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3224 build_int_2 (n_reg, 0));
3225 TREE_SIDE_EFFECTS (u) = 1;
3227 u = build1 (CONVERT_EXPR, integer_type_node, u);
3228 TREE_SIDE_EFFECTS (u) = 1;
3230 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3231 TREE_SIDE_EFFECTS (u) = 1;
3233 t = build (PLUS_EXPR, ptr_type_node, t, u);
3234 TREE_SIDE_EFFECTS (t) = 1;
3236 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3238 emit_move_insn (addr_rtx, r);
3240 emit_jump_insn (gen_jump (lab_over));
3244 emit_label (lab_false);
3246 /* ... otherwise out of the overflow area. */
3248 /* Make sure we don't find reg 7 for the next int arg.
3250 All AltiVec vectors go in the overflow area. So in the AltiVec
3251 case we need to get the vectors from the overflow area, but
3252 remember where the GPRs and FPRs are. */
3253 if (n_reg > 1 && TREE_CODE (type) != VECTOR_TYPE)
3255 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3256 TREE_SIDE_EFFECTS (t) = 1;
3257 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3260 /* Care for on-stack alignment if needed. */
3267 /* Vectors are 16 byte aligned. */
3268 if (TREE_CODE (type) == VECTOR_TYPE)
3273 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3274 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3278 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3280 emit_move_insn (addr_rtx, r);
3282 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3283 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3284 TREE_SIDE_EFFECTS (t) = 1;
3285 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3287 emit_label (lab_over);
3291 r = gen_rtx_MEM (Pmode, addr_rtx);
3292 set_mem_alias_set (r, get_varargs_alias_set ());
3293 emit_move_insn (addr_rtx, r);
3301 #define def_builtin(MASK, NAME, TYPE, CODE) \
3303 if ((MASK) & target_flags) \
3304 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3307 struct builtin_description
3309 const unsigned int mask;
3310 const enum insn_code icode;
3311 const char *const name;
3312 const enum rs6000_builtins code;
3315 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3317 static const struct builtin_description bdesc_3arg[] =
3319 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3320 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3321 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3322 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3323 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3324 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3325 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3326 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3327 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3328 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3329 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3330 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3331 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3332 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3333 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3334 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3335 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3336 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3337 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3338 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3339 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3340 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3341 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3344 /* DST operations: void foo (void *, const int, const char). */
3346 static const struct builtin_description bdesc_dst[] =
3348 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3349 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3350 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3351 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3354 /* Simple binary operations: VECc = foo (VECa, VECb). */
3356 static const struct builtin_description bdesc_2arg[] =
3358 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3359 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3360 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3361 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3362 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3363 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3364 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3365 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3366 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3367 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3368 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3369 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3370 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3371 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3372 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3373 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3374 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3375 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3376 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3377 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3378 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3379 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3380 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3381 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3382 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3383 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3384 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3385 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3386 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3387 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3388 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3389 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3390 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3391 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3392 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3393 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3394 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3395 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3396 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3397 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3398 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3399 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3400 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3401 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3403 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3404 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3405 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3406 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3407 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3408 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3409 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3410 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3411 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3412 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3413 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3415 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3416 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3417 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3418 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3419 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3420 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3421 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3422 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3423 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3424 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3425 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3426 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3427 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3428 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3429 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3430 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3431 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3432 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3433 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3434 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3435 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3436 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3437 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3438 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3439 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3440 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3441 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3442 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3443 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3444 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3445 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3446 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3447 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3448 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3449 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3450 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3451 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3452 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3453 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3454 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3455 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3456 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3457 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3458 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3459 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3460 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3461 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3462 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3463 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3464 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3465 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3466 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3467 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3468 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3469 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3470 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3473 /* AltiVec predicates. */
3475 struct builtin_description_predicates
3477 const unsigned int mask;
3478 const enum insn_code icode;
3480 const char *const name;
3481 const enum rs6000_builtins code;
3484 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3486 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3487 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3488 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3489 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3490 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3491 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3492 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3493 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3494 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3495 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3496 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3497 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3498 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3501 /* ABS* opreations. */
3503 static const struct builtin_description bdesc_abs[] =
3505 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
3506 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
3507 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
3508 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
3509 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
3510 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
3511 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
3514 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3517 static const struct builtin_description bdesc_1arg[] =
3519 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3520 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3521 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3522 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3523 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3524 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3525 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3526 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3527 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3528 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3529 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3530 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3531 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3532 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3533 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3534 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3535 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
3539 altivec_expand_unop_builtin (icode, arglist, target)
3540 enum insn_code icode;
3545 tree arg0 = TREE_VALUE (arglist);
3546 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3547 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3548 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3550 /* If we got invalid arguments bail out before generating bad rtl. */
3551 if (arg0 == error_mark_node)
3555 || GET_MODE (target) != tmode
3556 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3557 target = gen_reg_rtx (tmode);
3559 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3560 op0 = copy_to_mode_reg (mode0, op0);
3562 pat = GEN_FCN (icode) (target, op0);
3571 altivec_expand_abs_builtin (icode, arglist, target)
3572 enum insn_code icode;
3576 rtx pat, scratch1, scratch2;
3577 tree arg0 = TREE_VALUE (arglist);
3578 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3579 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3580 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3582 /* If we have invalid arguments, bail out before generating bad rtl. */
3583 if (arg0 == error_mark_node)
3587 || GET_MODE (target) != tmode
3588 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3589 target = gen_reg_rtx (tmode);
3591 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3592 op0 = copy_to_mode_reg (mode0, op0);
3594 scratch1 = gen_reg_rtx (mode0);
3595 scratch2 = gen_reg_rtx (mode0);
3597 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3606 altivec_expand_binop_builtin (icode, arglist, target)
3607 enum insn_code icode;
3612 tree arg0 = TREE_VALUE (arglist);
3613 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3614 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3615 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3616 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3617 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3618 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3620 /* If we got invalid arguments bail out before generating bad rtl. */
3621 if (arg0 == error_mark_node || arg1 == error_mark_node)
3625 || GET_MODE (target) != tmode
3626 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3627 target = gen_reg_rtx (tmode);
3629 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3630 op0 = copy_to_mode_reg (mode0, op0);
3631 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3632 op1 = copy_to_mode_reg (mode1, op1);
3634 pat = GEN_FCN (icode) (target, op0, op1);
3643 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3644 enum insn_code icode;
3650 tree cr6_form = TREE_VALUE (arglist);
3651 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3652 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3653 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3654 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3655 enum machine_mode tmode = SImode;
3656 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3657 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3660 if (TREE_CODE (cr6_form) != INTEGER_CST)
3662 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3666 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3671 /* If we have invalid arguments, bail out before generating bad rtl. */
3672 if (arg0 == error_mark_node || arg1 == error_mark_node)
3676 || GET_MODE (target) != tmode
3677 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3678 target = gen_reg_rtx (tmode);
3680 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3681 op0 = copy_to_mode_reg (mode0, op0);
3682 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3683 op1 = copy_to_mode_reg (mode1, op1);
3685 scratch = gen_reg_rtx (mode0);
3687 pat = GEN_FCN (icode) (scratch, op0, op1,
3688 gen_rtx (SYMBOL_REF, Pmode, opcode));
3693 /* The vec_any* and vec_all* predicates use the same opcodes for two
3694 different operations, but the bits in CR6 will be different
3695 depending on what information we want. So we have to play tricks
3696 with CR6 to get the right bits out.
3698 If you think this is disgusting, look at the specs for the
3699 AltiVec predicates. */
3701 switch (cr6_form_int)
3704 emit_insn (gen_cr6_test_for_zero (target));
3707 emit_insn (gen_cr6_test_for_zero_reverse (target));
3710 emit_insn (gen_cr6_test_for_lt (target));
3713 emit_insn (gen_cr6_test_for_lt_reverse (target));
3716 error ("argument 1 of __builtin_altivec_predicate is out of range");
3724 altivec_expand_stv_builtin (icode, arglist)
3725 enum insn_code icode;
3728 tree arg0 = TREE_VALUE (arglist);
3729 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3730 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3731 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3732 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3733 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3735 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3736 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3737 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3739 /* Invalid arguments. Bail before doing anything stoopid! */
3740 if (arg0 == error_mark_node
3741 || arg1 == error_mark_node
3742 || arg2 == error_mark_node)
3745 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3746 op0 = copy_to_mode_reg (mode2, op0);
3747 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3748 op1 = copy_to_mode_reg (mode0, op1);
3749 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3750 op2 = copy_to_mode_reg (mode1, op2);
3752 pat = GEN_FCN (icode) (op1, op2, op0);
3759 altivec_expand_ternop_builtin (icode, arglist, target)
3760 enum insn_code icode;
3765 tree arg0 = TREE_VALUE (arglist);
3766 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3767 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3768 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3769 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3770 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3771 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3772 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3773 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3774 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3776 /* If we got invalid arguments bail out before generating bad rtl. */
3777 if (arg0 == error_mark_node
3778 || arg1 == error_mark_node
3779 || arg2 == error_mark_node)
3783 || GET_MODE (target) != tmode
3784 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3785 target = gen_reg_rtx (tmode);
3787 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3788 op0 = copy_to_mode_reg (mode0, op0);
3789 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3790 op1 = copy_to_mode_reg (mode1, op1);
3791 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3792 op2 = copy_to_mode_reg (mode2, op2);
3794 pat = GEN_FCN (icode) (target, op0, op1, op2);
3802 altivec_expand_builtin (exp, target)
3806 struct builtin_description *d;
3807 struct builtin_description_predicates *dp;
3809 enum insn_code icode;
3810 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3811 tree arglist = TREE_OPERAND (exp, 1);
3812 tree arg0, arg1, arg2;
3813 rtx op0, op1, op2, pat;
3814 enum machine_mode tmode, mode0, mode1, mode2;
3815 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3819 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3820 icode = CODE_FOR_altivec_lvx_16qi;
3821 arg0 = TREE_VALUE (arglist);
3822 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3823 tmode = insn_data[icode].operand[0].mode;
3824 mode0 = insn_data[icode].operand[1].mode;
3827 || GET_MODE (target) != tmode
3828 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3829 target = gen_reg_rtx (tmode);
3831 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3832 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3834 pat = GEN_FCN (icode) (target, op0);
3840 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3841 icode = CODE_FOR_altivec_lvx_8hi;
3842 arg0 = TREE_VALUE (arglist);
3843 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3844 tmode = insn_data[icode].operand[0].mode;
3845 mode0 = insn_data[icode].operand[1].mode;
3848 || GET_MODE (target) != tmode
3849 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3850 target = gen_reg_rtx (tmode);
3852 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3853 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3855 pat = GEN_FCN (icode) (target, op0);
3861 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3862 icode = CODE_FOR_altivec_lvx_4si;
3863 arg0 = TREE_VALUE (arglist);
3864 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3865 tmode = insn_data[icode].operand[0].mode;
3866 mode0 = insn_data[icode].operand[1].mode;
3869 || GET_MODE (target) != tmode
3870 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3871 target = gen_reg_rtx (tmode);
3873 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3874 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3876 pat = GEN_FCN (icode) (target, op0);
3882 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3883 icode = CODE_FOR_altivec_lvx_4sf;
3884 arg0 = TREE_VALUE (arglist);
3885 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3886 tmode = insn_data[icode].operand[0].mode;
3887 mode0 = insn_data[icode].operand[1].mode;
3890 || GET_MODE (target) != tmode
3891 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3892 target = gen_reg_rtx (tmode);
3894 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3895 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3897 pat = GEN_FCN (icode) (target, op0);
3903 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3904 icode = CODE_FOR_altivec_stvx_16qi;
3905 arg0 = TREE_VALUE (arglist);
3906 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3907 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3908 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3909 mode0 = insn_data[icode].operand[0].mode;
3910 mode1 = insn_data[icode].operand[1].mode;
3912 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3913 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3914 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3915 op1 = copy_to_mode_reg (mode1, op1);
3917 pat = GEN_FCN (icode) (op0, op1);
3922 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3923 icode = CODE_FOR_altivec_stvx_8hi;
3924 arg0 = TREE_VALUE (arglist);
3925 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3926 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3927 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3928 mode0 = insn_data[icode].operand[0].mode;
3929 mode1 = insn_data[icode].operand[1].mode;
3931 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3932 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3933 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3934 op1 = copy_to_mode_reg (mode1, op1);
3936 pat = GEN_FCN (icode) (op0, op1);
3941 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3942 icode = CODE_FOR_altivec_stvx_4si;
3943 arg0 = TREE_VALUE (arglist);
3944 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3945 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3946 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3947 mode0 = insn_data[icode].operand[0].mode;
3948 mode1 = insn_data[icode].operand[1].mode;
3950 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3951 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3952 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3953 op1 = copy_to_mode_reg (mode1, op1);
3955 pat = GEN_FCN (icode) (op0, op1);
3960 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3961 icode = CODE_FOR_altivec_stvx_4sf;
3962 arg0 = TREE_VALUE (arglist);
3963 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3964 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3965 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3966 mode0 = insn_data[icode].operand[0].mode;
3967 mode1 = insn_data[icode].operand[1].mode;
3969 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3970 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3971 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3972 op1 = copy_to_mode_reg (mode1, op1);
3974 pat = GEN_FCN (icode) (op0, op1);
3979 case ALTIVEC_BUILTIN_STVX:
3980 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3981 case ALTIVEC_BUILTIN_STVEBX:
3982 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3983 case ALTIVEC_BUILTIN_STVEHX:
3984 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3985 case ALTIVEC_BUILTIN_STVEWX:
3986 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3987 case ALTIVEC_BUILTIN_STVXL:
3988 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3990 case ALTIVEC_BUILTIN_MFVSCR:
3991 icode = CODE_FOR_altivec_mfvscr;
3992 tmode = insn_data[icode].operand[0].mode;
3995 || GET_MODE (target) != tmode
3996 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3997 target = gen_reg_rtx (tmode);
3999 pat = GEN_FCN (icode) (target);
4005 case ALTIVEC_BUILTIN_MTVSCR:
4006 icode = CODE_FOR_altivec_mtvscr;
4007 arg0 = TREE_VALUE (arglist);
4008 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4009 mode0 = insn_data[icode].operand[0].mode;
4011 /* If we got invalid arguments bail out before generating bad rtl. */
4012 if (arg0 == error_mark_node)
4015 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4016 op0 = copy_to_mode_reg (mode0, op0);
4018 pat = GEN_FCN (icode) (op0);
4023 case ALTIVEC_BUILTIN_DSSALL:
4024 emit_insn (gen_altivec_dssall ());
4027 case ALTIVEC_BUILTIN_DSS:
4028 icode = CODE_FOR_altivec_dss;
4029 arg0 = TREE_VALUE (arglist);
4030 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4031 mode0 = insn_data[icode].operand[0].mode;
4033 /* If we got invalid arguments bail out before generating bad rtl. */
4034 if (arg0 == error_mark_node)
4037 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4038 op0 = copy_to_mode_reg (mode0, op0);
4040 emit_insn (gen_altivec_dss (op0));
4044 /* Handle DST variants. */
4045 d = (struct builtin_description *) bdesc_dst;
4046 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4047 if (d->code == fcode)
4049 arg0 = TREE_VALUE (arglist);
4050 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4051 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4052 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4053 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4054 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4055 mode0 = insn_data[d->icode].operand[0].mode;
4056 mode1 = insn_data[d->icode].operand[1].mode;
4057 mode2 = insn_data[d->icode].operand[2].mode;
4059 /* Invalid arguments, bail out before generating bad rtl. */
4060 if (arg0 == error_mark_node
4061 || arg1 == error_mark_node
4062 || arg2 == error_mark_node)
4065 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4066 op0 = copy_to_mode_reg (mode0, op0);
4067 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4068 op1 = copy_to_mode_reg (mode1, op1);
4070 if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
4072 error ("argument 3 of `%s' must be a 2-bit literal", d->name);
4076 pat = GEN_FCN (d->icode) (op0, op1, op2);
4083 /* Expand abs* operations. */
4084 d = (struct builtin_description *) bdesc_abs;
4085 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4086 if (d->code == fcode)
4087 return altivec_expand_abs_builtin (d->icode, arglist, target);
4089 /* Handle simple unary operations. */
4090 d = (struct builtin_description *) bdesc_1arg;
4091 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4092 if (d->code == fcode)
4093 return altivec_expand_unop_builtin (d->icode, arglist, target);
4095 /* Handle simple binary operations. */
4096 d = (struct builtin_description *) bdesc_2arg;
4097 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4098 if (d->code == fcode)
4099 return altivec_expand_binop_builtin (d->icode, arglist, target);
4101 /* Expand the AltiVec predicates. */
4102 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4103 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4104 if (dp->code == fcode)
4105 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4107 /* LV* are funky. We initialized them differently. */
4110 case ALTIVEC_BUILTIN_LVSL:
4111 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4113 case ALTIVEC_BUILTIN_LVSR:
4114 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4116 case ALTIVEC_BUILTIN_LVEBX:
4117 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4119 case ALTIVEC_BUILTIN_LVEHX:
4120 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4122 case ALTIVEC_BUILTIN_LVEWX:
4123 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4125 case ALTIVEC_BUILTIN_LVXL:
4126 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4128 case ALTIVEC_BUILTIN_LVX:
4129 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
4136 /* Handle simple ternary operations. */
4137 d = (struct builtin_description *) bdesc_3arg;
4138 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4139 if (d->code == fcode)
4140 return altivec_expand_ternop_builtin (d->icode, arglist, target);
4146 /* Expand an expression EXP that calls a built-in function,
4147 with result going to TARGET if that's convenient
4148 (and in mode MODE if that's convenient).
4149 SUBTARGET may be used as the target for computing one of EXP's operands.
4150 IGNORE is nonzero if the value is to be ignored. */
4153 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4156 rtx subtarget ATTRIBUTE_UNUSED;
4157 enum machine_mode mode ATTRIBUTE_UNUSED;
4158 int ignore ATTRIBUTE_UNUSED;
4161 return altivec_expand_builtin (exp, target);
4167 rs6000_init_builtins ()
4170 altivec_init_builtins ();
4174 altivec_init_builtins (void)
4176 struct builtin_description *d;
4177 struct builtin_description_predicates *dp;
4180 tree endlink = void_list_node;
4182 tree pint_type_node = build_pointer_type (integer_type_node);
4183 tree pvoid_type_node = build_pointer_type (void_type_node);
4184 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4185 tree pchar_type_node = build_pointer_type (char_type_node);
4186 tree pfloat_type_node = build_pointer_type (float_type_node);
4188 tree v4sf_ftype_v4sf_v4sf_v16qi
4189 = build_function_type (V4SF_type_node,
4190 tree_cons (NULL_TREE, V4SF_type_node,
4191 tree_cons (NULL_TREE, V4SF_type_node,
4192 tree_cons (NULL_TREE,
4195 tree v4si_ftype_v4si_v4si_v16qi
4196 = build_function_type (V4SI_type_node,
4197 tree_cons (NULL_TREE, V4SI_type_node,
4198 tree_cons (NULL_TREE, V4SI_type_node,
4199 tree_cons (NULL_TREE,
4202 tree v8hi_ftype_v8hi_v8hi_v16qi
4203 = build_function_type (V8HI_type_node,
4204 tree_cons (NULL_TREE, V8HI_type_node,
4205 tree_cons (NULL_TREE, V8HI_type_node,
4206 tree_cons (NULL_TREE,
4209 tree v16qi_ftype_v16qi_v16qi_v16qi
4210 = build_function_type (V16QI_type_node,
4211 tree_cons (NULL_TREE, V16QI_type_node,
4212 tree_cons (NULL_TREE, V16QI_type_node,
4213 tree_cons (NULL_TREE,
4217 /* V4SI foo (char). */
4218 tree v4si_ftype_char
4219 = build_function_type (V4SI_type_node,
4220 tree_cons (NULL_TREE, char_type_node, endlink));
4222 /* V8HI foo (char). */
4223 tree v8hi_ftype_char
4224 = build_function_type (V8HI_type_node,
4225 tree_cons (NULL_TREE, char_type_node, endlink));
4227 /* V16QI foo (char). */
4228 tree v16qi_ftype_char
4229 = build_function_type (V16QI_type_node,
4230 tree_cons (NULL_TREE, char_type_node, endlink));
4231 /* V4SF foo (V4SF). */
4232 tree v4sf_ftype_v4sf
4233 = build_function_type (V4SF_type_node,
4234 tree_cons (NULL_TREE, V4SF_type_node, endlink));
4236 /* V4SI foo (int *). */
4237 tree v4si_ftype_pint
4238 = build_function_type (V4SI_type_node,
4239 tree_cons (NULL_TREE, pint_type_node, endlink));
4240 /* V8HI foo (short *). */
4241 tree v8hi_ftype_pshort
4242 = build_function_type (V8HI_type_node,
4243 tree_cons (NULL_TREE, pshort_type_node, endlink));
4244 /* V16QI foo (char *). */
4245 tree v16qi_ftype_pchar
4246 = build_function_type (V16QI_type_node,
4247 tree_cons (NULL_TREE, pchar_type_node, endlink));
4248 /* V4SF foo (float *). */
4249 tree v4sf_ftype_pfloat
4250 = build_function_type (V4SF_type_node,
4251 tree_cons (NULL_TREE, pfloat_type_node, endlink));
4253 /* V8HI foo (V16QI). */
4254 tree v8hi_ftype_v16qi
4255 = build_function_type (V8HI_type_node,
4256 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4258 /* void foo (void *, int, char/literal). */
4259 tree void_ftype_pvoid_int_char
4260 = build_function_type (void_type_node,
4261 tree_cons (NULL_TREE, pvoid_type_node,
4262 tree_cons (NULL_TREE, integer_type_node,
4263 tree_cons (NULL_TREE,
4267 /* void foo (int *, V4SI). */
4268 tree void_ftype_pint_v4si
4269 = build_function_type (void_type_node,
4270 tree_cons (NULL_TREE, pint_type_node,
4271 tree_cons (NULL_TREE, V4SI_type_node,
4273 /* void foo (short *, V8HI). */
4274 tree void_ftype_pshort_v8hi
4275 = build_function_type (void_type_node,
4276 tree_cons (NULL_TREE, pshort_type_node,
4277 tree_cons (NULL_TREE, V8HI_type_node,
4279 /* void foo (char *, V16QI). */
4280 tree void_ftype_pchar_v16qi
4281 = build_function_type (void_type_node,
4282 tree_cons (NULL_TREE, pchar_type_node,
4283 tree_cons (NULL_TREE, V16QI_type_node,
4285 /* void foo (float *, V4SF). */
4286 tree void_ftype_pfloat_v4sf
4287 = build_function_type (void_type_node,
4288 tree_cons (NULL_TREE, pfloat_type_node,
4289 tree_cons (NULL_TREE, V4SF_type_node,
4292 /* void foo (V4SI). */
4293 tree void_ftype_v4si
4294 = build_function_type (void_type_node,
4295 tree_cons (NULL_TREE, V4SI_type_node,
4298 /* void foo (vint, int, void *). */
4299 tree void_ftype_v4si_int_pvoid
4300 = build_function_type (void_type_node,
4301 tree_cons (NULL_TREE, V4SI_type_node,
4302 tree_cons (NULL_TREE, integer_type_node,
4303 tree_cons (NULL_TREE,
4307 /* void foo (vchar, int, void *). */
4308 tree void_ftype_v16qi_int_pvoid
4309 = build_function_type (void_type_node,
4310 tree_cons (NULL_TREE, V16QI_type_node,
4311 tree_cons (NULL_TREE, integer_type_node,
4312 tree_cons (NULL_TREE,
4316 /* void foo (vshort, int, void *). */
4317 tree void_ftype_v8hi_int_pvoid
4318 = build_function_type (void_type_node,
4319 tree_cons (NULL_TREE, V8HI_type_node,
4320 tree_cons (NULL_TREE, integer_type_node,
4321 tree_cons (NULL_TREE,
4325 /* void foo (char). */
4327 = build_function_type (void_type_node,
4328 tree_cons (NULL_TREE, char_type_node,
4331 /* void foo (void). */
4332 tree void_ftype_void
4333 = build_function_type (void_type_node, void_list_node);
4335 /* vshort foo (void). */
4336 tree v8hi_ftype_void
4337 = build_function_type (V8HI_type_node, void_list_node);
4339 tree v4si_ftype_v4si_v4si
4340 = build_function_type (V4SI_type_node,
4341 tree_cons (NULL_TREE, V4SI_type_node,
4342 tree_cons (NULL_TREE, V4SI_type_node,
4345 /* These are for the unsigned 5 bit literals. */
4347 tree v4sf_ftype_v4si_char
4348 = build_function_type (V4SF_type_node,
4349 tree_cons (NULL_TREE, V4SI_type_node,
4350 tree_cons (NULL_TREE, char_type_node,
4352 tree v4si_ftype_v4sf_char
4353 = build_function_type (V4SI_type_node,
4354 tree_cons (NULL_TREE, V4SF_type_node,
4355 tree_cons (NULL_TREE, char_type_node,
4357 tree v4si_ftype_v4si_char
4358 = build_function_type (V4SI_type_node,
4359 tree_cons (NULL_TREE, V4SI_type_node,
4360 tree_cons (NULL_TREE, char_type_node,
4362 tree v8hi_ftype_v8hi_char
4363 = build_function_type (V8HI_type_node,
4364 tree_cons (NULL_TREE, V8HI_type_node,
4365 tree_cons (NULL_TREE, char_type_node,
4367 tree v16qi_ftype_v16qi_char
4368 = build_function_type (V16QI_type_node,
4369 tree_cons (NULL_TREE, V16QI_type_node,
4370 tree_cons (NULL_TREE, char_type_node,
4373 /* These are for the unsigned 4 bit literals. */
4375 tree v16qi_ftype_v16qi_v16qi_char
4376 = build_function_type (V16QI_type_node,
4377 tree_cons (NULL_TREE, V16QI_type_node,
4378 tree_cons (NULL_TREE, V16QI_type_node,
4379 tree_cons (NULL_TREE,
4383 tree v8hi_ftype_v8hi_v8hi_char
4384 = build_function_type (V8HI_type_node,
4385 tree_cons (NULL_TREE, V8HI_type_node,
4386 tree_cons (NULL_TREE, V8HI_type_node,
4387 tree_cons (NULL_TREE,
4391 tree v4si_ftype_v4si_v4si_char
4392 = build_function_type (V4SI_type_node,
4393 tree_cons (NULL_TREE, V4SI_type_node,
4394 tree_cons (NULL_TREE, V4SI_type_node,
4395 tree_cons (NULL_TREE,
4399 tree v4sf_ftype_v4sf_v4sf_char
4400 = build_function_type (V4SF_type_node,
4401 tree_cons (NULL_TREE, V4SF_type_node,
4402 tree_cons (NULL_TREE, V4SF_type_node,
4403 tree_cons (NULL_TREE,
4407 /* End of 4 bit literals. */
4409 tree v4sf_ftype_v4sf_v4sf
4410 = build_function_type (V4SF_type_node,
4411 tree_cons (NULL_TREE, V4SF_type_node,
4412 tree_cons (NULL_TREE, V4SF_type_node,
4414 tree v4sf_ftype_v4sf_v4sf_v4si
4415 = build_function_type (V4SF_type_node,
4416 tree_cons (NULL_TREE, V4SF_type_node,
4417 tree_cons (NULL_TREE, V4SF_type_node,
4418 tree_cons (NULL_TREE,
4421 tree v4sf_ftype_v4sf_v4sf_v4sf
4422 = build_function_type (V4SF_type_node,
4423 tree_cons (NULL_TREE, V4SF_type_node,
4424 tree_cons (NULL_TREE, V4SF_type_node,
4425 tree_cons (NULL_TREE,
4428 tree v4si_ftype_v4si_v4si_v4si
4429 = build_function_type (V4SI_type_node,
4430 tree_cons (NULL_TREE, V4SI_type_node,
4431 tree_cons (NULL_TREE, V4SI_type_node,
4432 tree_cons (NULL_TREE,
4436 tree v8hi_ftype_v8hi_v8hi
4437 = build_function_type (V8HI_type_node,
4438 tree_cons (NULL_TREE, V8HI_type_node,
4439 tree_cons (NULL_TREE, V8HI_type_node,
4441 tree v8hi_ftype_v8hi_v8hi_v8hi
4442 = build_function_type (V8HI_type_node,
4443 tree_cons (NULL_TREE, V8HI_type_node,
4444 tree_cons (NULL_TREE, V8HI_type_node,
4445 tree_cons (NULL_TREE,
4448 tree v4si_ftype_v8hi_v8hi_v4si
4449 = build_function_type (V4SI_type_node,
4450 tree_cons (NULL_TREE, V8HI_type_node,
4451 tree_cons (NULL_TREE, V8HI_type_node,
4452 tree_cons (NULL_TREE,
4455 tree v4si_ftype_v16qi_v16qi_v4si
4456 = build_function_type (V4SI_type_node,
4457 tree_cons (NULL_TREE, V16QI_type_node,
4458 tree_cons (NULL_TREE, V16QI_type_node,
4459 tree_cons (NULL_TREE,
4463 tree v16qi_ftype_v16qi_v16qi
4464 = build_function_type (V16QI_type_node,
4465 tree_cons (NULL_TREE, V16QI_type_node,
4466 tree_cons (NULL_TREE, V16QI_type_node,
4469 tree v4si_ftype_v4sf_v4sf
4470 = build_function_type (V4SI_type_node,
4471 tree_cons (NULL_TREE, V4SF_type_node,
4472 tree_cons (NULL_TREE, V4SF_type_node,
4475 tree v4si_ftype_v4si
4476 = build_function_type (V4SI_type_node,
4477 tree_cons (NULL_TREE, V4SI_type_node, endlink));
4479 tree v8hi_ftype_v8hi
4480 = build_function_type (V8HI_type_node,
4481 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4483 tree v16qi_ftype_v16qi
4484 = build_function_type (V16QI_type_node,
4485 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4487 tree v8hi_ftype_v16qi_v16qi
4488 = build_function_type (V8HI_type_node,
4489 tree_cons (NULL_TREE, V16QI_type_node,
4490 tree_cons (NULL_TREE, V16QI_type_node,
4493 tree v4si_ftype_v8hi_v8hi
4494 = build_function_type (V4SI_type_node,
4495 tree_cons (NULL_TREE, V8HI_type_node,
4496 tree_cons (NULL_TREE, V8HI_type_node,
4499 tree v8hi_ftype_v4si_v4si
4500 = build_function_type (V8HI_type_node,
4501 tree_cons (NULL_TREE, V4SI_type_node,
4502 tree_cons (NULL_TREE, V4SI_type_node,
4505 tree v16qi_ftype_v8hi_v8hi
4506 = build_function_type (V16QI_type_node,
4507 tree_cons (NULL_TREE, V8HI_type_node,
4508 tree_cons (NULL_TREE, V8HI_type_node,
4511 tree v4si_ftype_v16qi_v4si
4512 = build_function_type (V4SI_type_node,
4513 tree_cons (NULL_TREE, V16QI_type_node,
4514 tree_cons (NULL_TREE, V4SI_type_node,
4517 tree v4si_ftype_v16qi_v16qi
4518 = build_function_type (V4SI_type_node,
4519 tree_cons (NULL_TREE, V16QI_type_node,
4520 tree_cons (NULL_TREE, V16QI_type_node,
4523 tree v4si_ftype_v8hi_v4si
4524 = build_function_type (V4SI_type_node,
4525 tree_cons (NULL_TREE, V8HI_type_node,
4526 tree_cons (NULL_TREE, V4SI_type_node,
4529 tree v4si_ftype_v8hi
4530 = build_function_type (V4SI_type_node,
4531 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4533 tree int_ftype_v4si_v4si
4534 = build_function_type (integer_type_node,
4535 tree_cons (NULL_TREE, V4SI_type_node,
4536 tree_cons (NULL_TREE, V4SI_type_node,
4539 tree int_ftype_v4sf_v4sf
4540 = build_function_type (integer_type_node,
4541 tree_cons (NULL_TREE, V4SF_type_node,
4542 tree_cons (NULL_TREE, V4SF_type_node,
4545 tree int_ftype_v16qi_v16qi
4546 = build_function_type (integer_type_node,
4547 tree_cons (NULL_TREE, V16QI_type_node,
4548 tree_cons (NULL_TREE, V16QI_type_node,
4551 tree int_ftype_int_v4si_v4si
4552 = build_function_type
4554 tree_cons (NULL_TREE, integer_type_node,
4555 tree_cons (NULL_TREE, V4SI_type_node,
4556 tree_cons (NULL_TREE, V4SI_type_node,
4559 tree int_ftype_int_v4sf_v4sf
4560 = build_function_type
4562 tree_cons (NULL_TREE, integer_type_node,
4563 tree_cons (NULL_TREE, V4SF_type_node,
4564 tree_cons (NULL_TREE, V4SF_type_node,
4567 tree int_ftype_int_v8hi_v8hi
4568 = build_function_type
4570 tree_cons (NULL_TREE, integer_type_node,
4571 tree_cons (NULL_TREE, V8HI_type_node,
4572 tree_cons (NULL_TREE, V8HI_type_node,
4575 tree int_ftype_int_v16qi_v16qi
4576 = build_function_type
4578 tree_cons (NULL_TREE, integer_type_node,
4579 tree_cons (NULL_TREE, V16QI_type_node,
4580 tree_cons (NULL_TREE, V16QI_type_node,
4583 tree v16qi_ftype_int_pvoid
4584 = build_function_type (V16QI_type_node,
4585 tree_cons (NULL_TREE, integer_type_node,
4586 tree_cons (NULL_TREE, pvoid_type_node,
4589 tree v4si_ftype_int_pvoid
4590 = build_function_type (V4SI_type_node,
4591 tree_cons (NULL_TREE, integer_type_node,
4592 tree_cons (NULL_TREE, pvoid_type_node,
4595 tree v8hi_ftype_int_pvoid
4596 = build_function_type (V8HI_type_node,
4597 tree_cons (NULL_TREE, integer_type_node,
4598 tree_cons (NULL_TREE, pvoid_type_node,
4601 tree int_ftype_v8hi_v8hi
4602 = build_function_type (integer_type_node,
4603 tree_cons (NULL_TREE, V8HI_type_node,
4604 tree_cons (NULL_TREE, V8HI_type_node,
4607 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4608 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4609 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4610 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4611 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4612 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4613 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4614 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4615 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4616 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4617 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4618 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4619 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4620 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4621 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4622 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4623 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4624 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4625 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4626 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4627 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4628 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4629 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4630 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4632 /* Add the simple ternary operators. */
4633 d = (struct builtin_description *) bdesc_3arg;
4634 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4637 enum machine_mode mode0, mode1, mode2, mode3;
4643 mode0 = insn_data[d->icode].operand[0].mode;
4644 mode1 = insn_data[d->icode].operand[1].mode;
4645 mode2 = insn_data[d->icode].operand[2].mode;
4646 mode3 = insn_data[d->icode].operand[3].mode;
4648 /* When all four are of the same mode. */
4649 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4654 type = v4si_ftype_v4si_v4si_v4si;
4657 type = v4sf_ftype_v4sf_v4sf_v4sf;
4660 type = v8hi_ftype_v8hi_v8hi_v8hi;
4663 type = v16qi_ftype_v16qi_v16qi_v16qi;
4669 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4674 type = v4si_ftype_v4si_v4si_v16qi;
4677 type = v4sf_ftype_v4sf_v4sf_v16qi;
4680 type = v8hi_ftype_v8hi_v8hi_v16qi;
4683 type = v16qi_ftype_v16qi_v16qi_v16qi;
4689 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4690 && mode3 == V4SImode)
4691 type = v4si_ftype_v16qi_v16qi_v4si;
4692 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4693 && mode3 == V4SImode)
4694 type = v4si_ftype_v8hi_v8hi_v4si;
4695 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4696 && mode3 == V4SImode)
4697 type = v4sf_ftype_v4sf_v4sf_v4si;
4699 /* vchar, vchar, vchar, 4 bit literal. */
4700 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4702 type = v16qi_ftype_v16qi_v16qi_char;
4704 /* vshort, vshort, vshort, 4 bit literal. */
4705 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4707 type = v8hi_ftype_v8hi_v8hi_char;
4709 /* vint, vint, vint, 4 bit literal. */
4710 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4712 type = v4si_ftype_v4si_v4si_char;
4714 /* vfloat, vfloat, vfloat, 4 bit literal. */
4715 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4717 type = v4sf_ftype_v4sf_v4sf_char;
4722 def_builtin (d->mask, d->name, type, d->code);
4725 /* Add the DST variants. */
4726 d = (struct builtin_description *) bdesc_dst;
4727 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4728 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4730 /* Initialize the predicates. */
4731 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4732 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4734 enum machine_mode mode1;
4737 mode1 = insn_data[dp->icode].operand[1].mode;
4742 type = int_ftype_int_v4si_v4si;
4745 type = int_ftype_int_v8hi_v8hi;
4748 type = int_ftype_int_v16qi_v16qi;
4751 type = int_ftype_int_v4sf_v4sf;
4757 def_builtin (dp->mask, dp->name, type, dp->code);
4760 /* Add the simple binary operators. */
4761 d = (struct builtin_description *) bdesc_2arg;
4762 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4764 enum machine_mode mode0, mode1, mode2;
4770 mode0 = insn_data[d->icode].operand[0].mode;
4771 mode1 = insn_data[d->icode].operand[1].mode;
4772 mode2 = insn_data[d->icode].operand[2].mode;
4774 /* When all three operands are of the same mode. */
4775 if (mode0 == mode1 && mode1 == mode2)
4780 type = v4sf_ftype_v4sf_v4sf;
4783 type = v4si_ftype_v4si_v4si;
4786 type = v16qi_ftype_v16qi_v16qi;
4789 type = v8hi_ftype_v8hi_v8hi;
4796 /* A few other combos we really don't want to do manually. */
4798 /* vint, vfloat, vfloat. */
4799 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4800 type = v4si_ftype_v4sf_v4sf;
4802 /* vshort, vchar, vchar. */
4803 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4804 type = v8hi_ftype_v16qi_v16qi;
4806 /* vint, vshort, vshort. */
4807 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4808 type = v4si_ftype_v8hi_v8hi;
4810 /* vshort, vint, vint. */
4811 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4812 type = v8hi_ftype_v4si_v4si;
4814 /* vchar, vshort, vshort. */
4815 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4816 type = v16qi_ftype_v8hi_v8hi;
4818 /* vint, vchar, vint. */
4819 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4820 type = v4si_ftype_v16qi_v4si;
4822 /* vint, vchar, vchar. */
4823 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4824 type = v4si_ftype_v16qi_v16qi;
4826 /* vint, vshort, vint. */
4827 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4828 type = v4si_ftype_v8hi_v4si;
4830 /* vint, vint, 5 bit literal. */
4831 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4832 type = v4si_ftype_v4si_char;
4834 /* vshort, vshort, 5 bit literal. */
4835 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4836 type = v8hi_ftype_v8hi_char;
4838 /* vchar, vchar, 5 bit literal. */
4839 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4840 type = v16qi_ftype_v16qi_char;
4842 /* vfloat, vint, 5 bit literal. */
4843 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4844 type = v4sf_ftype_v4si_char;
4846 /* vint, vfloat, 5 bit literal. */
4847 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4848 type = v4si_ftype_v4sf_char;
4851 else if (mode0 == SImode)
4856 type = int_ftype_v4si_v4si;
4859 type = int_ftype_v4sf_v4sf;
4862 type = int_ftype_v16qi_v16qi;
4865 type = int_ftype_v8hi_v8hi;
4875 def_builtin (d->mask, d->name, type, d->code);
4878 /* Initialize the abs* operators. */
4879 d = (struct builtin_description *) bdesc_abs;
4880 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4882 enum machine_mode mode0;
4885 mode0 = insn_data[d->icode].operand[0].mode;
4890 type = v4si_ftype_v4si;
4893 type = v8hi_ftype_v8hi;
4896 type = v16qi_ftype_v16qi;
4899 type = v4sf_ftype_v4sf;
4905 def_builtin (d->mask, d->name, type, d->code);
4908 /* Add the simple unary operators. */
4909 d = (struct builtin_description *) bdesc_1arg;
4910 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4912 enum machine_mode mode0, mode1;
4918 mode0 = insn_data[d->icode].operand[0].mode;
4919 mode1 = insn_data[d->icode].operand[1].mode;
4921 if (mode0 == V4SImode && mode1 == QImode)
4922 type = v4si_ftype_char;
4923 else if (mode0 == V8HImode && mode1 == QImode)
4924 type = v8hi_ftype_char;
4925 else if (mode0 == V16QImode && mode1 == QImode)
4926 type = v16qi_ftype_char;
4927 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4928 type = v4sf_ftype_v4sf;
4929 else if (mode0 == V8HImode && mode1 == V16QImode)
4930 type = v8hi_ftype_v16qi;
4931 else if (mode0 == V4SImode && mode1 == V8HImode)
4932 type = v4si_ftype_v8hi;
4936 def_builtin (d->mask, d->name, type, d->code);
4941 /* Generate a memory reference for expand_block_move, copying volatile,
4942 and other bits from an original memory reference. */
4945 expand_block_move_mem (mode, addr, orig_mem)
4946 enum machine_mode mode;
4950 rtx mem = gen_rtx_MEM (mode, addr);
4952 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4956 /* Expand a block move operation, and return 1 if successful. Return 0
4957 if we should let the compiler generate normal code.
4959 operands[0] is the destination
4960 operands[1] is the source
4961 operands[2] is the length
4962 operands[3] is the alignment */
4964 #define MAX_MOVE_REG 4
4967 expand_block_move (operands)
4970 rtx orig_dest = operands[0];
4971 rtx orig_src = operands[1];
4972 rtx bytes_rtx = operands[2];
4973 rtx align_rtx = operands[3];
4974 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
4985 rtx stores[MAX_MOVE_REG];
4988 /* If this is not a fixed size move, just call memcpy */
4992 /* If this is not a fixed size alignment, abort */
4993 if (GET_CODE (align_rtx) != CONST_INT)
4995 align = INTVAL (align_rtx);
4997 /* Anything to move? */
4998 bytes = INTVAL (bytes_rtx);
5002 /* store_one_arg depends on expand_block_move to handle at least the size of
5003 reg_parm_stack_space. */
5004 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
5007 /* Move the address into scratch registers. */
5008 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
5009 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
5011 if (TARGET_STRING) /* string instructions are available */
5013 for ( ; bytes > 0; bytes -= move_bytes)
5015 if (bytes > 24 /* move up to 32 bytes at a time */
5023 && ! fixed_regs[12])
5025 move_bytes = (bytes > 32) ? 32 : bytes;
5026 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
5029 expand_block_move_mem (BLKmode,
5032 GEN_INT ((move_bytes == 32)
5036 else if (bytes > 16 /* move up to 24 bytes at a time */
5042 && ! fixed_regs[10])
5044 move_bytes = (bytes > 24) ? 24 : bytes;
5045 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
5048 expand_block_move_mem (BLKmode,
5051 GEN_INT (move_bytes),
5054 else if (bytes > 8 /* move up to 16 bytes at a time */
5060 move_bytes = (bytes > 16) ? 16 : bytes;
5061 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
5064 expand_block_move_mem (BLKmode,
5067 GEN_INT (move_bytes),
5070 else if (bytes >= 8 && TARGET_POWERPC64
5071 /* 64-bit loads and stores require word-aligned
5073 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5076 tmp_reg = gen_reg_rtx (DImode);
5077 emit_move_insn (tmp_reg,
5078 expand_block_move_mem (DImode,
5079 src_reg, orig_src));
5080 emit_move_insn (expand_block_move_mem (DImode,
5081 dest_reg, orig_dest),
5084 else if (bytes > 4 && !TARGET_POWERPC64)
5085 { /* move up to 8 bytes at a time */
5086 move_bytes = (bytes > 8) ? 8 : bytes;
5087 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
5090 expand_block_move_mem (BLKmode,
5093 GEN_INT (move_bytes),
5096 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5097 { /* move 4 bytes */
5099 tmp_reg = gen_reg_rtx (SImode);
5100 emit_move_insn (tmp_reg,
5101 expand_block_move_mem (SImode,
5102 src_reg, orig_src));
5103 emit_move_insn (expand_block_move_mem (SImode,
5104 dest_reg, orig_dest),
5107 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5108 { /* move 2 bytes */
5110 tmp_reg = gen_reg_rtx (HImode);
5111 emit_move_insn (tmp_reg,
5112 expand_block_move_mem (HImode,
5113 src_reg, orig_src));
5114 emit_move_insn (expand_block_move_mem (HImode,
5115 dest_reg, orig_dest),
5118 else if (bytes == 1) /* move 1 byte */
5121 tmp_reg = gen_reg_rtx (QImode);
5122 emit_move_insn (tmp_reg,
5123 expand_block_move_mem (QImode,
5124 src_reg, orig_src));
5125 emit_move_insn (expand_block_move_mem (QImode,
5126 dest_reg, orig_dest),
5130 { /* move up to 4 bytes at a time */
5131 move_bytes = (bytes > 4) ? 4 : bytes;
5132 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
5135 expand_block_move_mem (BLKmode,
5138 GEN_INT (move_bytes),
5142 if (bytes > move_bytes)
5144 if (! TARGET_POWERPC64)
5146 emit_insn (gen_addsi3 (src_reg, src_reg,
5147 GEN_INT (move_bytes)));
5148 emit_insn (gen_addsi3 (dest_reg, dest_reg,
5149 GEN_INT (move_bytes)));
5153 emit_insn (gen_adddi3 (src_reg, src_reg,
5154 GEN_INT (move_bytes)));
5155 emit_insn (gen_adddi3 (dest_reg, dest_reg,
5156 GEN_INT (move_bytes)));
5162 else /* string instructions not available */
5164 num_reg = offset = 0;
5165 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
5167 /* Calculate the correct offset for src/dest */
5171 dest_addr = dest_reg;
5175 src_addr = plus_constant (src_reg, offset);
5176 dest_addr = plus_constant (dest_reg, offset);
5179 /* Generate the appropriate load and store, saving the stores
5181 if (bytes >= 8 && TARGET_POWERPC64
5182 /* 64-bit loads and stores require word-aligned
5184 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5187 tmp_reg = gen_reg_rtx (DImode);
5188 emit_insn (gen_movdi (tmp_reg,
5189 expand_block_move_mem (DImode,
5192 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
5197 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5200 tmp_reg = gen_reg_rtx (SImode);
5201 emit_insn (gen_movsi (tmp_reg,
5202 expand_block_move_mem (SImode,
5205 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5210 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5213 tmp_reg = gen_reg_rtx (HImode);
5214 emit_insn (gen_movhi (tmp_reg,
5215 expand_block_move_mem (HImode,
5218 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
5226 tmp_reg = gen_reg_rtx (QImode);
5227 emit_insn (gen_movqi (tmp_reg,
5228 expand_block_move_mem (QImode,
5231 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
5237 if (num_reg >= MAX_MOVE_REG)
5239 for (i = 0; i < num_reg; i++)
5240 emit_insn (stores[i]);
5245 for (i = 0; i < num_reg; i++)
5246 emit_insn (stores[i]);
5253 /* Return 1 if OP is a load multiple operation. It is known to be a
5254 PARALLEL and the first section will be tested. */
5257 load_multiple_operation (op, mode)
5259 enum machine_mode mode ATTRIBUTE_UNUSED;
5261 int count = XVECLEN (op, 0);
5262 unsigned int dest_regno;
5266 /* Perform a quick check so we don't blow up below. */
5268 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5269 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5270 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5273 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5274 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5276 for (i = 1; i < count; i++)
5278 rtx elt = XVECEXP (op, 0, i);
5280 if (GET_CODE (elt) != SET
5281 || GET_CODE (SET_DEST (elt)) != REG
5282 || GET_MODE (SET_DEST (elt)) != SImode
5283 || REGNO (SET_DEST (elt)) != dest_regno + i
5284 || GET_CODE (SET_SRC (elt)) != MEM
5285 || GET_MODE (SET_SRC (elt)) != SImode
5286 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5287 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5288 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5289 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5296 /* Similar, but tests for store multiple. Here, the second vector element
5297 is a CLOBBER. It will be tested later. */
5300 store_multiple_operation (op, mode)
5302 enum machine_mode mode ATTRIBUTE_UNUSED;
5304 int count = XVECLEN (op, 0) - 1;
5305 unsigned int src_regno;
5309 /* Perform a quick check so we don't blow up below. */
5311 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5312 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5313 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5316 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5317 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5319 for (i = 1; i < count; i++)
5321 rtx elt = XVECEXP (op, 0, i + 1);
5323 if (GET_CODE (elt) != SET
5324 || GET_CODE (SET_SRC (elt)) != REG
5325 || GET_MODE (SET_SRC (elt)) != SImode
5326 || REGNO (SET_SRC (elt)) != src_regno + i
5327 || GET_CODE (SET_DEST (elt)) != MEM
5328 || GET_MODE (SET_DEST (elt)) != SImode
5329 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5330 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5331 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5332 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5339 /* Return 1 for a parallel vrsave operation. */
5342 vrsave_operation (op, mode)
5344 enum machine_mode mode ATTRIBUTE_UNUSED;
5346 int count = XVECLEN (op, 0);
5347 unsigned int dest_regno, src_regno;
5351 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5352 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5353 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5356 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5357 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5359 if (dest_regno != VRSAVE_REGNO
5360 && src_regno != VRSAVE_REGNO)
5363 for (i = 1; i < count; i++)
5365 rtx elt = XVECEXP (op, 0, i);
5367 if (GET_CODE (elt) != CLOBBER
5368 && GET_CODE (elt) != SET)
5375 /* Return 1 for an PARALLEL suitable for mtcrf. */
5378 mtcrf_operation (op, mode)
5380 enum machine_mode mode ATTRIBUTE_UNUSED;
5382 int count = XVECLEN (op, 0);
5386 /* Perform a quick check so we don't blow up below. */
5388 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5389 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5390 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5392 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5394 if (GET_CODE (src_reg) != REG
5395 || GET_MODE (src_reg) != SImode
5396 || ! INT_REGNO_P (REGNO (src_reg)))
5399 for (i = 0; i < count; i++)
5401 rtx exp = XVECEXP (op, 0, i);
5405 if (GET_CODE (exp) != SET
5406 || GET_CODE (SET_DEST (exp)) != REG
5407 || GET_MODE (SET_DEST (exp)) != CCmode
5408 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5410 unspec = SET_SRC (exp);
5411 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5413 if (GET_CODE (unspec) != UNSPEC
5414 || XINT (unspec, 1) != 20
5415 || XVECLEN (unspec, 0) != 2
5416 || XVECEXP (unspec, 0, 0) != src_reg
5417 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5418 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5424 /* Return 1 for an PARALLEL suitable for lmw. */
5427 lmw_operation (op, mode)
5429 enum machine_mode mode ATTRIBUTE_UNUSED;
5431 int count = XVECLEN (op, 0);
5432 unsigned int dest_regno;
5434 unsigned int base_regno;
5435 HOST_WIDE_INT offset;
5438 /* Perform a quick check so we don't blow up below. */
5440 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5441 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5442 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5445 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5446 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5449 || count != 32 - (int) dest_regno)
5452 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5455 base_regno = REGNO (src_addr);
5456 if (base_regno == 0)
5459 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5461 offset = INTVAL (XEXP (src_addr, 1));
5462 base_regno = REGNO (XEXP (src_addr, 0));
5467 for (i = 0; i < count; i++)
5469 rtx elt = XVECEXP (op, 0, i);
5472 HOST_WIDE_INT newoffset;
5474 if (GET_CODE (elt) != SET
5475 || GET_CODE (SET_DEST (elt)) != REG
5476 || GET_MODE (SET_DEST (elt)) != SImode
5477 || REGNO (SET_DEST (elt)) != dest_regno + i
5478 || GET_CODE (SET_SRC (elt)) != MEM
5479 || GET_MODE (SET_SRC (elt)) != SImode)
5481 newaddr = XEXP (SET_SRC (elt), 0);
5482 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5487 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5489 addr_reg = XEXP (newaddr, 0);
5490 newoffset = INTVAL (XEXP (newaddr, 1));
5494 if (REGNO (addr_reg) != base_regno
5495 || newoffset != offset + 4 * i)
5502 /* Return 1 for an PARALLEL suitable for stmw. */
5505 stmw_operation (op, mode)
5507 enum machine_mode mode ATTRIBUTE_UNUSED;
5509 int count = XVECLEN (op, 0);
5510 unsigned int src_regno;
5512 unsigned int base_regno;
5513 HOST_WIDE_INT offset;
5516 /* Perform a quick check so we don't blow up below. */
5518 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5519 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5520 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5523 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5524 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5527 || count != 32 - (int) src_regno)
5530 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5533 base_regno = REGNO (dest_addr);
5534 if (base_regno == 0)
5537 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5539 offset = INTVAL (XEXP (dest_addr, 1));
5540 base_regno = REGNO (XEXP (dest_addr, 0));
5545 for (i = 0; i < count; i++)
5547 rtx elt = XVECEXP (op, 0, i);
5550 HOST_WIDE_INT newoffset;
5552 if (GET_CODE (elt) != SET
5553 || GET_CODE (SET_SRC (elt)) != REG
5554 || GET_MODE (SET_SRC (elt)) != SImode
5555 || REGNO (SET_SRC (elt)) != src_regno + i
5556 || GET_CODE (SET_DEST (elt)) != MEM
5557 || GET_MODE (SET_DEST (elt)) != SImode)
5559 newaddr = XEXP (SET_DEST (elt), 0);
5560 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5565 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5567 addr_reg = XEXP (newaddr, 0);
5568 newoffset = INTVAL (XEXP (newaddr, 1));
5572 if (REGNO (addr_reg) != base_regno
5573 || newoffset != offset + 4 * i)
5580 /* A validation routine: say whether CODE, a condition code, and MODE
5581 match. The other alternatives either don't make sense or should
5582 never be generated. */
5585 validate_condition_mode (code, mode)
5587 enum machine_mode mode;
5589 if (GET_RTX_CLASS (code) != '<'
5590 || GET_MODE_CLASS (mode) != MODE_CC)
5593 /* These don't make sense. */
5594 if ((code == GT || code == LT || code == GE || code == LE)
5595 && mode == CCUNSmode)
5598 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5599 && mode != CCUNSmode)
5602 if (mode != CCFPmode
5603 && (code == ORDERED || code == UNORDERED
5604 || code == UNEQ || code == LTGT
5605 || code == UNGT || code == UNLT
5606 || code == UNGE || code == UNLE))
5609 /* These should never be generated except for
5610 flag_unsafe_math_optimizations. */
5611 if (mode == CCFPmode
5612 && ! flag_unsafe_math_optimizations
5613 && (code == LE || code == GE
5614 || code == UNEQ || code == LTGT
5615 || code == UNGT || code == UNLT))
5618 /* These are invalid; the information is not there. */
5619 if (mode == CCEQmode
5620 && code != EQ && code != NE)
5624 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5625 We only check the opcode against the mode of the CC value here. */
5628 branch_comparison_operator (op, mode)
5630 enum machine_mode mode ATTRIBUTE_UNUSED;
5632 enum rtx_code code = GET_CODE (op);
5633 enum machine_mode cc_mode;
5635 if (GET_RTX_CLASS (code) != '<')
5638 cc_mode = GET_MODE (XEXP (op, 0));
5639 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5642 validate_condition_mode (code, cc_mode);
5647 /* Return 1 if OP is a comparison operation that is valid for a branch
5648 insn and which is true if the corresponding bit in the CC register
5652 branch_positive_comparison_operator (op, mode)
5654 enum machine_mode mode;
5658 if (! branch_comparison_operator (op, mode))
5661 code = GET_CODE (op);
5662 return (code == EQ || code == LT || code == GT
5663 || code == LTU || code == GTU
5664 || code == UNORDERED);
5667 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5668 We check the opcode against the mode of the CC value and disallow EQ or
5669 NE comparisons for integers. */
5672 scc_comparison_operator (op, mode)
5674 enum machine_mode mode;
5676 enum rtx_code code = GET_CODE (op);
5677 enum machine_mode cc_mode;
5679 if (GET_MODE (op) != mode && mode != VOIDmode)
5682 if (GET_RTX_CLASS (code) != '<')
5685 cc_mode = GET_MODE (XEXP (op, 0));
5686 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5689 validate_condition_mode (code, cc_mode);
5691 if (code == NE && cc_mode != CCFPmode)
5698 trap_comparison_operator (op, mode)
5700 enum machine_mode mode;
5702 if (mode != VOIDmode && mode != GET_MODE (op))
5704 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5708 boolean_operator (op, mode)
5710 enum machine_mode mode ATTRIBUTE_UNUSED;
5712 enum rtx_code code = GET_CODE (op);
5713 return (code == AND || code == IOR || code == XOR);
5717 boolean_or_operator (op, mode)
5719 enum machine_mode mode ATTRIBUTE_UNUSED;
5721 enum rtx_code code = GET_CODE (op);
5722 return (code == IOR || code == XOR);
5726 min_max_operator (op, mode)
5728 enum machine_mode mode ATTRIBUTE_UNUSED;
5730 enum rtx_code code = GET_CODE (op);
5731 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5734 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5735 mask required to convert the result of a rotate insn into a shift
5736 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
5739 includes_lshift_p (shiftop, andop)
5743 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5745 shift_mask <<= INTVAL (shiftop);
5747 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5750 /* Similar, but for right shift. */
5753 includes_rshift_p (shiftop, andop)
5757 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5759 shift_mask >>= INTVAL (shiftop);
5761 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5764 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5765 to perform a left shift. It must have exactly SHIFTOP least
5766 signifigant 0's, then one or more 1's, then zero or more 0's. */
5769 includes_rldic_lshift_p (shiftop, andop)
5773 if (GET_CODE (andop) == CONST_INT)
5775 HOST_WIDE_INT c, lsb, shift_mask;
5778 if (c == 0 || c == ~0)
5782 shift_mask <<= INTVAL (shiftop);
5784 /* Find the least signifigant one bit. */
5787 /* It must coincide with the LSB of the shift mask. */
5788 if (-lsb != shift_mask)
5791 /* Invert to look for the next transition (if any). */
5794 /* Remove the low group of ones (originally low group of zeros). */
5797 /* Again find the lsb, and check we have all 1's above. */
5801 else if (GET_CODE (andop) == CONST_DOUBLE
5802 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5804 HOST_WIDE_INT low, high, lsb;
5805 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5807 low = CONST_DOUBLE_LOW (andop);
5808 if (HOST_BITS_PER_WIDE_INT < 64)
5809 high = CONST_DOUBLE_HIGH (andop);
5811 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5812 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
5815 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5817 shift_mask_high = ~0;
5818 if (INTVAL (shiftop) > 32)
5819 shift_mask_high <<= INTVAL (shiftop) - 32;
5823 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5830 return high == -lsb;
5833 shift_mask_low = ~0;
5834 shift_mask_low <<= INTVAL (shiftop);
5838 if (-lsb != shift_mask_low)
5841 if (HOST_BITS_PER_WIDE_INT < 64)
5846 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5849 return high == -lsb;
5853 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5859 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5860 to perform a left shift. It must have SHIFTOP or more least
5861 signifigant 0's, with the remainder of the word 1's. */
5864 includes_rldicr_lshift_p (shiftop, andop)
5868 if (GET_CODE (andop) == CONST_INT)
5870 HOST_WIDE_INT c, lsb, shift_mask;
5873 shift_mask <<= INTVAL (shiftop);
5876 /* Find the least signifigant one bit. */
5879 /* It must be covered by the shift mask.
5880 This test also rejects c == 0. */
5881 if ((lsb & shift_mask) == 0)
5884 /* Check we have all 1's above the transition, and reject all 1's. */
5885 return c == -lsb && lsb != 1;
5887 else if (GET_CODE (andop) == CONST_DOUBLE
5888 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5890 HOST_WIDE_INT low, lsb, shift_mask_low;
5892 low = CONST_DOUBLE_LOW (andop);
5894 if (HOST_BITS_PER_WIDE_INT < 64)
5896 HOST_WIDE_INT high, shift_mask_high;
5898 high = CONST_DOUBLE_HIGH (andop);
5902 shift_mask_high = ~0;
5903 if (INTVAL (shiftop) > 32)
5904 shift_mask_high <<= INTVAL (shiftop) - 32;
5908 if ((lsb & shift_mask_high) == 0)
5911 return high == -lsb;
5917 shift_mask_low = ~0;
5918 shift_mask_low <<= INTVAL (shiftop);
5922 if ((lsb & shift_mask_low) == 0)
5925 return low == -lsb && lsb != 1;
5931 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5932 for lfq and stfq insns.
5934 Note reg1 and reg2 *must* be hard registers. To be sure we will
5935 abort if we are passed pseudo registers. */
5938 registers_ok_for_quad_peep (reg1, reg2)
5941 /* We might have been passed a SUBREG. */
5942 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5945 return (REGNO (reg1) == REGNO (reg2) - 1);
5948 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5949 addr1 and addr2 must be in consecutive memory locations
5950 (addr2 == addr1 + 8). */
5953 addrs_ok_for_quad_peep (addr1, addr2)
5960 /* Extract an offset (if used) from the first addr. */
5961 if (GET_CODE (addr1) == PLUS)
5963 /* If not a REG, return zero. */
5964 if (GET_CODE (XEXP (addr1, 0)) != REG)
5968 reg1 = REGNO (XEXP (addr1, 0));
5969 /* The offset must be constant! */
5970 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5972 offset1 = INTVAL (XEXP (addr1, 1));
5975 else if (GET_CODE (addr1) != REG)
5979 reg1 = REGNO (addr1);
5980 /* This was a simple (mem (reg)) expression. Offset is 0. */
5984 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5985 if (GET_CODE (addr2) != PLUS)
5988 if (GET_CODE (XEXP (addr2, 0)) != REG
5989 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5992 if (reg1 != REGNO (XEXP (addr2, 0)))
5995 /* The offset for the second addr must be 8 more than the first addr. */
5996 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5999 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
6004 /* Return the register class of a scratch register needed to copy IN into
6005 or out of a register in CLASS in MODE. If it can be done directly,
6006 NO_REGS is returned. */
6009 secondary_reload_class (class, mode, in)
6010 enum reg_class class;
6011 enum machine_mode mode ATTRIBUTE_UNUSED;
6016 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
6018 /* We cannot copy a symbolic operand directly into anything
6019 other than BASE_REGS for TARGET_ELF. So indicate that a
6020 register from BASE_REGS is needed as an intermediate
6023 On Darwin, pic addresses require a load from memory, which
6024 needs a base register. */
6025 if (class != BASE_REGS
6026 && (GET_CODE (in) == SYMBOL_REF
6027 || GET_CODE (in) == HIGH
6028 || GET_CODE (in) == LABEL_REF
6029 || GET_CODE (in) == CONST))
6033 if (GET_CODE (in) == REG)
6036 if (regno >= FIRST_PSEUDO_REGISTER)
6038 regno = true_regnum (in);
6039 if (regno >= FIRST_PSEUDO_REGISTER)
6043 else if (GET_CODE (in) == SUBREG)
6045 regno = true_regnum (in);
6046 if (regno >= FIRST_PSEUDO_REGISTER)
6052 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
6054 if (class == GENERAL_REGS || class == BASE_REGS
6055 || (regno >= 0 && INT_REGNO_P (regno)))
6058 /* Constants, memory, and FP registers can go into FP registers. */
6059 if ((regno == -1 || FP_REGNO_P (regno))
6060 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
6063 /* Memory, and AltiVec registers can go into AltiVec registers. */
6064 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
6065 && class == ALTIVEC_REGS)
6068 /* We can copy among the CR registers. */
6069 if ((class == CR_REGS || class == CR0_REGS)
6070 && regno >= 0 && CR_REGNO_P (regno))
6073 /* Otherwise, we need GENERAL_REGS. */
6074 return GENERAL_REGS;
6077 /* Given a comparison operation, return the bit number in CCR to test. We
6078 know this is a valid comparison.
6080 SCC_P is 1 if this is for an scc. That means that %D will have been
6081 used instead of %C, so the bits will be in different places.
6083 Return -1 if OP isn't a valid comparison for some reason. */
6090 enum rtx_code code = GET_CODE (op);
6091 enum machine_mode cc_mode;
6096 if (GET_RTX_CLASS (code) != '<')
6101 if (GET_CODE (reg) != REG
6102 || ! CR_REGNO_P (REGNO (reg)))
6105 cc_mode = GET_MODE (reg);
6106 cc_regnum = REGNO (reg);
6107 base_bit = 4 * (cc_regnum - CR0_REGNO);
6109 validate_condition_mode (code, cc_mode);
6114 return scc_p ? base_bit + 3 : base_bit + 2;
6116 return base_bit + 2;
6117 case GT: case GTU: case UNLE:
6118 return base_bit + 1;
6119 case LT: case LTU: case UNGE:
6121 case ORDERED: case UNORDERED:
6122 return base_bit + 3;
6125 /* If scc, we will have done a cror to put the bit in the
6126 unordered position. So test that bit. For integer, this is ! LT
6127 unless this is an scc insn. */
6128 return scc_p ? base_bit + 3 : base_bit;
6131 return scc_p ? base_bit + 3 : base_bit + 1;
6138 /* Return the GOT register. */
6141 rs6000_got_register (value)
6142 rtx value ATTRIBUTE_UNUSED;
6144 /* The second flow pass currently (June 1999) can't update
6145 regs_ever_live without disturbing other parts of the compiler, so
6146 update it here to make the prolog/epilogue code happy. */
6147 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
6148 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
6150 current_function_uses_pic_offset_table = 1;
6152 return pic_offset_table_rtx;
6155 /* Functions to init, mark and free struct machine_function.
6156 These will be called, via pointer variables,
6157 from push_function_context and pop_function_context. */
6160 rs6000_init_machine_status (p)
6163 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
6167 rs6000_free_machine_status (p)
6170 if (p->machine == NULL)
6178 /* Print an operand. Recognize special options, documented below. */
6181 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6182 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6184 #define SMALL_DATA_RELOC "sda21"
6185 #define SMALL_DATA_REG 0
6189 print_operand (file, x, code)
6197 /* These macros test for integers and extract the low-order bits. */
6199 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6200 && GET_MODE (X) == VOIDmode)
6202 #define INT_LOWPART(X) \
6203 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6208 /* Write out an instruction after the call which may be replaced
6209 with glue code by the loader. This depends on the AIX version. */
6210 asm_fprintf (file, RS6000_CALL_GLUE);
6213 /* %a is output_address. */
6216 /* If X is a constant integer whose low-order 5 bits are zero,
6217 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6218 in the AIX assembler where "sri" with a zero shift count
6219 writes a trash instruction. */
6220 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6227 /* If constant, low-order 16 bits of constant, unsigned.
6228 Otherwise, write normally. */
6230 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6232 print_operand (file, x, 0);
6236 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6237 for 64-bit mask direction. */
6238 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6241 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6245 /* There used to be a comment for 'C' reading "This is an
6246 optional cror needed for certain floating-point
6247 comparisons. Otherwise write nothing." */
6249 /* Similar, except that this is for an scc, so we must be able to
6250 encode the test in a single bit that is one. We do the above
6251 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6252 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6253 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6255 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6257 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6259 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6262 else if (GET_CODE (x) == NE)
6264 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6266 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6267 base_bit + 2, base_bit + 2);
6272 /* X is a CR register. Print the number of the EQ bit of the CR */
6273 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6274 output_operand_lossage ("invalid %%E value");
6276 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6280 /* X is a CR register. Print the shift count needed to move it
6281 to the high-order four bits. */
6282 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6283 output_operand_lossage ("invalid %%f value");
6285 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6289 /* Similar, but print the count for the rotate in the opposite
6291 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6292 output_operand_lossage ("invalid %%F value");
6294 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6298 /* X is a constant integer. If it is negative, print "m",
6299 otherwise print "z". This is to make a aze or ame insn. */
6300 if (GET_CODE (x) != CONST_INT)
6301 output_operand_lossage ("invalid %%G value");
6302 else if (INTVAL (x) >= 0)
6309 /* If constant, output low-order five bits. Otherwise, write
6312 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6314 print_operand (file, x, 0);
6318 /* If constant, output low-order six bits. Otherwise, write
6321 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6323 print_operand (file, x, 0);
6327 /* Print `i' if this is a constant, else nothing. */
6333 /* Write the bit number in CCR for jump. */
6336 output_operand_lossage ("invalid %%j code");
6338 fprintf (file, "%d", i);
6342 /* Similar, but add one for shift count in rlinm for scc and pass
6343 scc flag to `ccr_bit'. */
6346 output_operand_lossage ("invalid %%J code");
6348 /* If we want bit 31, write a shift count of zero, not 32. */
6349 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6353 /* X must be a constant. Write the 1's complement of the
6356 output_operand_lossage ("invalid %%k value");
6358 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6362 /* X must be a symbolic constant on ELF. Write an
6363 expression suitable for an 'addi' that adds in the low 16
6365 if (GET_CODE (x) != CONST)
6367 print_operand_address (file, x);
6372 if (GET_CODE (XEXP (x, 0)) != PLUS
6373 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6374 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6375 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6376 output_operand_lossage ("invalid %%K value");
6377 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6379 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6383 /* %l is output_asm_label. */
6386 /* Write second word of DImode or DFmode reference. Works on register
6387 or non-indexed memory only. */
6388 if (GET_CODE (x) == REG)
6389 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6390 else if (GET_CODE (x) == MEM)
6392 /* Handle possible auto-increment. Since it is pre-increment and
6393 we have already done it, we can just use an offset of word. */
6394 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6395 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6396 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6399 output_address (XEXP (adjust_address_nv (x, SImode,
6403 if (small_data_operand (x, GET_MODE (x)))
6404 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6405 reg_names[SMALL_DATA_REG]);
6410 /* MB value for a mask operand. */
6411 if (! mask_operand (x, SImode))
6412 output_operand_lossage ("invalid %%m value");
6414 val = INT_LOWPART (x);
6416 /* If the high bit is set and the low bit is not, the value is zero.
6417 If the high bit is zero, the value is the first 1 bit we find from
6419 if ((val & 0x80000000) && ((val & 1) == 0))
6424 else if ((val & 0x80000000) == 0)
6426 for (i = 1; i < 32; i++)
6427 if ((val <<= 1) & 0x80000000)
6429 fprintf (file, "%d", i);
6433 /* Otherwise, look for the first 0 bit from the right. The result is its
6434 number plus 1. We know the low-order bit is one. */
6435 for (i = 0; i < 32; i++)
6436 if (((val >>= 1) & 1) == 0)
6439 /* If we ended in ...01, i would be 0. The correct value is 31, so
6441 fprintf (file, "%d", 31 - i);
6445 /* ME value for a mask operand. */
6446 if (! mask_operand (x, SImode))
6447 output_operand_lossage ("invalid %%M value");
6449 val = INT_LOWPART (x);
6451 /* If the low bit is set and the high bit is not, the value is 31.
6452 If the low bit is zero, the value is the first 1 bit we find from
6454 if ((val & 1) && ((val & 0x80000000) == 0))
6459 else if ((val & 1) == 0)
6461 for (i = 0; i < 32; i++)
6462 if ((val >>= 1) & 1)
6465 /* If we had ....10, i would be 0. The result should be
6466 30, so we need 30 - i. */
6467 fprintf (file, "%d", 30 - i);
6471 /* Otherwise, look for the first 0 bit from the left. The result is its
6472 number minus 1. We know the high-order bit is one. */
6473 for (i = 0; i < 32; i++)
6474 if (((val <<= 1) & 0x80000000) == 0)
6477 fprintf (file, "%d", i);
6480 /* %n outputs the negative of its operand. */
6483 /* Write the number of elements in the vector times 4. */
6484 if (GET_CODE (x) != PARALLEL)
6485 output_operand_lossage ("invalid %%N value");
6487 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6491 /* Similar, but subtract 1 first. */
6492 if (GET_CODE (x) != PARALLEL)
6493 output_operand_lossage ("invalid %%O value");
6495 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6499 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6501 || INT_LOWPART (x) < 0
6502 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6503 output_operand_lossage ("invalid %%p value");
6505 fprintf (file, "%d", i);
6509 /* The operand must be an indirect memory reference. The result
6510 is the register number. */
6511 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6512 || REGNO (XEXP (x, 0)) >= 32)
6513 output_operand_lossage ("invalid %%P value");
6515 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6519 /* This outputs the logical code corresponding to a boolean
6520 expression. The expression may have one or both operands
6521 negated (if one, only the first one). For condition register
6522 logical operations, it will also treat the negated
6523 CR codes as NOTs, but not handle NOTs of them. */
6525 const char *const *t = 0;
6527 enum rtx_code code = GET_CODE (x);
6528 static const char * const tbl[3][3] = {
6529 { "and", "andc", "nor" },
6530 { "or", "orc", "nand" },
6531 { "xor", "eqv", "xor" } };
6535 else if (code == IOR)
6537 else if (code == XOR)
6540 output_operand_lossage ("invalid %%q value");
6542 if (GET_CODE (XEXP (x, 0)) != NOT)
6546 if (GET_CODE (XEXP (x, 1)) == NOT)
6557 /* X is a CR register. Print the mask for `mtcrf'. */
6558 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6559 output_operand_lossage ("invalid %%R value");
6561 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6565 /* Low 5 bits of 32 - value */
6567 output_operand_lossage ("invalid %%s value");
6569 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6573 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6574 CONST_INT 32-bit mask is considered sign-extended so any
6575 transition must occur within the CONST_INT, not on the boundary. */
6576 if (! mask64_operand (x, DImode))
6577 output_operand_lossage ("invalid %%S value");
6579 val = INT_LOWPART (x);
6581 if (val & 1) /* Clear Left */
6583 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6584 if (!((val >>= 1) & 1))
6587 #if HOST_BITS_PER_WIDE_INT == 32
6588 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6590 val = CONST_DOUBLE_HIGH (x);
6595 for (i = 32; i < 64; i++)
6596 if (!((val >>= 1) & 1))
6600 /* i = index of last set bit from right
6601 mask begins at 63 - i from left */
6603 output_operand_lossage ("%%S computed all 1's mask");
6605 fprintf (file, "%d", 63 - i);
6608 else /* Clear Right */
6610 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6611 if ((val >>= 1) & 1)
6614 #if HOST_BITS_PER_WIDE_INT == 32
6615 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6617 val = CONST_DOUBLE_HIGH (x);
6619 if (val == (HOST_WIDE_INT) -1)
6622 for (i = 32; i < 64; i++)
6623 if ((val >>= 1) & 1)
6627 /* i = index of last clear bit from right
6628 mask ends at 62 - i from left */
6630 output_operand_lossage ("%%S computed all 0's mask");
6632 fprintf (file, "%d", 62 - i);
6637 /* Print the symbolic name of a branch target register. */
6638 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6639 && REGNO (x) != COUNT_REGISTER_REGNUM))
6640 output_operand_lossage ("invalid %%T value");
6641 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6642 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6644 fputs ("ctr", file);
6648 /* High-order 16 bits of constant for use in unsigned operand. */
6650 output_operand_lossage ("invalid %%u value");
6652 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6653 (INT_LOWPART (x) >> 16) & 0xffff);
6657 /* High-order 16 bits of constant for use in signed operand. */
6659 output_operand_lossage ("invalid %%v value");
6661 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6662 (INT_LOWPART (x) >> 16) & 0xffff);
6666 /* Print `u' if this has an auto-increment or auto-decrement. */
6667 if (GET_CODE (x) == MEM
6668 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6669 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6674 /* Print the trap code for this operand. */
6675 switch (GET_CODE (x))
6678 fputs ("eq", file); /* 4 */
6681 fputs ("ne", file); /* 24 */
6684 fputs ("lt", file); /* 16 */
6687 fputs ("le", file); /* 20 */
6690 fputs ("gt", file); /* 8 */
6693 fputs ("ge", file); /* 12 */
6696 fputs ("llt", file); /* 2 */
6699 fputs ("lle", file); /* 6 */
6702 fputs ("lgt", file); /* 1 */
6705 fputs ("lge", file); /* 5 */
6713 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6716 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6717 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6719 print_operand (file, x, 0);
6723 /* MB value for a PowerPC64 rldic operand. */
6724 val = (GET_CODE (x) == CONST_INT
6725 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6730 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6731 if ((val <<= 1) < 0)
6734 #if HOST_BITS_PER_WIDE_INT == 32
6735 if (GET_CODE (x) == CONST_INT && i >= 0)
6736 i += 32; /* zero-extend high-part was all 0's */
6737 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6739 val = CONST_DOUBLE_LOW (x);
6746 for ( ; i < 64; i++)
6747 if ((val <<= 1) < 0)
6752 fprintf (file, "%d", i + 1);
6756 if (GET_CODE (x) == MEM
6757 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6762 /* Like 'L', for third word of TImode */
6763 if (GET_CODE (x) == REG)
6764 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6765 else if (GET_CODE (x) == MEM)
6767 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6768 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6769 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6771 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6772 if (small_data_operand (x, GET_MODE (x)))
6773 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6774 reg_names[SMALL_DATA_REG]);
6779 /* X is a SYMBOL_REF. Write out the name preceded by a
6780 period and without any trailing data in brackets. Used for function
6781 names. If we are configured for System V (or the embedded ABI) on
6782 the PowerPC, do not emit the period, since those systems do not use
6783 TOCs and the like. */
6784 if (GET_CODE (x) != SYMBOL_REF)
6787 if (XSTR (x, 0)[0] != '.')
6789 switch (DEFAULT_ABI)
6799 case ABI_AIX_NODESC:
6805 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6807 assemble_name (file, XSTR (x, 0));
6812 /* Like 'L', for last word of TImode. */
6813 if (GET_CODE (x) == REG)
6814 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6815 else if (GET_CODE (x) == MEM)
6817 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6818 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6819 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6821 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6822 if (small_data_operand (x, GET_MODE (x)))
6823 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6824 reg_names[SMALL_DATA_REG]);
6828 /* Print AltiVec memory operand. */
6833 if (GET_CODE (x) != MEM)
6838 if (GET_CODE (tmp) == REG)
6839 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6840 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6842 if (REGNO (XEXP (tmp, 0)) == 0)
6843 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6844 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6846 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6847 reg_names[ REGNO (XEXP (tmp, 1)) ]);
6855 if (GET_CODE (x) == REG)
6856 fprintf (file, "%s", reg_names[REGNO (x)]);
6857 else if (GET_CODE (x) == MEM)
6859 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6860 know the width from the mode. */
6861 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6862 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6863 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6864 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6865 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6866 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6868 output_address (XEXP (x, 0));
6871 output_addr_const (file, x);
6875 output_operand_lossage ("invalid %%xn code");
6879 /* Print the address of an operand. */
6882 print_operand_address (file, x)
6886 if (GET_CODE (x) == REG)
6887 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6888 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6889 || GET_CODE (x) == LABEL_REF)
6891 output_addr_const (file, x);
6892 if (small_data_operand (x, GET_MODE (x)))
6893 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6894 reg_names[SMALL_DATA_REG]);
6895 else if (TARGET_TOC)
6898 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
6900 if (REGNO (XEXP (x, 0)) == 0)
6901 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6902 reg_names[ REGNO (XEXP (x, 0)) ]);
6904 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6905 reg_names[ REGNO (XEXP (x, 1)) ]);
6907 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
6909 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
6910 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6913 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6914 && CONSTANT_P (XEXP (x, 1)))
6916 output_addr_const (file, XEXP (x, 1));
6917 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6921 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6922 && CONSTANT_P (XEXP (x, 1)))
6924 fprintf (file, "lo16(");
6925 output_addr_const (file, XEXP (x, 1));
6926 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6929 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
6931 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
6933 rtx contains_minus = XEXP (x, 1);
6937 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6938 turn it into (sym) for output_addr_const. */
6939 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
6940 contains_minus = XEXP (contains_minus, 0);
6942 minus = XEXP (contains_minus, 0);
6943 symref = XEXP (minus, 0);
6944 XEXP (contains_minus, 0) = symref;
6949 name = XSTR (symref, 0);
6950 newname = alloca (strlen (name) + sizeof ("@toc"));
6951 strcpy (newname, name);
6952 strcat (newname, "@toc");
6953 XSTR (symref, 0) = newname;
6955 output_addr_const (file, XEXP (x, 1));
6957 XSTR (symref, 0) = name;
6958 XEXP (contains_minus, 0) = minus;
6961 output_addr_const (file, XEXP (x, 1));
6963 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6969 /* Target hook for assembling integer objects. The powerpc version has
6970 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6971 is defined. It also needs to handle DI-mode objects on 64-bit
6975 rs6000_assemble_integer (x, size, aligned_p)
6980 #ifdef RELOCATABLE_NEEDS_FIXUP
6981 /* Special handling for SI values. */
6982 if (size == 4 && aligned_p)
6984 extern int in_toc_section PARAMS ((void));
6985 static int recurse = 0;
6987 /* For -mrelocatable, we mark all addresses that need to be fixed up
6988 in the .fixup section. */
6989 if (TARGET_RELOCATABLE
6990 && !in_toc_section ()
6991 && !in_text_section ()
6993 && GET_CODE (x) != CONST_INT
6994 && GET_CODE (x) != CONST_DOUBLE
7000 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
7002 ASM_OUTPUT_LABEL (asm_out_file, buf);
7003 fprintf (asm_out_file, "\t.long\t(");
7004 output_addr_const (asm_out_file, x);
7005 fprintf (asm_out_file, ")@fixup\n");
7006 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
7007 ASM_OUTPUT_ALIGN (asm_out_file, 2);
7008 fprintf (asm_out_file, "\t.long\t");
7009 assemble_name (asm_out_file, buf);
7010 fprintf (asm_out_file, "\n\t.previous\n");
7014 /* Remove initial .'s to turn a -mcall-aixdesc function
7015 address into the address of the descriptor, not the function
7017 else if (GET_CODE (x) == SYMBOL_REF
7018 && XSTR (x, 0)[0] == '.'
7019 && DEFAULT_ABI == ABI_AIX)
7021 const char *name = XSTR (x, 0);
7022 while (*name == '.')
7025 fprintf (asm_out_file, "\t.long\t%s\n", name);
7029 #endif /* RELOCATABLE_NEEDS_FIXUP */
7030 return default_assemble_integer (x, size, aligned_p);
7034 rs6000_reverse_condition (mode, code)
7035 enum machine_mode mode;
7038 /* Reversal of FP compares takes care -- an ordered compare
7039 becomes an unordered compare and vice versa. */
7040 if (mode == CCFPmode)
7041 return reverse_condition_maybe_unordered (code);
7043 return reverse_condition (code);
7046 /* Generate a compare for CODE. Return a brand-new rtx that
7047 represents the result of the compare. */
7050 rs6000_generate_compare (code)
7053 enum machine_mode comp_mode;
7056 if (rs6000_compare_fp_p)
7057 comp_mode = CCFPmode;
7058 else if (code == GTU || code == LTU
7059 || code == GEU || code == LEU)
7060 comp_mode = CCUNSmode;
7064 /* First, the compare. */
7065 compare_result = gen_reg_rtx (comp_mode);
7066 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
7067 gen_rtx_COMPARE (comp_mode,
7069 rs6000_compare_op1)));
7071 /* Some kinds of FP comparisons need an OR operation;
7072 except for flag_unsafe_math_optimizations we don't bother. */
7073 if (rs6000_compare_fp_p
7074 && ! flag_unsafe_math_optimizations
7075 && (code == LE || code == GE
7076 || code == UNEQ || code == LTGT
7077 || code == UNGT || code == UNLT))
7079 enum rtx_code or1, or2;
7080 rtx or1_rtx, or2_rtx, compare2_rtx;
7081 rtx or_result = gen_reg_rtx (CCEQmode);
7085 case LE: or1 = LT; or2 = EQ; break;
7086 case GE: or1 = GT; or2 = EQ; break;
7087 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
7088 case LTGT: or1 = LT; or2 = GT; break;
7089 case UNGT: or1 = UNORDERED; or2 = GT; break;
7090 case UNLT: or1 = UNORDERED; or2 = LT; break;
7093 validate_condition_mode (or1, comp_mode);
7094 validate_condition_mode (or2, comp_mode);
7095 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
7096 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
7097 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
7098 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
7100 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
7102 compare_result = or_result;
7106 validate_condition_mode (code, GET_MODE (compare_result));
7108 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
7112 /* Emit the RTL for an sCOND pattern. */
7115 rs6000_emit_sCOND (code, result)
7120 enum machine_mode op_mode;
7122 condition_rtx = rs6000_generate_compare (code);
7124 op_mode = GET_MODE (rs6000_compare_op0);
7125 if (op_mode == VOIDmode)
7126 op_mode = GET_MODE (rs6000_compare_op1);
7128 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7130 PUT_MODE (condition_rtx, DImode);
7131 convert_move (result, condition_rtx, 0);
7135 PUT_MODE (condition_rtx, SImode);
7136 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7140 /* Emit a branch of kind CODE to location LOC. */
7143 rs6000_emit_cbranch (code, loc)
7147 rtx condition_rtx, loc_ref;
7149 condition_rtx = rs6000_generate_compare (code);
7150 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7151 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7152 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7156 /* Return the string to output a conditional branch to LABEL, which is
7157 the operand number of the label, or -1 if the branch is really a
7160 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7161 condition code register and its mode specifies what kind of
7164 REVERSED is non-zero if we should reverse the sense of the comparison.
7166 INSN is the insn. */
7169 output_cbranch (op, label, reversed, insn)
7175 static char string[64];
7176 enum rtx_code code = GET_CODE (op);
7177 rtx cc_reg = XEXP (op, 0);
7178 enum machine_mode mode = GET_MODE (cc_reg);
7179 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
7180 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
7181 int really_reversed = reversed ^ need_longbranch;
7187 validate_condition_mode (code, mode);
7189 /* Work out which way this really branches. We could use
7190 reverse_condition_maybe_unordered here always but this
7191 makes the resulting assembler clearer. */
7192 if (really_reversed)
7193 code = rs6000_reverse_condition (mode, code);
7197 /* Not all of these are actually distinct opcodes, but
7198 we distinguish them for clarity of the resulting assembler. */
7200 ccode = "ne"; break;
7202 ccode = "eq"; break;
7204 ccode = "ge"; break;
7205 case GT: case GTU: case UNGT:
7206 ccode = "gt"; break;
7208 ccode = "le"; break;
7209 case LT: case LTU: case UNLT:
7210 ccode = "lt"; break;
7211 case UNORDERED: ccode = "un"; break;
7212 case ORDERED: ccode = "nu"; break;
7213 case UNGE: ccode = "nl"; break;
7214 case UNLE: ccode = "ng"; break;
7219 /* Maybe we have a guess as to how likely the branch is.
7220 The old mnemonics don't have a way to specify this information. */
7221 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
7222 if (note != NULL_RTX)
7224 /* PROB is the difference from 50%. */
7225 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7227 /* For branches that are very close to 50%, assume not-taken. */
7228 if (abs (prob) > REG_BR_PROB_BASE / 20
7229 && ((prob > 0) ^ need_longbranch))
7238 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7240 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7242 /* We need to escape any '%' characters in the reg_names string.
7243 Assume they'd only be the first character... */
7244 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7246 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7250 /* If the branch distance was too far, we may have to use an
7251 unconditional branch to go the distance. */
7252 if (need_longbranch)
7253 s += sprintf (s, ",$+8\n\tb %s", label);
7255 s += sprintf (s, ",%s", label);
7261 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7262 operands of the last comparison is nonzero/true, FALSE_COND if it
7263 is zero/false. Return 0 if the hardware has no such operation. */
7266 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7272 enum rtx_code code = GET_CODE (op);
7273 rtx op0 = rs6000_compare_op0;
7274 rtx op1 = rs6000_compare_op1;
7276 enum machine_mode compare_mode = GET_MODE (op0);
7277 enum machine_mode result_mode = GET_MODE (dest);
7280 /* These modes should always match. */
7281 if (GET_MODE (op1) != compare_mode)
7283 if (GET_MODE (true_cond) != result_mode)
7285 if (GET_MODE (false_cond) != result_mode)
7288 /* First, work out if the hardware can do this at all, or
7289 if it's too slow... */
7290 /* If the comparison is an integer one, since we only have fsel
7291 it'll be cheaper to use a branch. */
7292 if (! rs6000_compare_fp_p)
7295 /* Eliminate half of the comparisons by switching operands, this
7296 makes the remaining code simpler. */
7297 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7298 || code == LTGT || code == LT)
7300 code = reverse_condition_maybe_unordered (code);
7302 true_cond = false_cond;
7306 /* UNEQ and LTGT take four instructions for a comparison with zero,
7307 it'll probably be faster to use a branch here too. */
7311 if (GET_CODE (op1) == CONST_DOUBLE)
7312 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7314 /* We're going to try to implement comparions by performing
7315 a subtract, then comparing against zero. Unfortunately,
7316 Inf - Inf is NaN which is not zero, and so if we don't
7317 know that the the operand is finite and the comparison
7318 would treat EQ different to UNORDERED, we can't do it. */
7319 if (! flag_unsafe_math_optimizations
7320 && code != GT && code != UNGE
7321 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7322 /* Constructs of the form (a OP b ? a : b) are safe. */
7323 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7324 || (! rtx_equal_p (op0, true_cond)
7325 && ! rtx_equal_p (op1, true_cond))))
7327 /* At this point we know we can use fsel. */
7329 /* Reduce the comparison to a comparison against zero. */
7330 temp = gen_reg_rtx (compare_mode);
7331 emit_insn (gen_rtx_SET (VOIDmode, temp,
7332 gen_rtx_MINUS (compare_mode, op0, op1)));
7334 op1 = CONST0_RTX (compare_mode);
7336 /* If we don't care about NaNs we can reduce some of the comparisons
7337 down to faster ones. */
7338 if (flag_unsafe_math_optimizations)
7344 true_cond = false_cond;
7357 /* Now, reduce everything down to a GE. */
7364 temp = gen_reg_rtx (compare_mode);
7365 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7370 temp = gen_reg_rtx (compare_mode);
7371 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
7376 temp = gen_reg_rtx (compare_mode);
7377 emit_insn (gen_rtx_SET (VOIDmode, temp,
7378 gen_rtx_NEG (compare_mode,
7379 gen_rtx_ABS (compare_mode, op0))));
7384 temp = gen_reg_rtx (result_mode);
7385 emit_insn (gen_rtx_SET (VOIDmode, temp,
7386 gen_rtx_IF_THEN_ELSE (result_mode,
7387 gen_rtx_GE (VOIDmode,
7389 true_cond, false_cond)));
7391 true_cond = false_cond;
7393 temp = gen_reg_rtx (compare_mode);
7394 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7399 temp = gen_reg_rtx (result_mode);
7400 emit_insn (gen_rtx_SET (VOIDmode, temp,
7401 gen_rtx_IF_THEN_ELSE (result_mode,
7402 gen_rtx_GE (VOIDmode,
7404 true_cond, false_cond)));
7406 false_cond = true_cond;
7408 temp = gen_reg_rtx (compare_mode);
7409 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7417 emit_insn (gen_rtx_SET (VOIDmode, dest,
7418 gen_rtx_IF_THEN_ELSE (result_mode,
7419 gen_rtx_GE (VOIDmode,
7421 true_cond, false_cond)));
7426 rs6000_emit_minmax (dest, code, op0, op1)
7432 enum machine_mode mode = GET_MODE (op0);
7434 if (code == SMAX || code == UMAX)
7435 target = emit_conditional_move (dest, GE, op0, op1, mode,
7438 target = emit_conditional_move (dest, GE, op0, op1, mode,
7440 if (target == NULL_RTX)
7443 emit_move_insn (dest, target);
7446 /* This page contains routines that are used to determine what the
7447 function prologue and epilogue code will do and write them out. */
7449 /* Return the first fixed-point register that is required to be
7450 saved. 32 if none. */
7453 first_reg_to_save ()
7457 /* Find lowest numbered live register. */
7458 for (first_reg = 13; first_reg <= 31; first_reg++)
7459 if (regs_ever_live[first_reg]
7460 && (! call_used_regs[first_reg]
7461 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
7462 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7463 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7466 if (current_function_profile)
7468 /* AIX must save/restore every register that contains a parameter
7469 before/after the .__mcount call plus an additional register
7470 for the static chain, if needed; use registers from 30 down to 22
7472 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7474 int last_parm_reg, profile_first_reg;
7476 /* Figure out last used parameter register. The proper thing
7477 to do is to walk incoming args of the function. A function
7478 might have live parameter registers even if it has no
7480 for (last_parm_reg = 10;
7481 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7485 /* Calculate first reg for saving parameter registers
7487 Skip reg 31 which may contain the frame pointer. */
7488 profile_first_reg = (33 - last_parm_reg
7489 - (current_function_needs_context ? 1 : 0));
7491 /* Need to skip another reg to account for R31 being PICBASE
7492 (when flag_pic is set) or R30 being used as the frame
7493 pointer (when flag_pic is not set). */
7494 --profile_first_reg;
7496 /* Do not save frame pointer if no parameters needs to be saved. */
7497 if (profile_first_reg == 31)
7498 profile_first_reg = 32;
7500 if (first_reg > profile_first_reg)
7501 first_reg = profile_first_reg;
7504 /* SVR4 may need one register to preserve the static chain. */
7505 else if (current_function_needs_context)
7507 /* Skip reg 31 which may contain the frame pointer. */
7514 if (flag_pic && current_function_uses_pic_offset_table &&
7515 (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
7516 return RS6000_PIC_OFFSET_TABLE_REGNUM;
7522 /* Similar, for FP regs. */
7525 first_fp_reg_to_save ()
7529 /* Find lowest numbered live register. */
7530 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7531 if (regs_ever_live[first_reg])
7537 /* Similar, for AltiVec regs. */
7540 first_altivec_reg_to_save ()
7544 /* Stack frame remains as is unless we are in AltiVec ABI. */
7545 if (! TARGET_ALTIVEC_ABI)
7546 return LAST_ALTIVEC_REGNO + 1;
7548 /* Find lowest numbered live register. */
7549 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7550 if (regs_ever_live[i])
7556 /* Return a 32-bit mask of the AltiVec registers we need to set in
7557 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7558 the 32-bit word is 0. */
7561 compute_vrsave_mask ()
7563 unsigned int i, mask = 0;
7565 /* First, find out if we use _any_ altivec registers. */
7566 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7567 if (regs_ever_live[i])
7568 mask |= ALTIVEC_REG_BIT (i);
7573 /* Next, add all registers that are call-clobbered. We do this
7574 because post-reload register optimizers such as regrename_optimize
7575 may choose to use them. They never change the register class
7576 chosen by reload, so cannot create new uses of altivec registers
7577 if there were none before, so the early exit above is safe. */
7578 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7579 altivec registers not saved in the mask, which might well make the
7580 adjustments below more effective in eliding the save/restore of
7581 VRSAVE in small functions. */
7582 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7583 if (call_used_regs[i])
7584 mask |= ALTIVEC_REG_BIT (i);
7586 /* Next, remove the argument registers from the set. These must
7587 be in the VRSAVE mask set by the caller, so we don't need to add
7588 them in again. More importantly, the mask we compute here is
7589 used to generate CLOBBERs in the set_vrsave insn, and we do not
7590 wish the argument registers to die. */
7591 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7592 mask &= ~ALTIVEC_REG_BIT (i);
7594 /* Similarly, remove the return value from the set. */
7597 diddle_return_value (is_altivec_return_reg, &yes);
7599 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7606 is_altivec_return_reg (reg, xyes)
7610 bool *yes = (bool *) xyes;
7611 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7616 /* Calculate the stack information for the current function. This is
7617 complicated by having two separate calling sequences, the AIX calling
7618 sequence and the V.4 calling sequence.
7620 AIX (and Darwin/Mac OS X) stack frames look like:
7622 SP----> +---------------------------------------+
7623 | back chain to caller | 0 0
7624 +---------------------------------------+
7625 | saved CR | 4 8 (8-11)
7626 +---------------------------------------+
7628 +---------------------------------------+
7629 | reserved for compilers | 12 24
7630 +---------------------------------------+
7631 | reserved for binders | 16 32
7632 +---------------------------------------+
7633 | saved TOC pointer | 20 40
7634 +---------------------------------------+
7635 | Parameter save area (P) | 24 48
7636 +---------------------------------------+
7637 | Alloca space (A) | 24+P etc.
7638 +---------------------------------------+
7639 | Local variable space (L) | 24+P+A
7640 +---------------------------------------+
7641 | Float/int conversion temporary (X) | 24+P+A+L
7642 +---------------------------------------+
7643 | Save area for AltiVec registers (W) | 24+P+A+L+X
7644 +---------------------------------------+
7645 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7646 +---------------------------------------+
7647 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7648 +---------------------------------------+
7649 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
7650 +---------------------------------------+
7651 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
7652 +---------------------------------------+
7653 old SP->| back chain to caller's caller |
7654 +---------------------------------------+
7656 The required alignment for AIX configurations is two words (i.e., 8
7660 V.4 stack frames look like:
7662 SP----> +---------------------------------------+
7663 | back chain to caller | 0
7664 +---------------------------------------+
7665 | caller's saved LR | 4
7666 +---------------------------------------+
7667 | Parameter save area (P) | 8
7668 +---------------------------------------+
7669 | Alloca space (A) | 8+P
7670 +---------------------------------------+
7671 | Varargs save area (V) | 8+P+A
7672 +---------------------------------------+
7673 | Local variable space (L) | 8+P+A+V
7674 +---------------------------------------+
7675 | Float/int conversion temporary (X) | 8+P+A+V+L
7676 +---------------------------------------+
7677 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7678 +---------------------------------------+
7679 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7680 +---------------------------------------+
7681 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7682 +---------------------------------------+
7683 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7684 +---------------------------------------+
7685 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7686 +---------------------------------------+
7687 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7688 +---------------------------------------+
7689 old SP->| back chain to caller's caller |
7690 +---------------------------------------+
7692 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7693 given. (But note below and in sysv4.h that we require only 8 and
7694 may round up the size of our stack frame anyways. The historical
7695 reason is early versions of powerpc-linux which didn't properly
7696 align the stack at program startup. A happy side-effect is that
7697 -mno-eabi libraries can be used with -meabi programs.)
7699 The EABI configuration defaults to the V.4 layout, unless
7700 -mcall-aix is used, in which case the AIX layout is used. However,
7701 the stack alignment requirements may differ. If -mno-eabi is not
7702 given, the required stack alignment is 8 bytes; if -mno-eabi is
7703 given, the required alignment is 16 bytes. (But see V.4 comment
7706 #ifndef ABI_STACK_BOUNDARY
7707 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7711 rs6000_stack_info ()
7713 static rs6000_stack_t info, zero_info;
7714 rs6000_stack_t *info_ptr = &info;
7715 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7716 enum rs6000_abi abi;
7720 /* Zero all fields portably. */
7723 /* Select which calling sequence. */
7724 info_ptr->abi = abi = DEFAULT_ABI;
7726 /* Calculate which registers need to be saved & save area size. */
7727 info_ptr->first_gp_reg_save = first_reg_to_save ();
7728 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
7729 even if it currently looks like we won't. */
7730 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7731 || (flag_pic == 1 && abi == ABI_V4)
7732 || (flag_pic && abi == ABI_DARWIN))
7733 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
7734 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
7736 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7738 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7739 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7741 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7742 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7743 - info_ptr->first_altivec_reg_save);
7745 /* Does this function call anything? */
7746 info_ptr->calls_p = (! current_function_is_leaf
7747 || cfun->machine->ra_needs_full_frame);
7749 /* Determine if we need to save the link register. */
7750 if (rs6000_ra_ever_killed ()
7751 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7752 #ifdef TARGET_RELOCATABLE
7753 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7755 || (info_ptr->first_fp_reg_save != 64
7756 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7757 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7758 || (abi == ABI_V4 && current_function_calls_alloca)
7759 || (DEFAULT_ABI == ABI_DARWIN
7761 && current_function_uses_pic_offset_table)
7762 || info_ptr->calls_p)
7764 info_ptr->lr_save_p = 1;
7765 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7768 /* Determine if we need to save the condition code registers. */
7769 if (regs_ever_live[CR2_REGNO]
7770 || regs_ever_live[CR3_REGNO]
7771 || regs_ever_live[CR4_REGNO])
7773 info_ptr->cr_save_p = 1;
7775 info_ptr->cr_size = reg_size;
7778 /* If the current function calls __builtin_eh_return, then we need
7779 to allocate stack space for registers that will hold data for
7780 the exception handler. */
7781 if (current_function_calls_eh_return)
7784 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7786 ehrd_size = i * UNITS_PER_WORD;
7791 /* Determine various sizes. */
7792 info_ptr->reg_size = reg_size;
7793 info_ptr->fixed_size = RS6000_SAVE_AREA;
7794 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7795 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7796 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7799 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
7801 info_ptr->vrsave_mask = compute_vrsave_mask ();
7802 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7806 info_ptr->vrsave_mask = 0;
7807 info_ptr->vrsave_size = 0;
7810 /* Calculate the offsets. */
7818 case ABI_AIX_NODESC:
7820 info_ptr->fp_save_offset = - info_ptr->fp_size;
7821 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7823 if (TARGET_ALTIVEC_ABI)
7825 info_ptr->vrsave_save_offset
7826 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7828 /* Align stack so vector save area is on a quadword boundary. */
7829 if (info_ptr->altivec_size != 0)
7830 info_ptr->altivec_padding_size
7831 = 16 - (-info_ptr->vrsave_save_offset % 16);
7833 info_ptr->altivec_padding_size = 0;
7835 info_ptr->altivec_save_offset
7836 = info_ptr->vrsave_save_offset
7837 - info_ptr->altivec_padding_size
7838 - info_ptr->altivec_size;
7840 /* Adjust for AltiVec case. */
7841 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7844 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7845 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7846 info_ptr->lr_save_offset = 2*reg_size;
7850 info_ptr->fp_save_offset = - info_ptr->fp_size;
7851 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7852 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7854 if (TARGET_ALTIVEC_ABI)
7856 info_ptr->vrsave_save_offset
7857 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7859 /* Align stack so vector save area is on a quadword boundary. */
7860 if (info_ptr->altivec_size != 0)
7861 info_ptr->altivec_padding_size
7862 = 16 - (-info_ptr->vrsave_save_offset % 16);
7864 info_ptr->altivec_padding_size = 0;
7866 info_ptr->altivec_save_offset
7867 = info_ptr->vrsave_save_offset
7868 - info_ptr->altivec_padding_size
7869 - info_ptr->altivec_size;
7871 /* Adjust for AltiVec case. */
7872 info_ptr->toc_save_offset
7873 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7876 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7877 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7878 info_ptr->lr_save_offset = reg_size;
7882 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7884 + info_ptr->altivec_size
7885 + info_ptr->altivec_padding_size
7886 + info_ptr->vrsave_size
7890 + info_ptr->vrsave_size
7891 + info_ptr->toc_size,
7892 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7895 total_raw_size = (info_ptr->vars_size
7896 + info_ptr->parm_size
7897 + info_ptr->save_size
7898 + info_ptr->varargs_size
7899 + info_ptr->fixed_size);
7901 info_ptr->total_size =
7902 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7904 /* Determine if we need to allocate any stack frame:
7906 For AIX we need to push the stack if a frame pointer is needed
7907 (because the stack might be dynamically adjusted), if we are
7908 debugging, if we make calls, or if the sum of fp_save, gp_save,
7909 and local variables are more than the space needed to save all
7910 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7911 + 18*8 = 288 (GPR13 reserved).
7913 For V.4 we don't have the stack cushion that AIX uses, but assume
7914 that the debugger can handle stackless frames. */
7916 if (info_ptr->calls_p)
7917 info_ptr->push_p = 1;
7919 else if (abi == ABI_V4)
7920 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
7923 info_ptr->push_p = (frame_pointer_needed
7924 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7925 || ((total_raw_size - info_ptr->fixed_size)
7926 > (TARGET_32BIT ? 220 : 288)));
7928 /* Zero offsets if we're not saving those registers. */
7929 if (info_ptr->fp_size == 0)
7930 info_ptr->fp_save_offset = 0;
7932 if (info_ptr->gp_size == 0)
7933 info_ptr->gp_save_offset = 0;
7935 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7936 info_ptr->altivec_save_offset = 0;
7938 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7939 info_ptr->vrsave_save_offset = 0;
7941 if (! info_ptr->lr_save_p)
7942 info_ptr->lr_save_offset = 0;
7944 if (! info_ptr->cr_save_p)
7945 info_ptr->cr_save_offset = 0;
7947 if (! info_ptr->toc_save_p)
7948 info_ptr->toc_save_offset = 0;
7954 debug_stack_info (info)
7955 rs6000_stack_t *info;
7957 const char *abi_string;
7960 info = rs6000_stack_info ();
7962 fprintf (stderr, "\nStack information for function %s:\n",
7963 ((current_function_decl && DECL_NAME (current_function_decl))
7964 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7969 default: abi_string = "Unknown"; break;
7970 case ABI_NONE: abi_string = "NONE"; break;
7972 case ABI_AIX_NODESC: abi_string = "AIX"; break;
7973 case ABI_DARWIN: abi_string = "Darwin"; break;
7974 case ABI_V4: abi_string = "V.4"; break;
7977 fprintf (stderr, "\tABI = %5s\n", abi_string);
7979 if (TARGET_ALTIVEC_ABI)
7980 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
7982 if (info->first_gp_reg_save != 32)
7983 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
7985 if (info->first_fp_reg_save != 64)
7986 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
7988 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7989 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7990 info->first_altivec_reg_save);
7992 if (info->lr_save_p)
7993 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
7995 if (info->cr_save_p)
7996 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
7998 if (info->toc_save_p)
7999 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
8001 if (info->vrsave_mask)
8002 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
8005 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
8008 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
8010 if (info->gp_save_offset)
8011 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
8013 if (info->fp_save_offset)
8014 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
8016 if (info->altivec_save_offset)
8017 fprintf (stderr, "\taltivec_save_offset = %5d\n",
8018 info->altivec_save_offset);
8020 if (info->vrsave_save_offset)
8021 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
8022 info->vrsave_save_offset);
8024 if (info->lr_save_offset)
8025 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
8027 if (info->cr_save_offset)
8028 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
8030 if (info->toc_save_offset)
8031 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
8033 if (info->varargs_save_offset)
8034 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
8036 if (info->total_size)
8037 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
8039 if (info->varargs_size)
8040 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
8042 if (info->vars_size)
8043 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
8045 if (info->parm_size)
8046 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
8048 if (info->fixed_size)
8049 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
8052 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
8055 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
8057 if (info->altivec_size)
8058 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
8060 if (info->vrsave_size)
8061 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
8063 if (info->altivec_padding_size)
8064 fprintf (stderr, "\taltivec_padding_size= %5d\n",
8065 info->altivec_padding_size);
8068 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
8071 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
8074 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
8076 if (info->save_size)
8077 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
8079 if (info->reg_size != 4)
8080 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
8082 fprintf (stderr, "\n");
8086 rs6000_return_addr (count, frame)
8090 /* Currently we don't optimize very well between prolog and body
8091 code and for PIC code the code can be actually quite bad, so
8092 don't try to be too clever here. */
8093 if (count != 0 || flag_pic != 0)
8095 cfun->machine->ra_needs_full_frame = 1;
8102 plus_constant (copy_to_reg
8103 (gen_rtx_MEM (Pmode,
8104 memory_address (Pmode, frame))),
8105 RETURN_ADDRESS_OFFSET)));
8108 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
8112 rs6000_ra_ever_killed ()
8116 #ifdef ASM_OUTPUT_MI_THUNK
8117 if (current_function_is_thunk)
8120 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8121 || cfun->machine->ra_needs_full_frame)
8122 return regs_ever_live[LINK_REGISTER_REGNUM];
8124 push_topmost_sequence ();
8126 pop_topmost_sequence ();
8128 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8132 /* Add a REG_MAYBE_DEAD note to the insn. */
8134 rs6000_maybe_dead (insn)
8137 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8142 /* Emit instructions needed to load the TOC register.
8143 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8144 a constant pool; or for SVR4 -fpic. */
8147 rs6000_emit_load_toc_table (fromprolog)
8151 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
8153 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
8155 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8157 rtx temp = (fromprolog
8158 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8159 : gen_reg_rtx (Pmode));
8160 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
8161 rs6000_maybe_dead (emit_move_insn (dest, temp));
8163 else if (flag_pic == 2)
8166 rtx tempLR = (fromprolog
8167 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8168 : gen_reg_rtx (Pmode));
8169 rtx temp0 = (fromprolog
8170 ? gen_rtx_REG (Pmode, 0)
8171 : gen_reg_rtx (Pmode));
8174 /* possibly create the toc section */
8175 if (! toc_initialized)
8178 function_section (current_function_decl);
8185 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
8186 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8188 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
8189 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8191 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
8193 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8194 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
8201 static int reload_toc_labelno = 0;
8203 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
8205 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
8206 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8208 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
8211 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8212 rs6000_maybe_dead (emit_move_insn (temp0,
8213 gen_rtx_MEM (Pmode, dest)));
8215 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8217 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8219 /* This is for AIX code running in non-PIC ELF. */
8222 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8223 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8225 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8226 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
8234 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8236 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
8241 get_TOC_alias_set ()
8243 static int set = -1;
8245 set = new_alias_set ();
8249 /* This retuns nonzero if the current function uses the TOC. This is
8250 determined by the presence of (unspec ... 7), which is generated by
8251 the various load_toc_* patterns. */
8258 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8261 rtx pat = PATTERN (insn);
8264 if (GET_CODE (pat) == PARALLEL)
8265 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8266 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8267 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8274 create_TOC_reference (symbol)
8277 return gen_rtx_PLUS (Pmode,
8278 gen_rtx_REG (Pmode, TOC_REGISTER),
8279 gen_rtx_CONST (Pmode,
8280 gen_rtx_MINUS (Pmode, symbol,
8281 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8285 /* __throw will restore its own return address to be the same as the
8286 return address of the function that the throw is being made to.
8287 This is unfortunate, because we want to check the original
8288 return address to see if we need to restore the TOC.
8289 So we have to squirrel it away here.
8290 This is used only in compiling __throw and __rethrow.
8292 Most of this code should be removed by CSE. */
8293 static rtx insn_after_throw;
8295 /* This does the saving... */
8297 rs6000_aix_emit_builtin_unwind_init ()
8300 rtx stack_top = gen_reg_rtx (Pmode);
8301 rtx opcode_addr = gen_reg_rtx (Pmode);
8303 insn_after_throw = gen_reg_rtx (SImode);
8305 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8306 emit_move_insn (stack_top, mem);
8308 mem = gen_rtx_MEM (Pmode,
8309 gen_rtx_PLUS (Pmode, stack_top,
8310 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8311 emit_move_insn (opcode_addr, mem);
8312 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8315 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8316 in _eh.o). Only used on AIX.
8318 The idea is that on AIX, function calls look like this:
8319 bl somefunction-trampoline
8323 somefunction-trampoline:
8325 ... load function address in the count register ...
8327 or like this, if the linker determines that this is not a cross-module call
8328 and so the TOC need not be restored:
8331 or like this, if the compiler could determine that this is not a
8334 now, the tricky bit here is that register 2 is saved and restored
8335 by the _linker_, so we can't readily generate debugging information
8336 for it. So we need to go back up the call chain looking at the
8337 insns at return addresses to see which calls saved the TOC register
8338 and so see where it gets restored from.
8340 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8341 just before the actual epilogue.
8343 On the bright side, this incurs no space or time overhead unless an
8344 exception is thrown, except for the extra code in libgcc.a.
8346 The parameter STACKSIZE is a register containing (at runtime)
8347 the amount to be popped off the stack in addition to the stack frame
8348 of this routine (which will be __throw or __rethrow, and so is
8349 guaranteed to have a stack frame). */
8352 rs6000_emit_eh_toc_restore (stacksize)
8356 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8357 rtx tocompare = gen_reg_rtx (SImode);
8358 rtx opcode = gen_reg_rtx (SImode);
8359 rtx opcode_addr = gen_reg_rtx (Pmode);
8361 rtx loop_start = gen_label_rtx ();
8362 rtx no_toc_restore_needed = gen_label_rtx ();
8363 rtx loop_exit = gen_label_rtx ();
8365 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8366 set_mem_alias_set (mem, rs6000_sr_alias_set);
8367 emit_move_insn (bottom_of_stack, mem);
8369 top_of_stack = expand_binop (Pmode, add_optab,
8370 bottom_of_stack, stacksize,
8371 NULL_RTX, 1, OPTAB_WIDEN);
8373 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
8374 : 0xE8410028, SImode));
8376 if (insn_after_throw == NULL_RTX)
8378 emit_move_insn (opcode, insn_after_throw);
8380 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8381 emit_label (loop_start);
8383 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8384 SImode, NULL_RTX, NULL_RTX,
8385 no_toc_restore_needed);
8387 mem = gen_rtx_MEM (Pmode,
8388 gen_rtx_PLUS (Pmode, bottom_of_stack,
8389 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8390 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8392 emit_label (no_toc_restore_needed);
8393 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8394 Pmode, NULL_RTX, NULL_RTX,
8397 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8398 set_mem_alias_set (mem, rs6000_sr_alias_set);
8399 emit_move_insn (bottom_of_stack, mem);
8401 mem = gen_rtx_MEM (Pmode,
8402 gen_rtx_PLUS (Pmode, bottom_of_stack,
8403 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8404 emit_move_insn (opcode_addr, mem);
8405 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8407 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8408 emit_jump (loop_start);
8409 emit_note (NULL, NOTE_INSN_LOOP_END);
8410 emit_label (loop_exit);
8412 #endif /* TARGET_AIX */
8414 /* This ties together stack memory (MEM with an alias set of
8415 rs6000_sr_alias_set) and the change to the stack pointer. */
8418 rs6000_emit_stack_tie ()
8420 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8422 set_mem_alias_set (mem, rs6000_sr_alias_set);
8423 emit_insn (gen_stack_tie (mem));
8426 /* Emit the correct code for allocating stack space, as insns.
8427 If COPY_R12, make sure a copy of the old frame is left in r12.
8428 The generated code may use hard register 0 as a temporary. */
8431 rs6000_emit_allocate_stack (size, copy_r12)
8436 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8437 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
8438 rtx todec = GEN_INT (-size);
8440 if (current_function_limit_stack)
8442 if (REG_P (stack_limit_rtx)
8443 && REGNO (stack_limit_rtx) > 1
8444 && REGNO (stack_limit_rtx) <= 31)
8446 emit_insn (Pmode == SImode
8447 ? gen_addsi3 (tmp_reg,
8450 : gen_adddi3 (tmp_reg,
8454 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8457 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8459 && DEFAULT_ABI == ABI_V4)
8461 rtx toload = gen_rtx_CONST (VOIDmode,
8462 gen_rtx_PLUS (Pmode,
8466 emit_insn (gen_elf_high (tmp_reg, toload));
8467 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8468 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8472 warning ("stack limit expression is not supported");
8475 if (copy_r12 || ! TARGET_UPDATE)
8476 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8482 /* Need a note here so that try_split doesn't get confused. */
8483 if (get_last_insn() == NULL_RTX)
8484 emit_note (0, NOTE_INSN_DELETED);
8485 insn = emit_move_insn (tmp_reg, todec);
8486 try_split (PATTERN (insn), insn, 0);
8490 if (Pmode == SImode)
8491 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8494 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
8499 if (Pmode == SImode)
8500 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8502 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8503 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8504 gen_rtx_REG (Pmode, 12));
8507 RTX_FRAME_RELATED_P (insn) = 1;
8509 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8510 gen_rtx_SET (VOIDmode, stack_reg,
8511 gen_rtx_PLUS (Pmode, stack_reg,
8516 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8519 (mem (plus (blah) (regXX)))
8523 (mem (plus (blah) (const VALUE_OF_REGXX))). */
8526 altivec_frame_fixup (insn, reg, val)
8532 real = copy_rtx (PATTERN (insn));
8534 real = replace_rtx (real, reg, GEN_INT (val));
8536 RTX_FRAME_RELATED_P (insn) = 1;
8537 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8542 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8543 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8544 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8545 deduce these equivalences by itself so it wasn't necessary to hold
8546 its hand so much. */
8549 rs6000_frame_related (insn, reg, val, reg2, rreg)
8558 real = copy_rtx (PATTERN (insn));
8560 real = replace_rtx (real, reg,
8561 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8562 STACK_POINTER_REGNUM),
8565 /* We expect that 'real' is either a SET or a PARALLEL containing
8566 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8567 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8569 if (GET_CODE (real) == SET)
8573 temp = simplify_rtx (SET_SRC (set));
8575 SET_SRC (set) = temp;
8576 temp = simplify_rtx (SET_DEST (set));
8578 SET_DEST (set) = temp;
8579 if (GET_CODE (SET_DEST (set)) == MEM)
8581 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8583 XEXP (SET_DEST (set), 0) = temp;
8586 else if (GET_CODE (real) == PARALLEL)
8589 for (i = 0; i < XVECLEN (real, 0); i++)
8590 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8592 rtx set = XVECEXP (real, 0, i);
8594 temp = simplify_rtx (SET_SRC (set));
8596 SET_SRC (set) = temp;
8597 temp = simplify_rtx (SET_DEST (set));
8599 SET_DEST (set) = temp;
8600 if (GET_CODE (SET_DEST (set)) == MEM)
8602 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8604 XEXP (SET_DEST (set), 0) = temp;
8606 RTX_FRAME_RELATED_P (set) = 1;
8612 if (reg2 != NULL_RTX)
8613 real = replace_rtx (real, reg2, rreg);
8615 RTX_FRAME_RELATED_P (insn) = 1;
8616 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8621 /* Returns an insn that has a vrsave set operation with the
8622 appropriate CLOBBERs. */
8625 generate_set_vrsave (reg, info, epiloguep)
8627 rs6000_stack_t *info;
8631 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8632 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8635 = gen_rtx_SET (VOIDmode,
8637 gen_rtx_UNSPEC_VOLATILE (SImode,
8638 gen_rtvec (2, reg, vrsave),
8643 /* We need to clobber the registers in the mask so the scheduler
8644 does not move sets to VRSAVE before sets of AltiVec registers.
8646 However, if the function receives nonlocal gotos, reload will set
8647 all call saved registers live. We will end up with:
8649 (set (reg 999) (mem))
8650 (parallel [ (set (reg vrsave) (unspec blah))
8651 (clobber (reg 999))])
8653 The clobber will cause the store into reg 999 to be dead, and
8654 flow will attempt to delete an epilogue insn. In this case, we
8655 need an unspec use/set of the register. */
8657 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8658 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8660 if (!epiloguep || call_used_regs [i])
8661 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8662 gen_rtx_REG (V4SImode, i));
8665 rtx reg = gen_rtx_REG (V4SImode, i);
8668 = gen_rtx_SET (VOIDmode,
8670 gen_rtx_UNSPEC (V4SImode,
8671 gen_rtvec (1, reg), 27));
8675 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8677 for (i = 0; i < nclobs; ++i)
8678 XVECEXP (insn, 0, i) = clobs[i];
8683 /* Emit function prologue as insns. */
8686 rs6000_emit_prologue ()
8688 rs6000_stack_t *info = rs6000_stack_info ();
8689 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8690 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8691 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8692 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8693 rtx frame_reg_rtx = sp_reg_rtx;
8694 rtx cr_save_rtx = NULL;
8696 int saving_FPRs_inline;
8697 int using_store_multiple;
8698 HOST_WIDE_INT sp_offset = 0;
8700 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8701 && info->first_gp_reg_save < 31);
8702 saving_FPRs_inline = (info->first_fp_reg_save == 64
8703 || FP_SAVE_INLINE (info->first_fp_reg_save));
8705 /* For V.4, update stack before we do any saving and set back pointer. */
8706 if (info->push_p && DEFAULT_ABI == ABI_V4)
8708 if (info->total_size < 32767)
8709 sp_offset = info->total_size;
8711 frame_reg_rtx = frame_ptr_rtx;
8712 rs6000_emit_allocate_stack (info->total_size,
8713 (frame_reg_rtx != sp_reg_rtx
8716 || info->first_fp_reg_save < 64
8717 || info->first_gp_reg_save < 32
8719 if (frame_reg_rtx != sp_reg_rtx)
8720 rs6000_emit_stack_tie ();
8723 /* Save AltiVec registers if needed. */
8724 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8728 /* There should be a non inline version of this, for when we
8729 are saving lots of vector registers. */
8730 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8731 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8733 rtx areg, savereg, mem;
8736 offset = info->altivec_save_offset + sp_offset
8737 + 16 * (i - info->first_altivec_reg_save);
8739 savereg = gen_rtx_REG (V4SImode, i);
8741 areg = gen_rtx_REG (Pmode, 0);
8742 emit_move_insn (areg, GEN_INT (offset));
8744 /* AltiVec addressing mode is [reg+reg]. */
8745 mem = gen_rtx_MEM (V4SImode,
8746 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8748 set_mem_alias_set (mem, rs6000_sr_alias_set);
8750 insn = emit_move_insn (mem, savereg);
8752 altivec_frame_fixup (insn, areg, offset);
8756 /* VRSAVE is a bit vector representing which AltiVec registers
8757 are used. The OS uses this to determine which vector
8758 registers to save on a context switch. We need to save
8759 VRSAVE on the stack frame, add whatever AltiVec registers we
8760 used in this function, and do the corresponding magic in the
8763 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8765 rtx reg, mem, vrsave;
8768 /* Get VRSAVE onto a GPR. */
8769 reg = gen_rtx_REG (SImode, 12);
8770 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8772 emit_insn (gen_get_vrsave_internal (reg));
8774 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
8777 offset = info->vrsave_save_offset + sp_offset;
8779 = gen_rtx_MEM (SImode,
8780 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8781 set_mem_alias_set (mem, rs6000_sr_alias_set);
8782 insn = emit_move_insn (mem, reg);
8784 /* Include the registers in the mask. */
8785 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8787 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8790 /* If we use the link register, get it into r0. */
8791 if (info->lr_save_p)
8792 emit_move_insn (gen_rtx_REG (Pmode, 0),
8793 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8795 /* If we need to save CR, put it into r12. */
8796 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8798 cr_save_rtx = gen_rtx_REG (SImode, 12);
8799 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8802 /* Do any required saving of fpr's. If only one or two to save, do
8803 it ourselves. Otherwise, call function. */
8804 if (saving_FPRs_inline)
8807 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8808 if ((regs_ever_live[info->first_fp_reg_save+i]
8809 && ! call_used_regs[info->first_fp_reg_save+i]))
8812 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8813 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8814 GEN_INT (info->fp_save_offset
8817 mem = gen_rtx_MEM (DFmode, addr);
8818 set_mem_alias_set (mem, rs6000_sr_alias_set);
8820 insn = emit_move_insn (mem, reg);
8821 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8822 NULL_RTX, NULL_RTX);
8825 else if (info->first_fp_reg_save != 64)
8829 const char *alloc_rname;
8831 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8833 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8835 LINK_REGISTER_REGNUM));
8836 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8837 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8838 alloc_rname = ggc_strdup (rname);
8839 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8840 gen_rtx_SYMBOL_REF (Pmode,
8842 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8845 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8846 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8847 GEN_INT (info->fp_save_offset
8848 + sp_offset + 8*i));
8849 mem = gen_rtx_MEM (DFmode, addr);
8850 set_mem_alias_set (mem, rs6000_sr_alias_set);
8852 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8854 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8855 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8856 NULL_RTX, NULL_RTX);
8859 /* Save GPRs. This is done as a PARALLEL if we are using
8860 the store-multiple instructions. */
8861 if (using_store_multiple)
8865 p = rtvec_alloc (32 - info->first_gp_reg_save);
8866 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8867 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8870 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8871 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8872 GEN_INT (info->gp_save_offset
8875 mem = gen_rtx_MEM (reg_mode, addr);
8876 set_mem_alias_set (mem, rs6000_sr_alias_set);
8878 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8880 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8881 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8882 NULL_RTX, NULL_RTX);
8887 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8888 if ((regs_ever_live[info->first_gp_reg_save+i]
8889 && ! call_used_regs[info->first_gp_reg_save+i])
8890 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
8891 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8892 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8895 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8896 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8897 GEN_INT (info->gp_save_offset
8900 mem = gen_rtx_MEM (reg_mode, addr);
8901 set_mem_alias_set (mem, rs6000_sr_alias_set);
8903 insn = emit_move_insn (mem, reg);
8904 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8905 NULL_RTX, NULL_RTX);
8909 /* ??? There's no need to emit actual instructions here, but it's the
8910 easiest way to get the frame unwind information emitted. */
8911 if (current_function_calls_eh_return)
8913 unsigned int i, regno;
8919 regno = EH_RETURN_DATA_REGNO (i);
8920 if (regno == INVALID_REGNUM)
8923 reg = gen_rtx_REG (reg_mode, regno);
8924 addr = plus_constant (frame_reg_rtx,
8925 info->ehrd_offset + sp_offset
8926 + reg_size * (int) i);
8927 mem = gen_rtx_MEM (reg_mode, addr);
8928 set_mem_alias_set (mem, rs6000_sr_alias_set);
8930 insn = emit_move_insn (mem, reg);
8931 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8932 NULL_RTX, NULL_RTX);
8936 /* Save lr if we used it. */
8937 if (info->lr_save_p)
8939 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8940 GEN_INT (info->lr_save_offset + sp_offset));
8941 rtx reg = gen_rtx_REG (Pmode, 0);
8942 rtx mem = gen_rtx_MEM (Pmode, addr);
8943 /* This should not be of rs6000_sr_alias_set, because of
8944 __builtin_return_address. */
8946 insn = emit_move_insn (mem, reg);
8947 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8948 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8951 /* Save CR if we use any that must be preserved. */
8952 if (info->cr_save_p)
8954 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8955 GEN_INT (info->cr_save_offset + sp_offset));
8956 rtx mem = gen_rtx_MEM (SImode, addr);
8958 set_mem_alias_set (mem, rs6000_sr_alias_set);
8960 /* If r12 was used to hold the original sp, copy cr into r0 now
8962 if (REGNO (frame_reg_rtx) == 12)
8964 cr_save_rtx = gen_rtx_REG (SImode, 0);
8965 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8967 insn = emit_move_insn (mem, cr_save_rtx);
8969 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8970 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8971 OK. All we have to do is specify that _one_ condition code
8972 register is saved in this stack slot. The thrower's epilogue
8973 will then restore all the call-saved registers.
8974 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8975 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8976 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8979 /* Update stack and set back pointer unless this is V.4,
8980 for which it was done previously. */
8981 if (info->push_p && DEFAULT_ABI != ABI_V4)
8982 rs6000_emit_allocate_stack (info->total_size, FALSE);
8984 /* Set frame pointer, if needed. */
8985 if (frame_pointer_needed)
8987 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8989 RTX_FRAME_RELATED_P (insn) = 1;
8992 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8993 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8994 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8995 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
8997 /* If emit_load_toc_table will use the link register, we need to save
8998 it. We use R11 for this purpose because emit_load_toc_table
8999 can use register 0. This allows us to use a plain 'blr' to return
9000 from the procedure more often. */
9001 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
9002 && ! info->lr_save_p
9003 && EXIT_BLOCK_PTR->pred != NULL);
9004 if (save_LR_around_toc_setup)
9005 emit_move_insn (gen_rtx_REG (Pmode, 11),
9006 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
9008 rs6000_emit_load_toc_table (TRUE);
9010 if (save_LR_around_toc_setup)
9011 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9012 gen_rtx_REG (Pmode, 11));
9015 if (DEFAULT_ABI == ABI_DARWIN
9016 && flag_pic && current_function_uses_pic_offset_table)
9018 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9020 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
9023 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
9024 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
9028 /* Write function prologue. */
9031 rs6000_output_function_prologue (file, size)
9033 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9035 rs6000_stack_t *info = rs6000_stack_info ();
9037 if (TARGET_DEBUG_STACK)
9038 debug_stack_info (info);
9040 /* Write .extern for any function we will call to save and restore
9042 if (info->first_fp_reg_save < 64
9043 && !FP_SAVE_INLINE (info->first_fp_reg_save))
9044 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
9045 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
9046 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
9049 /* Write .extern for AIX common mode routines, if needed. */
9050 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
9052 fputs ("\t.extern __mulh\n", file);
9053 fputs ("\t.extern __mull\n", file);
9054 fputs ("\t.extern __divss\n", file);
9055 fputs ("\t.extern __divus\n", file);
9056 fputs ("\t.extern __quoss\n", file);
9057 fputs ("\t.extern __quous\n", file);
9058 common_mode_defined = 1;
9061 if (! HAVE_prologue)
9065 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
9066 the "toplevel" insn chain. */
9067 emit_note (0, NOTE_INSN_DELETED);
9068 rs6000_emit_prologue ();
9069 emit_note (0, NOTE_INSN_DELETED);
9071 /* Expand INSN_ADDRESSES so final() doesn't crash. */
9075 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
9077 INSN_ADDRESSES_NEW (insn, addr);
9082 if (TARGET_DEBUG_STACK)
9083 debug_rtx_list (get_insns (), 100);
9084 final (get_insns (), file, FALSE, FALSE);
9088 rs6000_pic_labelno++;
9091 /* Emit function epilogue as insns.
9093 At present, dwarf2out_frame_debug_expr doesn't understand
9094 register restores, so we don't bother setting RTX_FRAME_RELATED_P
9095 anywhere in the epilogue. Most of the insns below would in any case
9096 need special notes to explain where r11 is in relation to the stack. */
9099 rs6000_emit_epilogue (sibcall)
9102 rs6000_stack_t *info;
9103 int restoring_FPRs_inline;
9104 int using_load_multiple;
9105 int using_mfcr_multiple;
9106 int use_backchain_to_restore_sp;
9108 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
9109 rtx frame_reg_rtx = sp_reg_rtx;
9110 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
9111 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9114 info = rs6000_stack_info ();
9115 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
9116 && info->first_gp_reg_save < 31);
9117 restoring_FPRs_inline = (sibcall
9118 || current_function_calls_eh_return
9119 || info->first_fp_reg_save == 64
9120 || FP_SAVE_INLINE (info->first_fp_reg_save));
9121 use_backchain_to_restore_sp = (frame_pointer_needed
9122 || current_function_calls_alloca
9123 || info->total_size > 32767);
9124 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9125 || rs6000_cpu == PROCESSOR_PPC603
9126 || rs6000_cpu == PROCESSOR_PPC750
9129 /* If we have a frame pointer, a call to alloca, or a large stack
9130 frame, restore the old stack pointer using the backchain. Otherwise,
9131 we know what size to update it with. */
9132 if (use_backchain_to_restore_sp)
9134 /* Under V.4, don't reset the stack pointer until after we're done
9135 loading the saved registers. */
9136 if (DEFAULT_ABI == ABI_V4)
9137 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
9139 emit_move_insn (frame_reg_rtx,
9140 gen_rtx_MEM (Pmode, sp_reg_rtx));
9143 else if (info->push_p)
9145 if (DEFAULT_ABI == ABI_V4)
9146 sp_offset = info->total_size;
9149 emit_insn (TARGET_32BIT
9150 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9151 GEN_INT (info->total_size))
9152 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9153 GEN_INT (info->total_size)));
9157 /* Restore AltiVec registers if needed. */
9158 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9162 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
9163 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9165 rtx addr, areg, mem;
9167 areg = gen_rtx_REG (Pmode, 0);
9169 (areg, GEN_INT (info->altivec_save_offset
9171 + 16 * (i - info->first_altivec_reg_save)));
9173 /* AltiVec addressing mode is [reg+reg]. */
9174 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
9175 mem = gen_rtx_MEM (V4SImode, addr);
9176 set_mem_alias_set (mem, rs6000_sr_alias_set);
9178 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
9182 /* Restore VRSAVE if needed. */
9183 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
9187 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9188 GEN_INT (info->vrsave_save_offset + sp_offset));
9189 mem = gen_rtx_MEM (SImode, addr);
9190 set_mem_alias_set (mem, rs6000_sr_alias_set);
9191 reg = gen_rtx_REG (SImode, 12);
9192 emit_move_insn (reg, mem);
9194 emit_insn (generate_set_vrsave (reg, info, 1));
9197 /* Get the old lr if we saved it. */
9198 if (info->lr_save_p)
9200 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9201 GEN_INT (info->lr_save_offset + sp_offset));
9202 rtx mem = gen_rtx_MEM (Pmode, addr);
9204 set_mem_alias_set (mem, rs6000_sr_alias_set);
9206 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9209 /* Get the old cr if we saved it. */
9210 if (info->cr_save_p)
9212 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9213 GEN_INT (info->cr_save_offset + sp_offset));
9214 rtx mem = gen_rtx_MEM (SImode, addr);
9216 set_mem_alias_set (mem, rs6000_sr_alias_set);
9218 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9221 /* Set LR here to try to overlap restores below. */
9222 if (info->lr_save_p)
9223 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9224 gen_rtx_REG (Pmode, 0));
9226 /* Load exception handler data registers, if needed. */
9227 if (current_function_calls_eh_return)
9229 unsigned int i, regno;
9235 regno = EH_RETURN_DATA_REGNO (i);
9236 if (regno == INVALID_REGNUM)
9239 addr = plus_constant (frame_reg_rtx,
9240 info->ehrd_offset + sp_offset
9241 + reg_size * (int) i);
9242 mem = gen_rtx_MEM (reg_mode, addr);
9243 set_mem_alias_set (mem, rs6000_sr_alias_set);
9245 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9249 /* Restore GPRs. This is done as a PARALLEL if we are using
9250 the load-multiple instructions. */
9251 if (using_load_multiple)
9254 p = rtvec_alloc (32 - info->first_gp_reg_save);
9255 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9257 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9258 GEN_INT (info->gp_save_offset
9261 rtx mem = gen_rtx_MEM (reg_mode, addr);
9263 set_mem_alias_set (mem, rs6000_sr_alias_set);
9266 gen_rtx_SET (VOIDmode,
9267 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9270 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9273 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9274 if ((regs_ever_live[info->first_gp_reg_save+i]
9275 && ! call_used_regs[info->first_gp_reg_save+i])
9276 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
9277 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9278 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9280 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9281 GEN_INT (info->gp_save_offset
9284 rtx mem = gen_rtx_MEM (reg_mode, addr);
9286 set_mem_alias_set (mem, rs6000_sr_alias_set);
9288 emit_move_insn (gen_rtx_REG (reg_mode,
9289 info->first_gp_reg_save + i),
9293 /* Restore fpr's if we need to do it without calling a function. */
9294 if (restoring_FPRs_inline)
9295 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9296 if ((regs_ever_live[info->first_fp_reg_save+i]
9297 && ! call_used_regs[info->first_fp_reg_save+i]))
9300 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9301 GEN_INT (info->fp_save_offset
9304 mem = gen_rtx_MEM (DFmode, addr);
9305 set_mem_alias_set (mem, rs6000_sr_alias_set);
9307 emit_move_insn (gen_rtx_REG (DFmode,
9308 info->first_fp_reg_save + i),
9312 /* If we saved cr, restore it here. Just those that were used. */
9313 if (info->cr_save_p)
9315 rtx r12_rtx = gen_rtx_REG (SImode, 12);
9318 if (using_mfcr_multiple)
9320 for (i = 0; i < 8; i++)
9321 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9327 if (using_mfcr_multiple && count > 1)
9332 p = rtvec_alloc (count);
9335 for (i = 0; i < 8; i++)
9336 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9338 rtvec r = rtvec_alloc (2);
9339 RTVEC_ELT (r, 0) = r12_rtx;
9340 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9341 RTVEC_ELT (p, ndx) =
9342 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9343 gen_rtx_UNSPEC (CCmode, r, 20));
9346 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9351 for (i = 0; i < 8; i++)
9352 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9354 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9360 /* If this is V.4, unwind the stack pointer after all of the loads
9361 have been done. We need to emit a block here so that sched
9362 doesn't decide to move the sp change before the register restores
9363 (which may not have any obvious dependency on the stack). This
9364 doesn't hurt performance, because there is no scheduling that can
9365 be done after this point. */
9366 if (DEFAULT_ABI == ABI_V4)
9368 if (frame_reg_rtx != sp_reg_rtx)
9369 rs6000_emit_stack_tie ();
9371 if (use_backchain_to_restore_sp)
9373 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9375 else if (sp_offset != 0)
9377 emit_insn (Pmode == SImode
9378 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9379 GEN_INT (sp_offset))
9380 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9381 GEN_INT (sp_offset)));
9385 if (current_function_calls_eh_return)
9387 rtx sa = EH_RETURN_STACKADJ_RTX;
9388 emit_insn (Pmode == SImode
9389 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9390 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
9396 if (! restoring_FPRs_inline)
9397 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9399 p = rtvec_alloc (2);
9401 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9402 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9404 LINK_REGISTER_REGNUM));
9406 /* If we have to restore more than two FP registers, branch to the
9407 restore function. It will return to our caller. */
9408 if (! restoring_FPRs_inline)
9412 const char *alloc_rname;
9414 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9415 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9416 alloc_rname = ggc_strdup (rname);
9417 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9418 gen_rtx_SYMBOL_REF (Pmode,
9421 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9424 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9425 GEN_INT (info->fp_save_offset + 8*i));
9426 mem = gen_rtx_MEM (DFmode, addr);
9427 set_mem_alias_set (mem, rs6000_sr_alias_set);
9429 RTVEC_ELT (p, i+3) =
9430 gen_rtx_SET (VOIDmode,
9431 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9436 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9440 /* Write function epilogue. */
9443 rs6000_output_function_epilogue (file, size)
9445 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9447 rs6000_stack_t *info = rs6000_stack_info ();
9448 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9450 if (! HAVE_epilogue)
9452 rtx insn = get_last_insn ();
9453 /* If the last insn was a BARRIER, we don't have to write anything except
9455 if (GET_CODE (insn) == NOTE)
9456 insn = prev_nonnote_insn (insn);
9457 if (insn == 0 || GET_CODE (insn) != BARRIER)
9459 /* This is slightly ugly, but at least we don't have two
9460 copies of the epilogue-emitting code. */
9463 /* A NOTE_INSN_DELETED is supposed to be at the start
9464 and end of the "toplevel" insn chain. */
9465 emit_note (0, NOTE_INSN_DELETED);
9466 rs6000_emit_epilogue (FALSE);
9467 emit_note (0, NOTE_INSN_DELETED);
9469 /* Expand INSN_ADDRESSES so final() doesn't crash. */
9473 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
9475 INSN_ADDRESSES_NEW (insn, addr);
9480 if (TARGET_DEBUG_STACK)
9481 debug_rtx_list (get_insns (), 100);
9482 final (get_insns (), file, FALSE, FALSE);
9487 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9490 We don't output a traceback table if -finhibit-size-directive was
9491 used. The documentation for -finhibit-size-directive reads
9492 ``don't output a @code{.size} assembler directive, or anything
9493 else that would cause trouble if the function is split in the
9494 middle, and the two halves are placed at locations far apart in
9495 memory.'' The traceback table has this property, since it
9496 includes the offset from the start of the function to the
9497 traceback table itself.
9499 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9500 different traceback table. */
9501 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9503 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9504 const char *language_string = lang_hooks.name;
9505 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9508 while (*fname == '.') /* V.4 encodes . in the name */
9511 /* Need label immediately before tbtab, so we can compute its offset
9512 from the function start. */
9515 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9516 ASM_OUTPUT_LABEL (file, fname);
9518 /* The .tbtab pseudo-op can only be used for the first eight
9519 expressions, since it can't handle the possibly variable
9520 length fields that follow. However, if you omit the optional
9521 fields, the assembler outputs zeros for all optional fields
9522 anyways, giving each variable length field is minimum length
9523 (as defined in sys/debug.h). Thus we can not use the .tbtab
9524 pseudo-op at all. */
9526 /* An all-zero word flags the start of the tbtab, for debuggers
9527 that have to find it by searching forward from the entry
9528 point or from the current pc. */
9529 fputs ("\t.long 0\n", file);
9531 /* Tbtab format type. Use format type 0. */
9532 fputs ("\t.byte 0,", file);
9534 /* Language type. Unfortunately, there doesn't seem to be any
9535 official way to get this info, so we use language_string. C
9536 is 0. C++ is 9. No number defined for Obj-C, so use the
9537 value for C for now. There is no official value for Java,
9538 although IBM appears to be using 13. There is no official value
9539 for Chill, so we've chosen 44 pseudo-randomly. */
9540 if (! strcmp (language_string, "GNU C")
9541 || ! strcmp (language_string, "GNU Objective-C"))
9543 else if (! strcmp (language_string, "GNU F77"))
9545 else if (! strcmp (language_string, "GNU Ada"))
9547 else if (! strcmp (language_string, "GNU Pascal"))
9549 else if (! strcmp (language_string, "GNU C++"))
9551 else if (! strcmp (language_string, "GNU Java"))
9553 else if (! strcmp (language_string, "GNU CHILL"))
9557 fprintf (file, "%d,", i);
9559 /* 8 single bit fields: global linkage (not set for C extern linkage,
9560 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9561 from start of procedure stored in tbtab, internal function, function
9562 has controlled storage, function has no toc, function uses fp,
9563 function logs/aborts fp operations. */
9564 /* Assume that fp operations are used if any fp reg must be saved. */
9565 fprintf (file, "%d,",
9566 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9568 /* 6 bitfields: function is interrupt handler, name present in
9569 proc table, function calls alloca, on condition directives
9570 (controls stack walks, 3 bits), saves condition reg, saves
9572 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9573 set up as a frame pointer, even when there is no alloca call. */
9574 fprintf (file, "%d,",
9575 ((optional_tbtab << 6)
9576 | ((optional_tbtab & frame_pointer_needed) << 5)
9577 | (info->cr_save_p << 1)
9578 | (info->lr_save_p)));
9580 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9582 fprintf (file, "%d,",
9583 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9585 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9586 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9590 /* Compute the parameter info from the function decl argument
9593 int next_parm_info_bit = 31;
9595 for (decl = DECL_ARGUMENTS (current_function_decl);
9596 decl; decl = TREE_CHAIN (decl))
9598 rtx parameter = DECL_INCOMING_RTL (decl);
9599 enum machine_mode mode = GET_MODE (parameter);
9601 if (GET_CODE (parameter) == REG)
9603 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9611 else if (mode == DFmode)
9616 /* If only one bit will fit, don't or in this entry. */
9617 if (next_parm_info_bit > 0)
9618 parm_info |= (bits << (next_parm_info_bit - 1));
9619 next_parm_info_bit -= 2;
9623 fixed_parms += ((GET_MODE_SIZE (mode)
9624 + (UNITS_PER_WORD - 1))
9626 next_parm_info_bit -= 1;
9632 /* Number of fixed point parameters. */
9633 /* This is actually the number of words of fixed point parameters; thus
9634 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9635 fprintf (file, "%d,", fixed_parms);
9637 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9639 /* This is actually the number of fp registers that hold parameters;
9640 and thus the maximum value is 13. */
9641 /* Set parameters on stack bit if parameters are not in their original
9642 registers, regardless of whether they are on the stack? Xlc
9643 seems to set the bit when not optimizing. */
9644 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9646 if (! optional_tbtab)
9649 /* Optional fields follow. Some are variable length. */
9651 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9653 /* There is an entry for each parameter in a register, in the order that
9654 they occur in the parameter list. Any intervening arguments on the
9655 stack are ignored. If the list overflows a long (max possible length
9656 34 bits) then completely leave off all elements that don't fit. */
9657 /* Only emit this long if there was at least one parameter. */
9658 if (fixed_parms || float_parms)
9659 fprintf (file, "\t.long %d\n", parm_info);
9661 /* Offset from start of code to tb table. */
9662 fputs ("\t.long ", file);
9663 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9665 RS6000_OUTPUT_BASENAME (file, fname);
9667 assemble_name (file, fname);
9671 RS6000_OUTPUT_BASENAME (file, fname);
9673 assemble_name (file, fname);
9677 /* Interrupt handler mask. */
9678 /* Omit this long, since we never set the interrupt handler bit
9681 /* Number of CTL (controlled storage) anchors. */
9682 /* Omit this long, since the has_ctl bit is never set above. */
9684 /* Displacement into stack of each CTL anchor. */
9685 /* Omit this list of longs, because there are no CTL anchors. */
9687 /* Length of function name. */
9688 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9690 /* Function name. */
9691 assemble_string (fname, strlen (fname));
9693 /* Register for alloca automatic storage; this is always reg 31.
9694 Only emit this if the alloca bit was set above. */
9695 if (frame_pointer_needed)
9696 fputs ("\t.byte 31\n", file);
9698 fputs ("\t.align 2\n", file);
9703 /* A C compound statement that outputs the assembler code for a thunk
9704 function, used to implement C++ virtual function calls with
9705 multiple inheritance. The thunk acts as a wrapper around a virtual
9706 function, adjusting the implicit object parameter before handing
9707 control off to the real function.
9709 First, emit code to add the integer DELTA to the location that
9710 contains the incoming first argument. Assume that this argument
9711 contains a pointer, and is the one used to pass the `this' pointer
9712 in C++. This is the incoming argument *before* the function
9713 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9714 values of all other incoming arguments.
9716 After the addition, emit code to jump to FUNCTION, which is a
9717 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9718 not touch the return address. Hence returning from FUNCTION will
9719 return to whoever called the current `thunk'.
9721 The effect must be as if FUNCTION had been called directly with the
9722 adjusted first argument. This macro is responsible for emitting
9723 all of the code for a thunk function; output_function_prologue()
9724 and output_function_epilogue() are not invoked.
9726 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9727 been extracted from it.) It might possibly be useful on some
9728 targets, but probably not.
9730 If you do not define this macro, the target-independent code in the
9731 C++ frontend will generate a less efficient heavyweight thunk that
9732 calls FUNCTION instead of jumping to it. The generic approach does
9733 not support varargs. */
9736 output_mi_thunk (file, thunk_fndecl, delta, function)
9738 tree thunk_fndecl ATTRIBUTE_UNUSED;
9742 const char *this_reg =
9743 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9746 const char *r0 = reg_names[0];
9747 const char *toc = reg_names[2];
9748 const char *schain = reg_names[11];
9749 const char *r12 = reg_names[12];
9751 static int labelno = 0;
9753 /* Small constants that can be done by one add instruction. */
9754 if (delta >= -32768 && delta <= 32767)
9756 if (! TARGET_NEW_MNEMONICS)
9757 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9759 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9762 /* Large constants that can be done by one addis instruction. */
9763 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9764 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9767 /* 32-bit constants that can be done by an add and addis instruction. */
9768 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9770 /* Break into two pieces, propagating the sign bit from the low
9771 word to the upper word. */
9772 int delta_high = delta >> 16;
9773 int delta_low = delta & 0xffff;
9774 if ((delta_low & 0x8000) != 0)
9777 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9780 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9783 if (! TARGET_NEW_MNEMONICS)
9784 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9786 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9789 /* 64-bit constants, fixme */
9793 /* Get the prefix in front of the names. */
9794 switch (DEFAULT_ABI)
9804 case ABI_AIX_NODESC:
9809 /* If the function is compiled in this module, jump to it directly.
9810 Otherwise, load up its address and jump to it. */
9812 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9814 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9815 && ! lookup_attribute ("longcall",
9816 TYPE_ATTRIBUTES (TREE_TYPE (function))))
9818 fprintf (file, "\tb %s", prefix);
9819 assemble_name (file, fname);
9820 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9826 switch (DEFAULT_ABI)
9832 /* Set up a TOC entry for the function. */
9833 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9835 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9838 if (TARGET_MINIMAL_TOC)
9839 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9842 fputs ("\t.tc ", file);
9843 assemble_name (file, fname);
9844 fputs ("[TC],", file);
9846 assemble_name (file, fname);
9849 if (TARGET_MINIMAL_TOC)
9850 asm_fprintf (file, (TARGET_32BIT)
9851 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9852 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9853 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9854 assemble_name (file, buf);
9855 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9856 fputs ("-(.LCTOC1)", file);
9857 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9859 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9863 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9866 asm_fprintf (file, "\tmtctr %s\n", r0);
9868 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9871 asm_fprintf (file, "\tbctr\n");
9874 case ABI_AIX_NODESC:
9876 fprintf (file, "\tb %s", prefix);
9877 assemble_name (file, fname);
9878 if (flag_pic) fputs ("@plt", file);
9884 fprintf (file, "\tb %s", prefix);
9885 if (flag_pic && !machopic_name_defined_p (fname))
9886 assemble_name (file, machopic_stub_name (fname));
9888 assemble_name (file, fname);
9897 /* A quick summary of the various types of 'constant-pool tables'
9900 Target Flags Name One table per
9901 AIX (none) AIX TOC object file
9902 AIX -mfull-toc AIX TOC object file
9903 AIX -mminimal-toc AIX minimal TOC translation unit
9904 SVR4/EABI (none) SVR4 SDATA object file
9905 SVR4/EABI -fpic SVR4 pic object file
9906 SVR4/EABI -fPIC SVR4 PIC translation unit
9907 SVR4/EABI -mrelocatable EABI TOC function
9908 SVR4/EABI -maix AIX TOC object file
9909 SVR4/EABI -maix -mminimal-toc
9910 AIX minimal TOC translation unit
9912 Name Reg. Set by entries contains:
9913 made by addrs? fp? sum?
9915 AIX TOC 2 crt0 as Y option option
9916 AIX minimal TOC 30 prolog gcc Y Y option
9917 SVR4 SDATA 13 crt0 gcc N Y N
9918 SVR4 pic 30 prolog ld Y not yet N
9919 SVR4 PIC 30 prolog gcc Y option option
9920 EABI TOC 30 prolog gcc Y option option
9924 /* Hash table stuff for keeping track of TOC entries. */
9926 struct toc_hash_struct
9928 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9929 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
9931 enum machine_mode key_mode;
9935 static htab_t toc_hash_table;
9937 /* Hash functions for the hash table. */
9940 rs6000_hash_constant (k)
9943 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9944 const char *format = GET_RTX_FORMAT (GET_CODE (k));
9945 int flen = strlen (format);
9948 if (GET_CODE (k) == LABEL_REF)
9949 return result * 1231 + X0INT (XEXP (k, 0), 3);
9951 if (GET_CODE (k) == CONST_DOUBLE)
9953 else if (GET_CODE (k) == CODE_LABEL)
9958 for (; fidx < flen; fidx++)
9959 switch (format[fidx])
9964 const char *str = XSTR (k, fidx);
9966 result = result * 613 + len;
9967 for (i = 0; i < len; i++)
9968 result = result * 613 + (unsigned) str[i];
9973 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9977 result = result * 613 + (unsigned) XINT (k, fidx);
9980 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9981 result = result * 613 + (unsigned) XWINT (k, fidx);
9985 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9986 result = result * 613 + (unsigned) (XWINT (k, fidx)
9997 toc_hash_function (hash_entry)
9998 const void * hash_entry;
10000 const struct toc_hash_struct *thc =
10001 (const struct toc_hash_struct *) hash_entry;
10002 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
10005 /* Compare H1 and H2 for equivalence. */
10008 toc_hash_eq (h1, h2)
10012 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
10013 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
10015 if (((const struct toc_hash_struct *) h1)->key_mode
10016 != ((const struct toc_hash_struct *) h2)->key_mode)
10019 /* Gotcha: One of these const_doubles will be in memory.
10020 The other may be on the constant-pool chain.
10021 So rtx_equal_p will think they are different... */
10024 if (GET_CODE (r1) != GET_CODE (r2)
10025 || GET_MODE (r1) != GET_MODE (r2))
10027 if (GET_CODE (r1) == CONST_DOUBLE)
10029 int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
10031 for (i = 1; i < format_len; i++)
10032 if (XWINT (r1, i) != XWINT (r2, i))
10037 else if (GET_CODE (r1) == LABEL_REF)
10038 return (CODE_LABEL_NUMBER (XEXP (r1, 0))
10039 == CODE_LABEL_NUMBER (XEXP (r2, 0)));
10041 return rtx_equal_p (r1, r2);
10044 /* Mark the hash table-entry HASH_ENTRY. */
10047 toc_hash_mark_entry (hash_slot, unused)
10049 void * unused ATTRIBUTE_UNUSED;
10051 const struct toc_hash_struct * hash_entry =
10052 *(const struct toc_hash_struct **) hash_slot;
10053 rtx r = hash_entry->key;
10054 ggc_set_mark (hash_entry);
10055 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
10056 if (GET_CODE (r) == LABEL_REF)
10059 ggc_set_mark (XEXP (r, 0));
10066 /* Mark all the elements of the TOC hash-table *HT. */
10069 toc_hash_mark_table (vht)
10074 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
10077 /* These are the names given by the C++ front-end to vtables, and
10078 vtable-like objects. Ideally, this logic should not be here;
10079 instead, there should be some programmatic way of inquiring as
10080 to whether or not an object is a vtable. */
10082 #define VTABLE_NAME_P(NAME) \
10083 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
10084 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
10085 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
10086 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
10089 rs6000_output_symbol_ref (file, x)
10093 /* Currently C++ toc references to vtables can be emitted before it
10094 is decided whether the vtable is public or private. If this is
10095 the case, then the linker will eventually complain that there is
10096 a reference to an unknown section. Thus, for vtables only,
10097 we emit the TOC reference to reference the symbol and not the
10099 const char *name = XSTR (x, 0);
10101 if (VTABLE_NAME_P (name))
10103 RS6000_OUTPUT_BASENAME (file, name);
10106 assemble_name (file, name);
10109 /* Output a TOC entry. We derive the entry name from what is being
10113 output_toc (file, x, labelno, mode)
10117 enum machine_mode mode;
10120 const char *name = buf;
10121 const char *real_name;
10128 /* When the linker won't eliminate them, don't output duplicate
10129 TOC entries (this happens on AIX if there is any kind of TOC,
10130 and on SVR4 under -fPIC or -mrelocatable). */
10133 struct toc_hash_struct *h;
10136 h = ggc_alloc (sizeof (*h));
10138 h->key_mode = mode;
10139 h->labelno = labelno;
10141 found = htab_find_slot (toc_hash_table, h, 1);
10142 if (*found == NULL)
10144 else /* This is indeed a duplicate.
10145 Set this label equal to that label. */
10147 fputs ("\t.set ", file);
10148 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10149 fprintf (file, "%d,", labelno);
10150 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10151 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
10157 /* If we're going to put a double constant in the TOC, make sure it's
10158 aligned properly when strict alignment is on. */
10159 if (GET_CODE (x) == CONST_DOUBLE
10160 && STRICT_ALIGNMENT
10161 && GET_MODE_BITSIZE (mode) >= 64
10162 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
10163 ASM_OUTPUT_ALIGN (file, 3);
10166 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
10168 /* Handle FP constants specially. Note that if we have a minimal
10169 TOC, things we put here aren't actually in the TOC, so we can allow
10171 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
10173 REAL_VALUE_TYPE rv;
10176 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10177 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
10181 if (TARGET_MINIMAL_TOC)
10182 fputs (DOUBLE_INT_ASM_OP, file);
10184 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10185 fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
10190 if (TARGET_MINIMAL_TOC)
10191 fputs ("\t.long ", file);
10193 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10194 fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
10198 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
10200 REAL_VALUE_TYPE rv;
10203 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10204 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
10208 if (TARGET_MINIMAL_TOC)
10209 fputs (DOUBLE_INT_ASM_OP, file);
10211 fprintf (file, "\t.tc FS_%lx[TC],", l);
10212 fprintf (file, "0x%lx00000000\n", l);
10217 if (TARGET_MINIMAL_TOC)
10218 fputs ("\t.long ", file);
10220 fprintf (file, "\t.tc FS_%lx[TC],", l);
10221 fprintf (file, "0x%lx\n", l);
10225 else if (GET_MODE (x) == VOIDmode
10226 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10228 unsigned HOST_WIDE_INT low;
10229 HOST_WIDE_INT high;
10231 if (GET_CODE (x) == CONST_DOUBLE)
10233 low = CONST_DOUBLE_LOW (x);
10234 high = CONST_DOUBLE_HIGH (x);
10237 #if HOST_BITS_PER_WIDE_INT == 32
10240 high = (low & 0x80000000) ? ~0 : 0;
10244 low = INTVAL (x) & 0xffffffff;
10245 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10249 /* TOC entries are always Pmode-sized, but since this
10250 is a bigendian machine then if we're putting smaller
10251 integer constants in the TOC we have to pad them.
10252 (This is still a win over putting the constants in
10253 a separate constant pool, because then we'd have
10254 to have both a TOC entry _and_ the actual constant.)
10256 For a 32-bit target, CONST_INT values are loaded and shifted
10257 entirely within `low' and can be stored in one TOC entry. */
10259 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10260 abort ();/* It would be easy to make this work, but it doesn't now. */
10262 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10263 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10264 POINTER_SIZE, &low, &high, 0);
10268 if (TARGET_MINIMAL_TOC)
10269 fputs (DOUBLE_INT_ASM_OP, file);
10271 fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long) high, (long) low);
10272 fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
10277 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10279 if (TARGET_MINIMAL_TOC)
10280 fputs ("\t.long ", file);
10282 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10283 (long) high, (long) low);
10284 fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
10288 if (TARGET_MINIMAL_TOC)
10289 fputs ("\t.long ", file);
10291 fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
10292 fprintf (file, "0x%lx\n", (long) low);
10298 if (GET_CODE (x) == CONST)
10300 if (GET_CODE (XEXP (x, 0)) != PLUS)
10303 base = XEXP (XEXP (x, 0), 0);
10304 offset = INTVAL (XEXP (XEXP (x, 0), 1));
10307 if (GET_CODE (base) == SYMBOL_REF)
10308 name = XSTR (base, 0);
10309 else if (GET_CODE (base) == LABEL_REF)
10310 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10311 else if (GET_CODE (base) == CODE_LABEL)
10312 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10316 STRIP_NAME_ENCODING (real_name, name);
10317 if (TARGET_MINIMAL_TOC)
10318 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10321 fprintf (file, "\t.tc %s", real_name);
10324 fprintf (file, ".N%d", - offset);
10326 fprintf (file, ".P%d", offset);
10328 fputs ("[TC],", file);
10331 /* Currently C++ toc references to vtables can be emitted before it
10332 is decided whether the vtable is public or private. If this is
10333 the case, then the linker will eventually complain that there is
10334 a TOC reference to an unknown section. Thus, for vtables only,
10335 we emit the TOC reference to reference the symbol and not the
10337 if (VTABLE_NAME_P (name))
10339 RS6000_OUTPUT_BASENAME (file, name);
10341 fprintf (file, "%d", offset);
10342 else if (offset > 0)
10343 fprintf (file, "+%d", offset);
10346 output_addr_const (file, x);
10350 /* Output an assembler pseudo-op to write an ASCII string of N characters
10351 starting at P to FILE.
10353 On the RS/6000, we have to do this using the .byte operation and
10354 write out special characters outside the quoted string.
10355 Also, the assembler is broken; very long strings are truncated,
10356 so we must artificially break them up early. */
10359 output_ascii (file, p, n)
10365 int i, count_string;
10366 const char *for_string = "\t.byte \"";
10367 const char *for_decimal = "\t.byte ";
10368 const char *to_close = NULL;
10371 for (i = 0; i < n; i++)
10374 if (c >= ' ' && c < 0177)
10377 fputs (for_string, file);
10380 /* Write two quotes to get one. */
10388 for_decimal = "\"\n\t.byte ";
10392 if (count_string >= 512)
10394 fputs (to_close, file);
10396 for_string = "\t.byte \"";
10397 for_decimal = "\t.byte ";
10405 fputs (for_decimal, file);
10406 fprintf (file, "%d", c);
10408 for_string = "\n\t.byte \"";
10409 for_decimal = ", ";
10415 /* Now close the string if we have written one. Then end the line. */
10417 fputs (to_close, file);
10420 /* Generate a unique section name for FILENAME for a section type
10421 represented by SECTION_DESC. Output goes into BUF.
10423 SECTION_DESC can be any string, as long as it is different for each
10424 possible section type.
10426 We name the section in the same manner as xlc. The name begins with an
10427 underscore followed by the filename (after stripping any leading directory
10428 names) with the last period replaced by the string SECTION_DESC. If
10429 FILENAME does not contain a period, SECTION_DESC is appended to the end of
10433 rs6000_gen_section_name (buf, filename, section_desc)
10435 const char *filename;
10436 const char *section_desc;
10438 const char *q, *after_last_slash, *last_period = 0;
10442 after_last_slash = filename;
10443 for (q = filename; *q; q++)
10446 after_last_slash = q + 1;
10447 else if (*q == '.')
10451 len = strlen (after_last_slash) + strlen (section_desc) + 2;
10452 *buf = (char *) permalloc (len);
10457 for (q = after_last_slash; *q; q++)
10459 if (q == last_period)
10461 strcpy (p, section_desc);
10462 p += strlen (section_desc);
10465 else if (ISALNUM (*q))
10469 if (last_period == 0)
10470 strcpy (p, section_desc);
10475 /* Emit profile function. */
10478 output_profile_hook (labelno)
10481 if (DEFAULT_ABI == ABI_AIX)
10484 const char *label_name;
10487 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10488 STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
10489 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10491 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10494 else if (DEFAULT_ABI == ABI_DARWIN)
10496 const char *mcount_name = RS6000_MCOUNT;
10497 int caller_addr_regno = LINK_REGISTER_REGNUM;
10499 /* Be conservative and always set this, at least for now. */
10500 current_function_uses_pic_offset_table = 1;
10503 /* For PIC code, set up a stub and collect the caller's address
10504 from r0, which is where the prologue puts it. */
10507 mcount_name = machopic_stub_name (mcount_name);
10508 if (current_function_uses_pic_offset_table)
10509 caller_addr_regno = 0;
10512 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10514 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10518 /* Write function profiler code. */
10521 output_function_profiler (file, labelno)
10527 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10528 switch (DEFAULT_ABI)
10534 case ABI_AIX_NODESC:
10535 fprintf (file, "\tmflr %s\n", reg_names[0]);
10538 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10539 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10540 reg_names[0], reg_names[1]);
10541 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10542 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10543 assemble_name (file, buf);
10544 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
10546 else if (flag_pic > 1)
10548 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10549 reg_names[0], reg_names[1]);
10550 /* Now, we need to get the address of the label. */
10551 fputs ("\tbl 1f\n\t.long ", file);
10552 assemble_name (file, buf);
10553 fputs ("-.\n1:", file);
10554 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10555 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10556 reg_names[0], reg_names[11]);
10557 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10558 reg_names[0], reg_names[0], reg_names[11]);
10562 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10563 assemble_name (file, buf);
10564 fputs ("@ha\n", file);
10565 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10566 reg_names[0], reg_names[1]);
10567 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10568 assemble_name (file, buf);
10569 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
10572 if (current_function_needs_context)
10573 asm_fprintf (file, "\tmr %s,%s\n",
10574 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10575 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10576 if (current_function_needs_context)
10577 asm_fprintf (file, "\tmr %s,%s\n",
10578 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10583 /* Don't do anything, done in output_profile_hook (). */
10589 /* Adjust the cost of a scheduling dependency. Return the new cost of
10590 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
10593 rs6000_adjust_cost (insn, link, dep_insn, cost)
10596 rtx dep_insn ATTRIBUTE_UNUSED;
10599 if (! recog_memoized (insn))
10602 if (REG_NOTE_KIND (link) != 0)
10605 if (REG_NOTE_KIND (link) == 0)
10607 /* Data dependency; DEP_INSN writes a register that INSN reads
10608 some cycles later. */
10609 switch (get_attr_type (insn))
10612 /* Tell the first scheduling pass about the latency between
10613 a mtctr and bctr (and mtlr and br/blr). The first
10614 scheduling pass will not know about this latency since
10615 the mtctr instruction, which has the latency associated
10616 to it, will be generated by reload. */
10617 return TARGET_POWER ? 5 : 4;
10619 /* Leave some extra cycles between a compare and its
10620 dependent branch, to inhibit expensive mispredicts. */
10621 if ((rs6000_cpu_attr == CPU_PPC750
10622 || rs6000_cpu_attr == CPU_PPC7400
10623 || rs6000_cpu_attr == CPU_PPC7450)
10624 && recog_memoized (dep_insn)
10625 && (INSN_CODE (dep_insn) >= 0)
10626 && (get_attr_type (dep_insn) == TYPE_COMPARE
10627 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10628 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10629 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10634 /* Fall out to return default cost. */
10640 /* A C statement (sans semicolon) to update the integer scheduling
10641 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10642 INSN earlier, increase the priority to execute INSN later. Do not
10643 define this macro if you do not need to adjust the scheduling
10644 priorities of insns. */
10647 rs6000_adjust_priority (insn, priority)
10648 rtx insn ATTRIBUTE_UNUSED;
10651 /* On machines (like the 750) which have asymmetric integer units,
10652 where one integer unit can do multiply and divides and the other
10653 can't, reduce the priority of multiply/divide so it is scheduled
10654 before other integer operations. */
10657 if (! INSN_P (insn))
10660 if (GET_CODE (PATTERN (insn)) == USE)
10663 switch (rs6000_cpu_attr) {
10665 switch (get_attr_type (insn))
10672 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10673 priority, priority);
10674 if (priority >= 0 && priority < 0x01000000)
10684 /* Return how many instructions the machine can issue per cycle. */
10687 rs6000_issue_rate ()
10689 switch (rs6000_cpu_attr) {
10690 case CPU_RIOS1: /* ? */
10692 case CPU_PPC601: /* ? */
10711 /* Length in units of the trampoline for entering a nested function. */
10714 rs6000_trampoline_size ()
10718 switch (DEFAULT_ABI)
10724 ret = (TARGET_32BIT) ? 12 : 24;
10729 case ABI_AIX_NODESC:
10730 ret = (TARGET_32BIT) ? 40 : 48;
10737 /* Emit RTL insns to initialize the variable parts of a trampoline.
10738 FNADDR is an RTX for the address of the function's pure code.
10739 CXT is an RTX for the static chain value for the function. */
10742 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10747 enum machine_mode pmode = Pmode;
10748 int regsize = (TARGET_32BIT) ? 4 : 8;
10749 rtx ctx_reg = force_reg (pmode, cxt);
10751 switch (DEFAULT_ABI)
10756 /* Macros to shorten the code expansions below. */
10757 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10758 #define MEM_PLUS(addr,offset) \
10759 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10761 /* Under AIX, just build the 3 word function descriptor */
10764 rtx fn_reg = gen_reg_rtx (pmode);
10765 rtx toc_reg = gen_reg_rtx (pmode);
10766 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10767 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10768 emit_move_insn (MEM_DEREF (addr), fn_reg);
10769 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10770 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10774 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10777 case ABI_AIX_NODESC:
10778 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10779 FALSE, VOIDmode, 4,
10781 GEN_INT (rs6000_trampoline_size ()), SImode,
10791 /* Table of valid machine attributes. */
10793 const struct attribute_spec rs6000_attribute_table[] =
10795 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10796 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10797 { NULL, 0, 0, false, false, false, NULL }
10800 /* Handle a "longcall" attribute; arguments as in struct
10801 attribute_spec.handler. */
10804 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10807 tree args ATTRIBUTE_UNUSED;
10808 int flags ATTRIBUTE_UNUSED;
10809 bool *no_add_attrs;
10811 if (TREE_CODE (*node) != FUNCTION_TYPE
10812 && TREE_CODE (*node) != FIELD_DECL
10813 && TREE_CODE (*node) != TYPE_DECL)
10815 warning ("`%s' attribute only applies to functions",
10816 IDENTIFIER_POINTER (name));
10817 *no_add_attrs = true;
10823 /* Return a reference suitable for calling a function with the
10824 longcall attribute. */
10827 rs6000_longcall_ref (call_ref)
10830 const char *call_name;
10833 if (GET_CODE (call_ref) != SYMBOL_REF)
10836 /* System V adds '.' to the internal name, so skip them. */
10837 call_name = XSTR (call_ref, 0);
10838 if (*call_name == '.')
10840 while (*call_name == '.')
10843 node = get_identifier (call_name);
10844 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10847 return force_reg (Pmode, call_ref);
10851 /* A C statement or statements to switch to the appropriate section
10852 for output of RTX in mode MODE. You can assume that RTX is some
10853 kind of constant in RTL. The argument MODE is redundant except in
10854 the case of a `const_int' rtx. Select the section by calling
10855 `text_section' or one of the alternatives for other sections.
10857 Do not define this macro if you put all constants in the read-only
10860 #ifdef USING_ELFOS_H
10863 rs6000_select_rtx_section (mode, x)
10864 enum machine_mode mode;
10867 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10870 && (GET_CODE (x) == SYMBOL_REF
10871 || GET_CODE (x) == LABEL_REF
10872 || GET_CODE (x) == CONST))
10878 /* A C statement or statements to switch to the appropriate
10879 section for output of DECL. DECL is either a `VAR_DECL' node
10880 or a constant of some sort. RELOC indicates whether forming
10881 the initial value of DECL requires link-time relocations. */
10884 rs6000_select_section (decl, reloc)
10888 int size = int_size_in_bytes (TREE_TYPE (decl));
10891 static void (* const sec_funcs[4]) PARAMS ((void)) = {
10898 needs_sdata = (size > 0
10899 && size <= g_switch_value
10900 && rs6000_sdata != SDATA_NONE
10901 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10903 if (TREE_CODE (decl) == STRING_CST)
10904 readonly = ! flag_writable_strings;
10905 else if (TREE_CODE (decl) == VAR_DECL)
10906 readonly = (! (flag_pic && reloc)
10907 && TREE_READONLY (decl)
10908 && ! TREE_SIDE_EFFECTS (decl)
10909 && DECL_INITIAL (decl)
10910 && DECL_INITIAL (decl) != error_mark_node
10911 && TREE_CONSTANT (DECL_INITIAL (decl)));
10912 else if (TREE_CODE (decl) == CONSTRUCTOR)
10913 readonly = (! (flag_pic && reloc)
10914 && ! TREE_SIDE_EFFECTS (decl)
10915 && TREE_CONSTANT (decl));
10918 if (needs_sdata && rs6000_sdata != SDATA_EABI)
10921 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10924 /* A C statement to build up a unique section name, expressed as a
10925 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10926 RELOC indicates whether the initial value of EXP requires
10927 link-time relocations. If you do not define this macro, GCC will use
10928 the symbol name prefixed by `.' as the section name. Note - this
10929 macro can now be called for uninitialized data items as well as
10930 initialised data and functions. */
10933 rs6000_unique_section (decl, reloc)
10941 const char *prefix;
10943 static const char *const prefixes[7][2] =
10945 { ".rodata.", ".gnu.linkonce.r." },
10946 { ".sdata2.", ".gnu.linkonce.s2." },
10947 { ".data.", ".gnu.linkonce.d." },
10948 { ".sdata.", ".gnu.linkonce.s." },
10949 { ".bss.", ".gnu.linkonce.b." },
10950 { ".sbss.", ".gnu.linkonce.sb." },
10951 { ".text.", ".gnu.linkonce.t." }
10954 if (TREE_CODE (decl) == FUNCTION_DECL)
10963 if (TREE_CODE (decl) == STRING_CST)
10964 readonly = ! flag_writable_strings;
10965 else if (TREE_CODE (decl) == VAR_DECL)
10966 readonly = (! (flag_pic && reloc)
10967 && TREE_READONLY (decl)
10968 && ! TREE_SIDE_EFFECTS (decl)
10969 && TREE_CONSTANT (DECL_INITIAL (decl)));
10971 size = int_size_in_bytes (TREE_TYPE (decl));
10972 needs_sdata = (size > 0
10973 && size <= g_switch_value
10974 && rs6000_sdata != SDATA_NONE
10975 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10977 if (DECL_INITIAL (decl) == 0
10978 || DECL_INITIAL (decl) == error_mark_node)
10980 else if (! readonly)
10987 /* .sdata2 is only for EABI. */
10988 if (sec == 0 && rs6000_sdata != SDATA_EABI)
10994 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10995 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10996 len = strlen (name) + strlen (prefix);
10997 string = alloca (len + 1);
10999 sprintf (string, "%s%s", prefix, name);
11001 DECL_SECTION_NAME (decl) = build_string (len, string);
11005 /* If we are referencing a function that is static or is known to be
11006 in this file, make the SYMBOL_REF special. We can use this to indicate
11007 that we can branch to this function without emitting a no-op after the
11008 call. For real AIX calling sequences, we also replace the
11009 function name with the real name (1 or 2 leading .'s), rather than
11010 the function descriptor name. This saves a lot of overriding code
11011 to read the prefixes. */
11014 rs6000_encode_section_info (decl, first)
11021 if (TREE_CODE (decl) == FUNCTION_DECL)
11023 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11024 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
11025 && ! DECL_WEAK (decl))
11026 SYMBOL_REF_FLAG (sym_ref) = 1;
11028 if (DEFAULT_ABI == ABI_AIX)
11030 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
11031 size_t len2 = strlen (XSTR (sym_ref, 0));
11032 char *str = alloca (len1 + len2 + 1);
11035 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
11037 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
11040 else if (rs6000_sdata != SDATA_NONE
11041 && DEFAULT_ABI == ABI_V4
11042 && TREE_CODE (decl) == VAR_DECL)
11044 int size = int_size_in_bytes (TREE_TYPE (decl));
11045 tree section_name = DECL_SECTION_NAME (decl);
11046 const char *name = (char *)0;
11051 if (TREE_CODE (section_name) == STRING_CST)
11053 name = TREE_STRING_POINTER (section_name);
11054 len = TREE_STRING_LENGTH (section_name);
11060 if ((size > 0 && size <= g_switch_value)
11062 && ((len == sizeof (".sdata") - 1
11063 && strcmp (name, ".sdata") == 0)
11064 || (len == sizeof (".sdata2") - 1
11065 && strcmp (name, ".sdata2") == 0)
11066 || (len == sizeof (".sbss") - 1
11067 && strcmp (name, ".sbss") == 0)
11068 || (len == sizeof (".sbss2") - 1
11069 && strcmp (name, ".sbss2") == 0)
11070 || (len == sizeof (".PPC.EMB.sdata0") - 1
11071 && strcmp (name, ".PPC.EMB.sdata0") == 0)
11072 || (len == sizeof (".PPC.EMB.sbss0") - 1
11073 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
11075 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11076 size_t len = strlen (XSTR (sym_ref, 0));
11077 char *str = alloca (len + 2);
11080 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
11081 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
11086 #endif /* USING_ELFOS_H */
11089 /* Return a REG that occurs in ADDR with coefficient 1.
11090 ADDR can be effectively incremented by incrementing REG.
11092 r0 is special and we must not select it as an address
11093 register by this routine since our caller will try to
11094 increment the returned register via an "la" instruction. */
11097 find_addr_reg (addr)
11100 while (GET_CODE (addr) == PLUS)
11102 if (GET_CODE (XEXP (addr, 0)) == REG
11103 && REGNO (XEXP (addr, 0)) != 0)
11104 addr = XEXP (addr, 0);
11105 else if (GET_CODE (XEXP (addr, 1)) == REG
11106 && REGNO (XEXP (addr, 1)) != 0)
11107 addr = XEXP (addr, 1);
11108 else if (CONSTANT_P (XEXP (addr, 0)))
11109 addr = XEXP (addr, 1);
11110 else if (CONSTANT_P (XEXP (addr, 1)))
11111 addr = XEXP (addr, 0);
11115 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
11121 rs6000_fatal_bad_address (op)
11124 fatal_insn ("bad address", op);
11127 /* Called to register all of our global variables with the garbage
11131 rs6000_add_gc_roots ()
11133 ggc_add_rtx_root (&rs6000_compare_op0, 1);
11134 ggc_add_rtx_root (&rs6000_compare_op1, 1);
11136 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
11137 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
11138 toc_hash_mark_table);
11141 machopic_add_gc_roots ();
11148 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
11149 reference and a constant. */
11152 symbolic_operand (op)
11155 switch (GET_CODE (op))
11162 return (GET_CODE (op) == SYMBOL_REF ||
11163 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
11164 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
11165 && GET_CODE (XEXP (op, 1)) == CONST_INT);
11172 #ifdef RS6000_LONG_BRANCH
11174 static tree stub_list = 0;
11176 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11177 procedure calls to the linked list. */
11180 add_compiler_stub (label_name, function_name, line_number)
11182 tree function_name;
11185 tree stub = build_tree_list (function_name, label_name);
11186 TREE_TYPE (stub) = build_int_2 (line_number, 0);
11187 TREE_CHAIN (stub) = stub_list;
11191 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
11192 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
11193 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
11195 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11196 handling procedure calls from the linked list and initializes the
11200 output_compiler_stub ()
11203 char label_buf[256];
11205 tree tmp_stub, stub;
11208 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11210 fprintf (asm_out_file,
11211 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11213 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11214 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11215 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11216 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11218 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11220 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11223 label_buf[0] = '_';
11224 strcpy (label_buf+1,
11225 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
11228 strcpy (tmp_buf, "lis r12,hi16(");
11229 strcat (tmp_buf, label_buf);
11230 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11231 strcat (tmp_buf, label_buf);
11232 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11233 output_asm_insn (tmp_buf, 0);
11235 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11236 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11237 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11238 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11244 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11245 already there or not. */
11248 no_previous_def (function_name)
11249 tree function_name;
11252 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11253 if (function_name == STUB_FUNCTION_NAME (stub))
11258 /* GET_PREV_LABEL gets the label name from the previous definition of
11262 get_prev_label (function_name)
11263 tree function_name;
11266 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11267 if (function_name == STUB_FUNCTION_NAME (stub))
11268 return STUB_LABEL_NAME (stub);
11272 /* INSN is either a function call or a millicode call. It may have an
11273 unconditional jump in its delay slot.
11275 CALL_DEST is the routine we are calling. */
11278 output_call (insn, call_dest, operand_number)
11281 int operand_number;
11283 static char buf[256];
11284 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11287 tree funname = get_identifier (XSTR (call_dest, 0));
11289 if (no_previous_def (funname))
11292 rtx label_rtx = gen_label_rtx ();
11293 char *label_buf, temp_buf[256];
11294 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11295 CODE_LABEL_NUMBER (label_rtx));
11296 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11297 labelname = get_identifier (label_buf);
11298 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11300 line_number = NOTE_LINE_NUMBER (insn);
11301 add_compiler_stub (labelname, funname, line_number);
11304 labelname = get_prev_label (funname);
11306 sprintf (buf, "jbsr %%z%d,%.246s",
11307 operand_number, IDENTIFIER_POINTER (labelname));
11312 sprintf (buf, "bl %%z%d", operand_number);
11317 #endif /* RS6000_LONG_BRANCH */
11319 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11321 const char *const symbol_ = (SYMBOL); \
11322 char *buffer_ = (BUF); \
11323 if (symbol_[0] == '"') \
11325 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11327 else if (name_needs_quotes(symbol_)) \
11329 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11333 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11338 /* Generate PIC and indirect symbol stubs. */
11341 machopic_output_stub (file, symb, stub)
11343 const char *symb, *stub;
11345 unsigned int length;
11346 char *symbol_name, *lazy_ptr_name;
11347 char *local_label_0;
11348 static int label = 0;
11350 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11351 STRIP_NAME_ENCODING (symb, symb);
11355 length = strlen (symb);
11356 symbol_name = alloca (length + 32);
11357 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11359 lazy_ptr_name = alloca (length + 32);
11360 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11362 local_label_0 = alloca (length + 32);
11363 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11366 machopic_picsymbol_stub_section ();
11368 machopic_symbol_stub_section ();
11370 fprintf (file, "%s:\n", stub);
11371 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11375 fprintf (file, "\tmflr r0\n");
11376 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11377 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11378 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11379 lazy_ptr_name, local_label_0);
11380 fprintf (file, "\tmtlr r0\n");
11381 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11382 lazy_ptr_name, local_label_0);
11383 fprintf (file, "\tmtctr r12\n");
11384 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11385 lazy_ptr_name, local_label_0);
11386 fprintf (file, "\tbctr\n");
11389 fprintf (file, "non-pure not supported\n");
11391 machopic_lazy_symbol_ptr_section ();
11392 fprintf (file, "%s:\n", lazy_ptr_name);
11393 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11394 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11397 /* Legitimize PIC addresses. If the address is already
11398 position-independent, we return ORIG. Newly generated
11399 position-independent addresses go into a reg. This is REG if non
11400 zero, otherwise we allocate register(s) as necessary. */
11402 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11405 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11407 enum machine_mode mode;
11412 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11413 reg = gen_reg_rtx (Pmode);
11415 if (GET_CODE (orig) == CONST)
11417 if (GET_CODE (XEXP (orig, 0)) == PLUS
11418 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11421 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11424 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11427 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11433 if (GET_CODE (offset) == CONST_INT)
11435 if (SMALL_INT (offset))
11436 return plus_constant (base, INTVAL (offset));
11437 else if (! reload_in_progress && ! reload_completed)
11438 offset = force_reg (Pmode, offset);
11441 rtx mem = force_const_mem (Pmode, orig);
11442 return machopic_legitimize_pic_address (mem, Pmode, reg);
11445 return gen_rtx (PLUS, Pmode, base, offset);
11448 /* Fall back on generic machopic code. */
11449 return machopic_legitimize_pic_address (orig, mode, reg);
11452 /* This is just a placeholder to make linking work without having to
11453 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11454 ever needed for Darwin (not too likely!) this would have to get a
11455 real definition. */
11462 #endif /* TARGET_MACHO */
11465 static unsigned int
11466 rs6000_elf_section_type_flags (decl, name, reloc)
11471 unsigned int flags = default_section_type_flags (decl, name, reloc);
11473 if (TARGET_RELOCATABLE)
11474 flags |= SECTION_WRITE;
11479 /* Record an element in the table of global constructors. SYMBOL is
11480 a SYMBOL_REF of the function to be called; PRIORITY is a number
11481 between 0 and MAX_INIT_PRIORITY.
11483 This differs from default_named_section_asm_out_constructor in
11484 that we have special handling for -mrelocatable. */
11487 rs6000_elf_asm_out_constructor (symbol, priority)
11491 const char *section = ".ctors";
11494 if (priority != DEFAULT_INIT_PRIORITY)
11496 sprintf (buf, ".ctors.%.5u",
11497 /* Invert the numbering so the linker puts us in the proper
11498 order; constructors are run from right to left, and the
11499 linker sorts in increasing order. */
11500 MAX_INIT_PRIORITY - priority);
11504 named_section_flags (section, SECTION_WRITE);
11505 assemble_align (POINTER_SIZE);
11507 if (TARGET_RELOCATABLE)
11509 fputs ("\t.long (", asm_out_file);
11510 output_addr_const (asm_out_file, symbol);
11511 fputs (")@fixup\n", asm_out_file);
11514 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11518 rs6000_elf_asm_out_destructor (symbol, priority)
11522 const char *section = ".dtors";
11525 if (priority != DEFAULT_INIT_PRIORITY)
11527 sprintf (buf, ".dtors.%.5u",
11528 /* Invert the numbering so the linker puts us in the proper
11529 order; constructors are run from right to left, and the
11530 linker sorts in increasing order. */
11531 MAX_INIT_PRIORITY - priority);
11535 named_section_flags (section, SECTION_WRITE);
11536 assemble_align (POINTER_SIZE);
11538 if (TARGET_RELOCATABLE)
11540 fputs ("\t.long (", asm_out_file);
11541 output_addr_const (asm_out_file, symbol);
11542 fputs (")@fixup\n", asm_out_file);
11545 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11549 #ifdef OBJECT_FORMAT_COFF
11551 xcoff_asm_named_section (name, flags)
11553 unsigned int flags ATTRIBUTE_UNUSED;
11555 fprintf (asm_out_file, "\t.csect %s\n", name);