1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
59 #ifndef TARGET_NO_PROTOTYPE
60 #define TARGET_NO_PROTOTYPE 0
63 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
64 && easy_vector_same (x, y))
66 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
68 && easy_vector_same (x, y))
70 #define min(A,B) ((A) < (B) ? (A) : (B))
71 #define max(A,B) ((A) > (B) ? (A) : (B))
73 /* Structure used to define the rs6000 stack */
74 typedef struct rs6000_stack {
75 int first_gp_reg_save; /* first callee saved GP register used */
76 int first_fp_reg_save; /* first callee saved FP register used */
77 int first_altivec_reg_save; /* first callee saved AltiVec register used */
78 int lr_save_p; /* true if the link reg needs to be saved */
79 int cr_save_p; /* true if the CR reg needs to be saved */
80 unsigned int vrsave_mask; /* mask of vec registers to save */
81 int toc_save_p; /* true if the TOC needs to be saved */
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
84 enum rs6000_abi abi; /* which ABI to use */
85 int gp_save_offset; /* offset to save GP regs from initial SP */
86 int fp_save_offset; /* offset to save FP regs from initial SP */
87 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset; /* offset to save LR from initial SP */
89 int cr_save_offset; /* offset to save CR from initial SP */
90 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
92 int toc_save_offset; /* offset to save the TOC pointer */
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
96 int varargs_size; /* size to hold V.4 args passed in regs */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int lr_size; /* size to hold LR if not in save_size */
106 int vrsave_size; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size; /* size of altivec alignment padding if
109 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size;
111 int toc_size; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
113 int spe_64bit_regs_used;
116 /* Target cpu type */
118 enum processor_type rs6000_cpu;
119 struct rs6000_cpu_select rs6000_select[3] =
121 /* switch name, tune arch */
122 { (const char *)0, "--with-cpu=", 1, 1 },
123 { (const char *)0, "-mcpu=", 1, 1 },
124 { (const char *)0, "-mtune=", 1, 0 },
127 /* Support adjust_priority scheduler hook
128 and -mprioritize-restricted-insns= option. */
129 const char *rs6000_sched_restricted_insns_priority_str;
130 int rs6000_sched_restricted_insns_priority;
132 /* Support for -msched-costly-dep option. */
133 const char *rs6000_sched_costly_dep_str;
134 enum rs6000_dependence_cost rs6000_sched_costly_dep;
136 /* Support for -minsert-sched-nops option. */
137 const char *rs6000_sched_insert_nops_str;
138 enum rs6000_nop_insertion rs6000_sched_insert_nops;
140 /* Size of long double */
141 const char *rs6000_long_double_size_string;
142 int rs6000_long_double_type_size;
144 /* Whether -mabi=altivec has appeared */
145 int rs6000_altivec_abi;
147 /* Whether VRSAVE instructions should be generated. */
148 int rs6000_altivec_vrsave;
150 /* String from -mvrsave= option. */
151 const char *rs6000_altivec_vrsave_string;
153 /* Nonzero if we want SPE ABI extensions. */
156 /* Whether isel instructions should be generated. */
159 /* Whether SPE simd instructions should be generated. */
162 /* Nonzero if floating point operations are done in the GPRs. */
163 int rs6000_float_gprs = 0;
165 /* String from -mfloat-gprs=. */
166 const char *rs6000_float_gprs_string;
168 /* String from -misel=. */
169 const char *rs6000_isel_string;
171 /* String from -mspe=. */
172 const char *rs6000_spe_string;
174 /* Set to nonzero once AIX common-mode calls have been defined. */
175 static GTY(()) int common_mode_defined;
177 /* Save information from a "cmpxx" operation until the branch or scc is
179 rtx rs6000_compare_op0, rs6000_compare_op1;
180 int rs6000_compare_fp_p;
182 /* Label number of label created for -mrelocatable, to call to so we can
183 get the address of the GOT section */
184 int rs6000_pic_labelno;
187 /* Which abi to adhere to */
188 const char *rs6000_abi_name;
190 /* Semantics of the small data area */
191 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
193 /* Which small data model to use */
194 const char *rs6000_sdata_name = (char *)0;
196 /* Counter for labels which are to be placed in .fixup. */
197 int fixuplabelno = 0;
200 /* Bit size of immediate TLS offsets and string from which it is decoded. */
201 int rs6000_tls_size = 32;
202 const char *rs6000_tls_size_string;
204 /* ABI enumeration available for subtarget to use. */
205 enum rs6000_abi rs6000_current_abi;
207 /* ABI string from -mabi= option. */
208 const char *rs6000_abi_string;
211 const char *rs6000_debug_name;
212 int rs6000_debug_stack; /* debug stack applications */
213 int rs6000_debug_arg; /* debug argument handling */
216 static GTY(()) tree opaque_V2SI_type_node;
217 static GTY(()) tree opaque_V2SF_type_node;
218 static GTY(()) tree opaque_p_V2SI_type_node;
220 const char *rs6000_traceback_name;
222 traceback_default = 0,
228 /* Flag to say the TOC is initialized */
230 char toc_label_name[10];
232 /* Alias set for saves and restores from the rs6000 stack. */
233 static GTY(()) int rs6000_sr_alias_set;
235 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
236 The only place that looks at this is rs6000_set_default_type_attributes;
237 everywhere else should rely on the presence or absence of a longcall
238 attribute on the function declaration. */
239 int rs6000_default_long_calls;
240 const char *rs6000_longcall_switch;
242 /* Control alignment for fields within structures. */
243 /* String from -malign-XXXXX. */
244 const char *rs6000_alignment_string;
245 int rs6000_alignment_flags;
247 struct builtin_description
249 /* mask is not const because we're going to alter it below. This
250 nonsense will go away when we rewrite the -march infrastructure
251 to give us more target flag bits. */
253 const enum insn_code icode;
254 const char *const name;
255 const enum rs6000_builtins code;
258 static bool rs6000_function_ok_for_sibcall (tree, tree);
259 static int num_insns_constant_wide (HOST_WIDE_INT);
260 static void validate_condition_mode (enum rtx_code, enum machine_mode);
261 static rtx rs6000_generate_compare (enum rtx_code);
262 static void rs6000_maybe_dead (rtx);
263 static void rs6000_emit_stack_tie (void);
264 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
265 static rtx spe_synthesize_frame_save (rtx);
266 static bool spe_func_has_64bit_regs_p (void);
267 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
269 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
270 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
271 static unsigned rs6000_hash_constant (rtx);
272 static unsigned toc_hash_function (const void *);
273 static int toc_hash_eq (const void *, const void *);
274 static int constant_pool_expr_1 (rtx, int *, int *);
275 static bool constant_pool_expr_p (rtx);
276 static bool toc_relative_expr_p (rtx);
277 static bool legitimate_small_data_p (enum machine_mode, rtx);
278 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
279 static bool legitimate_indexed_address_p (rtx, int);
280 static bool legitimate_indirect_address_p (rtx, int);
281 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
282 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
283 static struct machine_function * rs6000_init_machine_status (void);
284 static bool rs6000_assemble_integer (rtx, unsigned int, int);
285 #ifdef HAVE_GAS_HIDDEN
286 static void rs6000_assemble_visibility (tree, int);
288 static int rs6000_ra_ever_killed (void);
289 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
290 extern const struct attribute_spec rs6000_attribute_table[];
291 static void rs6000_set_default_type_attributes (tree);
292 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
293 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
294 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
296 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
297 static bool rs6000_return_in_memory (tree, tree);
298 static void rs6000_file_start (void);
300 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
301 static void rs6000_elf_asm_out_constructor (rtx, int);
302 static void rs6000_elf_asm_out_destructor (rtx, int);
303 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
304 static void rs6000_elf_unique_section (tree, int);
305 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
306 unsigned HOST_WIDE_INT);
307 static void rs6000_elf_encode_section_info (tree, rtx, int)
309 static bool rs6000_elf_in_small_data_p (tree);
312 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
313 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
314 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
315 static void rs6000_xcoff_unique_section (tree, int);
316 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
317 unsigned HOST_WIDE_INT);
318 static const char * rs6000_xcoff_strip_name_encoding (const char *);
319 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
320 static void rs6000_xcoff_file_start (void);
321 static void rs6000_xcoff_file_end (void);
324 static bool rs6000_binds_local_p (tree);
326 static int rs6000_use_dfa_pipeline_interface (void);
327 static int rs6000_variable_issue (FILE *, int, rtx, int);
328 static bool rs6000_rtx_costs (rtx, int, int, int *);
329 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
330 static bool is_microcoded_insn (rtx);
331 static int is_dispatch_slot_restricted (rtx);
332 static bool is_cracked_insn (rtx);
333 static bool is_branch_slot_insn (rtx);
334 static int rs6000_adjust_priority (rtx, int);
335 static int rs6000_issue_rate (void);
336 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
337 static rtx get_next_active_insn (rtx, rtx);
338 static bool insn_terminates_group_p (rtx , enum group_termination);
339 static bool is_costly_group (rtx *, rtx);
340 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
341 static int redefine_groups (FILE *, int, rtx, rtx);
342 static int pad_groups (FILE *, int, rtx, rtx);
343 static void rs6000_sched_finish (FILE *, int);
344 static int rs6000_use_sched_lookahead (void);
346 static void rs6000_init_builtins (void);
347 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
348 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
349 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
350 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
351 static void altivec_init_builtins (void);
352 static void rs6000_common_init_builtins (void);
353 static void rs6000_init_libfuncs (void);
355 static void enable_mask_for_builtins (struct builtin_description *, int,
356 enum rs6000_builtins,
357 enum rs6000_builtins);
358 static void spe_init_builtins (void);
359 static rtx spe_expand_builtin (tree, rtx, bool *);
360 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
361 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
362 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
363 static rs6000_stack_t *rs6000_stack_info (void);
364 static void debug_stack_info (rs6000_stack_t *);
366 static rtx altivec_expand_builtin (tree, rtx, bool *);
367 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
368 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
369 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
370 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
371 static rtx altivec_expand_predicate_builtin (enum insn_code,
372 const char *, tree, rtx);
373 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
374 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
375 static void rs6000_parse_abi_options (void);
376 static void rs6000_parse_alignment_option (void);
377 static void rs6000_parse_tls_size_option (void);
378 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
379 static int first_altivec_reg_to_save (void);
380 static unsigned int compute_vrsave_mask (void);
381 static void is_altivec_return_reg (rtx, void *);
382 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
383 int easy_vector_constant (rtx, enum machine_mode);
384 static int easy_vector_same (rtx, enum machine_mode);
385 static bool is_ev64_opaque_type (tree);
386 static rtx rs6000_dwarf_register_span (rtx);
387 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
388 static rtx rs6000_tls_get_addr (void);
389 static rtx rs6000_got_sym (void);
390 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
391 static const char *rs6000_get_some_local_dynamic_name (void);
392 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
393 static rtx rs6000_complex_function_value (enum machine_mode);
394 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
395 enum machine_mode, tree);
396 static rtx rs6000_mixed_function_arg (CUMULATIVE_ARGS *,
397 enum machine_mode, tree, int);
398 static void rs6000_move_block_from_reg(int regno, rtx x, int nregs);
399 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
400 enum machine_mode, tree,
403 static void macho_branch_islands (void);
404 static void add_compiler_branch_island (tree, tree, int);
405 static int no_previous_def (tree function_name);
406 static tree get_prev_label (tree function_name);
409 static tree rs6000_build_builtin_va_list (void);
411 /* Hash table stuff for keeping track of TOC entries. */
413 struct toc_hash_struct GTY(())
415 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
416 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
418 enum machine_mode key_mode;
422 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
424 /* Default register names. */
425 char rs6000_reg_names[][8] =
427 "0", "1", "2", "3", "4", "5", "6", "7",
428 "8", "9", "10", "11", "12", "13", "14", "15",
429 "16", "17", "18", "19", "20", "21", "22", "23",
430 "24", "25", "26", "27", "28", "29", "30", "31",
431 "0", "1", "2", "3", "4", "5", "6", "7",
432 "8", "9", "10", "11", "12", "13", "14", "15",
433 "16", "17", "18", "19", "20", "21", "22", "23",
434 "24", "25", "26", "27", "28", "29", "30", "31",
435 "mq", "lr", "ctr","ap",
436 "0", "1", "2", "3", "4", "5", "6", "7",
438 /* AltiVec registers. */
439 "0", "1", "2", "3", "4", "5", "6", "7",
440 "8", "9", "10", "11", "12", "13", "14", "15",
441 "16", "17", "18", "19", "20", "21", "22", "23",
442 "24", "25", "26", "27", "28", "29", "30", "31",
448 #ifdef TARGET_REGNAMES
449 static const char alt_reg_names[][8] =
451 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
452 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
453 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
454 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
455 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
456 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
457 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
458 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
459 "mq", "lr", "ctr", "ap",
460 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
462 /* AltiVec registers. */
463 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
464 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
465 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
466 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
473 #ifndef MASK_STRICT_ALIGN
474 #define MASK_STRICT_ALIGN 0
476 #ifndef TARGET_PROFILE_KERNEL
477 #define TARGET_PROFILE_KERNEL 0
480 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
481 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
483 /* Return 1 for a symbol ref for a thread-local storage symbol. */
484 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
485 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
487 /* Initialize the GCC target structure. */
488 #undef TARGET_ATTRIBUTE_TABLE
489 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
490 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
491 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
493 #undef TARGET_ASM_ALIGNED_DI_OP
494 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
496 /* Default unaligned ops are only provided for ELF. Find the ops needed
497 for non-ELF systems. */
498 #ifndef OBJECT_FORMAT_ELF
500 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
502 #undef TARGET_ASM_UNALIGNED_HI_OP
503 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
504 #undef TARGET_ASM_UNALIGNED_SI_OP
505 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
506 #undef TARGET_ASM_UNALIGNED_DI_OP
507 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
510 #undef TARGET_ASM_UNALIGNED_HI_OP
511 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
512 #undef TARGET_ASM_UNALIGNED_SI_OP
513 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
517 /* This hook deals with fixups for relocatable code and DI-mode objects
519 #undef TARGET_ASM_INTEGER
520 #define TARGET_ASM_INTEGER rs6000_assemble_integer
522 #ifdef HAVE_GAS_HIDDEN
523 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
524 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
527 #undef TARGET_HAVE_TLS
528 #define TARGET_HAVE_TLS HAVE_AS_TLS
530 #undef TARGET_CANNOT_FORCE_CONST_MEM
531 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
533 #undef TARGET_ASM_FUNCTION_PROLOGUE
534 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
535 #undef TARGET_ASM_FUNCTION_EPILOGUE
536 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
538 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
539 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
540 #undef TARGET_SCHED_VARIABLE_ISSUE
541 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
543 #undef TARGET_SCHED_ISSUE_RATE
544 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
545 #undef TARGET_SCHED_ADJUST_COST
546 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
547 #undef TARGET_SCHED_ADJUST_PRIORITY
548 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
549 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
550 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
551 #undef TARGET_SCHED_FINISH
552 #define TARGET_SCHED_FINISH rs6000_sched_finish
554 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
555 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
557 #undef TARGET_INIT_BUILTINS
558 #define TARGET_INIT_BUILTINS rs6000_init_builtins
560 #undef TARGET_EXPAND_BUILTIN
561 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
563 #undef TARGET_INIT_LIBFUNCS
564 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
567 #undef TARGET_BINDS_LOCAL_P
568 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
571 #undef TARGET_ASM_OUTPUT_MI_THUNK
572 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
574 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
575 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
577 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
578 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
580 #undef TARGET_RTX_COSTS
581 #define TARGET_RTX_COSTS rs6000_rtx_costs
582 #undef TARGET_ADDRESS_COST
583 #define TARGET_ADDRESS_COST hook_int_rtx_0
585 #undef TARGET_VECTOR_OPAQUE_P
586 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
588 #undef TARGET_DWARF_REGISTER_SPAN
589 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
591 /* On rs6000, function arguments are promoted, as are function return
593 #undef TARGET_PROMOTE_FUNCTION_ARGS
594 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
595 #undef TARGET_PROMOTE_FUNCTION_RETURN
596 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
598 /* Structure return values are passed as an extra parameter. */
599 #undef TARGET_STRUCT_VALUE_RTX
600 #define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
602 #undef TARGET_RETURN_IN_MEMORY
603 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
605 #undef TARGET_SETUP_INCOMING_VARARGS
606 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
608 /* Always strict argument naming on rs6000. */
609 #undef TARGET_STRICT_ARGUMENT_NAMING
610 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
611 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
612 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
614 #undef TARGET_BUILD_BUILTIN_VA_LIST
615 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
617 struct gcc_target targetm = TARGET_INITIALIZER;
619 /* Override command line options. Mostly we process the processor
620 type and sometimes adjust other TARGET_ options. */
623 rs6000_override_options (const char *default_cpu)
626 struct rs6000_cpu_select *ptr;
629 /* Simplifications for entries below. */
632 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
633 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
636 /* This table occasionally claims that a processor does not support
637 a particular feature even though it does, but the feature is slower
638 than the alternative. Thus, it shouldn't be relied on as a
639 complete description of the processor's support.
641 Please keep this list in order, and don't forget to update the
642 documentation in invoke.texi when adding a new processor or
646 const char *const name; /* Canonical processor name. */
647 const enum processor_type processor; /* Processor type enum value. */
648 const int target_enable; /* Target flags to enable. */
649 } const processor_target_table[]
650 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
651 {"403", PROCESSOR_PPC403,
652 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
653 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
654 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
655 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
656 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
657 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
658 {"601", PROCESSOR_PPC601,
659 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
660 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
661 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
662 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
663 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
664 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
665 {"620", PROCESSOR_PPC620,
666 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
667 {"630", PROCESSOR_PPC630,
668 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
669 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
670 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
671 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
672 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
673 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
674 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
675 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
676 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
677 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
678 {"970", PROCESSOR_POWER4,
679 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
680 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
681 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
682 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
683 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
684 {"G5", PROCESSOR_POWER4,
685 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
686 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
687 {"power2", PROCESSOR_POWER,
688 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
689 {"power3", PROCESSOR_PPC630,
690 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
691 {"power4", PROCESSOR_POWER4,
692 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
693 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
694 {"powerpc64", PROCESSOR_POWERPC64,
695 POWERPC_BASE_MASK | MASK_POWERPC64},
696 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
697 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
698 {"rios2", PROCESSOR_RIOS2,
699 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
700 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
701 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
702 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
705 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
707 /* Save current -mmultiple/-mno-multiple status. */
708 int multiple = TARGET_MULTIPLE;
709 /* Save current -mstring/-mno-string status. */
710 int string = TARGET_STRING;
712 /* Some OSs don't support saving the high part of 64-bit registers on
713 context switch. Other OSs don't support saving Altivec registers.
714 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
715 settings; if the user wants either, the user must explicitly specify
716 them and we won't interfere with the user's specification. */
719 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
720 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
721 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
724 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
725 #ifdef OS_MISSING_POWERPC64
726 if (OS_MISSING_POWERPC64)
727 set_masks &= ~MASK_POWERPC64;
729 #ifdef OS_MISSING_ALTIVEC
730 if (OS_MISSING_ALTIVEC)
731 set_masks &= ~MASK_ALTIVEC;
734 /* Identify the processor type. */
735 rs6000_select[0].string = default_cpu;
736 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
738 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
740 ptr = &rs6000_select[i];
741 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
743 for (j = 0; j < ptt_size; j++)
744 if (! strcmp (ptr->string, processor_target_table[j].name))
747 rs6000_cpu = processor_target_table[j].processor;
751 target_flags &= ~set_masks;
752 target_flags |= (processor_target_table[j].target_enable
759 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
766 /* If we are optimizing big endian systems for space, use the load/store
767 multiple and string instructions. */
768 if (BYTES_BIG_ENDIAN && optimize_size)
769 target_flags |= MASK_MULTIPLE | MASK_STRING;
771 /* If -mmultiple or -mno-multiple was explicitly used, don't
772 override with the processor default */
773 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
774 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
776 /* If -mstring or -mno-string was explicitly used, don't override
777 with the processor default. */
778 if ((target_flags_explicit & MASK_STRING) != 0)
779 target_flags = (target_flags & ~MASK_STRING) | string;
781 /* Don't allow -mmultiple or -mstring on little endian systems
782 unless the cpu is a 750, because the hardware doesn't support the
783 instructions used in little endian mode, and causes an alignment
784 trap. The 750 does not cause an alignment trap (except when the
785 target is unaligned). */
787 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
791 target_flags &= ~MASK_MULTIPLE;
792 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
793 warning ("-mmultiple is not supported on little endian systems");
798 target_flags &= ~MASK_STRING;
799 if ((target_flags_explicit & MASK_STRING) != 0)
800 warning ("-mstring is not supported on little endian systems");
804 /* Set debug flags */
805 if (rs6000_debug_name)
807 if (! strcmp (rs6000_debug_name, "all"))
808 rs6000_debug_stack = rs6000_debug_arg = 1;
809 else if (! strcmp (rs6000_debug_name, "stack"))
810 rs6000_debug_stack = 1;
811 else if (! strcmp (rs6000_debug_name, "arg"))
812 rs6000_debug_arg = 1;
814 error ("unknown -mdebug-%s switch", rs6000_debug_name);
817 if (rs6000_traceback_name)
819 if (! strncmp (rs6000_traceback_name, "full", 4))
820 rs6000_traceback = traceback_full;
821 else if (! strncmp (rs6000_traceback_name, "part", 4))
822 rs6000_traceback = traceback_part;
823 else if (! strncmp (rs6000_traceback_name, "no", 2))
824 rs6000_traceback = traceback_none;
826 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
827 rs6000_traceback_name);
830 /* Set size of long double */
831 rs6000_long_double_type_size = 64;
832 if (rs6000_long_double_size_string)
835 int size = strtol (rs6000_long_double_size_string, &tail, 10);
836 if (*tail != '\0' || (size != 64 && size != 128))
837 error ("Unknown switch -mlong-double-%s",
838 rs6000_long_double_size_string);
840 rs6000_long_double_type_size = size;
843 /* Handle -mabi= options. */
844 rs6000_parse_abi_options ();
846 /* Handle -malign-XXXXX option. */
847 rs6000_parse_alignment_option ();
849 /* Handle generic -mFOO=YES/NO options. */
850 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
851 &rs6000_altivec_vrsave);
852 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
854 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
855 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
858 /* Handle -mtls-size option. */
859 rs6000_parse_tls_size_option ();
861 #ifdef SUBTARGET_OVERRIDE_OPTIONS
862 SUBTARGET_OVERRIDE_OPTIONS;
864 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
865 SUBSUBTARGET_OVERRIDE_OPTIONS;
870 /* The e500 does not have string instructions, and we set
871 MASK_STRING above when optimizing for size. */
872 if ((target_flags & MASK_STRING) != 0)
873 target_flags = target_flags & ~MASK_STRING;
875 /* No SPE means 64-bit long doubles, even if an E500. */
876 if (rs6000_spe_string != 0
877 && !strcmp (rs6000_spe_string, "no"))
878 rs6000_long_double_type_size = 64;
880 else if (rs6000_select[1].string != NULL)
882 /* For the powerpc-eabispe configuration, we set all these by
883 default, so let's unset them if we manually set another
884 CPU that is not the E500. */
885 if (rs6000_abi_string == 0)
887 if (rs6000_spe_string == 0)
889 if (rs6000_float_gprs_string == 0)
890 rs6000_float_gprs = 0;
891 if (rs6000_isel_string == 0)
893 if (rs6000_long_double_size_string == 0)
894 rs6000_long_double_type_size = 64;
897 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
898 using TARGET_OPTIONS to handle a toggle switch, but we're out of
899 bits in target_flags so TARGET_SWITCHES cannot be used.
900 Assumption here is that rs6000_longcall_switch points into the
901 text of the complete option, rather than being a copy, so we can
902 scan back for the presence or absence of the no- modifier. */
903 if (rs6000_longcall_switch)
905 const char *base = rs6000_longcall_switch;
906 while (base[-1] != 'm') base--;
908 if (*rs6000_longcall_switch != '\0')
909 error ("invalid option `%s'", base);
910 rs6000_default_long_calls = (base[0] != 'n');
913 /* Handle -mprioritize-restricted-insns option. */
914 rs6000_sched_restricted_insns_priority = DEFAULT_RESTRICTED_INSNS_PRIORITY;
915 if (rs6000_sched_restricted_insns_priority_str)
916 rs6000_sched_restricted_insns_priority =
917 atoi (rs6000_sched_restricted_insns_priority_str);
919 /* Handle -msched-costly-dep option. */
920 rs6000_sched_costly_dep = DEFAULT_SCHED_COSTLY_DEP;
921 if (rs6000_sched_costly_dep_str)
923 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
924 rs6000_sched_costly_dep = no_dep_costly;
925 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
926 rs6000_sched_costly_dep = all_deps_costly;
927 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
928 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
929 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
930 rs6000_sched_costly_dep = store_to_load_dep_costly;
932 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
935 /* Handle -minsert-sched-nops option. */
936 rs6000_sched_insert_nops = DEFAULT_SCHED_FINISH_NOP_INSERTION_SCHEME;
937 if (rs6000_sched_insert_nops_str)
939 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
940 rs6000_sched_insert_nops = sched_finish_none;
941 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
942 rs6000_sched_insert_nops = sched_finish_pad_groups;
943 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
944 rs6000_sched_insert_nops = sched_finish_regroup_exact;
946 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
949 #ifdef TARGET_REGNAMES
950 /* If the user desires alternate register names, copy in the
951 alternate names now. */
953 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
956 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
957 If -maix-struct-return or -msvr4-struct-return was explicitly
958 used, don't override with the ABI default. */
959 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
961 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
962 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
964 target_flags |= MASK_AIX_STRUCT_RET;
967 if (TARGET_LONG_DOUBLE_128
968 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
969 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
971 /* Allocate an alias set for register saves & restores from stack. */
972 rs6000_sr_alias_set = new_alias_set ();
975 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
977 /* We can only guarantee the availability of DI pseudo-ops when
978 assembling for 64-bit targets. */
981 targetm.asm_out.aligned_op.di = NULL;
982 targetm.asm_out.unaligned_op.di = NULL;
985 /* Set maximum branch target alignment at two instructions, eight bytes. */
986 align_jumps_max_skip = 8;
987 align_loops_max_skip = 8;
989 /* Arrange to save and restore machine status around nested functions. */
990 init_machine_status = rs6000_init_machine_status;
993 /* Handle generic options of the form -mfoo=yes/no.
994 NAME is the option name.
995 VALUE is the option value.
996 FLAG is the pointer to the flag where to store a 1 or 0, depending on
997 whether the option value is 'yes' or 'no' respectively. */
999 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1003 else if (!strcmp (value, "yes"))
1005 else if (!strcmp (value, "no"))
1008 error ("unknown -m%s= option specified: '%s'", name, value);
1011 /* Handle -mabi= options. */
1013 rs6000_parse_abi_options (void)
1015 if (rs6000_abi_string == 0)
1017 else if (! strcmp (rs6000_abi_string, "altivec"))
1018 rs6000_altivec_abi = 1;
1019 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1020 rs6000_altivec_abi = 0;
1021 else if (! strcmp (rs6000_abi_string, "spe"))
1024 if (!TARGET_SPE_ABI)
1025 error ("not configured for ABI: '%s'", rs6000_abi_string);
1028 else if (! strcmp (rs6000_abi_string, "no-spe"))
1031 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1034 /* Handle -malign-XXXXXX options. */
1036 rs6000_parse_alignment_option (void)
1038 if (rs6000_alignment_string == 0)
1040 else if (! strcmp (rs6000_alignment_string, "power"))
1041 rs6000_alignment_flags = MASK_ALIGN_POWER;
1042 else if (! strcmp (rs6000_alignment_string, "natural"))
1043 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1045 error ("unknown -malign-XXXXX option specified: '%s'",
1046 rs6000_alignment_string);
1049 /* Validate and record the size specified with the -mtls-size option. */
1052 rs6000_parse_tls_size_option (void)
1054 if (rs6000_tls_size_string == 0)
1056 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1057 rs6000_tls_size = 16;
1058 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1059 rs6000_tls_size = 32;
1060 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1061 rs6000_tls_size = 64;
1063 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1067 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1071 /* Do anything needed at the start of the asm file. */
1074 rs6000_file_start (void)
1078 const char *start = buffer;
1079 struct rs6000_cpu_select *ptr;
1080 const char *default_cpu = TARGET_CPU_DEFAULT;
1081 FILE *file = asm_out_file;
1083 default_file_start ();
1085 #ifdef TARGET_BI_ARCH
1086 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1090 if (flag_verbose_asm)
1092 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1093 rs6000_select[0].string = default_cpu;
1095 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1097 ptr = &rs6000_select[i];
1098 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1100 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1105 #ifdef USING_ELFOS_H
1106 switch (rs6000_sdata)
1108 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1109 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1110 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1111 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1114 if (rs6000_sdata && g_switch_value)
1116 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1127 /* Return nonzero if this function is known to have a null epilogue. */
1130 direct_return (void)
1132 if (reload_completed)
1134 rs6000_stack_t *info = rs6000_stack_info ();
1136 if (info->first_gp_reg_save == 32
1137 && info->first_fp_reg_save == 64
1138 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1139 && ! info->lr_save_p
1140 && ! info->cr_save_p
1141 && info->vrsave_mask == 0
1149 /* Returns 1 always. */
1152 any_operand (rtx op ATTRIBUTE_UNUSED,
1153 enum machine_mode mode ATTRIBUTE_UNUSED)
1158 /* Returns 1 if op is the count register. */
1160 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1162 if (GET_CODE (op) != REG)
1165 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1168 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1174 /* Returns 1 if op is an altivec register. */
1176 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1179 return (register_operand (op, mode)
1180 && (GET_CODE (op) != REG
1181 || REGNO (op) > FIRST_PSEUDO_REGISTER
1182 || ALTIVEC_REGNO_P (REGNO (op))));
1186 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1188 if (GET_CODE (op) != REG)
1191 if (XER_REGNO_P (REGNO (op)))
1197 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1198 by such constants completes more quickly. */
1201 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1203 return ( GET_CODE (op) == CONST_INT
1204 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1207 /* Return 1 if OP is a constant that can fit in a D field. */
1210 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1212 return (GET_CODE (op) == CONST_INT
1213 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1216 /* Similar for an unsigned D field. */
1219 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1221 return (GET_CODE (op) == CONST_INT
1222 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1225 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1228 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1230 return (GET_CODE (op) == CONST_INT
1231 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1234 /* Returns 1 if OP is a CONST_INT that is a positive value
1235 and an exact power of 2. */
1238 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1240 return (GET_CODE (op) == CONST_INT
1242 && exact_log2 (INTVAL (op)) >= 0);
1245 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1249 gpc_reg_operand (rtx op, enum machine_mode mode)
1251 return (register_operand (op, mode)
1252 && (GET_CODE (op) != REG
1253 || (REGNO (op) >= ARG_POINTER_REGNUM
1254 && !XER_REGNO_P (REGNO (op)))
1255 || REGNO (op) < MQ_REGNO));
1258 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1262 cc_reg_operand (rtx op, enum machine_mode mode)
1264 return (register_operand (op, mode)
1265 && (GET_CODE (op) != REG
1266 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1267 || CR_REGNO_P (REGNO (op))));
1270 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1271 CR field that isn't CR0. */
1274 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1276 return (register_operand (op, mode)
1277 && (GET_CODE (op) != REG
1278 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1279 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1282 /* Returns 1 if OP is either a constant integer valid for a D-field or
1283 a non-special register. If a register, it must be in the proper
1284 mode unless MODE is VOIDmode. */
1287 reg_or_short_operand (rtx op, enum machine_mode mode)
1289 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1292 /* Similar, except check if the negation of the constant would be
1293 valid for a D-field. */
1296 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1298 if (GET_CODE (op) == CONST_INT)
1299 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1301 return gpc_reg_operand (op, mode);
1304 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1305 a non-special register. If a register, it must be in the proper
1306 mode unless MODE is VOIDmode. */
1309 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1311 if (gpc_reg_operand (op, mode))
1313 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1320 /* Return 1 if the operand is either a register or an integer whose
1321 high-order 16 bits are zero. */
1324 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1326 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1329 /* Return 1 is the operand is either a non-special register or ANY
1330 constant integer. */
1333 reg_or_cint_operand (rtx op, enum machine_mode mode)
1335 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1338 /* Return 1 is the operand is either a non-special register or ANY
1339 32-bit signed constant integer. */
1342 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1344 return (gpc_reg_operand (op, mode)
1345 || (GET_CODE (op) == CONST_INT
1346 #if HOST_BITS_PER_WIDE_INT != 32
1347 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1348 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1353 /* Return 1 is the operand is either a non-special register or a 32-bit
1354 signed constant integer valid for 64-bit addition. */
1357 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1359 return (gpc_reg_operand (op, mode)
1360 || (GET_CODE (op) == CONST_INT
1361 #if HOST_BITS_PER_WIDE_INT == 32
1362 && INTVAL (op) < 0x7fff8000
1364 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1370 /* Return 1 is the operand is either a non-special register or a 32-bit
1371 signed constant integer valid for 64-bit subtraction. */
1374 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1376 return (gpc_reg_operand (op, mode)
1377 || (GET_CODE (op) == CONST_INT
1378 #if HOST_BITS_PER_WIDE_INT == 32
1379 && (- INTVAL (op)) < 0x7fff8000
1381 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1387 /* Return 1 is the operand is either a non-special register or ANY
1388 32-bit unsigned constant integer. */
1391 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1393 if (GET_CODE (op) == CONST_INT)
1395 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1397 if (GET_MODE_BITSIZE (mode) <= 32)
1400 if (INTVAL (op) < 0)
1404 return ((INTVAL (op) & GET_MODE_MASK (mode)
1405 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1407 else if (GET_CODE (op) == CONST_DOUBLE)
1409 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1413 return CONST_DOUBLE_HIGH (op) == 0;
1416 return gpc_reg_operand (op, mode);
1419 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1422 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1424 return (GET_CODE (op) == SYMBOL_REF
1425 || GET_CODE (op) == CONST
1426 || GET_CODE (op) == LABEL_REF);
1429 /* Return 1 if the operand is a simple references that can be loaded via
1430 the GOT (labels involving addition aren't allowed). */
1433 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1435 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1438 /* Return the number of instructions it takes to form a constant in an
1439 integer register. */
1442 num_insns_constant_wide (HOST_WIDE_INT value)
1444 /* signed constant loadable with {cal|addi} */
1445 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1448 /* constant loadable with {cau|addis} */
1449 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1452 #if HOST_BITS_PER_WIDE_INT == 64
1453 else if (TARGET_POWERPC64)
1455 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1456 HOST_WIDE_INT high = value >> 31;
1458 if (high == 0 || high == -1)
1464 return num_insns_constant_wide (high) + 1;
1466 return (num_insns_constant_wide (high)
1467 + num_insns_constant_wide (low) + 1);
1476 num_insns_constant (rtx op, enum machine_mode mode)
1478 if (GET_CODE (op) == CONST_INT)
1480 #if HOST_BITS_PER_WIDE_INT == 64
1481 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1482 && mask64_operand (op, mode))
1486 return num_insns_constant_wide (INTVAL (op));
1489 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1494 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1495 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1496 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1499 else if (GET_CODE (op) == CONST_DOUBLE)
1505 int endian = (WORDS_BIG_ENDIAN == 0);
1507 if (mode == VOIDmode || mode == DImode)
1509 high = CONST_DOUBLE_HIGH (op);
1510 low = CONST_DOUBLE_LOW (op);
1514 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1515 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1517 low = l[1 - endian];
1521 return (num_insns_constant_wide (low)
1522 + num_insns_constant_wide (high));
1526 if (high == 0 && low >= 0)
1527 return num_insns_constant_wide (low);
1529 else if (high == -1 && low < 0)
1530 return num_insns_constant_wide (low);
1532 else if (mask64_operand (op, mode))
1536 return num_insns_constant_wide (high) + 1;
1539 return (num_insns_constant_wide (high)
1540 + num_insns_constant_wide (low) + 1);
1548 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1549 register with one instruction per word. We only do this if we can
1550 safely read CONST_DOUBLE_{LOW,HIGH}. */
1553 easy_fp_constant (rtx op, enum machine_mode mode)
1555 if (GET_CODE (op) != CONST_DOUBLE
1556 || GET_MODE (op) != mode
1557 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1560 /* Consider all constants with -msoft-float to be easy. */
1561 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1565 /* If we are using V.4 style PIC, consider all constants to be hard. */
1566 if (flag_pic && DEFAULT_ABI == ABI_V4)
1569 #ifdef TARGET_RELOCATABLE
1570 /* Similarly if we are using -mrelocatable, consider all constants
1572 if (TARGET_RELOCATABLE)
1581 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1582 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1584 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1585 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1586 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1587 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1590 else if (mode == DFmode)
1595 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1596 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1598 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1599 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1602 else if (mode == SFmode)
1607 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1608 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1610 return num_insns_constant_wide (l) == 1;
1613 else if (mode == DImode)
1614 return ((TARGET_POWERPC64
1615 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1616 || (num_insns_constant (op, DImode) <= 2));
1618 else if (mode == SImode)
1624 /* Return nonzero if all elements of a vector have the same value. */
1627 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1631 units = CONST_VECTOR_NUNITS (op);
1633 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1634 for (i = 1; i < units; ++i)
1635 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1642 /* Return 1 if the operand is a CONST_INT and can be put into a
1643 register without using memory. */
1646 easy_vector_constant (rtx op, enum machine_mode mode)
1650 if (GET_CODE (op) != CONST_VECTOR
1655 if (zero_constant (op, mode)
1656 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1657 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1660 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1663 if (TARGET_SPE && mode == V1DImode)
1666 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1667 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1669 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1671 evmergelo r0, r0, r0
1674 I don't know how efficient it would be to allow bigger constants,
1675 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1676 instructions is better than a 64-bit memory load, but I don't
1677 have the e500 timing specs. */
1678 if (TARGET_SPE && mode == V2SImode
1679 && cst >= -0x7fff && cst <= 0x7fff
1680 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1687 if (EASY_VECTOR_15 (cst, op, mode))
1689 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
1693 if (EASY_VECTOR_15 (cst, op, mode))
1695 if ((cst & 0xff) != ((cst >> 8) & 0xff))
1699 if (EASY_VECTOR_15 (cst, op, mode))
1705 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1711 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1714 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1718 if (!easy_vector_constant (op, mode))
1721 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1723 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1727 output_vec_const_move (rtx *operands)
1730 enum machine_mode mode;
1736 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1737 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1738 mode = GET_MODE (dest);
1742 if (zero_constant (vec, mode))
1743 return "vxor %0,%0,%0";
1744 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1746 else if (easy_vector_constant (vec, mode))
1748 operands[1] = GEN_INT (cst);
1752 if (EASY_VECTOR_15 (cst, vec, mode))
1754 operands[1] = GEN_INT (cst);
1755 return "vspltisw %0,%1";
1759 if (EASY_VECTOR_15 (cst, vec, mode))
1761 operands[1] = GEN_INT (cst);
1762 return "vspltish %0,%1";
1766 if (EASY_VECTOR_15 (cst, vec, mode))
1768 operands[1] = GEN_INT (cst);
1769 return "vspltisb %0,%1";
1781 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1782 pattern of V1DI, V4HI, and V2SF.
1784 FIXME: We should probably return # and add post reload
1785 splitters for these, but this way is so easy ;-).
1787 operands[1] = GEN_INT (cst);
1788 operands[2] = GEN_INT (cst2);
1790 return "li %0,%1\n\tevmergelo %0,%0,%0";
1792 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1798 /* Return 1 if the operand is the constant 0. This works for scalars
1799 as well as vectors. */
1801 zero_constant (rtx op, enum machine_mode mode)
1803 return op == CONST0_RTX (mode);
1806 /* Return 1 if the operand is 0.0. */
1808 zero_fp_constant (rtx op, enum machine_mode mode)
1810 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1813 /* Return 1 if the operand is in volatile memory. Note that during
1814 the RTL generation phase, memory_operand does not return TRUE for
1815 volatile memory references. So this function allows us to
1816 recognize volatile references where its safe. */
1819 volatile_mem_operand (rtx op, enum machine_mode mode)
1821 if (GET_CODE (op) != MEM)
1824 if (!MEM_VOLATILE_P (op))
1827 if (mode != GET_MODE (op))
1830 if (reload_completed)
1831 return memory_operand (op, mode);
1833 if (reload_in_progress)
1834 return strict_memory_address_p (mode, XEXP (op, 0));
1836 return memory_address_p (mode, XEXP (op, 0));
1839 /* Return 1 if the operand is an offsettable memory operand. */
1842 offsettable_mem_operand (rtx op, enum machine_mode mode)
1844 return ((GET_CODE (op) == MEM)
1845 && offsettable_address_p (reload_completed || reload_in_progress,
1846 mode, XEXP (op, 0)));
1849 /* Return 1 if the operand is either an easy FP constant (see above) or
1853 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1855 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1858 /* Return 1 if the operand is either a non-special register or an item
1859 that can be used as the operand of a `mode' add insn. */
1862 add_operand (rtx op, enum machine_mode mode)
1864 if (GET_CODE (op) == CONST_INT)
1865 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1866 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1868 return gpc_reg_operand (op, mode);
1871 /* Return 1 if OP is a constant but not a valid add_operand. */
1874 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1876 return (GET_CODE (op) == CONST_INT
1877 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1878 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1881 /* Return 1 if the operand is a non-special register or a constant that
1882 can be used as the operand of an OR or XOR insn on the RS/6000. */
1885 logical_operand (rtx op, enum machine_mode mode)
1887 HOST_WIDE_INT opl, oph;
1889 if (gpc_reg_operand (op, mode))
1892 if (GET_CODE (op) == CONST_INT)
1894 opl = INTVAL (op) & GET_MODE_MASK (mode);
1896 #if HOST_BITS_PER_WIDE_INT <= 32
1897 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1901 else if (GET_CODE (op) == CONST_DOUBLE)
1903 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1906 opl = CONST_DOUBLE_LOW (op);
1907 oph = CONST_DOUBLE_HIGH (op);
1914 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1915 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1918 /* Return 1 if C is a constant that is not a logical operand (as
1919 above), but could be split into one. */
1922 non_logical_cint_operand (rtx op, enum machine_mode mode)
1924 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1925 && ! logical_operand (op, mode)
1926 && reg_or_logical_cint_operand (op, mode));
1929 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1930 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1931 Reject all ones and all zeros, since these should have been optimized
1932 away and confuse the making of MB and ME. */
1935 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1937 HOST_WIDE_INT c, lsb;
1939 if (GET_CODE (op) != CONST_INT)
1944 /* Fail in 64-bit mode if the mask wraps around because the upper
1945 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1946 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1949 /* We don't change the number of transitions by inverting,
1950 so make sure we start with the LS bit zero. */
1954 /* Reject all zeros or all ones. */
1958 /* Find the first transition. */
1961 /* Invert to look for a second transition. */
1964 /* Erase first transition. */
1967 /* Find the second transition (if any). */
1970 /* Match if all the bits above are 1's (or c is zero). */
1974 /* Return 1 for the PowerPC64 rlwinm corner case. */
1977 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1979 HOST_WIDE_INT c, lsb;
1981 if (GET_CODE (op) != CONST_INT)
1986 if ((c & 0x80000001) != 0x80000001)
2000 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2001 It is if there are no more than one 1->0 or 0->1 transitions.
2002 Reject all zeros, since zero should have been optimized away and
2003 confuses the making of MB and ME. */
2006 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2008 if (GET_CODE (op) == CONST_INT)
2010 HOST_WIDE_INT c, lsb;
2014 /* Reject all zeros. */
2018 /* We don't change the number of transitions by inverting,
2019 so make sure we start with the LS bit zero. */
2023 /* Find the transition, and check that all bits above are 1's. */
2026 /* Match if all the bits above are 1's (or c is zero). */
2032 /* Like mask64_operand, but allow up to three transitions. This
2033 predicate is used by insn patterns that generate two rldicl or
2034 rldicr machine insns. */
2037 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2039 if (GET_CODE (op) == CONST_INT)
2041 HOST_WIDE_INT c, lsb;
2045 /* Disallow all zeros. */
2049 /* We don't change the number of transitions by inverting,
2050 so make sure we start with the LS bit zero. */
2054 /* Find the first transition. */
2057 /* Invert to look for a second transition. */
2060 /* Erase first transition. */
2063 /* Find the second transition. */
2066 /* Invert to look for a third transition. */
2069 /* Erase second transition. */
2072 /* Find the third transition (if any). */
2075 /* Match if all the bits above are 1's (or c is zero). */
2081 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2082 implement ANDing by the mask IN. */
2084 build_mask64_2_operands (rtx in, rtx *out)
2086 #if HOST_BITS_PER_WIDE_INT >= 64
2087 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2090 if (GET_CODE (in) != CONST_INT)
2096 /* Assume c initially something like 0x00fff000000fffff. The idea
2097 is to rotate the word so that the middle ^^^^^^ group of zeros
2098 is at the MS end and can be cleared with an rldicl mask. We then
2099 rotate back and clear off the MS ^^ group of zeros with a
2101 c = ~c; /* c == 0xff000ffffff00000 */
2102 lsb = c & -c; /* lsb == 0x0000000000100000 */
2103 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2104 c = ~c; /* c == 0x00fff000000fffff */
2105 c &= -lsb; /* c == 0x00fff00000000000 */
2106 lsb = c & -c; /* lsb == 0x0000100000000000 */
2107 c = ~c; /* c == 0xff000fffffffffff */
2108 c &= -lsb; /* c == 0xff00000000000000 */
2110 while ((lsb >>= 1) != 0)
2111 shift++; /* shift == 44 on exit from loop */
2112 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2113 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2114 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2118 /* Assume c initially something like 0xff000f0000000000. The idea
2119 is to rotate the word so that the ^^^ middle group of zeros
2120 is at the LS end and can be cleared with an rldicr mask. We then
2121 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2123 lsb = c & -c; /* lsb == 0x0000010000000000 */
2124 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2125 c = ~c; /* c == 0x00fff0ffffffffff */
2126 c &= -lsb; /* c == 0x00fff00000000000 */
2127 lsb = c & -c; /* lsb == 0x0000100000000000 */
2128 c = ~c; /* c == 0xff000fffffffffff */
2129 c &= -lsb; /* c == 0xff00000000000000 */
2131 while ((lsb >>= 1) != 0)
2132 shift++; /* shift == 44 on exit from loop */
2133 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2134 m1 >>= shift; /* m1 == 0x0000000000000fff */
2135 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2138 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2139 masks will be all 1's. We are guaranteed more than one transition. */
2140 out[0] = GEN_INT (64 - shift);
2141 out[1] = GEN_INT (m1);
2142 out[2] = GEN_INT (shift);
2143 out[3] = GEN_INT (m2);
2151 /* Return 1 if the operand is either a non-special register or a constant
2152 that can be used as the operand of a PowerPC64 logical AND insn. */
2155 and64_operand (rtx op, enum machine_mode mode)
2157 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2158 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2160 return (logical_operand (op, mode) || mask64_operand (op, mode));
2163 /* Like the above, but also match constants that can be implemented
2164 with two rldicl or rldicr insns. */
2167 and64_2_operand (rtx op, enum machine_mode mode)
2169 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2170 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2172 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2175 /* Return 1 if the operand is either a non-special register or a
2176 constant that can be used as the operand of an RS/6000 logical AND insn. */
2179 and_operand (rtx op, enum machine_mode mode)
2181 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2182 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2184 return (logical_operand (op, mode) || mask_operand (op, mode));
2187 /* Return 1 if the operand is a general register or memory operand. */
2190 reg_or_mem_operand (rtx op, enum machine_mode mode)
2192 return (gpc_reg_operand (op, mode)
2193 || memory_operand (op, mode)
2194 || macho_lo_sum_memory_operand (op, mode)
2195 || volatile_mem_operand (op, mode));
2198 /* Return 1 if the operand is a general register or memory operand without
2199 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2203 lwa_operand (rtx op, enum machine_mode mode)
2207 if (reload_completed && GET_CODE (inner) == SUBREG)
2208 inner = SUBREG_REG (inner);
2210 return gpc_reg_operand (inner, mode)
2211 || (memory_operand (inner, mode)
2212 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2213 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2214 && (GET_CODE (XEXP (inner, 0)) != PLUS
2215 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2216 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2219 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2222 symbol_ref_operand (rtx op, enum machine_mode mode)
2224 if (mode != VOIDmode && GET_MODE (op) != mode)
2227 return (GET_CODE (op) == SYMBOL_REF
2228 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2231 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2232 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2235 call_operand (rtx op, enum machine_mode mode)
2237 if (mode != VOIDmode && GET_MODE (op) != mode)
2240 return (GET_CODE (op) == SYMBOL_REF
2241 || (GET_CODE (op) == REG
2242 && (REGNO (op) == LINK_REGISTER_REGNUM
2243 || REGNO (op) == COUNT_REGISTER_REGNUM
2244 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2247 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2251 current_file_function_operand (rtx op,
2252 enum machine_mode mode ATTRIBUTE_UNUSED)
2254 return (GET_CODE (op) == SYMBOL_REF
2255 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2256 && (SYMBOL_REF_LOCAL_P (op)
2257 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2260 /* Return 1 if this operand is a valid input for a move insn. */
2263 input_operand (rtx op, enum machine_mode mode)
2265 /* Memory is always valid. */
2266 if (memory_operand (op, mode))
2269 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2270 if (GET_CODE (op) == CONSTANT_P_RTX)
2273 /* For floating-point, easy constants are valid. */
2274 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2276 && easy_fp_constant (op, mode))
2279 /* Allow any integer constant. */
2280 if (GET_MODE_CLASS (mode) == MODE_INT
2281 && (GET_CODE (op) == CONST_INT
2282 || GET_CODE (op) == CONST_DOUBLE))
2285 /* Allow easy vector constants. */
2286 if (GET_CODE (op) == CONST_VECTOR
2287 && easy_vector_constant (op, mode))
2290 /* For floating-point or multi-word mode, the only remaining valid type
2292 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2293 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2294 return register_operand (op, mode);
2296 /* The only cases left are integral modes one word or smaller (we
2297 do not get called for MODE_CC values). These can be in any
2299 if (register_operand (op, mode))
2302 /* A SYMBOL_REF referring to the TOC is valid. */
2303 if (legitimate_constant_pool_address_p (op))
2306 /* A constant pool expression (relative to the TOC) is valid */
2307 if (toc_relative_expr_p (op))
2310 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2312 if (DEFAULT_ABI == ABI_V4
2313 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2314 && small_data_operand (op, Pmode))
2321 /* Darwin, AIX increases natural record alignment to doubleword if the first
2322 field is an FP double while the FP fields remain word aligned. */
2325 rs6000_special_round_type_align (tree type, int computed, int specified)
2327 tree field = TYPE_FIELDS (type);
2329 /* Skip all the static variables only if ABI is greater than
2331 while (field != NULL && TREE_CODE (field) == VAR_DECL)
2332 field = TREE_CHAIN (field);
2334 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2335 return MAX (computed, specified);
2337 return MAX (MAX (computed, specified), 64);
2340 /* Return 1 for an operand in small memory on V.4/eabi. */
2343 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2344 enum machine_mode mode ATTRIBUTE_UNUSED)
2349 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2352 if (DEFAULT_ABI != ABI_V4)
2355 if (GET_CODE (op) == SYMBOL_REF)
2358 else if (GET_CODE (op) != CONST
2359 || GET_CODE (XEXP (op, 0)) != PLUS
2360 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2361 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2366 rtx sum = XEXP (op, 0);
2367 HOST_WIDE_INT summand;
2369 /* We have to be careful here, because it is the referenced address
2370 that must be 32k from _SDA_BASE_, not just the symbol. */
2371 summand = INTVAL (XEXP (sum, 1));
2372 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2375 sym_ref = XEXP (sum, 0);
2378 return SYMBOL_REF_SMALL_P (sym_ref);
2384 /* Return true, if operand is a memory operand and has a
2385 displacement divisible by 4. */
2388 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2393 if (!memory_operand (op, mode))
2396 addr = XEXP (op, 0);
2397 if (GET_CODE (addr) == PLUS
2398 && GET_CODE (XEXP (addr, 0)) == REG
2399 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2400 off = INTVAL (XEXP (addr, 1));
2402 return (off % 4) == 0;
2405 /* Return true if either operand is a general purpose register. */
2408 gpr_or_gpr_p (rtx op0, rtx op1)
2410 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2411 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2415 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2418 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2420 switch (GET_CODE(op))
2423 if (RS6000_SYMBOL_REF_TLS_P (op))
2425 else if (CONSTANT_POOL_ADDRESS_P (op))
2427 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2435 else if (! strcmp (XSTR (op, 0), toc_label_name))
2444 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2445 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2447 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2456 constant_pool_expr_p (rtx op)
2460 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2464 toc_relative_expr_p (rtx op)
2468 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2471 /* SPE offset addressing is limited to 5-bits worth of double words. */
2472 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2475 legitimate_constant_pool_address_p (rtx x)
2478 && GET_CODE (x) == PLUS
2479 && GET_CODE (XEXP (x, 0)) == REG
2480 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2481 && constant_pool_expr_p (XEXP (x, 1)));
2485 legitimate_small_data_p (enum machine_mode mode, rtx x)
2487 return (DEFAULT_ABI == ABI_V4
2488 && !flag_pic && !TARGET_TOC
2489 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2490 && small_data_operand (x, mode));
2494 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2496 unsigned HOST_WIDE_INT offset, extra;
2498 if (GET_CODE (x) != PLUS)
2500 if (GET_CODE (XEXP (x, 0)) != REG)
2502 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2504 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2507 offset = INTVAL (XEXP (x, 1));
2515 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2516 which leaves the only valid constant offset of zero, which by
2517 canonicalization rules is also invalid. */
2524 /* SPE vector modes. */
2525 return SPE_CONST_OFFSET_OK (offset);
2529 if (mode == DFmode || !TARGET_POWERPC64)
2531 else if (offset & 3)
2537 if (mode == TFmode || !TARGET_POWERPC64)
2539 else if (offset & 3)
2549 return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
2553 legitimate_indexed_address_p (rtx x, int strict)
2557 if (GET_CODE (x) != PLUS)
2562 if (!REG_P (op0) || !REG_P (op1))
2565 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2566 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2567 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2568 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2572 legitimate_indirect_address_p (rtx x, int strict)
2574 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2578 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2580 if (!TARGET_MACHO || !flag_pic
2581 || mode != SImode || GET_CODE(x) != MEM)
2585 if (GET_CODE (x) != LO_SUM)
2587 if (GET_CODE (XEXP (x, 0)) != REG)
2589 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2593 return CONSTANT_P (x);
2597 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2599 if (GET_CODE (x) != LO_SUM)
2601 if (GET_CODE (XEXP (x, 0)) != REG)
2603 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2607 if (TARGET_ELF || TARGET_MACHO)
2609 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2613 if (GET_MODE_NUNITS (mode) != 1)
2615 if (GET_MODE_BITSIZE (mode) > 32
2616 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2619 return CONSTANT_P (x);
2626 /* Try machine-dependent ways of modifying an illegitimate address
2627 to be legitimate. If we find one, return the new, valid address.
2628 This is used from only one place: `memory_address' in explow.c.
2630 OLDX is the address as it was before break_out_memory_refs was
2631 called. In some cases it is useful to look at this to decide what
2634 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2636 It is always safe for this function to do nothing. It exists to
2637 recognize opportunities to optimize the output.
2639 On RS/6000, first check for the sum of a register with a constant
2640 integer that is out of range. If so, generate code to add the
2641 constant with the low-order 16 bits masked to the register and force
2642 this result into another register (this can be done with `cau').
2643 Then generate an address of REG+(CONST&0xffff), allowing for the
2644 possibility of bit 16 being a one.
2646 Then check for the sum of a register and something not constant, try to
2647 load the other things into a register and return the sum. */
2650 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2651 enum machine_mode mode)
2653 if (GET_CODE (x) == SYMBOL_REF)
2655 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2657 return rs6000_legitimize_tls_address (x, model);
2660 if (GET_CODE (x) == PLUS
2661 && GET_CODE (XEXP (x, 0)) == REG
2662 && GET_CODE (XEXP (x, 1)) == CONST_INT
2663 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2665 HOST_WIDE_INT high_int, low_int;
2667 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2668 high_int = INTVAL (XEXP (x, 1)) - low_int;
2669 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2670 GEN_INT (high_int)), 0);
2671 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2673 else if (GET_CODE (x) == PLUS
2674 && GET_CODE (XEXP (x, 0)) == REG
2675 && GET_CODE (XEXP (x, 1)) != CONST_INT
2676 && GET_MODE_NUNITS (mode) == 1
2677 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2679 || (mode != DFmode && mode != TFmode))
2680 && (TARGET_POWERPC64 || mode != DImode)
2683 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2684 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2686 else if (ALTIVEC_VECTOR_MODE (mode))
2690 /* Make sure both operands are registers. */
2691 if (GET_CODE (x) == PLUS)
2692 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2693 force_reg (Pmode, XEXP (x, 1)));
2695 reg = force_reg (Pmode, x);
2698 else if (SPE_VECTOR_MODE (mode))
2700 /* We accept [reg + reg] and [reg + OFFSET]. */
2702 if (GET_CODE (x) == PLUS)
2704 rtx op1 = XEXP (x, 0);
2705 rtx op2 = XEXP (x, 1);
2707 op1 = force_reg (Pmode, op1);
2709 if (GET_CODE (op2) != REG
2710 && (GET_CODE (op2) != CONST_INT
2711 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2712 op2 = force_reg (Pmode, op2);
2714 return gen_rtx_PLUS (Pmode, op1, op2);
2717 return force_reg (Pmode, x);
2723 && GET_CODE (x) != CONST_INT
2724 && GET_CODE (x) != CONST_DOUBLE
2726 && GET_MODE_NUNITS (mode) == 1
2727 && (GET_MODE_BITSIZE (mode) <= 32
2728 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2730 rtx reg = gen_reg_rtx (Pmode);
2731 emit_insn (gen_elf_high (reg, x));
2732 return gen_rtx_LO_SUM (Pmode, reg, x);
2734 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2737 && ! MACHO_DYNAMIC_NO_PIC_P
2739 && GET_CODE (x) != CONST_INT
2740 && GET_CODE (x) != CONST_DOUBLE
2742 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2746 rtx reg = gen_reg_rtx (Pmode);
2747 emit_insn (gen_macho_high (reg, x));
2748 return gen_rtx_LO_SUM (Pmode, reg, x);
2751 && constant_pool_expr_p (x)
2752 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2754 return create_TOC_reference (x);
2760 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2762 static GTY(()) rtx rs6000_tls_symbol;
2764 rs6000_tls_get_addr (void)
2766 if (!rs6000_tls_symbol)
2767 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2769 return rs6000_tls_symbol;
2772 /* Construct the SYMBOL_REF for TLS GOT references. */
2774 static GTY(()) rtx rs6000_got_symbol;
2776 rs6000_got_sym (void)
2778 if (!rs6000_got_symbol)
2780 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2781 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2782 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2785 return rs6000_got_symbol;
2788 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2789 this (thread-local) address. */
2792 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2796 dest = gen_reg_rtx (Pmode);
2797 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2803 tlsreg = gen_rtx_REG (Pmode, 13);
2804 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2808 tlsreg = gen_rtx_REG (Pmode, 2);
2809 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2813 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2817 tmp = gen_reg_rtx (Pmode);
2820 tlsreg = gen_rtx_REG (Pmode, 13);
2821 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2825 tlsreg = gen_rtx_REG (Pmode, 2);
2826 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2830 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2832 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2837 rtx r3, got, tga, tmp1, tmp2, eqv;
2840 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2844 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2847 rtx gsym = rs6000_got_sym ();
2848 got = gen_reg_rtx (Pmode);
2850 rs6000_emit_move (got, gsym, Pmode);
2854 static int tls_got_labelno = 0;
2855 rtx tempLR, lab, tmp3, mem;
2858 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2859 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2860 tempLR = gen_reg_rtx (Pmode);
2861 tmp1 = gen_reg_rtx (Pmode);
2862 tmp2 = gen_reg_rtx (Pmode);
2863 tmp3 = gen_reg_rtx (Pmode);
2864 mem = gen_rtx_MEM (Pmode, tmp1);
2865 RTX_UNCHANGING_P (mem) = 1;
2867 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2869 emit_move_insn (tmp1, tempLR);
2870 emit_move_insn (tmp2, mem);
2871 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2872 last = emit_move_insn (got, tmp3);
2873 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2875 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2877 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2883 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2885 r3 = gen_rtx_REG (Pmode, 3);
2887 insn = gen_tls_gd_64 (r3, got, addr);
2889 insn = gen_tls_gd_32 (r3, got, addr);
2892 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2893 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2894 insn = emit_call_insn (insn);
2895 CONST_OR_PURE_CALL_P (insn) = 1;
2896 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2897 insn = get_insns ();
2899 emit_libcall_block (insn, dest, r3, addr);
2901 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2903 r3 = gen_rtx_REG (Pmode, 3);
2905 insn = gen_tls_ld_64 (r3, got);
2907 insn = gen_tls_ld_32 (r3, got);
2910 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2911 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2912 insn = emit_call_insn (insn);
2913 CONST_OR_PURE_CALL_P (insn) = 1;
2914 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2915 insn = get_insns ();
2917 tmp1 = gen_reg_rtx (Pmode);
2918 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2920 emit_libcall_block (insn, tmp1, r3, eqv);
2921 if (rs6000_tls_size == 16)
2924 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2926 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2928 else if (rs6000_tls_size == 32)
2930 tmp2 = gen_reg_rtx (Pmode);
2932 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2934 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2937 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2939 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2943 tmp2 = gen_reg_rtx (Pmode);
2945 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2947 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2949 insn = gen_rtx_SET (Pmode, dest,
2950 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2956 /* IE, or 64 bit offset LE. */
2957 tmp2 = gen_reg_rtx (Pmode);
2959 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2961 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2964 insn = gen_tls_tls_64 (dest, tmp2, addr);
2966 insn = gen_tls_tls_32 (dest, tmp2, addr);
2974 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2975 instruction definitions. */
2978 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
2980 return RS6000_SYMBOL_REF_TLS_P (x);
2983 /* Return 1 if X contains a thread-local symbol. */
2986 rs6000_tls_referenced_p (rtx x)
2988 if (! TARGET_HAVE_TLS)
2991 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2994 /* Return 1 if *X is a thread-local symbol. This is the same as
2995 rs6000_tls_symbol_ref except for the type of the unused argument. */
2998 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3000 return RS6000_SYMBOL_REF_TLS_P (*x);
3003 /* The convention appears to be to define this wherever it is used.
3004 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3005 is now used here. */
3006 #ifndef REG_MODE_OK_FOR_BASE_P
3007 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3010 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3011 replace the input X, or the original X if no replacement is called for.
3012 The output parameter *WIN is 1 if the calling macro should goto WIN,
3015 For RS/6000, we wish to handle large displacements off a base
3016 register by splitting the addend across an addiu/addis and the mem insn.
3017 This cuts number of extra insns needed from 3 to 1.
3019 On Darwin, we use this to generate code for floating point constants.
3020 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3021 The Darwin code is inside #if TARGET_MACHO because only then is
3022 machopic_function_base_name() defined. */
3024 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3025 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
3027 /* We must recognize output that we have already generated ourselves. */
3028 if (GET_CODE (x) == PLUS
3029 && GET_CODE (XEXP (x, 0)) == PLUS
3030 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3031 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3032 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3034 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3035 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3036 opnum, (enum reload_type)type);
3042 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3043 && GET_CODE (x) == LO_SUM
3044 && GET_CODE (XEXP (x, 0)) == PLUS
3045 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3046 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3047 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3048 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3049 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3050 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3051 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3053 /* Result of previous invocation of this function on Darwin
3054 floating point constant. */
3055 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3056 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3057 opnum, (enum reload_type)type);
3062 if (GET_CODE (x) == PLUS
3063 && GET_CODE (XEXP (x, 0)) == REG
3064 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3065 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3066 && GET_CODE (XEXP (x, 1)) == CONST_INT
3067 && !SPE_VECTOR_MODE (mode)
3068 && !ALTIVEC_VECTOR_MODE (mode))
3070 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3071 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3073 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3075 /* Check for 32-bit overflow. */
3076 if (high + low != val)
3082 /* Reload the high part into a base reg; leave the low part
3083 in the mem directly. */
3085 x = gen_rtx_PLUS (GET_MODE (x),
3086 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3090 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3091 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3092 opnum, (enum reload_type)type);
3097 if (GET_CODE (x) == SYMBOL_REF
3098 && DEFAULT_ABI == ABI_DARWIN
3099 && !ALTIVEC_VECTOR_MODE (mode)
3100 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3101 /* Don't do this for TFmode, since the result isn't offsettable. */
3106 rtx offset = gen_rtx_CONST (Pmode,
3107 gen_rtx_MINUS (Pmode, x,
3108 gen_rtx_SYMBOL_REF (Pmode,
3109 machopic_function_base_name ())));
3110 x = gen_rtx_LO_SUM (GET_MODE (x),
3111 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3112 gen_rtx_HIGH (Pmode, offset)), offset);
3115 x = gen_rtx_LO_SUM (GET_MODE (x),
3116 gen_rtx_HIGH (Pmode, x), x);
3118 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3119 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3120 opnum, (enum reload_type)type);
3126 && constant_pool_expr_p (x)
3127 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3129 (x) = create_TOC_reference (x);
3137 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3138 that is a valid memory address for an instruction.
3139 The MODE argument is the machine mode for the MEM expression
3140 that wants to use this address.
3142 On the RS/6000, there are four valid address: a SYMBOL_REF that
3143 refers to a constant pool entry of an address (or the sum of it
3144 plus a constant), a short (16-bit signed) constant plus a register,
3145 the sum of two registers, or a register indirect, possibly with an
3146 auto-increment. For DFmode and DImode with a constant plus register,
3147 we must ensure that both words are addressable or PowerPC64 with offset
3150 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3151 32-bit DImode, TImode), indexed addressing cannot be used because
3152 adjacent memory cells are accessed by adding word-sized offsets
3153 during assembly output. */
3155 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3157 if (RS6000_SYMBOL_REF_TLS_P (x))
3159 if (legitimate_indirect_address_p (x, reg_ok_strict))
3161 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3162 && !ALTIVEC_VECTOR_MODE (mode)
3163 && !SPE_VECTOR_MODE (mode)
3165 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3167 if (legitimate_small_data_p (mode, x))
3169 if (legitimate_constant_pool_address_p (x))
3171 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3173 && GET_CODE (x) == PLUS
3174 && GET_CODE (XEXP (x, 0)) == REG
3175 && XEXP (x, 0) == virtual_stack_vars_rtx
3176 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3178 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3181 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3183 || (mode != DFmode && mode != TFmode))
3184 && (TARGET_POWERPC64 || mode != DImode)
3185 && legitimate_indexed_address_p (x, reg_ok_strict))
3187 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3192 /* Go to LABEL if ADDR (a legitimate address expression)
3193 has an effect that depends on the machine mode it is used for.
3195 On the RS/6000 this is true of all integral offsets (since AltiVec
3196 modes don't allow them) or is a pre-increment or decrement.
3198 ??? Except that due to conceptual problems in offsettable_address_p
3199 we can't really report the problems of integral offsets. So leave
3200 this assuming that the adjustable offset must be valid for the
3201 sub-words of a TFmode operand, which is what we had before. */
3204 rs6000_mode_dependent_address (rtx addr)
3206 switch (GET_CODE (addr))
3209 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3211 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3212 return val + 12 + 0x8000 >= 0x10000;
3221 return TARGET_UPDATE;
3230 /* Try to output insns to set TARGET equal to the constant C if it can
3231 be done in less than N insns. Do all computations in MODE.
3232 Returns the place where the output has been placed if it can be
3233 done and the insns have been emitted. If it would take more than N
3234 insns, zero is returned and no insns and emitted. */
3237 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3238 rtx source, int n ATTRIBUTE_UNUSED)
3240 rtx result, insn, set;
3241 HOST_WIDE_INT c0, c1;
3243 if (mode == QImode || mode == HImode)
3246 dest = gen_reg_rtx (mode);
3247 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3250 else if (mode == SImode)
3252 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3254 emit_insn (gen_rtx_SET (VOIDmode, result,
3255 GEN_INT (INTVAL (source)
3256 & (~ (HOST_WIDE_INT) 0xffff))));
3257 emit_insn (gen_rtx_SET (VOIDmode, dest,
3258 gen_rtx_IOR (SImode, result,
3259 GEN_INT (INTVAL (source) & 0xffff))));
3262 else if (mode == DImode)
3264 if (GET_CODE (source) == CONST_INT)
3266 c0 = INTVAL (source);
3269 else if (GET_CODE (source) == CONST_DOUBLE)
3271 #if HOST_BITS_PER_WIDE_INT >= 64
3272 c0 = CONST_DOUBLE_LOW (source);
3275 c0 = CONST_DOUBLE_LOW (source);
3276 c1 = CONST_DOUBLE_HIGH (source);
3282 result = rs6000_emit_set_long_const (dest, c0, c1);
3287 insn = get_last_insn ();
3288 set = single_set (insn);
3289 if (! CONSTANT_P (SET_SRC (set)))
3290 set_unique_reg_note (insn, REG_EQUAL, source);
3295 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3296 fall back to a straight forward decomposition. We do this to avoid
3297 exponential run times encountered when looking for longer sequences
3298 with rs6000_emit_set_const. */
3300 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3302 if (!TARGET_POWERPC64)
3304 rtx operand1, operand2;
3306 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3308 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3310 emit_move_insn (operand1, GEN_INT (c1));
3311 emit_move_insn (operand2, GEN_INT (c2));
3315 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3318 ud2 = (c1 & 0xffff0000) >> 16;
3319 #if HOST_BITS_PER_WIDE_INT >= 64
3323 ud4 = (c2 & 0xffff0000) >> 16;
3325 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3326 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3329 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3331 emit_move_insn (dest, GEN_INT (ud1));
3334 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3335 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3338 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3341 emit_move_insn (dest, GEN_INT (ud2 << 16));
3343 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3345 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3346 || (ud4 == 0 && ! (ud3 & 0x8000)))
3349 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3352 emit_move_insn (dest, GEN_INT (ud3 << 16));
3355 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3356 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3358 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3363 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3366 emit_move_insn (dest, GEN_INT (ud4 << 16));
3369 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3371 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3373 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3374 GEN_INT (ud2 << 16)));
3376 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3382 /* Emit a move from SOURCE to DEST in mode MODE. */
3384 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3388 operands[1] = source;
3390 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3391 if (GET_CODE (operands[1]) == CONST_DOUBLE
3392 && ! FLOAT_MODE_P (mode)
3393 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3395 /* FIXME. This should never happen. */
3396 /* Since it seems that it does, do the safe thing and convert
3398 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3400 if (GET_CODE (operands[1]) == CONST_DOUBLE
3401 && ! FLOAT_MODE_P (mode)
3402 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3403 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3404 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3405 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3408 /* Check if GCC is setting up a block move that will end up using FP
3409 registers as temporaries. We must make sure this is acceptable. */
3410 if (GET_CODE (operands[0]) == MEM
3411 && GET_CODE (operands[1]) == MEM
3413 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3414 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3415 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3416 ? 32 : MEM_ALIGN (operands[0])))
3417 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3419 : MEM_ALIGN (operands[1]))))
3420 && ! MEM_VOLATILE_P (operands [0])
3421 && ! MEM_VOLATILE_P (operands [1]))
3423 emit_move_insn (adjust_address (operands[0], SImode, 0),
3424 adjust_address (operands[1], SImode, 0));
3425 emit_move_insn (adjust_address (operands[0], SImode, 4),
3426 adjust_address (operands[1], SImode, 4));
3429 else if (mode == DImode && TARGET_POWERPC64
3430 && GET_CODE (operands[0]) == REG
3431 && GET_CODE (operands[1]) == MEM && optimize > 0
3432 && SLOW_UNALIGNED_ACCESS (DImode,
3433 MEM_ALIGN (operands[1]) > 32
3435 : MEM_ALIGN (operands[1]))
3438 rtx reg = gen_reg_rtx (SImode);
3439 emit_insn (gen_rtx_SET (SImode, reg,
3440 adjust_address (operands[1], SImode, 0)));
3441 reg = simplify_gen_subreg (DImode, reg, SImode, 0);
3442 emit_insn (gen_insvdi (operands[0], GEN_INT (32), const0_rtx, reg));
3443 reg = gen_reg_rtx (SImode);
3444 emit_insn (gen_rtx_SET (SImode, reg,
3445 adjust_address (operands[1], SImode, 4)));
3446 reg = simplify_gen_subreg (DImode, reg, SImode, 0);
3447 emit_insn (gen_insvdi (operands[0], GEN_INT (32), GEN_INT (32), reg));
3450 else if (mode == DImode && TARGET_POWERPC64
3451 && GET_CODE (operands[1]) == REG
3452 && GET_CODE (operands[0]) == MEM && optimize > 0
3453 && SLOW_UNALIGNED_ACCESS (DImode,
3454 MEM_ALIGN (operands[0]) > 32
3456 : MEM_ALIGN (operands[0]))
3459 rtx reg = gen_reg_rtx (DImode);
3460 emit_move_insn (reg,
3461 gen_rtx_LSHIFTRT (DImode, operands[1], GEN_INT (32)));
3462 emit_move_insn (adjust_address (operands[0], SImode, 0),
3463 simplify_gen_subreg (SImode, reg, DImode, 0));
3464 emit_move_insn (reg, operands[1]);
3465 emit_move_insn (adjust_address (operands[0], SImode, 4),
3466 simplify_gen_subreg (SImode, reg, DImode, 0));
3470 if (!no_new_pseudos)
3472 if (GET_CODE (operands[1]) == MEM && optimize > 0
3473 && (mode == QImode || mode == HImode || mode == SImode)
3474 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3476 rtx reg = gen_reg_rtx (word_mode);
3478 emit_insn (gen_rtx_SET (word_mode, reg,
3479 gen_rtx_ZERO_EXTEND (word_mode,
3481 operands[1] = gen_lowpart (mode, reg);
3483 if (GET_CODE (operands[0]) != REG)
3484 operands[1] = force_reg (mode, operands[1]);
3487 if (mode == SFmode && ! TARGET_POWERPC
3488 && TARGET_HARD_FLOAT && TARGET_FPRS
3489 && GET_CODE (operands[0]) == MEM)
3493 if (reload_in_progress || reload_completed)
3494 regnum = true_regnum (operands[1]);
3495 else if (GET_CODE (operands[1]) == REG)
3496 regnum = REGNO (operands[1]);
3500 /* If operands[1] is a register, on POWER it may have
3501 double-precision data in it, so truncate it to single
3503 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3506 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3507 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3508 operands[1] = newreg;
3512 /* Recognize the case where operand[1] is a reference to thread-local
3513 data and load its address to a register. */
3514 if (GET_CODE (operands[1]) == SYMBOL_REF)
3516 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3518 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3521 /* Handle the case where reload calls us with an invalid address. */
3522 if (reload_in_progress && mode == Pmode
3523 && (! general_operand (operands[1], mode)
3524 || ! nonimmediate_operand (operands[0], mode)))
3527 /* Handle the case of CONSTANT_P_RTX. */
3528 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3531 /* 128-bit constant floating-point values on Darwin should really be
3532 loaded as two parts. */
3533 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3534 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3535 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3537 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3538 know how to get a DFmode SUBREG of a TFmode. */
3539 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3540 simplify_gen_subreg (DImode, operands[1], mode, 0),
3542 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3543 GET_MODE_SIZE (DImode)),
3544 simplify_gen_subreg (DImode, operands[1], mode,
3545 GET_MODE_SIZE (DImode)),
3550 /* FIXME: In the long term, this switch statement should go away
3551 and be replaced by a sequence of tests based on things like
3557 if (CONSTANT_P (operands[1])
3558 && GET_CODE (operands[1]) != CONST_INT)
3559 operands[1] = force_const_mem (mode, operands[1]);
3565 if (CONSTANT_P (operands[1])
3566 && ! easy_fp_constant (operands[1], mode))
3567 operands[1] = force_const_mem (mode, operands[1]);
3578 if (CONSTANT_P (operands[1])
3579 && !easy_vector_constant (operands[1], mode))
3580 operands[1] = force_const_mem (mode, operands[1]);
3585 /* Use default pattern for address of ELF small data */
3588 && DEFAULT_ABI == ABI_V4
3589 && (GET_CODE (operands[1]) == SYMBOL_REF
3590 || GET_CODE (operands[1]) == CONST)
3591 && small_data_operand (operands[1], mode))
3593 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3597 if (DEFAULT_ABI == ABI_V4
3598 && mode == Pmode && mode == SImode
3599 && flag_pic == 1 && got_operand (operands[1], mode))
3601 emit_insn (gen_movsi_got (operands[0], operands[1]));
3605 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3609 && CONSTANT_P (operands[1])
3610 && GET_CODE (operands[1]) != HIGH
3611 && GET_CODE (operands[1]) != CONST_INT)
3613 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3615 /* If this is a function address on -mcall-aixdesc,
3616 convert it to the address of the descriptor. */
3617 if (DEFAULT_ABI == ABI_AIX
3618 && GET_CODE (operands[1]) == SYMBOL_REF
3619 && XSTR (operands[1], 0)[0] == '.')
3621 const char *name = XSTR (operands[1], 0);
3623 while (*name == '.')
3625 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3626 CONSTANT_POOL_ADDRESS_P (new_ref)
3627 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3628 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3629 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3630 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3631 operands[1] = new_ref;
3634 if (DEFAULT_ABI == ABI_DARWIN)
3637 if (MACHO_DYNAMIC_NO_PIC_P)
3639 /* Take care of any required data indirection. */
3640 operands[1] = rs6000_machopic_legitimize_pic_address (
3641 operands[1], mode, operands[0]);
3642 if (operands[0] != operands[1])
3643 emit_insn (gen_rtx_SET (VOIDmode,
3644 operands[0], operands[1]));
3648 emit_insn (gen_macho_high (target, operands[1]));
3649 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3653 emit_insn (gen_elf_high (target, operands[1]));
3654 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3658 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3659 and we have put it in the TOC, we just need to make a TOC-relative
3662 && GET_CODE (operands[1]) == SYMBOL_REF
3663 && constant_pool_expr_p (operands[1])
3664 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3665 get_pool_mode (operands[1])))
3667 operands[1] = create_TOC_reference (operands[1]);
3669 else if (mode == Pmode
3670 && CONSTANT_P (operands[1])
3671 && ((GET_CODE (operands[1]) != CONST_INT
3672 && ! easy_fp_constant (operands[1], mode))
3673 || (GET_CODE (operands[1]) == CONST_INT
3674 && num_insns_constant (operands[1], mode) > 2)
3675 || (GET_CODE (operands[0]) == REG
3676 && FP_REGNO_P (REGNO (operands[0]))))
3677 && GET_CODE (operands[1]) != HIGH
3678 && ! legitimate_constant_pool_address_p (operands[1])
3679 && ! toc_relative_expr_p (operands[1]))
3681 /* Emit a USE operation so that the constant isn't deleted if
3682 expensive optimizations are turned on because nobody
3683 references it. This should only be done for operands that
3684 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3685 This should not be done for operands that contain LABEL_REFs.
3686 For now, we just handle the obvious case. */
3687 if (GET_CODE (operands[1]) != LABEL_REF)
3688 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3691 /* Darwin uses a special PIC legitimizer. */
3692 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3695 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3697 if (operands[0] != operands[1])
3698 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3703 /* If we are to limit the number of things we put in the TOC and
3704 this is a symbol plus a constant we can add in one insn,
3705 just put the symbol in the TOC and add the constant. Don't do
3706 this if reload is in progress. */
3707 if (GET_CODE (operands[1]) == CONST
3708 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3709 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3710 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3711 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3712 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3713 && ! side_effects_p (operands[0]))
3716 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3717 rtx other = XEXP (XEXP (operands[1], 0), 1);
3719 sym = force_reg (mode, sym);
3721 emit_insn (gen_addsi3 (operands[0], sym, other));
3723 emit_insn (gen_adddi3 (operands[0], sym, other));
3727 operands[1] = force_const_mem (mode, operands[1]);
3730 && constant_pool_expr_p (XEXP (operands[1], 0))
3731 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3732 get_pool_constant (XEXP (operands[1], 0)),
3733 get_pool_mode (XEXP (operands[1], 0))))
3736 = gen_rtx_MEM (mode,
3737 create_TOC_reference (XEXP (operands[1], 0)));
3738 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3739 RTX_UNCHANGING_P (operands[1]) = 1;
3745 if (GET_CODE (operands[0]) == MEM
3746 && GET_CODE (XEXP (operands[0], 0)) != REG
3747 && ! reload_in_progress)
3749 = replace_equiv_address (operands[0],
3750 copy_addr_to_reg (XEXP (operands[0], 0)));
3752 if (GET_CODE (operands[1]) == MEM
3753 && GET_CODE (XEXP (operands[1], 0)) != REG
3754 && ! reload_in_progress)
3756 = replace_equiv_address (operands[1],
3757 copy_addr_to_reg (XEXP (operands[1], 0)));
3760 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3762 gen_rtx_SET (VOIDmode,
3763 operands[0], operands[1]),
3764 gen_rtx_CLOBBER (VOIDmode,
3765 gen_rtx_SCRATCH (SImode)))));
3774 /* Above, we may have called force_const_mem which may have returned
3775 an invalid address. If we can, fix this up; otherwise, reload will
3776 have to deal with it. */
3777 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3778 operands[1] = validize_mem (operands[1]);
3781 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3784 /* Nonzero if we can use a floating-point register to pass this arg. */
3785 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
3786 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
3787 && (CUM)->fregno <= FP_ARG_MAX_REG \
3788 && TARGET_HARD_FLOAT && TARGET_FPRS)
3790 /* Nonzero if we can use an AltiVec register to pass this arg. */
3791 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
3792 (ALTIVEC_VECTOR_MODE (MODE) \
3793 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
3794 && TARGET_ALTIVEC_ABI \
3797 /* Return a nonzero value to say to return the function value in
3798 memory, just as large structures are always returned. TYPE will be
3799 the data type of the value, and FNTYPE will be the type of the
3800 function doing the returning, or @code{NULL} for libcalls.
3802 The AIX ABI for the RS/6000 specifies that all structures are
3803 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3804 specifies that structures <= 8 bytes are returned in r3/r4, but a
3805 draft put them in memory, and GCC used to implement the draft
3806 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
3807 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3808 compatibility can change DRAFT_V4_STRUCT_RET to override the
3809 default, and -m switches get the final word. See
3810 rs6000_override_options for more details.
3812 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3813 long double support is enabled. These values are returned in memory.
3815 int_size_in_bytes returns -1 for variable size objects, which go in
3816 memory always. The cast to unsigned makes -1 > 8. */
3819 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3821 if (AGGREGATE_TYPE_P (type)
3822 && (TARGET_AIX_STRUCT_RET
3823 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3825 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3830 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3831 for a call to a function whose data type is FNTYPE.
3832 For a library call, FNTYPE is 0.
3834 For incoming args we set the number of arguments in the prototype large
3835 so we never return a PARALLEL. */
3838 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3839 rtx libname ATTRIBUTE_UNUSED, int incoming, int libcall)
3841 static CUMULATIVE_ARGS zero_cumulative;
3843 *cum = zero_cumulative;
3845 cum->fregno = FP_ARG_MIN_REG;
3846 cum->vregno = ALTIVEC_ARG_MIN_REG;
3847 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3848 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3849 ? CALL_LIBCALL : CALL_NORMAL);
3850 cum->sysv_gregno = GP_ARG_MIN_REG;
3851 cum->stdarg = fntype
3852 && (TYPE_ARG_TYPES (fntype) != 0
3853 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3854 != void_type_node));
3857 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3859 else if (cum->prototype)
3860 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3861 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3862 || rs6000_return_in_memory (TREE_TYPE (fntype),
3866 cum->nargs_prototype = 0;
3868 /* Check for a longcall attribute. */
3870 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3871 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3872 cum->call_cookie = CALL_LONG;
3874 if (TARGET_DEBUG_ARG)
3876 fprintf (stderr, "\ninit_cumulative_args:");
3879 tree ret_type = TREE_TYPE (fntype);
3880 fprintf (stderr, " ret code = %s,",
3881 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3884 if (cum->call_cookie & CALL_LONG)
3885 fprintf (stderr, " longcall,");
3887 fprintf (stderr, " proto = %d, nargs = %d\n",
3888 cum->prototype, cum->nargs_prototype);
3892 /* If defined, a C expression which determines whether, and in which
3893 direction, to pad out an argument with extra space. The value
3894 should be of type `enum direction': either `upward' to pad above
3895 the argument, `downward' to pad below, or `none' to inhibit
3898 For the AIX ABI structs are always stored left shifted in their
3902 function_arg_padding (enum machine_mode mode, tree type)
3904 #ifndef AGGREGATE_PADDING_FIXED
3905 #define AGGREGATE_PADDING_FIXED 0
3907 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3908 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3911 if (!AGGREGATE_PADDING_FIXED)
3913 /* GCC used to pass structures of the same size as integer types as
3914 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3915 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3916 passed padded downward, except that -mstrict-align further
3917 muddied the water in that multi-component structures of 2 and 4
3918 bytes in size were passed padded upward.
3920 The following arranges for best compatibility with previous
3921 versions of gcc, but removes the -mstrict-align dependency. */
3922 if (BYTES_BIG_ENDIAN)
3924 HOST_WIDE_INT size = 0;
3926 if (mode == BLKmode)
3928 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3929 size = int_size_in_bytes (type);
3932 size = GET_MODE_SIZE (mode);
3934 if (size == 1 || size == 2 || size == 4)
3940 if (AGGREGATES_PAD_UPWARD_ALWAYS)
3942 if (type != 0 && AGGREGATE_TYPE_P (type))
3946 /* Fall back to the default. */
3947 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
3950 /* If defined, a C expression that gives the alignment boundary, in bits,
3951 of an argument with the specified mode and type. If it is not defined,
3952 PARM_BOUNDARY is used for all arguments.
3954 V.4 wants long longs to be double word aligned. */
3957 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
3959 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3961 else if (SPE_VECTOR_MODE (mode))
3963 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3966 return PARM_BOUNDARY;
3969 /* Update the data in CUM to advance over an argument
3970 of mode MODE and data type TYPE.
3971 (TYPE is null for libcalls where that information may not be available.) */
3974 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3975 tree type, int named)
3977 cum->nargs_prototype--;
3979 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3981 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
3984 /* PowerPC64 Linux and AIX allocates GPRs for a vector argument
3985 even if it is going to be passed in a vector register.
3986 Darwin does the same for variable-argument functions. */
3987 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
3988 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
3992 /* Vector parameters must be 16-byte aligned. This places
3993 them at 2 mod 4 in terms of words in 32-bit mode, since
3994 the parameter save area starts at offset 24 from the
3995 stack. In 64-bit mode, they just have to start on an
3996 even word, since the parameter save area is 16-byte
3997 aligned. Space for GPRs is reserved even if the argument
3998 will be passed in memory. */
4000 align = ((6 - (cum->words & 3)) & 3);
4002 align = cum->words & 1;
4003 cum->words += align + RS6000_ARG_SIZE (mode, type);
4005 if (TARGET_DEBUG_ARG)
4007 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4009 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4010 cum->nargs_prototype, cum->prototype,
4011 GET_MODE_NAME (mode));
4015 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4017 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4019 else if (DEFAULT_ABI == ABI_V4)
4021 if (TARGET_HARD_FLOAT && TARGET_FPRS
4022 && (mode == SFmode || mode == DFmode))
4024 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4029 cum->words += cum->words & 1;
4030 cum->words += RS6000_ARG_SIZE (mode, type);
4036 int gregno = cum->sysv_gregno;
4038 /* Aggregates and IEEE quad get passed by reference. */
4039 if ((type && AGGREGATE_TYPE_P (type))
4043 n_words = RS6000_ARG_SIZE (mode, type);
4045 /* Long long and SPE vectors are put in odd registers. */
4046 if (n_words == 2 && (gregno & 1) == 0)
4049 /* Long long and SPE vectors are not split between registers
4051 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4053 /* Long long is aligned on the stack. */
4055 cum->words += cum->words & 1;
4056 cum->words += n_words;
4059 /* Note: continuing to accumulate gregno past when we've started
4060 spilling to the stack indicates the fact that we've started
4061 spilling to the stack to expand_builtin_saveregs. */
4062 cum->sysv_gregno = gregno + n_words;
4065 if (TARGET_DEBUG_ARG)
4067 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4068 cum->words, cum->fregno);
4069 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4070 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4071 fprintf (stderr, "mode = %4s, named = %d\n",
4072 GET_MODE_NAME (mode), named);
4077 int align = (TARGET_32BIT && (cum->words & 1) != 0
4078 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4080 cum->words += align + RS6000_ARG_SIZE (mode, type);
4082 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4083 && TARGET_HARD_FLOAT && TARGET_FPRS)
4084 cum->fregno += (mode == TFmode ? 2 : 1);
4086 if (TARGET_DEBUG_ARG)
4088 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4089 cum->words, cum->fregno);
4090 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4091 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4092 fprintf (stderr, "named = %d, align = %d\n", named, align);
4097 /* Determine where to put a SIMD argument on the SPE. */
4100 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4105 int gregno = cum->sysv_gregno;
4106 int n_words = RS6000_ARG_SIZE (mode, type);
4108 /* SPE vectors are put in odd registers. */
4109 if (n_words == 2 && (gregno & 1) == 0)
4112 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4115 enum machine_mode m = SImode;
4117 r1 = gen_rtx_REG (m, gregno);
4118 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4119 r2 = gen_rtx_REG (m, gregno + 1);
4120 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4121 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4128 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4129 return gen_rtx_REG (mode, cum->sysv_gregno);
4135 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4138 rs6000_mixed_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4139 tree type, int align_words)
4143 /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
4144 in vararg list into zero, one or two GPRs */
4145 if (align_words >= GP_ARG_NUM_REG)
4146 return gen_rtx_PARALLEL (DFmode,
4148 gen_rtx_EXPR_LIST (VOIDmode,
4149 NULL_RTX, const0_rtx),
4150 gen_rtx_EXPR_LIST (VOIDmode,
4154 else if (align_words + RS6000_ARG_SIZE (mode, type)
4156 /* If this is partially on the stack, then we only
4157 include the portion actually in registers here. */
4158 return gen_rtx_PARALLEL (DFmode,
4160 gen_rtx_EXPR_LIST (VOIDmode,
4161 gen_rtx_REG (SImode,
4165 gen_rtx_EXPR_LIST (VOIDmode,
4170 /* split a DFmode arg into two GPRs */
4171 return gen_rtx_PARALLEL (DFmode,
4173 gen_rtx_EXPR_LIST (VOIDmode,
4174 gen_rtx_REG (SImode,
4178 gen_rtx_EXPR_LIST (VOIDmode,
4179 gen_rtx_REG (SImode,
4183 gen_rtx_EXPR_LIST (VOIDmode,
4184 gen_rtx_REG (mode, cum->fregno),
4187 /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
4189 else if (mode == DImode)
4191 if (align_words < GP_ARG_NUM_REG - 1)
4192 return gen_rtx_PARALLEL (DImode,
4194 gen_rtx_EXPR_LIST (VOIDmode,
4195 gen_rtx_REG (SImode,
4199 gen_rtx_EXPR_LIST (VOIDmode,
4200 gen_rtx_REG (SImode,
4204 else if (align_words == GP_ARG_NUM_REG - 1)
4205 return gen_rtx_PARALLEL (DImode,
4207 gen_rtx_EXPR_LIST (VOIDmode,
4208 NULL_RTX, const0_rtx),
4209 gen_rtx_EXPR_LIST (VOIDmode,
4210 gen_rtx_REG (SImode,
4215 else if (mode == BLKmode && align_words <= (GP_ARG_NUM_REG - 1))
4218 int size = int_size_in_bytes (type);
4219 int no_units = ((size - 1) / 4) + 1;
4220 int max_no_words = GP_ARG_NUM_REG - align_words;
4221 int rtlvec_len = no_units < max_no_words ? no_units : max_no_words;
4222 rtx *rtlvec = (rtx *) alloca (rtlvec_len * sizeof (rtx));
4224 memset ((char *) rtlvec, 0, rtlvec_len * sizeof (rtx));
4226 for (k=0; k < rtlvec_len; k++)
4227 rtlvec[k] = gen_rtx_EXPR_LIST (VOIDmode,
4228 gen_rtx_REG (SImode,
4231 k == 0 ? const0_rtx : GEN_INT (k*4));
4233 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k, rtlvec));
4239 /* Determine where to put an argument to a function.
4240 Value is zero to push the argument on the stack,
4241 or a hard register in which to store the argument.
4243 MODE is the argument's machine mode.
4244 TYPE is the data type of the argument (as a tree).
4245 This is null for libcalls where that information may
4247 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4248 the preceding args and about the function being called.
4249 NAMED is nonzero if this argument is a named parameter
4250 (otherwise it is an extra parameter matching an ellipsis).
4252 On RS/6000 the first eight words of non-FP are normally in registers
4253 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4254 Under V.4, the first 8 FP args are in registers.
4256 If this is floating-point and no prototype is specified, we use
4257 both an FP and integer register (or possibly FP reg and stack). Library
4258 functions (when CALL_LIBCALL is set) always have the proper types for args,
4259 so we can pass the FP value just in one register. emit_library_function
4260 doesn't support PARALLEL anyway. */
4263 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4264 tree type, int named)
4266 enum rs6000_abi abi = DEFAULT_ABI;
4268 /* Return a marker to indicate whether CR1 needs to set or clear the
4269 bit that V.4 uses to say fp args were passed in registers.
4270 Assume that we don't need the marker for software floating point,
4271 or compiler generated library calls. */
4272 if (mode == VOIDmode)
4275 && cum->nargs_prototype < 0
4276 && (cum->call_cookie & CALL_LIBCALL) == 0
4277 && (cum->prototype || TARGET_NO_PROTOTYPE))
4279 /* For the SPE, we need to crxor CR6 always. */
4281 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4282 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4283 return GEN_INT (cum->call_cookie
4284 | ((cum->fregno == FP_ARG_MIN_REG)
4285 ? CALL_V4_SET_FP_ARGS
4286 : CALL_V4_CLEAR_FP_ARGS));
4289 return GEN_INT (cum->call_cookie);
4292 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4293 if (TARGET_64BIT && ! cum->prototype)
4295 /* Vector parameters get passed in vector register
4296 and also in GPRs or memory, in absence of prototype. */
4299 align_words = (cum->words + 1) & ~1;
4301 if (align_words >= GP_ARG_NUM_REG)
4307 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4309 return gen_rtx_PARALLEL (mode,
4311 gen_rtx_EXPR_LIST (VOIDmode,
4313 gen_rtx_EXPR_LIST (VOIDmode,
4314 gen_rtx_REG (mode, cum->vregno),
4318 return gen_rtx_REG (mode, cum->vregno);
4319 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4321 if (named || abi == ABI_V4)
4325 /* Vector parameters to varargs functions under AIX or Darwin
4326 get passed in memory and possibly also in GPRs. */
4327 int align, align_words;
4328 enum machine_mode part_mode = mode;
4330 /* Vector parameters must be 16-byte aligned. This places them at
4331 2 mod 4 in terms of words in 32-bit mode, since the parameter
4332 save area starts at offset 24 from the stack. In 64-bit mode,
4333 they just have to start on an even word, since the parameter
4334 save area is 16-byte aligned. */
4336 align = ((6 - (cum->words & 3)) & 3);
4338 align = cum->words & 1;
4339 align_words = cum->words + align;
4341 /* Out of registers? Memory, then. */
4342 if (align_words >= GP_ARG_NUM_REG)
4345 /* The vector value goes in GPRs. Only the part of the
4346 value in GPRs is reported here. */
4347 if (align_words + CLASS_MAX_NREGS (mode, GENERAL_REGS)
4349 /* Fortunately, there are only two possibilities, the value
4350 is either wholly in GPRs or half in GPRs and half not. */
4353 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4356 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4357 return rs6000_spe_function_arg (cum, mode, type);
4358 else if (abi == ABI_V4)
4360 if (TARGET_HARD_FLOAT && TARGET_FPRS
4361 && (mode == SFmode || mode == DFmode))
4363 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4364 return gen_rtx_REG (mode, cum->fregno);
4371 int gregno = cum->sysv_gregno;
4373 /* Aggregates and IEEE quad get passed by reference. */
4374 if ((type && AGGREGATE_TYPE_P (type))
4378 n_words = RS6000_ARG_SIZE (mode, type);
4380 /* Long long and SPE vectors are put in odd registers. */
4381 if (n_words == 2 && (gregno & 1) == 0)
4384 /* Long long do not split between registers and stack. */
4385 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4386 return gen_rtx_REG (mode, gregno);
4393 int align = (TARGET_32BIT && (cum->words & 1) != 0
4394 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4395 int align_words = cum->words + align;
4397 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4400 if (TARGET_32BIT && TARGET_POWERPC64
4401 && (mode == DFmode || mode == DImode || mode == BLKmode))
4402 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4404 if (USE_FP_FOR_ARG_P (cum, mode, type))
4407 || ((cum->nargs_prototype > 0)
4408 /* IBM AIX extended its linkage convention definition always
4409 to require FP args after register save area hole on the
4411 && (DEFAULT_ABI != ABI_AIX
4413 || (align_words < GP_ARG_NUM_REG))))
4414 return gen_rtx_REG (mode, cum->fregno);
4416 return gen_rtx_PARALLEL (mode,
4418 gen_rtx_EXPR_LIST (VOIDmode,
4419 ((align_words >= GP_ARG_NUM_REG)
4422 + RS6000_ARG_SIZE (mode, type)
4424 /* If this is partially on the stack, then
4425 we only include the portion actually
4426 in registers here. */
4427 ? gen_rtx_REG (SImode,
4428 GP_ARG_MIN_REG + align_words)
4429 : gen_rtx_REG (mode,
4430 GP_ARG_MIN_REG + align_words))),
4432 gen_rtx_EXPR_LIST (VOIDmode,
4433 gen_rtx_REG (mode, cum->fregno),
4436 else if (align_words < GP_ARG_NUM_REG)
4437 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4443 /* For an arg passed partly in registers and partly in memory,
4444 this is the number of registers used.
4445 For args passed entirely in registers or entirely in memory, zero. */
4448 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4449 tree type, int named)
4451 if (DEFAULT_ABI == ABI_V4)
4454 if (USE_FP_FOR_ARG_P (cum, mode, type)
4455 || USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4457 if (cum->nargs_prototype >= 0)
4461 if (cum->words < GP_ARG_NUM_REG
4462 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4464 int ret = GP_ARG_NUM_REG - cum->words;
4465 if (ret && TARGET_DEBUG_ARG)
4466 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4474 /* A C expression that indicates when an argument must be passed by
4475 reference. If nonzero for an argument, a copy of that argument is
4476 made in memory and a pointer to the argument is passed instead of
4477 the argument itself. The pointer is passed in whatever way is
4478 appropriate for passing a pointer to that type.
4480 Under V.4, structures and unions are passed by reference.
4482 As an extension to all ABIs, variable sized types are passed by
4486 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4487 enum machine_mode mode ATTRIBUTE_UNUSED,
4488 tree type, int named ATTRIBUTE_UNUSED)
4490 if (DEFAULT_ABI == ABI_V4
4491 && ((type && AGGREGATE_TYPE_P (type))
4494 if (TARGET_DEBUG_ARG)
4495 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4499 return type && int_size_in_bytes (type) < 0;
4503 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
4506 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
4511 for (i = 0; i < nregs; i++)
4513 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
4514 if (reload_completed)
4516 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
4519 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
4520 i * GET_MODE_SIZE(reg_mode));
4523 tem = replace_equiv_address (tem, XEXP (tem, 0));
4525 if (tem == NULL_RTX)
4528 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
4533 /* Perform any needed actions needed for a function that is receiving a
4534 variable number of arguments.
4538 MODE and TYPE are the mode and type of the current parameter.
4540 PRETEND_SIZE is a variable that should be set to the amount of stack
4541 that must be pushed by the prolog to pretend that our caller pushed
4544 Normally, this macro will push all remaining incoming registers on the
4545 stack and set PRETEND_SIZE to the length of the registers pushed. */
4548 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4549 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4551 CUMULATIVE_ARGS next_cum;
4552 int reg_size = TARGET_32BIT ? 4 : 8;
4553 rtx save_area = NULL_RTX, mem;
4554 int first_reg_offset, set;
4556 /* Skip the last named argument. */
4558 function_arg_advance (&next_cum, mode, type, 1);
4560 if (DEFAULT_ABI == ABI_V4)
4562 /* Indicate to allocate space on the stack for varargs save area. */
4563 cfun->machine->sysv_varargs_p = 1;
4565 save_area = plus_constant (virtual_stack_vars_rtx,
4566 - RS6000_VARARGS_SIZE);
4568 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4572 first_reg_offset = next_cum.words;
4573 save_area = virtual_incoming_args_rtx;
4574 cfun->machine->sysv_varargs_p = 0;
4576 if (MUST_PASS_IN_STACK (mode, type))
4577 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4580 set = get_varargs_alias_set ();
4581 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4583 mem = gen_rtx_MEM (BLKmode,
4584 plus_constant (save_area,
4585 first_reg_offset * reg_size)),
4586 set_mem_alias_set (mem, set);
4587 set_mem_align (mem, BITS_PER_WORD);
4589 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4590 GP_ARG_NUM_REG - first_reg_offset);
4593 /* Save FP registers if needed. */
4594 if (DEFAULT_ABI == ABI_V4
4595 && TARGET_HARD_FLOAT && TARGET_FPRS
4597 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4599 int fregno = next_cum.fregno;
4600 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4601 rtx lab = gen_label_rtx ();
4602 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4604 emit_jump_insn (gen_rtx_SET (VOIDmode,
4606 gen_rtx_IF_THEN_ELSE (VOIDmode,
4607 gen_rtx_NE (VOIDmode, cr1,
4609 gen_rtx_LABEL_REF (VOIDmode, lab),
4612 while (fregno <= FP_ARG_V4_MAX_REG)
4614 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4615 set_mem_alias_set (mem, set);
4616 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4625 /* Create the va_list data type. */
4628 rs6000_build_builtin_va_list (void)
4630 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4632 /* For AIX, prefer 'char *' because that's what the system
4633 header files like. */
4634 if (DEFAULT_ABI != ABI_V4)
4635 return build_pointer_type (char_type_node);
4637 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4638 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4640 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4641 unsigned_char_type_node);
4642 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4643 unsigned_char_type_node);
4644 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4646 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4647 short_unsigned_type_node);
4648 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4650 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4653 DECL_FIELD_CONTEXT (f_gpr) = record;
4654 DECL_FIELD_CONTEXT (f_fpr) = record;
4655 DECL_FIELD_CONTEXT (f_res) = record;
4656 DECL_FIELD_CONTEXT (f_ovf) = record;
4657 DECL_FIELD_CONTEXT (f_sav) = record;
4659 TREE_CHAIN (record) = type_decl;
4660 TYPE_NAME (record) = type_decl;
4661 TYPE_FIELDS (record) = f_gpr;
4662 TREE_CHAIN (f_gpr) = f_fpr;
4663 TREE_CHAIN (f_fpr) = f_res;
4664 TREE_CHAIN (f_res) = f_ovf;
4665 TREE_CHAIN (f_ovf) = f_sav;
4667 layout_type (record);
4669 /* The correct type is an array type of one element. */
4670 return build_array_type (record, build_index_type (size_zero_node));
4673 /* Implement va_start. */
4676 rs6000_va_start (tree valist, rtx nextarg)
4678 HOST_WIDE_INT words, n_gpr, n_fpr;
4679 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4680 tree gpr, fpr, ovf, sav, t;
4682 /* Only SVR4 needs something special. */
4683 if (DEFAULT_ABI != ABI_V4)
4685 std_expand_builtin_va_start (valist, nextarg);
4689 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4690 f_fpr = TREE_CHAIN (f_gpr);
4691 f_res = TREE_CHAIN (f_fpr);
4692 f_ovf = TREE_CHAIN (f_res);
4693 f_sav = TREE_CHAIN (f_ovf);
4695 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4696 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4697 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4698 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4699 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4701 /* Count number of gp and fp argument registers used. */
4702 words = current_function_args_info.words;
4703 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4704 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4706 if (TARGET_DEBUG_ARG)
4707 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4708 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4709 words, n_gpr, n_fpr);
4711 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4712 TREE_SIDE_EFFECTS (t) = 1;
4713 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4715 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4716 TREE_SIDE_EFFECTS (t) = 1;
4717 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4719 /* Find the overflow area. */
4720 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4722 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4723 build_int_2 (words * UNITS_PER_WORD, 0));
4724 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4725 TREE_SIDE_EFFECTS (t) = 1;
4726 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4728 /* Find the register save area. */
4729 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4730 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4731 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4732 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4733 TREE_SIDE_EFFECTS (t) = 1;
4734 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4737 /* Implement va_arg. */
4740 rs6000_va_arg (tree valist, tree type)
4742 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4743 tree gpr, fpr, ovf, sav, reg, t, u;
4744 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4745 rtx lab_false, lab_over, addr_rtx, r;
4747 if (DEFAULT_ABI != ABI_V4)
4749 /* Variable sized types are passed by reference. */
4750 if (int_size_in_bytes (type) < 0)
4752 u = build_pointer_type (type);
4754 /* Args grow upward. */
4755 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4756 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4757 TREE_SIDE_EFFECTS (t) = 1;
4759 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4760 TREE_SIDE_EFFECTS (t) = 1;
4762 t = build1 (INDIRECT_REF, u, t);
4763 TREE_SIDE_EFFECTS (t) = 1;
4765 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4768 return std_expand_builtin_va_arg (valist, type);
4771 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4772 f_fpr = TREE_CHAIN (f_gpr);
4773 f_res = TREE_CHAIN (f_fpr);
4774 f_ovf = TREE_CHAIN (f_res);
4775 f_sav = TREE_CHAIN (f_ovf);
4777 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4778 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4779 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4780 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4781 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4783 size = int_size_in_bytes (type);
4784 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4786 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4788 /* Aggregates and long doubles are passed by reference. */
4794 size = UNITS_PER_WORD;
4797 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4799 /* FP args go in FP registers, if present. */
4808 /* Otherwise into GP registers. */
4816 /* Pull the value out of the saved registers.... */
4818 lab_false = gen_label_rtx ();
4819 lab_over = gen_label_rtx ();
4820 addr_rtx = gen_reg_rtx (Pmode);
4822 /* AltiVec vectors never go in registers. */
4823 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4825 TREE_THIS_VOLATILE (reg) = 1;
4826 emit_cmp_and_jump_insns
4827 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4828 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4831 /* Long long is aligned in the registers. */
4834 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4835 build_int_2 (n_reg - 1, 0));
4836 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4837 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4838 TREE_SIDE_EFFECTS (u) = 1;
4839 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4843 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4847 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4848 build_int_2 (n_reg, 0));
4849 TREE_SIDE_EFFECTS (u) = 1;
4851 u = build1 (CONVERT_EXPR, integer_type_node, u);
4852 TREE_SIDE_EFFECTS (u) = 1;
4854 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4855 TREE_SIDE_EFFECTS (u) = 1;
4857 t = build (PLUS_EXPR, ptr_type_node, t, u);
4858 TREE_SIDE_EFFECTS (t) = 1;
4860 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4862 emit_move_insn (addr_rtx, r);
4864 emit_jump_insn (gen_jump (lab_over));
4868 emit_label (lab_false);
4870 /* ... otherwise out of the overflow area. */
4872 /* Make sure we don't find reg 7 for the next int arg.
4874 All AltiVec vectors go in the overflow area. So in the AltiVec
4875 case we need to get the vectors from the overflow area, but
4876 remember where the GPRs and FPRs are. */
4877 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4878 || !TARGET_ALTIVEC))
4880 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4881 TREE_SIDE_EFFECTS (t) = 1;
4882 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4885 /* Care for on-stack alignment if needed. */
4892 /* AltiVec vectors are 16 byte aligned. */
4893 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4898 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4899 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4903 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4905 emit_move_insn (addr_rtx, r);
4907 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4908 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4909 TREE_SIDE_EFFECTS (t) = 1;
4910 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4912 emit_label (lab_over);
4916 r = gen_rtx_MEM (Pmode, addr_rtx);
4917 set_mem_alias_set (r, get_varargs_alias_set ());
4918 emit_move_insn (addr_rtx, r);
4926 #define def_builtin(MASK, NAME, TYPE, CODE) \
4928 if ((MASK) & target_flags) \
4929 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4933 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4935 static const struct builtin_description bdesc_3arg[] =
4937 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
4938 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
4939 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
4940 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
4941 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
4942 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
4943 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
4944 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
4945 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
4946 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
4947 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
4948 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
4949 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
4950 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
4951 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
4952 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
4953 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
4954 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
4955 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
4956 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
4957 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
4958 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
4959 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
4962 /* DST operations: void foo (void *, const int, const char). */
4964 static const struct builtin_description bdesc_dst[] =
4966 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
4967 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
4968 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
4969 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4972 /* Simple binary operations: VECc = foo (VECa, VECb). */
4974 static struct builtin_description bdesc_2arg[] =
4976 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4977 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4978 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4979 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4980 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4981 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4982 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4983 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4984 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4985 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4986 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4987 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4988 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4989 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4990 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4991 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4992 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4993 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4994 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4995 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4996 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4997 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4998 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4999 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5000 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5001 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5002 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5003 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5004 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5005 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5006 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5007 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5008 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5009 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5010 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5011 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5012 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5013 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5014 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5015 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5016 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5017 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5018 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5019 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5020 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5021 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5022 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5023 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5024 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5025 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5026 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5027 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5028 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5029 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5030 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5031 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5032 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5033 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5034 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5035 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5036 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5037 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5038 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5039 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5040 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5041 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5042 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5043 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5044 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5045 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5046 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5047 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5048 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5049 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5050 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5051 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5052 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5053 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5054 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5055 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5056 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5057 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5058 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5059 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5060 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5061 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5062 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5063 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5064 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5065 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5066 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5067 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5068 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5069 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5070 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5071 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5072 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5073 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5074 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5075 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5076 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5077 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5078 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5079 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5080 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5081 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5082 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5083 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5084 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5085 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5086 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5087 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5088 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5090 /* Place holder, leave as first spe builtin. */
5091 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5092 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5093 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5094 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5095 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5096 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5097 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5098 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5099 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5100 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5101 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5102 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5103 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5104 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5105 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5106 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5107 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5108 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5109 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5110 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5111 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5112 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5113 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5114 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5115 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5116 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5117 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5118 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5119 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5120 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5121 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5122 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5123 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5124 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5125 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5126 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5127 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5128 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5129 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5130 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5131 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5132 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5133 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5134 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5135 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5136 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5137 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5138 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5139 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5140 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5141 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5142 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5143 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5144 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5145 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5146 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5147 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5148 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5149 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5150 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5151 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5152 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5153 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5154 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5155 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5156 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5157 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5158 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5159 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5160 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5161 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5162 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5163 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5164 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5165 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5166 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5167 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5168 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5169 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5170 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5171 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5172 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5173 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5174 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5175 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5176 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5177 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5178 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5179 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5180 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5181 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5182 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5183 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5184 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5185 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5186 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5187 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5188 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5189 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5190 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5191 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5192 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5193 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5194 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5195 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5196 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5197 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5198 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5199 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5201 /* SPE binary operations expecting a 5-bit unsigned literal. */
5202 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5204 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5205 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5206 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5207 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5208 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5209 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5210 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5211 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5212 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5213 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5214 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5215 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5216 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5217 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5218 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5219 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5220 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5221 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5222 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5223 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5224 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5225 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5226 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5227 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5228 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5229 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5231 /* Place-holder. Leave as last binary SPE builtin. */
5232 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5235 /* AltiVec predicates. */
5237 struct builtin_description_predicates
5239 const unsigned int mask;
5240 const enum insn_code icode;
5242 const char *const name;
5243 const enum rs6000_builtins code;
5246 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5248 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5249 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5250 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5251 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5252 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5253 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5254 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5255 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5256 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5257 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5258 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5259 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5260 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5263 /* SPE predicates. */
5264 static struct builtin_description bdesc_spe_predicates[] =
5266 /* Place-holder. Leave as first. */
5267 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5268 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5269 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5270 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5271 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5272 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5273 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5274 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5275 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5276 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5277 /* Place-holder. Leave as last. */
5278 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5281 /* SPE evsel predicates. */
5282 static struct builtin_description bdesc_spe_evsel[] =
5284 /* Place-holder. Leave as first. */
5285 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5286 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5287 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5288 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5289 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5290 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5291 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5292 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5293 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5294 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5295 /* Place-holder. Leave as last. */
5296 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5299 /* ABS* operations. */
5301 static const struct builtin_description bdesc_abs[] =
5303 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
5304 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
5305 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
5306 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
5307 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
5308 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
5309 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
5312 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5315 static struct builtin_description bdesc_1arg[] =
5317 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
5318 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
5319 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
5320 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
5321 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
5322 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
5323 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
5324 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
5325 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
5326 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
5327 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
5328 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
5329 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
5330 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
5331 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
5332 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
5333 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
5335 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5336 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5337 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
5338 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
5339 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
5340 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
5341 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
5342 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
5343 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
5344 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
5345 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
5346 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
5347 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
5348 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
5349 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
5350 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
5351 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5352 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5353 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5354 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5355 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5356 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5357 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5358 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5359 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5360 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5361 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5362 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5363 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5364 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5366 /* Place-holder. Leave as last unary SPE builtin. */
5367 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5371 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5374 tree arg0 = TREE_VALUE (arglist);
5375 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5376 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5377 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5379 if (icode == CODE_FOR_nothing)
5380 /* Builtin not supported on this processor. */
5383 /* If we got invalid arguments bail out before generating bad rtl. */
5384 if (arg0 == error_mark_node)
5387 if (icode == CODE_FOR_altivec_vspltisb
5388 || icode == CODE_FOR_altivec_vspltish
5389 || icode == CODE_FOR_altivec_vspltisw
5390 || icode == CODE_FOR_spe_evsplatfi
5391 || icode == CODE_FOR_spe_evsplati)
5393 /* Only allow 5-bit *signed* literals. */
5394 if (GET_CODE (op0) != CONST_INT
5395 || INTVAL (op0) > 0x1f
5396 || INTVAL (op0) < -0x1f)
5398 error ("argument 1 must be a 5-bit signed literal");
5404 || GET_MODE (target) != tmode
5405 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5406 target = gen_reg_rtx (tmode);
5408 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5409 op0 = copy_to_mode_reg (mode0, op0);
5411 pat = GEN_FCN (icode) (target, op0);
5420 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5422 rtx pat, scratch1, scratch2;
5423 tree arg0 = TREE_VALUE (arglist);
5424 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5425 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5426 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5428 /* If we have invalid arguments, bail out before generating bad rtl. */
5429 if (arg0 == error_mark_node)
5433 || GET_MODE (target) != tmode
5434 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5435 target = gen_reg_rtx (tmode);
5437 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5438 op0 = copy_to_mode_reg (mode0, op0);
5440 scratch1 = gen_reg_rtx (mode0);
5441 scratch2 = gen_reg_rtx (mode0);
5443 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5452 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5455 tree arg0 = TREE_VALUE (arglist);
5456 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5457 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5458 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5459 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5460 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5461 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5463 if (icode == CODE_FOR_nothing)
5464 /* Builtin not supported on this processor. */
5467 /* If we got invalid arguments bail out before generating bad rtl. */
5468 if (arg0 == error_mark_node || arg1 == error_mark_node)
5471 if (icode == CODE_FOR_altivec_vcfux
5472 || icode == CODE_FOR_altivec_vcfsx
5473 || icode == CODE_FOR_altivec_vctsxs
5474 || icode == CODE_FOR_altivec_vctuxs
5475 || icode == CODE_FOR_altivec_vspltb
5476 || icode == CODE_FOR_altivec_vsplth
5477 || icode == CODE_FOR_altivec_vspltw
5478 || icode == CODE_FOR_spe_evaddiw
5479 || icode == CODE_FOR_spe_evldd
5480 || icode == CODE_FOR_spe_evldh
5481 || icode == CODE_FOR_spe_evldw
5482 || icode == CODE_FOR_spe_evlhhesplat
5483 || icode == CODE_FOR_spe_evlhhossplat
5484 || icode == CODE_FOR_spe_evlhhousplat
5485 || icode == CODE_FOR_spe_evlwhe
5486 || icode == CODE_FOR_spe_evlwhos
5487 || icode == CODE_FOR_spe_evlwhou
5488 || icode == CODE_FOR_spe_evlwhsplat
5489 || icode == CODE_FOR_spe_evlwwsplat
5490 || icode == CODE_FOR_spe_evrlwi
5491 || icode == CODE_FOR_spe_evslwi
5492 || icode == CODE_FOR_spe_evsrwis
5493 || icode == CODE_FOR_spe_evsubifw
5494 || icode == CODE_FOR_spe_evsrwiu)
5496 /* Only allow 5-bit unsigned literals. */
5497 if (TREE_CODE (arg1) != INTEGER_CST
5498 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5500 error ("argument 2 must be a 5-bit unsigned literal");
5506 || GET_MODE (target) != tmode
5507 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5508 target = gen_reg_rtx (tmode);
5510 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5511 op0 = copy_to_mode_reg (mode0, op0);
5512 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5513 op1 = copy_to_mode_reg (mode1, op1);
5515 pat = GEN_FCN (icode) (target, op0, op1);
5524 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5525 tree arglist, rtx target)
5528 tree cr6_form = TREE_VALUE (arglist);
5529 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5530 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5531 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5532 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5533 enum machine_mode tmode = SImode;
5534 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5535 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5538 if (TREE_CODE (cr6_form) != INTEGER_CST)
5540 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5544 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5549 /* If we have invalid arguments, bail out before generating bad rtl. */
5550 if (arg0 == error_mark_node || arg1 == error_mark_node)
5554 || GET_MODE (target) != tmode
5555 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5556 target = gen_reg_rtx (tmode);
5558 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5559 op0 = copy_to_mode_reg (mode0, op0);
5560 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5561 op1 = copy_to_mode_reg (mode1, op1);
5563 scratch = gen_reg_rtx (mode0);
5565 pat = GEN_FCN (icode) (scratch, op0, op1,
5566 gen_rtx_SYMBOL_REF (Pmode, opcode));
5571 /* The vec_any* and vec_all* predicates use the same opcodes for two
5572 different operations, but the bits in CR6 will be different
5573 depending on what information we want. So we have to play tricks
5574 with CR6 to get the right bits out.
5576 If you think this is disgusting, look at the specs for the
5577 AltiVec predicates. */
5579 switch (cr6_form_int)
5582 emit_insn (gen_cr6_test_for_zero (target));
5585 emit_insn (gen_cr6_test_for_zero_reverse (target));
5588 emit_insn (gen_cr6_test_for_lt (target));
5591 emit_insn (gen_cr6_test_for_lt_reverse (target));
5594 error ("argument 1 of __builtin_altivec_predicate is out of range");
5602 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5605 tree arg0 = TREE_VALUE (arglist);
5606 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5607 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5608 enum machine_mode mode0 = Pmode;
5609 enum machine_mode mode1 = Pmode;
5610 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5611 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5613 if (icode == CODE_FOR_nothing)
5614 /* Builtin not supported on this processor. */
5617 /* If we got invalid arguments bail out before generating bad rtl. */
5618 if (arg0 == error_mark_node || arg1 == error_mark_node)
5622 || GET_MODE (target) != tmode
5623 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5624 target = gen_reg_rtx (tmode);
5626 op1 = copy_to_mode_reg (mode1, op1);
5628 if (op0 == const0_rtx)
5630 addr = gen_rtx_MEM (tmode, op1);
5634 op0 = copy_to_mode_reg (mode0, op0);
5635 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5638 pat = GEN_FCN (icode) (target, addr);
5648 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5650 tree arg0 = TREE_VALUE (arglist);
5651 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5652 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5653 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5654 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5655 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5657 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5658 enum machine_mode mode1 = Pmode;
5659 enum machine_mode mode2 = Pmode;
5661 /* Invalid arguments. Bail before doing anything stoopid! */
5662 if (arg0 == error_mark_node
5663 || arg1 == error_mark_node
5664 || arg2 == error_mark_node)
5667 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5668 op0 = copy_to_mode_reg (tmode, op0);
5670 op2 = copy_to_mode_reg (mode2, op2);
5672 if (op1 == const0_rtx)
5674 addr = gen_rtx_MEM (tmode, op2);
5678 op1 = copy_to_mode_reg (mode1, op1);
5679 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5682 pat = GEN_FCN (icode) (addr, op0);
5689 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5692 tree arg0 = TREE_VALUE (arglist);
5693 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5694 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5695 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5696 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5697 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5698 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5699 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5700 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5701 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5703 if (icode == CODE_FOR_nothing)
5704 /* Builtin not supported on this processor. */
5707 /* If we got invalid arguments bail out before generating bad rtl. */
5708 if (arg0 == error_mark_node
5709 || arg1 == error_mark_node
5710 || arg2 == error_mark_node)
5713 if (icode == CODE_FOR_altivec_vsldoi_4sf
5714 || icode == CODE_FOR_altivec_vsldoi_4si
5715 || icode == CODE_FOR_altivec_vsldoi_8hi
5716 || icode == CODE_FOR_altivec_vsldoi_16qi)
5718 /* Only allow 4-bit unsigned literals. */
5719 if (TREE_CODE (arg2) != INTEGER_CST
5720 || TREE_INT_CST_LOW (arg2) & ~0xf)
5722 error ("argument 3 must be a 4-bit unsigned literal");
5728 || GET_MODE (target) != tmode
5729 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5730 target = gen_reg_rtx (tmode);
5732 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5733 op0 = copy_to_mode_reg (mode0, op0);
5734 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5735 op1 = copy_to_mode_reg (mode1, op1);
5736 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5737 op2 = copy_to_mode_reg (mode2, op2);
5739 pat = GEN_FCN (icode) (target, op0, op1, op2);
5747 /* Expand the lvx builtins. */
5749 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5751 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5752 tree arglist = TREE_OPERAND (exp, 1);
5753 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5755 enum machine_mode tmode, mode0;
5757 enum insn_code icode;
5761 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5762 icode = CODE_FOR_altivec_lvx_16qi;
5764 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5765 icode = CODE_FOR_altivec_lvx_8hi;
5767 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5768 icode = CODE_FOR_altivec_lvx_4si;
5770 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5771 icode = CODE_FOR_altivec_lvx_4sf;
5780 arg0 = TREE_VALUE (arglist);
5781 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5782 tmode = insn_data[icode].operand[0].mode;
5783 mode0 = insn_data[icode].operand[1].mode;
5786 || GET_MODE (target) != tmode
5787 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5788 target = gen_reg_rtx (tmode);
5790 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5791 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5793 pat = GEN_FCN (icode) (target, op0);
5800 /* Expand the stvx builtins. */
5802 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5805 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5806 tree arglist = TREE_OPERAND (exp, 1);
5807 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5809 enum machine_mode mode0, mode1;
5811 enum insn_code icode;
5815 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5816 icode = CODE_FOR_altivec_stvx_16qi;
5818 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5819 icode = CODE_FOR_altivec_stvx_8hi;
5821 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5822 icode = CODE_FOR_altivec_stvx_4si;
5824 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5825 icode = CODE_FOR_altivec_stvx_4sf;
5832 arg0 = TREE_VALUE (arglist);
5833 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5834 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5835 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5836 mode0 = insn_data[icode].operand[0].mode;
5837 mode1 = insn_data[icode].operand[1].mode;
5839 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5840 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5841 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5842 op1 = copy_to_mode_reg (mode1, op1);
5844 pat = GEN_FCN (icode) (op0, op1);
5852 /* Expand the dst builtins. */
5854 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5857 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5858 tree arglist = TREE_OPERAND (exp, 1);
5859 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5860 tree arg0, arg1, arg2;
5861 enum machine_mode mode0, mode1, mode2;
5862 rtx pat, op0, op1, op2;
5863 struct builtin_description *d;
5868 /* Handle DST variants. */
5869 d = (struct builtin_description *) bdesc_dst;
5870 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5871 if (d->code == fcode)
5873 arg0 = TREE_VALUE (arglist);
5874 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5875 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5876 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5877 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5878 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5879 mode0 = insn_data[d->icode].operand[0].mode;
5880 mode1 = insn_data[d->icode].operand[1].mode;
5881 mode2 = insn_data[d->icode].operand[2].mode;
5883 /* Invalid arguments, bail out before generating bad rtl. */
5884 if (arg0 == error_mark_node
5885 || arg1 == error_mark_node
5886 || arg2 == error_mark_node)
5889 if (TREE_CODE (arg2) != INTEGER_CST
5890 || TREE_INT_CST_LOW (arg2) & ~0x3)
5892 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5896 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5897 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5898 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5899 op1 = copy_to_mode_reg (mode1, op1);
5901 pat = GEN_FCN (d->icode) (op0, op1, op2);
5912 /* Expand the builtin in EXP and store the result in TARGET. Store
5913 true in *EXPANDEDP if we found a builtin to expand. */
5915 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
5917 struct builtin_description *d;
5918 struct builtin_description_predicates *dp;
5920 enum insn_code icode;
5921 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5922 tree arglist = TREE_OPERAND (exp, 1);
5925 enum machine_mode tmode, mode0;
5926 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5928 target = altivec_expand_ld_builtin (exp, target, expandedp);
5932 target = altivec_expand_st_builtin (exp, target, expandedp);
5936 target = altivec_expand_dst_builtin (exp, target, expandedp);
5944 case ALTIVEC_BUILTIN_STVX:
5945 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5946 case ALTIVEC_BUILTIN_STVEBX:
5947 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5948 case ALTIVEC_BUILTIN_STVEHX:
5949 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5950 case ALTIVEC_BUILTIN_STVEWX:
5951 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5952 case ALTIVEC_BUILTIN_STVXL:
5953 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5955 case ALTIVEC_BUILTIN_MFVSCR:
5956 icode = CODE_FOR_altivec_mfvscr;
5957 tmode = insn_data[icode].operand[0].mode;
5960 || GET_MODE (target) != tmode
5961 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5962 target = gen_reg_rtx (tmode);
5964 pat = GEN_FCN (icode) (target);
5970 case ALTIVEC_BUILTIN_MTVSCR:
5971 icode = CODE_FOR_altivec_mtvscr;
5972 arg0 = TREE_VALUE (arglist);
5973 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5974 mode0 = insn_data[icode].operand[0].mode;
5976 /* If we got invalid arguments bail out before generating bad rtl. */
5977 if (arg0 == error_mark_node)
5980 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5981 op0 = copy_to_mode_reg (mode0, op0);
5983 pat = GEN_FCN (icode) (op0);
5988 case ALTIVEC_BUILTIN_DSSALL:
5989 emit_insn (gen_altivec_dssall ());
5992 case ALTIVEC_BUILTIN_DSS:
5993 icode = CODE_FOR_altivec_dss;
5994 arg0 = TREE_VALUE (arglist);
5995 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5996 mode0 = insn_data[icode].operand[0].mode;
5998 /* If we got invalid arguments bail out before generating bad rtl. */
5999 if (arg0 == error_mark_node)
6002 if (TREE_CODE (arg0) != INTEGER_CST
6003 || TREE_INT_CST_LOW (arg0) & ~0x3)
6005 error ("argument to dss must be a 2-bit unsigned literal");
6009 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6010 op0 = copy_to_mode_reg (mode0, op0);
6012 emit_insn (gen_altivec_dss (op0));
6016 /* Expand abs* operations. */
6017 d = (struct builtin_description *) bdesc_abs;
6018 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6019 if (d->code == fcode)
6020 return altivec_expand_abs_builtin (d->icode, arglist, target);
6022 /* Expand the AltiVec predicates. */
6023 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6024 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6025 if (dp->code == fcode)
6026 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
6028 /* LV* are funky. We initialized them differently. */
6031 case ALTIVEC_BUILTIN_LVSL:
6032 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6034 case ALTIVEC_BUILTIN_LVSR:
6035 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6037 case ALTIVEC_BUILTIN_LVEBX:
6038 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6040 case ALTIVEC_BUILTIN_LVEHX:
6041 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6043 case ALTIVEC_BUILTIN_LVEWX:
6044 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6046 case ALTIVEC_BUILTIN_LVXL:
6047 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6049 case ALTIVEC_BUILTIN_LVX:
6050 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6061 /* Binops that need to be initialized manually, but can be expanded
6062 automagically by rs6000_expand_binop_builtin. */
6063 static struct builtin_description bdesc_2arg_spe[] =
6065 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6066 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6067 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6068 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6069 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6070 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6071 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6072 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6073 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6074 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6075 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6076 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6077 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6078 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6079 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6080 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6081 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6082 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6083 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6084 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6085 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6086 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6089 /* Expand the builtin in EXP and store the result in TARGET. Store
6090 true in *EXPANDEDP if we found a builtin to expand.
6092 This expands the SPE builtins that are not simple unary and binary
6095 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6097 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6098 tree arglist = TREE_OPERAND (exp, 1);
6100 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6101 enum insn_code icode;
6102 enum machine_mode tmode, mode0;
6104 struct builtin_description *d;
6109 /* Syntax check for a 5-bit unsigned immediate. */
6112 case SPE_BUILTIN_EVSTDD:
6113 case SPE_BUILTIN_EVSTDH:
6114 case SPE_BUILTIN_EVSTDW:
6115 case SPE_BUILTIN_EVSTWHE:
6116 case SPE_BUILTIN_EVSTWHO:
6117 case SPE_BUILTIN_EVSTWWE:
6118 case SPE_BUILTIN_EVSTWWO:
6119 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6120 if (TREE_CODE (arg1) != INTEGER_CST
6121 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6123 error ("argument 2 must be a 5-bit unsigned literal");
6131 /* The evsplat*i instructions are not quite generic. */
6134 case SPE_BUILTIN_EVSPLATFI:
6135 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6137 case SPE_BUILTIN_EVSPLATI:
6138 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6144 d = (struct builtin_description *) bdesc_2arg_spe;
6145 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6146 if (d->code == fcode)
6147 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6149 d = (struct builtin_description *) bdesc_spe_predicates;
6150 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6151 if (d->code == fcode)
6152 return spe_expand_predicate_builtin (d->icode, arglist, target);
6154 d = (struct builtin_description *) bdesc_spe_evsel;
6155 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6156 if (d->code == fcode)
6157 return spe_expand_evsel_builtin (d->icode, arglist, target);
6161 case SPE_BUILTIN_EVSTDDX:
6162 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6163 case SPE_BUILTIN_EVSTDHX:
6164 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6165 case SPE_BUILTIN_EVSTDWX:
6166 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6167 case SPE_BUILTIN_EVSTWHEX:
6168 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6169 case SPE_BUILTIN_EVSTWHOX:
6170 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6171 case SPE_BUILTIN_EVSTWWEX:
6172 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6173 case SPE_BUILTIN_EVSTWWOX:
6174 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6175 case SPE_BUILTIN_EVSTDD:
6176 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6177 case SPE_BUILTIN_EVSTDH:
6178 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6179 case SPE_BUILTIN_EVSTDW:
6180 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6181 case SPE_BUILTIN_EVSTWHE:
6182 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6183 case SPE_BUILTIN_EVSTWHO:
6184 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6185 case SPE_BUILTIN_EVSTWWE:
6186 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6187 case SPE_BUILTIN_EVSTWWO:
6188 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6189 case SPE_BUILTIN_MFSPEFSCR:
6190 icode = CODE_FOR_spe_mfspefscr;
6191 tmode = insn_data[icode].operand[0].mode;
6194 || GET_MODE (target) != tmode
6195 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6196 target = gen_reg_rtx (tmode);
6198 pat = GEN_FCN (icode) (target);
6203 case SPE_BUILTIN_MTSPEFSCR:
6204 icode = CODE_FOR_spe_mtspefscr;
6205 arg0 = TREE_VALUE (arglist);
6206 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6207 mode0 = insn_data[icode].operand[0].mode;
6209 if (arg0 == error_mark_node)
6212 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6213 op0 = copy_to_mode_reg (mode0, op0);
6215 pat = GEN_FCN (icode) (op0);
6228 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6230 rtx pat, scratch, tmp;
6231 tree form = TREE_VALUE (arglist);
6232 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6233 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6234 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6235 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6236 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6237 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6241 if (TREE_CODE (form) != INTEGER_CST)
6243 error ("argument 1 of __builtin_spe_predicate must be a constant");
6247 form_int = TREE_INT_CST_LOW (form);
6252 if (arg0 == error_mark_node || arg1 == error_mark_node)
6256 || GET_MODE (target) != SImode
6257 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
6258 target = gen_reg_rtx (SImode);
6260 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6261 op0 = copy_to_mode_reg (mode0, op0);
6262 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6263 op1 = copy_to_mode_reg (mode1, op1);
6265 scratch = gen_reg_rtx (CCmode);
6267 pat = GEN_FCN (icode) (scratch, op0, op1);
6272 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
6273 _lower_. We use one compare, but look in different bits of the
6274 CR for each variant.
6276 There are 2 elements in each SPE simd type (upper/lower). The CR
6277 bits are set as follows:
6279 BIT0 | BIT 1 | BIT 2 | BIT 3
6280 U | L | (U | L) | (U & L)
6282 So, for an "all" relationship, BIT 3 would be set.
6283 For an "any" relationship, BIT 2 would be set. Etc.
6285 Following traditional nomenclature, these bits map to:
6287 BIT0 | BIT 1 | BIT 2 | BIT 3
6290 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
6295 /* All variant. OV bit. */
6297 /* We need to get to the OV bit, which is the ORDERED bit. We
6298 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
6299 that's ugly and will trigger a validate_condition_mode abort.
6300 So let's just use another pattern. */
6301 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
6303 /* Any variant. EQ bit. */
6307 /* Upper variant. LT bit. */
6311 /* Lower variant. GT bit. */
6316 error ("argument 1 of __builtin_spe_predicate is out of range");
6320 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
6321 emit_move_insn (target, tmp);
6326 /* The evsel builtins look like this:
6328 e = __builtin_spe_evsel_OP (a, b, c, d);
6332 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
6333 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
6337 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
6340 tree arg0 = TREE_VALUE (arglist);
6341 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6342 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6343 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
6344 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6345 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6346 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6347 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
6348 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6349 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6354 if (arg0 == error_mark_node || arg1 == error_mark_node
6355 || arg2 == error_mark_node || arg3 == error_mark_node)
6359 || GET_MODE (target) != mode0
6360 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
6361 target = gen_reg_rtx (mode0);
6363 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6364 op0 = copy_to_mode_reg (mode0, op0);
6365 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6366 op1 = copy_to_mode_reg (mode0, op1);
6367 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6368 op2 = copy_to_mode_reg (mode0, op2);
6369 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
6370 op3 = copy_to_mode_reg (mode0, op3);
6372 /* Generate the compare. */
6373 scratch = gen_reg_rtx (CCmode);
6374 pat = GEN_FCN (icode) (scratch, op0, op1);
6379 if (mode0 == V2SImode)
6380 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6382 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6387 /* Expand an expression EXP that calls a built-in function,
6388 with result going to TARGET if that's convenient
6389 (and in mode MODE if that's convenient).
6390 SUBTARGET may be used as the target for computing one of EXP's operands.
6391 IGNORE is nonzero if the value is to be ignored. */
6394 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6395 enum machine_mode mode ATTRIBUTE_UNUSED,
6396 int ignore ATTRIBUTE_UNUSED)
6398 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6399 tree arglist = TREE_OPERAND (exp, 1);
6400 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6401 struct builtin_description *d;
6408 ret = altivec_expand_builtin (exp, target, &success);
6415 ret = spe_expand_builtin (exp, target, &success);
6421 if (TARGET_ALTIVEC || TARGET_SPE)
6423 /* Handle simple unary operations. */
6424 d = (struct builtin_description *) bdesc_1arg;
6425 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6426 if (d->code == fcode)
6427 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6429 /* Handle simple binary operations. */
6430 d = (struct builtin_description *) bdesc_2arg;
6431 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6432 if (d->code == fcode)
6433 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6435 /* Handle simple ternary operations. */
6436 d = (struct builtin_description *) bdesc_3arg;
6437 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6438 if (d->code == fcode)
6439 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6447 rs6000_init_builtins (void)
6449 opaque_V2SI_type_node = copy_node (V2SI_type_node);
6450 opaque_V2SF_type_node = copy_node (V2SF_type_node);
6451 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6454 spe_init_builtins ();
6456 altivec_init_builtins ();
6457 if (TARGET_ALTIVEC || TARGET_SPE)
6458 rs6000_common_init_builtins ();
6461 /* Search through a set of builtins and enable the mask bits.
6462 DESC is an array of builtins.
6463 SIZE is the total number of builtins.
6464 START is the builtin enum at which to start.
6465 END is the builtin enum at which to end. */
6467 enable_mask_for_builtins (struct builtin_description *desc, int size,
6468 enum rs6000_builtins start,
6469 enum rs6000_builtins end)
6473 for (i = 0; i < size; ++i)
6474 if (desc[i].code == start)
6480 for (; i < size; ++i)
6482 /* Flip all the bits on. */
6483 desc[i].mask = target_flags;
6484 if (desc[i].code == end)
6490 spe_init_builtins (void)
6492 tree endlink = void_list_node;
6493 tree puint_type_node = build_pointer_type (unsigned_type_node);
6494 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6495 struct builtin_description *d;
6498 tree v2si_ftype_4_v2si
6499 = build_function_type
6500 (opaque_V2SI_type_node,
6501 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6502 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6503 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6504 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6507 tree v2sf_ftype_4_v2sf
6508 = build_function_type
6509 (opaque_V2SF_type_node,
6510 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6511 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6512 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6513 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6516 tree int_ftype_int_v2si_v2si
6517 = build_function_type
6519 tree_cons (NULL_TREE, integer_type_node,
6520 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6521 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6524 tree int_ftype_int_v2sf_v2sf
6525 = build_function_type
6527 tree_cons (NULL_TREE, integer_type_node,
6528 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6529 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6532 tree void_ftype_v2si_puint_int
6533 = build_function_type (void_type_node,
6534 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6535 tree_cons (NULL_TREE, puint_type_node,
6536 tree_cons (NULL_TREE,
6540 tree void_ftype_v2si_puint_char
6541 = build_function_type (void_type_node,
6542 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6543 tree_cons (NULL_TREE, puint_type_node,
6544 tree_cons (NULL_TREE,
6548 tree void_ftype_v2si_pv2si_int
6549 = build_function_type (void_type_node,
6550 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6551 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6552 tree_cons (NULL_TREE,
6556 tree void_ftype_v2si_pv2si_char
6557 = build_function_type (void_type_node,
6558 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6559 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6560 tree_cons (NULL_TREE,
6565 = build_function_type (void_type_node,
6566 tree_cons (NULL_TREE, integer_type_node, endlink));
6569 = build_function_type (integer_type_node, endlink);
6571 tree v2si_ftype_pv2si_int
6572 = build_function_type (opaque_V2SI_type_node,
6573 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6574 tree_cons (NULL_TREE, integer_type_node,
6577 tree v2si_ftype_puint_int
6578 = build_function_type (opaque_V2SI_type_node,
6579 tree_cons (NULL_TREE, puint_type_node,
6580 tree_cons (NULL_TREE, integer_type_node,
6583 tree v2si_ftype_pushort_int
6584 = build_function_type (opaque_V2SI_type_node,
6585 tree_cons (NULL_TREE, pushort_type_node,
6586 tree_cons (NULL_TREE, integer_type_node,
6589 tree v2si_ftype_signed_char
6590 = build_function_type (opaque_V2SI_type_node,
6591 tree_cons (NULL_TREE, signed_char_type_node,
6594 /* The initialization of the simple binary and unary builtins is
6595 done in rs6000_common_init_builtins, but we have to enable the
6596 mask bits here manually because we have run out of `target_flags'
6597 bits. We really need to redesign this mask business. */
6599 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6600 ARRAY_SIZE (bdesc_2arg),
6603 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6604 ARRAY_SIZE (bdesc_1arg),
6606 SPE_BUILTIN_EVSUBFUSIAAW);
6607 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6608 ARRAY_SIZE (bdesc_spe_predicates),
6609 SPE_BUILTIN_EVCMPEQ,
6610 SPE_BUILTIN_EVFSTSTLT);
6611 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6612 ARRAY_SIZE (bdesc_spe_evsel),
6613 SPE_BUILTIN_EVSEL_CMPGTS,
6614 SPE_BUILTIN_EVSEL_FSTSTEQ);
6616 (*lang_hooks.decls.pushdecl)
6617 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6618 opaque_V2SI_type_node));
6620 /* Initialize irregular SPE builtins. */
6622 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6623 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6624 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6625 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6626 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6627 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6628 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6629 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6630 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6631 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6632 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6633 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6634 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6635 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6636 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6637 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6638 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6639 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6642 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6643 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6644 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6645 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6646 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6647 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6648 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6649 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6650 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6651 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6652 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6653 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6654 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6655 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6656 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6657 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6658 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6659 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6660 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6661 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6662 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6663 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6666 d = (struct builtin_description *) bdesc_spe_predicates;
6667 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6671 switch (insn_data[d->icode].operand[1].mode)
6674 type = int_ftype_int_v2si_v2si;
6677 type = int_ftype_int_v2sf_v2sf;
6683 def_builtin (d->mask, d->name, type, d->code);
6686 /* Evsel predicates. */
6687 d = (struct builtin_description *) bdesc_spe_evsel;
6688 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6692 switch (insn_data[d->icode].operand[1].mode)
6695 type = v2si_ftype_4_v2si;
6698 type = v2sf_ftype_4_v2sf;
6704 def_builtin (d->mask, d->name, type, d->code);
6709 altivec_init_builtins (void)
6711 struct builtin_description *d;
6712 struct builtin_description_predicates *dp;
6714 tree pfloat_type_node = build_pointer_type (float_type_node);
6715 tree pint_type_node = build_pointer_type (integer_type_node);
6716 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6717 tree pchar_type_node = build_pointer_type (char_type_node);
6719 tree pvoid_type_node = build_pointer_type (void_type_node);
6721 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6722 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6723 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6724 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6726 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6728 tree int_ftype_int_v4si_v4si
6729 = build_function_type_list (integer_type_node,
6730 integer_type_node, V4SI_type_node,
6731 V4SI_type_node, NULL_TREE);
6732 tree v4sf_ftype_pcfloat
6733 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6734 tree void_ftype_pfloat_v4sf
6735 = build_function_type_list (void_type_node,
6736 pfloat_type_node, V4SF_type_node, NULL_TREE);
6737 tree v4si_ftype_pcint
6738 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6739 tree void_ftype_pint_v4si
6740 = build_function_type_list (void_type_node,
6741 pint_type_node, V4SI_type_node, NULL_TREE);
6742 tree v8hi_ftype_pcshort
6743 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6744 tree void_ftype_pshort_v8hi
6745 = build_function_type_list (void_type_node,
6746 pshort_type_node, V8HI_type_node, NULL_TREE);
6747 tree v16qi_ftype_pcchar
6748 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6749 tree void_ftype_pchar_v16qi
6750 = build_function_type_list (void_type_node,
6751 pchar_type_node, V16QI_type_node, NULL_TREE);
6752 tree void_ftype_v4si
6753 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6754 tree v8hi_ftype_void
6755 = build_function_type (V8HI_type_node, void_list_node);
6756 tree void_ftype_void
6757 = build_function_type (void_type_node, void_list_node);
6759 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6761 tree v16qi_ftype_long_pcvoid
6762 = build_function_type_list (V16QI_type_node,
6763 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6764 tree v8hi_ftype_long_pcvoid
6765 = build_function_type_list (V8HI_type_node,
6766 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6767 tree v4si_ftype_long_pcvoid
6768 = build_function_type_list (V4SI_type_node,
6769 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6771 tree void_ftype_v4si_long_pvoid
6772 = build_function_type_list (void_type_node,
6773 V4SI_type_node, long_integer_type_node,
6774 pvoid_type_node, NULL_TREE);
6775 tree void_ftype_v16qi_long_pvoid
6776 = build_function_type_list (void_type_node,
6777 V16QI_type_node, long_integer_type_node,
6778 pvoid_type_node, NULL_TREE);
6779 tree void_ftype_v8hi_long_pvoid
6780 = build_function_type_list (void_type_node,
6781 V8HI_type_node, long_integer_type_node,
6782 pvoid_type_node, NULL_TREE);
6783 tree int_ftype_int_v8hi_v8hi
6784 = build_function_type_list (integer_type_node,
6785 integer_type_node, V8HI_type_node,
6786 V8HI_type_node, NULL_TREE);
6787 tree int_ftype_int_v16qi_v16qi
6788 = build_function_type_list (integer_type_node,
6789 integer_type_node, V16QI_type_node,
6790 V16QI_type_node, NULL_TREE);
6791 tree int_ftype_int_v4sf_v4sf
6792 = build_function_type_list (integer_type_node,
6793 integer_type_node, V4SF_type_node,
6794 V4SF_type_node, NULL_TREE);
6795 tree v4si_ftype_v4si
6796 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6797 tree v8hi_ftype_v8hi
6798 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6799 tree v16qi_ftype_v16qi
6800 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6801 tree v4sf_ftype_v4sf
6802 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6803 tree void_ftype_pcvoid_int_char
6804 = build_function_type_list (void_type_node,
6805 pcvoid_type_node, integer_type_node,
6806 char_type_node, NULL_TREE);
6808 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6809 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6810 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6811 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6812 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6813 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6814 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6815 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6816 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6817 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6818 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6819 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6820 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6821 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6822 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6823 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6824 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6825 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6826 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6827 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6828 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
6829 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
6830 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6831 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6832 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6833 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
6834 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
6835 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
6836 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
6837 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
6838 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
6839 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
6841 /* Add the DST variants. */
6842 d = (struct builtin_description *) bdesc_dst;
6843 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6844 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6846 /* Initialize the predicates. */
6847 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6848 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6850 enum machine_mode mode1;
6853 mode1 = insn_data[dp->icode].operand[1].mode;
6858 type = int_ftype_int_v4si_v4si;
6861 type = int_ftype_int_v8hi_v8hi;
6864 type = int_ftype_int_v16qi_v16qi;
6867 type = int_ftype_int_v4sf_v4sf;
6873 def_builtin (dp->mask, dp->name, type, dp->code);
6876 /* Initialize the abs* operators. */
6877 d = (struct builtin_description *) bdesc_abs;
6878 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6880 enum machine_mode mode0;
6883 mode0 = insn_data[d->icode].operand[0].mode;
6888 type = v4si_ftype_v4si;
6891 type = v8hi_ftype_v8hi;
6894 type = v16qi_ftype_v16qi;
6897 type = v4sf_ftype_v4sf;
6903 def_builtin (d->mask, d->name, type, d->code);
6908 rs6000_common_init_builtins (void)
6910 struct builtin_description *d;
6913 tree v4sf_ftype_v4sf_v4sf_v16qi
6914 = build_function_type_list (V4SF_type_node,
6915 V4SF_type_node, V4SF_type_node,
6916 V16QI_type_node, NULL_TREE);
6917 tree v4si_ftype_v4si_v4si_v16qi
6918 = build_function_type_list (V4SI_type_node,
6919 V4SI_type_node, V4SI_type_node,
6920 V16QI_type_node, NULL_TREE);
6921 tree v8hi_ftype_v8hi_v8hi_v16qi
6922 = build_function_type_list (V8HI_type_node,
6923 V8HI_type_node, V8HI_type_node,
6924 V16QI_type_node, NULL_TREE);
6925 tree v16qi_ftype_v16qi_v16qi_v16qi
6926 = build_function_type_list (V16QI_type_node,
6927 V16QI_type_node, V16QI_type_node,
6928 V16QI_type_node, NULL_TREE);
6929 tree v4si_ftype_char
6930 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6931 tree v8hi_ftype_char
6932 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6933 tree v16qi_ftype_char
6934 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6935 tree v8hi_ftype_v16qi
6936 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6937 tree v4sf_ftype_v4sf
6938 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6940 tree v2si_ftype_v2si_v2si
6941 = build_function_type_list (opaque_V2SI_type_node,
6942 opaque_V2SI_type_node,
6943 opaque_V2SI_type_node, NULL_TREE);
6945 tree v2sf_ftype_v2sf_v2sf
6946 = build_function_type_list (opaque_V2SF_type_node,
6947 opaque_V2SF_type_node,
6948 opaque_V2SF_type_node, NULL_TREE);
6950 tree v2si_ftype_int_int
6951 = build_function_type_list (opaque_V2SI_type_node,
6952 integer_type_node, integer_type_node,
6955 tree v2si_ftype_v2si
6956 = build_function_type_list (opaque_V2SI_type_node,
6957 opaque_V2SI_type_node, NULL_TREE);
6959 tree v2sf_ftype_v2sf
6960 = build_function_type_list (opaque_V2SF_type_node,
6961 opaque_V2SF_type_node, NULL_TREE);
6963 tree v2sf_ftype_v2si
6964 = build_function_type_list (opaque_V2SF_type_node,
6965 opaque_V2SI_type_node, NULL_TREE);
6967 tree v2si_ftype_v2sf
6968 = build_function_type_list (opaque_V2SI_type_node,
6969 opaque_V2SF_type_node, NULL_TREE);
6971 tree v2si_ftype_v2si_char
6972 = build_function_type_list (opaque_V2SI_type_node,
6973 opaque_V2SI_type_node,
6974 char_type_node, NULL_TREE);
6976 tree v2si_ftype_int_char
6977 = build_function_type_list (opaque_V2SI_type_node,
6978 integer_type_node, char_type_node, NULL_TREE);
6980 tree v2si_ftype_char
6981 = build_function_type_list (opaque_V2SI_type_node,
6982 char_type_node, NULL_TREE);
6984 tree int_ftype_int_int
6985 = build_function_type_list (integer_type_node,
6986 integer_type_node, integer_type_node,
6989 tree v4si_ftype_v4si_v4si
6990 = build_function_type_list (V4SI_type_node,
6991 V4SI_type_node, V4SI_type_node, NULL_TREE);
6992 tree v4sf_ftype_v4si_char
6993 = build_function_type_list (V4SF_type_node,
6994 V4SI_type_node, char_type_node, NULL_TREE);
6995 tree v4si_ftype_v4sf_char
6996 = build_function_type_list (V4SI_type_node,
6997 V4SF_type_node, char_type_node, NULL_TREE);
6998 tree v4si_ftype_v4si_char
6999 = build_function_type_list (V4SI_type_node,
7000 V4SI_type_node, char_type_node, NULL_TREE);
7001 tree v8hi_ftype_v8hi_char
7002 = build_function_type_list (V8HI_type_node,
7003 V8HI_type_node, char_type_node, NULL_TREE);
7004 tree v16qi_ftype_v16qi_char
7005 = build_function_type_list (V16QI_type_node,
7006 V16QI_type_node, char_type_node, NULL_TREE);
7007 tree v16qi_ftype_v16qi_v16qi_char
7008 = build_function_type_list (V16QI_type_node,
7009 V16QI_type_node, V16QI_type_node,
7010 char_type_node, NULL_TREE);
7011 tree v8hi_ftype_v8hi_v8hi_char
7012 = build_function_type_list (V8HI_type_node,
7013 V8HI_type_node, V8HI_type_node,
7014 char_type_node, NULL_TREE);
7015 tree v4si_ftype_v4si_v4si_char
7016 = build_function_type_list (V4SI_type_node,
7017 V4SI_type_node, V4SI_type_node,
7018 char_type_node, NULL_TREE);
7019 tree v4sf_ftype_v4sf_v4sf_char
7020 = build_function_type_list (V4SF_type_node,
7021 V4SF_type_node, V4SF_type_node,
7022 char_type_node, NULL_TREE);
7023 tree v4sf_ftype_v4sf_v4sf
7024 = build_function_type_list (V4SF_type_node,
7025 V4SF_type_node, V4SF_type_node, NULL_TREE);
7026 tree v4sf_ftype_v4sf_v4sf_v4si
7027 = build_function_type_list (V4SF_type_node,
7028 V4SF_type_node, V4SF_type_node,
7029 V4SI_type_node, NULL_TREE);
7030 tree v4sf_ftype_v4sf_v4sf_v4sf
7031 = build_function_type_list (V4SF_type_node,
7032 V4SF_type_node, V4SF_type_node,
7033 V4SF_type_node, NULL_TREE);
7034 tree v4si_ftype_v4si_v4si_v4si
7035 = build_function_type_list (V4SI_type_node,
7036 V4SI_type_node, V4SI_type_node,
7037 V4SI_type_node, NULL_TREE);
7038 tree v8hi_ftype_v8hi_v8hi
7039 = build_function_type_list (V8HI_type_node,
7040 V8HI_type_node, V8HI_type_node, NULL_TREE);
7041 tree v8hi_ftype_v8hi_v8hi_v8hi
7042 = build_function_type_list (V8HI_type_node,
7043 V8HI_type_node, V8HI_type_node,
7044 V8HI_type_node, NULL_TREE);
7045 tree v4si_ftype_v8hi_v8hi_v4si
7046 = build_function_type_list (V4SI_type_node,
7047 V8HI_type_node, V8HI_type_node,
7048 V4SI_type_node, NULL_TREE);
7049 tree v4si_ftype_v16qi_v16qi_v4si
7050 = build_function_type_list (V4SI_type_node,
7051 V16QI_type_node, V16QI_type_node,
7052 V4SI_type_node, NULL_TREE);
7053 tree v16qi_ftype_v16qi_v16qi
7054 = build_function_type_list (V16QI_type_node,
7055 V16QI_type_node, V16QI_type_node, NULL_TREE);
7056 tree v4si_ftype_v4sf_v4sf
7057 = build_function_type_list (V4SI_type_node,
7058 V4SF_type_node, V4SF_type_node, NULL_TREE);
7059 tree v8hi_ftype_v16qi_v16qi
7060 = build_function_type_list (V8HI_type_node,
7061 V16QI_type_node, V16QI_type_node, NULL_TREE);
7062 tree v4si_ftype_v8hi_v8hi
7063 = build_function_type_list (V4SI_type_node,
7064 V8HI_type_node, V8HI_type_node, NULL_TREE);
7065 tree v8hi_ftype_v4si_v4si
7066 = build_function_type_list (V8HI_type_node,
7067 V4SI_type_node, V4SI_type_node, NULL_TREE);
7068 tree v16qi_ftype_v8hi_v8hi
7069 = build_function_type_list (V16QI_type_node,
7070 V8HI_type_node, V8HI_type_node, NULL_TREE);
7071 tree v4si_ftype_v16qi_v4si
7072 = build_function_type_list (V4SI_type_node,
7073 V16QI_type_node, V4SI_type_node, NULL_TREE);
7074 tree v4si_ftype_v16qi_v16qi
7075 = build_function_type_list (V4SI_type_node,
7076 V16QI_type_node, V16QI_type_node, NULL_TREE);
7077 tree v4si_ftype_v8hi_v4si
7078 = build_function_type_list (V4SI_type_node,
7079 V8HI_type_node, V4SI_type_node, NULL_TREE);
7080 tree v4si_ftype_v8hi
7081 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7082 tree int_ftype_v4si_v4si
7083 = build_function_type_list (integer_type_node,
7084 V4SI_type_node, V4SI_type_node, NULL_TREE);
7085 tree int_ftype_v4sf_v4sf
7086 = build_function_type_list (integer_type_node,
7087 V4SF_type_node, V4SF_type_node, NULL_TREE);
7088 tree int_ftype_v16qi_v16qi
7089 = build_function_type_list (integer_type_node,
7090 V16QI_type_node, V16QI_type_node, NULL_TREE);
7091 tree int_ftype_v8hi_v8hi
7092 = build_function_type_list (integer_type_node,
7093 V8HI_type_node, V8HI_type_node, NULL_TREE);
7095 /* Add the simple ternary operators. */
7096 d = (struct builtin_description *) bdesc_3arg;
7097 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7100 enum machine_mode mode0, mode1, mode2, mode3;
7103 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7106 mode0 = insn_data[d->icode].operand[0].mode;
7107 mode1 = insn_data[d->icode].operand[1].mode;
7108 mode2 = insn_data[d->icode].operand[2].mode;
7109 mode3 = insn_data[d->icode].operand[3].mode;
7111 /* When all four are of the same mode. */
7112 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7117 type = v4si_ftype_v4si_v4si_v4si;
7120 type = v4sf_ftype_v4sf_v4sf_v4sf;
7123 type = v8hi_ftype_v8hi_v8hi_v8hi;
7126 type = v16qi_ftype_v16qi_v16qi_v16qi;
7132 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7137 type = v4si_ftype_v4si_v4si_v16qi;
7140 type = v4sf_ftype_v4sf_v4sf_v16qi;
7143 type = v8hi_ftype_v8hi_v8hi_v16qi;
7146 type = v16qi_ftype_v16qi_v16qi_v16qi;
7152 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7153 && mode3 == V4SImode)
7154 type = v4si_ftype_v16qi_v16qi_v4si;
7155 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7156 && mode3 == V4SImode)
7157 type = v4si_ftype_v8hi_v8hi_v4si;
7158 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7159 && mode3 == V4SImode)
7160 type = v4sf_ftype_v4sf_v4sf_v4si;
7162 /* vchar, vchar, vchar, 4 bit literal. */
7163 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
7165 type = v16qi_ftype_v16qi_v16qi_char;
7167 /* vshort, vshort, vshort, 4 bit literal. */
7168 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
7170 type = v8hi_ftype_v8hi_v8hi_char;
7172 /* vint, vint, vint, 4 bit literal. */
7173 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
7175 type = v4si_ftype_v4si_v4si_char;
7177 /* vfloat, vfloat, vfloat, 4 bit literal. */
7178 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7180 type = v4sf_ftype_v4sf_v4sf_char;
7185 def_builtin (d->mask, d->name, type, d->code);
7188 /* Add the simple binary operators. */
7189 d = (struct builtin_description *) bdesc_2arg;
7190 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7192 enum machine_mode mode0, mode1, mode2;
7195 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7198 mode0 = insn_data[d->icode].operand[0].mode;
7199 mode1 = insn_data[d->icode].operand[1].mode;
7200 mode2 = insn_data[d->icode].operand[2].mode;
7202 /* When all three operands are of the same mode. */
7203 if (mode0 == mode1 && mode1 == mode2)
7208 type = v4sf_ftype_v4sf_v4sf;
7211 type = v4si_ftype_v4si_v4si;
7214 type = v16qi_ftype_v16qi_v16qi;
7217 type = v8hi_ftype_v8hi_v8hi;
7220 type = v2si_ftype_v2si_v2si;
7223 type = v2sf_ftype_v2sf_v2sf;
7226 type = int_ftype_int_int;
7233 /* A few other combos we really don't want to do manually. */
7235 /* vint, vfloat, vfloat. */
7236 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7237 type = v4si_ftype_v4sf_v4sf;
7239 /* vshort, vchar, vchar. */
7240 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7241 type = v8hi_ftype_v16qi_v16qi;
7243 /* vint, vshort, vshort. */
7244 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7245 type = v4si_ftype_v8hi_v8hi;
7247 /* vshort, vint, vint. */
7248 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7249 type = v8hi_ftype_v4si_v4si;
7251 /* vchar, vshort, vshort. */
7252 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7253 type = v16qi_ftype_v8hi_v8hi;
7255 /* vint, vchar, vint. */
7256 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7257 type = v4si_ftype_v16qi_v4si;
7259 /* vint, vchar, vchar. */
7260 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7261 type = v4si_ftype_v16qi_v16qi;
7263 /* vint, vshort, vint. */
7264 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7265 type = v4si_ftype_v8hi_v4si;
7267 /* vint, vint, 5 bit literal. */
7268 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7269 type = v4si_ftype_v4si_char;
7271 /* vshort, vshort, 5 bit literal. */
7272 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7273 type = v8hi_ftype_v8hi_char;
7275 /* vchar, vchar, 5 bit literal. */
7276 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7277 type = v16qi_ftype_v16qi_char;
7279 /* vfloat, vint, 5 bit literal. */
7280 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7281 type = v4sf_ftype_v4si_char;
7283 /* vint, vfloat, 5 bit literal. */
7284 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7285 type = v4si_ftype_v4sf_char;
7287 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7288 type = v2si_ftype_int_int;
7290 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7291 type = v2si_ftype_v2si_char;
7293 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7294 type = v2si_ftype_int_char;
7297 else if (mode0 == SImode)
7302 type = int_ftype_v4si_v4si;
7305 type = int_ftype_v4sf_v4sf;
7308 type = int_ftype_v16qi_v16qi;
7311 type = int_ftype_v8hi_v8hi;
7321 def_builtin (d->mask, d->name, type, d->code);
7324 /* Add the simple unary operators. */
7325 d = (struct builtin_description *) bdesc_1arg;
7326 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7328 enum machine_mode mode0, mode1;
7331 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7334 mode0 = insn_data[d->icode].operand[0].mode;
7335 mode1 = insn_data[d->icode].operand[1].mode;
7337 if (mode0 == V4SImode && mode1 == QImode)
7338 type = v4si_ftype_char;
7339 else if (mode0 == V8HImode && mode1 == QImode)
7340 type = v8hi_ftype_char;
7341 else if (mode0 == V16QImode && mode1 == QImode)
7342 type = v16qi_ftype_char;
7343 else if (mode0 == V4SFmode && mode1 == V4SFmode)
7344 type = v4sf_ftype_v4sf;
7345 else if (mode0 == V8HImode && mode1 == V16QImode)
7346 type = v8hi_ftype_v16qi;
7347 else if (mode0 == V4SImode && mode1 == V8HImode)
7348 type = v4si_ftype_v8hi;
7349 else if (mode0 == V2SImode && mode1 == V2SImode)
7350 type = v2si_ftype_v2si;
7351 else if (mode0 == V2SFmode && mode1 == V2SFmode)
7352 type = v2sf_ftype_v2sf;
7353 else if (mode0 == V2SFmode && mode1 == V2SImode)
7354 type = v2sf_ftype_v2si;
7355 else if (mode0 == V2SImode && mode1 == V2SFmode)
7356 type = v2si_ftype_v2sf;
7357 else if (mode0 == V2SImode && mode1 == QImode)
7358 type = v2si_ftype_char;
7362 def_builtin (d->mask, d->name, type, d->code);
7367 rs6000_init_libfuncs (void)
7369 if (!TARGET_HARD_FLOAT)
7372 if (DEFAULT_ABI != ABI_V4)
7374 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7376 /* AIX library routines for float->int conversion. */
7377 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7378 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7379 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
7380 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
7383 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
7384 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7385 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7386 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7387 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7391 /* 32-bit SVR4 quad floating point routines. */
7393 set_optab_libfunc (add_optab, TFmode, "_q_add");
7394 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7395 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7396 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7397 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7398 if (TARGET_PPC_GPOPT || TARGET_POWER2)
7399 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7401 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7402 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7403 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7404 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7405 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7406 set_optab_libfunc (le_optab, TFmode, "_q_fle");
7408 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7409 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7410 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7411 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7412 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7413 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7414 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7418 /* Expand a block move operation, and return 1 if successful. Return 0
7419 if we should let the compiler generate normal code.
7421 operands[0] is the destination
7422 operands[1] is the source
7423 operands[2] is the length
7424 operands[3] is the alignment */
7426 #define MAX_MOVE_REG 4
7429 expand_block_move (rtx operands[])
7431 rtx orig_dest = operands[0];
7432 rtx orig_src = operands[1];
7433 rtx bytes_rtx = operands[2];
7434 rtx align_rtx = operands[3];
7435 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7440 rtx stores[MAX_MOVE_REG];
7443 /* If this is not a fixed size move, just call memcpy */
7447 /* If this is not a fixed size alignment, abort */
7448 if (GET_CODE (align_rtx) != CONST_INT)
7450 align = INTVAL (align_rtx);
7452 /* Anything to move? */
7453 bytes = INTVAL (bytes_rtx);
7457 /* store_one_arg depends on expand_block_move to handle at least the size of
7458 reg_parm_stack_space. */
7459 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7462 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7465 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
7466 rtx (*mov) (rtx, rtx);
7468 enum machine_mode mode = BLKmode;
7472 && bytes > 24 /* move up to 32 bytes at a time */
7480 && ! fixed_regs[12])
7482 move_bytes = (bytes > 32) ? 32 : bytes;
7483 gen_func.movstrsi = gen_movstrsi_8reg;
7485 else if (TARGET_STRING
7486 && bytes > 16 /* move up to 24 bytes at a time */
7492 && ! fixed_regs[10])
7494 move_bytes = (bytes > 24) ? 24 : bytes;
7495 gen_func.movstrsi = gen_movstrsi_6reg;
7497 else if (TARGET_STRING
7498 && bytes > 8 /* move up to 16 bytes at a time */
7504 move_bytes = (bytes > 16) ? 16 : bytes;
7505 gen_func.movstrsi = gen_movstrsi_4reg;
7507 else if (bytes >= 8 && TARGET_POWERPC64
7508 /* 64-bit loads and stores require word-aligned
7510 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7514 gen_func.mov = gen_movdi;
7516 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7517 { /* move up to 8 bytes at a time */
7518 move_bytes = (bytes > 8) ? 8 : bytes;
7519 gen_func.movstrsi = gen_movstrsi_2reg;
7521 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7522 { /* move 4 bytes */
7525 gen_func.mov = gen_movsi;
7527 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7528 { /* move 2 bytes */
7531 gen_func.mov = gen_movhi;
7533 else if (TARGET_STRING && bytes > 1)
7534 { /* move up to 4 bytes at a time */
7535 move_bytes = (bytes > 4) ? 4 : bytes;
7536 gen_func.movstrsi = gen_movstrsi_1reg;
7538 else /* move 1 byte at a time */
7542 gen_func.mov = gen_movqi;
7545 src = adjust_address (orig_src, mode, offset);
7546 dest = adjust_address (orig_dest, mode, offset);
7548 if (mode != BLKmode)
7550 rtx tmp_reg = gen_reg_rtx (mode);
7552 emit_insn ((*gen_func.mov) (tmp_reg, src));
7553 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7556 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7559 for (i = 0; i < num_reg; i++)
7560 emit_insn (stores[i]);
7564 if (mode == BLKmode)
7566 /* Move the address into scratch registers. The movstrsi
7567 patterns require zero offset. */
7568 if (!REG_P (XEXP (src, 0)))
7570 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7571 src = replace_equiv_address (src, src_reg);
7573 set_mem_size (src, GEN_INT (move_bytes));
7575 if (!REG_P (XEXP (dest, 0)))
7577 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7578 dest = replace_equiv_address (dest, dest_reg);
7580 set_mem_size (dest, GEN_INT (move_bytes));
7582 emit_insn ((*gen_func.movstrsi) (dest, src,
7583 GEN_INT (move_bytes & 31),
7592 /* Return 1 if OP is a load multiple operation. It is known to be a
7593 PARALLEL and the first section will be tested. */
7596 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7598 int count = XVECLEN (op, 0);
7599 unsigned int dest_regno;
7603 /* Perform a quick check so we don't blow up below. */
7605 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7606 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7607 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7610 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7611 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7613 for (i = 1; i < count; i++)
7615 rtx elt = XVECEXP (op, 0, i);
7617 if (GET_CODE (elt) != SET
7618 || GET_CODE (SET_DEST (elt)) != REG
7619 || GET_MODE (SET_DEST (elt)) != SImode
7620 || REGNO (SET_DEST (elt)) != dest_regno + i
7621 || GET_CODE (SET_SRC (elt)) != MEM
7622 || GET_MODE (SET_SRC (elt)) != SImode
7623 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7624 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7625 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7626 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7633 /* Similar, but tests for store multiple. Here, the second vector element
7634 is a CLOBBER. It will be tested later. */
7637 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7639 int count = XVECLEN (op, 0) - 1;
7640 unsigned int src_regno;
7644 /* Perform a quick check so we don't blow up below. */
7646 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7647 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7648 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7651 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7652 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7654 for (i = 1; i < count; i++)
7656 rtx elt = XVECEXP (op, 0, i + 1);
7658 if (GET_CODE (elt) != SET
7659 || GET_CODE (SET_SRC (elt)) != REG
7660 || GET_MODE (SET_SRC (elt)) != SImode
7661 || REGNO (SET_SRC (elt)) != src_regno + i
7662 || GET_CODE (SET_DEST (elt)) != MEM
7663 || GET_MODE (SET_DEST (elt)) != SImode
7664 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7665 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7666 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7667 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7674 /* Return a string to perform a load_multiple operation.
7675 operands[0] is the vector.
7676 operands[1] is the source address.
7677 operands[2] is the first destination register. */
7680 rs6000_output_load_multiple (rtx operands[3])
7682 /* We have to handle the case where the pseudo used to contain the address
7683 is assigned to one of the output registers. */
7685 int words = XVECLEN (operands[0], 0);
7688 if (XVECLEN (operands[0], 0) == 1)
7689 return "{l|lwz} %2,0(%1)";
7691 for (i = 0; i < words; i++)
7692 if (refers_to_regno_p (REGNO (operands[2]) + i,
7693 REGNO (operands[2]) + i + 1, operands[1], 0))
7697 xop[0] = GEN_INT (4 * (words-1));
7698 xop[1] = operands[1];
7699 xop[2] = operands[2];
7700 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7705 xop[0] = GEN_INT (4 * (words-1));
7706 xop[1] = operands[1];
7707 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7708 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7713 for (j = 0; j < words; j++)
7716 xop[0] = GEN_INT (j * 4);
7717 xop[1] = operands[1];
7718 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7719 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7721 xop[0] = GEN_INT (i * 4);
7722 xop[1] = operands[1];
7723 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7728 return "{lsi|lswi} %2,%1,%N0";
7731 /* Return 1 for a parallel vrsave operation. */
7734 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7736 int count = XVECLEN (op, 0);
7737 unsigned int dest_regno, src_regno;
7741 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7742 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7743 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7746 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7747 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7749 if (dest_regno != VRSAVE_REGNO
7750 && src_regno != VRSAVE_REGNO)
7753 for (i = 1; i < count; i++)
7755 rtx elt = XVECEXP (op, 0, i);
7757 if (GET_CODE (elt) != CLOBBER
7758 && GET_CODE (elt) != SET)
7765 /* Return 1 for an PARALLEL suitable for mfcr. */
7768 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7770 int count = XVECLEN (op, 0);
7773 /* Perform a quick check so we don't blow up below. */
7775 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7776 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7777 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7780 for (i = 0; i < count; i++)
7782 rtx exp = XVECEXP (op, 0, i);
7787 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
7789 if (GET_CODE (src_reg) != REG
7790 || GET_MODE (src_reg) != CCmode
7791 || ! CR_REGNO_P (REGNO (src_reg)))
7794 if (GET_CODE (exp) != SET
7795 || GET_CODE (SET_DEST (exp)) != REG
7796 || GET_MODE (SET_DEST (exp)) != SImode
7797 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
7799 unspec = SET_SRC (exp);
7800 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
7802 if (GET_CODE (unspec) != UNSPEC
7803 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
7804 || XVECLEN (unspec, 0) != 2
7805 || XVECEXP (unspec, 0, 0) != src_reg
7806 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7807 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7813 /* Return 1 for an PARALLEL suitable for mtcrf. */
7816 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7818 int count = XVECLEN (op, 0);
7822 /* Perform a quick check so we don't blow up below. */
7824 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7825 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7826 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7828 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7830 if (GET_CODE (src_reg) != REG
7831 || GET_MODE (src_reg) != SImode
7832 || ! INT_REGNO_P (REGNO (src_reg)))
7835 for (i = 0; i < count; i++)
7837 rtx exp = XVECEXP (op, 0, i);
7841 if (GET_CODE (exp) != SET
7842 || GET_CODE (SET_DEST (exp)) != REG
7843 || GET_MODE (SET_DEST (exp)) != CCmode
7844 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7846 unspec = SET_SRC (exp);
7847 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7849 if (GET_CODE (unspec) != UNSPEC
7850 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7851 || XVECLEN (unspec, 0) != 2
7852 || XVECEXP (unspec, 0, 0) != src_reg
7853 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7854 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7860 /* Return 1 for an PARALLEL suitable for lmw. */
7863 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7865 int count = XVECLEN (op, 0);
7866 unsigned int dest_regno;
7868 unsigned int base_regno;
7869 HOST_WIDE_INT offset;
7872 /* Perform a quick check so we don't blow up below. */
7874 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7875 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7876 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7879 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7880 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7883 || count != 32 - (int) dest_regno)
7886 if (legitimate_indirect_address_p (src_addr, 0))
7889 base_regno = REGNO (src_addr);
7890 if (base_regno == 0)
7893 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7895 offset = INTVAL (XEXP (src_addr, 1));
7896 base_regno = REGNO (XEXP (src_addr, 0));
7901 for (i = 0; i < count; i++)
7903 rtx elt = XVECEXP (op, 0, i);
7906 HOST_WIDE_INT newoffset;
7908 if (GET_CODE (elt) != SET
7909 || GET_CODE (SET_DEST (elt)) != REG
7910 || GET_MODE (SET_DEST (elt)) != SImode
7911 || REGNO (SET_DEST (elt)) != dest_regno + i
7912 || GET_CODE (SET_SRC (elt)) != MEM
7913 || GET_MODE (SET_SRC (elt)) != SImode)
7915 newaddr = XEXP (SET_SRC (elt), 0);
7916 if (legitimate_indirect_address_p (newaddr, 0))
7921 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7923 addr_reg = XEXP (newaddr, 0);
7924 newoffset = INTVAL (XEXP (newaddr, 1));
7928 if (REGNO (addr_reg) != base_regno
7929 || newoffset != offset + 4 * i)
7936 /* Return 1 for an PARALLEL suitable for stmw. */
7939 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7941 int count = XVECLEN (op, 0);
7942 unsigned int src_regno;
7944 unsigned int base_regno;
7945 HOST_WIDE_INT offset;
7948 /* Perform a quick check so we don't blow up below. */
7950 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7951 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7952 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7955 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7956 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7959 || count != 32 - (int) src_regno)
7962 if (legitimate_indirect_address_p (dest_addr, 0))
7965 base_regno = REGNO (dest_addr);
7966 if (base_regno == 0)
7969 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7971 offset = INTVAL (XEXP (dest_addr, 1));
7972 base_regno = REGNO (XEXP (dest_addr, 0));
7977 for (i = 0; i < count; i++)
7979 rtx elt = XVECEXP (op, 0, i);
7982 HOST_WIDE_INT newoffset;
7984 if (GET_CODE (elt) != SET
7985 || GET_CODE (SET_SRC (elt)) != REG
7986 || GET_MODE (SET_SRC (elt)) != SImode
7987 || REGNO (SET_SRC (elt)) != src_regno + i
7988 || GET_CODE (SET_DEST (elt)) != MEM
7989 || GET_MODE (SET_DEST (elt)) != SImode)
7991 newaddr = XEXP (SET_DEST (elt), 0);
7992 if (legitimate_indirect_address_p (newaddr, 0))
7997 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7999 addr_reg = XEXP (newaddr, 0);
8000 newoffset = INTVAL (XEXP (newaddr, 1));
8004 if (REGNO (addr_reg) != base_regno
8005 || newoffset != offset + 4 * i)
8012 /* A validation routine: say whether CODE, a condition code, and MODE
8013 match. The other alternatives either don't make sense or should
8014 never be generated. */
8017 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8019 if (GET_RTX_CLASS (code) != '<'
8020 || GET_MODE_CLASS (mode) != MODE_CC)
8023 /* These don't make sense. */
8024 if ((code == GT || code == LT || code == GE || code == LE)
8025 && mode == CCUNSmode)
8028 if ((code == GTU || code == LTU || code == GEU || code == LEU)
8029 && mode != CCUNSmode)
8032 if (mode != CCFPmode
8033 && (code == ORDERED || code == UNORDERED
8034 || code == UNEQ || code == LTGT
8035 || code == UNGT || code == UNLT
8036 || code == UNGE || code == UNLE))
8039 /* These should never be generated except for
8040 flag_finite_math_only. */
8041 if (mode == CCFPmode
8042 && ! flag_finite_math_only
8043 && (code == LE || code == GE
8044 || code == UNEQ || code == LTGT
8045 || code == UNGT || code == UNLT))
8048 /* These are invalid; the information is not there. */
8049 if (mode == CCEQmode
8050 && code != EQ && code != NE)
8054 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
8055 We only check the opcode against the mode of the CC value here. */
8058 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8060 enum rtx_code code = GET_CODE (op);
8061 enum machine_mode cc_mode;
8063 if (GET_RTX_CLASS (code) != '<')
8066 cc_mode = GET_MODE (XEXP (op, 0));
8067 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8070 validate_condition_mode (code, cc_mode);
8075 /* Return 1 if OP is a comparison operation that is valid for a branch
8076 insn and which is true if the corresponding bit in the CC register
8080 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8084 if (! branch_comparison_operator (op, mode))
8087 code = GET_CODE (op);
8088 return (code == EQ || code == LT || code == GT
8089 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
8090 || code == LTU || code == GTU
8091 || code == UNORDERED);
8094 /* Return 1 if OP is a comparison operation that is valid for an scc
8095 insn: it must be a positive comparison. */
8098 scc_comparison_operator (rtx op, enum machine_mode mode)
8100 return branch_positive_comparison_operator (op, mode);
8104 trap_comparison_operator (rtx op, enum machine_mode mode)
8106 if (mode != VOIDmode && mode != GET_MODE (op))
8108 return GET_RTX_CLASS (GET_CODE (op)) == '<';
8112 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8114 enum rtx_code code = GET_CODE (op);
8115 return (code == AND || code == IOR || code == XOR);
8119 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8121 enum rtx_code code = GET_CODE (op);
8122 return (code == IOR || code == XOR);
8126 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8128 enum rtx_code code = GET_CODE (op);
8129 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8132 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8133 mask required to convert the result of a rotate insn into a shift
8134 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8137 includes_lshift_p (rtx shiftop, rtx andop)
8139 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8141 shift_mask <<= INTVAL (shiftop);
8143 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8146 /* Similar, but for right shift. */
8149 includes_rshift_p (rtx shiftop, rtx andop)
8151 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8153 shift_mask >>= INTVAL (shiftop);
8155 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8158 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8159 to perform a left shift. It must have exactly SHIFTOP least
8160 significant 0's, then one or more 1's, then zero or more 0's. */
8163 includes_rldic_lshift_p (rtx shiftop, rtx andop)
8165 if (GET_CODE (andop) == CONST_INT)
8167 HOST_WIDE_INT c, lsb, shift_mask;
8170 if (c == 0 || c == ~0)
8174 shift_mask <<= INTVAL (shiftop);
8176 /* Find the least significant one bit. */
8179 /* It must coincide with the LSB of the shift mask. */
8180 if (-lsb != shift_mask)
8183 /* Invert to look for the next transition (if any). */
8186 /* Remove the low group of ones (originally low group of zeros). */
8189 /* Again find the lsb, and check we have all 1's above. */
8193 else if (GET_CODE (andop) == CONST_DOUBLE
8194 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8196 HOST_WIDE_INT low, high, lsb;
8197 HOST_WIDE_INT shift_mask_low, shift_mask_high;
8199 low = CONST_DOUBLE_LOW (andop);
8200 if (HOST_BITS_PER_WIDE_INT < 64)
8201 high = CONST_DOUBLE_HIGH (andop);
8203 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8204 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8207 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8209 shift_mask_high = ~0;
8210 if (INTVAL (shiftop) > 32)
8211 shift_mask_high <<= INTVAL (shiftop) - 32;
8215 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8222 return high == -lsb;
8225 shift_mask_low = ~0;
8226 shift_mask_low <<= INTVAL (shiftop);
8230 if (-lsb != shift_mask_low)
8233 if (HOST_BITS_PER_WIDE_INT < 64)
8238 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8241 return high == -lsb;
8245 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
8251 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8252 to perform a left shift. It must have SHIFTOP or more least
8253 significant 0's, with the remainder of the word 1's. */
8256 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
8258 if (GET_CODE (andop) == CONST_INT)
8260 HOST_WIDE_INT c, lsb, shift_mask;
8263 shift_mask <<= INTVAL (shiftop);
8266 /* Find the least significant one bit. */
8269 /* It must be covered by the shift mask.
8270 This test also rejects c == 0. */
8271 if ((lsb & shift_mask) == 0)
8274 /* Check we have all 1's above the transition, and reject all 1's. */
8275 return c == -lsb && lsb != 1;
8277 else if (GET_CODE (andop) == CONST_DOUBLE
8278 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8280 HOST_WIDE_INT low, lsb, shift_mask_low;
8282 low = CONST_DOUBLE_LOW (andop);
8284 if (HOST_BITS_PER_WIDE_INT < 64)
8286 HOST_WIDE_INT high, shift_mask_high;
8288 high = CONST_DOUBLE_HIGH (andop);
8292 shift_mask_high = ~0;
8293 if (INTVAL (shiftop) > 32)
8294 shift_mask_high <<= INTVAL (shiftop) - 32;
8298 if ((lsb & shift_mask_high) == 0)
8301 return high == -lsb;
8307 shift_mask_low = ~0;
8308 shift_mask_low <<= INTVAL (shiftop);
8312 if ((lsb & shift_mask_low) == 0)
8315 return low == -lsb && lsb != 1;
8321 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8322 for lfq and stfq insns.
8324 Note reg1 and reg2 *must* be hard registers. To be sure we will
8325 abort if we are passed pseudo registers. */
8328 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8330 /* We might have been passed a SUBREG. */
8331 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8334 return (REGNO (reg1) == REGNO (reg2) - 1);
8337 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8338 addr1 and addr2 must be in consecutive memory locations
8339 (addr2 == addr1 + 8). */
8342 addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
8347 /* Extract an offset (if used) from the first addr. */
8348 if (GET_CODE (addr1) == PLUS)
8350 /* If not a REG, return zero. */
8351 if (GET_CODE (XEXP (addr1, 0)) != REG)
8355 reg1 = REGNO (XEXP (addr1, 0));
8356 /* The offset must be constant! */
8357 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8359 offset1 = INTVAL (XEXP (addr1, 1));
8362 else if (GET_CODE (addr1) != REG)
8366 reg1 = REGNO (addr1);
8367 /* This was a simple (mem (reg)) expression. Offset is 0. */
8371 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8372 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8373 register as addr1. */
8374 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8376 if (GET_CODE (addr2) != PLUS)
8379 if (GET_CODE (XEXP (addr2, 0)) != REG
8380 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8383 if (reg1 != REGNO (XEXP (addr2, 0)))
8386 /* The offset for the second addr must be 8 more than the first addr. */
8387 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8390 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8395 /* Return the register class of a scratch register needed to copy IN into
8396 or out of a register in CLASS in MODE. If it can be done directly,
8397 NO_REGS is returned. */
8400 secondary_reload_class (enum reg_class class,
8401 enum machine_mode mode ATTRIBUTE_UNUSED,
8406 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8408 && MACHOPIC_INDIRECT
8412 /* We cannot copy a symbolic operand directly into anything
8413 other than BASE_REGS for TARGET_ELF. So indicate that a
8414 register from BASE_REGS is needed as an intermediate
8417 On Darwin, pic addresses require a load from memory, which
8418 needs a base register. */
8419 if (class != BASE_REGS
8420 && (GET_CODE (in) == SYMBOL_REF
8421 || GET_CODE (in) == HIGH
8422 || GET_CODE (in) == LABEL_REF
8423 || GET_CODE (in) == CONST))
8427 if (GET_CODE (in) == REG)
8430 if (regno >= FIRST_PSEUDO_REGISTER)
8432 regno = true_regnum (in);
8433 if (regno >= FIRST_PSEUDO_REGISTER)
8437 else if (GET_CODE (in) == SUBREG)
8439 regno = true_regnum (in);
8440 if (regno >= FIRST_PSEUDO_REGISTER)
8446 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8448 if (class == GENERAL_REGS || class == BASE_REGS
8449 || (regno >= 0 && INT_REGNO_P (regno)))
8452 /* Constants, memory, and FP registers can go into FP registers. */
8453 if ((regno == -1 || FP_REGNO_P (regno))
8454 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8457 /* Memory, and AltiVec registers can go into AltiVec registers. */
8458 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8459 && class == ALTIVEC_REGS)
8462 /* We can copy among the CR registers. */
8463 if ((class == CR_REGS || class == CR0_REGS)
8464 && regno >= 0 && CR_REGNO_P (regno))
8467 /* Otherwise, we need GENERAL_REGS. */
8468 return GENERAL_REGS;
8471 /* Given a comparison operation, return the bit number in CCR to test. We
8472 know this is a valid comparison.
8474 SCC_P is 1 if this is for an scc. That means that %D will have been
8475 used instead of %C, so the bits will be in different places.
8477 Return -1 if OP isn't a valid comparison for some reason. */
8480 ccr_bit (rtx op, int scc_p)
8482 enum rtx_code code = GET_CODE (op);
8483 enum machine_mode cc_mode;
8488 if (GET_RTX_CLASS (code) != '<')
8493 if (GET_CODE (reg) != REG
8494 || ! CR_REGNO_P (REGNO (reg)))
8497 cc_mode = GET_MODE (reg);
8498 cc_regnum = REGNO (reg);
8499 base_bit = 4 * (cc_regnum - CR0_REGNO);
8501 validate_condition_mode (code, cc_mode);
8503 /* When generating a sCOND operation, only positive conditions are
8505 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8506 && code != GTU && code != LTU)
8512 if (TARGET_E500 && !TARGET_FPRS
8513 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8514 return base_bit + 1;
8515 return scc_p ? base_bit + 3 : base_bit + 2;
8517 if (TARGET_E500 && !TARGET_FPRS
8518 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8519 return base_bit + 1;
8520 return base_bit + 2;
8521 case GT: case GTU: case UNLE:
8522 return base_bit + 1;
8523 case LT: case LTU: case UNGE:
8525 case ORDERED: case UNORDERED:
8526 return base_bit + 3;
8529 /* If scc, we will have done a cror to put the bit in the
8530 unordered position. So test that bit. For integer, this is ! LT
8531 unless this is an scc insn. */
8532 return scc_p ? base_bit + 3 : base_bit;
8535 return scc_p ? base_bit + 3 : base_bit + 1;
8542 /* Return the GOT register. */
8545 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
8547 /* The second flow pass currently (June 1999) can't update
8548 regs_ever_live without disturbing other parts of the compiler, so
8549 update it here to make the prolog/epilogue code happy. */
8550 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8551 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8553 current_function_uses_pic_offset_table = 1;
8555 return pic_offset_table_rtx;
8558 /* Function to init struct machine_function.
8559 This will be called, via a pointer variable,
8560 from push_function_context. */
8562 static struct machine_function *
8563 rs6000_init_machine_status (void)
8565 return ggc_alloc_cleared (sizeof (machine_function));
8568 /* These macros test for integers and extract the low-order bits. */
8570 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8571 && GET_MODE (X) == VOIDmode)
8573 #define INT_LOWPART(X) \
8574 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8580 unsigned long val = INT_LOWPART (op);
8582 /* If the high bit is zero, the value is the first 1 bit we find
8584 if ((val & 0x80000000) == 0)
8586 if ((val & 0xffffffff) == 0)
8590 while (((val <<= 1) & 0x80000000) == 0)
8595 /* If the high bit is set and the low bit is not, or the mask is all
8596 1's, the value is zero. */
8597 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8600 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8603 while (((val >>= 1) & 1) != 0)
8613 unsigned long val = INT_LOWPART (op);
8615 /* If the low bit is zero, the value is the first 1 bit we find from
8619 if ((val & 0xffffffff) == 0)
8623 while (((val >>= 1) & 1) == 0)
8629 /* If the low bit is set and the high bit is not, or the mask is all
8630 1's, the value is 31. */
8631 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8634 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8637 while (((val <<= 1) & 0x80000000) != 0)
8643 /* Locate some local-dynamic symbol still in use by this function
8644 so that we can print its name in some tls_ld pattern. */
8647 rs6000_get_some_local_dynamic_name (void)
8651 if (cfun->machine->some_ld_name)
8652 return cfun->machine->some_ld_name;
8654 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8656 && for_each_rtx (&PATTERN (insn),
8657 rs6000_get_some_local_dynamic_name_1, 0))
8658 return cfun->machine->some_ld_name;
8663 /* Helper function for rs6000_get_some_local_dynamic_name. */
8666 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8670 if (GET_CODE (x) == SYMBOL_REF)
8672 const char *str = XSTR (x, 0);
8673 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8675 cfun->machine->some_ld_name = str;
8683 /* Print an operand. Recognize special options, documented below. */
8686 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8687 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8689 #define SMALL_DATA_RELOC "sda21"
8690 #define SMALL_DATA_REG 0
8694 print_operand (FILE *file, rtx x, int code)
8698 unsigned HOST_WIDE_INT uval;
8703 /* Write out an instruction after the call which may be replaced
8704 with glue code by the loader. This depends on the AIX version. */
8705 asm_fprintf (file, RS6000_CALL_GLUE);
8708 /* %a is output_address. */
8711 /* If X is a constant integer whose low-order 5 bits are zero,
8712 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8713 in the AIX assembler where "sri" with a zero shift count
8714 writes a trash instruction. */
8715 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8722 /* If constant, low-order 16 bits of constant, unsigned.
8723 Otherwise, write normally. */
8725 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8727 print_operand (file, x, 0);
8731 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8732 for 64-bit mask direction. */
8733 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8736 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8740 /* X is a CR register. Print the number of the EQ bit of the CR */
8741 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8742 output_operand_lossage ("invalid %%E value");
8744 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8748 /* X is a CR register. Print the shift count needed to move it
8749 to the high-order four bits. */
8750 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8751 output_operand_lossage ("invalid %%f value");
8753 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8757 /* Similar, but print the count for the rotate in the opposite
8759 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8760 output_operand_lossage ("invalid %%F value");
8762 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8766 /* X is a constant integer. If it is negative, print "m",
8767 otherwise print "z". This is to make an aze or ame insn. */
8768 if (GET_CODE (x) != CONST_INT)
8769 output_operand_lossage ("invalid %%G value");
8770 else if (INTVAL (x) >= 0)
8777 /* If constant, output low-order five bits. Otherwise, write
8780 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8782 print_operand (file, x, 0);
8786 /* If constant, output low-order six bits. Otherwise, write
8789 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8791 print_operand (file, x, 0);
8795 /* Print `i' if this is a constant, else nothing. */
8801 /* Write the bit number in CCR for jump. */
8804 output_operand_lossage ("invalid %%j code");
8806 fprintf (file, "%d", i);
8810 /* Similar, but add one for shift count in rlinm for scc and pass
8811 scc flag to `ccr_bit'. */
8814 output_operand_lossage ("invalid %%J code");
8816 /* If we want bit 31, write a shift count of zero, not 32. */
8817 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8821 /* X must be a constant. Write the 1's complement of the
8824 output_operand_lossage ("invalid %%k value");
8826 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8830 /* X must be a symbolic constant on ELF. Write an
8831 expression suitable for an 'addi' that adds in the low 16
8833 if (GET_CODE (x) != CONST)
8835 print_operand_address (file, x);
8840 if (GET_CODE (XEXP (x, 0)) != PLUS
8841 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8842 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8843 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8844 output_operand_lossage ("invalid %%K value");
8845 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8847 /* For GNU as, there must be a non-alphanumeric character
8848 between 'l' and the number. The '-' is added by
8849 print_operand() already. */
8850 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8852 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8856 /* %l is output_asm_label. */
8859 /* Write second word of DImode or DFmode reference. Works on register
8860 or non-indexed memory only. */
8861 if (GET_CODE (x) == REG)
8862 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8863 else if (GET_CODE (x) == MEM)
8865 /* Handle possible auto-increment. Since it is pre-increment and
8866 we have already done it, we can just use an offset of word. */
8867 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8868 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8869 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8872 output_address (XEXP (adjust_address_nv (x, SImode,
8876 if (small_data_operand (x, GET_MODE (x)))
8877 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8878 reg_names[SMALL_DATA_REG]);
8883 /* MB value for a mask operand. */
8884 if (! mask_operand (x, SImode))
8885 output_operand_lossage ("invalid %%m value");
8887 fprintf (file, "%d", extract_MB (x));
8891 /* ME value for a mask operand. */
8892 if (! mask_operand (x, SImode))
8893 output_operand_lossage ("invalid %%M value");
8895 fprintf (file, "%d", extract_ME (x));
8898 /* %n outputs the negative of its operand. */
8901 /* Write the number of elements in the vector times 4. */
8902 if (GET_CODE (x) != PARALLEL)
8903 output_operand_lossage ("invalid %%N value");
8905 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8909 /* Similar, but subtract 1 first. */
8910 if (GET_CODE (x) != PARALLEL)
8911 output_operand_lossage ("invalid %%O value");
8913 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8917 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8919 || INT_LOWPART (x) < 0
8920 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8921 output_operand_lossage ("invalid %%p value");
8923 fprintf (file, "%d", i);
8927 /* The operand must be an indirect memory reference. The result
8928 is the register number. */
8929 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8930 || REGNO (XEXP (x, 0)) >= 32)
8931 output_operand_lossage ("invalid %%P value");
8933 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8937 /* This outputs the logical code corresponding to a boolean
8938 expression. The expression may have one or both operands
8939 negated (if one, only the first one). For condition register
8940 logical operations, it will also treat the negated
8941 CR codes as NOTs, but not handle NOTs of them. */
8943 const char *const *t = 0;
8945 enum rtx_code code = GET_CODE (x);
8946 static const char * const tbl[3][3] = {
8947 { "and", "andc", "nor" },
8948 { "or", "orc", "nand" },
8949 { "xor", "eqv", "xor" } };
8953 else if (code == IOR)
8955 else if (code == XOR)
8958 output_operand_lossage ("invalid %%q value");
8960 if (GET_CODE (XEXP (x, 0)) != NOT)
8964 if (GET_CODE (XEXP (x, 1)) == NOT)
8982 /* X is a CR register. Print the mask for `mtcrf'. */
8983 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8984 output_operand_lossage ("invalid %%R value");
8986 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8990 /* Low 5 bits of 32 - value */
8992 output_operand_lossage ("invalid %%s value");
8994 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8998 /* PowerPC64 mask position. All 0's is excluded.
8999 CONST_INT 32-bit mask is considered sign-extended so any
9000 transition must occur within the CONST_INT, not on the boundary. */
9001 if (! mask64_operand (x, DImode))
9002 output_operand_lossage ("invalid %%S value");
9004 uval = INT_LOWPART (x);
9006 if (uval & 1) /* Clear Left */
9008 #if HOST_BITS_PER_WIDE_INT > 64
9009 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9013 else /* Clear Right */
9016 #if HOST_BITS_PER_WIDE_INT > 64
9017 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9025 fprintf (file, "%d", i);
9029 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
9030 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
9033 /* Bit 3 is OV bit. */
9034 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9036 /* If we want bit 31, write a shift count of zero, not 32. */
9037 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9041 /* Print the symbolic name of a branch target register. */
9042 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9043 && REGNO (x) != COUNT_REGISTER_REGNUM))
9044 output_operand_lossage ("invalid %%T value");
9045 else if (REGNO (x) == LINK_REGISTER_REGNUM)
9046 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9048 fputs ("ctr", file);
9052 /* High-order 16 bits of constant for use in unsigned operand. */
9054 output_operand_lossage ("invalid %%u value");
9056 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9057 (INT_LOWPART (x) >> 16) & 0xffff);
9061 /* High-order 16 bits of constant for use in signed operand. */
9063 output_operand_lossage ("invalid %%v value");
9065 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9066 (INT_LOWPART (x) >> 16) & 0xffff);
9070 /* Print `u' if this has an auto-increment or auto-decrement. */
9071 if (GET_CODE (x) == MEM
9072 && (GET_CODE (XEXP (x, 0)) == PRE_INC
9073 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9078 /* Print the trap code for this operand. */
9079 switch (GET_CODE (x))
9082 fputs ("eq", file); /* 4 */
9085 fputs ("ne", file); /* 24 */
9088 fputs ("lt", file); /* 16 */
9091 fputs ("le", file); /* 20 */
9094 fputs ("gt", file); /* 8 */
9097 fputs ("ge", file); /* 12 */
9100 fputs ("llt", file); /* 2 */
9103 fputs ("lle", file); /* 6 */
9106 fputs ("lgt", file); /* 1 */
9109 fputs ("lge", file); /* 5 */
9117 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
9120 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
9121 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9123 print_operand (file, x, 0);
9127 /* MB value for a PowerPC64 rldic operand. */
9128 val = (GET_CODE (x) == CONST_INT
9129 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
9134 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
9135 if ((val <<= 1) < 0)
9138 #if HOST_BITS_PER_WIDE_INT == 32
9139 if (GET_CODE (x) == CONST_INT && i >= 0)
9140 i += 32; /* zero-extend high-part was all 0's */
9141 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9143 val = CONST_DOUBLE_LOW (x);
9150 for ( ; i < 64; i++)
9151 if ((val <<= 1) < 0)
9156 fprintf (file, "%d", i + 1);
9160 if (GET_CODE (x) == MEM
9161 && legitimate_indexed_address_p (XEXP (x, 0), 0))
9166 /* Like 'L', for third word of TImode */
9167 if (GET_CODE (x) == REG)
9168 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
9169 else if (GET_CODE (x) == MEM)
9171 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9172 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9173 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9175 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9176 if (small_data_operand (x, GET_MODE (x)))
9177 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9178 reg_names[SMALL_DATA_REG]);
9183 /* X is a SYMBOL_REF. Write out the name preceded by a
9184 period and without any trailing data in brackets. Used for function
9185 names. If we are configured for System V (or the embedded ABI) on
9186 the PowerPC, do not emit the period, since those systems do not use
9187 TOCs and the like. */
9188 if (GET_CODE (x) != SYMBOL_REF)
9191 if (XSTR (x, 0)[0] != '.')
9193 switch (DEFAULT_ABI)
9208 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9210 assemble_name (file, XSTR (x, 0));
9214 /* Like 'L', for last word of TImode. */
9215 if (GET_CODE (x) == REG)
9216 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9217 else if (GET_CODE (x) == MEM)
9219 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9220 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9221 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9223 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9224 if (small_data_operand (x, GET_MODE (x)))
9225 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9226 reg_names[SMALL_DATA_REG]);
9230 /* Print AltiVec or SPE memory operand. */
9235 if (GET_CODE (x) != MEM)
9243 if (GET_CODE (tmp) == REG)
9245 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9248 /* Handle [reg+UIMM]. */
9249 else if (GET_CODE (tmp) == PLUS &&
9250 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9254 if (GET_CODE (XEXP (tmp, 0)) != REG)
9257 x = INTVAL (XEXP (tmp, 1));
9258 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9262 /* Fall through. Must be [reg+reg]. */
9264 if (GET_CODE (tmp) == REG)
9265 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9266 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9268 if (REGNO (XEXP (tmp, 0)) == 0)
9269 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9270 reg_names[ REGNO (XEXP (tmp, 0)) ]);
9272 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9273 reg_names[ REGNO (XEXP (tmp, 1)) ]);
9281 if (GET_CODE (x) == REG)
9282 fprintf (file, "%s", reg_names[REGNO (x)]);
9283 else if (GET_CODE (x) == MEM)
9285 /* We need to handle PRE_INC and PRE_DEC here, since we need to
9286 know the width from the mode. */
9287 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9288 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9289 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9290 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9291 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9292 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9294 output_address (XEXP (x, 0));
9297 output_addr_const (file, x);
9301 assemble_name (file, rs6000_get_some_local_dynamic_name ());
9305 output_operand_lossage ("invalid %%xn code");
9309 /* Print the address of an operand. */
9312 print_operand_address (FILE *file, rtx x)
9314 if (GET_CODE (x) == REG)
9315 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9316 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
9317 || GET_CODE (x) == LABEL_REF)
9319 output_addr_const (file, x);
9320 if (small_data_operand (x, GET_MODE (x)))
9321 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9322 reg_names[SMALL_DATA_REG]);
9323 else if (TARGET_TOC)
9326 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
9328 if (REGNO (XEXP (x, 0)) == 0)
9329 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
9330 reg_names[ REGNO (XEXP (x, 0)) ]);
9332 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
9333 reg_names[ REGNO (XEXP (x, 1)) ]);
9335 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
9336 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
9337 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
9339 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9340 && CONSTANT_P (XEXP (x, 1)))
9342 output_addr_const (file, XEXP (x, 1));
9343 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9347 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9348 && CONSTANT_P (XEXP (x, 1)))
9350 fprintf (file, "lo16(");
9351 output_addr_const (file, XEXP (x, 1));
9352 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9355 else if (legitimate_constant_pool_address_p (x))
9357 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9359 rtx contains_minus = XEXP (x, 1);
9363 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9364 turn it into (sym) for output_addr_const. */
9365 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
9366 contains_minus = XEXP (contains_minus, 0);
9368 minus = XEXP (contains_minus, 0);
9369 symref = XEXP (minus, 0);
9370 XEXP (contains_minus, 0) = symref;
9375 name = XSTR (symref, 0);
9376 newname = alloca (strlen (name) + sizeof ("@toc"));
9377 strcpy (newname, name);
9378 strcat (newname, "@toc");
9379 XSTR (symref, 0) = newname;
9381 output_addr_const (file, XEXP (x, 1));
9383 XSTR (symref, 0) = name;
9384 XEXP (contains_minus, 0) = minus;
9387 output_addr_const (file, XEXP (x, 1));
9389 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
9395 /* Target hook for assembling integer objects. The PowerPC version has
9396 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9397 is defined. It also needs to handle DI-mode objects on 64-bit
9401 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
9403 #ifdef RELOCATABLE_NEEDS_FIXUP
9404 /* Special handling for SI values. */
9405 if (size == 4 && aligned_p)
9407 extern int in_toc_section (void);
9408 static int recurse = 0;
9410 /* For -mrelocatable, we mark all addresses that need to be fixed up
9411 in the .fixup section. */
9412 if (TARGET_RELOCATABLE
9413 && !in_toc_section ()
9414 && !in_text_section ()
9416 && GET_CODE (x) != CONST_INT
9417 && GET_CODE (x) != CONST_DOUBLE
9423 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
9425 ASM_OUTPUT_LABEL (asm_out_file, buf);
9426 fprintf (asm_out_file, "\t.long\t(");
9427 output_addr_const (asm_out_file, x);
9428 fprintf (asm_out_file, ")@fixup\n");
9429 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
9430 ASM_OUTPUT_ALIGN (asm_out_file, 2);
9431 fprintf (asm_out_file, "\t.long\t");
9432 assemble_name (asm_out_file, buf);
9433 fprintf (asm_out_file, "\n\t.previous\n");
9437 /* Remove initial .'s to turn a -mcall-aixdesc function
9438 address into the address of the descriptor, not the function
9440 else if (GET_CODE (x) == SYMBOL_REF
9441 && XSTR (x, 0)[0] == '.'
9442 && DEFAULT_ABI == ABI_AIX)
9444 const char *name = XSTR (x, 0);
9445 while (*name == '.')
9448 fprintf (asm_out_file, "\t.long\t%s\n", name);
9452 #endif /* RELOCATABLE_NEEDS_FIXUP */
9453 return default_assemble_integer (x, size, aligned_p);
9456 #ifdef HAVE_GAS_HIDDEN
9457 /* Emit an assembler directive to set symbol visibility for DECL to
9461 rs6000_assemble_visibility (tree decl, int vis)
9463 /* Functions need to have their entry point symbol visibility set as
9464 well as their descriptor symbol visibility. */
9465 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9467 static const char * const visibility_types[] = {
9468 NULL, "internal", "hidden", "protected"
9471 const char *name, *type;
9473 name = ((* targetm.strip_name_encoding)
9474 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9475 type = visibility_types[vis];
9477 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9478 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9481 default_assemble_visibility (decl, vis);
9486 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9488 /* Reversal of FP compares takes care -- an ordered compare
9489 becomes an unordered compare and vice versa. */
9490 if (mode == CCFPmode
9491 && (!flag_finite_math_only
9492 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9493 || code == UNEQ || code == LTGT))
9494 return reverse_condition_maybe_unordered (code);
9496 return reverse_condition (code);
9499 /* Generate a compare for CODE. Return a brand-new rtx that
9500 represents the result of the compare. */
9503 rs6000_generate_compare (enum rtx_code code)
9505 enum machine_mode comp_mode;
9508 if (rs6000_compare_fp_p)
9509 comp_mode = CCFPmode;
9510 else if (code == GTU || code == LTU
9511 || code == GEU || code == LEU)
9512 comp_mode = CCUNSmode;
9516 /* First, the compare. */
9517 compare_result = gen_reg_rtx (comp_mode);
9519 /* SPE FP compare instructions on the GPRs. Yuck! */
9520 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9521 && rs6000_compare_fp_p)
9523 rtx cmp, or1, or2, or_result, compare_result2;
9531 cmp = flag_finite_math_only
9532 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9534 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9535 rs6000_compare_op1);
9543 cmp = flag_finite_math_only
9544 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9546 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9547 rs6000_compare_op1);
9555 cmp = flag_finite_math_only
9556 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9558 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9559 rs6000_compare_op1);
9565 /* Synthesize LE and GE from LT/GT || EQ. */
9566 if (code == LE || code == GE || code == LEU || code == GEU)
9568 /* Synthesize GE/LE frome GT/LT || EQ. */
9574 case LE: code = LT; break;
9575 case GE: code = GT; break;
9576 case LEU: code = LT; break;
9577 case GEU: code = GT; break;
9581 or1 = gen_reg_rtx (SImode);
9582 or2 = gen_reg_rtx (SImode);
9583 or_result = gen_reg_rtx (CCEQmode);
9584 compare_result2 = gen_reg_rtx (CCFPmode);
9587 cmp = flag_finite_math_only
9588 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9590 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9591 rs6000_compare_op1);
9594 /* The MC8540 FP compare instructions set the CR bits
9595 differently than other PPC compare instructions. For
9596 that matter, there is no generic test instruction, but a
9597 testgt, testlt, and testeq. For a true condition, bit 2
9598 is set (x1xx) in the CR. Following the traditional CR
9604 ... bit 2 would be a GT CR alias, so later on we
9605 look in the GT bits for the branch instructions.
9606 However, we must be careful to emit correct RTL in
9607 the meantime, so optimizations don't get confused. */
9609 or1 = gen_rtx_NE (SImode, compare_result, const0_rtx);
9610 or2 = gen_rtx_NE (SImode, compare_result2, const0_rtx);
9612 /* OR them together. */
9613 cmp = gen_rtx_SET (VOIDmode, or_result,
9614 gen_rtx_COMPARE (CCEQmode,
9615 gen_rtx_IOR (SImode, or1, or2),
9617 compare_result = or_result;
9622 /* We only care about 1 bit (x1xx), so map everything to NE to
9623 maintain rtl sanity. We'll get to the right bit (x1xx) at
9624 code output time. */
9625 if (code == NE || code == LTGT)
9626 /* Do the inverse here because we have no cmpne
9627 instruction. We use the cmpeq instruction and expect
9628 to get a 0 instead. */
9637 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9638 gen_rtx_COMPARE (comp_mode,
9640 rs6000_compare_op1)));
9642 /* Some kinds of FP comparisons need an OR operation;
9643 under flag_finite_math_only we don't bother. */
9644 if (rs6000_compare_fp_p
9645 && ! flag_finite_math_only
9646 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9647 && (code == LE || code == GE
9648 || code == UNEQ || code == LTGT
9649 || code == UNGT || code == UNLT))
9651 enum rtx_code or1, or2;
9652 rtx or1_rtx, or2_rtx, compare2_rtx;
9653 rtx or_result = gen_reg_rtx (CCEQmode);
9657 case LE: or1 = LT; or2 = EQ; break;
9658 case GE: or1 = GT; or2 = EQ; break;
9659 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9660 case LTGT: or1 = LT; or2 = GT; break;
9661 case UNGT: or1 = UNORDERED; or2 = GT; break;
9662 case UNLT: or1 = UNORDERED; or2 = LT; break;
9665 validate_condition_mode (or1, comp_mode);
9666 validate_condition_mode (or2, comp_mode);
9667 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
9668 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
9669 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9670 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9672 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9674 compare_result = or_result;
9678 validate_condition_mode (code, GET_MODE (compare_result));
9680 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
9684 /* Emit the RTL for an sCOND pattern. */
9687 rs6000_emit_sCOND (enum rtx_code code, rtx result)
9690 enum machine_mode op_mode;
9691 enum rtx_code cond_code;
9693 condition_rtx = rs6000_generate_compare (code);
9694 cond_code = GET_CODE (condition_rtx);
9697 || cond_code == GE || cond_code == LE
9698 || cond_code == GEU || cond_code == LEU
9699 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9701 rtx not_result = gen_reg_rtx (CCEQmode);
9702 rtx not_op, rev_cond_rtx;
9703 enum machine_mode cc_mode;
9705 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9707 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
9708 SImode, XEXP (condition_rtx, 0), const0_rtx);
9709 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9710 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9711 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9714 op_mode = GET_MODE (rs6000_compare_op0);
9715 if (op_mode == VOIDmode)
9716 op_mode = GET_MODE (rs6000_compare_op1);
9718 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9720 PUT_MODE (condition_rtx, DImode);
9721 convert_move (result, condition_rtx, 0);
9725 PUT_MODE (condition_rtx, SImode);
9726 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9730 /* Emit a branch of kind CODE to location LOC. */
9733 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
9735 rtx condition_rtx, loc_ref;
9737 condition_rtx = rs6000_generate_compare (code);
9738 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9739 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9740 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9744 /* Return the string to output a conditional branch to LABEL, which is
9745 the operand number of the label, or -1 if the branch is really a
9748 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9749 condition code register and its mode specifies what kind of
9752 REVERSED is nonzero if we should reverse the sense of the comparison.
9754 INSN is the insn. */
9757 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
9759 static char string[64];
9760 enum rtx_code code = GET_CODE (op);
9761 rtx cc_reg = XEXP (op, 0);
9762 enum machine_mode mode = GET_MODE (cc_reg);
9763 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9764 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9765 int really_reversed = reversed ^ need_longbranch;
9771 validate_condition_mode (code, mode);
9773 /* Work out which way this really branches. We could use
9774 reverse_condition_maybe_unordered here always but this
9775 makes the resulting assembler clearer. */
9776 if (really_reversed)
9778 /* Reversal of FP compares takes care -- an ordered compare
9779 becomes an unordered compare and vice versa. */
9780 if (mode == CCFPmode)
9781 code = reverse_condition_maybe_unordered (code);
9783 code = reverse_condition (code);
9786 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9788 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9791 /* Opposite of GT. */
9793 else if (code == NE)
9801 /* Not all of these are actually distinct opcodes, but
9802 we distinguish them for clarity of the resulting assembler. */
9804 ccode = "ne"; break;
9806 ccode = "eq"; break;
9808 ccode = "ge"; break;
9809 case GT: case GTU: case UNGT:
9810 ccode = "gt"; break;
9812 ccode = "le"; break;
9813 case LT: case LTU: case UNLT:
9814 ccode = "lt"; break;
9815 case UNORDERED: ccode = "un"; break;
9816 case ORDERED: ccode = "nu"; break;
9817 case UNGE: ccode = "nl"; break;
9818 case UNLE: ccode = "ng"; break;
9823 /* Maybe we have a guess as to how likely the branch is.
9824 The old mnemonics don't have a way to specify this information. */
9826 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9827 if (note != NULL_RTX)
9829 /* PROB is the difference from 50%. */
9830 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9831 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9833 /* Only hint for highly probable/improbable branches on newer
9834 cpus as static prediction overrides processor dynamic
9835 prediction. For older cpus we may as well always hint, but
9836 assume not taken for branches that are very close to 50% as a
9837 mispredicted taken branch is more expensive than a
9838 mispredicted not-taken branch. */
9840 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9842 if (abs (prob) > REG_BR_PROB_BASE / 20
9843 && ((prob > 0) ^ need_longbranch))
9851 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9853 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9855 /* We need to escape any '%' characters in the reg_names string.
9856 Assume they'd only be the first character.... */
9857 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9859 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9863 /* If the branch distance was too far, we may have to use an
9864 unconditional branch to go the distance. */
9865 if (need_longbranch)
9866 s += sprintf (s, ",$+8\n\tb %s", label);
9868 s += sprintf (s, ",%s", label);
9874 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9875 operands of the last comparison is nonzero/true, FALSE_COND if it
9876 is zero/false. Return 0 if the hardware has no such operation. */
9879 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9881 enum rtx_code code = GET_CODE (op);
9882 rtx op0 = rs6000_compare_op0;
9883 rtx op1 = rs6000_compare_op1;
9885 enum machine_mode compare_mode = GET_MODE (op0);
9886 enum machine_mode result_mode = GET_MODE (dest);
9889 /* These modes should always match. */
9890 if (GET_MODE (op1) != compare_mode
9891 /* In the isel case however, we can use a compare immediate, so
9892 op1 may be a small constant. */
9893 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9895 if (GET_MODE (true_cond) != result_mode)
9897 if (GET_MODE (false_cond) != result_mode)
9900 /* First, work out if the hardware can do this at all, or
9901 if it's too slow.... */
9902 if (! rs6000_compare_fp_p)
9905 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9909 /* Eliminate half of the comparisons by switching operands, this
9910 makes the remaining code simpler. */
9911 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9912 || code == LTGT || code == LT || code == UNLE)
9914 code = reverse_condition_maybe_unordered (code);
9916 true_cond = false_cond;
9920 /* UNEQ and LTGT take four instructions for a comparison with zero,
9921 it'll probably be faster to use a branch here too. */
9922 if (code == UNEQ && HONOR_NANS (compare_mode))
9925 if (GET_CODE (op1) == CONST_DOUBLE)
9926 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9928 /* We're going to try to implement comparisons by performing
9929 a subtract, then comparing against zero. Unfortunately,
9930 Inf - Inf is NaN which is not zero, and so if we don't
9931 know that the operand is finite and the comparison
9932 would treat EQ different to UNORDERED, we can't do it. */
9933 if (HONOR_INFINITIES (compare_mode)
9934 && code != GT && code != UNGE
9935 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9936 /* Constructs of the form (a OP b ? a : b) are safe. */
9937 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9938 || (! rtx_equal_p (op0, true_cond)
9939 && ! rtx_equal_p (op1, true_cond))))
9941 /* At this point we know we can use fsel. */
9943 /* Reduce the comparison to a comparison against zero. */
9944 temp = gen_reg_rtx (compare_mode);
9945 emit_insn (gen_rtx_SET (VOIDmode, temp,
9946 gen_rtx_MINUS (compare_mode, op0, op1)));
9948 op1 = CONST0_RTX (compare_mode);
9950 /* If we don't care about NaNs we can reduce some of the comparisons
9951 down to faster ones. */
9952 if (! HONOR_NANS (compare_mode))
9958 true_cond = false_cond;
9971 /* Now, reduce everything down to a GE. */
9978 temp = gen_reg_rtx (compare_mode);
9979 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9984 temp = gen_reg_rtx (compare_mode);
9985 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9990 temp = gen_reg_rtx (compare_mode);
9991 emit_insn (gen_rtx_SET (VOIDmode, temp,
9992 gen_rtx_NEG (compare_mode,
9993 gen_rtx_ABS (compare_mode, op0))));
9998 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9999 temp = gen_reg_rtx (result_mode);
10000 emit_insn (gen_rtx_SET (VOIDmode, temp,
10001 gen_rtx_IF_THEN_ELSE (result_mode,
10002 gen_rtx_GE (VOIDmode,
10004 true_cond, false_cond)));
10005 false_cond = true_cond;
10008 temp = gen_reg_rtx (compare_mode);
10009 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10014 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
10015 temp = gen_reg_rtx (result_mode);
10016 emit_insn (gen_rtx_SET (VOIDmode, temp,
10017 gen_rtx_IF_THEN_ELSE (result_mode,
10018 gen_rtx_GE (VOIDmode,
10020 true_cond, false_cond)));
10021 true_cond = false_cond;
10024 temp = gen_reg_rtx (compare_mode);
10025 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10033 emit_insn (gen_rtx_SET (VOIDmode, dest,
10034 gen_rtx_IF_THEN_ELSE (result_mode,
10035 gen_rtx_GE (VOIDmode,
10037 true_cond, false_cond)));
10041 /* Same as above, but for ints (isel). */
10044 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10046 rtx condition_rtx, cr;
10048 /* All isel implementations thus far are 32-bits. */
10049 if (GET_MODE (rs6000_compare_op0) != SImode)
10052 /* We still have to do the compare, because isel doesn't do a
10053 compare, it just looks at the CRx bits set by a previous compare
10055 condition_rtx = rs6000_generate_compare (GET_CODE (op));
10056 cr = XEXP (condition_rtx, 0);
10058 if (GET_MODE (cr) == CCmode)
10059 emit_insn (gen_isel_signed (dest, condition_rtx,
10060 true_cond, false_cond, cr));
10062 emit_insn (gen_isel_unsigned (dest, condition_rtx,
10063 true_cond, false_cond, cr));
10069 output_isel (rtx *operands)
10071 enum rtx_code code;
10073 code = GET_CODE (operands[1]);
10074 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10076 PUT_CODE (operands[1], reverse_condition (code));
10077 return "isel %0,%3,%2,%j1";
10080 return "isel %0,%2,%3,%j1";
10084 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10086 enum machine_mode mode = GET_MODE (op0);
10090 if (code == SMAX || code == SMIN)
10095 if (code == SMAX || code == UMAX)
10096 target = emit_conditional_move (dest, c, op0, op1, mode,
10097 op0, op1, mode, 0);
10099 target = emit_conditional_move (dest, c, op0, op1, mode,
10100 op1, op0, mode, 0);
10101 if (target == NULL_RTX)
10103 if (target != dest)
10104 emit_move_insn (dest, target);
10107 /* Emit instructions to move SRC to DST. Called by splitters for
10108 multi-register moves. It will emit at most one instruction for
10109 each register that is accessed; that is, it won't emit li/lis pairs
10110 (or equivalent for 64-bit code). One of SRC or DST must be a hard
10114 rs6000_split_multireg_move (rtx dst, rtx src)
10116 /* The register number of the first register being moved. */
10118 /* The mode that is to be moved. */
10119 enum machine_mode mode;
10120 /* The mode that the move is being done in, and its size. */
10121 enum machine_mode reg_mode;
10123 /* The number of registers that will be moved. */
10126 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
10127 mode = GET_MODE (dst);
10128 nregs = HARD_REGNO_NREGS (reg, mode);
10129 if (FP_REGNO_P (reg))
10131 else if (ALTIVEC_REGNO_P (reg))
10132 reg_mode = V16QImode;
10134 reg_mode = word_mode;
10135 reg_mode_size = GET_MODE_SIZE (reg_mode);
10137 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
10140 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
10142 /* Move register range backwards, if we might have destructive
10145 for (i = nregs - 1; i >= 0; i--)
10146 emit_insn (gen_rtx_SET (VOIDmode,
10147 simplify_gen_subreg (reg_mode, dst, mode,
10148 i * reg_mode_size),
10149 simplify_gen_subreg (reg_mode, src, mode,
10150 i * reg_mode_size)));
10156 bool used_update = false;
10158 if (GET_CODE (src) == MEM && INT_REGNO_P (reg))
10162 if (GET_CODE (XEXP (src, 0)) == PRE_INC
10163 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
10166 breg = XEXP (XEXP (src, 0), 0);
10167 delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
10168 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
10169 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
10170 emit_insn (TARGET_32BIT
10171 ? gen_addsi3 (breg, breg, delta_rtx)
10172 : gen_adddi3 (breg, breg, delta_rtx));
10173 src = gen_rtx_MEM (mode, breg);
10176 /* We have now address involving an base register only.
10177 If we use one of the registers to address memory,
10178 we have change that register last. */
10180 breg = (GET_CODE (XEXP (src, 0)) == PLUS
10181 ? XEXP (XEXP (src, 0), 0)
10187 if (REGNO (breg) >= REGNO (dst)
10188 && REGNO (breg) < REGNO (dst) + nregs)
10189 j = REGNO (breg) - REGNO (dst);
10192 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
10196 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
10197 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
10200 breg = XEXP (XEXP (dst, 0), 0);
10201 delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
10202 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
10203 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
10205 /* We have to update the breg before doing the store.
10206 Use store with update, if available. */
10210 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
10211 emit_insn (TARGET_32BIT
10212 ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
10213 : gen_movdi_update (breg, breg, delta_rtx, nsrc));
10214 used_update = true;
10217 emit_insn (TARGET_32BIT
10218 ? gen_addsi3 (breg, breg, delta_rtx)
10219 : gen_adddi3 (breg, breg, delta_rtx));
10220 dst = gen_rtx_MEM (mode, breg);
10224 for (i = 0; i < nregs; i++)
10226 /* Calculate index to next subword. */
10231 /* If compiler already emited move of first word by
10232 store with update, no need to do anything. */
10233 if (j == 0 && used_update)
10236 emit_insn (gen_rtx_SET (VOIDmode,
10237 simplify_gen_subreg (reg_mode, dst, mode,
10238 j * reg_mode_size),
10239 simplify_gen_subreg (reg_mode, src, mode,
10240 j * reg_mode_size)));
10246 /* This page contains routines that are used to determine what the
10247 function prologue and epilogue code will do and write them out. */
10249 /* Return the first fixed-point register that is required to be
10250 saved. 32 if none. */
10253 first_reg_to_save (void)
10257 /* Find lowest numbered live register. */
10258 for (first_reg = 13; first_reg <= 31; first_reg++)
10259 if (regs_ever_live[first_reg]
10260 && (! call_used_regs[first_reg]
10261 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10262 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10263 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
10268 && current_function_uses_pic_offset_table
10269 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10270 return RS6000_PIC_OFFSET_TABLE_REGNUM;
10276 /* Similar, for FP regs. */
10279 first_fp_reg_to_save (void)
10283 /* Find lowest numbered live register. */
10284 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10285 if (regs_ever_live[first_reg])
10291 /* Similar, for AltiVec regs. */
10294 first_altivec_reg_to_save (void)
10298 /* Stack frame remains as is unless we are in AltiVec ABI. */
10299 if (! TARGET_ALTIVEC_ABI)
10300 return LAST_ALTIVEC_REGNO + 1;
10302 /* Find lowest numbered live register. */
10303 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10304 if (regs_ever_live[i])
10310 /* Return a 32-bit mask of the AltiVec registers we need to set in
10311 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10312 the 32-bit word is 0. */
10314 static unsigned int
10315 compute_vrsave_mask (void)
10317 unsigned int i, mask = 0;
10319 /* First, find out if we use _any_ altivec registers. */
10320 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10321 if (regs_ever_live[i])
10322 mask |= ALTIVEC_REG_BIT (i);
10327 /* Next, remove the argument registers from the set. These must
10328 be in the VRSAVE mask set by the caller, so we don't need to add
10329 them in again. More importantly, the mask we compute here is
10330 used to generate CLOBBERs in the set_vrsave insn, and we do not
10331 wish the argument registers to die. */
10332 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
10333 mask &= ~ALTIVEC_REG_BIT (i);
10335 /* Similarly, remove the return value from the set. */
10338 diddle_return_value (is_altivec_return_reg, &yes);
10340 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
10347 is_altivec_return_reg (rtx reg, void *xyes)
10349 bool *yes = (bool *) xyes;
10350 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10355 /* Calculate the stack information for the current function. This is
10356 complicated by having two separate calling sequences, the AIX calling
10357 sequence and the V.4 calling sequence.
10359 AIX (and Darwin/Mac OS X) stack frames look like:
10361 SP----> +---------------------------------------+
10362 | back chain to caller | 0 0
10363 +---------------------------------------+
10364 | saved CR | 4 8 (8-11)
10365 +---------------------------------------+
10367 +---------------------------------------+
10368 | reserved for compilers | 12 24
10369 +---------------------------------------+
10370 | reserved for binders | 16 32
10371 +---------------------------------------+
10372 | saved TOC pointer | 20 40
10373 +---------------------------------------+
10374 | Parameter save area (P) | 24 48
10375 +---------------------------------------+
10376 | Alloca space (A) | 24+P etc.
10377 +---------------------------------------+
10378 | Local variable space (L) | 24+P+A
10379 +---------------------------------------+
10380 | Float/int conversion temporary (X) | 24+P+A+L
10381 +---------------------------------------+
10382 | Save area for AltiVec registers (W) | 24+P+A+L+X
10383 +---------------------------------------+
10384 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10385 +---------------------------------------+
10386 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10387 +---------------------------------------+
10388 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
10389 +---------------------------------------+
10390 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
10391 +---------------------------------------+
10392 old SP->| back chain to caller's caller |
10393 +---------------------------------------+
10395 The required alignment for AIX configurations is two words (i.e., 8
10399 V.4 stack frames look like:
10401 SP----> +---------------------------------------+
10402 | back chain to caller | 0
10403 +---------------------------------------+
10404 | caller's saved LR | 4
10405 +---------------------------------------+
10406 | Parameter save area (P) | 8
10407 +---------------------------------------+
10408 | Alloca space (A) | 8+P
10409 +---------------------------------------+
10410 | Varargs save area (V) | 8+P+A
10411 +---------------------------------------+
10412 | Local variable space (L) | 8+P+A+V
10413 +---------------------------------------+
10414 | Float/int conversion temporary (X) | 8+P+A+V+L
10415 +---------------------------------------+
10416 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10417 +---------------------------------------+
10418 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10419 +---------------------------------------+
10420 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10421 +---------------------------------------+
10422 | SPE: area for 64-bit GP registers |
10423 +---------------------------------------+
10424 | SPE alignment padding |
10425 +---------------------------------------+
10426 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10427 +---------------------------------------+
10428 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10429 +---------------------------------------+
10430 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10431 +---------------------------------------+
10432 old SP->| back chain to caller's caller |
10433 +---------------------------------------+
10435 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10436 given. (But note below and in sysv4.h that we require only 8 and
10437 may round up the size of our stack frame anyways. The historical
10438 reason is early versions of powerpc-linux which didn't properly
10439 align the stack at program startup. A happy side-effect is that
10440 -mno-eabi libraries can be used with -meabi programs.)
10442 The EABI configuration defaults to the V.4 layout. However,
10443 the stack alignment requirements may differ. If -mno-eabi is not
10444 given, the required stack alignment is 8 bytes; if -mno-eabi is
10445 given, the required alignment is 16 bytes. (But see V.4 comment
10448 #ifndef ABI_STACK_BOUNDARY
10449 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10452 static rs6000_stack_t *
10453 rs6000_stack_info (void)
10455 static rs6000_stack_t info, zero_info;
10456 rs6000_stack_t *info_ptr = &info;
10457 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10459 HOST_WIDE_INT total_raw_size;
10461 /* Zero all fields portably. */
10466 /* Cache value so we don't rescan instruction chain over and over. */
10467 if (cfun->machine->insn_chain_scanned_p == 0)
10469 cfun->machine->insn_chain_scanned_p = 1;
10470 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10474 /* Select which calling sequence. */
10475 info_ptr->abi = DEFAULT_ABI;
10477 /* Calculate which registers need to be saved & save area size. */
10478 info_ptr->first_gp_reg_save = first_reg_to_save ();
10479 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10480 even if it currently looks like we won't. */
10481 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10482 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10483 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10484 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10485 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10487 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10489 /* For the SPE, we have an additional upper 32-bits on each GPR.
10490 Ideally we should save the entire 64-bits only when the upper
10491 half is used in SIMD instructions. Since we only record
10492 registers live (not the size they are used in), this proves
10493 difficult because we'd have to traverse the instruction chain at
10494 the right time, taking reload into account. This is a real pain,
10495 so we opt to save the GPRs in 64-bits always if but one register
10496 gets used in 64-bits. Otherwise, all the registers in the frame
10497 get saved in 32-bits.
10499 So... since when we save all GPRs (except the SP) in 64-bits, the
10500 traditional GP save area will be empty. */
10501 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10502 info_ptr->gp_size = 0;
10504 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10505 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10507 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10508 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10509 - info_ptr->first_altivec_reg_save);
10511 /* Does this function call anything? */
10512 info_ptr->calls_p = (! current_function_is_leaf
10513 || cfun->machine->ra_needs_full_frame);
10515 /* Determine if we need to save the link register. */
10516 if (rs6000_ra_ever_killed ()
10517 || (DEFAULT_ABI == ABI_AIX
10518 && current_function_profile
10519 && !TARGET_PROFILE_KERNEL)
10520 #ifdef TARGET_RELOCATABLE
10521 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10523 || (info_ptr->first_fp_reg_save != 64
10524 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10525 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10526 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10527 || (DEFAULT_ABI == ABI_DARWIN
10529 && current_function_uses_pic_offset_table)
10530 || info_ptr->calls_p)
10532 info_ptr->lr_save_p = 1;
10533 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10536 /* Determine if we need to save the condition code registers. */
10537 if (regs_ever_live[CR2_REGNO]
10538 || regs_ever_live[CR3_REGNO]
10539 || regs_ever_live[CR4_REGNO])
10541 info_ptr->cr_save_p = 1;
10542 if (DEFAULT_ABI == ABI_V4)
10543 info_ptr->cr_size = reg_size;
10546 /* If the current function calls __builtin_eh_return, then we need
10547 to allocate stack space for registers that will hold data for
10548 the exception handler. */
10549 if (current_function_calls_eh_return)
10552 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10555 /* SPE saves EH registers in 64-bits. */
10556 ehrd_size = i * (TARGET_SPE_ABI
10557 && info_ptr->spe_64bit_regs_used != 0
10558 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10563 /* Determine various sizes. */
10564 info_ptr->reg_size = reg_size;
10565 info_ptr->fixed_size = RS6000_SAVE_AREA;
10566 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10567 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10568 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10571 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10572 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10574 info_ptr->spe_gp_size = 0;
10576 if (TARGET_ALTIVEC_ABI)
10577 info_ptr->vrsave_mask = compute_vrsave_mask ();
10579 info_ptr->vrsave_mask = 0;
10581 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
10582 info_ptr->vrsave_size = 4;
10584 info_ptr->vrsave_size = 0;
10586 /* Calculate the offsets. */
10587 switch (DEFAULT_ABI)
10595 info_ptr->fp_save_offset = - info_ptr->fp_size;
10596 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10598 if (TARGET_ALTIVEC_ABI)
10600 info_ptr->vrsave_save_offset
10601 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10603 /* Align stack so vector save area is on a quadword boundary. */
10604 if (info_ptr->altivec_size != 0)
10605 info_ptr->altivec_padding_size
10606 = 16 - (-info_ptr->vrsave_save_offset % 16);
10608 info_ptr->altivec_padding_size = 0;
10610 info_ptr->altivec_save_offset
10611 = info_ptr->vrsave_save_offset
10612 - info_ptr->altivec_padding_size
10613 - info_ptr->altivec_size;
10615 /* Adjust for AltiVec case. */
10616 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10619 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10620 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10621 info_ptr->lr_save_offset = 2*reg_size;
10625 info_ptr->fp_save_offset = - info_ptr->fp_size;
10626 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10627 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10629 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10631 /* Align stack so SPE GPR save area is aligned on a
10632 double-word boundary. */
10633 if (info_ptr->spe_gp_size != 0)
10634 info_ptr->spe_padding_size
10635 = 8 - (-info_ptr->cr_save_offset % 8);
10637 info_ptr->spe_padding_size = 0;
10639 info_ptr->spe_gp_save_offset
10640 = info_ptr->cr_save_offset
10641 - info_ptr->spe_padding_size
10642 - info_ptr->spe_gp_size;
10644 /* Adjust for SPE case. */
10645 info_ptr->toc_save_offset
10646 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10648 else if (TARGET_ALTIVEC_ABI)
10650 info_ptr->vrsave_save_offset
10651 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10653 /* Align stack so vector save area is on a quadword boundary. */
10654 if (info_ptr->altivec_size != 0)
10655 info_ptr->altivec_padding_size
10656 = 16 - (-info_ptr->vrsave_save_offset % 16);
10658 info_ptr->altivec_padding_size = 0;
10660 info_ptr->altivec_save_offset
10661 = info_ptr->vrsave_save_offset
10662 - info_ptr->altivec_padding_size
10663 - info_ptr->altivec_size;
10665 /* Adjust for AltiVec case. */
10666 info_ptr->toc_save_offset
10667 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10670 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10671 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10672 info_ptr->lr_save_offset = reg_size;
10676 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10677 + info_ptr->gp_size
10678 + info_ptr->altivec_size
10679 + info_ptr->altivec_padding_size
10680 + info_ptr->spe_gp_size
10681 + info_ptr->spe_padding_size
10683 + info_ptr->cr_size
10684 + info_ptr->lr_size
10685 + info_ptr->vrsave_size
10686 + info_ptr->toc_size,
10687 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10690 total_raw_size = (info_ptr->vars_size
10691 + info_ptr->parm_size
10692 + info_ptr->save_size
10693 + info_ptr->varargs_size
10694 + info_ptr->fixed_size);
10696 info_ptr->total_size =
10697 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10699 /* Determine if we need to allocate any stack frame:
10701 For AIX we need to push the stack if a frame pointer is needed
10702 (because the stack might be dynamically adjusted), if we are
10703 debugging, if we make calls, or if the sum of fp_save, gp_save,
10704 and local variables are more than the space needed to save all
10705 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10706 + 18*8 = 288 (GPR13 reserved).
10708 For V.4 we don't have the stack cushion that AIX uses, but assume
10709 that the debugger can handle stackless frames. */
10711 if (info_ptr->calls_p)
10712 info_ptr->push_p = 1;
10714 else if (DEFAULT_ABI == ABI_V4)
10715 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10717 else if (frame_pointer_needed)
10718 info_ptr->push_p = 1;
10720 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10721 info_ptr->push_p = 1;
10725 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10727 /* Zero offsets if we're not saving those registers. */
10728 if (info_ptr->fp_size == 0)
10729 info_ptr->fp_save_offset = 0;
10731 if (info_ptr->gp_size == 0)
10732 info_ptr->gp_save_offset = 0;
10734 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10735 info_ptr->altivec_save_offset = 0;
10737 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10738 info_ptr->vrsave_save_offset = 0;
10740 if (! TARGET_SPE_ABI
10741 || info_ptr->spe_64bit_regs_used == 0
10742 || info_ptr->spe_gp_size == 0)
10743 info_ptr->spe_gp_save_offset = 0;
10745 if (! info_ptr->lr_save_p)
10746 info_ptr->lr_save_offset = 0;
10748 if (! info_ptr->cr_save_p)
10749 info_ptr->cr_save_offset = 0;
10751 if (! info_ptr->toc_save_p)
10752 info_ptr->toc_save_offset = 0;
10757 /* Return true if the current function uses any GPRs in 64-bit SIMD
10761 spe_func_has_64bit_regs_p (void)
10765 /* Functions that save and restore all the call-saved registers will
10766 need to save/restore the registers in 64-bits. */
10767 if (current_function_calls_eh_return
10768 || current_function_calls_setjmp
10769 || current_function_has_nonlocal_goto)
10772 insns = get_insns ();
10774 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10780 i = PATTERN (insn);
10781 if (GET_CODE (i) == SET
10782 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10791 debug_stack_info (rs6000_stack_t *info)
10793 const char *abi_string;
10796 info = rs6000_stack_info ();
10798 fprintf (stderr, "\nStack information for function %s:\n",
10799 ((current_function_decl && DECL_NAME (current_function_decl))
10800 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10805 default: abi_string = "Unknown"; break;
10806 case ABI_NONE: abi_string = "NONE"; break;
10807 case ABI_AIX: abi_string = "AIX"; break;
10808 case ABI_DARWIN: abi_string = "Darwin"; break;
10809 case ABI_V4: abi_string = "V.4"; break;
10812 fprintf (stderr, "\tABI = %5s\n", abi_string);
10814 if (TARGET_ALTIVEC_ABI)
10815 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10817 if (TARGET_SPE_ABI)
10818 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
10820 if (info->first_gp_reg_save != 32)
10821 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10823 if (info->first_fp_reg_save != 64)
10824 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10826 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10827 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10828 info->first_altivec_reg_save);
10830 if (info->lr_save_p)
10831 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10833 if (info->cr_save_p)
10834 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10836 if (info->toc_save_p)
10837 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10839 if (info->vrsave_mask)
10840 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10843 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10846 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
10848 if (info->gp_save_offset)
10849 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10851 if (info->fp_save_offset)
10852 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10854 if (info->altivec_save_offset)
10855 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10856 info->altivec_save_offset);
10858 if (info->spe_gp_save_offset)
10859 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10860 info->spe_gp_save_offset);
10862 if (info->vrsave_save_offset)
10863 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10864 info->vrsave_save_offset);
10866 if (info->lr_save_offset)
10867 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10869 if (info->cr_save_offset)
10870 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10872 if (info->toc_save_offset)
10873 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10875 if (info->varargs_save_offset)
10876 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
10878 if (info->total_size)
10879 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
10882 if (info->varargs_size)
10883 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10885 if (info->vars_size)
10886 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
10889 if (info->parm_size)
10890 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10892 if (info->fixed_size)
10893 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10896 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10898 if (info->spe_gp_size)
10899 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10902 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10904 if (info->altivec_size)
10905 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10907 if (info->vrsave_size)
10908 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
10910 if (info->altivec_padding_size)
10911 fprintf (stderr, "\taltivec_padding_size= %5d\n",
10912 info->altivec_padding_size);
10914 if (info->spe_padding_size)
10915 fprintf (stderr, "\tspe_padding_size = %5d\n",
10916 info->spe_padding_size);
10919 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
10922 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
10924 if (info->toc_size)
10925 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
10927 if (info->save_size)
10928 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
10930 if (info->reg_size != 4)
10931 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
10933 fprintf (stderr, "\n");
10937 rs6000_return_addr (int count, rtx frame)
10939 /* Currently we don't optimize very well between prolog and body
10940 code and for PIC code the code can be actually quite bad, so
10941 don't try to be too clever here. */
10942 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10944 cfun->machine->ra_needs_full_frame = 1;
10951 plus_constant (copy_to_reg
10952 (gen_rtx_MEM (Pmode,
10953 memory_address (Pmode, frame))),
10954 RETURN_ADDRESS_OFFSET)));
10957 cfun->machine->ra_need_lr = 1;
10958 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10961 /* Say whether a function is a candidate for sibcall handling or not.
10962 We do not allow indirect calls to be optimized into sibling calls.
10963 Also, we can't do it if there are any vector parameters; there's
10964 nowhere to put the VRsave code so it works; note that functions with
10965 vector parameters are required to have a prototype, so the argument
10966 type info must be available here. (The tail recursion case can work
10967 with vector parameters, but there's no way to distinguish here.) */
10969 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
10974 if (TARGET_ALTIVEC_VRSAVE)
10976 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10977 type; type = TREE_CHAIN (type))
10979 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10983 if (DEFAULT_ABI == ABI_DARWIN
10984 || (*targetm.binds_local_p) (decl))
10986 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10988 if (!lookup_attribute ("longcall", attr_list)
10989 || lookup_attribute ("shortcall", attr_list))
10997 rs6000_ra_ever_killed (void)
11003 /* Irritatingly, there are two kinds of thunks -- those created with
11004 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
11005 through the regular part of the compiler. This is a very hacky
11006 way to tell them apart. */
11007 if (current_function_is_thunk && !no_new_pseudos)
11010 /* regs_ever_live has LR marked as used if any sibcalls are present,
11011 but this should not force saving and restoring in the
11012 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
11013 clobbers LR, so that is inappropriate. */
11015 /* Also, the prologue can generate a store into LR that
11016 doesn't really count, like this:
11019 bcl to set PIC register
11023 When we're called from the epilogue, we need to avoid counting
11024 this as a store. */
11026 push_topmost_sequence ();
11027 top = get_insns ();
11028 pop_topmost_sequence ();
11029 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11031 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
11035 if (FIND_REG_INC_NOTE (insn, reg))
11037 else if (GET_CODE (insn) == CALL_INSN
11038 && !SIBLING_CALL_P (insn))
11040 else if (set_of (reg, insn) != NULL_RTX
11041 && !prologue_epilogue_contains (insn))
11048 /* Add a REG_MAYBE_DEAD note to the insn. */
11050 rs6000_maybe_dead (rtx insn)
11052 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11057 /* Emit instructions needed to load the TOC register.
11058 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
11059 a constant pool; or for SVR4 -fpic. */
11062 rs6000_emit_load_toc_table (int fromprolog)
11065 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
11067 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
11069 rtx temp = (fromprolog
11070 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11071 : gen_reg_rtx (Pmode));
11072 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
11074 rs6000_maybe_dead (insn);
11075 insn = emit_move_insn (dest, temp);
11077 rs6000_maybe_dead (insn);
11079 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
11082 rtx tempLR = (fromprolog
11083 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11084 : gen_reg_rtx (Pmode));
11085 rtx temp0 = (fromprolog
11086 ? gen_rtx_REG (Pmode, 0)
11087 : gen_reg_rtx (Pmode));
11090 /* possibly create the toc section */
11091 if (! toc_initialized)
11094 function_section (current_function_decl);
11101 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
11102 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11104 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
11105 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11107 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
11109 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
11110 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
11117 static int reload_toc_labelno = 0;
11119 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
11121 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
11122 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11124 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
11125 emit_move_insn (dest, tempLR);
11126 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
11128 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
11130 rs6000_maybe_dead (insn);
11132 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
11134 /* This is for AIX code running in non-PIC ELF32. */
11137 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
11138 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11140 insn = emit_insn (gen_elf_high (dest, realsym));
11142 rs6000_maybe_dead (insn);
11143 insn = emit_insn (gen_elf_low (dest, dest, realsym));
11145 rs6000_maybe_dead (insn);
11147 else if (DEFAULT_ABI == ABI_AIX)
11150 insn = emit_insn (gen_load_toc_aix_si (dest));
11152 insn = emit_insn (gen_load_toc_aix_di (dest));
11154 rs6000_maybe_dead (insn);
11160 /* Emit instructions to restore the link register after determining where
11161 its value has been stored. */
11164 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
11166 rs6000_stack_t *info = rs6000_stack_info ();
11169 operands[0] = source;
11170 operands[1] = scratch;
11172 if (info->lr_save_p)
11174 rtx frame_rtx = stack_pointer_rtx;
11175 HOST_WIDE_INT sp_offset = 0;
11178 if (frame_pointer_needed
11179 || current_function_calls_alloca
11180 || info->total_size > 32767)
11182 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
11183 frame_rtx = operands[1];
11185 else if (info->push_p)
11186 sp_offset = info->total_size;
11188 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
11189 tmp = gen_rtx_MEM (Pmode, tmp);
11190 emit_move_insn (tmp, operands[0]);
11193 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
11196 static GTY(()) int set = -1;
11199 get_TOC_alias_set (void)
11202 set = new_alias_set ();
11206 /* This returns nonzero if the current function uses the TOC. This is
11207 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
11208 is generated by the ABI_V4 load_toc_* patterns. */
11215 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11218 rtx pat = PATTERN (insn);
11221 if (GET_CODE (pat) == PARALLEL)
11222 for (i = 0; i < XVECLEN (pat, 0); i++)
11224 rtx sub = XVECEXP (pat, 0, i);
11225 if (GET_CODE (sub) == USE)
11227 sub = XEXP (sub, 0);
11228 if (GET_CODE (sub) == UNSPEC
11229 && XINT (sub, 1) == UNSPEC_TOC)
11239 create_TOC_reference (rtx symbol)
11241 return gen_rtx_PLUS (Pmode,
11242 gen_rtx_REG (Pmode, TOC_REGISTER),
11243 gen_rtx_CONST (Pmode,
11244 gen_rtx_MINUS (Pmode, symbol,
11245 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11248 /* If _Unwind_* has been called from within the same module,
11249 toc register is not guaranteed to be saved to 40(1) on function
11250 entry. Save it there in that case. */
11253 rs6000_aix_emit_builtin_unwind_init (void)
11256 rtx stack_top = gen_reg_rtx (Pmode);
11257 rtx opcode_addr = gen_reg_rtx (Pmode);
11258 rtx opcode = gen_reg_rtx (SImode);
11259 rtx tocompare = gen_reg_rtx (SImode);
11260 rtx no_toc_save_needed = gen_label_rtx ();
11262 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
11263 emit_move_insn (stack_top, mem);
11265 mem = gen_rtx_MEM (Pmode,
11266 gen_rtx_PLUS (Pmode, stack_top,
11267 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
11268 emit_move_insn (opcode_addr, mem);
11269 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
11270 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
11271 : 0xE8410028, SImode));
11273 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
11274 SImode, NULL_RTX, NULL_RTX,
11275 no_toc_save_needed);
11277 mem = gen_rtx_MEM (Pmode,
11278 gen_rtx_PLUS (Pmode, stack_top,
11279 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
11280 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
11281 emit_label (no_toc_save_needed);
11284 /* This ties together stack memory (MEM with an alias set of
11285 rs6000_sr_alias_set) and the change to the stack pointer. */
11288 rs6000_emit_stack_tie (void)
11290 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11292 set_mem_alias_set (mem, rs6000_sr_alias_set);
11293 emit_insn (gen_stack_tie (mem));
11296 /* Emit the correct code for allocating stack space, as insns.
11297 If COPY_R12, make sure a copy of the old frame is left in r12.
11298 The generated code may use hard register 0 as a temporary. */
11301 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
11304 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11305 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
11306 rtx todec = GEN_INT (-size);
11308 if (current_function_limit_stack)
11310 if (REG_P (stack_limit_rtx)
11311 && REGNO (stack_limit_rtx) > 1
11312 && REGNO (stack_limit_rtx) <= 31)
11314 emit_insn (TARGET_32BIT
11315 ? gen_addsi3 (tmp_reg,
11318 : gen_adddi3 (tmp_reg,
11322 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11325 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
11327 && DEFAULT_ABI == ABI_V4)
11329 rtx toload = gen_rtx_CONST (VOIDmode,
11330 gen_rtx_PLUS (Pmode,
11334 emit_insn (gen_elf_high (tmp_reg, toload));
11335 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
11336 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11340 warning ("stack limit expression is not supported");
11343 if (copy_r12 || ! TARGET_UPDATE)
11344 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
11350 /* Need a note here so that try_split doesn't get confused. */
11351 if (get_last_insn() == NULL_RTX)
11352 emit_note (NOTE_INSN_DELETED);
11353 insn = emit_move_insn (tmp_reg, todec);
11354 try_split (PATTERN (insn), insn, 0);
11358 insn = emit_insn (TARGET_32BIT
11359 ? gen_movsi_update (stack_reg, stack_reg,
11361 : gen_movdi_update (stack_reg, stack_reg,
11362 todec, stack_reg));
11366 insn = emit_insn (TARGET_32BIT
11367 ? gen_addsi3 (stack_reg, stack_reg, todec)
11368 : gen_adddi3 (stack_reg, stack_reg, todec));
11369 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
11370 gen_rtx_REG (Pmode, 12));
11373 RTX_FRAME_RELATED_P (insn) = 1;
11375 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11376 gen_rtx_SET (VOIDmode, stack_reg,
11377 gen_rtx_PLUS (Pmode, stack_reg,
11382 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11383 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11384 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11385 deduce these equivalences by itself so it wasn't necessary to hold
11386 its hand so much. */
11389 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11390 rtx reg2, rtx rreg)
11394 /* copy_rtx will not make unique copies of registers, so we need to
11395 ensure we don't have unwanted sharing here. */
11397 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11400 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11402 real = copy_rtx (PATTERN (insn));
11404 if (reg2 != NULL_RTX)
11405 real = replace_rtx (real, reg2, rreg);
11407 real = replace_rtx (real, reg,
11408 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11409 STACK_POINTER_REGNUM),
11412 /* We expect that 'real' is either a SET or a PARALLEL containing
11413 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11414 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11416 if (GET_CODE (real) == SET)
11420 temp = simplify_rtx (SET_SRC (set));
11422 SET_SRC (set) = temp;
11423 temp = simplify_rtx (SET_DEST (set));
11425 SET_DEST (set) = temp;
11426 if (GET_CODE (SET_DEST (set)) == MEM)
11428 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11430 XEXP (SET_DEST (set), 0) = temp;
11433 else if (GET_CODE (real) == PARALLEL)
11436 for (i = 0; i < XVECLEN (real, 0); i++)
11437 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11439 rtx set = XVECEXP (real, 0, i);
11441 temp = simplify_rtx (SET_SRC (set));
11443 SET_SRC (set) = temp;
11444 temp = simplify_rtx (SET_DEST (set));
11446 SET_DEST (set) = temp;
11447 if (GET_CODE (SET_DEST (set)) == MEM)
11449 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11451 XEXP (SET_DEST (set), 0) = temp;
11453 RTX_FRAME_RELATED_P (set) = 1;
11460 real = spe_synthesize_frame_save (real);
11462 RTX_FRAME_RELATED_P (insn) = 1;
11463 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11468 /* Given an SPE frame note, return a PARALLEL of SETs with the
11469 original note, plus a synthetic register save. */
11472 spe_synthesize_frame_save (rtx real)
11474 rtx synth, offset, reg, real2;
11476 if (GET_CODE (real) != SET
11477 || GET_MODE (SET_SRC (real)) != V2SImode)
11480 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11481 frame related note. The parallel contains a set of the register
11482 being saved, and another set to a synthetic register (n+1200).
11483 This is so we can differentiate between 64-bit and 32-bit saves.
11484 Words cannot describe this nastiness. */
11486 if (GET_CODE (SET_DEST (real)) != MEM
11487 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
11488 || GET_CODE (SET_SRC (real)) != REG)
11492 (set (mem (plus (reg x) (const y)))
11495 (set (mem (plus (reg x) (const y+4)))
11499 real2 = copy_rtx (real);
11500 PUT_MODE (SET_DEST (real2), SImode);
11501 reg = SET_SRC (real2);
11502 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
11503 synth = copy_rtx (real2);
11505 if (BYTES_BIG_ENDIAN)
11507 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
11508 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
11511 reg = SET_SRC (synth);
11513 synth = replace_rtx (synth, reg,
11514 gen_rtx_REG (SImode, REGNO (reg) + 1200));
11516 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
11517 synth = replace_rtx (synth, offset,
11518 GEN_INT (INTVAL (offset)
11519 + (BYTES_BIG_ENDIAN ? 0 : 4)));
11521 RTX_FRAME_RELATED_P (synth) = 1;
11522 RTX_FRAME_RELATED_P (real2) = 1;
11523 if (BYTES_BIG_ENDIAN)
11524 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
11526 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
11531 /* Returns an insn that has a vrsave set operation with the
11532 appropriate CLOBBERs. */
11535 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
11538 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11539 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11542 = gen_rtx_SET (VOIDmode,
11544 gen_rtx_UNSPEC_VOLATILE (SImode,
11545 gen_rtvec (2, reg, vrsave),
11550 /* We need to clobber the registers in the mask so the scheduler
11551 does not move sets to VRSAVE before sets of AltiVec registers.
11553 However, if the function receives nonlocal gotos, reload will set
11554 all call saved registers live. We will end up with:
11556 (set (reg 999) (mem))
11557 (parallel [ (set (reg vrsave) (unspec blah))
11558 (clobber (reg 999))])
11560 The clobber will cause the store into reg 999 to be dead, and
11561 flow will attempt to delete an epilogue insn. In this case, we
11562 need an unspec use/set of the register. */
11564 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11565 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11567 if (!epiloguep || call_used_regs [i])
11568 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11569 gen_rtx_REG (V4SImode, i));
11572 rtx reg = gen_rtx_REG (V4SImode, i);
11575 = gen_rtx_SET (VOIDmode,
11577 gen_rtx_UNSPEC (V4SImode,
11578 gen_rtvec (1, reg), 27));
11582 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11584 for (i = 0; i < nclobs; ++i)
11585 XVECEXP (insn, 0, i) = clobs[i];
11590 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11591 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11594 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
11595 unsigned int regno, int offset, HOST_WIDE_INT total_size)
11597 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11598 rtx replacea, replaceb;
11600 int_rtx = GEN_INT (offset);
11602 /* Some cases that need register indexed addressing. */
11603 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11605 && SPE_VECTOR_MODE (mode)
11606 && !SPE_CONST_OFFSET_OK (offset)))
11608 /* Whomever calls us must make sure r11 is available in the
11609 flow path of instructions in the prologue. */
11610 offset_rtx = gen_rtx_REG (Pmode, 11);
11611 emit_move_insn (offset_rtx, int_rtx);
11613 replacea = offset_rtx;
11614 replaceb = int_rtx;
11618 offset_rtx = int_rtx;
11619 replacea = NULL_RTX;
11620 replaceb = NULL_RTX;
11623 reg = gen_rtx_REG (mode, regno);
11624 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11625 mem = gen_rtx_MEM (mode, addr);
11626 set_mem_alias_set (mem, rs6000_sr_alias_set);
11628 insn = emit_move_insn (mem, reg);
11630 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11633 /* Emit an offset memory reference suitable for a frame store, while
11634 converting to a valid addressing mode. */
11637 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
11639 rtx int_rtx, offset_rtx;
11641 int_rtx = GEN_INT (offset);
11643 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11645 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11646 emit_move_insn (offset_rtx, int_rtx);
11649 offset_rtx = int_rtx;
11651 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11654 /* Emit function prologue as insns. */
11657 rs6000_emit_prologue (void)
11659 rs6000_stack_t *info = rs6000_stack_info ();
11660 enum machine_mode reg_mode = Pmode;
11661 int reg_size = UNITS_PER_WORD;
11662 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11663 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11664 rtx frame_reg_rtx = sp_reg_rtx;
11665 rtx cr_save_rtx = NULL_RTX;
11667 int saving_FPRs_inline;
11668 int using_store_multiple;
11669 HOST_WIDE_INT sp_offset = 0;
11671 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11673 reg_mode = V2SImode;
11677 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11678 && (!TARGET_SPE_ABI
11679 || info->spe_64bit_regs_used == 0)
11680 && info->first_gp_reg_save < 31);
11681 saving_FPRs_inline = (info->first_fp_reg_save == 64
11682 || FP_SAVE_INLINE (info->first_fp_reg_save)
11683 || current_function_calls_eh_return
11684 || cfun->machine->ra_need_lr);
11686 /* For V.4, update stack before we do any saving and set back pointer. */
11688 && (DEFAULT_ABI == ABI_V4
11689 || current_function_calls_eh_return))
11691 if (info->total_size < 32767)
11692 sp_offset = info->total_size;
11694 frame_reg_rtx = frame_ptr_rtx;
11695 rs6000_emit_allocate_stack (info->total_size,
11696 (frame_reg_rtx != sp_reg_rtx
11697 && (info->cr_save_p
11699 || info->first_fp_reg_save < 64
11700 || info->first_gp_reg_save < 32
11702 if (frame_reg_rtx != sp_reg_rtx)
11703 rs6000_emit_stack_tie ();
11706 /* Save AltiVec registers if needed. */
11707 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11711 /* There should be a non inline version of this, for when we
11712 are saving lots of vector registers. */
11713 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11714 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11716 rtx areg, savereg, mem;
11719 offset = info->altivec_save_offset + sp_offset
11720 + 16 * (i - info->first_altivec_reg_save);
11722 savereg = gen_rtx_REG (V4SImode, i);
11724 areg = gen_rtx_REG (Pmode, 0);
11725 emit_move_insn (areg, GEN_INT (offset));
11727 /* AltiVec addressing mode is [reg+reg]. */
11728 mem = gen_rtx_MEM (V4SImode,
11729 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11731 set_mem_alias_set (mem, rs6000_sr_alias_set);
11733 insn = emit_move_insn (mem, savereg);
11735 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11736 areg, GEN_INT (offset));
11740 /* VRSAVE is a bit vector representing which AltiVec registers
11741 are used. The OS uses this to determine which vector
11742 registers to save on a context switch. We need to save
11743 VRSAVE on the stack frame, add whatever AltiVec registers we
11744 used in this function, and do the corresponding magic in the
11747 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
11748 && info->vrsave_mask != 0)
11750 rtx reg, mem, vrsave;
11753 /* Get VRSAVE onto a GPR. */
11754 reg = gen_rtx_REG (SImode, 12);
11755 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11757 emit_insn (gen_get_vrsave_internal (reg));
11759 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11762 offset = info->vrsave_save_offset + sp_offset;
11764 = gen_rtx_MEM (SImode,
11765 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11766 set_mem_alias_set (mem, rs6000_sr_alias_set);
11767 insn = emit_move_insn (mem, reg);
11769 /* Include the registers in the mask. */
11770 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11772 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11775 /* If we use the link register, get it into r0. */
11776 if (info->lr_save_p)
11777 emit_move_insn (gen_rtx_REG (Pmode, 0),
11778 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11780 /* If we need to save CR, put it into r12. */
11781 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11783 cr_save_rtx = gen_rtx_REG (SImode, 12);
11784 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11787 /* Do any required saving of fpr's. If only one or two to save, do
11788 it ourselves. Otherwise, call function. */
11789 if (saving_FPRs_inline)
11792 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11793 if ((regs_ever_live[info->first_fp_reg_save+i]
11794 && ! call_used_regs[info->first_fp_reg_save+i]))
11795 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11796 info->first_fp_reg_save + i,
11797 info->fp_save_offset + sp_offset + 8 * i,
11800 else if (info->first_fp_reg_save != 64)
11804 const char *alloc_rname;
11806 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11808 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11809 gen_rtx_REG (Pmode,
11810 LINK_REGISTER_REGNUM));
11811 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11812 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11813 alloc_rname = ggc_strdup (rname);
11814 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11815 gen_rtx_SYMBOL_REF (Pmode,
11817 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11819 rtx addr, reg, mem;
11820 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11821 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11822 GEN_INT (info->fp_save_offset
11823 + sp_offset + 8*i));
11824 mem = gen_rtx_MEM (DFmode, addr);
11825 set_mem_alias_set (mem, rs6000_sr_alias_set);
11827 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11829 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11830 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11831 NULL_RTX, NULL_RTX);
11834 /* Save GPRs. This is done as a PARALLEL if we are using
11835 the store-multiple instructions. */
11836 if (using_store_multiple)
11840 p = rtvec_alloc (32 - info->first_gp_reg_save);
11841 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11843 rtx addr, reg, mem;
11844 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11845 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11846 GEN_INT (info->gp_save_offset
11849 mem = gen_rtx_MEM (reg_mode, addr);
11850 set_mem_alias_set (mem, rs6000_sr_alias_set);
11852 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11854 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11855 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11856 NULL_RTX, NULL_RTX);
11861 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11862 if ((regs_ever_live[info->first_gp_reg_save+i]
11863 && ! call_used_regs[info->first_gp_reg_save+i])
11864 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11865 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11866 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11868 rtx addr, reg, mem;
11869 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11871 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11873 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11876 if (!SPE_CONST_OFFSET_OK (offset))
11878 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11879 emit_move_insn (b, GEN_INT (offset));
11882 b = GEN_INT (offset);
11884 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11885 mem = gen_rtx_MEM (V2SImode, addr);
11886 set_mem_alias_set (mem, rs6000_sr_alias_set);
11887 insn = emit_move_insn (mem, reg);
11889 if (GET_CODE (b) == CONST_INT)
11890 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11891 NULL_RTX, NULL_RTX);
11893 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11894 b, GEN_INT (offset));
11898 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11899 GEN_INT (info->gp_save_offset
11902 mem = gen_rtx_MEM (reg_mode, addr);
11903 set_mem_alias_set (mem, rs6000_sr_alias_set);
11905 insn = emit_move_insn (mem, reg);
11906 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11907 NULL_RTX, NULL_RTX);
11912 /* ??? There's no need to emit actual instructions here, but it's the
11913 easiest way to get the frame unwind information emitted. */
11914 if (current_function_calls_eh_return)
11916 unsigned int i, regno;
11918 /* In AIX ABI we need to pretend we save r2 here. */
11921 rtx addr, reg, mem;
11923 reg = gen_rtx_REG (reg_mode, 2);
11924 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11925 GEN_INT (sp_offset + 5 * reg_size));
11926 mem = gen_rtx_MEM (reg_mode, addr);
11927 set_mem_alias_set (mem, rs6000_sr_alias_set);
11929 insn = emit_move_insn (mem, reg);
11930 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11931 NULL_RTX, NULL_RTX);
11932 PATTERN (insn) = gen_blockage ();
11937 regno = EH_RETURN_DATA_REGNO (i);
11938 if (regno == INVALID_REGNUM)
11941 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11942 info->ehrd_offset + sp_offset
11943 + reg_size * (int) i,
11948 /* Save lr if we used it. */
11949 if (info->lr_save_p)
11951 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11952 GEN_INT (info->lr_save_offset + sp_offset));
11953 rtx reg = gen_rtx_REG (Pmode, 0);
11954 rtx mem = gen_rtx_MEM (Pmode, addr);
11955 /* This should not be of rs6000_sr_alias_set, because of
11956 __builtin_return_address. */
11958 insn = emit_move_insn (mem, reg);
11959 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11960 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11963 /* Save CR if we use any that must be preserved. */
11964 if (info->cr_save_p)
11966 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11967 GEN_INT (info->cr_save_offset + sp_offset));
11968 rtx mem = gen_rtx_MEM (SImode, addr);
11970 set_mem_alias_set (mem, rs6000_sr_alias_set);
11972 /* If r12 was used to hold the original sp, copy cr into r0 now
11974 if (REGNO (frame_reg_rtx) == 12)
11976 cr_save_rtx = gen_rtx_REG (SImode, 0);
11977 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11979 insn = emit_move_insn (mem, cr_save_rtx);
11981 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11982 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11983 But that's OK. All we have to do is specify that _one_ condition
11984 code register is saved in this stack slot. The thrower's epilogue
11985 will then restore all the call-saved registers.
11986 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11987 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11988 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11991 /* Update stack and set back pointer unless this is V.4,
11992 for which it was done previously. */
11994 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
11995 rs6000_emit_allocate_stack (info->total_size, FALSE);
11997 /* Set frame pointer, if needed. */
11998 if (frame_pointer_needed)
12000 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
12002 RTX_FRAME_RELATED_P (insn) = 1;
12005 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
12006 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
12007 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
12008 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
12010 /* If emit_load_toc_table will use the link register, we need to save
12011 it. We use R12 for this purpose because emit_load_toc_table
12012 can use register 0. This allows us to use a plain 'blr' to return
12013 from the procedure more often. */
12014 int save_LR_around_toc_setup = (TARGET_ELF
12015 && DEFAULT_ABI != ABI_AIX
12017 && ! info->lr_save_p
12018 && EXIT_BLOCK_PTR->pred != NULL);
12019 if (save_LR_around_toc_setup)
12021 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12022 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
12023 rs6000_emit_load_toc_table (TRUE);
12024 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
12027 rs6000_emit_load_toc_table (TRUE);
12031 if (DEFAULT_ABI == ABI_DARWIN
12032 && flag_pic && current_function_uses_pic_offset_table)
12034 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12035 const char *picbase = machopic_function_base_name ();
12036 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
12038 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
12040 rs6000_maybe_dead (
12041 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
12042 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
12047 /* Write function prologue. */
12050 rs6000_output_function_prologue (FILE *file,
12051 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12053 rs6000_stack_t *info = rs6000_stack_info ();
12055 if (TARGET_DEBUG_STACK)
12056 debug_stack_info (info);
12058 /* Write .extern for any function we will call to save and restore
12060 if (info->first_fp_reg_save < 64
12061 && !FP_SAVE_INLINE (info->first_fp_reg_save))
12062 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
12063 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
12064 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
12065 RESTORE_FP_SUFFIX);
12067 /* Write .extern for AIX common mode routines, if needed. */
12068 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
12070 fputs ("\t.extern __mulh\n", file);
12071 fputs ("\t.extern __mull\n", file);
12072 fputs ("\t.extern __divss\n", file);
12073 fputs ("\t.extern __divus\n", file);
12074 fputs ("\t.extern __quoss\n", file);
12075 fputs ("\t.extern __quous\n", file);
12076 common_mode_defined = 1;
12079 if (! HAVE_prologue)
12083 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
12084 the "toplevel" insn chain. */
12085 emit_note (NOTE_INSN_DELETED);
12086 rs6000_emit_prologue ();
12087 emit_note (NOTE_INSN_DELETED);
12089 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12093 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12095 INSN_ADDRESSES_NEW (insn, addr);
12100 if (TARGET_DEBUG_STACK)
12101 debug_rtx_list (get_insns (), 100);
12102 final (get_insns (), file, FALSE, FALSE);
12106 rs6000_pic_labelno++;
12109 /* Emit function epilogue as insns.
12111 At present, dwarf2out_frame_debug_expr doesn't understand
12112 register restores, so we don't bother setting RTX_FRAME_RELATED_P
12113 anywhere in the epilogue. Most of the insns below would in any case
12114 need special notes to explain where r11 is in relation to the stack. */
12117 rs6000_emit_epilogue (int sibcall)
12119 rs6000_stack_t *info;
12120 int restoring_FPRs_inline;
12121 int using_load_multiple;
12122 int using_mfcr_multiple;
12123 int use_backchain_to_restore_sp;
12125 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
12126 rtx frame_reg_rtx = sp_reg_rtx;
12127 enum machine_mode reg_mode = Pmode;
12128 int reg_size = UNITS_PER_WORD;
12131 info = rs6000_stack_info ();
12133 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12135 reg_mode = V2SImode;
12139 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12140 && (!TARGET_SPE_ABI
12141 || info->spe_64bit_regs_used == 0)
12142 && info->first_gp_reg_save < 31);
12143 restoring_FPRs_inline = (sibcall
12144 || current_function_calls_eh_return
12145 || info->first_fp_reg_save == 64
12146 || FP_SAVE_INLINE (info->first_fp_reg_save));
12147 use_backchain_to_restore_sp = (frame_pointer_needed
12148 || current_function_calls_alloca
12149 || info->total_size > 32767);
12150 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
12151 || rs6000_cpu == PROCESSOR_PPC603
12152 || rs6000_cpu == PROCESSOR_PPC750
12155 /* If we have a frame pointer, a call to alloca, or a large stack
12156 frame, restore the old stack pointer using the backchain. Otherwise,
12157 we know what size to update it with. */
12158 if (use_backchain_to_restore_sp)
12160 /* Under V.4, don't reset the stack pointer until after we're done
12161 loading the saved registers. */
12162 if (DEFAULT_ABI == ABI_V4)
12163 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
12165 emit_move_insn (frame_reg_rtx,
12166 gen_rtx_MEM (Pmode, sp_reg_rtx));
12169 else if (info->push_p)
12171 if (DEFAULT_ABI == ABI_V4
12172 || current_function_calls_eh_return)
12173 sp_offset = info->total_size;
12176 emit_insn (TARGET_32BIT
12177 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12178 GEN_INT (info->total_size))
12179 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12180 GEN_INT (info->total_size)));
12184 /* Restore AltiVec registers if needed. */
12185 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12189 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12190 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12192 rtx addr, areg, mem;
12194 areg = gen_rtx_REG (Pmode, 0);
12196 (areg, GEN_INT (info->altivec_save_offset
12198 + 16 * (i - info->first_altivec_reg_save)));
12200 /* AltiVec addressing mode is [reg+reg]. */
12201 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
12202 mem = gen_rtx_MEM (V4SImode, addr);
12203 set_mem_alias_set (mem, rs6000_sr_alias_set);
12205 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
12209 /* Restore VRSAVE if needed. */
12210 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE
12211 && info->vrsave_mask != 0)
12213 rtx addr, mem, reg;
12215 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12216 GEN_INT (info->vrsave_save_offset + sp_offset));
12217 mem = gen_rtx_MEM (SImode, addr);
12218 set_mem_alias_set (mem, rs6000_sr_alias_set);
12219 reg = gen_rtx_REG (SImode, 12);
12220 emit_move_insn (reg, mem);
12222 emit_insn (generate_set_vrsave (reg, info, 1));
12225 /* Get the old lr if we saved it. */
12226 if (info->lr_save_p)
12228 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
12229 info->lr_save_offset + sp_offset);
12231 set_mem_alias_set (mem, rs6000_sr_alias_set);
12233 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
12236 /* Get the old cr if we saved it. */
12237 if (info->cr_save_p)
12239 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12240 GEN_INT (info->cr_save_offset + sp_offset));
12241 rtx mem = gen_rtx_MEM (SImode, addr);
12243 set_mem_alias_set (mem, rs6000_sr_alias_set);
12245 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12248 /* Set LR here to try to overlap restores below. */
12249 if (info->lr_save_p)
12250 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12251 gen_rtx_REG (Pmode, 0));
12253 /* Load exception handler data registers, if needed. */
12254 if (current_function_calls_eh_return)
12256 unsigned int i, regno;
12260 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12261 GEN_INT (sp_offset + 5 * reg_size));
12262 rtx mem = gen_rtx_MEM (reg_mode, addr);
12264 set_mem_alias_set (mem, rs6000_sr_alias_set);
12266 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
12273 regno = EH_RETURN_DATA_REGNO (i);
12274 if (regno == INVALID_REGNUM)
12277 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12278 info->ehrd_offset + sp_offset
12279 + reg_size * (int) i);
12280 set_mem_alias_set (mem, rs6000_sr_alias_set);
12282 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12286 /* Restore GPRs. This is done as a PARALLEL if we are using
12287 the load-multiple instructions. */
12288 if (using_load_multiple)
12291 p = rtvec_alloc (32 - info->first_gp_reg_save);
12292 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12294 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12295 GEN_INT (info->gp_save_offset
12298 rtx mem = gen_rtx_MEM (reg_mode, addr);
12300 set_mem_alias_set (mem, rs6000_sr_alias_set);
12303 gen_rtx_SET (VOIDmode,
12304 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12307 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12310 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12311 if ((regs_ever_live[info->first_gp_reg_save+i]
12312 && ! call_used_regs[info->first_gp_reg_save+i])
12313 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12314 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12315 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12317 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12318 GEN_INT (info->gp_save_offset
12321 rtx mem = gen_rtx_MEM (reg_mode, addr);
12323 /* Restore 64-bit quantities for SPE. */
12324 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12326 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12329 if (!SPE_CONST_OFFSET_OK (offset))
12331 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12332 emit_move_insn (b, GEN_INT (offset));
12335 b = GEN_INT (offset);
12337 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12338 mem = gen_rtx_MEM (V2SImode, addr);
12341 set_mem_alias_set (mem, rs6000_sr_alias_set);
12343 emit_move_insn (gen_rtx_REG (reg_mode,
12344 info->first_gp_reg_save + i), mem);
12347 /* Restore fpr's if we need to do it without calling a function. */
12348 if (restoring_FPRs_inline)
12349 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12350 if ((regs_ever_live[info->first_fp_reg_save+i]
12351 && ! call_used_regs[info->first_fp_reg_save+i]))
12354 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12355 GEN_INT (info->fp_save_offset
12358 mem = gen_rtx_MEM (DFmode, addr);
12359 set_mem_alias_set (mem, rs6000_sr_alias_set);
12361 emit_move_insn (gen_rtx_REG (DFmode,
12362 info->first_fp_reg_save + i),
12366 /* If we saved cr, restore it here. Just those that were used. */
12367 if (info->cr_save_p)
12369 rtx r12_rtx = gen_rtx_REG (SImode, 12);
12372 if (using_mfcr_multiple)
12374 for (i = 0; i < 8; i++)
12375 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12381 if (using_mfcr_multiple && count > 1)
12386 p = rtvec_alloc (count);
12389 for (i = 0; i < 8; i++)
12390 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12392 rtvec r = rtvec_alloc (2);
12393 RTVEC_ELT (r, 0) = r12_rtx;
12394 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12395 RTVEC_ELT (p, ndx) =
12396 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12397 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12400 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12405 for (i = 0; i < 8; i++)
12406 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12408 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12414 /* If this is V.4, unwind the stack pointer after all of the loads
12415 have been done. We need to emit a block here so that sched
12416 doesn't decide to move the sp change before the register restores
12417 (which may not have any obvious dependency on the stack). This
12418 doesn't hurt performance, because there is no scheduling that can
12419 be done after this point. */
12420 if (DEFAULT_ABI == ABI_V4
12421 || current_function_calls_eh_return)
12423 if (frame_reg_rtx != sp_reg_rtx)
12424 rs6000_emit_stack_tie ();
12426 if (use_backchain_to_restore_sp)
12428 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12430 else if (sp_offset != 0)
12432 emit_insn (TARGET_32BIT
12433 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12434 GEN_INT (sp_offset))
12435 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12436 GEN_INT (sp_offset)));
12440 if (current_function_calls_eh_return)
12442 rtx sa = EH_RETURN_STACKADJ_RTX;
12443 emit_insn (TARGET_32BIT
12444 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12445 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
12451 if (! restoring_FPRs_inline)
12452 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12454 p = rtvec_alloc (2);
12456 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12457 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12458 gen_rtx_REG (Pmode,
12459 LINK_REGISTER_REGNUM));
12461 /* If we have to restore more than two FP registers, branch to the
12462 restore function. It will return to our caller. */
12463 if (! restoring_FPRs_inline)
12467 const char *alloc_rname;
12469 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12470 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12471 alloc_rname = ggc_strdup (rname);
12472 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12473 gen_rtx_SYMBOL_REF (Pmode,
12476 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12479 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12480 GEN_INT (info->fp_save_offset + 8*i));
12481 mem = gen_rtx_MEM (DFmode, addr);
12482 set_mem_alias_set (mem, rs6000_sr_alias_set);
12484 RTVEC_ELT (p, i+3) =
12485 gen_rtx_SET (VOIDmode,
12486 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12491 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12495 /* Write function epilogue. */
12498 rs6000_output_function_epilogue (FILE *file,
12499 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12501 rs6000_stack_t *info = rs6000_stack_info ();
12503 if (! HAVE_epilogue)
12505 rtx insn = get_last_insn ();
12506 /* If the last insn was a BARRIER, we don't have to write anything except
12507 the trace table. */
12508 if (GET_CODE (insn) == NOTE)
12509 insn = prev_nonnote_insn (insn);
12510 if (insn == 0 || GET_CODE (insn) != BARRIER)
12512 /* This is slightly ugly, but at least we don't have two
12513 copies of the epilogue-emitting code. */
12516 /* A NOTE_INSN_DELETED is supposed to be at the start
12517 and end of the "toplevel" insn chain. */
12518 emit_note (NOTE_INSN_DELETED);
12519 rs6000_emit_epilogue (FALSE);
12520 emit_note (NOTE_INSN_DELETED);
12522 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12526 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12528 INSN_ADDRESSES_NEW (insn, addr);
12533 if (TARGET_DEBUG_STACK)
12534 debug_rtx_list (get_insns (), 100);
12535 final (get_insns (), file, FALSE, FALSE);
12541 macho_branch_islands ();
12542 /* Mach-O doesn't support labels at the end of objects, so if
12543 it looks like we might want one, insert a NOP. */
12545 rtx insn = get_last_insn ();
12548 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
12549 insn = PREV_INSN (insn);
12553 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
12554 fputs ("\tnop\n", file);
12558 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12561 We don't output a traceback table if -finhibit-size-directive was
12562 used. The documentation for -finhibit-size-directive reads
12563 ``don't output a @code{.size} assembler directive, or anything
12564 else that would cause trouble if the function is split in the
12565 middle, and the two halves are placed at locations far apart in
12566 memory.'' The traceback table has this property, since it
12567 includes the offset from the start of the function to the
12568 traceback table itself.
12570 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12571 different traceback table. */
12572 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12573 && rs6000_traceback != traceback_none)
12575 const char *fname = NULL;
12576 const char *language_string = lang_hooks.name;
12577 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12579 int optional_tbtab;
12581 if (rs6000_traceback == traceback_full)
12582 optional_tbtab = 1;
12583 else if (rs6000_traceback == traceback_part)
12584 optional_tbtab = 0;
12586 optional_tbtab = !optimize_size && !TARGET_ELF;
12588 if (optional_tbtab)
12590 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12591 while (*fname == '.') /* V.4 encodes . in the name */
12594 /* Need label immediately before tbtab, so we can compute
12595 its offset from the function start. */
12596 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12597 ASM_OUTPUT_LABEL (file, fname);
12600 /* The .tbtab pseudo-op can only be used for the first eight
12601 expressions, since it can't handle the possibly variable
12602 length fields that follow. However, if you omit the optional
12603 fields, the assembler outputs zeros for all optional fields
12604 anyways, giving each variable length field is minimum length
12605 (as defined in sys/debug.h). Thus we can not use the .tbtab
12606 pseudo-op at all. */
12608 /* An all-zero word flags the start of the tbtab, for debuggers
12609 that have to find it by searching forward from the entry
12610 point or from the current pc. */
12611 fputs ("\t.long 0\n", file);
12613 /* Tbtab format type. Use format type 0. */
12614 fputs ("\t.byte 0,", file);
12616 /* Language type. Unfortunately, there does not seem to be any
12617 official way to discover the language being compiled, so we
12618 use language_string.
12619 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
12620 Java is 13. Objective-C is 14. */
12621 if (! strcmp (language_string, "GNU C"))
12623 else if (! strcmp (language_string, "GNU F77"))
12625 else if (! strcmp (language_string, "GNU Pascal"))
12627 else if (! strcmp (language_string, "GNU Ada"))
12629 else if (! strcmp (language_string, "GNU C++"))
12631 else if (! strcmp (language_string, "GNU Java"))
12633 else if (! strcmp (language_string, "GNU Objective-C"))
12637 fprintf (file, "%d,", i);
12639 /* 8 single bit fields: global linkage (not set for C extern linkage,
12640 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12641 from start of procedure stored in tbtab, internal function, function
12642 has controlled storage, function has no toc, function uses fp,
12643 function logs/aborts fp operations. */
12644 /* Assume that fp operations are used if any fp reg must be saved. */
12645 fprintf (file, "%d,",
12646 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12648 /* 6 bitfields: function is interrupt handler, name present in
12649 proc table, function calls alloca, on condition directives
12650 (controls stack walks, 3 bits), saves condition reg, saves
12652 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12653 set up as a frame pointer, even when there is no alloca call. */
12654 fprintf (file, "%d,",
12655 ((optional_tbtab << 6)
12656 | ((optional_tbtab & frame_pointer_needed) << 5)
12657 | (info->cr_save_p << 1)
12658 | (info->lr_save_p)));
12660 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12662 fprintf (file, "%d,",
12663 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12665 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12666 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12668 if (optional_tbtab)
12670 /* Compute the parameter info from the function decl argument
12673 int next_parm_info_bit = 31;
12675 for (decl = DECL_ARGUMENTS (current_function_decl);
12676 decl; decl = TREE_CHAIN (decl))
12678 rtx parameter = DECL_INCOMING_RTL (decl);
12679 enum machine_mode mode = GET_MODE (parameter);
12681 if (GET_CODE (parameter) == REG)
12683 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12689 if (mode == SFmode)
12691 else if (mode == DFmode || mode == TFmode)
12696 /* If only one bit will fit, don't or in this entry. */
12697 if (next_parm_info_bit > 0)
12698 parm_info |= (bits << (next_parm_info_bit - 1));
12699 next_parm_info_bit -= 2;
12703 fixed_parms += ((GET_MODE_SIZE (mode)
12704 + (UNITS_PER_WORD - 1))
12706 next_parm_info_bit -= 1;
12712 /* Number of fixed point parameters. */
12713 /* This is actually the number of words of fixed point parameters; thus
12714 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12715 fprintf (file, "%d,", fixed_parms);
12717 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12719 /* This is actually the number of fp registers that hold parameters;
12720 and thus the maximum value is 13. */
12721 /* Set parameters on stack bit if parameters are not in their original
12722 registers, regardless of whether they are on the stack? Xlc
12723 seems to set the bit when not optimizing. */
12724 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12726 if (! optional_tbtab)
12729 /* Optional fields follow. Some are variable length. */
12731 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12732 11 double float. */
12733 /* There is an entry for each parameter in a register, in the order that
12734 they occur in the parameter list. Any intervening arguments on the
12735 stack are ignored. If the list overflows a long (max possible length
12736 34 bits) then completely leave off all elements that don't fit. */
12737 /* Only emit this long if there was at least one parameter. */
12738 if (fixed_parms || float_parms)
12739 fprintf (file, "\t.long %d\n", parm_info);
12741 /* Offset from start of code to tb table. */
12742 fputs ("\t.long ", file);
12743 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12745 RS6000_OUTPUT_BASENAME (file, fname);
12747 assemble_name (file, fname);
12749 fputs ("-.", file);
12751 RS6000_OUTPUT_BASENAME (file, fname);
12753 assemble_name (file, fname);
12757 /* Interrupt handler mask. */
12758 /* Omit this long, since we never set the interrupt handler bit
12761 /* Number of CTL (controlled storage) anchors. */
12762 /* Omit this long, since the has_ctl bit is never set above. */
12764 /* Displacement into stack of each CTL anchor. */
12765 /* Omit this list of longs, because there are no CTL anchors. */
12767 /* Length of function name. */
12770 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12772 /* Function name. */
12773 assemble_string (fname, strlen (fname));
12775 /* Register for alloca automatic storage; this is always reg 31.
12776 Only emit this if the alloca bit was set above. */
12777 if (frame_pointer_needed)
12778 fputs ("\t.byte 31\n", file);
12780 fputs ("\t.align 2\n", file);
12784 /* A C compound statement that outputs the assembler code for a thunk
12785 function, used to implement C++ virtual function calls with
12786 multiple inheritance. The thunk acts as a wrapper around a virtual
12787 function, adjusting the implicit object parameter before handing
12788 control off to the real function.
12790 First, emit code to add the integer DELTA to the location that
12791 contains the incoming first argument. Assume that this argument
12792 contains a pointer, and is the one used to pass the `this' pointer
12793 in C++. This is the incoming argument *before* the function
12794 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12795 values of all other incoming arguments.
12797 After the addition, emit code to jump to FUNCTION, which is a
12798 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12799 not touch the return address. Hence returning from FUNCTION will
12800 return to whoever called the current `thunk'.
12802 The effect must be as if FUNCTION had been called directly with the
12803 adjusted first argument. This macro is responsible for emitting
12804 all of the code for a thunk function; output_function_prologue()
12805 and output_function_epilogue() are not invoked.
12807 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12808 been extracted from it.) It might possibly be useful on some
12809 targets, but probably not.
12811 If you do not define this macro, the target-independent code in the
12812 C++ frontend will generate a less efficient heavyweight thunk that
12813 calls FUNCTION instead of jumping to it. The generic approach does
12814 not support varargs. */
12817 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
12818 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
12821 rtx this, insn, funexp;
12823 reload_completed = 1;
12824 epilogue_completed = 1;
12825 no_new_pseudos = 1;
12827 /* Mark the end of the (empty) prologue. */
12828 emit_note (NOTE_INSN_PROLOGUE_END);
12830 /* Find the "this" pointer. If the function returns a structure,
12831 the structure return pointer is in r3. */
12832 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
12833 this = gen_rtx_REG (Pmode, 4);
12835 this = gen_rtx_REG (Pmode, 3);
12837 /* Apply the constant offset, if required. */
12840 rtx delta_rtx = GEN_INT (delta);
12841 emit_insn (TARGET_32BIT
12842 ? gen_addsi3 (this, this, delta_rtx)
12843 : gen_adddi3 (this, this, delta_rtx));
12846 /* Apply the offset from the vtable, if required. */
12849 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
12850 rtx tmp = gen_rtx_REG (Pmode, 12);
12852 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
12853 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
12855 emit_insn (TARGET_32BIT
12856 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
12857 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
12858 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
12862 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
12864 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
12866 emit_insn (TARGET_32BIT
12867 ? gen_addsi3 (this, this, tmp)
12868 : gen_adddi3 (this, this, tmp));
12871 /* Generate a tail call to the target function. */
12872 if (!TREE_USED (function))
12874 assemble_external (function);
12875 TREE_USED (function) = 1;
12877 funexp = XEXP (DECL_RTL (function), 0);
12878 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
12881 if (MACHOPIC_INDIRECT)
12882 funexp = machopic_indirect_call_target (funexp);
12885 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
12886 generate sibcall RTL explicitly to avoid constraint abort. */
12887 insn = emit_call_insn (
12888 gen_rtx_PARALLEL (VOIDmode,
12890 gen_rtx_CALL (VOIDmode,
12891 funexp, const0_rtx),
12892 gen_rtx_USE (VOIDmode, const0_rtx),
12893 gen_rtx_USE (VOIDmode,
12894 gen_rtx_REG (SImode,
12895 LINK_REGISTER_REGNUM)),
12896 gen_rtx_RETURN (VOIDmode))));
12897 SIBLING_CALL_P (insn) = 1;
12900 /* Run just enough of rest_of_compilation to get the insns emitted.
12901 There's not really enough bulk here to make other passes such as
12902 instruction scheduling worth while. Note that use_thunk calls
12903 assemble_start_function and assemble_end_function. */
12904 insn = get_insns ();
12905 insn_locators_initialize ();
12906 shorten_branches (insn);
12907 final_start_function (insn, file, 1);
12908 final (insn, file, 1, 0);
12909 final_end_function ();
12911 reload_completed = 0;
12912 epilogue_completed = 0;
12913 no_new_pseudos = 0;
12916 /* A quick summary of the various types of 'constant-pool tables'
12919 Target Flags Name One table per
12920 AIX (none) AIX TOC object file
12921 AIX -mfull-toc AIX TOC object file
12922 AIX -mminimal-toc AIX minimal TOC translation unit
12923 SVR4/EABI (none) SVR4 SDATA object file
12924 SVR4/EABI -fpic SVR4 pic object file
12925 SVR4/EABI -fPIC SVR4 PIC translation unit
12926 SVR4/EABI -mrelocatable EABI TOC function
12927 SVR4/EABI -maix AIX TOC object file
12928 SVR4/EABI -maix -mminimal-toc
12929 AIX minimal TOC translation unit
12931 Name Reg. Set by entries contains:
12932 made by addrs? fp? sum?
12934 AIX TOC 2 crt0 as Y option option
12935 AIX minimal TOC 30 prolog gcc Y Y option
12936 SVR4 SDATA 13 crt0 gcc N Y N
12937 SVR4 pic 30 prolog ld Y not yet N
12938 SVR4 PIC 30 prolog gcc Y option option
12939 EABI TOC 30 prolog gcc Y option option
12943 /* Hash functions for the hash table. */
12946 rs6000_hash_constant (rtx k)
12948 enum rtx_code code = GET_CODE (k);
12949 enum machine_mode mode = GET_MODE (k);
12950 unsigned result = (code << 3) ^ mode;
12951 const char *format;
12954 format = GET_RTX_FORMAT (code);
12955 flen = strlen (format);
12961 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
12964 if (mode != VOIDmode)
12965 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
12977 for (; fidx < flen; fidx++)
12978 switch (format[fidx])
12983 const char *str = XSTR (k, fidx);
12984 len = strlen (str);
12985 result = result * 613 + len;
12986 for (i = 0; i < len; i++)
12987 result = result * 613 + (unsigned) str[i];
12992 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
12996 result = result * 613 + (unsigned) XINT (k, fidx);
12999 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
13000 result = result * 613 + (unsigned) XWINT (k, fidx);
13004 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
13005 result = result * 613 + (unsigned) (XWINT (k, fidx)
13019 toc_hash_function (const void *hash_entry)
13021 const struct toc_hash_struct *thc =
13022 (const struct toc_hash_struct *) hash_entry;
13023 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13026 /* Compare H1 and H2 for equivalence. */
13029 toc_hash_eq (const void *h1, const void *h2)
13031 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13032 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13034 if (((const struct toc_hash_struct *) h1)->key_mode
13035 != ((const struct toc_hash_struct *) h2)->key_mode)
13038 return rtx_equal_p (r1, r2);
13041 /* These are the names given by the C++ front-end to vtables, and
13042 vtable-like objects. Ideally, this logic should not be here;
13043 instead, there should be some programmatic way of inquiring as
13044 to whether or not an object is a vtable. */
13046 #define VTABLE_NAME_P(NAME) \
13047 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
13048 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
13049 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
13050 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
13053 rs6000_output_symbol_ref (FILE *file, rtx x)
13055 /* Currently C++ toc references to vtables can be emitted before it
13056 is decided whether the vtable is public or private. If this is
13057 the case, then the linker will eventually complain that there is
13058 a reference to an unknown section. Thus, for vtables only,
13059 we emit the TOC reference to reference the symbol and not the
13061 const char *name = XSTR (x, 0);
13063 if (VTABLE_NAME_P (name))
13065 RS6000_OUTPUT_BASENAME (file, name);
13068 assemble_name (file, name);
13071 /* Output a TOC entry. We derive the entry name from what is being
13075 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
13078 const char *name = buf;
13079 const char *real_name;
13086 /* When the linker won't eliminate them, don't output duplicate
13087 TOC entries (this happens on AIX if there is any kind of TOC,
13088 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
13090 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
13092 struct toc_hash_struct *h;
13095 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
13096 time because GGC is not initialized at that point. */
13097 if (toc_hash_table == NULL)
13098 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
13099 toc_hash_eq, NULL);
13101 h = ggc_alloc (sizeof (*h));
13103 h->key_mode = mode;
13104 h->labelno = labelno;
13106 found = htab_find_slot (toc_hash_table, h, 1);
13107 if (*found == NULL)
13109 else /* This is indeed a duplicate.
13110 Set this label equal to that label. */
13112 fputs ("\t.set ", file);
13113 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13114 fprintf (file, "%d,", labelno);
13115 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13116 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
13122 /* If we're going to put a double constant in the TOC, make sure it's
13123 aligned properly when strict alignment is on. */
13124 if (GET_CODE (x) == CONST_DOUBLE
13125 && STRICT_ALIGNMENT
13126 && GET_MODE_BITSIZE (mode) >= 64
13127 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
13128 ASM_OUTPUT_ALIGN (file, 3);
13131 (*targetm.asm_out.internal_label) (file, "LC", labelno);
13133 /* Handle FP constants specially. Note that if we have a minimal
13134 TOC, things we put here aren't actually in the TOC, so we can allow
13136 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
13138 REAL_VALUE_TYPE rv;
13141 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13142 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
13146 if (TARGET_MINIMAL_TOC)
13147 fputs (DOUBLE_INT_ASM_OP, file);
13149 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13150 k[0] & 0xffffffff, k[1] & 0xffffffff,
13151 k[2] & 0xffffffff, k[3] & 0xffffffff);
13152 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
13153 k[0] & 0xffffffff, k[1] & 0xffffffff,
13154 k[2] & 0xffffffff, k[3] & 0xffffffff);
13159 if (TARGET_MINIMAL_TOC)
13160 fputs ("\t.long ", file);
13162 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13163 k[0] & 0xffffffff, k[1] & 0xffffffff,
13164 k[2] & 0xffffffff, k[3] & 0xffffffff);
13165 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
13166 k[0] & 0xffffffff, k[1] & 0xffffffff,
13167 k[2] & 0xffffffff, k[3] & 0xffffffff);
13171 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
13173 REAL_VALUE_TYPE rv;
13176 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13177 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
13181 if (TARGET_MINIMAL_TOC)
13182 fputs (DOUBLE_INT_ASM_OP, file);
13184 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13185 k[0] & 0xffffffff, k[1] & 0xffffffff);
13186 fprintf (file, "0x%lx%08lx\n",
13187 k[0] & 0xffffffff, k[1] & 0xffffffff);
13192 if (TARGET_MINIMAL_TOC)
13193 fputs ("\t.long ", file);
13195 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13196 k[0] & 0xffffffff, k[1] & 0xffffffff);
13197 fprintf (file, "0x%lx,0x%lx\n",
13198 k[0] & 0xffffffff, k[1] & 0xffffffff);
13202 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
13204 REAL_VALUE_TYPE rv;
13207 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13208 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
13212 if (TARGET_MINIMAL_TOC)
13213 fputs (DOUBLE_INT_ASM_OP, file);
13215 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13216 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
13221 if (TARGET_MINIMAL_TOC)
13222 fputs ("\t.long ", file);
13224 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13225 fprintf (file, "0x%lx\n", l & 0xffffffff);
13229 else if (GET_MODE (x) == VOIDmode
13230 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
13232 unsigned HOST_WIDE_INT low;
13233 HOST_WIDE_INT high;
13235 if (GET_CODE (x) == CONST_DOUBLE)
13237 low = CONST_DOUBLE_LOW (x);
13238 high = CONST_DOUBLE_HIGH (x);
13241 #if HOST_BITS_PER_WIDE_INT == 32
13244 high = (low & 0x80000000) ? ~0 : 0;
13248 low = INTVAL (x) & 0xffffffff;
13249 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
13253 /* TOC entries are always Pmode-sized, but since this
13254 is a bigendian machine then if we're putting smaller
13255 integer constants in the TOC we have to pad them.
13256 (This is still a win over putting the constants in
13257 a separate constant pool, because then we'd have
13258 to have both a TOC entry _and_ the actual constant.)
13260 For a 32-bit target, CONST_INT values are loaded and shifted
13261 entirely within `low' and can be stored in one TOC entry. */
13263 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
13264 abort ();/* It would be easy to make this work, but it doesn't now. */
13266 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
13268 #if HOST_BITS_PER_WIDE_INT == 32
13269 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
13270 POINTER_SIZE, &low, &high, 0);
13273 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
13274 high = (HOST_WIDE_INT) low >> 32;
13281 if (TARGET_MINIMAL_TOC)
13282 fputs (DOUBLE_INT_ASM_OP, file);
13284 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13285 (long) high & 0xffffffff, (long) low & 0xffffffff);
13286 fprintf (file, "0x%lx%08lx\n",
13287 (long) high & 0xffffffff, (long) low & 0xffffffff);
13292 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
13294 if (TARGET_MINIMAL_TOC)
13295 fputs ("\t.long ", file);
13297 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13298 (long) high & 0xffffffff, (long) low & 0xffffffff);
13299 fprintf (file, "0x%lx,0x%lx\n",
13300 (long) high & 0xffffffff, (long) low & 0xffffffff);
13304 if (TARGET_MINIMAL_TOC)
13305 fputs ("\t.long ", file);
13307 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
13308 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
13314 if (GET_CODE (x) == CONST)
13316 if (GET_CODE (XEXP (x, 0)) != PLUS)
13319 base = XEXP (XEXP (x, 0), 0);
13320 offset = INTVAL (XEXP (XEXP (x, 0), 1));
13323 if (GET_CODE (base) == SYMBOL_REF)
13324 name = XSTR (base, 0);
13325 else if (GET_CODE (base) == LABEL_REF)
13326 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
13327 else if (GET_CODE (base) == CODE_LABEL)
13328 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
13332 real_name = (*targetm.strip_name_encoding) (name);
13333 if (TARGET_MINIMAL_TOC)
13334 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
13337 fprintf (file, "\t.tc %s", real_name);
13340 fprintf (file, ".N%d", - offset);
13342 fprintf (file, ".P%d", offset);
13344 fputs ("[TC],", file);
13347 /* Currently C++ toc references to vtables can be emitted before it
13348 is decided whether the vtable is public or private. If this is
13349 the case, then the linker will eventually complain that there is
13350 a TOC reference to an unknown section. Thus, for vtables only,
13351 we emit the TOC reference to reference the symbol and not the
13353 if (VTABLE_NAME_P (name))
13355 RS6000_OUTPUT_BASENAME (file, name);
13357 fprintf (file, "%d", offset);
13358 else if (offset > 0)
13359 fprintf (file, "+%d", offset);
13362 output_addr_const (file, x);
13366 /* Output an assembler pseudo-op to write an ASCII string of N characters
13367 starting at P to FILE.
13369 On the RS/6000, we have to do this using the .byte operation and
13370 write out special characters outside the quoted string.
13371 Also, the assembler is broken; very long strings are truncated,
13372 so we must artificially break them up early. */
13375 output_ascii (FILE *file, const char *p, int n)
13378 int i, count_string;
13379 const char *for_string = "\t.byte \"";
13380 const char *for_decimal = "\t.byte ";
13381 const char *to_close = NULL;
13384 for (i = 0; i < n; i++)
13387 if (c >= ' ' && c < 0177)
13390 fputs (for_string, file);
13393 /* Write two quotes to get one. */
13401 for_decimal = "\"\n\t.byte ";
13405 if (count_string >= 512)
13407 fputs (to_close, file);
13409 for_string = "\t.byte \"";
13410 for_decimal = "\t.byte ";
13418 fputs (for_decimal, file);
13419 fprintf (file, "%d", c);
13421 for_string = "\n\t.byte \"";
13422 for_decimal = ", ";
13428 /* Now close the string if we have written one. Then end the line. */
13430 fputs (to_close, file);
13433 /* Generate a unique section name for FILENAME for a section type
13434 represented by SECTION_DESC. Output goes into BUF.
13436 SECTION_DESC can be any string, as long as it is different for each
13437 possible section type.
13439 We name the section in the same manner as xlc. The name begins with an
13440 underscore followed by the filename (after stripping any leading directory
13441 names) with the last period replaced by the string SECTION_DESC. If
13442 FILENAME does not contain a period, SECTION_DESC is appended to the end of
13446 rs6000_gen_section_name (char **buf, const char *filename,
13447 const char *section_desc)
13449 const char *q, *after_last_slash, *last_period = 0;
13453 after_last_slash = filename;
13454 for (q = filename; *q; q++)
13457 after_last_slash = q + 1;
13458 else if (*q == '.')
13462 len = strlen (after_last_slash) + strlen (section_desc) + 2;
13463 *buf = (char *) xmalloc (len);
13468 for (q = after_last_slash; *q; q++)
13470 if (q == last_period)
13472 strcpy (p, section_desc);
13473 p += strlen (section_desc);
13477 else if (ISALNUM (*q))
13481 if (last_period == 0)
13482 strcpy (p, section_desc);
13487 /* Emit profile function. */
13490 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
13492 if (TARGET_PROFILE_KERNEL)
13495 if (DEFAULT_ABI == ABI_AIX)
13497 #ifndef NO_PROFILE_COUNTERS
13498 # define NO_PROFILE_COUNTERS 0
13500 if (NO_PROFILE_COUNTERS)
13501 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
13505 const char *label_name;
13508 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13509 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
13510 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
13512 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
13516 else if (DEFAULT_ABI == ABI_DARWIN)
13518 const char *mcount_name = RS6000_MCOUNT;
13519 int caller_addr_regno = LINK_REGISTER_REGNUM;
13521 /* Be conservative and always set this, at least for now. */
13522 current_function_uses_pic_offset_table = 1;
13525 /* For PIC code, set up a stub and collect the caller's address
13526 from r0, which is where the prologue puts it. */
13527 if (MACHOPIC_INDIRECT)
13529 mcount_name = machopic_stub_name (mcount_name);
13530 if (current_function_uses_pic_offset_table)
13531 caller_addr_regno = 0;
13534 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
13536 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
13540 /* Write function profiler code. */
13543 output_function_profiler (FILE *file, int labelno)
13548 switch (DEFAULT_ABI)
13557 warning ("no profiling of 64-bit code for this ABI");
13560 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13561 fprintf (file, "\tmflr %s\n", reg_names[0]);
13564 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
13565 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13566 reg_names[0], save_lr, reg_names[1]);
13567 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
13568 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
13569 assemble_name (file, buf);
13570 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
13572 else if (flag_pic > 1)
13574 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13575 reg_names[0], save_lr, reg_names[1]);
13576 /* Now, we need to get the address of the label. */
13577 fputs ("\tbl 1f\n\t.long ", file);
13578 assemble_name (file, buf);
13579 fputs ("-.\n1:", file);
13580 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
13581 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
13582 reg_names[0], reg_names[11]);
13583 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
13584 reg_names[0], reg_names[0], reg_names[11]);
13588 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
13589 assemble_name (file, buf);
13590 fputs ("@ha\n", file);
13591 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13592 reg_names[0], save_lr, reg_names[1]);
13593 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
13594 assemble_name (file, buf);
13595 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
13598 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13599 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13604 if (!TARGET_PROFILE_KERNEL)
13606 /* Don't do anything, done in output_profile_hook (). */
13613 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
13614 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
13616 if (current_function_needs_context)
13618 asm_fprintf (file, "\tstd %s,24(%s)\n",
13619 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13620 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13621 asm_fprintf (file, "\tld %s,24(%s)\n",
13622 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13625 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13633 rs6000_use_dfa_pipeline_interface (void)
13638 /* Power4 load update and store update instructions are cracked into a
13639 load or store and an integer insn which are executed in the same cycle.
13640 Branches have their own dispatch slot which does not count against the
13641 GCC issue rate, but it changes the program flow so there are no other
13642 instructions to issue in this cycle. */
13645 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
13646 int verbose ATTRIBUTE_UNUSED,
13647 rtx insn, int more)
13649 if (GET_CODE (PATTERN (insn)) == USE
13650 || GET_CODE (PATTERN (insn)) == CLOBBER)
13653 if (rs6000_cpu == PROCESSOR_POWER4)
13655 if (is_microcoded_insn (insn))
13657 else if (is_cracked_insn (insn))
13658 return more > 2 ? more - 2 : 0;
13664 /* Adjust the cost of a scheduling dependency. Return the new cost of
13665 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13668 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
13671 if (! recog_memoized (insn))
13674 if (REG_NOTE_KIND (link) != 0)
13677 if (REG_NOTE_KIND (link) == 0)
13679 /* Data dependency; DEP_INSN writes a register that INSN reads
13680 some cycles later. */
13681 switch (get_attr_type (insn))
13684 /* Tell the first scheduling pass about the latency between
13685 a mtctr and bctr (and mtlr and br/blr). The first
13686 scheduling pass will not know about this latency since
13687 the mtctr instruction, which has the latency associated
13688 to it, will be generated by reload. */
13689 return TARGET_POWER ? 5 : 4;
13691 /* Leave some extra cycles between a compare and its
13692 dependent branch, to inhibit expensive mispredicts. */
13693 if ((rs6000_cpu_attr == CPU_PPC603
13694 || rs6000_cpu_attr == CPU_PPC604
13695 || rs6000_cpu_attr == CPU_PPC604E
13696 || rs6000_cpu_attr == CPU_PPC620
13697 || rs6000_cpu_attr == CPU_PPC630
13698 || rs6000_cpu_attr == CPU_PPC750
13699 || rs6000_cpu_attr == CPU_PPC7400
13700 || rs6000_cpu_attr == CPU_PPC7450
13701 || rs6000_cpu_attr == CPU_POWER4)
13702 && recog_memoized (dep_insn)
13703 && (INSN_CODE (dep_insn) >= 0)
13704 && (get_attr_type (dep_insn) == TYPE_CMP
13705 || get_attr_type (dep_insn) == TYPE_COMPARE
13706 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13707 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13708 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13709 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13710 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13711 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13716 /* Fall out to return default cost. */
13722 /* The function returns a true if INSN is microcoded.
13723 Return false otherwise. */
13726 is_microcoded_insn (rtx insn)
13728 if (!insn || !INSN_P (insn)
13729 || GET_CODE (PATTERN (insn)) == USE
13730 || GET_CODE (PATTERN (insn)) == CLOBBER)
13733 if (rs6000_cpu == PROCESSOR_POWER4)
13735 enum attr_type type = get_attr_type (insn);
13736 if (type == TYPE_LOAD_EXT_U
13737 || type == TYPE_LOAD_EXT_UX
13738 || type == TYPE_LOAD_UX
13739 || type == TYPE_STORE_UX
13740 || type == TYPE_MFCR)
13747 /* The function returns a nonzero value if INSN can be scheduled only
13748 as the first insn in a dispatch group ("dispatch-slot restricted").
13749 In this case, the returned value indicates how many dispatch slots
13750 the insn occupies (at the beginning of the group).
13751 Return 0 otherwise. */
13754 is_dispatch_slot_restricted (rtx insn)
13756 enum attr_type type;
13758 if (rs6000_cpu != PROCESSOR_POWER4)
13762 || insn == NULL_RTX
13763 || GET_CODE (insn) == NOTE
13764 || GET_CODE (PATTERN (insn)) == USE
13765 || GET_CODE (PATTERN (insn)) == CLOBBER)
13768 type = get_attr_type (insn);
13774 case TYPE_DELAYED_CR:
13775 case TYPE_CR_LOGICAL:
13787 /* The function returns true if INSN is cracked into 2 instructions
13788 by the processor (and therefore occupies 2 issue slots). */
13791 is_cracked_insn (rtx insn)
13793 if (!insn || !INSN_P (insn)
13794 || GET_CODE (PATTERN (insn)) == USE
13795 || GET_CODE (PATTERN (insn)) == CLOBBER)
13798 if (rs6000_cpu == PROCESSOR_POWER4)
13800 enum attr_type type = get_attr_type (insn);
13801 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13802 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13803 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13804 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13805 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13806 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13807 || type == TYPE_IDIV || type == TYPE_LDIV
13808 || type == TYPE_INSERT_WORD)
13815 /* The function returns true if INSN can be issued only from
13816 the branch slot. */
13819 is_branch_slot_insn (rtx insn)
13821 if (!insn || !INSN_P (insn)
13822 || GET_CODE (PATTERN (insn)) == USE
13823 || GET_CODE (PATTERN (insn)) == CLOBBER)
13826 if (rs6000_cpu == PROCESSOR_POWER4)
13828 enum attr_type type = get_attr_type (insn);
13829 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
13837 /* A C statement (sans semicolon) to update the integer scheduling
13838 priority INSN_PRIORITY (INSN). Increase the priority to execute the
13839 INSN earlier, reduce the priority to execute INSN later. Do not
13840 define this macro if you do not need to adjust the scheduling
13841 priorities of insns. */
13844 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
13846 /* On machines (like the 750) which have asymmetric integer units,
13847 where one integer unit can do multiply and divides and the other
13848 can't, reduce the priority of multiply/divide so it is scheduled
13849 before other integer operations. */
13852 if (! INSN_P (insn))
13855 if (GET_CODE (PATTERN (insn)) == USE)
13858 switch (rs6000_cpu_attr) {
13860 switch (get_attr_type (insn))
13867 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
13868 priority, priority);
13869 if (priority >= 0 && priority < 0x01000000)
13876 if (is_dispatch_slot_restricted (insn)
13877 && reload_completed
13878 && current_sched_info->sched_max_insns_priority
13879 && rs6000_sched_restricted_insns_priority)
13882 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
13883 if (rs6000_sched_restricted_insns_priority == 1)
13884 /* Attach highest priority to insn. This means that in
13885 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
13886 precede 'priority' (critical path) considerations. */
13887 return current_sched_info->sched_max_insns_priority;
13888 else if (rs6000_sched_restricted_insns_priority == 2)
13889 /* Increase priority of insn by a minimal amount. This means that in
13890 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
13891 precede dispatch-slot restriction considerations. */
13892 return (priority + 1);
13898 /* Return how many instructions the machine can issue per cycle. */
13901 rs6000_issue_rate (void)
13903 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13904 if (!reload_completed)
13907 switch (rs6000_cpu_attr) {
13908 case CPU_RIOS1: /* ? */
13910 case CPU_PPC601: /* ? */
13932 /* Return how many instructions to look ahead for better insn
13936 rs6000_use_sched_lookahead (void)
13938 if (rs6000_cpu_attr == CPU_PPC8540)
13943 /* Determine is PAT refers to memory. */
13946 is_mem_ref (rtx pat)
13952 if (GET_CODE (pat) == MEM)
13955 /* Recursively process the pattern. */
13956 fmt = GET_RTX_FORMAT (GET_CODE (pat));
13958 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
13961 ret |= is_mem_ref (XEXP (pat, i));
13962 else if (fmt[i] == 'E')
13963 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
13964 ret |= is_mem_ref (XVECEXP (pat, i, j));
13970 /* Determine if PAT is a PATTERN of a load insn. */
13973 is_load_insn1 (rtx pat)
13975 if (!pat || pat == NULL_RTX)
13978 if (GET_CODE (pat) == SET)
13979 return is_mem_ref (SET_SRC (pat));
13981 if (GET_CODE (pat) == PARALLEL)
13985 for (i = 0; i < XVECLEN (pat, 0); i++)
13986 if (is_load_insn1 (XVECEXP (pat, 0, i)))
13993 /* Determine if INSN loads from memory. */
13996 is_load_insn (rtx insn)
13998 if (!insn || !INSN_P (insn))
14001 if (GET_CODE (insn) == CALL_INSN)
14004 return is_load_insn1 (PATTERN (insn));
14007 /* Determine if PAT is a PATTERN of a store insn. */
14010 is_store_insn1 (rtx pat)
14012 if (!pat || pat == NULL_RTX)
14015 if (GET_CODE (pat) == SET)
14016 return is_mem_ref (SET_DEST (pat));
14018 if (GET_CODE (pat) == PARALLEL)
14022 for (i = 0; i < XVECLEN (pat, 0); i++)
14023 if (is_store_insn1 (XVECEXP (pat, 0, i)))
14030 /* Determine if INSN stores to memory. */
14033 is_store_insn (rtx insn)
14035 if (!insn || !INSN_P (insn))
14038 return is_store_insn1 (PATTERN (insn));
14041 /* Returns whether the dependence between INSN and NEXT is considered
14042 costly by the given target. */
14045 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
14047 /* If the flag is not enbled - no dependence is considered costly;
14048 allow all dependent insns in the same group.
14049 This is the most aggressive option. */
14050 if (rs6000_sched_costly_dep == no_dep_costly)
14053 /* If the flag is set to 1 - a dependence is always considered costly;
14054 do not allow dependent instructions in the same group.
14055 This is the most conservative option. */
14056 if (rs6000_sched_costly_dep == all_deps_costly)
14059 if (rs6000_sched_costly_dep == store_to_load_dep_costly
14060 && is_load_insn (next)
14061 && is_store_insn (insn))
14062 /* Prevent load after store in the same group. */
14065 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
14066 && is_load_insn (next)
14067 && is_store_insn (insn)
14068 && (!link || (int) REG_NOTE_KIND (link) == 0))
14069 /* Prevent load after store in the same group if it is a true dependence. */
14072 /* The flag is set to X; dependences with latency >= X are considered costly,
14073 and will not be scheduled in the same group. */
14074 if (rs6000_sched_costly_dep <= max_dep_latency
14075 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
14081 /* Return the next insn after INSN that is found before TAIL is reached,
14082 skipping any "non-active" insns - insns that will not actually occupy
14083 an issue slot. Return NULL_RTX if such an insn is not found. */
14086 get_next_active_insn (rtx insn, rtx tail)
14090 if (!insn || insn == tail)
14093 next_insn = NEXT_INSN (insn);
14096 && next_insn != tail
14097 && (GET_CODE(next_insn) == NOTE
14098 || GET_CODE (PATTERN (next_insn)) == USE
14099 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
14101 next_insn = NEXT_INSN (next_insn);
14104 if (!next_insn || next_insn == tail)
14110 /* Return whether the presence of INSN causes a dispatch group termination
14111 of group WHICH_GROUP.
14113 If WHICH_GROUP == current_group, this function will return true if INSN
14114 causes the termination of the current group (i.e, the dispatch group to
14115 which INSN belongs). This means that INSN will be the last insn in the
14116 group it belongs to.
14118 If WHICH_GROUP == previous_group, this function will return true if INSN
14119 causes the termination of the previous group (i.e, the dispatch group that
14120 precedes the group to which INSN belongs). This means that INSN will be
14121 the first insn in the group it belongs to). */
14124 insn_terminates_group_p (rtx insn, enum group_termination which_group)
14126 enum attr_type type;
14131 type = get_attr_type (insn);
14133 if (is_microcoded_insn (insn))
14136 if (which_group == current_group)
14138 if (is_branch_slot_insn (insn))
14142 else if (which_group == previous_group)
14144 if (is_dispatch_slot_restricted (insn))
14152 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14153 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
14156 is_costly_group (rtx *group_insns, rtx next_insn)
14161 int issue_rate = rs6000_issue_rate ();
14163 for (i = 0; i < issue_rate; i++)
14165 rtx insn = group_insns[i];
14168 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
14170 rtx next = XEXP (link, 0);
14171 if (next == next_insn)
14173 cost = insn_cost (insn, link, next_insn);
14174 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
14183 /* Utility of the function redefine_groups.
14184 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14185 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
14186 to keep it "far" (in a separate group) from GROUP_INSNS, following
14187 one of the following schemes, depending on the value of the flag
14188 -minsert_sched_nops = X:
14189 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14190 in order to force NEXT_INSN into a separate group.
14191 (2) X < sched_finish_regroup_exact: insert exactly X nops.
14192 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14193 insertion (has a group just ended, how many vacant issue slots remain in the
14194 last group, and how many dispatch groups were encountered so far). */
14197 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
14198 bool *group_end, int can_issue_more, int *group_count)
14202 int issue_rate = rs6000_issue_rate ();
14203 bool end = *group_end;
14206 if (next_insn == NULL_RTX)
14207 return can_issue_more;
14209 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
14210 return can_issue_more;
14212 force = is_costly_group (group_insns, next_insn);
14214 return can_issue_more;
14216 if (sched_verbose > 6)
14217 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
14218 *group_count ,can_issue_more);
14220 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
14223 can_issue_more = 0;
14225 /* Since only a branch can be issued in the last issue_slot, it is
14226 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
14227 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
14228 in this case the last nop will start a new group and the branch will be
14229 forced to the new group. */
14230 if (can_issue_more && !is_branch_slot_insn (next_insn))
14233 while (can_issue_more > 0)
14236 emit_insn_before (nop, next_insn);
14244 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
14246 int n_nops = rs6000_sched_insert_nops;
14248 /* Nops can't be issued from the branch slot, so the effective
14249 issue_rate for nops is 'issue_rate - 1'. */
14250 if (can_issue_more == 0)
14251 can_issue_more = issue_rate;
14253 if (can_issue_more == 0)
14255 can_issue_more = issue_rate - 1;
14258 for (i = 0; i < issue_rate; i++)
14260 group_insns[i] = 0;
14267 emit_insn_before (nop, next_insn);
14268 if (can_issue_more == issue_rate - 1) /* new group begins */
14271 if (can_issue_more == 0)
14273 can_issue_more = issue_rate - 1;
14276 for (i = 0; i < issue_rate; i++)
14278 group_insns[i] = 0;
14284 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
14287 *group_end = /* Is next_insn going to start a new group? */
14289 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14290 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14291 || (can_issue_more < issue_rate &&
14292 insn_terminates_group_p (next_insn, previous_group)));
14293 if (*group_end && end)
14296 if (sched_verbose > 6)
14297 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
14298 *group_count, can_issue_more);
14299 return can_issue_more;
14302 return can_issue_more;
14305 /* This function tries to synch the dispatch groups that the compiler "sees"
14306 with the dispatch groups that the processor dispatcher is expected to
14307 form in practice. It tries to achieve this synchronization by forcing the
14308 estimated processor grouping on the compiler (as opposed to the function
14309 'pad_goups' which tries to force the scheduler's grouping on the processor).
14311 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14312 examines the (estimated) dispatch groups that will be formed by the processor
14313 dispatcher. It marks these group boundaries to reflect the estimated
14314 processor grouping, overriding the grouping that the scheduler had marked.
14315 Depending on the value of the flag '-minsert-sched-nops' this function can
14316 force certain insns into separate groups or force a certain distance between
14317 them by inserting nops, for example, if there exists a "costly dependence"
14320 The function estimates the group boundaries that the processor will form as
14321 folllows: It keeps track of how many vacant issue slots are available after
14322 each insn. A subsequent insn will start a new group if one of the following
14324 - no more vacant issue slots remain in the current dispatch group.
14325 - only the last issue slot, which is the branch slot, is vacant, but the next
14326 insn is not a branch.
14327 - only the last 2 or less issue slots, including the branch slot, are vacant,
14328 which means that a cracked insn (which occupies two issue slots) can't be
14329 issued in this group.
14330 - less than 'issue_rate' slots are vacant, and the next insn always needs to
14331 start a new group. */
14334 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14336 rtx insn, next_insn;
14338 int can_issue_more;
14341 int group_count = 0;
14345 issue_rate = rs6000_issue_rate ();
14346 group_insns = alloca (issue_rate * sizeof (rtx));
14347 for (i = 0; i < issue_rate; i++)
14349 group_insns[i] = 0;
14351 can_issue_more = issue_rate;
14353 insn = get_next_active_insn (prev_head_insn, tail);
14356 while (insn != NULL_RTX)
14358 slot = (issue_rate - can_issue_more);
14359 group_insns[slot] = insn;
14361 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14362 if (insn_terminates_group_p (insn, current_group))
14363 can_issue_more = 0;
14365 next_insn = get_next_active_insn (insn, tail);
14366 if (next_insn == NULL_RTX)
14367 return group_count + 1;
14369 group_end = /* Is next_insn going to start a new group? */
14370 (can_issue_more == 0
14371 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14372 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14373 || (can_issue_more < issue_rate &&
14374 insn_terminates_group_p (next_insn, previous_group)));
14376 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
14377 next_insn, &group_end, can_issue_more, &group_count);
14382 can_issue_more = 0;
14383 for (i = 0; i < issue_rate; i++)
14385 group_insns[i] = 0;
14389 if (GET_MODE (next_insn) == TImode && can_issue_more)
14390 PUT_MODE(next_insn, VOIDmode);
14391 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
14392 PUT_MODE (next_insn, TImode);
14395 if (can_issue_more == 0)
14396 can_issue_more = issue_rate;
14399 return group_count;
14402 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14403 dispatch group boundaries that the scheduler had marked. Pad with nops
14404 any dispatch groups which have vacant issue slots, in order to force the
14405 scheduler's grouping on the processor dispatcher. The function
14406 returns the number of dispatch groups found. */
14409 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14411 rtx insn, next_insn;
14414 int can_issue_more;
14416 int group_count = 0;
14418 /* Initialize issue_rate. */
14419 issue_rate = rs6000_issue_rate ();
14420 can_issue_more = issue_rate;
14422 insn = get_next_active_insn (prev_head_insn, tail);
14423 next_insn = get_next_active_insn (insn, tail);
14425 while (insn != NULL_RTX)
14428 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14430 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
14432 if (next_insn == NULL_RTX)
14437 /* If the scheduler had marked group termination at this location
14438 (between insn and next_indn), and neither insn nor next_insn will
14439 force group termination, pad the group with nops to force group
14442 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
14443 && !insn_terminates_group_p (insn, current_group)
14444 && !insn_terminates_group_p (next_insn, previous_group))
14446 if (!is_branch_slot_insn(next_insn))
14449 while (can_issue_more)
14452 emit_insn_before (nop, next_insn);
14457 can_issue_more = issue_rate;
14462 next_insn = get_next_active_insn (insn, tail);
14465 return group_count;
14468 /* The following function is called at the end of scheduling BB.
14469 After reload, it inserts nops at insn group bundling. */
14472 rs6000_sched_finish (FILE *dump, int sched_verbose)
14477 fprintf (dump, "=== Finishing schedule.\n");
14479 if (reload_completed && rs6000_cpu == PROCESSOR_POWER4)
14481 if (rs6000_sched_insert_nops == sched_finish_none)
14484 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14485 n_groups = pad_groups (dump, sched_verbose,
14486 current_sched_info->prev_head,
14487 current_sched_info->next_tail);
14489 n_groups = redefine_groups (dump, sched_verbose,
14490 current_sched_info->prev_head,
14491 current_sched_info->next_tail);
14493 if (sched_verbose >= 6)
14495 fprintf (dump, "ngroups = %d\n", n_groups);
14496 print_rtl (dump, current_sched_info->prev_head);
14497 fprintf (dump, "Done finish_sched\n");
14502 /* Length in units of the trampoline for entering a nested function. */
14505 rs6000_trampoline_size (void)
14509 switch (DEFAULT_ABI)
14515 ret = (TARGET_32BIT) ? 12 : 24;
14520 ret = (TARGET_32BIT) ? 40 : 48;
14527 /* Emit RTL insns to initialize the variable parts of a trampoline.
14528 FNADDR is an RTX for the address of the function's pure code.
14529 CXT is an RTX for the static chain value for the function. */
14532 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
14534 enum machine_mode pmode = Pmode;
14535 int regsize = (TARGET_32BIT) ? 4 : 8;
14536 rtx ctx_reg = force_reg (pmode, cxt);
14538 switch (DEFAULT_ABI)
14543 /* Macros to shorten the code expansions below. */
14544 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
14545 #define MEM_PLUS(addr,offset) \
14546 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
14548 /* Under AIX, just build the 3 word function descriptor */
14551 rtx fn_reg = gen_reg_rtx (pmode);
14552 rtx toc_reg = gen_reg_rtx (pmode);
14553 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
14554 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
14555 emit_move_insn (MEM_DEREF (addr), fn_reg);
14556 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
14557 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
14561 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
14564 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
14565 FALSE, VOIDmode, 4,
14567 GEN_INT (rs6000_trampoline_size ()), SImode,
14577 /* Table of valid machine attributes. */
14579 const struct attribute_spec rs6000_attribute_table[] =
14581 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
14582 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14583 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14584 { NULL, 0, 0, false, false, false, NULL }
14587 /* Handle a "longcall" or "shortcall" attribute; arguments as in
14588 struct attribute_spec.handler. */
14591 rs6000_handle_longcall_attribute (tree *node, tree name,
14592 tree args ATTRIBUTE_UNUSED,
14593 int flags ATTRIBUTE_UNUSED,
14594 bool *no_add_attrs)
14596 if (TREE_CODE (*node) != FUNCTION_TYPE
14597 && TREE_CODE (*node) != FIELD_DECL
14598 && TREE_CODE (*node) != TYPE_DECL)
14600 warning ("`%s' attribute only applies to functions",
14601 IDENTIFIER_POINTER (name));
14602 *no_add_attrs = true;
14608 /* Set longcall attributes on all functions declared when
14609 rs6000_default_long_calls is true. */
14611 rs6000_set_default_type_attributes (tree type)
14613 if (rs6000_default_long_calls
14614 && (TREE_CODE (type) == FUNCTION_TYPE
14615 || TREE_CODE (type) == METHOD_TYPE))
14616 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
14618 TYPE_ATTRIBUTES (type));
14621 /* Return a reference suitable for calling a function with the
14622 longcall attribute. */
14625 rs6000_longcall_ref (rtx call_ref)
14627 const char *call_name;
14630 if (GET_CODE (call_ref) != SYMBOL_REF)
14633 /* System V adds '.' to the internal name, so skip them. */
14634 call_name = XSTR (call_ref, 0);
14635 if (*call_name == '.')
14637 while (*call_name == '.')
14640 node = get_identifier (call_name);
14641 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
14644 return force_reg (Pmode, call_ref);
14647 #ifdef USING_ELFOS_H
14649 /* A C statement or statements to switch to the appropriate section
14650 for output of RTX in mode MODE. You can assume that RTX is some
14651 kind of constant in RTL. The argument MODE is redundant except in
14652 the case of a `const_int' rtx. Select the section by calling
14653 `text_section' or one of the alternatives for other sections.
14655 Do not define this macro if you put all constants in the read-only
14659 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
14660 unsigned HOST_WIDE_INT align)
14662 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14665 default_elf_select_rtx_section (mode, x, align);
14668 /* A C statement or statements to switch to the appropriate
14669 section for output of DECL. DECL is either a `VAR_DECL' node
14670 or a constant of some sort. RELOC indicates whether forming
14671 the initial value of DECL requires link-time relocations. */
14674 rs6000_elf_select_section (tree decl, int reloc,
14675 unsigned HOST_WIDE_INT align)
14677 /* Pretend that we're always building for a shared library when
14678 ABI_AIX, because otherwise we end up with dynamic relocations
14679 in read-only sections. This happens for function pointers,
14680 references to vtables in typeinfo, and probably other cases. */
14681 default_elf_select_section_1 (decl, reloc, align,
14682 flag_pic || DEFAULT_ABI == ABI_AIX);
14685 /* A C statement to build up a unique section name, expressed as a
14686 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
14687 RELOC indicates whether the initial value of EXP requires
14688 link-time relocations. If you do not define this macro, GCC will use
14689 the symbol name prefixed by `.' as the section name. Note - this
14690 macro can now be called for uninitialized data items as well as
14691 initialized data and functions. */
14694 rs6000_elf_unique_section (tree decl, int reloc)
14696 /* As above, pretend that we're always building for a shared library
14697 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
14698 default_unique_section_1 (decl, reloc,
14699 flag_pic || DEFAULT_ABI == ABI_AIX);
14702 /* For a SYMBOL_REF, set generic flags and then perform some
14703 target-specific processing.
14705 When the AIX ABI is requested on a non-AIX system, replace the
14706 function name with the real name (with a leading .) rather than the
14707 function descriptor name. This saves a lot of overriding code to
14708 read the prefixes. */
14711 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
14713 default_encode_section_info (decl, rtl, first);
14716 && TREE_CODE (decl) == FUNCTION_DECL
14718 && DEFAULT_ABI == ABI_AIX)
14720 rtx sym_ref = XEXP (rtl, 0);
14721 size_t len = strlen (XSTR (sym_ref, 0));
14722 char *str = alloca (len + 2);
14724 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
14725 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
14730 rs6000_elf_in_small_data_p (tree decl)
14732 if (rs6000_sdata == SDATA_NONE)
14735 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
14737 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
14738 if (strcmp (section, ".sdata") == 0
14739 || strcmp (section, ".sdata2") == 0
14740 || strcmp (section, ".sbss") == 0
14741 || strcmp (section, ".sbss2") == 0
14742 || strcmp (section, ".PPC.EMB.sdata0") == 0
14743 || strcmp (section, ".PPC.EMB.sbss0") == 0)
14748 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
14751 && (unsigned HOST_WIDE_INT) size <= g_switch_value
14752 /* If it's not public, and we're not going to reference it there,
14753 there's no need to put it in the small data section. */
14754 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
14761 #endif /* USING_ELFOS_H */
14764 /* Return a REG that occurs in ADDR with coefficient 1.
14765 ADDR can be effectively incremented by incrementing REG.
14767 r0 is special and we must not select it as an address
14768 register by this routine since our caller will try to
14769 increment the returned register via an "la" instruction. */
14772 find_addr_reg (rtx addr)
14774 while (GET_CODE (addr) == PLUS)
14776 if (GET_CODE (XEXP (addr, 0)) == REG
14777 && REGNO (XEXP (addr, 0)) != 0)
14778 addr = XEXP (addr, 0);
14779 else if (GET_CODE (XEXP (addr, 1)) == REG
14780 && REGNO (XEXP (addr, 1)) != 0)
14781 addr = XEXP (addr, 1);
14782 else if (CONSTANT_P (XEXP (addr, 0)))
14783 addr = XEXP (addr, 1);
14784 else if (CONSTANT_P (XEXP (addr, 1)))
14785 addr = XEXP (addr, 0);
14789 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
14795 rs6000_fatal_bad_address (rtx op)
14797 fatal_insn ("bad address", op);
14803 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
14804 reference and a constant. */
14807 symbolic_operand (rtx op)
14809 switch (GET_CODE (op))
14816 return (GET_CODE (op) == SYMBOL_REF ||
14817 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
14818 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
14819 && GET_CODE (XEXP (op, 1)) == CONST_INT);
14828 static tree branch_island_list = 0;
14830 /* Remember to generate a branch island for far calls to the given
14834 add_compiler_branch_island (tree label_name, tree function_name, int line_number)
14836 tree branch_island = build_tree_list (function_name, label_name);
14837 TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
14838 TREE_CHAIN (branch_island) = branch_island_list;
14839 branch_island_list = branch_island;
14842 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
14843 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
14844 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
14845 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
14847 /* Generate far-jump branch islands for everything on the
14848 branch_island_list. Invoked immediately after the last instruction
14849 of the epilogue has been emitted; the branch-islands must be
14850 appended to, and contiguous with, the function body. Mach-O stubs
14851 are generated in machopic_output_stub(). */
14854 macho_branch_islands (void)
14857 tree branch_island;
14859 for (branch_island = branch_island_list;
14861 branch_island = TREE_CHAIN (branch_island))
14863 const char *label =
14864 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
14866 darwin_strip_name_encoding (
14867 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
14868 char name_buf[512];
14869 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
14870 if (name[0] == '*' || name[0] == '&')
14871 strcpy (name_buf, name+1);
14875 strcpy (name_buf+1, name);
14877 strcpy (tmp_buf, "\n");
14878 strcat (tmp_buf, label);
14879 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
14880 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
14881 fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
14882 BRANCH_ISLAND_LINE_NUMBER(branch_island));
14883 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
14886 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
14887 strcat (tmp_buf, label);
14888 strcat (tmp_buf, "_pic\n");
14889 strcat (tmp_buf, label);
14890 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
14892 strcat (tmp_buf, "\taddis r11,r11,ha16(");
14893 strcat (tmp_buf, name_buf);
14894 strcat (tmp_buf, " - ");
14895 strcat (tmp_buf, label);
14896 strcat (tmp_buf, "_pic)\n");
14898 strcat (tmp_buf, "\tmtlr r0\n");
14900 strcat (tmp_buf, "\taddi r12,r11,lo16(");
14901 strcat (tmp_buf, name_buf);
14902 strcat (tmp_buf, " - ");
14903 strcat (tmp_buf, label);
14904 strcat (tmp_buf, "_pic)\n");
14906 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
14910 strcat (tmp_buf, ":\nlis r12,hi16(");
14911 strcat (tmp_buf, name_buf);
14912 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
14913 strcat (tmp_buf, name_buf);
14914 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
14916 output_asm_insn (tmp_buf, 0);
14917 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
14918 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
14919 fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
14920 BRANCH_ISLAND_LINE_NUMBER (branch_island));
14921 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
14924 branch_island_list = 0;
14927 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
14928 already there or not. */
14931 no_previous_def (tree function_name)
14933 tree branch_island;
14934 for (branch_island = branch_island_list;
14936 branch_island = TREE_CHAIN (branch_island))
14937 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
14942 /* GET_PREV_LABEL gets the label name from the previous definition of
14946 get_prev_label (tree function_name)
14948 tree branch_island;
14949 for (branch_island = branch_island_list;
14951 branch_island = TREE_CHAIN (branch_island))
14952 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
14953 return BRANCH_ISLAND_LABEL_NAME (branch_island);
14957 /* INSN is either a function call or a millicode call. It may have an
14958 unconditional jump in its delay slot.
14960 CALL_DEST is the routine we are calling. */
14963 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
14965 static char buf[256];
14966 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
14967 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
14970 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
14972 if (no_previous_def (funname))
14974 int line_number = 0;
14975 rtx label_rtx = gen_label_rtx ();
14976 char *label_buf, temp_buf[256];
14977 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
14978 CODE_LABEL_NUMBER (label_rtx));
14979 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
14980 labelname = get_identifier (label_buf);
14981 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
14983 line_number = NOTE_LINE_NUMBER (insn);
14984 add_compiler_branch_island (labelname, funname, line_number);
14987 labelname = get_prev_label (funname);
14989 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
14990 instruction will reach 'foo', otherwise link as 'bl L42'".
14991 "L42" should be a 'branch island', that will do a far jump to
14992 'foo'. Branch islands are generated in
14993 macho_branch_islands(). */
14994 sprintf (buf, "jbsr %%z%d,%.246s",
14995 dest_operand_number, IDENTIFIER_POINTER (labelname));
14998 sprintf (buf, "bl %%z%d", dest_operand_number);
15002 #endif /* TARGET_MACHO */
15004 /* Generate PIC and indirect symbol stubs. */
15007 machopic_output_stub (FILE *file, const char *symb, const char *stub)
15009 unsigned int length;
15010 char *symbol_name, *lazy_ptr_name;
15011 char *local_label_0;
15012 static int label = 0;
15014 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
15015 symb = (*targetm.strip_name_encoding) (symb);
15018 length = strlen (symb);
15019 symbol_name = alloca (length + 32);
15020 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
15022 lazy_ptr_name = alloca (length + 32);
15023 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
15026 machopic_picsymbol_stub1_section ();
15028 machopic_symbol_stub1_section ();
15029 fprintf (file, "\t.align 2\n");
15031 fprintf (file, "%s:\n", stub);
15032 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15037 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
15038 sprintf (local_label_0, "\"L%011d$spb\"", label);
15040 fprintf (file, "\tmflr r0\n");
15041 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
15042 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
15043 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
15044 lazy_ptr_name, local_label_0);
15045 fprintf (file, "\tmtlr r0\n");
15046 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
15047 lazy_ptr_name, local_label_0);
15048 fprintf (file, "\tmtctr r12\n");
15049 fprintf (file, "\tbctr\n");
15053 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
15054 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
15055 fprintf (file, "\tmtctr r12\n");
15056 fprintf (file, "\tbctr\n");
15059 machopic_lazy_symbol_ptr_section ();
15060 fprintf (file, "%s:\n", lazy_ptr_name);
15061 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15062 fprintf (file, "\t.long dyld_stub_binding_helper\n");
15065 /* Legitimize PIC addresses. If the address is already
15066 position-independent, we return ORIG. Newly generated
15067 position-independent addresses go into a reg. This is REG if non
15068 zero, otherwise we allocate register(s) as necessary. */
15070 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
15073 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
15078 if (reg == NULL && ! reload_in_progress && ! reload_completed)
15079 reg = gen_reg_rtx (Pmode);
15081 if (GET_CODE (orig) == CONST)
15083 if (GET_CODE (XEXP (orig, 0)) == PLUS
15084 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
15087 if (GET_CODE (XEXP (orig, 0)) == PLUS)
15089 /* Use a different reg for the intermediate value, as
15090 it will be marked UNCHANGING. */
15091 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
15094 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
15097 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
15103 if (GET_CODE (offset) == CONST_INT)
15105 if (SMALL_INT (offset))
15106 return plus_constant (base, INTVAL (offset));
15107 else if (! reload_in_progress && ! reload_completed)
15108 offset = force_reg (Pmode, offset);
15111 rtx mem = force_const_mem (Pmode, orig);
15112 return machopic_legitimize_pic_address (mem, Pmode, reg);
15115 return gen_rtx_PLUS (Pmode, base, offset);
15118 /* Fall back on generic machopic code. */
15119 return machopic_legitimize_pic_address (orig, mode, reg);
15122 /* This is just a placeholder to make linking work without having to
15123 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
15124 ever needed for Darwin (not too likely!) this would have to get a
15125 real definition. */
15132 #endif /* TARGET_MACHO */
15135 static unsigned int
15136 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
15138 return default_section_type_flags_1 (decl, name, reloc,
15139 flag_pic || DEFAULT_ABI == ABI_AIX);
15142 /* Record an element in the table of global constructors. SYMBOL is
15143 a SYMBOL_REF of the function to be called; PRIORITY is a number
15144 between 0 and MAX_INIT_PRIORITY.
15146 This differs from default_named_section_asm_out_constructor in
15147 that we have special handling for -mrelocatable. */
15150 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
15152 const char *section = ".ctors";
15155 if (priority != DEFAULT_INIT_PRIORITY)
15157 sprintf (buf, ".ctors.%.5u",
15158 /* Invert the numbering so the linker puts us in the proper
15159 order; constructors are run from right to left, and the
15160 linker sorts in increasing order. */
15161 MAX_INIT_PRIORITY - priority);
15165 named_section_flags (section, SECTION_WRITE);
15166 assemble_align (POINTER_SIZE);
15168 if (TARGET_RELOCATABLE)
15170 fputs ("\t.long (", asm_out_file);
15171 output_addr_const (asm_out_file, symbol);
15172 fputs (")@fixup\n", asm_out_file);
15175 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15179 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15181 const char *section = ".dtors";
15184 if (priority != DEFAULT_INIT_PRIORITY)
15186 sprintf (buf, ".dtors.%.5u",
15187 /* Invert the numbering so the linker puts us in the proper
15188 order; constructors are run from right to left, and the
15189 linker sorts in increasing order. */
15190 MAX_INIT_PRIORITY - priority);
15194 named_section_flags (section, SECTION_WRITE);
15195 assemble_align (POINTER_SIZE);
15197 if (TARGET_RELOCATABLE)
15199 fputs ("\t.long (", asm_out_file);
15200 output_addr_const (asm_out_file, symbol);
15201 fputs (")@fixup\n", asm_out_file);
15204 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15208 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
15212 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
15213 ASM_OUTPUT_LABEL (file, name);
15214 fputs (DOUBLE_INT_ASM_OP, file);
15216 assemble_name (file, name);
15217 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
15218 assemble_name (file, name);
15219 fputs (",24\n\t.type\t.", file);
15220 assemble_name (file, name);
15221 fputs (",@function\n", file);
15222 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
15224 fputs ("\t.globl\t.", file);
15225 assemble_name (file, name);
15228 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15230 ASM_OUTPUT_LABEL (file, name);
15234 if (TARGET_RELOCATABLE
15235 && (get_pool_size () != 0 || current_function_profile)
15240 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
15242 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15243 fprintf (file, "\t.long ");
15244 assemble_name (file, buf);
15246 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15247 assemble_name (file, buf);
15251 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
15252 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15254 if (DEFAULT_ABI == ABI_AIX)
15256 const char *desc_name, *orig_name;
15258 orig_name = (*targetm.strip_name_encoding) (name);
15259 desc_name = orig_name;
15260 while (*desc_name == '.')
15263 if (TREE_PUBLIC (decl))
15264 fprintf (file, "\t.globl %s\n", desc_name);
15266 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15267 fprintf (file, "%s:\n", desc_name);
15268 fprintf (file, "\t.long %s\n", orig_name);
15269 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15270 if (DEFAULT_ABI == ABI_AIX)
15271 fputs ("\t.long 0\n", file);
15272 fprintf (file, "\t.previous\n");
15274 ASM_OUTPUT_LABEL (file, name);
15280 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15282 fputs (GLOBAL_ASM_OP, stream);
15283 RS6000_OUTPUT_BASENAME (stream, name);
15284 putc ('\n', stream);
15288 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15291 static const char * const suffix[3] = { "PR", "RO", "RW" };
15293 if (flags & SECTION_CODE)
15295 else if (flags & SECTION_WRITE)
15300 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15301 (flags & SECTION_CODE) ? "." : "",
15302 name, suffix[smclass], flags & SECTION_ENTSIZE);
15306 rs6000_xcoff_select_section (tree decl, int reloc,
15307 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15309 if (decl_readonly_section_1 (decl, reloc, 1))
15311 if (TREE_PUBLIC (decl))
15312 read_only_data_section ();
15314 read_only_private_data_section ();
15318 if (TREE_PUBLIC (decl))
15321 private_data_section ();
15326 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15330 /* Use select_section for private and uninitialized data. */
15331 if (!TREE_PUBLIC (decl)
15332 || DECL_COMMON (decl)
15333 || DECL_INITIAL (decl) == NULL_TREE
15334 || DECL_INITIAL (decl) == error_mark_node
15335 || (flag_zero_initialized_in_bss
15336 && initializer_zerop (DECL_INITIAL (decl))))
15339 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15340 name = (*targetm.strip_name_encoding) (name);
15341 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15344 /* Select section for constant in constant pool.
15346 On RS/6000, all constants are in the private read-only data area.
15347 However, if this is being placed in the TOC it must be output as a
15351 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15352 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15354 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15357 read_only_private_data_section ();
15360 /* Remove any trailing [DS] or the like from the symbol name. */
15362 static const char *
15363 rs6000_xcoff_strip_name_encoding (const char *name)
15368 len = strlen (name);
15369 if (name[len - 1] == ']')
15370 return ggc_alloc_string (name, len - 4);
15375 /* Section attributes. AIX is always PIC. */
15377 static unsigned int
15378 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
15380 unsigned int align;
15381 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
15383 /* Align to at least UNIT size. */
15384 if (flags & SECTION_CODE)
15385 align = MIN_UNITS_PER_WORD;
15387 /* Increase alignment of large objects if not already stricter. */
15388 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
15389 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
15390 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
15392 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
15395 /* Output at beginning of assembler file.
15397 Initialize the section names for the RS/6000 at this point.
15399 Specify filename, including full path, to assembler.
15401 We want to go into the TOC section so at least one .toc will be emitted.
15402 Also, in order to output proper .bs/.es pairs, we need at least one static
15403 [RW] section emitted.
15405 Finally, declare mcount when profiling to make the assembler happy. */
15408 rs6000_xcoff_file_start (void)
15410 rs6000_gen_section_name (&xcoff_bss_section_name,
15411 main_input_filename, ".bss_");
15412 rs6000_gen_section_name (&xcoff_private_data_section_name,
15413 main_input_filename, ".rw_");
15414 rs6000_gen_section_name (&xcoff_read_only_section_name,
15415 main_input_filename, ".ro_");
15417 fputs ("\t.file\t", asm_out_file);
15418 output_quoted_string (asm_out_file, main_input_filename);
15419 fputc ('\n', asm_out_file);
15421 if (write_symbols != NO_DEBUG)
15422 private_data_section ();
15425 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
15426 rs6000_file_start ();
15429 /* Output at end of assembler file.
15430 On the RS/6000, referencing data should automatically pull in text. */
15433 rs6000_xcoff_file_end (void)
15436 fputs ("_section_.text:\n", asm_out_file);
15438 fputs (TARGET_32BIT
15439 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
15442 #endif /* TARGET_XCOFF */
15445 /* Cross-module name binding. Darwin does not support overriding
15446 functions at dynamic-link time. */
15449 rs6000_binds_local_p (tree decl)
15451 return default_binds_local_p_1 (decl, 0);
15455 /* Compute a (partial) cost for rtx X. Return true if the complete
15456 cost has been computed, and false if subexpressions should be
15457 scanned. In either case, *TOTAL contains the cost result. */
15460 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
15465 /* On the RS/6000, if it is valid in the insn, it is free.
15466 So this always returns 0. */
15477 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15478 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
15479 + 0x8000) >= 0x10000)
15480 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15481 ? COSTS_N_INSNS (2)
15482 : COSTS_N_INSNS (1));
15488 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15489 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
15490 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15491 ? COSTS_N_INSNS (2)
15492 : COSTS_N_INSNS (1));
15498 *total = COSTS_N_INSNS (2);
15501 switch (rs6000_cpu)
15503 case PROCESSOR_RIOS1:
15504 case PROCESSOR_PPC405:
15505 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15506 ? COSTS_N_INSNS (5)
15507 : (INTVAL (XEXP (x, 1)) >= -256
15508 && INTVAL (XEXP (x, 1)) <= 255)
15509 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15512 case PROCESSOR_PPC440:
15513 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15514 ? COSTS_N_INSNS (3)
15515 : COSTS_N_INSNS (2));
15518 case PROCESSOR_RS64A:
15519 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15520 ? GET_MODE (XEXP (x, 1)) != DImode
15521 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
15522 : (INTVAL (XEXP (x, 1)) >= -256
15523 && INTVAL (XEXP (x, 1)) <= 255)
15524 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
15527 case PROCESSOR_RIOS2:
15528 case PROCESSOR_MPCCORE:
15529 case PROCESSOR_PPC604e:
15530 *total = COSTS_N_INSNS (2);
15533 case PROCESSOR_PPC601:
15534 *total = COSTS_N_INSNS (5);
15537 case PROCESSOR_PPC603:
15538 case PROCESSOR_PPC7400:
15539 case PROCESSOR_PPC750:
15540 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15541 ? COSTS_N_INSNS (5)
15542 : (INTVAL (XEXP (x, 1)) >= -256
15543 && INTVAL (XEXP (x, 1)) <= 255)
15544 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
15547 case PROCESSOR_PPC7450:
15548 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15549 ? COSTS_N_INSNS (4)
15550 : COSTS_N_INSNS (3));
15553 case PROCESSOR_PPC403:
15554 case PROCESSOR_PPC604:
15555 case PROCESSOR_PPC8540:
15556 *total = COSTS_N_INSNS (4);
15559 case PROCESSOR_PPC620:
15560 case PROCESSOR_PPC630:
15561 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15562 ? GET_MODE (XEXP (x, 1)) != DImode
15563 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
15564 : (INTVAL (XEXP (x, 1)) >= -256
15565 && INTVAL (XEXP (x, 1)) <= 255)
15566 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15569 case PROCESSOR_POWER4:
15570 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15571 ? GET_MODE (XEXP (x, 1)) != DImode
15572 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
15573 : COSTS_N_INSNS (2));
15582 if (GET_CODE (XEXP (x, 1)) == CONST_INT
15583 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
15585 *total = COSTS_N_INSNS (2);
15592 switch (rs6000_cpu)
15594 case PROCESSOR_RIOS1:
15595 *total = COSTS_N_INSNS (19);
15598 case PROCESSOR_RIOS2:
15599 *total = COSTS_N_INSNS (13);
15602 case PROCESSOR_RS64A:
15603 *total = (GET_MODE (XEXP (x, 1)) != DImode
15604 ? COSTS_N_INSNS (65)
15605 : COSTS_N_INSNS (67));
15608 case PROCESSOR_MPCCORE:
15609 *total = COSTS_N_INSNS (6);
15612 case PROCESSOR_PPC403:
15613 *total = COSTS_N_INSNS (33);
15616 case PROCESSOR_PPC405:
15617 *total = COSTS_N_INSNS (35);
15620 case PROCESSOR_PPC440:
15621 *total = COSTS_N_INSNS (34);
15624 case PROCESSOR_PPC601:
15625 *total = COSTS_N_INSNS (36);
15628 case PROCESSOR_PPC603:
15629 *total = COSTS_N_INSNS (37);
15632 case PROCESSOR_PPC604:
15633 case PROCESSOR_PPC604e:
15634 *total = COSTS_N_INSNS (20);
15637 case PROCESSOR_PPC620:
15638 case PROCESSOR_PPC630:
15639 *total = (GET_MODE (XEXP (x, 1)) != DImode
15640 ? COSTS_N_INSNS (21)
15641 : COSTS_N_INSNS (37));
15644 case PROCESSOR_PPC750:
15645 case PROCESSOR_PPC8540:
15646 case PROCESSOR_PPC7400:
15647 *total = COSTS_N_INSNS (19);
15650 case PROCESSOR_PPC7450:
15651 *total = COSTS_N_INSNS (23);
15654 case PROCESSOR_POWER4:
15655 *total = (GET_MODE (XEXP (x, 1)) != DImode
15656 ? COSTS_N_INSNS (18)
15657 : COSTS_N_INSNS (34));
15665 *total = COSTS_N_INSNS (4);
15669 /* MEM should be slightly more expensive than (plus (reg) (const)). */
15678 /* A C expression returning the cost of moving data from a register of class
15679 CLASS1 to one of CLASS2. */
15682 rs6000_register_move_cost (enum machine_mode mode,
15683 enum reg_class from, enum reg_class to)
15685 /* Moves from/to GENERAL_REGS. */
15686 if (reg_classes_intersect_p (to, GENERAL_REGS)
15687 || reg_classes_intersect_p (from, GENERAL_REGS))
15689 if (! reg_classes_intersect_p (to, GENERAL_REGS))
15692 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
15693 return (rs6000_memory_move_cost (mode, from, 0)
15694 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
15696 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
15697 else if (from == CR_REGS)
15701 /* A move will cost one instruction per GPR moved. */
15702 return 2 * HARD_REGNO_NREGS (0, mode);
15705 /* Moving between two similar registers is just one instruction. */
15706 else if (reg_classes_intersect_p (to, from))
15707 return mode == TFmode ? 4 : 2;
15709 /* Everything else has to go through GENERAL_REGS. */
15711 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
15712 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
15715 /* A C expressions returning the cost of moving data of MODE from a register to
15719 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
15720 int in ATTRIBUTE_UNUSED)
15722 if (reg_classes_intersect_p (class, GENERAL_REGS))
15723 return 4 * HARD_REGNO_NREGS (0, mode);
15724 else if (reg_classes_intersect_p (class, FLOAT_REGS))
15725 return 4 * HARD_REGNO_NREGS (32, mode);
15726 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
15727 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
15729 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
15732 /* Return an RTX representing where to find the function value of a
15733 function returning MODE. */
15735 rs6000_complex_function_value (enum machine_mode mode)
15737 unsigned int regno;
15739 enum machine_mode inner = GET_MODE_INNER (mode);
15741 if (FLOAT_MODE_P (mode))
15742 regno = FP_ARG_RETURN;
15745 regno = GP_ARG_RETURN;
15747 /* 32-bit is OK since it'll go in r3/r4. */
15749 && GET_MODE_BITSIZE (inner) >= 32)
15750 return gen_rtx_REG (mode, regno);
15753 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
15755 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
15756 GEN_INT (GET_MODE_UNIT_SIZE (inner)));
15757 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
15760 /* Define how to find the value returned by a function.
15761 VALTYPE is the data type of the value (as a tree).
15762 If the precise function being called is known, FUNC is its FUNCTION_DECL;
15763 otherwise, FUNC is 0.
15765 On the SPE, both FPs and vectors are returned in r3.
15767 On RS/6000 an integer value is in r3 and a floating-point value is in
15768 fp1, unless -msoft-float. */
15771 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
15773 enum machine_mode mode;
15774 unsigned int regno;
15776 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
15778 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
15779 return gen_rtx_PARALLEL (DImode,
15781 gen_rtx_EXPR_LIST (VOIDmode,
15782 gen_rtx_REG (SImode, GP_ARG_RETURN),
15784 gen_rtx_EXPR_LIST (VOIDmode,
15785 gen_rtx_REG (SImode,
15786 GP_ARG_RETURN + 1),
15790 if ((INTEGRAL_TYPE_P (valtype)
15791 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
15792 || POINTER_TYPE_P (valtype))
15793 mode = TARGET_32BIT ? SImode : DImode;
15795 mode = TYPE_MODE (valtype);
15797 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
15798 regno = FP_ARG_RETURN;
15799 else if (TREE_CODE (valtype) == COMPLEX_TYPE
15800 && TARGET_HARD_FLOAT
15801 && SPLIT_COMPLEX_ARGS)
15802 return rs6000_complex_function_value (mode);
15803 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
15804 regno = ALTIVEC_ARG_RETURN;
15806 regno = GP_ARG_RETURN;
15808 return gen_rtx_REG (mode, regno);
15811 /* Define how to find the value returned by a library function
15812 assuming the value has mode MODE. */
15814 rs6000_libcall_value (enum machine_mode mode)
15816 unsigned int regno;
15818 if (GET_MODE_CLASS (mode) == MODE_FLOAT
15819 && TARGET_HARD_FLOAT && TARGET_FPRS)
15820 regno = FP_ARG_RETURN;
15821 else if (ALTIVEC_VECTOR_MODE (mode))
15822 regno = ALTIVEC_ARG_RETURN;
15823 else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
15824 return rs6000_complex_function_value (mode);
15826 regno = GP_ARG_RETURN;
15828 return gen_rtx_REG (mode, regno);
15831 /* Define the offset between two registers, FROM to be eliminated and its
15832 replacement TO, at the start of a routine. */
15834 rs6000_initial_elimination_offset (int from, int to)
15836 rs6000_stack_t *info = rs6000_stack_info ();
15837 HOST_WIDE_INT offset;
15839 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
15840 offset = info->push_p ? 0 : -info->total_size;
15841 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
15842 offset = info->total_size;
15843 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
15844 offset = info->push_p ? info->total_size : 0;
15845 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
15853 /* Return true if TYPE is of type __ev64_opaque__. */
15856 is_ev64_opaque_type (tree type)
15859 && (type == opaque_V2SI_type_node
15860 || type == opaque_V2SF_type_node
15861 || type == opaque_p_V2SI_type_node));
15865 rs6000_dwarf_register_span (rtx reg)
15869 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
15872 regno = REGNO (reg);
15874 /* The duality of the SPE register size wreaks all kinds of havoc.
15875 This is a way of distinguishing r0 in 32-bits from r0 in
15878 gen_rtx_PARALLEL (VOIDmode,
15881 gen_rtx_REG (SImode, regno + 1200),
15882 gen_rtx_REG (SImode, regno))
15884 gen_rtx_REG (SImode, regno),
15885 gen_rtx_REG (SImode, regno + 1200)));
15888 /* Map internal gcc register numbers to DWARF2 register numbers. */
15891 rs6000_dbx_register_number (unsigned int regno)
15893 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
15895 if (regno == MQ_REGNO)
15897 if (regno == LINK_REGISTER_REGNUM)
15899 if (regno == COUNT_REGISTER_REGNUM)
15901 if (CR_REGNO_P (regno))
15902 return regno - CR0_REGNO + 86;
15903 if (regno == XER_REGNO)
15905 if (ALTIVEC_REGNO_P (regno))
15906 return regno - FIRST_ALTIVEC_REGNO + 1124;
15907 if (regno == VRSAVE_REGNO)
15909 if (regno == VSCR_REGNO)
15911 if (regno == SPE_ACC_REGNO)
15913 if (regno == SPEFSCR_REGNO)
15915 /* SPE high reg number. We get these values of regno from
15916 rs6000_dwarf_register_span. */
15917 if (regno >= 1200 && regno < 1232)
15923 #include "gt-rs6000.h"