1 /* Subroutines used for MIPS code generation.
2 Copyright (C) 1989, 1990, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by A. Lichnewsky, lich@inria.inria.fr.
5 Changes by Michael Meissner, meissner@osf.org.
6 64 bit r4000 support by Ian Lance Taylor, ian@cygnus.com, and
7 Brendan Eich, brendan@microunity.com.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify
12 it under the terms of the GNU General Public License as published by
13 the Free Software Foundation; either version 2, or (at your option)
16 GCC is distributed in the hope that it will be useful,
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 GNU General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING. If not, write to
23 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
24 Boston, MA 02110-1301, USA. */
28 #include "coretypes.h"
33 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "conditions.h"
37 #include "insn-attr.h"
53 #include "target-def.h"
54 #include "integrate.h"
55 #include "langhooks.h"
56 #include "cfglayout.h"
57 #include "sched-int.h"
58 #include "tree-gimple.h"
61 /* True if X is an unspec wrapper around a SYMBOL_REF or LABEL_REF. */
62 #define UNSPEC_ADDRESS_P(X) \
63 (GET_CODE (X) == UNSPEC \
64 && XINT (X, 1) >= UNSPEC_ADDRESS_FIRST \
65 && XINT (X, 1) < UNSPEC_ADDRESS_FIRST + NUM_SYMBOL_TYPES)
67 /* Extract the symbol or label from UNSPEC wrapper X. */
68 #define UNSPEC_ADDRESS(X) \
71 /* Extract the symbol type from UNSPEC wrapper X. */
72 #define UNSPEC_ADDRESS_TYPE(X) \
73 ((enum mips_symbol_type) (XINT (X, 1) - UNSPEC_ADDRESS_FIRST))
75 /* The maximum distance between the top of the stack frame and the
76 value $sp has when we save & restore registers.
78 Use a maximum gap of 0x100 in the mips16 case. We can then use
79 unextended instructions to save and restore registers, and to
80 allocate and deallocate the top part of the frame.
82 The value in the !mips16 case must be a SMALL_OPERAND and must
83 preserve the maximum stack alignment. */
84 #define MIPS_MAX_FIRST_STACK_STEP (TARGET_MIPS16 ? 0x100 : 0x7ff0)
86 /* True if INSN is a mips.md pattern or asm statement. */
87 #define USEFUL_INSN_P(INSN) \
89 && GET_CODE (PATTERN (INSN)) != USE \
90 && GET_CODE (PATTERN (INSN)) != CLOBBER \
91 && GET_CODE (PATTERN (INSN)) != ADDR_VEC \
92 && GET_CODE (PATTERN (INSN)) != ADDR_DIFF_VEC)
94 /* If INSN is a delayed branch sequence, return the first instruction
95 in the sequence, otherwise return INSN itself. */
96 #define SEQ_BEGIN(INSN) \
97 (INSN_P (INSN) && GET_CODE (PATTERN (INSN)) == SEQUENCE \
98 ? XVECEXP (PATTERN (INSN), 0, 0) \
101 /* Likewise for the last instruction in a delayed branch sequence. */
102 #define SEQ_END(INSN) \
103 (INSN_P (INSN) && GET_CODE (PATTERN (INSN)) == SEQUENCE \
104 ? XVECEXP (PATTERN (INSN), 0, XVECLEN (PATTERN (INSN), 0) - 1) \
107 /* Execute the following loop body with SUBINSN set to each instruction
108 between SEQ_BEGIN (INSN) and SEQ_END (INSN) inclusive. */
109 #define FOR_EACH_SUBINSN(SUBINSN, INSN) \
110 for ((SUBINSN) = SEQ_BEGIN (INSN); \
111 (SUBINSN) != NEXT_INSN (SEQ_END (INSN)); \
112 (SUBINSN) = NEXT_INSN (SUBINSN))
114 /* Classifies an address.
117 A natural register + offset address. The register satisfies
118 mips_valid_base_register_p and the offset is a const_arith_operand.
121 A LO_SUM rtx. The first operand is a valid base register and
122 the second operand is a symbolic address.
125 A signed 16-bit constant address.
128 A constant symbolic address (equivalent to CONSTANT_SYMBOLIC). */
129 enum mips_address_type {
136 /* Classifies the prototype of a builtin function. */
137 enum mips_function_type
139 MIPS_V2SF_FTYPE_V2SF,
140 MIPS_V2SF_FTYPE_V2SF_V2SF,
141 MIPS_V2SF_FTYPE_V2SF_V2SF_INT,
142 MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF,
143 MIPS_V2SF_FTYPE_SF_SF,
144 MIPS_INT_FTYPE_V2SF_V2SF,
145 MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF,
146 MIPS_INT_FTYPE_SF_SF,
147 MIPS_INT_FTYPE_DF_DF,
154 /* For MIPS DSP ASE */
156 MIPS_DI_FTYPE_DI_SI_SI,
157 MIPS_DI_FTYPE_DI_V2HI_V2HI,
158 MIPS_DI_FTYPE_DI_V4QI_V4QI,
160 MIPS_SI_FTYPE_PTR_SI,
164 MIPS_SI_FTYPE_V2HI_V2HI,
166 MIPS_SI_FTYPE_V4QI_V4QI,
169 MIPS_V2HI_FTYPE_SI_SI,
170 MIPS_V2HI_FTYPE_V2HI,
171 MIPS_V2HI_FTYPE_V2HI_SI,
172 MIPS_V2HI_FTYPE_V2HI_V2HI,
173 MIPS_V2HI_FTYPE_V4QI,
174 MIPS_V2HI_FTYPE_V4QI_V2HI,
176 MIPS_V4QI_FTYPE_V2HI_V2HI,
177 MIPS_V4QI_FTYPE_V4QI_SI,
178 MIPS_V4QI_FTYPE_V4QI_V4QI,
179 MIPS_VOID_FTYPE_SI_SI,
180 MIPS_VOID_FTYPE_V2HI_V2HI,
181 MIPS_VOID_FTYPE_V4QI_V4QI,
187 /* Specifies how a builtin function should be converted into rtl. */
188 enum mips_builtin_type
190 /* The builtin corresponds directly to an .md pattern. The return
191 value is mapped to operand 0 and the arguments are mapped to
192 operands 1 and above. */
195 /* The builtin corresponds directly to an .md pattern. There is no return
196 value and the arguments are mapped to operands 0 and above. */
197 MIPS_BUILTIN_DIRECT_NO_TARGET,
199 /* The builtin corresponds to a comparison instruction followed by
200 a mips_cond_move_tf_ps pattern. The first two arguments are the
201 values to compare and the second two arguments are the vector
202 operands for the movt.ps or movf.ps instruction (in assembly order). */
206 /* The builtin corresponds to a V2SF comparison instruction. Operand 0
207 of this instruction is the result of the comparison, which has mode
208 CCV2 or CCV4. The function arguments are mapped to operands 1 and
209 above. The function's return value is an SImode boolean that is
210 true under the following conditions:
212 MIPS_BUILTIN_CMP_ANY: one of the registers is true
213 MIPS_BUILTIN_CMP_ALL: all of the registers are true
214 MIPS_BUILTIN_CMP_LOWER: the first register is true
215 MIPS_BUILTIN_CMP_UPPER: the second register is true. */
216 MIPS_BUILTIN_CMP_ANY,
217 MIPS_BUILTIN_CMP_ALL,
218 MIPS_BUILTIN_CMP_UPPER,
219 MIPS_BUILTIN_CMP_LOWER,
221 /* As above, but the instruction only sets a single $fcc register. */
222 MIPS_BUILTIN_CMP_SINGLE,
224 /* For generating bposge32 branch instructions in MIPS32 DSP ASE. */
225 MIPS_BUILTIN_BPOSGE32
228 /* Invokes MACRO (COND) for each c.cond.fmt condition. */
229 #define MIPS_FP_CONDITIONS(MACRO) \
247 /* Enumerates the codes above as MIPS_FP_COND_<X>. */
248 #define DECLARE_MIPS_COND(X) MIPS_FP_COND_ ## X
249 enum mips_fp_condition {
250 MIPS_FP_CONDITIONS (DECLARE_MIPS_COND)
253 /* Index X provides the string representation of MIPS_FP_COND_<X>. */
254 #define STRINGIFY(X) #X
255 static const char *const mips_fp_conditions[] = {
256 MIPS_FP_CONDITIONS (STRINGIFY)
259 /* A function to save or store a register. The first argument is the
260 register and the second is the stack slot. */
261 typedef void (*mips_save_restore_fn) (rtx, rtx);
263 struct mips16_constant;
264 struct mips_arg_info;
265 struct mips_address_info;
266 struct mips_integer_op;
269 static enum mips_symbol_type mips_classify_symbol (rtx);
270 static void mips_split_const (rtx, rtx *, HOST_WIDE_INT *);
271 static bool mips_offset_within_object_p (rtx, HOST_WIDE_INT);
272 static bool mips_valid_base_register_p (rtx, enum machine_mode, int);
273 static bool mips_symbolic_address_p (enum mips_symbol_type, enum machine_mode);
274 static bool mips_classify_address (struct mips_address_info *, rtx,
275 enum machine_mode, int);
276 static bool mips_cannot_force_const_mem (rtx);
277 static int mips_symbol_insns (enum mips_symbol_type);
278 static bool mips16_unextended_reference_p (enum machine_mode mode, rtx, rtx);
279 static rtx mips_force_temporary (rtx, rtx);
280 static rtx mips_split_symbol (rtx, rtx);
281 static rtx mips_unspec_offset_high (rtx, rtx, rtx, enum mips_symbol_type);
282 static rtx mips_add_offset (rtx, rtx, HOST_WIDE_INT);
283 static unsigned int mips_build_shift (struct mips_integer_op *, HOST_WIDE_INT);
284 static unsigned int mips_build_lower (struct mips_integer_op *,
285 unsigned HOST_WIDE_INT);
286 static unsigned int mips_build_integer (struct mips_integer_op *,
287 unsigned HOST_WIDE_INT);
288 static void mips_move_integer (rtx, unsigned HOST_WIDE_INT);
289 static void mips_legitimize_const_move (enum machine_mode, rtx, rtx);
290 static int m16_check_op (rtx, int, int, int);
291 static bool mips_rtx_costs (rtx, int, int, int *);
292 static int mips_address_cost (rtx);
293 static void mips_emit_compare (enum rtx_code *, rtx *, rtx *, bool);
294 static void mips_load_call_address (rtx, rtx, int);
295 static bool mips_function_ok_for_sibcall (tree, tree);
296 static void mips_block_move_straight (rtx, rtx, HOST_WIDE_INT);
297 static void mips_adjust_block_mem (rtx, HOST_WIDE_INT, rtx *, rtx *);
298 static void mips_block_move_loop (rtx, rtx, HOST_WIDE_INT);
299 static void mips_arg_info (const CUMULATIVE_ARGS *, enum machine_mode,
300 tree, int, struct mips_arg_info *);
301 static bool mips_get_unaligned_mem (rtx *, unsigned int, int, rtx *, rtx *);
302 static void mips_set_architecture (const struct mips_cpu_info *);
303 static void mips_set_tune (const struct mips_cpu_info *);
304 static bool mips_handle_option (size_t, const char *, int);
305 static struct machine_function *mips_init_machine_status (void);
306 static void print_operand_reloc (FILE *, rtx, const char **);
308 static void irix_output_external_libcall (rtx);
310 static void mips_file_start (void);
311 static void mips_file_end (void);
312 static bool mips_rewrite_small_data_p (rtx);
313 static int mips_small_data_pattern_1 (rtx *, void *);
314 static int mips_rewrite_small_data_1 (rtx *, void *);
315 static bool mips_function_has_gp_insn (void);
316 static unsigned int mips_global_pointer (void);
317 static bool mips_save_reg_p (unsigned int);
318 static void mips_save_restore_reg (enum machine_mode, int, HOST_WIDE_INT,
319 mips_save_restore_fn);
320 static void mips_for_each_saved_reg (HOST_WIDE_INT, mips_save_restore_fn);
321 static void mips_output_cplocal (void);
322 static void mips_emit_loadgp (void);
323 static void mips_output_function_prologue (FILE *, HOST_WIDE_INT);
324 static void mips_set_frame_expr (rtx);
325 static rtx mips_frame_set (rtx, rtx);
326 static void mips_save_reg (rtx, rtx);
327 static void mips_output_function_epilogue (FILE *, HOST_WIDE_INT);
328 static void mips_restore_reg (rtx, rtx);
329 static void mips_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
330 HOST_WIDE_INT, tree);
331 static int symbolic_expression_p (rtx);
332 static section *mips_select_rtx_section (enum machine_mode, rtx,
333 unsigned HOST_WIDE_INT);
334 static section *mips_function_rodata_section (tree);
335 static bool mips_in_small_data_p (tree);
336 static int mips_fpr_return_fields (tree, tree *);
337 static bool mips_return_in_msb (tree);
338 static rtx mips_return_fpr_pair (enum machine_mode mode,
339 enum machine_mode mode1, HOST_WIDE_INT,
340 enum machine_mode mode2, HOST_WIDE_INT);
341 static rtx mips16_gp_pseudo_reg (void);
342 static void mips16_fp_args (FILE *, int, int);
343 static void build_mips16_function_stub (FILE *);
344 static rtx dump_constants_1 (enum machine_mode, rtx, rtx);
345 static void dump_constants (struct mips16_constant *, rtx);
346 static int mips16_insn_length (rtx);
347 static int mips16_rewrite_pool_refs (rtx *, void *);
348 static void mips16_lay_out_constants (void);
349 static void mips_sim_reset (struct mips_sim *);
350 static void mips_sim_init (struct mips_sim *, state_t);
351 static void mips_sim_next_cycle (struct mips_sim *);
352 static void mips_sim_wait_reg (struct mips_sim *, rtx, rtx);
353 static int mips_sim_wait_regs_2 (rtx *, void *);
354 static void mips_sim_wait_regs_1 (rtx *, void *);
355 static void mips_sim_wait_regs (struct mips_sim *, rtx);
356 static void mips_sim_wait_units (struct mips_sim *, rtx);
357 static void mips_sim_wait_insn (struct mips_sim *, rtx);
358 static void mips_sim_record_set (rtx, rtx, void *);
359 static void mips_sim_issue_insn (struct mips_sim *, rtx);
360 static void mips_sim_issue_nop (struct mips_sim *);
361 static void mips_sim_finish_insn (struct mips_sim *, rtx);
362 static void vr4130_avoid_branch_rt_conflict (rtx);
363 static void vr4130_align_insns (void);
364 static void mips_avoid_hazard (rtx, rtx, int *, rtx *, rtx);
365 static void mips_avoid_hazards (void);
366 static void mips_reorg (void);
367 static bool mips_strict_matching_cpu_name_p (const char *, const char *);
368 static bool mips_matching_cpu_name_p (const char *, const char *);
369 static const struct mips_cpu_info *mips_parse_cpu (const char *);
370 static const struct mips_cpu_info *mips_cpu_info_from_isa (int);
371 static bool mips_return_in_memory (tree, tree);
372 static bool mips_strict_argument_naming (CUMULATIVE_ARGS *);
373 static void mips_macc_chains_record (rtx);
374 static void mips_macc_chains_reorder (rtx *, int);
375 static void vr4130_true_reg_dependence_p_1 (rtx, rtx, void *);
376 static bool vr4130_true_reg_dependence_p (rtx);
377 static bool vr4130_swap_insns_p (rtx, rtx);
378 static void vr4130_reorder (rtx *, int);
379 static void mips_promote_ready (rtx *, int, int);
380 static int mips_sched_reorder (FILE *, int, rtx *, int *, int);
381 static int mips_variable_issue (FILE *, int, rtx, int);
382 static int mips_adjust_cost (rtx, rtx, rtx, int);
383 static int mips_issue_rate (void);
384 static int mips_multipass_dfa_lookahead (void);
385 static void mips_init_libfuncs (void);
386 static void mips_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
388 static tree mips_build_builtin_va_list (void);
389 static tree mips_gimplify_va_arg_expr (tree, tree, tree *, tree *);
390 static bool mips_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode mode,
392 static bool mips_callee_copies (CUMULATIVE_ARGS *, enum machine_mode mode,
394 static int mips_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode mode,
396 static bool mips_valid_pointer_mode (enum machine_mode);
397 static bool mips_vector_mode_supported_p (enum machine_mode);
398 static rtx mips_prepare_builtin_arg (enum insn_code, unsigned int, tree *);
399 static rtx mips_prepare_builtin_target (enum insn_code, unsigned int, rtx);
400 static rtx mips_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
401 static void mips_init_builtins (void);
402 static rtx mips_expand_builtin_direct (enum insn_code, rtx, tree, bool);
403 static rtx mips_expand_builtin_movtf (enum mips_builtin_type,
404 enum insn_code, enum mips_fp_condition,
406 static rtx mips_expand_builtin_compare (enum mips_builtin_type,
407 enum insn_code, enum mips_fp_condition,
409 static rtx mips_expand_builtin_bposge (enum mips_builtin_type, rtx);
410 static void mips_encode_section_info (tree, rtx, int);
411 static void mips_extra_live_on_entry (bitmap);
413 /* Structure to be filled in by compute_frame_size with register
414 save masks, and offsets for the current function. */
416 struct mips_frame_info GTY(())
418 HOST_WIDE_INT total_size; /* # bytes that the entire frame takes up */
419 HOST_WIDE_INT var_size; /* # bytes that variables take up */
420 HOST_WIDE_INT args_size; /* # bytes that outgoing arguments take up */
421 HOST_WIDE_INT cprestore_size; /* # bytes that the .cprestore slot takes up */
422 HOST_WIDE_INT gp_reg_size; /* # bytes needed to store gp regs */
423 HOST_WIDE_INT fp_reg_size; /* # bytes needed to store fp regs */
424 unsigned int mask; /* mask of saved gp registers */
425 unsigned int fmask; /* mask of saved fp registers */
426 HOST_WIDE_INT gp_save_offset; /* offset from vfp to store gp registers */
427 HOST_WIDE_INT fp_save_offset; /* offset from vfp to store fp registers */
428 HOST_WIDE_INT gp_sp_offset; /* offset from new sp to store gp registers */
429 HOST_WIDE_INT fp_sp_offset; /* offset from new sp to store fp registers */
430 bool initialized; /* true if frame size already calculated */
431 int num_gp; /* number of gp registers saved */
432 int num_fp; /* number of fp registers saved */
435 struct machine_function GTY(()) {
436 /* Pseudo-reg holding the value of $28 in a mips16 function which
437 refers to GP relative global variables. */
438 rtx mips16_gp_pseudo_rtx;
440 /* The number of extra stack bytes taken up by register varargs.
441 This area is allocated by the callee at the very top of the frame. */
444 /* Current frame information, calculated by compute_frame_size. */
445 struct mips_frame_info frame;
447 /* The register to use as the global pointer within this function. */
448 unsigned int global_pointer;
450 /* True if mips_adjust_insn_length should ignore an instruction's
452 bool ignore_hazard_length_p;
454 /* True if the whole function is suitable for .set noreorder and
456 bool all_noreorder_p;
458 /* True if the function is known to have an instruction that needs $gp. */
462 /* Information about a single argument. */
465 /* True if the argument is passed in a floating-point register, or
466 would have been if we hadn't run out of registers. */
469 /* The number of words passed in registers, rounded up. */
470 unsigned int reg_words;
472 /* For EABI, the offset of the first register from GP_ARG_FIRST or
473 FP_ARG_FIRST. For other ABIs, the offset of the first register from
474 the start of the ABI's argument structure (see the CUMULATIVE_ARGS
475 comment for details).
477 The value is MAX_ARGS_IN_REGISTERS if the argument is passed entirely
479 unsigned int reg_offset;
481 /* The number of words that must be passed on the stack, rounded up. */
482 unsigned int stack_words;
484 /* The offset from the start of the stack overflow area of the argument's
485 first stack word. Only meaningful when STACK_WORDS is nonzero. */
486 unsigned int stack_offset;
490 /* Information about an address described by mips_address_type.
496 REG is the base register and OFFSET is the constant offset.
499 REG is the register that contains the high part of the address,
500 OFFSET is the symbolic address being referenced and SYMBOL_TYPE
501 is the type of OFFSET's symbol.
504 SYMBOL_TYPE is the type of symbol being referenced. */
506 struct mips_address_info
508 enum mips_address_type type;
511 enum mips_symbol_type symbol_type;
515 /* One stage in a constant building sequence. These sequences have
519 A = A CODE[1] VALUE[1]
520 A = A CODE[2] VALUE[2]
523 where A is an accumulator, each CODE[i] is a binary rtl operation
524 and each VALUE[i] is a constant integer. */
525 struct mips_integer_op {
527 unsigned HOST_WIDE_INT value;
531 /* The largest number of operations needed to load an integer constant.
532 The worst accepted case for 64-bit constants is LUI,ORI,SLL,ORI,SLL,ORI.
533 When the lowest bit is clear, we can try, but reject a sequence with
534 an extra SLL at the end. */
535 #define MIPS_MAX_INTEGER_OPS 7
538 /* Global variables for machine-dependent things. */
540 /* Threshold for data being put into the small data/bss area, instead
541 of the normal data area. */
542 int mips_section_threshold = -1;
544 /* Count the number of .file directives, so that .loc is up to date. */
545 int num_source_filenames = 0;
547 /* Count the number of sdb related labels are generated (to find block
548 start and end boundaries). */
549 int sdb_label_count = 0;
551 /* Next label # for each statement for Silicon Graphics IRIS systems. */
554 /* Linked list of all externals that are to be emitted when optimizing
555 for the global pointer if they haven't been declared by the end of
556 the program with an appropriate .comm or initialization. */
558 struct extern_list GTY (())
560 struct extern_list *next; /* next external */
561 const char *name; /* name of the external */
562 int size; /* size in bytes */
565 static GTY (()) struct extern_list *extern_head = 0;
567 /* Name of the file containing the current function. */
568 const char *current_function_file = "";
570 /* Number of nested .set noreorder, noat, nomacro, and volatile requests. */
576 /* The next branch instruction is a branch likely, not branch normal. */
577 int mips_branch_likely;
579 /* The operands passed to the last cmpMM expander. */
582 /* The target cpu for code generation. */
583 enum processor_type mips_arch;
584 const struct mips_cpu_info *mips_arch_info;
586 /* The target cpu for optimization and scheduling. */
587 enum processor_type mips_tune;
588 const struct mips_cpu_info *mips_tune_info;
590 /* Which instruction set architecture to use. */
593 /* Which ABI to use. */
594 int mips_abi = MIPS_ABI_DEFAULT;
596 /* Cost information to use. */
597 const struct mips_rtx_cost_data *mips_cost;
599 /* Whether we are generating mips16 hard float code. In mips16 mode
600 we always set TARGET_SOFT_FLOAT; this variable is nonzero if
601 -msoft-float was not specified by the user, which means that we
602 should arrange to call mips32 hard floating point code. */
603 int mips16_hard_float;
605 /* The architecture selected by -mipsN. */
606 static const struct mips_cpu_info *mips_isa_info;
608 /* If TRUE, we split addresses into their high and low parts in the RTL. */
609 int mips_split_addresses;
611 /* Mode used for saving/restoring general purpose registers. */
612 static enum machine_mode gpr_mode;
614 /* Array giving truth value on whether or not a given hard register
615 can support a given mode. */
616 char mips_hard_regno_mode_ok[(int)MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
618 /* List of all MIPS punctuation characters used by print_operand. */
619 char mips_print_operand_punct[256];
621 /* Map GCC register number to debugger register number. */
622 int mips_dbx_regno[FIRST_PSEUDO_REGISTER];
624 /* A copy of the original flag_delayed_branch: see override_options. */
625 static int mips_flag_delayed_branch;
627 static GTY (()) int mips_output_filename_first_time = 1;
629 /* mips_split_p[X] is true if symbols of type X can be split by
630 mips_split_symbol(). */
631 static bool mips_split_p[NUM_SYMBOL_TYPES];
633 /* mips_lo_relocs[X] is the relocation to use when a symbol of type X
634 appears in a LO_SUM. It can be null if such LO_SUMs aren't valid or
635 if they are matched by a special .md file pattern. */
636 static const char *mips_lo_relocs[NUM_SYMBOL_TYPES];
638 /* Likewise for HIGHs. */
639 static const char *mips_hi_relocs[NUM_SYMBOL_TYPES];
641 /* Map hard register number to register class */
642 const enum reg_class mips_regno_to_class[] =
644 LEA_REGS, LEA_REGS, M16_NA_REGS, V1_REG,
645 M16_REGS, M16_REGS, M16_REGS, M16_REGS,
646 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
647 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
648 M16_NA_REGS, M16_NA_REGS, LEA_REGS, LEA_REGS,
649 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
650 T_REG, PIC_FN_ADDR_REG, LEA_REGS, LEA_REGS,
651 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
652 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
653 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
654 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
655 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
656 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
657 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
658 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
659 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
660 HI_REG, LO_REG, NO_REGS, ST_REGS,
661 ST_REGS, ST_REGS, ST_REGS, ST_REGS,
662 ST_REGS, ST_REGS, ST_REGS, NO_REGS,
663 NO_REGS, ALL_REGS, ALL_REGS, NO_REGS,
664 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
665 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
666 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
667 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
668 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
669 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
670 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
671 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
672 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
673 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
674 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
675 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
676 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
677 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
678 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
679 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
680 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
681 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
682 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
683 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
684 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
685 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
686 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
687 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
688 DSP_ACC_REGS, DSP_ACC_REGS, DSP_ACC_REGS, DSP_ACC_REGS,
689 DSP_ACC_REGS, DSP_ACC_REGS, ALL_REGS, ALL_REGS,
690 ALL_REGS, ALL_REGS, ALL_REGS, ALL_REGS
693 /* Table of machine dependent attributes. */
694 const struct attribute_spec mips_attribute_table[] =
696 { "long_call", 0, 0, false, true, true, NULL },
697 { NULL, 0, 0, false, false, false, NULL }
700 /* A table describing all the processors gcc knows about. Names are
701 matched in the order listed. The first mention of an ISA level is
702 taken as the canonical name for that ISA.
704 To ease comparison, please keep this table in the same order as
705 gas's mips_cpu_info_table[]. */
706 const struct mips_cpu_info mips_cpu_info_table[] = {
707 /* Entries for generic ISAs */
708 { "mips1", PROCESSOR_R3000, 1 },
709 { "mips2", PROCESSOR_R6000, 2 },
710 { "mips3", PROCESSOR_R4000, 3 },
711 { "mips4", PROCESSOR_R8000, 4 },
712 { "mips32", PROCESSOR_4KC, 32 },
713 { "mips32r2", PROCESSOR_M4K, 33 },
714 { "mips64", PROCESSOR_5KC, 64 },
717 { "r3000", PROCESSOR_R3000, 1 },
718 { "r2000", PROCESSOR_R3000, 1 }, /* = r3000 */
719 { "r3900", PROCESSOR_R3900, 1 },
722 { "r6000", PROCESSOR_R6000, 2 },
725 { "r4000", PROCESSOR_R4000, 3 },
726 { "vr4100", PROCESSOR_R4100, 3 },
727 { "vr4111", PROCESSOR_R4111, 3 },
728 { "vr4120", PROCESSOR_R4120, 3 },
729 { "vr4130", PROCESSOR_R4130, 3 },
730 { "vr4300", PROCESSOR_R4300, 3 },
731 { "r4400", PROCESSOR_R4000, 3 }, /* = r4000 */
732 { "r4600", PROCESSOR_R4600, 3 },
733 { "orion", PROCESSOR_R4600, 3 }, /* = r4600 */
734 { "r4650", PROCESSOR_R4650, 3 },
737 { "r8000", PROCESSOR_R8000, 4 },
738 { "vr5000", PROCESSOR_R5000, 4 },
739 { "vr5400", PROCESSOR_R5400, 4 },
740 { "vr5500", PROCESSOR_R5500, 4 },
741 { "rm7000", PROCESSOR_R7000, 4 },
742 { "rm9000", PROCESSOR_R9000, 4 },
745 { "4kc", PROCESSOR_4KC, 32 },
746 { "4km", PROCESSOR_4KC, 32 }, /* = 4kc */
747 { "4kp", PROCESSOR_4KP, 32 },
749 /* MIPS32 Release 2 */
750 { "m4k", PROCESSOR_M4K, 33 },
751 { "24k", PROCESSOR_24K, 33 },
752 { "24kc", PROCESSOR_24K, 33 }, /* 24K no FPU */
753 { "24kf", PROCESSOR_24K, 33 }, /* 24K 1:2 FPU */
754 { "24kx", PROCESSOR_24KX, 33 }, /* 24K 1:1 FPU */
757 { "5kc", PROCESSOR_5KC, 64 },
758 { "5kf", PROCESSOR_5KF, 64 },
759 { "20kc", PROCESSOR_20KC, 64 },
760 { "sb1", PROCESSOR_SB1, 64 },
761 { "sr71000", PROCESSOR_SR71000, 64 },
767 /* Default costs. If these are used for a processor we should look
768 up the actual costs. */
769 #define DEFAULT_COSTS COSTS_N_INSNS (6), /* fp_add */ \
770 COSTS_N_INSNS (7), /* fp_mult_sf */ \
771 COSTS_N_INSNS (8), /* fp_mult_df */ \
772 COSTS_N_INSNS (23), /* fp_div_sf */ \
773 COSTS_N_INSNS (36), /* fp_div_df */ \
774 COSTS_N_INSNS (10), /* int_mult_si */ \
775 COSTS_N_INSNS (10), /* int_mult_di */ \
776 COSTS_N_INSNS (69), /* int_div_si */ \
777 COSTS_N_INSNS (69), /* int_div_di */ \
778 2, /* branch_cost */ \
779 4 /* memory_latency */
781 /* Need to replace these with the costs of calling the appropriate
783 #define SOFT_FP_COSTS COSTS_N_INSNS (256), /* fp_add */ \
784 COSTS_N_INSNS (256), /* fp_mult_sf */ \
785 COSTS_N_INSNS (256), /* fp_mult_df */ \
786 COSTS_N_INSNS (256), /* fp_div_sf */ \
787 COSTS_N_INSNS (256) /* fp_div_df */
789 static struct mips_rtx_cost_data const mips_rtx_cost_data[PROCESSOR_MAX] =
792 COSTS_N_INSNS (2), /* fp_add */
793 COSTS_N_INSNS (4), /* fp_mult_sf */
794 COSTS_N_INSNS (5), /* fp_mult_df */
795 COSTS_N_INSNS (12), /* fp_div_sf */
796 COSTS_N_INSNS (19), /* fp_div_df */
797 COSTS_N_INSNS (12), /* int_mult_si */
798 COSTS_N_INSNS (12), /* int_mult_di */
799 COSTS_N_INSNS (35), /* int_div_si */
800 COSTS_N_INSNS (35), /* int_div_di */
802 4 /* memory_latency */
807 COSTS_N_INSNS (6), /* int_mult_si */
808 COSTS_N_INSNS (6), /* int_mult_di */
809 COSTS_N_INSNS (36), /* int_div_si */
810 COSTS_N_INSNS (36), /* int_div_di */
812 4 /* memory_latency */
816 COSTS_N_INSNS (36), /* int_mult_si */
817 COSTS_N_INSNS (36), /* int_mult_di */
818 COSTS_N_INSNS (37), /* int_div_si */
819 COSTS_N_INSNS (37), /* int_div_di */
821 4 /* memory_latency */
825 COSTS_N_INSNS (4), /* int_mult_si */
826 COSTS_N_INSNS (11), /* int_mult_di */
827 COSTS_N_INSNS (36), /* int_div_si */
828 COSTS_N_INSNS (68), /* int_div_di */
830 4 /* memory_latency */
833 COSTS_N_INSNS (4), /* fp_add */
834 COSTS_N_INSNS (4), /* fp_mult_sf */
835 COSTS_N_INSNS (5), /* fp_mult_df */
836 COSTS_N_INSNS (17), /* fp_div_sf */
837 COSTS_N_INSNS (32), /* fp_div_df */
838 COSTS_N_INSNS (4), /* int_mult_si */
839 COSTS_N_INSNS (11), /* int_mult_di */
840 COSTS_N_INSNS (36), /* int_div_si */
841 COSTS_N_INSNS (68), /* int_div_di */
843 4 /* memory_latency */
849 COSTS_N_INSNS (8), /* fp_add */
850 COSTS_N_INSNS (8), /* fp_mult_sf */
851 COSTS_N_INSNS (10), /* fp_mult_df */
852 COSTS_N_INSNS (34), /* fp_div_sf */
853 COSTS_N_INSNS (64), /* fp_div_df */
854 COSTS_N_INSNS (5), /* int_mult_si */
855 COSTS_N_INSNS (5), /* int_mult_di */
856 COSTS_N_INSNS (41), /* int_div_si */
857 COSTS_N_INSNS (41), /* int_div_di */
859 4 /* memory_latency */
862 COSTS_N_INSNS (4), /* fp_add */
863 COSTS_N_INSNS (4), /* fp_mult_sf */
864 COSTS_N_INSNS (5), /* fp_mult_df */
865 COSTS_N_INSNS (17), /* fp_div_sf */
866 COSTS_N_INSNS (32), /* fp_div_df */
867 COSTS_N_INSNS (5), /* int_mult_si */
868 COSTS_N_INSNS (5), /* int_mult_di */
869 COSTS_N_INSNS (41), /* int_div_si */
870 COSTS_N_INSNS (41), /* int_div_di */
872 4 /* memory_latency */
878 COSTS_N_INSNS (2), /* fp_add */
879 COSTS_N_INSNS (4), /* fp_mult_sf */
880 COSTS_N_INSNS (5), /* fp_mult_df */
881 COSTS_N_INSNS (12), /* fp_div_sf */
882 COSTS_N_INSNS (19), /* fp_div_df */
883 COSTS_N_INSNS (2), /* int_mult_si */
884 COSTS_N_INSNS (2), /* int_mult_di */
885 COSTS_N_INSNS (35), /* int_div_si */
886 COSTS_N_INSNS (35), /* int_div_di */
888 4 /* memory_latency */
891 COSTS_N_INSNS (3), /* fp_add */
892 COSTS_N_INSNS (5), /* fp_mult_sf */
893 COSTS_N_INSNS (6), /* fp_mult_df */
894 COSTS_N_INSNS (15), /* fp_div_sf */
895 COSTS_N_INSNS (16), /* fp_div_df */
896 COSTS_N_INSNS (17), /* int_mult_si */
897 COSTS_N_INSNS (17), /* int_mult_di */
898 COSTS_N_INSNS (38), /* int_div_si */
899 COSTS_N_INSNS (38), /* int_div_di */
901 6 /* memory_latency */
904 COSTS_N_INSNS (6), /* fp_add */
905 COSTS_N_INSNS (7), /* fp_mult_sf */
906 COSTS_N_INSNS (8), /* fp_mult_df */
907 COSTS_N_INSNS (23), /* fp_div_sf */
908 COSTS_N_INSNS (36), /* fp_div_df */
909 COSTS_N_INSNS (10), /* int_mult_si */
910 COSTS_N_INSNS (10), /* int_mult_di */
911 COSTS_N_INSNS (69), /* int_div_si */
912 COSTS_N_INSNS (69), /* int_div_di */
914 6 /* memory_latency */
926 /* The only costs that appear to be updated here are
927 integer multiplication. */
929 COSTS_N_INSNS (4), /* int_mult_si */
930 COSTS_N_INSNS (6), /* int_mult_di */
931 COSTS_N_INSNS (69), /* int_div_si */
932 COSTS_N_INSNS (69), /* int_div_di */
934 4 /* memory_latency */
946 COSTS_N_INSNS (6), /* fp_add */
947 COSTS_N_INSNS (4), /* fp_mult_sf */
948 COSTS_N_INSNS (5), /* fp_mult_df */
949 COSTS_N_INSNS (23), /* fp_div_sf */
950 COSTS_N_INSNS (36), /* fp_div_df */
951 COSTS_N_INSNS (5), /* int_mult_si */
952 COSTS_N_INSNS (5), /* int_mult_di */
953 COSTS_N_INSNS (36), /* int_div_si */
954 COSTS_N_INSNS (36), /* int_div_di */
956 4 /* memory_latency */
959 COSTS_N_INSNS (6), /* fp_add */
960 COSTS_N_INSNS (5), /* fp_mult_sf */
961 COSTS_N_INSNS (6), /* fp_mult_df */
962 COSTS_N_INSNS (30), /* fp_div_sf */
963 COSTS_N_INSNS (59), /* fp_div_df */
964 COSTS_N_INSNS (3), /* int_mult_si */
965 COSTS_N_INSNS (4), /* int_mult_di */
966 COSTS_N_INSNS (42), /* int_div_si */
967 COSTS_N_INSNS (74), /* int_div_di */
969 4 /* memory_latency */
972 COSTS_N_INSNS (6), /* fp_add */
973 COSTS_N_INSNS (5), /* fp_mult_sf */
974 COSTS_N_INSNS (6), /* fp_mult_df */
975 COSTS_N_INSNS (30), /* fp_div_sf */
976 COSTS_N_INSNS (59), /* fp_div_df */
977 COSTS_N_INSNS (5), /* int_mult_si */
978 COSTS_N_INSNS (9), /* int_mult_di */
979 COSTS_N_INSNS (42), /* int_div_si */
980 COSTS_N_INSNS (74), /* int_div_di */
982 4 /* memory_latency */
985 /* The only costs that are changed here are
986 integer multiplication. */
987 COSTS_N_INSNS (6), /* fp_add */
988 COSTS_N_INSNS (7), /* fp_mult_sf */
989 COSTS_N_INSNS (8), /* fp_mult_df */
990 COSTS_N_INSNS (23), /* fp_div_sf */
991 COSTS_N_INSNS (36), /* fp_div_df */
992 COSTS_N_INSNS (5), /* int_mult_si */
993 COSTS_N_INSNS (9), /* int_mult_di */
994 COSTS_N_INSNS (69), /* int_div_si */
995 COSTS_N_INSNS (69), /* int_div_di */
997 4 /* memory_latency */
1003 /* The only costs that are changed here are
1004 integer multiplication. */
1005 COSTS_N_INSNS (6), /* fp_add */
1006 COSTS_N_INSNS (7), /* fp_mult_sf */
1007 COSTS_N_INSNS (8), /* fp_mult_df */
1008 COSTS_N_INSNS (23), /* fp_div_sf */
1009 COSTS_N_INSNS (36), /* fp_div_df */
1010 COSTS_N_INSNS (3), /* int_mult_si */
1011 COSTS_N_INSNS (8), /* int_mult_di */
1012 COSTS_N_INSNS (69), /* int_div_si */
1013 COSTS_N_INSNS (69), /* int_div_di */
1014 1, /* branch_cost */
1015 4 /* memory_latency */
1018 COSTS_N_INSNS (4), /* fp_add */
1019 COSTS_N_INSNS (4), /* fp_mult_sf */
1020 COSTS_N_INSNS (4), /* fp_mult_df */
1021 COSTS_N_INSNS (24), /* fp_div_sf */
1022 COSTS_N_INSNS (32), /* fp_div_df */
1023 COSTS_N_INSNS (3), /* int_mult_si */
1024 COSTS_N_INSNS (4), /* int_mult_di */
1025 COSTS_N_INSNS (36), /* int_div_si */
1026 COSTS_N_INSNS (68), /* int_div_di */
1027 1, /* branch_cost */
1028 4 /* memory_latency */
1036 /* Nonzero if -march should decide the default value of MASK_SOFT_FLOAT. */
1037 #ifndef MIPS_MARCH_CONTROLS_SOFT_FLOAT
1038 #define MIPS_MARCH_CONTROLS_SOFT_FLOAT 0
1041 /* Initialize the GCC target structure. */
1042 #undef TARGET_ASM_ALIGNED_HI_OP
1043 #define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
1044 #undef TARGET_ASM_ALIGNED_SI_OP
1045 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
1046 #undef TARGET_ASM_ALIGNED_DI_OP
1047 #define TARGET_ASM_ALIGNED_DI_OP "\t.dword\t"
1049 #undef TARGET_ASM_FUNCTION_PROLOGUE
1050 #define TARGET_ASM_FUNCTION_PROLOGUE mips_output_function_prologue
1051 #undef TARGET_ASM_FUNCTION_EPILOGUE
1052 #define TARGET_ASM_FUNCTION_EPILOGUE mips_output_function_epilogue
1053 #undef TARGET_ASM_SELECT_RTX_SECTION
1054 #define TARGET_ASM_SELECT_RTX_SECTION mips_select_rtx_section
1055 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
1056 #define TARGET_ASM_FUNCTION_RODATA_SECTION mips_function_rodata_section
1058 #undef TARGET_SCHED_REORDER
1059 #define TARGET_SCHED_REORDER mips_sched_reorder
1060 #undef TARGET_SCHED_VARIABLE_ISSUE
1061 #define TARGET_SCHED_VARIABLE_ISSUE mips_variable_issue
1062 #undef TARGET_SCHED_ADJUST_COST
1063 #define TARGET_SCHED_ADJUST_COST mips_adjust_cost
1064 #undef TARGET_SCHED_ISSUE_RATE
1065 #define TARGET_SCHED_ISSUE_RATE mips_issue_rate
1066 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1067 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD \
1068 mips_multipass_dfa_lookahead
1070 #undef TARGET_DEFAULT_TARGET_FLAGS
1071 #define TARGET_DEFAULT_TARGET_FLAGS \
1073 | TARGET_CPU_DEFAULT \
1074 | TARGET_ENDIAN_DEFAULT \
1075 | TARGET_FP_EXCEPTIONS_DEFAULT \
1076 | MASK_CHECK_ZERO_DIV \
1078 #undef TARGET_HANDLE_OPTION
1079 #define TARGET_HANDLE_OPTION mips_handle_option
1081 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
1082 #define TARGET_FUNCTION_OK_FOR_SIBCALL mips_function_ok_for_sibcall
1084 #undef TARGET_VALID_POINTER_MODE
1085 #define TARGET_VALID_POINTER_MODE mips_valid_pointer_mode
1086 #undef TARGET_RTX_COSTS
1087 #define TARGET_RTX_COSTS mips_rtx_costs
1088 #undef TARGET_ADDRESS_COST
1089 #define TARGET_ADDRESS_COST mips_address_cost
1091 #undef TARGET_IN_SMALL_DATA_P
1092 #define TARGET_IN_SMALL_DATA_P mips_in_small_data_p
1094 #undef TARGET_MACHINE_DEPENDENT_REORG
1095 #define TARGET_MACHINE_DEPENDENT_REORG mips_reorg
1097 #undef TARGET_ASM_FILE_START
1098 #undef TARGET_ASM_FILE_END
1099 #define TARGET_ASM_FILE_START mips_file_start
1100 #define TARGET_ASM_FILE_END mips_file_end
1101 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
1102 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
1104 #undef TARGET_INIT_LIBFUNCS
1105 #define TARGET_INIT_LIBFUNCS mips_init_libfuncs
1107 #undef TARGET_BUILD_BUILTIN_VA_LIST
1108 #define TARGET_BUILD_BUILTIN_VA_LIST mips_build_builtin_va_list
1109 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
1110 #define TARGET_GIMPLIFY_VA_ARG_EXPR mips_gimplify_va_arg_expr
1112 #undef TARGET_PROMOTE_FUNCTION_ARGS
1113 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
1114 #undef TARGET_PROMOTE_FUNCTION_RETURN
1115 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
1116 #undef TARGET_PROMOTE_PROTOTYPES
1117 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
1119 #undef TARGET_RETURN_IN_MEMORY
1120 #define TARGET_RETURN_IN_MEMORY mips_return_in_memory
1121 #undef TARGET_RETURN_IN_MSB
1122 #define TARGET_RETURN_IN_MSB mips_return_in_msb
1124 #undef TARGET_ASM_OUTPUT_MI_THUNK
1125 #define TARGET_ASM_OUTPUT_MI_THUNK mips_output_mi_thunk
1126 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
1127 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
1129 #undef TARGET_SETUP_INCOMING_VARARGS
1130 #define TARGET_SETUP_INCOMING_VARARGS mips_setup_incoming_varargs
1131 #undef TARGET_STRICT_ARGUMENT_NAMING
1132 #define TARGET_STRICT_ARGUMENT_NAMING mips_strict_argument_naming
1133 #undef TARGET_MUST_PASS_IN_STACK
1134 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
1135 #undef TARGET_PASS_BY_REFERENCE
1136 #define TARGET_PASS_BY_REFERENCE mips_pass_by_reference
1137 #undef TARGET_CALLEE_COPIES
1138 #define TARGET_CALLEE_COPIES mips_callee_copies
1139 #undef TARGET_ARG_PARTIAL_BYTES
1140 #define TARGET_ARG_PARTIAL_BYTES mips_arg_partial_bytes
1142 #undef TARGET_VECTOR_MODE_SUPPORTED_P
1143 #define TARGET_VECTOR_MODE_SUPPORTED_P mips_vector_mode_supported_p
1145 #undef TARGET_INIT_BUILTINS
1146 #define TARGET_INIT_BUILTINS mips_init_builtins
1147 #undef TARGET_EXPAND_BUILTIN
1148 #define TARGET_EXPAND_BUILTIN mips_expand_builtin
1150 #undef TARGET_HAVE_TLS
1151 #define TARGET_HAVE_TLS HAVE_AS_TLS
1153 #undef TARGET_CANNOT_FORCE_CONST_MEM
1154 #define TARGET_CANNOT_FORCE_CONST_MEM mips_cannot_force_const_mem
1156 #undef TARGET_ENCODE_SECTION_INFO
1157 #define TARGET_ENCODE_SECTION_INFO mips_encode_section_info
1159 #undef TARGET_ATTRIBUTE_TABLE
1160 #define TARGET_ATTRIBUTE_TABLE mips_attribute_table
1162 #undef TARGET_EXTRA_LIVE_ON_ENTRY
1163 /* With -mabicalls (which is the default on GNU/Linux),
1164 PIC_FUNCTION_ADDR_REGNUM is live on function entry and is to
1165 initialize $28, which is PIC_OFFSET_TABLE_REGNUM. */
1166 #define TARGET_EXTRA_LIVE_ON_ENTRY mips_extra_live_on_entry
1168 struct gcc_target targetm = TARGET_INITIALIZER;
1170 /* Classify symbol X, which must be a SYMBOL_REF or a LABEL_REF. */
1172 static enum mips_symbol_type
1173 mips_classify_symbol (rtx x)
1175 if (GET_CODE (x) == LABEL_REF)
1178 return SYMBOL_CONSTANT_POOL;
1179 if (TARGET_ABICALLS)
1180 return SYMBOL_GOT_LOCAL;
1181 return SYMBOL_GENERAL;
1184 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1186 if (SYMBOL_REF_TLS_MODEL (x))
1189 if (CONSTANT_POOL_ADDRESS_P (x))
1192 return SYMBOL_CONSTANT_POOL;
1194 if (TARGET_ABICALLS)
1195 return SYMBOL_GOT_LOCAL;
1197 if (GET_MODE_SIZE (get_pool_mode (x)) <= mips_section_threshold)
1198 return SYMBOL_SMALL_DATA;
1200 return SYMBOL_GENERAL;
1203 if (SYMBOL_REF_SMALL_P (x))
1204 return SYMBOL_SMALL_DATA;
1206 if (TARGET_ABICALLS)
1208 if (SYMBOL_REF_DECL (x) == 0)
1209 return SYMBOL_REF_LOCAL_P (x) ? SYMBOL_GOT_LOCAL : SYMBOL_GOT_GLOBAL;
1211 /* There are three cases to consider:
1213 - o32 PIC (either with or without explicit relocs)
1214 - n32/n64 PIC without explicit relocs
1215 - n32/n64 PIC with explicit relocs
1217 In the first case, both local and global accesses will use an
1218 R_MIPS_GOT16 relocation. We must correctly predict which of
1219 the two semantics (local or global) the assembler and linker
1220 will apply. The choice doesn't depend on the symbol's
1221 visibility, so we deliberately ignore decl_visibility and
1224 In the second case, the assembler will not use R_MIPS_GOT16
1225 relocations, but it chooses between local and global accesses
1226 in the same way as for o32 PIC.
1228 In the third case we have more freedom since both forms of
1229 access will work for any kind of symbol. However, there seems
1230 little point in doing things differently. */
1231 if (DECL_P (SYMBOL_REF_DECL (x)) && TREE_PUBLIC (SYMBOL_REF_DECL (x)))
1232 return SYMBOL_GOT_GLOBAL;
1234 return SYMBOL_GOT_LOCAL;
1237 return SYMBOL_GENERAL;
1241 /* Split X into a base and a constant offset, storing them in *BASE
1242 and *OFFSET respectively. */
1245 mips_split_const (rtx x, rtx *base, HOST_WIDE_INT *offset)
1249 if (GET_CODE (x) == CONST)
1252 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
1254 *offset += INTVAL (XEXP (x, 1));
1261 /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
1262 to the same object as SYMBOL. */
1265 mips_offset_within_object_p (rtx symbol, HOST_WIDE_INT offset)
1267 if (GET_CODE (symbol) != SYMBOL_REF)
1270 if (CONSTANT_POOL_ADDRESS_P (symbol)
1272 && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
1275 if (SYMBOL_REF_DECL (symbol) != 0
1277 && offset < int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (symbol))))
1284 /* Return true if X is a symbolic constant that can be calculated in
1285 the same way as a bare symbol. If it is, store the type of the
1286 symbol in *SYMBOL_TYPE. */
1289 mips_symbolic_constant_p (rtx x, enum mips_symbol_type *symbol_type)
1291 HOST_WIDE_INT offset;
1293 mips_split_const (x, &x, &offset);
1294 if (UNSPEC_ADDRESS_P (x))
1295 *symbol_type = UNSPEC_ADDRESS_TYPE (x);
1296 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1298 *symbol_type = mips_classify_symbol (x);
1299 if (*symbol_type == SYMBOL_TLS)
1308 /* Check whether a nonzero offset is valid for the underlying
1310 switch (*symbol_type)
1312 case SYMBOL_GENERAL:
1313 case SYMBOL_64_HIGH:
1316 /* If the target has 64-bit pointers and the object file only
1317 supports 32-bit symbols, the values of those symbols will be
1318 sign-extended. In this case we can't allow an arbitrary offset
1319 in case the 32-bit value X + OFFSET has a different sign from X. */
1320 if (Pmode == DImode && !ABI_HAS_64BIT_SYMBOLS)
1321 return mips_offset_within_object_p (x, offset);
1323 /* In other cases the relocations can handle any offset. */
1326 case SYMBOL_CONSTANT_POOL:
1327 /* Allow constant pool references to be converted to LABEL+CONSTANT.
1328 In this case, we no longer have access to the underlying constant,
1329 but the original symbol-based access was known to be valid. */
1330 if (GET_CODE (x) == LABEL_REF)
1335 case SYMBOL_SMALL_DATA:
1336 /* Make sure that the offset refers to something within the
1337 underlying object. This should guarantee that the final
1338 PC- or GP-relative offset is within the 16-bit limit. */
1339 return mips_offset_within_object_p (x, offset);
1341 case SYMBOL_GOT_LOCAL:
1342 case SYMBOL_GOTOFF_PAGE:
1343 /* The linker should provide enough local GOT entries for a
1344 16-bit offset. Larger offsets may lead to GOT overflow. */
1345 return SMALL_OPERAND (offset);
1347 case SYMBOL_GOT_GLOBAL:
1348 case SYMBOL_GOTOFF_GLOBAL:
1349 case SYMBOL_GOTOFF_CALL:
1350 case SYMBOL_GOTOFF_LOADGP:
1355 case SYMBOL_GOTTPREL:
1363 /* Return true if X is a symbolic constant whose value is not split
1364 into separate relocations. */
1367 mips_atomic_symbolic_constant_p (rtx x)
1369 enum mips_symbol_type type;
1370 return mips_symbolic_constant_p (x, &type) && !mips_split_p[type];
1374 /* This function is used to implement REG_MODE_OK_FOR_BASE_P. */
1377 mips_regno_mode_ok_for_base_p (int regno, enum machine_mode mode, int strict)
1379 if (regno >= FIRST_PSEUDO_REGISTER)
1383 regno = reg_renumber[regno];
1386 /* These fake registers will be eliminated to either the stack or
1387 hard frame pointer, both of which are usually valid base registers.
1388 Reload deals with the cases where the eliminated form isn't valid. */
1389 if (regno == ARG_POINTER_REGNUM || regno == FRAME_POINTER_REGNUM)
1392 /* In mips16 mode, the stack pointer can only address word and doubleword
1393 values, nothing smaller. There are two problems here:
1395 (a) Instantiating virtual registers can introduce new uses of the
1396 stack pointer. If these virtual registers are valid addresses,
1397 the stack pointer should be too.
1399 (b) Most uses of the stack pointer are not made explicit until
1400 FRAME_POINTER_REGNUM and ARG_POINTER_REGNUM have been eliminated.
1401 We don't know until that stage whether we'll be eliminating to the
1402 stack pointer (which needs the restriction) or the hard frame
1403 pointer (which doesn't).
1405 All in all, it seems more consistent to only enforce this restriction
1406 during and after reload. */
1407 if (TARGET_MIPS16 && regno == STACK_POINTER_REGNUM)
1408 return !strict || GET_MODE_SIZE (mode) == 4 || GET_MODE_SIZE (mode) == 8;
1410 return TARGET_MIPS16 ? M16_REG_P (regno) : GP_REG_P (regno);
1414 /* Return true if X is a valid base register for the given mode.
1415 Allow only hard registers if STRICT. */
1418 mips_valid_base_register_p (rtx x, enum machine_mode mode, int strict)
1420 if (!strict && GET_CODE (x) == SUBREG)
1424 && mips_regno_mode_ok_for_base_p (REGNO (x), mode, strict));
1428 /* Return true if symbols of type SYMBOL_TYPE can directly address a value
1429 with mode MODE. This is used for both symbolic and LO_SUM addresses. */
1432 mips_symbolic_address_p (enum mips_symbol_type symbol_type,
1433 enum machine_mode mode)
1435 switch (symbol_type)
1437 case SYMBOL_GENERAL:
1438 return !TARGET_MIPS16;
1440 case SYMBOL_SMALL_DATA:
1443 case SYMBOL_CONSTANT_POOL:
1444 /* PC-relative addressing is only available for lw and ld. */
1445 return GET_MODE_SIZE (mode) == 4 || GET_MODE_SIZE (mode) == 8;
1447 case SYMBOL_GOT_LOCAL:
1450 case SYMBOL_GOT_GLOBAL:
1451 /* The address will have to be loaded from the GOT first. */
1458 case SYMBOL_GOTTPREL:
1462 case SYMBOL_GOTOFF_PAGE:
1463 case SYMBOL_GOTOFF_GLOBAL:
1464 case SYMBOL_GOTOFF_CALL:
1465 case SYMBOL_GOTOFF_LOADGP:
1466 case SYMBOL_64_HIGH:
1475 /* Return true if X is a valid address for machine mode MODE. If it is,
1476 fill in INFO appropriately. STRICT is true if we should only accept
1477 hard base registers. */
1480 mips_classify_address (struct mips_address_info *info, rtx x,
1481 enum machine_mode mode, int strict)
1483 switch (GET_CODE (x))
1487 info->type = ADDRESS_REG;
1489 info->offset = const0_rtx;
1490 return mips_valid_base_register_p (info->reg, mode, strict);
1493 info->type = ADDRESS_REG;
1494 info->reg = XEXP (x, 0);
1495 info->offset = XEXP (x, 1);
1496 return (mips_valid_base_register_p (info->reg, mode, strict)
1497 && const_arith_operand (info->offset, VOIDmode));
1500 info->type = ADDRESS_LO_SUM;
1501 info->reg = XEXP (x, 0);
1502 info->offset = XEXP (x, 1);
1503 return (mips_valid_base_register_p (info->reg, mode, strict)
1504 && mips_symbolic_constant_p (info->offset, &info->symbol_type)
1505 && mips_symbolic_address_p (info->symbol_type, mode)
1506 && mips_lo_relocs[info->symbol_type] != 0);
1509 /* Small-integer addresses don't occur very often, but they
1510 are legitimate if $0 is a valid base register. */
1511 info->type = ADDRESS_CONST_INT;
1512 return !TARGET_MIPS16 && SMALL_INT (x);
1517 info->type = ADDRESS_SYMBOLIC;
1518 return (mips_symbolic_constant_p (x, &info->symbol_type)
1519 && mips_symbolic_address_p (info->symbol_type, mode)
1520 && !mips_split_p[info->symbol_type]);
1527 /* Return true if X is a thread-local symbol. */
1530 mips_tls_operand_p (rtx x)
1532 return GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0;
1535 /* Return true if X can not be forced into a constant pool. */
1538 mips_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
1540 return mips_tls_operand_p (*x);
1543 /* Return true if X can not be forced into a constant pool. */
1546 mips_cannot_force_const_mem (rtx x)
1548 if (! TARGET_HAVE_TLS)
1551 return for_each_rtx (&x, &mips_tls_symbol_ref_1, 0);
1554 /* Return the number of instructions needed to load a symbol of the
1555 given type into a register. If valid in an address, the same number
1556 of instructions are needed for loads and stores. Treat extended
1557 mips16 instructions as two instructions. */
1560 mips_symbol_insns (enum mips_symbol_type type)
1564 case SYMBOL_GENERAL:
1565 /* In mips16 code, general symbols must be fetched from the
1570 /* When using 64-bit symbols, we need 5 preparatory instructions,
1573 lui $at,%highest(symbol)
1574 daddiu $at,$at,%higher(symbol)
1576 daddiu $at,$at,%hi(symbol)
1579 The final address is then $at + %lo(symbol). With 32-bit
1580 symbols we just need a preparatory lui. */
1581 return (ABI_HAS_64BIT_SYMBOLS ? 6 : 2);
1583 case SYMBOL_SMALL_DATA:
1586 case SYMBOL_CONSTANT_POOL:
1587 /* This case is for mips16 only. Assume we'll need an
1588 extended instruction. */
1591 case SYMBOL_GOT_LOCAL:
1592 case SYMBOL_GOT_GLOBAL:
1593 /* Unless -funit-at-a-time is in effect, we can't be sure whether
1594 the local/global classification is accurate. See override_options
1597 The worst cases are:
1599 (1) For local symbols when generating o32 or o64 code. The assembler
1605 ...and the final address will be $at + %lo(symbol).
1607 (2) For global symbols when -mxgot. The assembler will use:
1609 lui $at,%got_hi(symbol)
1612 ...and the final address will be $at + %got_lo(symbol). */
1615 case SYMBOL_GOTOFF_PAGE:
1616 case SYMBOL_GOTOFF_GLOBAL:
1617 case SYMBOL_GOTOFF_CALL:
1618 case SYMBOL_GOTOFF_LOADGP:
1619 case SYMBOL_64_HIGH:
1625 case SYMBOL_GOTTPREL:
1627 /* Check whether the offset is a 16- or 32-bit value. */
1628 return mips_split_p[type] ? 2 : 1;
1631 /* We don't treat a bare TLS symbol as a constant. */
1637 /* Return true if X is a legitimate $sp-based address for mode MDOE. */
1640 mips_stack_address_p (rtx x, enum machine_mode mode)
1642 struct mips_address_info addr;
1644 return (mips_classify_address (&addr, x, mode, false)
1645 && addr.type == ADDRESS_REG
1646 && addr.reg == stack_pointer_rtx);
1649 /* Return true if a value at OFFSET bytes from BASE can be accessed
1650 using an unextended mips16 instruction. MODE is the mode of the
1653 Usually the offset in an unextended instruction is a 5-bit field.
1654 The offset is unsigned and shifted left once for HIs, twice
1655 for SIs, and so on. An exception is SImode accesses off the
1656 stack pointer, which have an 8-bit immediate field. */
1659 mips16_unextended_reference_p (enum machine_mode mode, rtx base, rtx offset)
1662 && GET_CODE (offset) == CONST_INT
1663 && INTVAL (offset) >= 0
1664 && (INTVAL (offset) & (GET_MODE_SIZE (mode) - 1)) == 0)
1666 if (GET_MODE_SIZE (mode) == 4 && base == stack_pointer_rtx)
1667 return INTVAL (offset) < 256 * GET_MODE_SIZE (mode);
1668 return INTVAL (offset) < 32 * GET_MODE_SIZE (mode);
1674 /* Return the number of instructions needed to load or store a value
1675 of mode MODE at X. Return 0 if X isn't valid for MODE.
1677 For mips16 code, count extended instructions as two instructions. */
1680 mips_address_insns (rtx x, enum machine_mode mode)
1682 struct mips_address_info addr;
1685 if (mode == BLKmode)
1686 /* BLKmode is used for single unaligned loads and stores. */
1689 /* Each word of a multi-word value will be accessed individually. */
1690 factor = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1692 if (mips_classify_address (&addr, x, mode, false))
1697 && !mips16_unextended_reference_p (mode, addr.reg, addr.offset))
1701 case ADDRESS_LO_SUM:
1702 return (TARGET_MIPS16 ? factor * 2 : factor);
1704 case ADDRESS_CONST_INT:
1707 case ADDRESS_SYMBOLIC:
1708 return factor * mips_symbol_insns (addr.symbol_type);
1714 /* Likewise for constant X. */
1717 mips_const_insns (rtx x)
1719 struct mips_integer_op codes[MIPS_MAX_INTEGER_OPS];
1720 enum mips_symbol_type symbol_type;
1721 HOST_WIDE_INT offset;
1723 switch (GET_CODE (x))
1727 || !mips_symbolic_constant_p (XEXP (x, 0), &symbol_type)
1728 || !mips_split_p[symbol_type])
1735 /* Unsigned 8-bit constants can be loaded using an unextended
1736 LI instruction. Unsigned 16-bit constants can be loaded
1737 using an extended LI. Negative constants must be loaded
1738 using LI and then negated. */
1739 return (INTVAL (x) >= 0 && INTVAL (x) < 256 ? 1
1740 : SMALL_OPERAND_UNSIGNED (INTVAL (x)) ? 2
1741 : INTVAL (x) > -256 && INTVAL (x) < 0 ? 2
1742 : SMALL_OPERAND_UNSIGNED (-INTVAL (x)) ? 3
1745 return mips_build_integer (codes, INTVAL (x));
1749 return (!TARGET_MIPS16 && x == CONST0_RTX (GET_MODE (x)) ? 1 : 0);
1755 /* See if we can refer to X directly. */
1756 if (mips_symbolic_constant_p (x, &symbol_type))
1757 return mips_symbol_insns (symbol_type);
1759 /* Otherwise try splitting the constant into a base and offset.
1760 16-bit offsets can be added using an extra addiu. Larger offsets
1761 must be calculated separately and then added to the base. */
1762 mips_split_const (x, &x, &offset);
1765 int n = mips_const_insns (x);
1768 if (SMALL_OPERAND (offset))
1771 return n + 1 + mips_build_integer (codes, offset);
1778 return mips_symbol_insns (mips_classify_symbol (x));
1786 /* Return the number of instructions needed for memory reference X.
1787 Count extended mips16 instructions as two instructions. */
1790 mips_fetch_insns (rtx x)
1792 gcc_assert (MEM_P (x));
1793 return mips_address_insns (XEXP (x, 0), GET_MODE (x));
1797 /* Return the number of instructions needed for an integer division. */
1800 mips_idiv_insns (void)
1805 if (TARGET_CHECK_ZERO_DIV)
1807 if (GENERATE_DIVIDE_TRAPS)
1813 if (TARGET_FIX_R4000 || TARGET_FIX_R4400)
1818 /* This function is used to implement GO_IF_LEGITIMATE_ADDRESS. It
1819 returns a nonzero value if X is a legitimate address for a memory
1820 operand of the indicated MODE. STRICT is nonzero if this function
1821 is called during reload. */
1824 mips_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
1826 struct mips_address_info addr;
1828 return mips_classify_address (&addr, x, mode, strict);
1832 /* Copy VALUE to a register and return that register. If new psuedos
1833 are allowed, copy it into a new register, otherwise use DEST. */
1836 mips_force_temporary (rtx dest, rtx value)
1838 if (!no_new_pseudos)
1839 return force_reg (Pmode, value);
1842 emit_move_insn (copy_rtx (dest), value);
1848 /* Return a LO_SUM expression for ADDR. TEMP is as for mips_force_temporary
1849 and is used to load the high part into a register. */
1852 mips_split_symbol (rtx temp, rtx addr)
1857 high = mips16_gp_pseudo_reg ();
1859 high = mips_force_temporary (temp, gen_rtx_HIGH (Pmode, copy_rtx (addr)));
1860 return gen_rtx_LO_SUM (Pmode, high, addr);
1864 /* Return an UNSPEC address with underlying address ADDRESS and symbol
1865 type SYMBOL_TYPE. */
1868 mips_unspec_address (rtx address, enum mips_symbol_type symbol_type)
1871 HOST_WIDE_INT offset;
1873 mips_split_const (address, &base, &offset);
1874 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base),
1875 UNSPEC_ADDRESS_FIRST + symbol_type);
1876 return plus_constant (gen_rtx_CONST (Pmode, base), offset);
1880 /* If mips_unspec_address (ADDR, SYMBOL_TYPE) is a 32-bit value, add the
1881 high part to BASE and return the result. Just return BASE otherwise.
1882 TEMP is available as a temporary register if needed.
1884 The returned expression can be used as the first operand to a LO_SUM. */
1887 mips_unspec_offset_high (rtx temp, rtx base, rtx addr,
1888 enum mips_symbol_type symbol_type)
1890 if (mips_split_p[symbol_type])
1892 addr = gen_rtx_HIGH (Pmode, mips_unspec_address (addr, symbol_type));
1893 addr = mips_force_temporary (temp, addr);
1894 return mips_force_temporary (temp, gen_rtx_PLUS (Pmode, addr, base));
1900 /* Return a legitimate address for REG + OFFSET. TEMP is as for
1901 mips_force_temporary; it is only needed when OFFSET is not a
1905 mips_add_offset (rtx temp, rtx reg, HOST_WIDE_INT offset)
1907 if (!SMALL_OPERAND (offset))
1912 /* Load the full offset into a register so that we can use
1913 an unextended instruction for the address itself. */
1914 high = GEN_INT (offset);
1919 /* Leave OFFSET as a 16-bit offset and put the excess in HIGH. */
1920 high = GEN_INT (CONST_HIGH_PART (offset));
1921 offset = CONST_LOW_PART (offset);
1923 high = mips_force_temporary (temp, high);
1924 reg = mips_force_temporary (temp, gen_rtx_PLUS (Pmode, high, reg));
1926 return plus_constant (reg, offset);
1929 /* Emit a call to __tls_get_addr. SYM is the TLS symbol we are
1930 referencing, and TYPE is the symbol type to use (either global
1931 dynamic or local dynamic). V0 is an RTX for the return value
1932 location. The entire insn sequence is returned. */
1934 static GTY(()) rtx mips_tls_symbol;
1937 mips_call_tls_get_addr (rtx sym, enum mips_symbol_type type, rtx v0)
1939 rtx insn, loc, tga, a0;
1941 a0 = gen_rtx_REG (Pmode, GP_ARG_FIRST);
1943 if (!mips_tls_symbol)
1944 mips_tls_symbol = init_one_libfunc ("__tls_get_addr");
1946 loc = mips_unspec_address (sym, type);
1950 emit_insn (gen_rtx_SET (Pmode, a0,
1951 gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, loc)));
1952 tga = gen_rtx_MEM (Pmode, mips_tls_symbol);
1953 insn = emit_call_insn (gen_call_value (v0, tga, const0_rtx, const0_rtx));
1954 CONST_OR_PURE_CALL_P (insn) = 1;
1955 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), v0);
1956 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), a0);
1957 insn = get_insns ();
1964 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
1965 return value will be a valid address and move_operand (either a REG
1969 mips_legitimize_tls_address (rtx loc)
1971 rtx dest, insn, v0, v1, tmp1, tmp2, eqv;
1972 enum tls_model model;
1974 v0 = gen_rtx_REG (Pmode, GP_RETURN);
1975 v1 = gen_rtx_REG (Pmode, GP_RETURN + 1);
1977 model = SYMBOL_REF_TLS_MODEL (loc);
1981 case TLS_MODEL_GLOBAL_DYNAMIC:
1982 insn = mips_call_tls_get_addr (loc, SYMBOL_TLSGD, v0);
1983 dest = gen_reg_rtx (Pmode);
1984 emit_libcall_block (insn, dest, v0, loc);
1987 case TLS_MODEL_LOCAL_DYNAMIC:
1988 insn = mips_call_tls_get_addr (loc, SYMBOL_TLSLDM, v0);
1989 tmp1 = gen_reg_rtx (Pmode);
1991 /* Attach a unique REG_EQUIV, to allow the RTL optimizers to
1992 share the LDM result with other LD model accesses. */
1993 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
1995 emit_libcall_block (insn, tmp1, v0, eqv);
1997 tmp2 = mips_unspec_offset_high (NULL, tmp1, loc, SYMBOL_DTPREL);
1998 dest = gen_rtx_LO_SUM (Pmode, tmp2,
1999 mips_unspec_address (loc, SYMBOL_DTPREL));
2002 case TLS_MODEL_INITIAL_EXEC:
2003 tmp1 = gen_reg_rtx (Pmode);
2004 tmp2 = mips_unspec_address (loc, SYMBOL_GOTTPREL);
2005 if (Pmode == DImode)
2007 emit_insn (gen_tls_get_tp_di (v1));
2008 emit_insn (gen_load_gotdi (tmp1, pic_offset_table_rtx, tmp2));
2012 emit_insn (gen_tls_get_tp_si (v1));
2013 emit_insn (gen_load_gotsi (tmp1, pic_offset_table_rtx, tmp2));
2015 dest = gen_reg_rtx (Pmode);
2016 emit_insn (gen_add3_insn (dest, tmp1, v1));
2019 case TLS_MODEL_LOCAL_EXEC:
2021 if (Pmode == DImode)
2022 emit_insn (gen_tls_get_tp_di (v1));
2024 emit_insn (gen_tls_get_tp_si (v1));
2026 tmp1 = mips_unspec_offset_high (NULL, v1, loc, SYMBOL_TPREL);
2027 dest = gen_rtx_LO_SUM (Pmode, tmp1,
2028 mips_unspec_address (loc, SYMBOL_TPREL));
2038 /* This function is used to implement LEGITIMIZE_ADDRESS. If *XLOC can
2039 be legitimized in a way that the generic machinery might not expect,
2040 put the new address in *XLOC and return true. MODE is the mode of
2041 the memory being accessed. */
2044 mips_legitimize_address (rtx *xloc, enum machine_mode mode)
2046 enum mips_symbol_type symbol_type;
2048 if (mips_tls_operand_p (*xloc))
2050 *xloc = mips_legitimize_tls_address (*xloc);
2054 /* See if the address can split into a high part and a LO_SUM. */
2055 if (mips_symbolic_constant_p (*xloc, &symbol_type)
2056 && mips_symbolic_address_p (symbol_type, mode)
2057 && mips_split_p[symbol_type])
2059 *xloc = mips_split_symbol (0, *xloc);
2063 if (GET_CODE (*xloc) == PLUS && GET_CODE (XEXP (*xloc, 1)) == CONST_INT)
2065 /* Handle REG + CONSTANT using mips_add_offset. */
2068 reg = XEXP (*xloc, 0);
2069 if (!mips_valid_base_register_p (reg, mode, 0))
2070 reg = copy_to_mode_reg (Pmode, reg);
2071 *xloc = mips_add_offset (0, reg, INTVAL (XEXP (*xloc, 1)));
2079 /* Subroutine of mips_build_integer (with the same interface).
2080 Assume that the final action in the sequence should be a left shift. */
2083 mips_build_shift (struct mips_integer_op *codes, HOST_WIDE_INT value)
2085 unsigned int i, shift;
2087 /* Shift VALUE right until its lowest bit is set. Shift arithmetically
2088 since signed numbers are easier to load than unsigned ones. */
2090 while ((value & 1) == 0)
2091 value /= 2, shift++;
2093 i = mips_build_integer (codes, value);
2094 codes[i].code = ASHIFT;
2095 codes[i].value = shift;
2100 /* As for mips_build_shift, but assume that the final action will be
2101 an IOR or PLUS operation. */
2104 mips_build_lower (struct mips_integer_op *codes, unsigned HOST_WIDE_INT value)
2106 unsigned HOST_WIDE_INT high;
2109 high = value & ~(unsigned HOST_WIDE_INT) 0xffff;
2110 if (!LUI_OPERAND (high) && (value & 0x18000) == 0x18000)
2112 /* The constant is too complex to load with a simple lui/ori pair
2113 so our goal is to clear as many trailing zeros as possible.
2114 In this case, we know bit 16 is set and that the low 16 bits
2115 form a negative number. If we subtract that number from VALUE,
2116 we will clear at least the lowest 17 bits, maybe more. */
2117 i = mips_build_integer (codes, CONST_HIGH_PART (value));
2118 codes[i].code = PLUS;
2119 codes[i].value = CONST_LOW_PART (value);
2123 i = mips_build_integer (codes, high);
2124 codes[i].code = IOR;
2125 codes[i].value = value & 0xffff;
2131 /* Fill CODES with a sequence of rtl operations to load VALUE.
2132 Return the number of operations needed. */
2135 mips_build_integer (struct mips_integer_op *codes,
2136 unsigned HOST_WIDE_INT value)
2138 if (SMALL_OPERAND (value)
2139 || SMALL_OPERAND_UNSIGNED (value)
2140 || LUI_OPERAND (value))
2142 /* The value can be loaded with a single instruction. */
2143 codes[0].code = UNKNOWN;
2144 codes[0].value = value;
2147 else if ((value & 1) != 0 || LUI_OPERAND (CONST_HIGH_PART (value)))
2149 /* Either the constant is a simple LUI/ORI combination or its
2150 lowest bit is set. We don't want to shift in this case. */
2151 return mips_build_lower (codes, value);
2153 else if ((value & 0xffff) == 0)
2155 /* The constant will need at least three actions. The lowest
2156 16 bits are clear, so the final action will be a shift. */
2157 return mips_build_shift (codes, value);
2161 /* The final action could be a shift, add or inclusive OR.
2162 Rather than use a complex condition to select the best
2163 approach, try both mips_build_shift and mips_build_lower
2164 and pick the one that gives the shortest sequence.
2165 Note that this case is only used once per constant. */
2166 struct mips_integer_op alt_codes[MIPS_MAX_INTEGER_OPS];
2167 unsigned int cost, alt_cost;
2169 cost = mips_build_shift (codes, value);
2170 alt_cost = mips_build_lower (alt_codes, value);
2171 if (alt_cost < cost)
2173 memcpy (codes, alt_codes, alt_cost * sizeof (codes[0]));
2181 /* Move VALUE into register DEST. */
2184 mips_move_integer (rtx dest, unsigned HOST_WIDE_INT value)
2186 struct mips_integer_op codes[MIPS_MAX_INTEGER_OPS];
2187 enum machine_mode mode;
2188 unsigned int i, cost;
2191 mode = GET_MODE (dest);
2192 cost = mips_build_integer (codes, value);
2194 /* Apply each binary operation to X. Invariant: X is a legitimate
2195 source operand for a SET pattern. */
2196 x = GEN_INT (codes[0].value);
2197 for (i = 1; i < cost; i++)
2200 emit_move_insn (dest, x), x = dest;
2202 x = force_reg (mode, x);
2203 x = gen_rtx_fmt_ee (codes[i].code, mode, x, GEN_INT (codes[i].value));
2206 emit_insn (gen_rtx_SET (VOIDmode, dest, x));
2210 /* Subroutine of mips_legitimize_move. Move constant SRC into register
2211 DEST given that SRC satisfies immediate_operand but doesn't satisfy
2215 mips_legitimize_const_move (enum machine_mode mode, rtx dest, rtx src)
2218 HOST_WIDE_INT offset;
2219 enum mips_symbol_type symbol_type;
2221 /* Split moves of big integers into smaller pieces. In mips16 code,
2222 it's better to force the constant into memory instead. */
2223 if (GET_CODE (src) == CONST_INT && !TARGET_MIPS16)
2225 mips_move_integer (dest, INTVAL (src));
2229 if (mips_tls_operand_p (src))
2231 emit_move_insn (dest, mips_legitimize_tls_address (src));
2235 /* See if the symbol can be split. For mips16, this is often worse than
2236 forcing it in the constant pool since it needs the single-register form
2237 of addiu or daddiu. */
2239 && mips_symbolic_constant_p (src, &symbol_type)
2240 && mips_split_p[symbol_type])
2242 emit_move_insn (dest, mips_split_symbol (dest, src));
2246 /* If we have (const (plus symbol offset)), load the symbol first
2247 and then add in the offset. This is usually better than forcing
2248 the constant into memory, at least in non-mips16 code. */
2249 mips_split_const (src, &base, &offset);
2252 && (!no_new_pseudos || SMALL_OPERAND (offset)))
2254 base = mips_force_temporary (dest, base);
2255 emit_move_insn (dest, mips_add_offset (0, base, offset));
2259 src = force_const_mem (mode, src);
2261 /* When using explicit relocs, constant pool references are sometimes
2262 not legitimate addresses. */
2263 if (!memory_operand (src, VOIDmode))
2264 src = replace_equiv_address (src, mips_split_symbol (dest, XEXP (src, 0)));
2265 emit_move_insn (dest, src);
2269 /* If (set DEST SRC) is not a valid instruction, emit an equivalent
2270 sequence that is valid. */
2273 mips_legitimize_move (enum machine_mode mode, rtx dest, rtx src)
2275 if (!register_operand (dest, mode) && !reg_or_0_operand (src, mode))
2277 emit_move_insn (dest, force_reg (mode, src));
2281 /* Check for individual, fully-reloaded mflo and mfhi instructions. */
2282 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
2283 && REG_P (src) && MD_REG_P (REGNO (src))
2284 && REG_P (dest) && GP_REG_P (REGNO (dest)))
2286 int other_regno = REGNO (src) == HI_REGNUM ? LO_REGNUM : HI_REGNUM;
2287 if (GET_MODE_SIZE (mode) <= 4)
2288 emit_insn (gen_mfhilo_si (gen_rtx_REG (SImode, REGNO (dest)),
2289 gen_rtx_REG (SImode, REGNO (src)),
2290 gen_rtx_REG (SImode, other_regno)));
2292 emit_insn (gen_mfhilo_di (gen_rtx_REG (DImode, REGNO (dest)),
2293 gen_rtx_REG (DImode, REGNO (src)),
2294 gen_rtx_REG (DImode, other_regno)));
2298 /* We need to deal with constants that would be legitimate
2299 immediate_operands but not legitimate move_operands. */
2300 if (CONSTANT_P (src) && !move_operand (src, mode))
2302 mips_legitimize_const_move (mode, dest, src);
2303 set_unique_reg_note (get_last_insn (), REG_EQUAL, copy_rtx (src));
2309 /* We need a lot of little routines to check constant values on the
2310 mips16. These are used to figure out how long the instruction will
2311 be. It would be much better to do this using constraints, but
2312 there aren't nearly enough letters available. */
2315 m16_check_op (rtx op, int low, int high, int mask)
2317 return (GET_CODE (op) == CONST_INT
2318 && INTVAL (op) >= low
2319 && INTVAL (op) <= high
2320 && (INTVAL (op) & mask) == 0);
2324 m16_uimm3_b (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2326 return m16_check_op (op, 0x1, 0x8, 0);
2330 m16_simm4_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2332 return m16_check_op (op, - 0x8, 0x7, 0);
2336 m16_nsimm4_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2338 return m16_check_op (op, - 0x7, 0x8, 0);
2342 m16_simm5_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2344 return m16_check_op (op, - 0x10, 0xf, 0);
2348 m16_nsimm5_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2350 return m16_check_op (op, - 0xf, 0x10, 0);
2354 m16_uimm5_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2356 return m16_check_op (op, (- 0x10) << 2, 0xf << 2, 3);
2360 m16_nuimm5_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2362 return m16_check_op (op, (- 0xf) << 2, 0x10 << 2, 3);
2366 m16_simm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2368 return m16_check_op (op, - 0x80, 0x7f, 0);
2372 m16_nsimm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2374 return m16_check_op (op, - 0x7f, 0x80, 0);
2378 m16_uimm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2380 return m16_check_op (op, 0x0, 0xff, 0);
2384 m16_nuimm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2386 return m16_check_op (op, - 0xff, 0x0, 0);
2390 m16_uimm8_m1_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2392 return m16_check_op (op, - 0x1, 0xfe, 0);
2396 m16_uimm8_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2398 return m16_check_op (op, 0x0, 0xff << 2, 3);
2402 m16_nuimm8_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2404 return m16_check_op (op, (- 0xff) << 2, 0x0, 3);
2408 m16_simm8_8 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2410 return m16_check_op (op, (- 0x80) << 3, 0x7f << 3, 7);
2414 m16_nsimm8_8 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2416 return m16_check_op (op, (- 0x7f) << 3, 0x80 << 3, 7);
2420 mips_rtx_costs (rtx x, int code, int outer_code, int *total)
2422 enum machine_mode mode = GET_MODE (x);
2423 bool float_mode_p = FLOAT_MODE_P (mode);
2430 /* A number between 1 and 8 inclusive is efficient for a shift.
2431 Otherwise, we will need an extended instruction. */
2432 if ((outer_code) == ASHIFT || (outer_code) == ASHIFTRT
2433 || (outer_code) == LSHIFTRT)
2435 if (INTVAL (x) >= 1 && INTVAL (x) <= 8)
2438 *total = COSTS_N_INSNS (1);
2442 /* We can use cmpi for an xor with an unsigned 16 bit value. */
2443 if ((outer_code) == XOR
2444 && INTVAL (x) >= 0 && INTVAL (x) < 0x10000)
2450 /* We may be able to use slt or sltu for a comparison with a
2451 signed 16 bit value. (The boundary conditions aren't quite
2452 right, but this is just a heuristic anyhow.) */
2453 if (((outer_code) == LT || (outer_code) == LE
2454 || (outer_code) == GE || (outer_code) == GT
2455 || (outer_code) == LTU || (outer_code) == LEU
2456 || (outer_code) == GEU || (outer_code) == GTU)
2457 && INTVAL (x) >= -0x8000 && INTVAL (x) < 0x8000)
2463 /* Equality comparisons with 0 are cheap. */
2464 if (((outer_code) == EQ || (outer_code) == NE)
2471 /* Constants in the range 0...255 can be loaded with an unextended
2472 instruction. They are therefore as cheap as a register move.
2474 Given the choice between "li R1,0...255" and "move R1,R2"
2475 (where R2 is a known constant), it is usually better to use "li",
2476 since we do not want to unnecessarily extend the lifetime
2478 if (outer_code == SET
2480 && INTVAL (x) < 256)
2488 /* These can be used anywhere. */
2493 /* Otherwise fall through to the handling below because
2494 we'll need to construct the constant. */
2500 if (LEGITIMATE_CONSTANT_P (x))
2502 *total = COSTS_N_INSNS (1);
2507 /* The value will need to be fetched from the constant pool. */
2508 *total = CONSTANT_POOL_COST;
2514 /* If the address is legitimate, return the number of
2515 instructions it needs, otherwise use the default handling. */
2516 int n = mips_address_insns (XEXP (x, 0), GET_MODE (x));
2519 *total = COSTS_N_INSNS (n + 1);
2526 *total = COSTS_N_INSNS (6);
2530 *total = COSTS_N_INSNS ((mode == DImode && !TARGET_64BIT) ? 2 : 1);
2536 if (mode == DImode && !TARGET_64BIT)
2538 *total = COSTS_N_INSNS (2);
2546 if (mode == DImode && !TARGET_64BIT)
2548 *total = COSTS_N_INSNS ((GET_CODE (XEXP (x, 1)) == CONST_INT)
2556 *total = COSTS_N_INSNS (1);
2558 *total = COSTS_N_INSNS (4);
2562 *total = COSTS_N_INSNS (1);
2569 *total = mips_cost->fp_add;
2573 else if (mode == DImode && !TARGET_64BIT)
2575 *total = COSTS_N_INSNS (4);
2581 if (mode == DImode && !TARGET_64BIT)
2583 *total = COSTS_N_INSNS (4);
2590 *total = mips_cost->fp_mult_sf;
2592 else if (mode == DFmode)
2593 *total = mips_cost->fp_mult_df;
2595 else if (mode == SImode)
2596 *total = mips_cost->int_mult_si;
2599 *total = mips_cost->int_mult_di;
2608 *total = mips_cost->fp_div_sf;
2610 *total = mips_cost->fp_div_df;
2619 *total = mips_cost->int_div_di;
2621 *total = mips_cost->int_div_si;
2626 /* A sign extend from SImode to DImode in 64 bit mode is often
2627 zero instructions, because the result can often be used
2628 directly by another instruction; we'll call it one. */
2629 if (TARGET_64BIT && mode == DImode
2630 && GET_MODE (XEXP (x, 0)) == SImode)
2631 *total = COSTS_N_INSNS (1);
2633 *total = COSTS_N_INSNS (2);
2637 if (TARGET_64BIT && mode == DImode
2638 && GET_MODE (XEXP (x, 0)) == SImode)
2639 *total = COSTS_N_INSNS (2);
2641 *total = COSTS_N_INSNS (1);
2645 case UNSIGNED_FLOAT:
2648 case FLOAT_TRUNCATE:
2650 *total = mips_cost->fp_add;
2658 /* Provide the costs of an addressing mode that contains ADDR.
2659 If ADDR is not a valid address, its cost is irrelevant. */
2662 mips_address_cost (rtx addr)
2664 return mips_address_insns (addr, SImode);
2667 /* Return one word of double-word value OP, taking into account the fixed
2668 endianness of certain registers. HIGH_P is true to select the high part,
2669 false to select the low part. */
2672 mips_subword (rtx op, int high_p)
2675 enum machine_mode mode;
2677 mode = GET_MODE (op);
2678 if (mode == VOIDmode)
2681 if (TARGET_BIG_ENDIAN ? !high_p : high_p)
2682 byte = UNITS_PER_WORD;
2688 if (FP_REG_P (REGNO (op)))
2689 return gen_rtx_REG (word_mode, high_p ? REGNO (op) + 1 : REGNO (op));
2690 if (ACC_HI_REG_P (REGNO (op)))
2691 return gen_rtx_REG (word_mode, high_p ? REGNO (op) : REGNO (op) + 1);
2695 return mips_rewrite_small_data (adjust_address (op, word_mode, byte));
2697 return simplify_gen_subreg (word_mode, op, mode, byte);
2701 /* Return true if a 64-bit move from SRC to DEST should be split into two. */
2704 mips_split_64bit_move_p (rtx dest, rtx src)
2709 /* FP->FP moves can be done in a single instruction. */
2710 if (FP_REG_RTX_P (src) && FP_REG_RTX_P (dest))
2713 /* Check for floating-point loads and stores. They can be done using
2714 ldc1 and sdc1 on MIPS II and above. */
2717 if (FP_REG_RTX_P (dest) && MEM_P (src))
2719 if (FP_REG_RTX_P (src) && MEM_P (dest))
2726 /* Split a 64-bit move from SRC to DEST assuming that
2727 mips_split_64bit_move_p holds.
2729 Moves into and out of FPRs cause some difficulty here. Such moves
2730 will always be DFmode, since paired FPRs are not allowed to store
2731 DImode values. The most natural representation would be two separate
2732 32-bit moves, such as:
2734 (set (reg:SI $f0) (mem:SI ...))
2735 (set (reg:SI $f1) (mem:SI ...))
2737 However, the second insn is invalid because odd-numbered FPRs are
2738 not allowed to store independent values. Use the patterns load_df_low,
2739 load_df_high and store_df_high instead. */
2742 mips_split_64bit_move (rtx dest, rtx src)
2744 if (FP_REG_RTX_P (dest))
2746 /* Loading an FPR from memory or from GPRs. */
2747 emit_insn (gen_load_df_low (copy_rtx (dest), mips_subword (src, 0)));
2748 emit_insn (gen_load_df_high (dest, mips_subword (src, 1),
2751 else if (FP_REG_RTX_P (src))
2753 /* Storing an FPR into memory or GPRs. */
2754 emit_move_insn (mips_subword (dest, 0), mips_subword (src, 0));
2755 emit_insn (gen_store_df_high (mips_subword (dest, 1), src));
2759 /* The operation can be split into two normal moves. Decide in
2760 which order to do them. */
2763 low_dest = mips_subword (dest, 0);
2764 if (REG_P (low_dest)
2765 && reg_overlap_mentioned_p (low_dest, src))
2767 emit_move_insn (mips_subword (dest, 1), mips_subword (src, 1));
2768 emit_move_insn (low_dest, mips_subword (src, 0));
2772 emit_move_insn (low_dest, mips_subword (src, 0));
2773 emit_move_insn (mips_subword (dest, 1), mips_subword (src, 1));
2778 /* Return the appropriate instructions to move SRC into DEST. Assume
2779 that SRC is operand 1 and DEST is operand 0. */
2782 mips_output_move (rtx dest, rtx src)
2784 enum rtx_code dest_code, src_code;
2787 dest_code = GET_CODE (dest);
2788 src_code = GET_CODE (src);
2789 dbl_p = (GET_MODE_SIZE (GET_MODE (dest)) == 8);
2791 if (dbl_p && mips_split_64bit_move_p (dest, src))
2794 if ((src_code == REG && GP_REG_P (REGNO (src)))
2795 || (!TARGET_MIPS16 && src == CONST0_RTX (GET_MODE (dest))))
2797 if (dest_code == REG)
2799 if (GP_REG_P (REGNO (dest)))
2800 return "move\t%0,%z1";
2802 if (MD_REG_P (REGNO (dest)))
2805 if (DSP_ACC_REG_P (REGNO (dest)))
2807 static char retval[] = "mt__\t%z1,%q0";
2808 retval[2] = reg_names[REGNO (dest)][4];
2809 retval[3] = reg_names[REGNO (dest)][5];
2813 if (FP_REG_P (REGNO (dest)))
2814 return (dbl_p ? "dmtc1\t%z1,%0" : "mtc1\t%z1,%0");
2816 if (ALL_COP_REG_P (REGNO (dest)))
2818 static char retval[] = "dmtc_\t%z1,%0";
2820 retval[4] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (dest));
2821 return (dbl_p ? retval : retval + 1);
2824 if (dest_code == MEM)
2825 return (dbl_p ? "sd\t%z1,%0" : "sw\t%z1,%0");
2827 if (dest_code == REG && GP_REG_P (REGNO (dest)))
2829 if (src_code == REG)
2831 if (DSP_ACC_REG_P (REGNO (src)))
2833 static char retval[] = "mf__\t%0,%q1";
2834 retval[2] = reg_names[REGNO (src)][4];
2835 retval[3] = reg_names[REGNO (src)][5];
2839 if (ST_REG_P (REGNO (src)) && ISA_HAS_8CC)
2840 return "lui\t%0,0x3f80\n\tmovf\t%0,%.,%1";
2842 if (FP_REG_P (REGNO (src)))
2843 return (dbl_p ? "dmfc1\t%0,%1" : "mfc1\t%0,%1");
2845 if (ALL_COP_REG_P (REGNO (src)))
2847 static char retval[] = "dmfc_\t%0,%1";
2849 retval[4] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (src));
2850 return (dbl_p ? retval : retval + 1);
2854 if (src_code == MEM)
2855 return (dbl_p ? "ld\t%0,%1" : "lw\t%0,%1");
2857 if (src_code == CONST_INT)
2859 /* Don't use the X format, because that will give out of
2860 range numbers for 64 bit hosts and 32 bit targets. */
2862 return "li\t%0,%1\t\t\t# %X1";
2864 if (INTVAL (src) >= 0 && INTVAL (src) <= 0xffff)
2867 if (INTVAL (src) < 0 && INTVAL (src) >= -0xffff)
2871 if (src_code == HIGH)
2872 return "lui\t%0,%h1";
2874 if (CONST_GP_P (src))
2875 return "move\t%0,%1";
2877 if (symbolic_operand (src, VOIDmode))
2878 return (dbl_p ? "dla\t%0,%1" : "la\t%0,%1");
2880 if (src_code == REG && FP_REG_P (REGNO (src)))
2882 if (dest_code == REG && FP_REG_P (REGNO (dest)))
2884 if (GET_MODE (dest) == V2SFmode)
2885 return "mov.ps\t%0,%1";
2887 return (dbl_p ? "mov.d\t%0,%1" : "mov.s\t%0,%1");
2890 if (dest_code == MEM)
2891 return (dbl_p ? "sdc1\t%1,%0" : "swc1\t%1,%0");
2893 if (dest_code == REG && FP_REG_P (REGNO (dest)))
2895 if (src_code == MEM)
2896 return (dbl_p ? "ldc1\t%0,%1" : "lwc1\t%0,%1");
2898 if (dest_code == REG && ALL_COP_REG_P (REGNO (dest)) && src_code == MEM)
2900 static char retval[] = "l_c_\t%0,%1";
2902 retval[1] = (dbl_p ? 'd' : 'w');
2903 retval[3] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (dest));
2906 if (dest_code == MEM && src_code == REG && ALL_COP_REG_P (REGNO (src)))
2908 static char retval[] = "s_c_\t%1,%0";
2910 retval[1] = (dbl_p ? 'd' : 'w');
2911 retval[3] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (src));
2917 /* Restore $gp from its save slot. Valid only when using o32 or
2921 mips_restore_gp (void)
2925 gcc_assert (TARGET_ABICALLS && TARGET_OLDABI);
2927 address = mips_add_offset (pic_offset_table_rtx,
2928 frame_pointer_needed
2929 ? hard_frame_pointer_rtx
2930 : stack_pointer_rtx,
2931 current_function_outgoing_args_size);
2932 slot = gen_rtx_MEM (Pmode, address);
2934 emit_move_insn (pic_offset_table_rtx, slot);
2935 if (!TARGET_EXPLICIT_RELOCS)
2936 emit_insn (gen_blockage ());
2939 /* Emit an instruction of the form (set TARGET (CODE OP0 OP1)). */
2942 mips_emit_binary (enum rtx_code code, rtx target, rtx op0, rtx op1)
2944 emit_insn (gen_rtx_SET (VOIDmode, target,
2945 gen_rtx_fmt_ee (code, GET_MODE (target), op0, op1)));
2948 /* Return true if CMP1 is a suitable second operand for relational
2949 operator CODE. See also the *sCC patterns in mips.md. */
2952 mips_relational_operand_ok_p (enum rtx_code code, rtx cmp1)
2958 return reg_or_0_operand (cmp1, VOIDmode);
2962 return !TARGET_MIPS16 && cmp1 == const1_rtx;
2966 return arith_operand (cmp1, VOIDmode);
2969 return sle_operand (cmp1, VOIDmode);
2972 return sleu_operand (cmp1, VOIDmode);
2979 /* Canonicalize LE or LEU comparisons into LT comparisons when
2980 possible to avoid extra instructions or inverting the
2984 mips_canonicalize_comparison (enum rtx_code *code, rtx *cmp1,
2985 enum machine_mode mode)
2987 HOST_WIDE_INT original, plus_one;
2989 if (GET_CODE (*cmp1) != CONST_INT)
2992 original = INTVAL (*cmp1);
2993 plus_one = trunc_int_for_mode ((unsigned HOST_WIDE_INT) original + 1, mode);
2998 if (original < plus_one)
3001 *cmp1 = force_reg (mode, GEN_INT (plus_one));
3010 *cmp1 = force_reg (mode, GEN_INT (plus_one));
3023 /* Compare CMP0 and CMP1 using relational operator CODE and store the
3024 result in TARGET. CMP0 and TARGET are register_operands that have
3025 the same integer mode. If INVERT_PTR is nonnull, it's OK to set
3026 TARGET to the inverse of the result and flip *INVERT_PTR instead. */
3029 mips_emit_int_relational (enum rtx_code code, bool *invert_ptr,
3030 rtx target, rtx cmp0, rtx cmp1)
3032 /* First see if there is a MIPS instruction that can do this operation
3033 with CMP1 in its current form. If not, try to canonicalize the
3034 comparison to LT. If that fails, try doing the same for the
3035 inverse operation. If that also fails, force CMP1 into a register
3037 if (mips_relational_operand_ok_p (code, cmp1))
3038 mips_emit_binary (code, target, cmp0, cmp1);
3039 else if (mips_canonicalize_comparison (&code, &cmp1, GET_MODE (target)))
3040 mips_emit_binary (code, target, cmp0, cmp1);
3043 enum rtx_code inv_code = reverse_condition (code);
3044 if (!mips_relational_operand_ok_p (inv_code, cmp1))
3046 cmp1 = force_reg (GET_MODE (cmp0), cmp1);
3047 mips_emit_int_relational (code, invert_ptr, target, cmp0, cmp1);
3049 else if (invert_ptr == 0)
3051 rtx inv_target = gen_reg_rtx (GET_MODE (target));
3052 mips_emit_binary (inv_code, inv_target, cmp0, cmp1);
3053 mips_emit_binary (XOR, target, inv_target, const1_rtx);
3057 *invert_ptr = !*invert_ptr;
3058 mips_emit_binary (inv_code, target, cmp0, cmp1);
3063 /* Return a register that is zero iff CMP0 and CMP1 are equal.
3064 The register will have the same mode as CMP0. */
3067 mips_zero_if_equal (rtx cmp0, rtx cmp1)
3069 if (cmp1 == const0_rtx)
3072 if (uns_arith_operand (cmp1, VOIDmode))
3073 return expand_binop (GET_MODE (cmp0), xor_optab,
3074 cmp0, cmp1, 0, 0, OPTAB_DIRECT);
3076 return expand_binop (GET_MODE (cmp0), sub_optab,
3077 cmp0, cmp1, 0, 0, OPTAB_DIRECT);
3080 /* Convert a comparison into something that can be used in a branch or
3081 conditional move. cmp_operands[0] and cmp_operands[1] are the values
3082 being compared and *CODE is the code used to compare them.
3084 Update *CODE, *OP0 and *OP1 so that they describe the final comparison.
3085 If NEED_EQ_NE_P, then only EQ/NE comparisons against zero are possible,
3086 otherwise any standard branch condition can be used. The standard branch
3089 - EQ/NE between two registers.
3090 - any comparison between a register and zero. */
3093 mips_emit_compare (enum rtx_code *code, rtx *op0, rtx *op1, bool need_eq_ne_p)
3095 if (GET_MODE_CLASS (GET_MODE (cmp_operands[0])) == MODE_INT)
3097 if (!need_eq_ne_p && cmp_operands[1] == const0_rtx)
3099 *op0 = cmp_operands[0];
3100 *op1 = cmp_operands[1];
3102 else if (*code == EQ || *code == NE)
3106 *op0 = mips_zero_if_equal (cmp_operands[0], cmp_operands[1]);
3111 *op0 = cmp_operands[0];
3112 *op1 = force_reg (GET_MODE (*op0), cmp_operands[1]);
3117 /* The comparison needs a separate scc instruction. Store the
3118 result of the scc in *OP0 and compare it against zero. */
3119 bool invert = false;
3120 *op0 = gen_reg_rtx (GET_MODE (cmp_operands[0]));
3122 mips_emit_int_relational (*code, &invert, *op0,
3123 cmp_operands[0], cmp_operands[1]);
3124 *code = (invert ? EQ : NE);
3129 enum rtx_code cmp_code;
3131 /* Floating-point tests use a separate c.cond.fmt comparison to
3132 set a condition code register. The branch or conditional move
3133 will then compare that register against zero.
3135 Set CMP_CODE to the code of the comparison instruction and
3136 *CODE to the code that the branch or move should use. */
3142 cmp_code = reverse_condition_maybe_unordered (*code);
3152 ? gen_reg_rtx (CCmode)
3153 : gen_rtx_REG (CCmode, FPSW_REGNUM));
3155 mips_emit_binary (cmp_code, *op0, cmp_operands[0], cmp_operands[1]);
3159 /* Try comparing cmp_operands[0] and cmp_operands[1] using rtl code CODE.
3160 Store the result in TARGET and return true if successful.
3162 On 64-bit targets, TARGET may be wider than cmp_operands[0]. */
3165 mips_emit_scc (enum rtx_code code, rtx target)
3167 if (GET_MODE_CLASS (GET_MODE (cmp_operands[0])) != MODE_INT)
3170 target = gen_lowpart (GET_MODE (cmp_operands[0]), target);
3171 if (code == EQ || code == NE)
3173 rtx zie = mips_zero_if_equal (cmp_operands[0], cmp_operands[1]);
3174 mips_emit_binary (code, target, zie, const0_rtx);
3177 mips_emit_int_relational (code, 0, target,
3178 cmp_operands[0], cmp_operands[1]);
3182 /* Emit the common code for doing conditional branches.
3183 operand[0] is the label to jump to.
3184 The comparison operands are saved away by cmp{si,di,sf,df}. */
3187 gen_conditional_branch (rtx *operands, enum rtx_code code)
3189 rtx op0, op1, condition;
3191 mips_emit_compare (&code, &op0, &op1, TARGET_MIPS16);
3192 condition = gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
3193 emit_jump_insn (gen_condjump (condition, operands[0]));
3196 /* Emit the common code for conditional moves. OPERANDS is the array
3197 of operands passed to the conditional move define_expand. */
3200 gen_conditional_move (rtx *operands)
3205 code = GET_CODE (operands[1]);
3206 mips_emit_compare (&code, &op0, &op1, true);
3207 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3208 gen_rtx_IF_THEN_ELSE (GET_MODE (operands[0]),
3209 gen_rtx_fmt_ee (code,
3212 operands[2], operands[3])));
3215 /* Emit a conditional trap. OPERANDS is the array of operands passed to
3216 the conditional_trap expander. */
3219 mips_gen_conditional_trap (rtx *operands)
3222 enum rtx_code cmp_code = GET_CODE (operands[0]);
3223 enum machine_mode mode = GET_MODE (cmp_operands[0]);
3225 /* MIPS conditional trap machine instructions don't have GT or LE
3226 flavors, so we must invert the comparison and convert to LT and
3227 GE, respectively. */
3230 case GT: cmp_code = LT; break;
3231 case LE: cmp_code = GE; break;
3232 case GTU: cmp_code = LTU; break;
3233 case LEU: cmp_code = GEU; break;
3236 if (cmp_code == GET_CODE (operands[0]))
3238 op0 = cmp_operands[0];
3239 op1 = cmp_operands[1];
3243 op0 = cmp_operands[1];
3244 op1 = cmp_operands[0];
3246 op0 = force_reg (mode, op0);
3247 if (!arith_operand (op1, mode))
3248 op1 = force_reg (mode, op1);
3250 emit_insn (gen_rtx_TRAP_IF (VOIDmode,
3251 gen_rtx_fmt_ee (cmp_code, mode, op0, op1),
3255 /* Load function address ADDR into register DEST. SIBCALL_P is true
3256 if the address is needed for a sibling call. */
3259 mips_load_call_address (rtx dest, rtx addr, int sibcall_p)
3261 /* If we're generating PIC, and this call is to a global function,
3262 try to allow its address to be resolved lazily. This isn't
3263 possible for NewABI sibcalls since the value of $gp on entry
3264 to the stub would be our caller's gp, not ours. */
3265 if (TARGET_EXPLICIT_RELOCS
3266 && !(sibcall_p && TARGET_NEWABI)
3267 && global_got_operand (addr, VOIDmode))
3269 rtx high, lo_sum_symbol;
3271 high = mips_unspec_offset_high (dest, pic_offset_table_rtx,
3272 addr, SYMBOL_GOTOFF_CALL);
3273 lo_sum_symbol = mips_unspec_address (addr, SYMBOL_GOTOFF_CALL);
3274 if (Pmode == SImode)
3275 emit_insn (gen_load_callsi (dest, high, lo_sum_symbol));
3277 emit_insn (gen_load_calldi (dest, high, lo_sum_symbol));
3280 emit_move_insn (dest, addr);
3284 /* Expand a call or call_value instruction. RESULT is where the
3285 result will go (null for calls), ADDR is the address of the
3286 function, ARGS_SIZE is the size of the arguments and AUX is
3287 the value passed to us by mips_function_arg. SIBCALL_P is true
3288 if we are expanding a sibling call, false if we're expanding
3292 mips_expand_call (rtx result, rtx addr, rtx args_size, rtx aux, int sibcall_p)
3294 rtx orig_addr, pattern, insn;
3297 if (!call_insn_operand (addr, VOIDmode))
3299 addr = gen_reg_rtx (Pmode);
3300 mips_load_call_address (addr, orig_addr, sibcall_p);
3304 && mips16_hard_float
3305 && build_mips16_call_stub (result, addr, args_size,
3306 aux == 0 ? 0 : (int) GET_MODE (aux)))
3310 pattern = (sibcall_p
3311 ? gen_sibcall_internal (addr, args_size)
3312 : gen_call_internal (addr, args_size));
3313 else if (GET_CODE (result) == PARALLEL && XVECLEN (result, 0) == 2)
3317 reg1 = XEXP (XVECEXP (result, 0, 0), 0);
3318 reg2 = XEXP (XVECEXP (result, 0, 1), 0);
3321 ? gen_sibcall_value_multiple_internal (reg1, addr, args_size, reg2)
3322 : gen_call_value_multiple_internal (reg1, addr, args_size, reg2));
3325 pattern = (sibcall_p
3326 ? gen_sibcall_value_internal (result, addr, args_size)
3327 : gen_call_value_internal (result, addr, args_size));
3329 insn = emit_call_insn (pattern);
3331 /* Lazy-binding stubs require $gp to be valid on entry. */
3332 if (global_got_operand (orig_addr, VOIDmode))
3333 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
3337 /* We can handle any sibcall when TARGET_SIBCALLS is true. */
3340 mips_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
3341 tree exp ATTRIBUTE_UNUSED)
3343 return TARGET_SIBCALLS;
3346 /* Emit code to move general operand SRC into condition-code
3347 register DEST. SCRATCH is a scratch TFmode float register.
3354 where FP1 and FP2 are single-precision float registers
3355 taken from SCRATCH. */
3358 mips_emit_fcc_reload (rtx dest, rtx src, rtx scratch)
3362 /* Change the source to SFmode. */
3364 src = adjust_address (src, SFmode, 0);
3365 else if (REG_P (src) || GET_CODE (src) == SUBREG)
3366 src = gen_rtx_REG (SFmode, true_regnum (src));
3368 fp1 = gen_rtx_REG (SFmode, REGNO (scratch));
3369 fp2 = gen_rtx_REG (SFmode, REGNO (scratch) + FP_INC);
3371 emit_move_insn (copy_rtx (fp1), src);
3372 emit_move_insn (copy_rtx (fp2), CONST0_RTX (SFmode));
3373 emit_insn (gen_slt_sf (dest, fp2, fp1));
3376 /* Emit code to change the current function's return address to
3377 ADDRESS. SCRATCH is available as a scratch register, if needed.
3378 ADDRESS and SCRATCH are both word-mode GPRs. */
3381 mips_set_return_address (rtx address, rtx scratch)
3385 compute_frame_size (get_frame_size ());
3386 gcc_assert ((cfun->machine->frame.mask >> 31) & 1);
3387 slot_address = mips_add_offset (scratch, stack_pointer_rtx,
3388 cfun->machine->frame.gp_sp_offset);
3390 emit_move_insn (gen_rtx_MEM (GET_MODE (address), slot_address), address);
3393 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
3394 Assume that the areas do not overlap. */
3397 mips_block_move_straight (rtx dest, rtx src, HOST_WIDE_INT length)
3399 HOST_WIDE_INT offset, delta;
3400 unsigned HOST_WIDE_INT bits;
3402 enum machine_mode mode;
3405 /* Work out how many bits to move at a time. If both operands have
3406 half-word alignment, it is usually better to move in half words.
3407 For instance, lh/lh/sh/sh is usually better than lwl/lwr/swl/swr
3408 and lw/lw/sw/sw is usually better than ldl/ldr/sdl/sdr.
3409 Otherwise move word-sized chunks. */
3410 if (MEM_ALIGN (src) == BITS_PER_WORD / 2
3411 && MEM_ALIGN (dest) == BITS_PER_WORD / 2)
3412 bits = BITS_PER_WORD / 2;
3414 bits = BITS_PER_WORD;
3416 mode = mode_for_size (bits, MODE_INT, 0);
3417 delta = bits / BITS_PER_UNIT;
3419 /* Allocate a buffer for the temporary registers. */
3420 regs = alloca (sizeof (rtx) * length / delta);
3422 /* Load as many BITS-sized chunks as possible. Use a normal load if
3423 the source has enough alignment, otherwise use left/right pairs. */
3424 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
3426 regs[i] = gen_reg_rtx (mode);
3427 if (MEM_ALIGN (src) >= bits)
3428 emit_move_insn (regs[i], adjust_address (src, mode, offset));
3431 rtx part = adjust_address (src, BLKmode, offset);
3432 if (!mips_expand_unaligned_load (regs[i], part, bits, 0))
3437 /* Copy the chunks to the destination. */
3438 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
3439 if (MEM_ALIGN (dest) >= bits)
3440 emit_move_insn (adjust_address (dest, mode, offset), regs[i]);
3443 rtx part = adjust_address (dest, BLKmode, offset);
3444 if (!mips_expand_unaligned_store (part, regs[i], bits, 0))
3448 /* Mop up any left-over bytes. */
3449 if (offset < length)
3451 src = adjust_address (src, BLKmode, offset);
3452 dest = adjust_address (dest, BLKmode, offset);
3453 move_by_pieces (dest, src, length - offset,
3454 MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
3458 #define MAX_MOVE_REGS 4
3459 #define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD)
3462 /* Helper function for doing a loop-based block operation on memory
3463 reference MEM. Each iteration of the loop will operate on LENGTH
3466 Create a new base register for use within the loop and point it to
3467 the start of MEM. Create a new memory reference that uses this
3468 register. Store them in *LOOP_REG and *LOOP_MEM respectively. */
3471 mips_adjust_block_mem (rtx mem, HOST_WIDE_INT length,
3472 rtx *loop_reg, rtx *loop_mem)
3474 *loop_reg = copy_addr_to_reg (XEXP (mem, 0));
3476 /* Although the new mem does not refer to a known location,
3477 it does keep up to LENGTH bytes of alignment. */
3478 *loop_mem = change_address (mem, BLKmode, *loop_reg);
3479 set_mem_align (*loop_mem, MIN (MEM_ALIGN (mem), length * BITS_PER_UNIT));
3483 /* Move LENGTH bytes from SRC to DEST using a loop that moves MAX_MOVE_BYTES
3484 per iteration. LENGTH must be at least MAX_MOVE_BYTES. Assume that the
3485 memory regions do not overlap. */
3488 mips_block_move_loop (rtx dest, rtx src, HOST_WIDE_INT length)
3490 rtx label, src_reg, dest_reg, final_src;
3491 HOST_WIDE_INT leftover;
3493 leftover = length % MAX_MOVE_BYTES;
3496 /* Create registers and memory references for use within the loop. */
3497 mips_adjust_block_mem (src, MAX_MOVE_BYTES, &src_reg, &src);
3498 mips_adjust_block_mem (dest, MAX_MOVE_BYTES, &dest_reg, &dest);
3500 /* Calculate the value that SRC_REG should have after the last iteration
3502 final_src = expand_simple_binop (Pmode, PLUS, src_reg, GEN_INT (length),
3505 /* Emit the start of the loop. */
3506 label = gen_label_rtx ();
3509 /* Emit the loop body. */
3510 mips_block_move_straight (dest, src, MAX_MOVE_BYTES);
3512 /* Move on to the next block. */
3513 emit_move_insn (src_reg, plus_constant (src_reg, MAX_MOVE_BYTES));
3514 emit_move_insn (dest_reg, plus_constant (dest_reg, MAX_MOVE_BYTES));
3516 /* Emit the loop condition. */
3517 if (Pmode == DImode)
3518 emit_insn (gen_cmpdi (src_reg, final_src));
3520 emit_insn (gen_cmpsi (src_reg, final_src));
3521 emit_jump_insn (gen_bne (label));
3523 /* Mop up any left-over bytes. */
3525 mips_block_move_straight (dest, src, leftover);
3528 /* Expand a movmemsi instruction. */
3531 mips_expand_block_move (rtx dest, rtx src, rtx length)
3533 if (GET_CODE (length) == CONST_INT)
3535 if (INTVAL (length) <= 2 * MAX_MOVE_BYTES)
3537 mips_block_move_straight (dest, src, INTVAL (length));
3542 mips_block_move_loop (dest, src, INTVAL (length));
3549 /* Argument support functions. */
3551 /* Initialize CUMULATIVE_ARGS for a function. */
3554 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3555 rtx libname ATTRIBUTE_UNUSED)
3557 static CUMULATIVE_ARGS zero_cum;
3558 tree param, next_param;
3561 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3563 /* Determine if this function has variable arguments. This is
3564 indicated by the last argument being 'void_type_mode' if there
3565 are no variable arguments. The standard MIPS calling sequence
3566 passes all arguments in the general purpose registers in this case. */
3568 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
3569 param != 0; param = next_param)
3571 next_param = TREE_CHAIN (param);
3572 if (next_param == 0 && TREE_VALUE (param) != void_type_node)
3573 cum->gp_reg_found = 1;
3578 /* Fill INFO with information about a single argument. CUM is the
3579 cumulative state for earlier arguments. MODE is the mode of this
3580 argument and TYPE is its type (if known). NAMED is true if this
3581 is a named (fixed) argument rather than a variable one. */
3584 mips_arg_info (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
3585 tree type, int named, struct mips_arg_info *info)
3587 bool doubleword_aligned_p;
3588 unsigned int num_bytes, num_words, max_regs;
3590 /* Work out the size of the argument. */
3591 num_bytes = type ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
3592 num_words = (num_bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3594 /* Decide whether it should go in a floating-point register, assuming
3595 one is free. Later code checks for availability.
3597 The checks against UNITS_PER_FPVALUE handle the soft-float and
3598 single-float cases. */
3602 /* The EABI conventions have traditionally been defined in terms
3603 of TYPE_MODE, regardless of the actual type. */
3604 info->fpr_p = ((GET_MODE_CLASS (mode) == MODE_FLOAT
3605 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
3606 && GET_MODE_SIZE (mode) <= UNITS_PER_FPVALUE);
3611 /* Only leading floating-point scalars are passed in
3612 floating-point registers. We also handle vector floats the same
3613 say, which is OK because they are not covered by the standard ABI. */
3614 info->fpr_p = (!cum->gp_reg_found
3615 && cum->arg_number < 2
3616 && (type == 0 || SCALAR_FLOAT_TYPE_P (type)
3617 || VECTOR_FLOAT_TYPE_P (type))
3618 && (GET_MODE_CLASS (mode) == MODE_FLOAT
3619 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
3620 && GET_MODE_SIZE (mode) <= UNITS_PER_FPVALUE);
3625 /* Scalar and complex floating-point types are passed in
3626 floating-point registers. */
3627 info->fpr_p = (named
3628 && (type == 0 || FLOAT_TYPE_P (type))
3629 && (GET_MODE_CLASS (mode) == MODE_FLOAT
3630 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3631 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
3632 && GET_MODE_UNIT_SIZE (mode) <= UNITS_PER_FPVALUE);
3634 /* ??? According to the ABI documentation, the real and imaginary
3635 parts of complex floats should be passed in individual registers.
3636 The real and imaginary parts of stack arguments are supposed
3637 to be contiguous and there should be an extra word of padding
3640 This has two problems. First, it makes it impossible to use a
3641 single "void *" va_list type, since register and stack arguments
3642 are passed differently. (At the time of writing, MIPSpro cannot
3643 handle complex float varargs correctly.) Second, it's unclear
3644 what should happen when there is only one register free.
3646 For now, we assume that named complex floats should go into FPRs
3647 if there are two FPRs free, otherwise they should be passed in the
3648 same way as a struct containing two floats. */
3650 && GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3651 && GET_MODE_UNIT_SIZE (mode) < UNITS_PER_FPVALUE)
3653 if (cum->num_gprs >= MAX_ARGS_IN_REGISTERS - 1)
3654 info->fpr_p = false;
3664 /* See whether the argument has doubleword alignment. */
3665 doubleword_aligned_p = FUNCTION_ARG_BOUNDARY (mode, type) > BITS_PER_WORD;
3667 /* Set REG_OFFSET to the register count we're interested in.
3668 The EABI allocates the floating-point registers separately,
3669 but the other ABIs allocate them like integer registers. */
3670 info->reg_offset = (mips_abi == ABI_EABI && info->fpr_p
3674 /* Advance to an even register if the argument is doubleword-aligned. */
3675 if (doubleword_aligned_p)
3676 info->reg_offset += info->reg_offset & 1;
3678 /* Work out the offset of a stack argument. */
3679 info->stack_offset = cum->stack_words;
3680 if (doubleword_aligned_p)
3681 info->stack_offset += info->stack_offset & 1;
3683 max_regs = MAX_ARGS_IN_REGISTERS - info->reg_offset;
3685 /* Partition the argument between registers and stack. */
3686 info->reg_words = MIN (num_words, max_regs);
3687 info->stack_words = num_words - info->reg_words;
3691 /* Implement FUNCTION_ARG_ADVANCE. */
3694 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3695 tree type, int named)
3697 struct mips_arg_info info;
3699 mips_arg_info (cum, mode, type, named, &info);
3702 cum->gp_reg_found = true;
3704 /* See the comment above the cumulative args structure in mips.h
3705 for an explanation of what this code does. It assumes the O32
3706 ABI, which passes at most 2 arguments in float registers. */
3707 if (cum->arg_number < 2 && info.fpr_p)
3708 cum->fp_code += (mode == SFmode ? 1 : 2) << ((cum->arg_number - 1) * 2);
3710 if (mips_abi != ABI_EABI || !info.fpr_p)
3711 cum->num_gprs = info.reg_offset + info.reg_words;
3712 else if (info.reg_words > 0)
3713 cum->num_fprs += FP_INC;
3715 if (info.stack_words > 0)
3716 cum->stack_words = info.stack_offset + info.stack_words;
3721 /* Implement FUNCTION_ARG. */
3724 function_arg (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
3725 tree type, int named)
3727 struct mips_arg_info info;
3729 /* We will be called with a mode of VOIDmode after the last argument
3730 has been seen. Whatever we return will be passed to the call
3731 insn. If we need a mips16 fp_code, return a REG with the code
3732 stored as the mode. */
3733 if (mode == VOIDmode)
3735 if (TARGET_MIPS16 && cum->fp_code != 0)
3736 return gen_rtx_REG ((enum machine_mode) cum->fp_code, 0);
3742 mips_arg_info (cum, mode, type, named, &info);
3744 /* Return straight away if the whole argument is passed on the stack. */
3745 if (info.reg_offset == MAX_ARGS_IN_REGISTERS)
3749 && TREE_CODE (type) == RECORD_TYPE
3751 && TYPE_SIZE_UNIT (type)
3752 && host_integerp (TYPE_SIZE_UNIT (type), 1)
3755 /* The Irix 6 n32/n64 ABIs say that if any 64 bit chunk of the
3756 structure contains a double in its entirety, then that 64 bit
3757 chunk is passed in a floating point register. */
3760 /* First check to see if there is any such field. */
3761 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
3762 if (TREE_CODE (field) == FIELD_DECL
3763 && TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
3764 && TYPE_PRECISION (TREE_TYPE (field)) == BITS_PER_WORD
3765 && host_integerp (bit_position (field), 0)
3766 && int_bit_position (field) % BITS_PER_WORD == 0)
3771 /* Now handle the special case by returning a PARALLEL
3772 indicating where each 64 bit chunk goes. INFO.REG_WORDS
3773 chunks are passed in registers. */
3775 HOST_WIDE_INT bitpos;
3778 /* assign_parms checks the mode of ENTRY_PARM, so we must
3779 use the actual mode here. */
3780 ret = gen_rtx_PARALLEL (mode, rtvec_alloc (info.reg_words));
3783 field = TYPE_FIELDS (type);
3784 for (i = 0; i < info.reg_words; i++)
3788 for (; field; field = TREE_CHAIN (field))
3789 if (TREE_CODE (field) == FIELD_DECL
3790 && int_bit_position (field) >= bitpos)
3794 && int_bit_position (field) == bitpos
3795 && TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
3796 && !TARGET_SOFT_FLOAT
3797 && TYPE_PRECISION (TREE_TYPE (field)) == BITS_PER_WORD)
3798 reg = gen_rtx_REG (DFmode, FP_ARG_FIRST + info.reg_offset + i);
3800 reg = gen_rtx_REG (DImode, GP_ARG_FIRST + info.reg_offset + i);
3803 = gen_rtx_EXPR_LIST (VOIDmode, reg,
3804 GEN_INT (bitpos / BITS_PER_UNIT));
3806 bitpos += BITS_PER_WORD;
3812 /* Handle the n32/n64 conventions for passing complex floating-point
3813 arguments in FPR pairs. The real part goes in the lower register
3814 and the imaginary part goes in the upper register. */
3817 && GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
3820 enum machine_mode inner;
3823 inner = GET_MODE_INNER (mode);
3824 reg = FP_ARG_FIRST + info.reg_offset;
3825 real = gen_rtx_EXPR_LIST (VOIDmode,
3826 gen_rtx_REG (inner, reg),
3828 imag = gen_rtx_EXPR_LIST (VOIDmode,
3829 gen_rtx_REG (inner, reg + info.reg_words / 2),
3830 GEN_INT (GET_MODE_SIZE (inner)));
3831 return gen_rtx_PARALLEL (mode, gen_rtvec (2, real, imag));
3835 return gen_rtx_REG (mode, GP_ARG_FIRST + info.reg_offset);
3836 else if (info.reg_offset == 1)
3837 /* This code handles the special o32 case in which the second word
3838 of the argument structure is passed in floating-point registers. */
3839 return gen_rtx_REG (mode, FP_ARG_FIRST + FP_INC);
3841 return gen_rtx_REG (mode, FP_ARG_FIRST + info.reg_offset);
3845 /* Implement TARGET_ARG_PARTIAL_BYTES. */
3848 mips_arg_partial_bytes (CUMULATIVE_ARGS *cum,
3849 enum machine_mode mode, tree type, bool named)
3851 struct mips_arg_info info;
3853 mips_arg_info (cum, mode, type, named, &info);
3854 return info.stack_words > 0 ? info.reg_words * UNITS_PER_WORD : 0;
3858 /* Implement FUNCTION_ARG_BOUNDARY. Every parameter gets at least
3859 PARM_BOUNDARY bits of alignment, but will be given anything up
3860 to STACK_BOUNDARY bits if the type requires it. */
3863 function_arg_boundary (enum machine_mode mode, tree type)
3865 unsigned int alignment;
3867 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
3868 if (alignment < PARM_BOUNDARY)
3869 alignment = PARM_BOUNDARY;
3870 if (alignment > STACK_BOUNDARY)
3871 alignment = STACK_BOUNDARY;
3875 /* Return true if FUNCTION_ARG_PADDING (MODE, TYPE) should return
3876 upward rather than downward. In other words, return true if the
3877 first byte of the stack slot has useful data, false if the last
3881 mips_pad_arg_upward (enum machine_mode mode, tree type)
3883 /* On little-endian targets, the first byte of every stack argument
3884 is passed in the first byte of the stack slot. */
3885 if (!BYTES_BIG_ENDIAN)
3888 /* Otherwise, integral types are padded downward: the last byte of a
3889 stack argument is passed in the last byte of the stack slot. */
3891 ? INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
3892 : GET_MODE_CLASS (mode) == MODE_INT)
3895 /* Big-endian o64 pads floating-point arguments downward. */
3896 if (mips_abi == ABI_O64)
3897 if (type != 0 ? FLOAT_TYPE_P (type) : GET_MODE_CLASS (mode) == MODE_FLOAT)
3900 /* Other types are padded upward for o32, o64, n32 and n64. */
3901 if (mips_abi != ABI_EABI)
3904 /* Arguments smaller than a stack slot are padded downward. */
3905 if (mode != BLKmode)
3906 return (GET_MODE_BITSIZE (mode) >= PARM_BOUNDARY);
3908 return (int_size_in_bytes (type) >= (PARM_BOUNDARY / BITS_PER_UNIT));
3912 /* Likewise BLOCK_REG_PADDING (MODE, TYPE, ...). Return !BYTES_BIG_ENDIAN
3913 if the least significant byte of the register has useful data. Return
3914 the opposite if the most significant byte does. */
3917 mips_pad_reg_upward (enum machine_mode mode, tree type)
3919 /* No shifting is required for floating-point arguments. */
3920 if (type != 0 ? FLOAT_TYPE_P (type) : GET_MODE_CLASS (mode) == MODE_FLOAT)
3921 return !BYTES_BIG_ENDIAN;
3923 /* Otherwise, apply the same padding to register arguments as we do
3924 to stack arguments. */
3925 return mips_pad_arg_upward (mode, type);
3929 mips_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3930 tree type, int *pretend_size ATTRIBUTE_UNUSED,
3933 CUMULATIVE_ARGS local_cum;
3934 int gp_saved, fp_saved;
3936 /* The caller has advanced CUM up to, but not beyond, the last named
3937 argument. Advance a local copy of CUM past the last "real" named
3938 argument, to find out how many registers are left over. */
3941 FUNCTION_ARG_ADVANCE (local_cum, mode, type, 1);
3943 /* Found out how many registers we need to save. */
3944 gp_saved = MAX_ARGS_IN_REGISTERS - local_cum.num_gprs;
3945 fp_saved = (EABI_FLOAT_VARARGS_P
3946 ? MAX_ARGS_IN_REGISTERS - local_cum.num_fprs
3955 ptr = plus_constant (virtual_incoming_args_rtx,
3956 REG_PARM_STACK_SPACE (cfun->decl)
3957 - gp_saved * UNITS_PER_WORD);
3958 mem = gen_rtx_MEM (BLKmode, ptr);
3959 set_mem_alias_set (mem, get_varargs_alias_set ());
3961 move_block_from_reg (local_cum.num_gprs + GP_ARG_FIRST,
3966 /* We can't use move_block_from_reg, because it will use
3968 enum machine_mode mode;
3971 /* Set OFF to the offset from virtual_incoming_args_rtx of
3972 the first float register. The FP save area lies below
3973 the integer one, and is aligned to UNITS_PER_FPVALUE bytes. */
3974 off = -gp_saved * UNITS_PER_WORD;
3975 off &= ~(UNITS_PER_FPVALUE - 1);
3976 off -= fp_saved * UNITS_PER_FPREG;
3978 mode = TARGET_SINGLE_FLOAT ? SFmode : DFmode;
3980 for (i = local_cum.num_fprs; i < MAX_ARGS_IN_REGISTERS; i += FP_INC)
3984 ptr = plus_constant (virtual_incoming_args_rtx, off);
3985 mem = gen_rtx_MEM (mode, ptr);
3986 set_mem_alias_set (mem, get_varargs_alias_set ());
3987 emit_move_insn (mem, gen_rtx_REG (mode, FP_ARG_FIRST + i));
3988 off += UNITS_PER_HWFPVALUE;
3992 if (REG_PARM_STACK_SPACE (cfun->decl) == 0)
3993 cfun->machine->varargs_size = (gp_saved * UNITS_PER_WORD
3994 + fp_saved * UNITS_PER_FPREG);
3997 /* Create the va_list data type.
3998 We keep 3 pointers, and two offsets.
3999 Two pointers are to the overflow area, which starts at the CFA.
4000 One of these is constant, for addressing into the GPR save area below it.
4001 The other is advanced up the stack through the overflow region.
4002 The third pointer is to the GPR save area. Since the FPR save area
4003 is just below it, we can address FPR slots off this pointer.
4004 We also keep two one-byte offsets, which are to be subtracted from the
4005 constant pointers to yield addresses in the GPR and FPR save areas.
4006 These are downcounted as float or non-float arguments are used,
4007 and when they get to zero, the argument must be obtained from the
4009 If !EABI_FLOAT_VARARGS_P, then no FPR save area exists, and a single
4010 pointer is enough. It's started at the GPR save area, and is
4012 Note that the GPR save area is not constant size, due to optimization
4013 in the prologue. Hence, we can't use a design with two pointers
4014 and two offsets, although we could have designed this with two pointers
4015 and three offsets. */
4018 mips_build_builtin_va_list (void)
4020 if (EABI_FLOAT_VARARGS_P)
4022 tree f_ovfl, f_gtop, f_ftop, f_goff, f_foff, f_res, record;
4025 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4027 f_ovfl = build_decl (FIELD_DECL, get_identifier ("__overflow_argptr"),
4029 f_gtop = build_decl (FIELD_DECL, get_identifier ("__gpr_top"),
4031 f_ftop = build_decl (FIELD_DECL, get_identifier ("__fpr_top"),
4033 f_goff = build_decl (FIELD_DECL, get_identifier ("__gpr_offset"),
4034 unsigned_char_type_node);
4035 f_foff = build_decl (FIELD_DECL, get_identifier ("__fpr_offset"),
4036 unsigned_char_type_node);
4037 /* Explicitly pad to the size of a pointer, so that -Wpadded won't
4038 warn on every user file. */
4039 index = build_int_cst (NULL_TREE, GET_MODE_SIZE (ptr_mode) - 2 - 1);
4040 array = build_array_type (unsigned_char_type_node,
4041 build_index_type (index));
4042 f_res = build_decl (FIELD_DECL, get_identifier ("__reserved"), array);
4044 DECL_FIELD_CONTEXT (f_ovfl) = record;
4045 DECL_FIELD_CONTEXT (f_gtop) = record;
4046 DECL_FIELD_CONTEXT (f_ftop) = record;
4047 DECL_FIELD_CONTEXT (f_goff) = record;
4048 DECL_FIELD_CONTEXT (f_foff) = record;
4049 DECL_FIELD_CONTEXT (f_res) = record;
4051 TYPE_FIELDS (record) = f_ovfl;
4052 TREE_CHAIN (f_ovfl) = f_gtop;
4053 TREE_CHAIN (f_gtop) = f_ftop;
4054 TREE_CHAIN (f_ftop) = f_goff;
4055 TREE_CHAIN (f_goff) = f_foff;
4056 TREE_CHAIN (f_foff) = f_res;
4058 layout_type (record);
4061 else if (TARGET_IRIX && TARGET_IRIX6)
4062 /* On IRIX 6, this type is 'char *'. */
4063 return build_pointer_type (char_type_node);
4065 /* Otherwise, we use 'void *'. */
4066 return ptr_type_node;
4069 /* Implement va_start. */
4072 mips_va_start (tree valist, rtx nextarg)
4074 if (EABI_FLOAT_VARARGS_P)
4076 const CUMULATIVE_ARGS *cum;
4077 tree f_ovfl, f_gtop, f_ftop, f_goff, f_foff;
4078 tree ovfl, gtop, ftop, goff, foff;
4080 int gpr_save_area_size;
4081 int fpr_save_area_size;
4084 cum = ¤t_function_args_info;
4086 = (MAX_ARGS_IN_REGISTERS - cum->num_gprs) * UNITS_PER_WORD;
4088 = (MAX_ARGS_IN_REGISTERS - cum->num_fprs) * UNITS_PER_FPREG;
4090 f_ovfl = TYPE_FIELDS (va_list_type_node);
4091 f_gtop = TREE_CHAIN (f_ovfl);
4092 f_ftop = TREE_CHAIN (f_gtop);
4093 f_goff = TREE_CHAIN (f_ftop);
4094 f_foff = TREE_CHAIN (f_goff);
4096 ovfl = build3 (COMPONENT_REF, TREE_TYPE (f_ovfl), valist, f_ovfl,
4098 gtop = build3 (COMPONENT_REF, TREE_TYPE (f_gtop), valist, f_gtop,
4100 ftop = build3 (COMPONENT_REF, TREE_TYPE (f_ftop), valist, f_ftop,
4102 goff = build3 (COMPONENT_REF, TREE_TYPE (f_goff), valist, f_goff,
4104 foff = build3 (COMPONENT_REF, TREE_TYPE (f_foff), valist, f_foff,
4107 /* Emit code to initialize OVFL, which points to the next varargs
4108 stack argument. CUM->STACK_WORDS gives the number of stack
4109 words used by named arguments. */
4110 t = make_tree (TREE_TYPE (ovfl), virtual_incoming_args_rtx);
4111 if (cum->stack_words > 0)
4112 t = build2 (PLUS_EXPR, TREE_TYPE (ovfl), t,
4113 build_int_cst (NULL_TREE,
4114 cum->stack_words * UNITS_PER_WORD));
4115 t = build2 (MODIFY_EXPR, TREE_TYPE (ovfl), ovfl, t);
4116 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4118 /* Emit code to initialize GTOP, the top of the GPR save area. */
4119 t = make_tree (TREE_TYPE (gtop), virtual_incoming_args_rtx);
4120 t = build2 (MODIFY_EXPR, TREE_TYPE (gtop), gtop, t);
4121 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4123 /* Emit code to initialize FTOP, the top of the FPR save area.
4124 This address is gpr_save_area_bytes below GTOP, rounded
4125 down to the next fp-aligned boundary. */
4126 t = make_tree (TREE_TYPE (ftop), virtual_incoming_args_rtx);
4127 fpr_offset = gpr_save_area_size + UNITS_PER_FPVALUE - 1;
4128 fpr_offset &= ~(UNITS_PER_FPVALUE - 1);
4130 t = build2 (PLUS_EXPR, TREE_TYPE (ftop), t,
4131 build_int_cst (NULL_TREE, -fpr_offset));
4132 t = build2 (MODIFY_EXPR, TREE_TYPE (ftop), ftop, t);
4133 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4135 /* Emit code to initialize GOFF, the offset from GTOP of the
4136 next GPR argument. */
4137 t = build2 (MODIFY_EXPR, TREE_TYPE (goff), goff,
4138 build_int_cst (NULL_TREE, gpr_save_area_size));
4139 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4141 /* Likewise emit code to initialize FOFF, the offset from FTOP
4142 of the next FPR argument. */
4143 t = build2 (MODIFY_EXPR, TREE_TYPE (foff), foff,
4144 build_int_cst (NULL_TREE, fpr_save_area_size));
4145 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4149 nextarg = plus_constant (nextarg, -cfun->machine->varargs_size);
4150 std_expand_builtin_va_start (valist, nextarg);
4154 /* Implement va_arg. */
4157 mips_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4159 HOST_WIDE_INT size, rsize;
4163 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, 0);
4166 type = build_pointer_type (type);
4168 size = int_size_in_bytes (type);
4169 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
4171 if (mips_abi != ABI_EABI || !EABI_FLOAT_VARARGS_P)
4172 addr = std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
4175 /* Not a simple merged stack. */
4177 tree f_ovfl, f_gtop, f_ftop, f_goff, f_foff;
4178 tree ovfl, top, off, align;
4179 HOST_WIDE_INT osize;
4182 f_ovfl = TYPE_FIELDS (va_list_type_node);
4183 f_gtop = TREE_CHAIN (f_ovfl);
4184 f_ftop = TREE_CHAIN (f_gtop);
4185 f_goff = TREE_CHAIN (f_ftop);
4186 f_foff = TREE_CHAIN (f_goff);
4188 /* We maintain separate pointers and offsets for floating-point
4189 and integer arguments, but we need similar code in both cases.
4192 TOP be the top of the register save area;
4193 OFF be the offset from TOP of the next register;
4194 ADDR_RTX be the address of the argument;
4195 RSIZE be the number of bytes used to store the argument
4196 when it's in the register save area;
4197 OSIZE be the number of bytes used to store it when it's
4198 in the stack overflow area; and
4199 PADDING be (BYTES_BIG_ENDIAN ? OSIZE - RSIZE : 0)
4201 The code we want is:
4203 1: off &= -rsize; // round down
4206 4: addr_rtx = top - off;
4211 9: ovfl += ((intptr_t) ovfl + osize - 1) & -osize;
4212 10: addr_rtx = ovfl + PADDING;
4216 [1] and [9] can sometimes be optimized away. */
4218 ovfl = build3 (COMPONENT_REF, TREE_TYPE (f_ovfl), valist, f_ovfl,
4221 if (GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT
4222 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_FPVALUE)
4224 top = build3 (COMPONENT_REF, TREE_TYPE (f_ftop), valist, f_ftop,
4226 off = build3 (COMPONENT_REF, TREE_TYPE (f_foff), valist, f_foff,
4229 /* When floating-point registers are saved to the stack,
4230 each one will take up UNITS_PER_HWFPVALUE bytes, regardless
4231 of the float's precision. */
4232 rsize = UNITS_PER_HWFPVALUE;
4234 /* Overflow arguments are padded to UNITS_PER_WORD bytes
4235 (= PARM_BOUNDARY bits). This can be different from RSIZE
4238 (1) On 32-bit targets when TYPE is a structure such as:
4240 struct s { float f; };
4242 Such structures are passed in paired FPRs, so RSIZE
4243 will be 8 bytes. However, the structure only takes
4244 up 4 bytes of memory, so OSIZE will only be 4.
4246 (2) In combinations such as -mgp64 -msingle-float
4247 -fshort-double. Doubles passed in registers
4248 will then take up 4 (UNITS_PER_HWFPVALUE) bytes,
4249 but those passed on the stack take up
4250 UNITS_PER_WORD bytes. */
4251 osize = MAX (GET_MODE_SIZE (TYPE_MODE (type)), UNITS_PER_WORD);
4255 top = build3 (COMPONENT_REF, TREE_TYPE (f_gtop), valist, f_gtop,
4257 off = build3 (COMPONENT_REF, TREE_TYPE (f_goff), valist, f_goff,
4259 if (rsize > UNITS_PER_WORD)
4261 /* [1] Emit code for: off &= -rsize. */
4262 t = build2 (BIT_AND_EXPR, TREE_TYPE (off), off,
4263 build_int_cst (NULL_TREE, -rsize));
4264 t = build2 (MODIFY_EXPR, TREE_TYPE (off), off, t);
4265 gimplify_and_add (t, pre_p);
4270 /* [2] Emit code to branch if off == 0. */
4271 t = build2 (NE_EXPR, boolean_type_node, off,
4272 build_int_cst (TREE_TYPE (off), 0));
4273 addr = build3 (COND_EXPR, ptr_type_node, t, NULL_TREE, NULL_TREE);
4275 /* [5] Emit code for: off -= rsize. We do this as a form of
4276 post-increment not available to C. Also widen for the
4277 coming pointer arithmetic. */
4278 t = fold_convert (TREE_TYPE (off), build_int_cst (NULL_TREE, rsize));
4279 t = build2 (POSTDECREMENT_EXPR, TREE_TYPE (off), off, t);
4280 t = fold_convert (sizetype, t);
4281 t = fold_convert (TREE_TYPE (top), t);
4283 /* [4] Emit code for: addr_rtx = top - off. On big endian machines,
4284 the argument has RSIZE - SIZE bytes of leading padding. */
4285 t = build2 (MINUS_EXPR, TREE_TYPE (top), top, t);
4286 if (BYTES_BIG_ENDIAN && rsize > size)
4288 u = fold_convert (TREE_TYPE (t), build_int_cst (NULL_TREE,
4290 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, u);
4292 COND_EXPR_THEN (addr) = t;
4294 if (osize > UNITS_PER_WORD)
4296 /* [9] Emit: ovfl += ((intptr_t) ovfl + osize - 1) & -osize. */
4297 u = fold_convert (TREE_TYPE (ovfl),
4298 build_int_cst (NULL_TREE, osize - 1));
4299 t = build2 (PLUS_EXPR, TREE_TYPE (ovfl), ovfl, u);
4300 u = fold_convert (TREE_TYPE (ovfl),
4301 build_int_cst (NULL_TREE, -osize));
4302 t = build2 (BIT_AND_EXPR, TREE_TYPE (ovfl), t, u);
4303 align = build2 (MODIFY_EXPR, TREE_TYPE (ovfl), ovfl, t);
4308 /* [10, 11]. Emit code to store ovfl in addr_rtx, then
4309 post-increment ovfl by osize. On big-endian machines,
4310 the argument has OSIZE - SIZE bytes of leading padding. */
4311 u = fold_convert (TREE_TYPE (ovfl),
4312 build_int_cst (NULL_TREE, osize));
4313 t = build2 (POSTINCREMENT_EXPR, TREE_TYPE (ovfl), ovfl, u);
4314 if (BYTES_BIG_ENDIAN && osize > size)
4316 u = fold_convert (TREE_TYPE (t),
4317 build_int_cst (NULL_TREE, osize - size));
4318 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, u);
4321 /* String [9] and [10,11] together. */
4323 t = build2 (COMPOUND_EXPR, TREE_TYPE (t), align, t);
4324 COND_EXPR_ELSE (addr) = t;
4326 addr = fold_convert (build_pointer_type (type), addr);
4327 addr = build_va_arg_indirect_ref (addr);
4331 addr = build_va_arg_indirect_ref (addr);
4336 /* Return true if it is possible to use left/right accesses for a
4337 bitfield of WIDTH bits starting BITPOS bits into *OP. When
4338 returning true, update *OP, *LEFT and *RIGHT as follows:
4340 *OP is a BLKmode reference to the whole field.
4342 *LEFT is a QImode reference to the first byte if big endian or
4343 the last byte if little endian. This address can be used in the
4344 left-side instructions (lwl, swl, ldl, sdl).
4346 *RIGHT is a QImode reference to the opposite end of the field and
4347 can be used in the patterning right-side instruction. */
4350 mips_get_unaligned_mem (rtx *op, unsigned int width, int bitpos,
4351 rtx *left, rtx *right)
4355 /* Check that the operand really is a MEM. Not all the extv and
4356 extzv predicates are checked. */
4360 /* Check that the size is valid. */
4361 if (width != 32 && (!TARGET_64BIT || width != 64))
4364 /* We can only access byte-aligned values. Since we are always passed
4365 a reference to the first byte of the field, it is not necessary to
4366 do anything with BITPOS after this check. */
4367 if (bitpos % BITS_PER_UNIT != 0)
4370 /* Reject aligned bitfields: we want to use a normal load or store
4371 instead of a left/right pair. */
4372 if (MEM_ALIGN (*op) >= width)
4375 /* Adjust *OP to refer to the whole field. This also has the effect
4376 of legitimizing *OP's address for BLKmode, possibly simplifying it. */
4377 *op = adjust_address (*op, BLKmode, 0);
4378 set_mem_size (*op, GEN_INT (width / BITS_PER_UNIT));
4380 /* Get references to both ends of the field. We deliberately don't
4381 use the original QImode *OP for FIRST since the new BLKmode one
4382 might have a simpler address. */
4383 first = adjust_address (*op, QImode, 0);
4384 last = adjust_address (*op, QImode, width / BITS_PER_UNIT - 1);
4386 /* Allocate to LEFT and RIGHT according to endianness. LEFT should
4387 be the upper word and RIGHT the lower word. */
4388 if (TARGET_BIG_ENDIAN)
4389 *left = first, *right = last;
4391 *left = last, *right = first;
4397 /* Try to emit the equivalent of (set DEST (zero_extract SRC WIDTH BITPOS)).
4398 Return true on success. We only handle cases where zero_extract is
4399 equivalent to sign_extract. */
4402 mips_expand_unaligned_load (rtx dest, rtx src, unsigned int width, int bitpos)
4404 rtx left, right, temp;
4406 /* If TARGET_64BIT, the destination of a 32-bit load will be a
4407 paradoxical word_mode subreg. This is the only case in which
4408 we allow the destination to be larger than the source. */
4409 if (GET_CODE (dest) == SUBREG
4410 && GET_MODE (dest) == DImode
4411 && SUBREG_BYTE (dest) == 0
4412 && GET_MODE (SUBREG_REG (dest)) == SImode)
4413 dest = SUBREG_REG (dest);
4415 /* After the above adjustment, the destination must be the same
4416 width as the source. */
4417 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4420 if (!mips_get_unaligned_mem (&src, width, bitpos, &left, &right))
4423 temp = gen_reg_rtx (GET_MODE (dest));
4424 if (GET_MODE (dest) == DImode)
4426 emit_insn (gen_mov_ldl (temp, src, left));
4427 emit_insn (gen_mov_ldr (dest, copy_rtx (src), right, temp));
4431 emit_insn (gen_mov_lwl (temp, src, left));
4432 emit_insn (gen_mov_lwr (dest, copy_rtx (src), right, temp));
4438 /* Try to expand (set (zero_extract DEST WIDTH BITPOS) SRC). Return
4442 mips_expand_unaligned_store (rtx dest, rtx src, unsigned int width, int bitpos)
4446 if (!mips_get_unaligned_mem (&dest, width, bitpos, &left, &right))
4449 src = gen_lowpart (mode_for_size (width, MODE_INT, 0), src);
4451 if (GET_MODE (src) == DImode)
4453 emit_insn (gen_mov_sdl (dest, src, left));
4454 emit_insn (gen_mov_sdr (copy_rtx (dest), copy_rtx (src), right));
4458 emit_insn (gen_mov_swl (dest, src, left));
4459 emit_insn (gen_mov_swr (copy_rtx (dest), copy_rtx (src), right));
4464 /* Return true if (zero_extract OP SIZE POSITION) can be used as the
4465 source of an "ext" instruction or the destination of an "ins"
4466 instruction. OP must be a register operand and the following
4467 conditions must hold:
4469 0 <= POSITION < GET_MODE_BITSIZE (GET_MODE (op))
4470 0 < SIZE <= GET_MODE_BITSIZE (GET_MODE (op))
4471 0 < POSITION + SIZE <= GET_MODE_BITSIZE (GET_MODE (op))
4473 Also reject lengths equal to a word as they are better handled
4474 by the move patterns. */
4477 mips_use_ins_ext_p (rtx op, rtx size, rtx position)
4479 HOST_WIDE_INT len, pos;
4481 if (!ISA_HAS_EXT_INS
4482 || !register_operand (op, VOIDmode)
4483 || GET_MODE_BITSIZE (GET_MODE (op)) > BITS_PER_WORD)
4486 len = INTVAL (size);
4487 pos = INTVAL (position);
4489 if (len <= 0 || len >= GET_MODE_BITSIZE (GET_MODE (op))
4490 || pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (op)))
4496 /* Set up globals to generate code for the ISA or processor
4497 described by INFO. */
4500 mips_set_architecture (const struct mips_cpu_info *info)
4504 mips_arch_info = info;
4505 mips_arch = info->cpu;
4506 mips_isa = info->isa;
4511 /* Likewise for tuning. */
4514 mips_set_tune (const struct mips_cpu_info *info)
4518 mips_tune_info = info;
4519 mips_tune = info->cpu;
4523 /* Implement TARGET_HANDLE_OPTION. */
4526 mips_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
4531 if (strcmp (arg, "32") == 0)
4533 else if (strcmp (arg, "o64") == 0)
4535 else if (strcmp (arg, "n32") == 0)
4537 else if (strcmp (arg, "64") == 0)
4539 else if (strcmp (arg, "eabi") == 0)
4540 mips_abi = ABI_EABI;
4547 return mips_parse_cpu (arg) != 0;
4550 mips_isa_info = mips_parse_cpu (ACONCAT (("mips", arg, NULL)));
4551 return mips_isa_info != 0;
4553 case OPT_mno_flush_func:
4554 mips_cache_flush_func = NULL;
4562 /* Set up the threshold for data to go into the small data area, instead
4563 of the normal data area, and detect any conflicts in the switches. */
4566 override_options (void)
4568 int i, start, regno;
4569 enum machine_mode mode;
4571 mips_section_threshold = g_switch_set ? g_switch_value : MIPS_DEFAULT_GVALUE;
4573 /* The following code determines the architecture and register size.
4574 Similar code was added to GAS 2.14 (see tc-mips.c:md_after_parse_args()).
4575 The GAS and GCC code should be kept in sync as much as possible. */
4577 if (mips_arch_string != 0)
4578 mips_set_architecture (mips_parse_cpu (mips_arch_string));
4580 if (mips_isa_info != 0)
4582 if (mips_arch_info == 0)
4583 mips_set_architecture (mips_isa_info);
4584 else if (mips_arch_info->isa != mips_isa_info->isa)
4585 error ("-%s conflicts with the other architecture options, "
4586 "which specify a %s processor",
4587 mips_isa_info->name,
4588 mips_cpu_info_from_isa (mips_arch_info->isa)->name);
4591 if (mips_arch_info == 0)
4593 #ifdef MIPS_CPU_STRING_DEFAULT
4594 mips_set_architecture (mips_parse_cpu (MIPS_CPU_STRING_DEFAULT));
4596 mips_set_architecture (mips_cpu_info_from_isa (MIPS_ISA_DEFAULT));
4600 if (ABI_NEEDS_64BIT_REGS && !ISA_HAS_64BIT_REGS)
4601 error ("-march=%s is not compatible with the selected ABI",
4602 mips_arch_info->name);
4604 /* Optimize for mips_arch, unless -mtune selects a different processor. */
4605 if (mips_tune_string != 0)
4606 mips_set_tune (mips_parse_cpu (mips_tune_string));
4608 if (mips_tune_info == 0)
4609 mips_set_tune (mips_arch_info);
4611 /* Set cost structure for the processor. */
4612 mips_cost = &mips_rtx_cost_data[mips_tune];
4614 if ((target_flags_explicit & MASK_64BIT) != 0)
4616 /* The user specified the size of the integer registers. Make sure
4617 it agrees with the ABI and ISA. */
4618 if (TARGET_64BIT && !ISA_HAS_64BIT_REGS)
4619 error ("-mgp64 used with a 32-bit processor");
4620 else if (!TARGET_64BIT && ABI_NEEDS_64BIT_REGS)
4621 error ("-mgp32 used with a 64-bit ABI");
4622 else if (TARGET_64BIT && ABI_NEEDS_32BIT_REGS)
4623 error ("-mgp64 used with a 32-bit ABI");
4627 /* Infer the integer register size from the ABI and processor.
4628 Restrict ourselves to 32-bit registers if that's all the
4629 processor has, or if the ABI cannot handle 64-bit registers. */
4630 if (ABI_NEEDS_32BIT_REGS || !ISA_HAS_64BIT_REGS)
4631 target_flags &= ~MASK_64BIT;
4633 target_flags |= MASK_64BIT;
4636 if ((target_flags_explicit & MASK_FLOAT64) != 0)
4638 /* Really, -mfp32 and -mfp64 are ornamental options. There's
4639 only one right answer here. */
4640 if (TARGET_64BIT && TARGET_DOUBLE_FLOAT && !TARGET_FLOAT64)
4641 error ("unsupported combination: %s", "-mgp64 -mfp32 -mdouble-float");
4642 else if (!TARGET_64BIT && TARGET_FLOAT64)
4643 error ("unsupported combination: %s", "-mgp32 -mfp64");
4644 else if (TARGET_SINGLE_FLOAT && TARGET_FLOAT64)
4645 error ("unsupported combination: %s", "-mfp64 -msingle-float");
4649 /* -msingle-float selects 32-bit float registers. Otherwise the
4650 float registers should be the same size as the integer ones. */
4651 if (TARGET_64BIT && TARGET_DOUBLE_FLOAT)
4652 target_flags |= MASK_FLOAT64;
4654 target_flags &= ~MASK_FLOAT64;
4657 /* End of code shared with GAS. */
4659 if ((target_flags_explicit & MASK_LONG64) == 0)
4661 if ((mips_abi == ABI_EABI && TARGET_64BIT) || mips_abi == ABI_64)
4662 target_flags |= MASK_LONG64;
4664 target_flags &= ~MASK_LONG64;
4667 if (MIPS_MARCH_CONTROLS_SOFT_FLOAT
4668 && (target_flags_explicit & MASK_SOFT_FLOAT) == 0)
4670 /* For some configurations, it is useful to have -march control
4671 the default setting of MASK_SOFT_FLOAT. */
4672 switch ((int) mips_arch)
4674 case PROCESSOR_R4100:
4675 case PROCESSOR_R4111:
4676 case PROCESSOR_R4120:
4677 case PROCESSOR_R4130:
4678 target_flags |= MASK_SOFT_FLOAT;
4682 target_flags &= ~MASK_SOFT_FLOAT;
4688 flag_pcc_struct_return = 0;
4690 if ((target_flags_explicit & MASK_BRANCHLIKELY) == 0)
4692 /* If neither -mbranch-likely nor -mno-branch-likely was given
4693 on the command line, set MASK_BRANCHLIKELY based on the target
4696 By default, we enable use of Branch Likely instructions on
4697 all architectures which support them with the following
4698 exceptions: when creating MIPS32 or MIPS64 code, and when
4699 tuning for architectures where their use tends to hurt
4702 The MIPS32 and MIPS64 architecture specifications say "Software
4703 is strongly encouraged to avoid use of Branch Likely
4704 instructions, as they will be removed from a future revision
4705 of the [MIPS32 and MIPS64] architecture." Therefore, we do not
4706 issue those instructions unless instructed to do so by
4708 if (ISA_HAS_BRANCHLIKELY
4709 && !(ISA_MIPS32 || ISA_MIPS32R2 || ISA_MIPS64)
4710 && !(TUNE_MIPS5500 || TUNE_SB1))
4711 target_flags |= MASK_BRANCHLIKELY;
4713 target_flags &= ~MASK_BRANCHLIKELY;
4715 if (TARGET_BRANCHLIKELY && !ISA_HAS_BRANCHLIKELY)
4716 warning (0, "generation of Branch Likely instructions enabled, but not supported by architecture");
4718 /* The effect of -mabicalls isn't defined for the EABI. */
4719 if (mips_abi == ABI_EABI && TARGET_ABICALLS)
4721 error ("unsupported combination: %s", "-mabicalls -mabi=eabi");
4722 target_flags &= ~MASK_ABICALLS;
4725 /* -fpic (-KPIC) is the default when TARGET_ABICALLS is defined. We need
4726 to set flag_pic so that the LEGITIMATE_PIC_OPERAND_P macro will work. */
4727 /* ??? -non_shared turns off pic code generation, but this is not
4729 if (TARGET_ABICALLS)
4732 if (mips_section_threshold > 0)
4733 warning (0, "-G is incompatible with PIC code which is the default");
4736 /* mips_split_addresses is a half-way house between explicit
4737 relocations and the traditional assembler macros. It can
4738 split absolute 32-bit symbolic constants into a high/lo_sum
4739 pair but uses macros for other sorts of access.
4741 Like explicit relocation support for REL targets, it relies
4742 on GNU extensions in the assembler and the linker.
4744 Although this code should work for -O0, it has traditionally
4745 been treated as an optimization. */
4746 if (!TARGET_MIPS16 && TARGET_SPLIT_ADDRESSES
4747 && optimize && !flag_pic
4748 && !ABI_HAS_64BIT_SYMBOLS)
4749 mips_split_addresses = 1;
4751 mips_split_addresses = 0;
4753 /* -mvr4130-align is a "speed over size" optimization: it usually produces
4754 faster code, but at the expense of more nops. Enable it at -O3 and
4756 if (optimize > 2 && (target_flags_explicit & MASK_VR4130_ALIGN) == 0)
4757 target_flags |= MASK_VR4130_ALIGN;
4759 /* When compiling for the mips16, we cannot use floating point. We
4760 record the original hard float value in mips16_hard_float. */
4763 if (TARGET_SOFT_FLOAT)
4764 mips16_hard_float = 0;
4766 mips16_hard_float = 1;
4767 target_flags |= MASK_SOFT_FLOAT;
4769 /* Don't run the scheduler before reload, since it tends to
4770 increase register pressure. */
4771 flag_schedule_insns = 0;
4773 /* Don't do hot/cold partitioning. The constant layout code expects
4774 the whole function to be in a single section. */
4775 flag_reorder_blocks_and_partition = 0;
4777 /* Silently disable -mexplicit-relocs since it doesn't apply
4778 to mips16 code. Even so, it would overly pedantic to warn
4779 about "-mips16 -mexplicit-relocs", especially given that
4780 we use a %gprel() operator. */
4781 target_flags &= ~MASK_EXPLICIT_RELOCS;
4784 /* When using explicit relocs, we call dbr_schedule from within
4786 if (TARGET_EXPLICIT_RELOCS)
4788 mips_flag_delayed_branch = flag_delayed_branch;
4789 flag_delayed_branch = 0;
4792 #ifdef MIPS_TFMODE_FORMAT
4793 REAL_MODE_FORMAT (TFmode) = &MIPS_TFMODE_FORMAT;
4796 /* Make sure that the user didn't turn off paired single support when
4797 MIPS-3D support is requested. */
4798 if (TARGET_MIPS3D && (target_flags_explicit & MASK_PAIRED_SINGLE_FLOAT)
4799 && !TARGET_PAIRED_SINGLE_FLOAT)
4800 error ("-mips3d requires -mpaired-single");
4802 /* If TARGET_MIPS3D, enable MASK_PAIRED_SINGLE_FLOAT. */
4804 target_flags |= MASK_PAIRED_SINGLE_FLOAT;
4806 /* Make sure that when TARGET_PAIRED_SINGLE_FLOAT is true, TARGET_FLOAT64
4807 and TARGET_HARD_FLOAT are both true. */
4808 if (TARGET_PAIRED_SINGLE_FLOAT && !(TARGET_FLOAT64 && TARGET_HARD_FLOAT))
4809 error ("-mips3d/-mpaired-single must be used with -mfp64 -mhard-float");
4811 /* Make sure that the ISA supports TARGET_PAIRED_SINGLE_FLOAT when it is
4813 if (TARGET_PAIRED_SINGLE_FLOAT && !ISA_MIPS64)
4814 error ("-mips3d/-mpaired-single must be used with -mips64");
4816 if (TARGET_MIPS16 && TARGET_DSP)
4817 error ("-mips16 and -mdsp cannot be used together");
4819 mips_print_operand_punct['?'] = 1;
4820 mips_print_operand_punct['#'] = 1;
4821 mips_print_operand_punct['/'] = 1;
4822 mips_print_operand_punct['&'] = 1;
4823 mips_print_operand_punct['!'] = 1;
4824 mips_print_operand_punct['*'] = 1;
4825 mips_print_operand_punct['@'] = 1;
4826 mips_print_operand_punct['.'] = 1;
4827 mips_print_operand_punct['('] = 1;
4828 mips_print_operand_punct[')'] = 1;
4829 mips_print_operand_punct['['] = 1;
4830 mips_print_operand_punct[']'] = 1;
4831 mips_print_operand_punct['<'] = 1;
4832 mips_print_operand_punct['>'] = 1;
4833 mips_print_operand_punct['{'] = 1;
4834 mips_print_operand_punct['}'] = 1;
4835 mips_print_operand_punct['^'] = 1;
4836 mips_print_operand_punct['$'] = 1;
4837 mips_print_operand_punct['+'] = 1;
4838 mips_print_operand_punct['~'] = 1;
4840 /* Set up array to map GCC register number to debug register number.
4841 Ignore the special purpose register numbers. */
4843 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4844 mips_dbx_regno[i] = -1;
4846 start = GP_DBX_FIRST - GP_REG_FIRST;
4847 for (i = GP_REG_FIRST; i <= GP_REG_LAST; i++)
4848 mips_dbx_regno[i] = i + start;
4850 start = FP_DBX_FIRST - FP_REG_FIRST;
4851 for (i = FP_REG_FIRST; i <= FP_REG_LAST; i++)
4852 mips_dbx_regno[i] = i + start;
4854 mips_dbx_regno[HI_REGNUM] = MD_DBX_FIRST + 0;
4855 mips_dbx_regno[LO_REGNUM] = MD_DBX_FIRST + 1;
4857 /* Set up array giving whether a given register can hold a given mode. */
4859 for (mode = VOIDmode;
4860 mode != MAX_MACHINE_MODE;
4861 mode = (enum machine_mode) ((int)mode + 1))
4863 register int size = GET_MODE_SIZE (mode);
4864 register enum mode_class class = GET_MODE_CLASS (mode);
4866 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4870 if (mode == CCV2mode)
4873 && (regno - ST_REG_FIRST) % 2 == 0);
4875 else if (mode == CCV4mode)
4878 && (regno - ST_REG_FIRST) % 4 == 0);
4880 else if (mode == CCmode)
4883 temp = (regno == FPSW_REGNUM);
4885 temp = (ST_REG_P (regno) || GP_REG_P (regno)
4886 || FP_REG_P (regno));
4889 else if (GP_REG_P (regno))
4890 temp = ((regno & 1) == 0 || size <= UNITS_PER_WORD);
4892 else if (FP_REG_P (regno))
4893 temp = ((regno % FP_INC) == 0)
4894 && (((class == MODE_FLOAT || class == MODE_COMPLEX_FLOAT
4895 || class == MODE_VECTOR_FLOAT)
4896 && size <= UNITS_PER_FPVALUE)
4897 /* Allow integer modes that fit into a single
4898 register. We need to put integers into FPRs
4899 when using instructions like cvt and trunc.
4900 We can't allow sizes smaller than a word,
4901 the FPU has no appropriate load/store
4902 instructions for those. */
4903 || (class == MODE_INT
4904 && size >= MIN_UNITS_PER_WORD
4905 && size <= UNITS_PER_FPREG)
4906 /* Allow TFmode for CCmode reloads. */
4907 || (ISA_HAS_8CC && mode == TFmode));
4909 else if (ACC_REG_P (regno))
4910 temp = (INTEGRAL_MODE_P (mode)
4911 && (size <= UNITS_PER_WORD
4912 || (ACC_HI_REG_P (regno)
4913 && size == 2 * UNITS_PER_WORD)));
4915 else if (ALL_COP_REG_P (regno))
4916 temp = (class == MODE_INT && size <= UNITS_PER_WORD);
4920 mips_hard_regno_mode_ok[(int)mode][regno] = temp;
4924 /* Save GPR registers in word_mode sized hunks. word_mode hasn't been
4925 initialized yet, so we can't use that here. */
4926 gpr_mode = TARGET_64BIT ? DImode : SImode;
4928 /* Provide default values for align_* for 64-bit targets. */
4929 if (TARGET_64BIT && !TARGET_MIPS16)
4931 if (align_loops == 0)
4933 if (align_jumps == 0)
4935 if (align_functions == 0)
4936 align_functions = 8;
4939 /* Function to allocate machine-dependent function status. */
4940 init_machine_status = &mips_init_machine_status;
4942 if (ABI_HAS_64BIT_SYMBOLS)
4944 if (TARGET_EXPLICIT_RELOCS)
4946 mips_split_p[SYMBOL_64_HIGH] = true;
4947 mips_hi_relocs[SYMBOL_64_HIGH] = "%highest(";
4948 mips_lo_relocs[SYMBOL_64_HIGH] = "%higher(";
4950 mips_split_p[SYMBOL_64_MID] = true;
4951 mips_hi_relocs[SYMBOL_64_MID] = "%higher(";
4952 mips_lo_relocs[SYMBOL_64_MID] = "%hi(";
4954 mips_split_p[SYMBOL_64_LOW] = true;
4955 mips_hi_relocs[SYMBOL_64_LOW] = "%hi(";
4956 mips_lo_relocs[SYMBOL_64_LOW] = "%lo(";
4958 mips_split_p[SYMBOL_GENERAL] = true;
4959 mips_lo_relocs[SYMBOL_GENERAL] = "%lo(";
4964 if (TARGET_EXPLICIT_RELOCS || mips_split_addresses)
4966 mips_split_p[SYMBOL_GENERAL] = true;
4967 mips_hi_relocs[SYMBOL_GENERAL] = "%hi(";
4968 mips_lo_relocs[SYMBOL_GENERAL] = "%lo(";
4974 /* The high part is provided by a pseudo copy of $gp. */
4975 mips_split_p[SYMBOL_SMALL_DATA] = true;
4976 mips_lo_relocs[SYMBOL_SMALL_DATA] = "%gprel(";
4979 if (TARGET_EXPLICIT_RELOCS)
4981 /* Small data constants are kept whole until after reload,
4982 then lowered by mips_rewrite_small_data. */
4983 mips_lo_relocs[SYMBOL_SMALL_DATA] = "%gp_rel(";
4985 mips_split_p[SYMBOL_GOT_LOCAL] = true;
4988 mips_lo_relocs[SYMBOL_GOTOFF_PAGE] = "%got_page(";
4989 mips_lo_relocs[SYMBOL_GOT_LOCAL] = "%got_ofst(";
4993 mips_lo_relocs[SYMBOL_GOTOFF_PAGE] = "%got(";
4994 mips_lo_relocs[SYMBOL_GOT_LOCAL] = "%lo(";
4999 /* The HIGH and LO_SUM are matched by special .md patterns. */
5000 mips_split_p[SYMBOL_GOT_GLOBAL] = true;
5002 mips_split_p[SYMBOL_GOTOFF_GLOBAL] = true;
5003 mips_hi_relocs[SYMBOL_GOTOFF_GLOBAL] = "%got_hi(";
5004 mips_lo_relocs[SYMBOL_GOTOFF_GLOBAL] = "%got_lo(";
5006 mips_split_p[SYMBOL_GOTOFF_CALL] = true;
5007 mips_hi_relocs[SYMBOL_GOTOFF_CALL] = "%call_hi(";
5008 mips_lo_relocs[SYMBOL_GOTOFF_CALL] = "%call_lo(";
5013 mips_lo_relocs[SYMBOL_GOTOFF_GLOBAL] = "%got_disp(";
5015 mips_lo_relocs[SYMBOL_GOTOFF_GLOBAL] = "%got(";
5016 mips_lo_relocs[SYMBOL_GOTOFF_CALL] = "%call16(";
5022 mips_split_p[SYMBOL_GOTOFF_LOADGP] = true;
5023 mips_hi_relocs[SYMBOL_GOTOFF_LOADGP] = "%hi(%neg(%gp_rel(";
5024 mips_lo_relocs[SYMBOL_GOTOFF_LOADGP] = "%lo(%neg(%gp_rel(";
5027 /* Thread-local relocation operators. */
5028 mips_lo_relocs[SYMBOL_TLSGD] = "%tlsgd(";
5029 mips_lo_relocs[SYMBOL_TLSLDM] = "%tlsldm(";
5030 mips_split_p[SYMBOL_DTPREL] = 1;
5031 mips_hi_relocs[SYMBOL_DTPREL] = "%dtprel_hi(";
5032 mips_lo_relocs[SYMBOL_DTPREL] = "%dtprel_lo(";
5033 mips_lo_relocs[SYMBOL_GOTTPREL] = "%gottprel(";
5034 mips_split_p[SYMBOL_TPREL] = 1;
5035 mips_hi_relocs[SYMBOL_TPREL] = "%tprel_hi(";
5036 mips_lo_relocs[SYMBOL_TPREL] = "%tprel_lo(";
5038 /* We don't have a thread pointer access instruction on MIPS16, or
5039 appropriate TLS relocations. */
5041 targetm.have_tls = false;
5043 /* Default to working around R4000 errata only if the processor
5044 was selected explicitly. */
5045 if ((target_flags_explicit & MASK_FIX_R4000) == 0
5046 && mips_matching_cpu_name_p (mips_arch_info->name, "r4000"))
5047 target_flags |= MASK_FIX_R4000;
5049 /* Default to working around R4400 errata only if the processor
5050 was selected explicitly. */
5051 if ((target_flags_explicit & MASK_FIX_R4400) == 0
5052 && mips_matching_cpu_name_p (mips_arch_info->name, "r4400"))
5053 target_flags |= MASK_FIX_R4400;
5056 /* Implement CONDITIONAL_REGISTER_USAGE. */
5059 mips_conditional_register_usage (void)
5065 for (regno = DSP_ACC_REG_FIRST; regno <= DSP_ACC_REG_LAST; regno++)
5066 fixed_regs[regno] = call_used_regs[regno] = 1;
5068 if (!TARGET_HARD_FLOAT)
5072 for (regno = FP_REG_FIRST; regno <= FP_REG_LAST; regno++)
5073 fixed_regs[regno] = call_used_regs[regno] = 1;
5074 for (regno = ST_REG_FIRST; regno <= ST_REG_LAST; regno++)
5075 fixed_regs[regno] = call_used_regs[regno] = 1;
5077 else if (! ISA_HAS_8CC)
5081 /* We only have a single condition code register. We
5082 implement this by hiding all the condition code registers,
5083 and generating RTL that refers directly to ST_REG_FIRST. */
5084 for (regno = ST_REG_FIRST; regno <= ST_REG_LAST; regno++)
5085 fixed_regs[regno] = call_used_regs[regno] = 1;
5087 /* In mips16 mode, we permit the $t temporary registers to be used
5088 for reload. We prohibit the unused $s registers, since they
5089 are caller saved, and saving them via a mips16 register would
5090 probably waste more time than just reloading the value. */
5093 fixed_regs[18] = call_used_regs[18] = 1;
5094 fixed_regs[19] = call_used_regs[19] = 1;
5095 fixed_regs[20] = call_used_regs[20] = 1;
5096 fixed_regs[21] = call_used_regs[21] = 1;
5097 fixed_regs[22] = call_used_regs[22] = 1;
5098 fixed_regs[23] = call_used_regs[23] = 1;
5099 fixed_regs[26] = call_used_regs[26] = 1;
5100 fixed_regs[27] = call_used_regs[27] = 1;
5101 fixed_regs[30] = call_used_regs[30] = 1;
5103 /* fp20-23 are now caller saved. */
5104 if (mips_abi == ABI_64)
5107 for (regno = FP_REG_FIRST + 20; regno < FP_REG_FIRST + 24; regno++)
5108 call_really_used_regs[regno] = call_used_regs[regno] = 1;
5110 /* Odd registers from fp21 to fp31 are now caller saved. */
5111 if (mips_abi == ABI_N32)
5114 for (regno = FP_REG_FIRST + 21; regno <= FP_REG_FIRST + 31; regno+=2)
5115 call_really_used_regs[regno] = call_used_regs[regno] = 1;
5119 /* Allocate a chunk of memory for per-function machine-dependent data. */
5120 static struct machine_function *
5121 mips_init_machine_status (void)
5123 return ((struct machine_function *)
5124 ggc_alloc_cleared (sizeof (struct machine_function)));
5127 /* On the mips16, we want to allocate $24 (T_REG) before other
5128 registers for instructions for which it is possible. This helps
5129 avoid shuffling registers around in order to set up for an xor,
5130 encouraging the compiler to use a cmp instead. */
5133 mips_order_regs_for_local_alloc (void)
5137 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5138 reg_alloc_order[i] = i;
5142 /* It really doesn't matter where we put register 0, since it is
5143 a fixed register anyhow. */
5144 reg_alloc_order[0] = 24;
5145 reg_alloc_order[24] = 0;
5150 /* The MIPS debug format wants all automatic variables and arguments
5151 to be in terms of the virtual frame pointer (stack pointer before
5152 any adjustment in the function), while the MIPS 3.0 linker wants
5153 the frame pointer to be the stack pointer after the initial
5154 adjustment. So, we do the adjustment here. The arg pointer (which
5155 is eliminated) points to the virtual frame pointer, while the frame
5156 pointer (which may be eliminated) points to the stack pointer after
5157 the initial adjustments. */
5160 mips_debugger_offset (rtx addr, HOST_WIDE_INT offset)
5162 rtx offset2 = const0_rtx;
5163 rtx reg = eliminate_constant_term (addr, &offset2);
5166 offset = INTVAL (offset2);
5168 if (reg == stack_pointer_rtx || reg == frame_pointer_rtx
5169 || reg == hard_frame_pointer_rtx)
5171 HOST_WIDE_INT frame_size = (!cfun->machine->frame.initialized)
5172 ? compute_frame_size (get_frame_size ())
5173 : cfun->machine->frame.total_size;
5175 /* MIPS16 frame is smaller */
5176 if (frame_pointer_needed && TARGET_MIPS16)
5177 frame_size -= cfun->machine->frame.args_size;
5179 offset = offset - frame_size;
5182 /* sdbout_parms does not want this to crash for unrecognized cases. */
5184 else if (reg != arg_pointer_rtx)
5185 fatal_insn ("mips_debugger_offset called with non stack/frame/arg pointer",
5192 /* Implement the PRINT_OPERAND macro. The MIPS-specific operand codes are:
5194 'X' OP is CONST_INT, prints 32 bits in hexadecimal format = "0x%08x",
5195 'x' OP is CONST_INT, prints 16 bits in hexadecimal format = "0x%04x",
5196 'h' OP is HIGH, prints %hi(X),
5197 'd' output integer constant in decimal,
5198 'z' if the operand is 0, use $0 instead of normal operand.
5199 'D' print second part of double-word register or memory operand.
5200 'L' print low-order register of double-word register operand.
5201 'M' print high-order register of double-word register operand.
5202 'C' print part of opcode for a branch condition.
5203 'F' print part of opcode for a floating-point branch condition.
5204 'N' print part of opcode for a branch condition, inverted.
5205 'W' print part of opcode for a floating-point branch condition, inverted.
5206 'T' print 'f' for (eq:CC ...), 't' for (ne:CC ...),
5207 'z' for (eq:?I ...), 'n' for (ne:?I ...).
5208 't' like 'T', but with the EQ/NE cases reversed
5209 'Y' for a CONST_INT X, print mips_fp_conditions[X]
5210 'Z' print the operand and a comma for ISA_HAS_8CC, otherwise print nothing
5211 'R' print the reloc associated with LO_SUM
5212 'q' print DSP accumulator registers
5214 The punctuation characters are:
5216 '(' Turn on .set noreorder
5217 ')' Turn on .set reorder
5218 '[' Turn on .set noat
5220 '<' Turn on .set nomacro
5221 '>' Turn on .set macro
5222 '{' Turn on .set volatile (not GAS)
5223 '}' Turn on .set novolatile (not GAS)
5224 '&' Turn on .set noreorder if filling delay slots
5225 '*' Turn on both .set noreorder and .set nomacro if filling delay slots
5226 '!' Turn on .set nomacro if filling delay slots
5227 '#' Print nop if in a .set noreorder section.
5228 '/' Like '#', but does nothing within a delayed branch sequence
5229 '?' Print 'l' if we are to use a branch likely instead of normal branch.
5230 '@' Print the name of the assembler temporary register (at or $1).
5231 '.' Print the name of the register with a hard-wired zero (zero or $0).
5232 '^' Print the name of the pic call-through register (t9 or $25).
5233 '$' Print the name of the stack pointer register (sp or $29).
5234 '+' Print the name of the gp register (usually gp or $28).
5235 '~' Output a branch alignment to LABEL_ALIGN(NULL). */
5238 print_operand (FILE *file, rtx op, int letter)
5240 register enum rtx_code code;
5242 if (PRINT_OPERAND_PUNCT_VALID_P (letter))
5247 if (mips_branch_likely)
5252 fputs (reg_names [GP_REG_FIRST + 1], file);
5256 fputs (reg_names [PIC_FUNCTION_ADDR_REGNUM], file);
5260 fputs (reg_names [GP_REG_FIRST + 0], file);
5264 fputs (reg_names[STACK_POINTER_REGNUM], file);
5268 fputs (reg_names[PIC_OFFSET_TABLE_REGNUM], file);
5272 if (final_sequence != 0 && set_noreorder++ == 0)
5273 fputs (".set\tnoreorder\n\t", file);
5277 if (final_sequence != 0)
5279 if (set_noreorder++ == 0)
5280 fputs (".set\tnoreorder\n\t", file);
5282 if (set_nomacro++ == 0)
5283 fputs (".set\tnomacro\n\t", file);
5288 if (final_sequence != 0 && set_nomacro++ == 0)
5289 fputs ("\n\t.set\tnomacro", file);
5293 if (set_noreorder != 0)
5294 fputs ("\n\tnop", file);
5298 /* Print an extra newline so that the delayed insn is separated
5299 from the following ones. This looks neater and is consistent
5300 with non-nop delayed sequences. */
5301 if (set_noreorder != 0 && final_sequence == 0)
5302 fputs ("\n\tnop\n", file);
5306 if (set_noreorder++ == 0)
5307 fputs (".set\tnoreorder\n\t", file);
5311 if (set_noreorder == 0)
5312 error ("internal error: %%) found without a %%( in assembler pattern");
5314 else if (--set_noreorder == 0)
5315 fputs ("\n\t.set\treorder", file);
5320 if (set_noat++ == 0)
5321 fputs (".set\tnoat\n\t", file);
5326 error ("internal error: %%] found without a %%[ in assembler pattern");
5327 else if (--set_noat == 0)
5328 fputs ("\n\t.set\tat", file);
5333 if (set_nomacro++ == 0)
5334 fputs (".set\tnomacro\n\t", file);
5338 if (set_nomacro == 0)
5339 error ("internal error: %%> found without a %%< in assembler pattern");
5340 else if (--set_nomacro == 0)
5341 fputs ("\n\t.set\tmacro", file);
5346 if (set_volatile++ == 0)
5347 fputs ("#.set\tvolatile\n\t", file);
5351 if (set_volatile == 0)
5352 error ("internal error: %%} found without a %%{ in assembler pattern");
5353 else if (--set_volatile == 0)
5354 fputs ("\n\t#.set\tnovolatile", file);
5360 if (align_labels_log > 0)
5361 ASM_OUTPUT_ALIGN (file, align_labels_log);
5366 error ("PRINT_OPERAND: unknown punctuation '%c'", letter);
5375 error ("PRINT_OPERAND null pointer");
5379 code = GET_CODE (op);
5384 case EQ: fputs ("eq", file); break;
5385 case NE: fputs ("ne", file); break;
5386 case GT: fputs ("gt", file); break;
5387 case GE: fputs ("ge", file); break;
5388 case LT: fputs ("lt", file); break;
5389 case LE: fputs ("le", file); break;
5390 case GTU: fputs ("gtu", file); break;
5391 case GEU: fputs ("geu", file); break;
5392 case LTU: fputs ("ltu", file); break;
5393 case LEU: fputs ("leu", file); break;
5395 fatal_insn ("PRINT_OPERAND, invalid insn for %%C", op);
5398 else if (letter == 'N')
5401 case EQ: fputs ("ne", file); break;
5402 case NE: fputs ("eq", file); break;
5403 case GT: fputs ("le", file); break;
5404 case GE: fputs ("lt", file); break;
5405 case LT: fputs ("ge", file); break;
5406 case LE: fputs ("gt", file); break;
5407 case GTU: fputs ("leu", file); break;
5408 case GEU: fputs ("ltu", file); break;
5409 case LTU: fputs ("geu", file); break;
5410 case LEU: fputs ("gtu", file); break;
5412 fatal_insn ("PRINT_OPERAND, invalid insn for %%N", op);
5415 else if (letter == 'F')
5418 case EQ: fputs ("c1f", file); break;
5419 case NE: fputs ("c1t", file); break;
5421 fatal_insn ("PRINT_OPERAND, invalid insn for %%F", op);
5424 else if (letter == 'W')
5427 case EQ: fputs ("c1t", file); break;
5428 case NE: fputs ("c1f", file); break;
5430 fatal_insn ("PRINT_OPERAND, invalid insn for %%W", op);
5433 else if (letter == 'h')
5435 if (GET_CODE (op) == HIGH)
5438 print_operand_reloc (file, op, mips_hi_relocs);
5441 else if (letter == 'R')
5442 print_operand_reloc (file, op, mips_lo_relocs);
5444 else if (letter == 'Y')
5446 if (GET_CODE (op) == CONST_INT
5447 && ((unsigned HOST_WIDE_INT) INTVAL (op)
5448 < ARRAY_SIZE (mips_fp_conditions)))
5449 fputs (mips_fp_conditions[INTVAL (op)], file);
5451 output_operand_lossage ("invalid %%Y value");
5454 else if (letter == 'Z')
5458 print_operand (file, op, 0);
5463 else if (letter == 'q')
5468 fatal_insn ("PRINT_OPERAND, invalid insn for %%q", op);
5470 regnum = REGNO (op);
5471 if (MD_REG_P (regnum))
5472 fprintf (file, "$ac0");
5473 else if (DSP_ACC_REG_P (regnum))
5474 fprintf (file, "$ac%c", reg_names[regnum][3]);
5476 fatal_insn ("PRINT_OPERAND, invalid insn for %%q", op);
5479 else if (code == REG || code == SUBREG)
5481 register int regnum;
5484 regnum = REGNO (op);
5486 regnum = true_regnum (op);
5488 if ((letter == 'M' && ! WORDS_BIG_ENDIAN)
5489 || (letter == 'L' && WORDS_BIG_ENDIAN)
5493 fprintf (file, "%s", reg_names[regnum]);
5496 else if (code == MEM)
5499 output_address (plus_constant (XEXP (op, 0), 4));
5501 output_address (XEXP (op, 0));
5504 else if (letter == 'x' && GET_CODE (op) == CONST_INT)
5505 fprintf (file, HOST_WIDE_INT_PRINT_HEX, 0xffff & INTVAL(op));
5507 else if (letter == 'X' && GET_CODE(op) == CONST_INT)
5508 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (op));
5510 else if (letter == 'd' && GET_CODE(op) == CONST_INT)
5511 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (INTVAL(op)));
5513 else if (letter == 'z' && op == CONST0_RTX (GET_MODE (op)))
5514 fputs (reg_names[GP_REG_FIRST], file);
5516 else if (letter == 'd' || letter == 'x' || letter == 'X')
5517 output_operand_lossage ("invalid use of %%d, %%x, or %%X");
5519 else if (letter == 'T' || letter == 't')
5521 int truth = (code == NE) == (letter == 'T');
5522 fputc ("zfnt"[truth * 2 + (GET_MODE (op) == CCmode)], file);
5525 else if (CONST_GP_P (op))
5526 fputs (reg_names[GLOBAL_POINTER_REGNUM], file);
5529 output_addr_const (file, op);
5533 /* Print symbolic operand OP, which is part of a HIGH or LO_SUM.
5534 RELOCS is the array of relocations to use. */
5537 print_operand_reloc (FILE *file, rtx op, const char **relocs)
5539 enum mips_symbol_type symbol_type;
5542 HOST_WIDE_INT offset;
5544 if (!mips_symbolic_constant_p (op, &symbol_type) || relocs[symbol_type] == 0)
5545 fatal_insn ("PRINT_OPERAND, invalid operand for relocation", op);
5547 /* If OP uses an UNSPEC address, we want to print the inner symbol. */
5548 mips_split_const (op, &base, &offset);
5549 if (UNSPEC_ADDRESS_P (base))
5550 op = plus_constant (UNSPEC_ADDRESS (base), offset);
5552 fputs (relocs[symbol_type], file);
5553 output_addr_const (file, op);
5554 for (p = relocs[symbol_type]; *p != 0; p++)
5559 /* Output address operand X to FILE. */
5562 print_operand_address (FILE *file, rtx x)
5564 struct mips_address_info addr;
5566 if (mips_classify_address (&addr, x, word_mode, true))
5570 print_operand (file, addr.offset, 0);
5571 fprintf (file, "(%s)", reg_names[REGNO (addr.reg)]);
5574 case ADDRESS_LO_SUM:
5575 print_operand (file, addr.offset, 'R');
5576 fprintf (file, "(%s)", reg_names[REGNO (addr.reg)]);
5579 case ADDRESS_CONST_INT:
5580 output_addr_const (file, x);
5581 fprintf (file, "(%s)", reg_names[0]);
5584 case ADDRESS_SYMBOLIC:
5585 output_addr_const (file, x);
5591 /* When using assembler macros, keep track of all of small-data externs
5592 so that mips_file_end can emit the appropriate declarations for them.
5594 In most cases it would be safe (though pointless) to emit .externs
5595 for other symbols too. One exception is when an object is within
5596 the -G limit but declared by the user to be in a section other
5597 than .sbss or .sdata. */
5600 mips_output_external (FILE *file ATTRIBUTE_UNUSED, tree decl, const char *name)
5602 register struct extern_list *p;
5604 if (!TARGET_EXPLICIT_RELOCS && mips_in_small_data_p (decl))
5606 p = (struct extern_list *) ggc_alloc (sizeof (struct extern_list));
5607 p->next = extern_head;
5609 p->size = int_size_in_bytes (TREE_TYPE (decl));
5613 if (TARGET_IRIX && mips_abi == ABI_32 && TREE_CODE (decl) == FUNCTION_DECL)
5615 p = (struct extern_list *) ggc_alloc (sizeof (struct extern_list));
5616 p->next = extern_head;
5627 irix_output_external_libcall (rtx fun)
5629 register struct extern_list *p;
5631 if (mips_abi == ABI_32)
5633 p = (struct extern_list *) ggc_alloc (sizeof (struct extern_list));
5634 p->next = extern_head;
5635 p->name = XSTR (fun, 0);
5642 /* Emit a new filename to a stream. If we are smuggling stabs, try to
5643 put out a MIPS ECOFF file and a stab. */
5646 mips_output_filename (FILE *stream, const char *name)
5649 /* If we are emitting DWARF-2, let dwarf2out handle the ".file"
5651 if (write_symbols == DWARF2_DEBUG)
5653 else if (mips_output_filename_first_time)
5655 mips_output_filename_first_time = 0;
5656 num_source_filenames += 1;
5657 current_function_file = name;
5658 fprintf (stream, "\t.file\t%d ", num_source_filenames);
5659 output_quoted_string (stream, name);
5660 putc ('\n', stream);
5663 /* If we are emitting stabs, let dbxout.c handle this (except for
5664 the mips_output_filename_first_time case). */
5665 else if (write_symbols == DBX_DEBUG)
5668 else if (name != current_function_file
5669 && strcmp (name, current_function_file) != 0)
5671 num_source_filenames += 1;
5672 current_function_file = name;
5673 fprintf (stream, "\t.file\t%d ", num_source_filenames);
5674 output_quoted_string (stream, name);
5675 putc ('\n', stream);
5679 /* Output an ASCII string, in a space-saving way. PREFIX is the string
5680 that should be written before the opening quote, such as "\t.ascii\t"
5681 for real string data or "\t# " for a comment. */
5684 mips_output_ascii (FILE *stream, const char *string_param, size_t len,
5689 register const unsigned char *string =
5690 (const unsigned char *)string_param;
5692 fprintf (stream, "%s\"", prefix);
5693 for (i = 0; i < len; i++)
5695 register int c = string[i];
5699 if (c == '\\' || c == '\"')
5701 putc ('\\', stream);
5709 fprintf (stream, "\\%03o", c);
5713 if (cur_pos > 72 && i+1 < len)
5716 fprintf (stream, "\"\n%s\"", prefix);
5719 fprintf (stream, "\"\n");
5722 /* Implement TARGET_ASM_FILE_START. */
5725 mips_file_start (void)
5727 default_file_start ();
5731 /* Generate a special section to describe the ABI switches used to
5732 produce the resultant binary. This used to be done by the assembler
5733 setting bits in the ELF header's flags field, but we have run out of
5734 bits. GDB needs this information in order to be able to correctly
5735 debug these binaries. See the function mips_gdbarch_init() in
5736 gdb/mips-tdep.c. This is unnecessary for the IRIX 5/6 ABIs and
5737 causes unnecessary IRIX 6 ld warnings. */
5738 const char * abi_string = NULL;
5742 case ABI_32: abi_string = "abi32"; break;
5743 case ABI_N32: abi_string = "abiN32"; break;
5744 case ABI_64: abi_string = "abi64"; break;
5745 case ABI_O64: abi_string = "abiO64"; break;
5746 case ABI_EABI: abi_string = TARGET_64BIT ? "eabi64" : "eabi32"; break;
5750 /* Note - we use fprintf directly rather than calling switch_to_section
5751 because in this way we can avoid creating an allocated section. We
5752 do not want this section to take up any space in the running
5754 fprintf (asm_out_file, "\t.section .mdebug.%s\n", abi_string);
5756 /* There is no ELF header flag to distinguish long32 forms of the
5757 EABI from long64 forms. Emit a special section to help tools
5759 if (mips_abi == ABI_EABI)
5760 fprintf (asm_out_file, "\t.section .gcc_compiled_long%d\n",
5761 TARGET_LONG64 ? 64 : 32);
5763 /* Restore the default section. */
5764 fprintf (asm_out_file, "\t.previous\n");
5767 /* Generate the pseudo ops that System V.4 wants. */
5768 if (TARGET_ABICALLS)
5769 /* ??? but do not want this (or want pic0) if -non-shared? */
5770 fprintf (asm_out_file, "\t.abicalls\n");
5773 fprintf (asm_out_file, "\t.set\tmips16\n");
5775 if (flag_verbose_asm)
5776 fprintf (asm_out_file, "\n%s -G value = %d, Arch = %s, ISA = %d\n",
5778 mips_section_threshold, mips_arch_info->name, mips_isa);
5781 #ifdef BSS_SECTION_ASM_OP
5782 /* Implement ASM_OUTPUT_ALIGNED_BSS. This differs from the default only
5783 in the use of sbss. */
5786 mips_output_aligned_bss (FILE *stream, tree decl, const char *name,
5787 unsigned HOST_WIDE_INT size, int align)
5789 extern tree last_assemble_variable_decl;
5791 if (mips_in_small_data_p (decl))
5792 switch_to_section (get_named_section (NULL, ".sbss", 0));
5794 switch_to_section (bss_section);
5795 ASM_OUTPUT_ALIGN (stream, floor_log2 (align / BITS_PER_UNIT));
5796 last_assemble_variable_decl = decl;
5797 ASM_DECLARE_OBJECT_NAME (stream, name, decl);
5798 ASM_OUTPUT_SKIP (stream, size != 0 ? size : 1);
5802 /* Implement TARGET_ASM_FILE_END. When using assembler macros, emit
5803 .externs for any small-data variables that turned out to be external. */
5806 mips_file_end (void)
5809 struct extern_list *p;
5813 fputs ("\n", asm_out_file);
5815 for (p = extern_head; p != 0; p = p->next)
5817 name_tree = get_identifier (p->name);
5819 /* Positively ensure only one .extern for any given symbol. */
5820 if (!TREE_ASM_WRITTEN (name_tree)
5821 && TREE_SYMBOL_REFERENCED (name_tree))
5823 TREE_ASM_WRITTEN (name_tree) = 1;
5824 /* In IRIX 5 or IRIX 6 for the O32 ABI, we must output a
5825 `.global name .text' directive for every used but
5826 undefined function. If we don't, the linker may perform
5827 an optimization (skipping over the insns that set $gp)
5828 when it is unsafe. */
5829 if (TARGET_IRIX && mips_abi == ABI_32 && p->size == -1)
5831 fputs ("\t.globl ", asm_out_file);
5832 assemble_name (asm_out_file, p->name);
5833 fputs (" .text\n", asm_out_file);
5837 fputs ("\t.extern\t", asm_out_file);
5838 assemble_name (asm_out_file, p->name);
5839 fprintf (asm_out_file, ", %d\n", p->size);
5846 /* Implement ASM_OUTPUT_ALIGNED_DECL_COMMON. This is usually the same as the
5847 elfos.h version, but we also need to handle -muninit-const-in-rodata. */
5850 mips_output_aligned_decl_common (FILE *stream, tree decl, const char *name,
5851 unsigned HOST_WIDE_INT size,
5854 /* If the target wants uninitialized const declarations in
5855 .rdata then don't put them in .comm. */
5856 if (TARGET_EMBEDDED_DATA && TARGET_UNINIT_CONST_IN_RODATA
5857 && TREE_CODE (decl) == VAR_DECL && TREE_READONLY (decl)
5858 && (DECL_INITIAL (decl) == 0 || DECL_INITIAL (decl) == error_mark_node))
5860 if (TREE_PUBLIC (decl) && DECL_NAME (decl))
5861 targetm.asm_out.globalize_label (stream, name);
5863 switch_to_section (readonly_data_section);
5864 ASM_OUTPUT_ALIGN (stream, floor_log2 (align / BITS_PER_UNIT));
5865 mips_declare_object (stream, name, "",
5866 ":\n\t.space\t" HOST_WIDE_INT_PRINT_UNSIGNED "\n",
5870 mips_declare_common_object (stream, name, "\n\t.comm\t",
5874 /* Declare a common object of SIZE bytes using asm directive INIT_STRING.
5875 NAME is the name of the object and ALIGN is the required alignment
5876 in bytes. TAKES_ALIGNMENT_P is true if the directive takes a third
5877 alignment argument. */
5880 mips_declare_common_object (FILE *stream, const char *name,
5881 const char *init_string,
5882 unsigned HOST_WIDE_INT size,
5883 unsigned int align, bool takes_alignment_p)
5885 if (!takes_alignment_p)
5887 size += (align / BITS_PER_UNIT) - 1;
5888 size -= size % (align / BITS_PER_UNIT);
5889 mips_declare_object (stream, name, init_string,
5890 "," HOST_WIDE_INT_PRINT_UNSIGNED "\n", size);
5893 mips_declare_object (stream, name, init_string,
5894 "," HOST_WIDE_INT_PRINT_UNSIGNED ",%u\n",
5895 size, align / BITS_PER_UNIT);
5898 /* Emit either a label, .comm, or .lcomm directive. When using assembler
5899 macros, mark the symbol as written so that mips_file_end won't emit an
5900 .extern for it. STREAM is the output file, NAME is the name of the
5901 symbol, INIT_STRING is the string that should be written before the
5902 symbol and FINAL_STRING is the string that should be written after it.
5903 FINAL_STRING is a printf() format that consumes the remaining arguments. */
5906 mips_declare_object (FILE *stream, const char *name, const char *init_string,
5907 const char *final_string, ...)
5911 fputs (init_string, stream);
5912 assemble_name (stream, name);
5913 va_start (ap, final_string);
5914 vfprintf (stream, final_string, ap);
5917 if (!TARGET_EXPLICIT_RELOCS)
5919 tree name_tree = get_identifier (name);
5920 TREE_ASM_WRITTEN (name_tree) = 1;
5924 #ifdef ASM_OUTPUT_SIZE_DIRECTIVE
5925 extern int size_directive_output;
5927 /* Implement ASM_DECLARE_OBJECT_NAME. This is like most of the standard ELF
5928 definitions except that it uses mips_declare_object() to emit the label. */
5931 mips_declare_object_name (FILE *stream, const char *name,
5932 tree decl ATTRIBUTE_UNUSED)
5934 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
5935 ASM_OUTPUT_TYPE_DIRECTIVE (stream, name, "object");
5938 size_directive_output = 0;
5939 if (!flag_inhibit_size_directive && DECL_SIZE (decl))
5943 size_directive_output = 1;
5944 size = int_size_in_bytes (TREE_TYPE (decl));
5945 ASM_OUTPUT_SIZE_DIRECTIVE (stream, name, size);
5948 mips_declare_object (stream, name, "", ":\n");
5951 /* Implement ASM_FINISH_DECLARE_OBJECT. This is generic ELF stuff. */
5954 mips_finish_declare_object (FILE *stream, tree decl, int top_level, int at_end)
5958 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
5959 if (!flag_inhibit_size_directive
5960 && DECL_SIZE (decl) != 0
5961 && !at_end && top_level
5962 && DECL_INITIAL (decl) == error_mark_node
5963 && !size_directive_output)
5967 size_directive_output = 1;
5968 size = int_size_in_bytes (TREE_TYPE (decl));
5969 ASM_OUTPUT_SIZE_DIRECTIVE (stream, name, size);
5974 /* Return true if X is a small data address that can be rewritten
5978 mips_rewrite_small_data_p (rtx x)
5980 enum mips_symbol_type symbol_type;
5982 return (TARGET_EXPLICIT_RELOCS
5983 && mips_symbolic_constant_p (x, &symbol_type)
5984 && symbol_type == SYMBOL_SMALL_DATA);
5988 /* A for_each_rtx callback for mips_small_data_pattern_p. */
5991 mips_small_data_pattern_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
5993 if (GET_CODE (*loc) == LO_SUM)
5996 return mips_rewrite_small_data_p (*loc);
5999 /* Return true if OP refers to small data symbols directly, not through
6003 mips_small_data_pattern_p (rtx op)
6005 return for_each_rtx (&op, mips_small_data_pattern_1, 0);
6008 /* A for_each_rtx callback, used by mips_rewrite_small_data. */
6011 mips_rewrite_small_data_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6013 if (mips_rewrite_small_data_p (*loc))
6014 *loc = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, *loc);
6016 if (GET_CODE (*loc) == LO_SUM)
6022 /* If possible, rewrite OP so that it refers to small data using
6023 explicit relocations. */
6026 mips_rewrite_small_data (rtx op)
6028 op = copy_insn (op);
6029 for_each_rtx (&op, mips_rewrite_small_data_1, 0);
6033 /* Return true if the current function has an insn that implicitly
6037 mips_function_has_gp_insn (void)
6039 /* Don't bother rechecking if we found one last time. */
6040 if (!cfun->machine->has_gp_insn_p)
6044 push_topmost_sequence ();
6045 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6047 && GET_CODE (PATTERN (insn)) != USE
6048 && GET_CODE (PATTERN (insn)) != CLOBBER
6049 && (get_attr_got (insn) != GOT_UNSET
6050 || small_data_pattern (PATTERN (insn), VOIDmode)))
6052 pop_topmost_sequence ();
6054 cfun->machine->has_gp_insn_p = (insn != 0);
6056 return cfun->machine->has_gp_insn_p;
6060 /* Return the register that should be used as the global pointer
6061 within this function. Return 0 if the function doesn't need
6062 a global pointer. */
6065 mips_global_pointer (void)
6069 /* $gp is always available in non-abicalls code. */
6070 if (!TARGET_ABICALLS)
6071 return GLOBAL_POINTER_REGNUM;
6073 /* We must always provide $gp when it is used implicitly. */
6074 if (!TARGET_EXPLICIT_RELOCS)
6075 return GLOBAL_POINTER_REGNUM;
6077 /* FUNCTION_PROFILER includes a jal macro, so we need to give it
6079 if (current_function_profile)
6080 return GLOBAL_POINTER_REGNUM;
6082 /* If the function has a nonlocal goto, $gp must hold the correct
6083 global pointer for the target function. */
6084 if (current_function_has_nonlocal_goto)
6085 return GLOBAL_POINTER_REGNUM;
6087 /* If the gp is never referenced, there's no need to initialize it.
6088 Note that reload can sometimes introduce constant pool references
6089 into a function that otherwise didn't need them. For example,
6090 suppose we have an instruction like:
6092 (set (reg:DF R1) (float:DF (reg:SI R2)))
6094 If R2 turns out to be constant such as 1, the instruction may have a
6095 REG_EQUAL note saying that R1 == 1.0. Reload then has the option of
6096 using this constant if R2 doesn't get allocated to a register.
6098 In cases like these, reload will have added the constant to the pool
6099 but no instruction will yet refer to it. */
6100 if (!regs_ever_live[GLOBAL_POINTER_REGNUM]
6101 && !current_function_uses_const_pool
6102 && !mips_function_has_gp_insn ())
6105 /* We need a global pointer, but perhaps we can use a call-clobbered
6106 register instead of $gp. */
6107 if (TARGET_NEWABI && current_function_is_leaf)
6108 for (regno = GP_REG_FIRST; regno <= GP_REG_LAST; regno++)
6109 if (!regs_ever_live[regno]
6110 && call_used_regs[regno]
6111 && !fixed_regs[regno]
6112 && regno != PIC_FUNCTION_ADDR_REGNUM)
6115 return GLOBAL_POINTER_REGNUM;
6119 /* Return true if the current function must save REGNO. */
6122 mips_save_reg_p (unsigned int regno)
6124 /* We only need to save $gp for NewABI PIC. */
6125 if (regno == GLOBAL_POINTER_REGNUM)
6126 return (TARGET_ABICALLS && TARGET_NEWABI
6127 && cfun->machine->global_pointer == regno);
6129 /* Check call-saved registers. */
6130 if (regs_ever_live[regno] && !call_used_regs[regno])
6133 /* We need to save the old frame pointer before setting up a new one. */
6134 if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
6137 /* We need to save the incoming return address if it is ever clobbered
6138 within the function. */
6139 if (regno == GP_REG_FIRST + 31 && regs_ever_live[regno])
6146 return_type = DECL_RESULT (current_function_decl);
6148 /* $18 is a special case in mips16 code. It may be used to call
6149 a function which returns a floating point value, but it is
6150 marked in call_used_regs. */
6151 if (regno == GP_REG_FIRST + 18 && regs_ever_live[regno])
6154 /* $31 is also a special case. It will be used to copy a return
6155 value into the floating point registers if the return value is
6157 if (regno == GP_REG_FIRST + 31
6158 && mips16_hard_float
6159 && !aggregate_value_p (return_type, current_function_decl)
6160 && GET_MODE_CLASS (DECL_MODE (return_type)) == MODE_FLOAT
6161 && GET_MODE_SIZE (DECL_MODE (return_type)) <= UNITS_PER_FPVALUE)
6169 /* Return the bytes needed to compute the frame pointer from the current
6170 stack pointer. SIZE is the size (in bytes) of the local variables.
6172 MIPS stack frames look like:
6174 Before call After call
6175 +-----------------------+ +-----------------------+
6178 | caller's temps. | | caller's temps. |
6180 +-----------------------+ +-----------------------+
6182 | arguments on stack. | | arguments on stack. |
6184 +-----------------------+ +-----------------------+
6185 | 4 words to save | | 4 words to save |
6186 | arguments passed | | arguments passed |
6187 | in registers, even | | in registers, even |
6188 SP->| if not passed. | VFP->| if not passed. |
6189 +-----------------------+ +-----------------------+
6191 | fp register save |
6193 +-----------------------+
6195 | gp register save |
6197 +-----------------------+
6201 +-----------------------+
6203 | alloca allocations |
6205 +-----------------------+
6207 | GP save for V.4 abi |
6209 +-----------------------+
6211 | arguments on stack |
6213 +-----------------------+
6215 | arguments passed |
6216 | in registers, even |
6217 low SP->| if not passed. |
6218 memory +-----------------------+
6223 compute_frame_size (HOST_WIDE_INT size)
6226 HOST_WIDE_INT total_size; /* # bytes that the entire frame takes up */
6227 HOST_WIDE_INT var_size; /* # bytes that variables take up */
6228 HOST_WIDE_INT args_size; /* # bytes that outgoing arguments take up */
6229 HOST_WIDE_INT cprestore_size; /* # bytes that the cprestore slot takes up */
6230 HOST_WIDE_INT gp_reg_rounded; /* # bytes needed to store gp after rounding */
6231 HOST_WIDE_INT gp_reg_size; /* # bytes needed to store gp regs */
6232 HOST_WIDE_INT fp_reg_size; /* # bytes needed to store fp regs */
6233 unsigned int mask; /* mask of saved gp registers */
6234 unsigned int fmask; /* mask of saved fp registers */
6236 cfun->machine->global_pointer = mips_global_pointer ();
6242 var_size = MIPS_STACK_ALIGN (size);
6243 args_size = current_function_outgoing_args_size;
6244 cprestore_size = MIPS_STACK_ALIGN (STARTING_FRAME_OFFSET) - args_size;
6246 /* The space set aside by STARTING_FRAME_OFFSET isn't needed in leaf
6247 functions. If the function has local variables, we're committed
6248 to allocating it anyway. Otherwise reclaim it here. */
6249 if (var_size == 0 && current_function_is_leaf)
6250 cprestore_size = args_size = 0;
6252 /* The MIPS 3.0 linker does not like functions that dynamically
6253 allocate the stack and have 0 for STACK_DYNAMIC_OFFSET, since it
6254 looks like we are trying to create a second frame pointer to the
6255 function, so allocate some stack space to make it happy. */
6257 if (args_size == 0 && current_function_calls_alloca)
6258 args_size = 4 * UNITS_PER_WORD;
6260 total_size = var_size + args_size + cprestore_size;
6262 /* Calculate space needed for gp registers. */
6263 for (regno = GP_REG_FIRST; regno <= GP_REG_LAST; regno++)
6264 if (mips_save_reg_p (regno))
6266 gp_reg_size += GET_MODE_SIZE (gpr_mode);
6267 mask |= 1 << (regno - GP_REG_FIRST);
6270 /* We need to restore these for the handler. */
6271 if (current_function_calls_eh_return)
6276 regno = EH_RETURN_DATA_REGNO (i);
6277 if (regno == INVALID_REGNUM)
6279 gp_reg_size += GET_MODE_SIZE (gpr_mode);
6280 mask |= 1 << (regno - GP_REG_FIRST);
6284 /* This loop must iterate over the same space as its companion in
6285 save_restore_insns. */
6286 for (regno = (FP_REG_LAST - FP_INC + 1);
6287 regno >= FP_REG_FIRST;
6290 if (mips_save_reg_p (regno))
6292 fp_reg_size += FP_INC * UNITS_PER_FPREG;
6293 fmask |= ((1 << FP_INC) - 1) << (regno - FP_REG_FIRST);
6297 gp_reg_rounded = MIPS_STACK_ALIGN (gp_reg_size);
6298 total_size += gp_reg_rounded + MIPS_STACK_ALIGN (fp_reg_size);
6300 /* Add in the space required for saving incoming register arguments. */
6301 total_size += current_function_pretend_args_size;
6302 total_size += MIPS_STACK_ALIGN (cfun->machine->varargs_size);
6304 /* Save other computed information. */
6305 cfun->machine->frame.total_size = total_size;
6306 cfun->machine->frame.var_size = var_size;
6307 cfun->machine->frame.args_size = args_size;
6308 cfun->machine->frame.cprestore_size = cprestore_size;
6309 cfun->machine->frame.gp_reg_size = gp_reg_size;
6310 cfun->machine->frame.fp_reg_size = fp_reg_size;
6311 cfun->machine->frame.mask = mask;
6312 cfun->machine->frame.fmask = fmask;
6313 cfun->machine->frame.initialized = reload_completed;
6314 cfun->machine->frame.num_gp = gp_reg_size / UNITS_PER_WORD;
6315 cfun->machine->frame.num_fp = fp_reg_size / (FP_INC * UNITS_PER_FPREG);
6319 HOST_WIDE_INT offset;
6321 offset = (args_size + cprestore_size + var_size
6322 + gp_reg_size - GET_MODE_SIZE (gpr_mode));
6323 cfun->machine->frame.gp_sp_offset = offset;
6324 cfun->machine->frame.gp_save_offset = offset - total_size;
6328 cfun->machine->frame.gp_sp_offset = 0;
6329 cfun->machine->frame.gp_save_offset = 0;
6334 HOST_WIDE_INT offset;
6336 offset = (args_size + cprestore_size + var_size
6337 + gp_reg_rounded + fp_reg_size
6338 - FP_INC * UNITS_PER_FPREG);
6339 cfun->machine->frame.fp_sp_offset = offset;
6340 cfun->machine->frame.fp_save_offset = offset - total_size;
6344 cfun->machine->frame.fp_sp_offset = 0;
6345 cfun->machine->frame.fp_save_offset = 0;
6348 /* Ok, we're done. */
6352 /* Implement INITIAL_ELIMINATION_OFFSET. FROM is either the frame
6353 pointer or argument pointer. TO is either the stack pointer or
6354 hard frame pointer. */
6357 mips_initial_elimination_offset (int from, int to)
6359 HOST_WIDE_INT offset;
6361 compute_frame_size (get_frame_size ());
6363 /* Set OFFSET to the offset from the stack pointer. */
6366 case FRAME_POINTER_REGNUM:
6370 case ARG_POINTER_REGNUM:
6371 offset = (cfun->machine->frame.total_size
6372 - current_function_pretend_args_size);
6379 if (TARGET_MIPS16 && to == HARD_FRAME_POINTER_REGNUM)
6380 offset -= cfun->machine->frame.args_size;
6385 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
6386 back to a previous frame. */
6388 mips_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
6393 return get_hard_reg_initial_val (Pmode, GP_REG_FIRST + 31);
6396 /* Use FN to save or restore register REGNO. MODE is the register's
6397 mode and OFFSET is the offset of its save slot from the current
6401 mips_save_restore_reg (enum machine_mode mode, int regno,
6402 HOST_WIDE_INT offset, mips_save_restore_fn fn)
6406 mem = gen_rtx_MEM (mode, plus_constant (stack_pointer_rtx, offset));
6408 fn (gen_rtx_REG (mode, regno), mem);
6412 /* Call FN for each register that is saved by the current function.
6413 SP_OFFSET is the offset of the current stack pointer from the start
6417 mips_for_each_saved_reg (HOST_WIDE_INT sp_offset, mips_save_restore_fn fn)
6419 #define BITSET_P(VALUE, BIT) (((VALUE) & (1L << (BIT))) != 0)
6421 enum machine_mode fpr_mode;
6422 HOST_WIDE_INT offset;
6425 /* Save registers starting from high to low. The debuggers prefer at least
6426 the return register be stored at func+4, and also it allows us not to
6427 need a nop in the epilog if at least one register is reloaded in
6428 addition to return address. */
6429 offset = cfun->machine->frame.gp_sp_offset - sp_offset;
6430 for (regno = GP_REG_LAST; regno >= GP_REG_FIRST; regno--)
6431 if (BITSET_P (cfun->machine->frame.mask, regno - GP_REG_FIRST))
6433 mips_save_restore_reg (gpr_mode, regno, offset, fn);
6434 offset -= GET_MODE_SIZE (gpr_mode);
6437 /* This loop must iterate over the same space as its companion in
6438 compute_frame_size. */
6439 offset = cfun->machine->frame.fp_sp_offset - sp_offset;
6440 fpr_mode = (TARGET_SINGLE_FLOAT ? SFmode : DFmode);
6441 for (regno = (FP_REG_LAST - FP_INC + 1);
6442 regno >= FP_REG_FIRST;
6444 if (BITSET_P (cfun->machine->frame.fmask, regno - FP_REG_FIRST))
6446 mips_save_restore_reg (fpr_mode, regno, offset, fn);
6447 offset -= GET_MODE_SIZE (fpr_mode);
6452 /* If we're generating n32 or n64 abicalls, and the current function
6453 does not use $28 as its global pointer, emit a cplocal directive.
6454 Use pic_offset_table_rtx as the argument to the directive. */
6457 mips_output_cplocal (void)
6459 if (!TARGET_EXPLICIT_RELOCS
6460 && cfun->machine->global_pointer > 0
6461 && cfun->machine->global_pointer != GLOBAL_POINTER_REGNUM)
6462 output_asm_insn (".cplocal %+", 0);
6465 /* If we're generating n32 or n64 abicalls, emit instructions
6466 to set up the global pointer. */
6469 mips_emit_loadgp (void)
6471 if (TARGET_ABICALLS && TARGET_NEWABI && cfun->machine->global_pointer > 0)
6473 rtx addr, offset, incoming_address;
6475 addr = XEXP (DECL_RTL (current_function_decl), 0);
6476 offset = mips_unspec_address (addr, SYMBOL_GOTOFF_LOADGP);
6477 incoming_address = gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM);
6478 emit_insn (gen_loadgp (offset, incoming_address));
6479 if (!TARGET_EXPLICIT_RELOCS)
6480 emit_insn (gen_loadgp_blockage ());
6484 /* Set up the stack and frame (if desired) for the function. */
6487 mips_output_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
6490 HOST_WIDE_INT tsize = cfun->machine->frame.total_size;
6492 #ifdef SDB_DEBUGGING_INFO
6493 if (debug_info_level != DINFO_LEVEL_TERSE && write_symbols == SDB_DEBUG)
6494 SDB_OUTPUT_SOURCE_LINE (file, DECL_SOURCE_LINE (current_function_decl));
6497 /* In mips16 mode, we may need to generate a 32 bit to handle
6498 floating point arguments. The linker will arrange for any 32 bit
6499 functions to call this stub, which will then jump to the 16 bit
6501 if (TARGET_MIPS16 && !TARGET_SOFT_FLOAT
6502 && current_function_args_info.fp_code != 0)
6503 build_mips16_function_stub (file);
6505 if (!FUNCTION_NAME_ALREADY_DECLARED)
6507 /* Get the function name the same way that toplev.c does before calling
6508 assemble_start_function. This is needed so that the name used here
6509 exactly matches the name used in ASM_DECLARE_FUNCTION_NAME. */
6510 fnname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
6512 if (!flag_inhibit_size_directive)
6514 fputs ("\t.ent\t", file);
6515 assemble_name (file, fnname);
6519 assemble_name (file, fnname);
6520 fputs (":\n", file);
6523 /* Stop mips_file_end from treating this function as external. */
6524 if (TARGET_IRIX && mips_abi == ABI_32)
6525 TREE_ASM_WRITTEN (DECL_NAME (cfun->decl)) = 1;
6527 if (!flag_inhibit_size_directive)
6529 /* .frame FRAMEREG, FRAMESIZE, RETREG */
6531 "\t.frame\t%s," HOST_WIDE_INT_PRINT_DEC ",%s\t\t"
6532 "# vars= " HOST_WIDE_INT_PRINT_DEC ", regs= %d/%d"
6533 ", args= " HOST_WIDE_INT_PRINT_DEC
6534 ", gp= " HOST_WIDE_INT_PRINT_DEC "\n",
6535 (reg_names[(frame_pointer_needed)
6536 ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM]),
6537 ((frame_pointer_needed && TARGET_MIPS16)
6538 ? tsize - cfun->machine->frame.args_size
6540 reg_names[GP_REG_FIRST + 31],
6541 cfun->machine->frame.var_size,
6542 cfun->machine->frame.num_gp,
6543 cfun->machine->frame.num_fp,
6544 cfun->machine->frame.args_size,
6545 cfun->machine->frame.cprestore_size);
6547 /* .mask MASK, GPOFFSET; .fmask FPOFFSET */
6548 fprintf (file, "\t.mask\t0x%08x," HOST_WIDE_INT_PRINT_DEC "\n",
6549 cfun->machine->frame.mask,
6550 cfun->machine->frame.gp_save_offset);
6551 fprintf (file, "\t.fmask\t0x%08x," HOST_WIDE_INT_PRINT_DEC "\n",
6552 cfun->machine->frame.fmask,
6553 cfun->machine->frame.fp_save_offset);
6556 OLD_SP == *FRAMEREG + FRAMESIZE => can find old_sp from nominated FP reg.
6557 HIGHEST_GP_SAVED == *FRAMEREG + FRAMESIZE + GPOFFSET => can find saved regs. */
6560 if (TARGET_ABICALLS && !TARGET_NEWABI && cfun->machine->global_pointer > 0)
6562 /* Handle the initialization of $gp for SVR4 PIC. */
6563 if (!cfun->machine->all_noreorder_p)
6564 output_asm_insn ("%(.cpload\t%^%)", 0);
6566 output_asm_insn ("%(.cpload\t%^\n\t%<", 0);
6568 else if (cfun->machine->all_noreorder_p)
6569 output_asm_insn ("%(%<", 0);
6571 /* Tell the assembler which register we're using as the global
6572 pointer. This is needed for thunks, since they can use either
6573 explicit relocs or assembler macros. */
6574 mips_output_cplocal ();
6577 /* Make the last instruction frame related and note that it performs
6578 the operation described by FRAME_PATTERN. */
6581 mips_set_frame_expr (rtx frame_pattern)
6585 insn = get_last_insn ();
6586 RTX_FRAME_RELATED_P (insn) = 1;
6587 REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6593 /* Return a frame-related rtx that stores REG at MEM.
6594 REG must be a single register. */
6597 mips_frame_set (rtx mem, rtx reg)
6601 /* If we're saving the return address register and the dwarf return
6602 address column differs from the hard register number, adjust the
6603 note reg to refer to the former. */
6604 if (REGNO (reg) == GP_REG_FIRST + 31
6605 && DWARF_FRAME_RETURN_COLUMN != GP_REG_FIRST + 31)
6606 reg = gen_rtx_REG (GET_MODE (reg), DWARF_FRAME_RETURN_COLUMN);
6608 set = gen_rtx_SET (VOIDmode, mem, reg);
6609 RTX_FRAME_RELATED_P (set) = 1;
6615 /* Save register REG to MEM. Make the instruction frame-related. */
6618 mips_save_reg (rtx reg, rtx mem)
6620 if (GET_MODE (reg) == DFmode && !TARGET_FLOAT64)
6624 if (mips_split_64bit_move_p (mem, reg))
6625 mips_split_64bit_move (mem, reg);
6627 emit_move_insn (mem, reg);
6629 x1 = mips_frame_set (mips_subword (mem, 0), mips_subword (reg, 0));
6630 x2 = mips_frame_set (mips_subword (mem, 1), mips_subword (reg, 1));
6631 mips_set_frame_expr (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, x1, x2)));
6636 && REGNO (reg) != GP_REG_FIRST + 31
6637 && !M16_REG_P (REGNO (reg)))
6639 /* Save a non-mips16 register by moving it through a temporary.
6640 We don't need to do this for $31 since there's a special
6641 instruction for it. */
6642 emit_move_insn (MIPS_PROLOGUE_TEMP (GET_MODE (reg)), reg);
6643 emit_move_insn (mem, MIPS_PROLOGUE_TEMP (GET_MODE (reg)));
6646 emit_move_insn (mem, reg);
6648 mips_set_frame_expr (mips_frame_set (mem, reg));
6653 /* Expand the prologue into a bunch of separate insns. */
6656 mips_expand_prologue (void)
6660 if (cfun->machine->global_pointer > 0)
6661 REGNO (pic_offset_table_rtx) = cfun->machine->global_pointer;
6663 size = compute_frame_size (get_frame_size ());
6665 /* Save the registers. Allocate up to MIPS_MAX_FIRST_STACK_STEP
6666 bytes beforehand; this is enough to cover the register save area
6667 without going out of range. */
6668 if ((cfun->machine->frame.mask | cfun->machine->frame.fmask) != 0)
6670 HOST_WIDE_INT step1;
6672 step1 = MIN (size, MIPS_MAX_FIRST_STACK_STEP);
6673 RTX_FRAME_RELATED_P (emit_insn (gen_add3_insn (stack_pointer_rtx,
6675 GEN_INT (-step1)))) = 1;
6677 mips_for_each_saved_reg (size, mips_save_reg);
6680 /* Allocate the rest of the frame. */
6683 if (SMALL_OPERAND (-size))
6684 RTX_FRAME_RELATED_P (emit_insn (gen_add3_insn (stack_pointer_rtx,
6686 GEN_INT (-size)))) = 1;
6689 emit_move_insn (MIPS_PROLOGUE_TEMP (Pmode), GEN_INT (size));
6692 /* There are no instructions to add or subtract registers
6693 from the stack pointer, so use the frame pointer as a
6694 temporary. We should always be using a frame pointer
6695 in this case anyway. */
6696 gcc_assert (frame_pointer_needed);
6697 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
6698 emit_insn (gen_sub3_insn (hard_frame_pointer_rtx,
6699 hard_frame_pointer_rtx,
6700 MIPS_PROLOGUE_TEMP (Pmode)));
6701 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
6704 emit_insn (gen_sub3_insn (stack_pointer_rtx,
6706 MIPS_PROLOGUE_TEMP (Pmode)));
6708 /* Describe the combined effect of the previous instructions. */
6710 (gen_rtx_SET (VOIDmode, stack_pointer_rtx,
6711 plus_constant (stack_pointer_rtx, -size)));
6715 /* Set up the frame pointer, if we're using one. In mips16 code,
6716 we point the frame pointer ahead of the outgoing argument area.
6717 This should allow more variables & incoming arguments to be
6718 accessed with unextended instructions. */
6719 if (frame_pointer_needed)
6721 if (TARGET_MIPS16 && cfun->machine->frame.args_size != 0)
6723 rtx offset = GEN_INT (cfun->machine->frame.args_size);
6724 if (SMALL_OPERAND (cfun->machine->frame.args_size))
6726 (emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
6731 emit_move_insn (MIPS_PROLOGUE_TEMP (Pmode), offset);
6732 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
6733 emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
6734 hard_frame_pointer_rtx,
6735 MIPS_PROLOGUE_TEMP (Pmode)));
6737 (gen_rtx_SET (VOIDmode, hard_frame_pointer_rtx,
6738 plus_constant (stack_pointer_rtx,
6739 cfun->machine->frame.args_size)));
6743 RTX_FRAME_RELATED_P (emit_move_insn (hard_frame_pointer_rtx,
6744 stack_pointer_rtx)) = 1;
6747 /* If generating o32/o64 abicalls, save $gp on the stack. */
6748 if (TARGET_ABICALLS && !TARGET_NEWABI && !current_function_is_leaf)
6749 emit_insn (gen_cprestore (GEN_INT (current_function_outgoing_args_size)));
6751 mips_emit_loadgp ();
6753 /* If we are profiling, make sure no instructions are scheduled before
6754 the call to mcount. */
6756 if (current_function_profile)
6757 emit_insn (gen_blockage ());
6760 /* Do any necessary cleanup after a function to restore stack, frame,
6763 #define RA_MASK BITMASK_HIGH /* 1 << 31 */
6766 mips_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
6767 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
6769 /* Reinstate the normal $gp. */
6770 REGNO (pic_offset_table_rtx) = GLOBAL_POINTER_REGNUM;
6771 mips_output_cplocal ();
6773 if (cfun->machine->all_noreorder_p)
6775 /* Avoid using %>%) since it adds excess whitespace. */
6776 output_asm_insn (".set\tmacro", 0);
6777 output_asm_insn (".set\treorder", 0);
6778 set_noreorder = set_nomacro = 0;
6781 if (!FUNCTION_NAME_ALREADY_DECLARED && !flag_inhibit_size_directive)
6785 /* Get the function name the same way that toplev.c does before calling
6786 assemble_start_function. This is needed so that the name used here
6787 exactly matches the name used in ASM_DECLARE_FUNCTION_NAME. */
6788 fnname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
6789 fputs ("\t.end\t", file);
6790 assemble_name (file, fnname);
6795 /* Emit instructions to restore register REG from slot MEM. */
6798 mips_restore_reg (rtx reg, rtx mem)
6800 /* There's no mips16 instruction to load $31 directly. Load into
6801 $7 instead and adjust the return insn appropriately. */
6802 if (TARGET_MIPS16 && REGNO (reg) == GP_REG_FIRST + 31)
6803 reg = gen_rtx_REG (GET_MODE (reg), 7);
6805 if (TARGET_MIPS16 && !M16_REG_P (REGNO (reg)))
6807 /* Can't restore directly; move through a temporary. */
6808 emit_move_insn (MIPS_EPILOGUE_TEMP (GET_MODE (reg)), mem);
6809 emit_move_insn (reg, MIPS_EPILOGUE_TEMP (GET_MODE (reg)));
6812 emit_move_insn (reg, mem);
6816 /* Expand the epilogue into a bunch of separate insns. SIBCALL_P is true
6817 if this epilogue precedes a sibling call, false if it is for a normal
6818 "epilogue" pattern. */
6821 mips_expand_epilogue (int sibcall_p)
6823 HOST_WIDE_INT step1, step2;
6826 if (!sibcall_p && mips_can_use_return_insn ())
6828 emit_jump_insn (gen_return ());
6832 /* Split the frame into two. STEP1 is the amount of stack we should
6833 deallocate before restoring the registers. STEP2 is the amount we
6834 should deallocate afterwards.
6836 Start off by assuming that no registers need to be restored. */
6837 step1 = cfun->machine->frame.total_size;
6840 /* Work out which register holds the frame address. Account for the
6841 frame pointer offset used by mips16 code. */
6842 if (!frame_pointer_needed)
6843 base = stack_pointer_rtx;
6846 base = hard_frame_pointer_rtx;
6848 step1 -= cfun->machine->frame.args_size;
6851 /* If we need to restore registers, deallocate as much stack as
6852 possible in the second step without going out of range. */
6853 if ((cfun->machine->frame.mask | cfun->machine->frame.fmask) != 0)
6855 step2 = MIN (step1, MIPS_MAX_FIRST_STACK_STEP);
6859 /* Set TARGET to BASE + STEP1. */
6865 /* Get an rtx for STEP1 that we can add to BASE. */
6866 adjust = GEN_INT (step1);
6867 if (!SMALL_OPERAND (step1))
6869 emit_move_insn (MIPS_EPILOGUE_TEMP (Pmode), adjust);
6870 adjust = MIPS_EPILOGUE_TEMP (Pmode);
6873 /* Normal mode code can copy the result straight into $sp. */
6875 target = stack_pointer_rtx;
6877 emit_insn (gen_add3_insn (target, base, adjust));
6880 /* Copy TARGET into the stack pointer. */
6881 if (target != stack_pointer_rtx)
6882 emit_move_insn (stack_pointer_rtx, target);
6884 /* If we're using addressing macros for n32/n64 abicalls, $gp is
6885 implicitly used by all SYMBOL_REFs. We must emit a blockage
6886 insn before restoring it. */
6887 if (TARGET_ABICALLS && TARGET_NEWABI && !TARGET_EXPLICIT_RELOCS)
6888 emit_insn (gen_blockage ());
6890 /* Restore the registers. */
6891 mips_for_each_saved_reg (cfun->machine->frame.total_size - step2,
6894 /* Deallocate the final bit of the frame. */
6896 emit_insn (gen_add3_insn (stack_pointer_rtx,
6900 /* Add in the __builtin_eh_return stack adjustment. We need to
6901 use a temporary in mips16 code. */
6902 if (current_function_calls_eh_return)
6906 emit_move_insn (MIPS_EPILOGUE_TEMP (Pmode), stack_pointer_rtx);
6907 emit_insn (gen_add3_insn (MIPS_EPILOGUE_TEMP (Pmode),
6908 MIPS_EPILOGUE_TEMP (Pmode),
6909 EH_RETURN_STACKADJ_RTX));
6910 emit_move_insn (stack_pointer_rtx, MIPS_EPILOGUE_TEMP (Pmode));
6913 emit_insn (gen_add3_insn (stack_pointer_rtx,
6915 EH_RETURN_STACKADJ_RTX));
6920 /* The mips16 loads the return address into $7, not $31. */
6921 if (TARGET_MIPS16 && (cfun->machine->frame.mask & RA_MASK) != 0)
6922 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode,
6923 GP_REG_FIRST + 7)));
6925 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode,
6926 GP_REG_FIRST + 31)));
6930 /* Return nonzero if this function is known to have a null epilogue.
6931 This allows the optimizer to omit jumps to jumps if no stack
6935 mips_can_use_return_insn (void)
6939 if (! reload_completed)
6942 if (regs_ever_live[31] || current_function_profile)
6945 return_type = DECL_RESULT (current_function_decl);
6947 /* In mips16 mode, a function which returns a floating point value
6948 needs to arrange to copy the return value into the floating point
6951 && mips16_hard_float
6952 && ! aggregate_value_p (return_type, current_function_decl)
6953 && GET_MODE_CLASS (DECL_MODE (return_type)) == MODE_FLOAT
6954 && GET_MODE_SIZE (DECL_MODE (return_type)) <= UNITS_PER_FPVALUE)
6957 if (cfun->machine->frame.initialized)
6958 return cfun->machine->frame.total_size == 0;
6960 return compute_frame_size (get_frame_size ()) == 0;
6963 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. Generate rtl rather than asm text
6964 in order to avoid duplicating too much logic from elsewhere. */
6967 mips_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
6968 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
6971 rtx this, temp1, temp2, insn, fnaddr;
6973 /* Pretend to be a post-reload pass while generating rtl. */
6975 reload_completed = 1;
6976 reset_block_changes ();
6978 /* Pick a global pointer for -mabicalls. Use $15 rather than $28
6979 for TARGET_NEWABI since the latter is a call-saved register. */
6980 if (TARGET_ABICALLS)
6981 cfun->machine->global_pointer
6982 = REGNO (pic_offset_table_rtx)
6983 = TARGET_NEWABI ? 15 : GLOBAL_POINTER_REGNUM;
6985 /* Set up the global pointer for n32 or n64 abicalls. */
6986 mips_emit_loadgp ();
6988 /* We need two temporary registers in some cases. */
6989 temp1 = gen_rtx_REG (Pmode, 2);
6990 temp2 = gen_rtx_REG (Pmode, 3);
6992 /* Find out which register contains the "this" pointer. */
6993 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
6994 this = gen_rtx_REG (Pmode, GP_ARG_FIRST + 1);
6996 this = gen_rtx_REG (Pmode, GP_ARG_FIRST);
6998 /* Add DELTA to THIS. */
7001 rtx offset = GEN_INT (delta);
7002 if (!SMALL_OPERAND (delta))
7004 emit_move_insn (temp1, offset);
7007 emit_insn (gen_add3_insn (this, this, offset));
7010 /* If needed, add *(*THIS + VCALL_OFFSET) to THIS. */
7011 if (vcall_offset != 0)
7015 /* Set TEMP1 to *THIS. */
7016 emit_move_insn (temp1, gen_rtx_MEM (Pmode, this));
7018 /* Set ADDR to a legitimate address for *THIS + VCALL_OFFSET. */
7019 addr = mips_add_offset (temp2, temp1, vcall_offset);
7021 /* Load the offset and add it to THIS. */
7022 emit_move_insn (temp1, gen_rtx_MEM (Pmode, addr));
7023 emit_insn (gen_add3_insn (this, this, temp1));
7026 /* Jump to the target function. Use a sibcall if direct jumps are
7027 allowed, otherwise load the address into a register first. */
7028 fnaddr = XEXP (DECL_RTL (function), 0);
7029 if (TARGET_MIPS16 || TARGET_ABICALLS || TARGET_LONG_CALLS)
7031 /* This is messy. gas treats "la $25,foo" as part of a call
7032 sequence and may allow a global "foo" to be lazily bound.
7033 The general move patterns therefore reject this combination.
7035 In this context, lazy binding would actually be OK for o32 and o64,
7036 but it's still wrong for n32 and n64; see mips_load_call_address.
7037 We must therefore load the address via a temporary register if
7038 mips_dangerous_for_la25_p.
7040 If we jump to the temporary register rather than $25, the assembler
7041 can use the move insn to fill the jump's delay slot. */
7042 if (TARGET_ABICALLS && !mips_dangerous_for_la25_p (fnaddr))
7043 temp1 = gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM);
7044 mips_load_call_address (temp1, fnaddr, true);
7046 if (TARGET_ABICALLS && REGNO (temp1) != PIC_FUNCTION_ADDR_REGNUM)
7047 emit_move_insn (gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM), temp1);
7048 emit_jump_insn (gen_indirect_jump (temp1));
7052 insn = emit_call_insn (gen_sibcall_internal (fnaddr, const0_rtx));
7053 SIBLING_CALL_P (insn) = 1;
7056 /* Run just enough of rest_of_compilation. This sequence was
7057 "borrowed" from alpha.c. */
7058 insn = get_insns ();
7059 insn_locators_initialize ();
7060 split_all_insns_noflow ();
7062 mips16_lay_out_constants ();
7063 shorten_branches (insn);
7064 final_start_function (insn, file, 1);
7065 final (insn, file, 1);
7066 final_end_function ();
7068 /* Clean up the vars set above. Note that final_end_function resets
7069 the global pointer for us. */
7070 reload_completed = 0;
7074 /* Returns nonzero if X contains a SYMBOL_REF. */
7077 symbolic_expression_p (rtx x)
7079 if (GET_CODE (x) == SYMBOL_REF)
7082 if (GET_CODE (x) == CONST)
7083 return symbolic_expression_p (XEXP (x, 0));
7086 return symbolic_expression_p (XEXP (x, 0));
7088 if (ARITHMETIC_P (x))
7089 return (symbolic_expression_p (XEXP (x, 0))
7090 || symbolic_expression_p (XEXP (x, 1)));
7095 /* Choose the section to use for the constant rtx expression X that has
7099 mips_select_rtx_section (enum machine_mode mode, rtx x,
7100 unsigned HOST_WIDE_INT align)
7104 /* In mips16 mode, the constant table always goes in the same section
7105 as the function, so that constants can be loaded using PC relative
7107 return function_section (current_function_decl);
7109 else if (TARGET_EMBEDDED_DATA)
7111 /* For embedded applications, always put constants in read-only data,
7112 in order to reduce RAM usage. */
7113 return mergeable_constant_section (mode, align, 0);
7117 /* For hosted applications, always put constants in small data if
7118 possible, as this gives the best performance. */
7119 /* ??? Consider using mergeable small data sections. */
7121 if (GET_MODE_SIZE (mode) <= (unsigned) mips_section_threshold
7122 && mips_section_threshold > 0)
7123 return get_named_section (NULL, ".sdata", 0);
7124 else if (flag_pic && symbolic_expression_p (x))
7125 return get_named_section (NULL, ".data.rel.ro", 3);
7127 return mergeable_constant_section (mode, align, 0);
7131 /* Implement TARGET_ASM_FUNCTION_RODATA_SECTION.
7133 The complication here is that, with the combination TARGET_ABICALLS
7134 && !TARGET_GPWORD, jump tables will use absolute addresses, and should
7135 therefore not be included in the read-only part of a DSO. Handle such
7136 cases by selecting a normal data section instead of a read-only one.
7137 The logic apes that in default_function_rodata_section. */
7140 mips_function_rodata_section (tree decl)
7142 if (!TARGET_ABICALLS || TARGET_GPWORD)
7143 return default_function_rodata_section (decl);
7145 if (decl && DECL_SECTION_NAME (decl))
7147 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
7148 if (DECL_ONE_ONLY (decl) && strncmp (name, ".gnu.linkonce.t.", 16) == 0)
7150 char *rname = ASTRDUP (name);
7152 return get_section (rname, SECTION_LINKONCE | SECTION_WRITE, decl);
7154 else if (flag_function_sections && flag_data_sections
7155 && strncmp (name, ".text.", 6) == 0)
7157 char *rname = ASTRDUP (name);
7158 memcpy (rname + 1, "data", 4);
7159 return get_section (rname, SECTION_WRITE, decl);
7162 return data_section;
7165 /* Implement TARGET_IN_SMALL_DATA_P. Return true if it would be safe to
7166 access DECL using %gp_rel(...)($gp). */
7169 mips_in_small_data_p (tree decl)
7173 if (TREE_CODE (decl) == STRING_CST || TREE_CODE (decl) == FUNCTION_DECL)
7176 /* We don't yet generate small-data references for -mabicalls. See related
7177 -G handling in override_options. */
7178 if (TARGET_ABICALLS)
7181 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl) != 0)
7185 /* Reject anything that isn't in a known small-data section. */
7186 name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
7187 if (strcmp (name, ".sdata") != 0 && strcmp (name, ".sbss") != 0)
7190 /* If a symbol is defined externally, the assembler will use the
7191 usual -G rules when deciding how to implement macros. */
7192 if (TARGET_EXPLICIT_RELOCS || !DECL_EXTERNAL (decl))
7195 else if (TARGET_EMBEDDED_DATA)
7197 /* Don't put constants into the small data section: we want them
7198 to be in ROM rather than RAM. */
7199 if (TREE_CODE (decl) != VAR_DECL)
7202 if (TREE_READONLY (decl)
7203 && !TREE_SIDE_EFFECTS (decl)
7204 && (!DECL_INITIAL (decl) || TREE_CONSTANT (DECL_INITIAL (decl))))
7208 size = int_size_in_bytes (TREE_TYPE (decl));
7209 return (size > 0 && size <= mips_section_threshold);
7212 /* See whether VALTYPE is a record whose fields should be returned in
7213 floating-point registers. If so, return the number of fields and
7214 list them in FIELDS (which should have two elements). Return 0
7217 For n32 & n64, a structure with one or two fields is returned in
7218 floating-point registers as long as every field has a floating-point
7222 mips_fpr_return_fields (tree valtype, tree *fields)
7230 if (TREE_CODE (valtype) != RECORD_TYPE)
7234 for (field = TYPE_FIELDS (valtype); field != 0; field = TREE_CHAIN (field))
7236 if (TREE_CODE (field) != FIELD_DECL)
7239 if (TREE_CODE (TREE_TYPE (field)) != REAL_TYPE)
7245 fields[i++] = field;
7251 /* Implement TARGET_RETURN_IN_MSB. For n32 & n64, we should return
7252 a value in the most significant part of $2/$3 if:
7254 - the target is big-endian;
7256 - the value has a structure or union type (we generalize this to
7257 cover aggregates from other languages too); and
7259 - the structure is not returned in floating-point registers. */
7262 mips_return_in_msb (tree valtype)
7266 return (TARGET_NEWABI
7267 && TARGET_BIG_ENDIAN
7268 && AGGREGATE_TYPE_P (valtype)
7269 && mips_fpr_return_fields (valtype, fields) == 0);
7273 /* Return a composite value in a pair of floating-point registers.
7274 MODE1 and OFFSET1 are the mode and byte offset for the first value,
7275 likewise MODE2 and OFFSET2 for the second. MODE is the mode of the
7278 For n32 & n64, $f0 always holds the first value and $f2 the second.
7279 Otherwise the values are packed together as closely as possible. */
7282 mips_return_fpr_pair (enum machine_mode mode,
7283 enum machine_mode mode1, HOST_WIDE_INT offset1,
7284 enum machine_mode mode2, HOST_WIDE_INT offset2)
7288 inc = (TARGET_NEWABI ? 2 : FP_INC);
7289 return gen_rtx_PARALLEL
7292 gen_rtx_EXPR_LIST (VOIDmode,
7293 gen_rtx_REG (mode1, FP_RETURN),
7295 gen_rtx_EXPR_LIST (VOIDmode,
7296 gen_rtx_REG (mode2, FP_RETURN + inc),
7297 GEN_INT (offset2))));
7302 /* Implement FUNCTION_VALUE and LIBCALL_VALUE. For normal calls,
7303 VALTYPE is the return type and MODE is VOIDmode. For libcalls,
7304 VALTYPE is null and MODE is the mode of the return value. */
7307 mips_function_value (tree valtype, tree func ATTRIBUTE_UNUSED,
7308 enum machine_mode mode)
7315 mode = TYPE_MODE (valtype);
7316 unsignedp = TYPE_UNSIGNED (valtype);
7318 /* Since we define TARGET_PROMOTE_FUNCTION_RETURN that returns
7319 true, we must promote the mode just as PROMOTE_MODE does. */
7320 mode = promote_mode (valtype, mode, &unsignedp, 1);
7322 /* Handle structures whose fields are returned in $f0/$f2. */
7323 switch (mips_fpr_return_fields (valtype, fields))
7326 return gen_rtx_REG (mode, FP_RETURN);
7329 return mips_return_fpr_pair (mode,
7330 TYPE_MODE (TREE_TYPE (fields[0])),
7331 int_byte_position (fields[0]),
7332 TYPE_MODE (TREE_TYPE (fields[1])),
7333 int_byte_position (fields[1]));
7336 /* If a value is passed in the most significant part of a register, see
7337 whether we have to round the mode up to a whole number of words. */
7338 if (mips_return_in_msb (valtype))
7340 HOST_WIDE_INT size = int_size_in_bytes (valtype);
7341 if (size % UNITS_PER_WORD != 0)
7343 size += UNITS_PER_WORD - size % UNITS_PER_WORD;
7344 mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
7348 /* For EABI, the class of return register depends entirely on MODE.
7349 For example, "struct { some_type x; }" and "union { some_type x; }"
7350 are returned in the same way as a bare "some_type" would be.
7351 Other ABIs only use FPRs for scalar, complex or vector types. */
7352 if (mips_abi != ABI_EABI && !FLOAT_TYPE_P (valtype))
7353 return gen_rtx_REG (mode, GP_RETURN);
7356 if ((GET_MODE_CLASS (mode) == MODE_FLOAT
7357 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7358 && GET_MODE_SIZE (mode) <= UNITS_PER_HWFPVALUE)
7359 return gen_rtx_REG (mode, FP_RETURN);
7361 /* Handle long doubles for n32 & n64. */
7363 return mips_return_fpr_pair (mode,
7365 DImode, GET_MODE_SIZE (mode) / 2);
7367 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
7368 && GET_MODE_SIZE (mode) <= UNITS_PER_HWFPVALUE * 2)
7369 return mips_return_fpr_pair (mode,
7370 GET_MODE_INNER (mode), 0,
7371 GET_MODE_INNER (mode),
7372 GET_MODE_SIZE (mode) / 2);
7374 return gen_rtx_REG (mode, GP_RETURN);
7377 /* Return nonzero when an argument must be passed by reference. */
7380 mips_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
7381 enum machine_mode mode, tree type,
7382 bool named ATTRIBUTE_UNUSED)
7384 if (mips_abi == ABI_EABI)
7388 /* ??? How should SCmode be handled? */
7389 if (type == NULL_TREE || mode == DImode || mode == DFmode)
7392 size = int_size_in_bytes (type);
7393 return size == -1 || size > UNITS_PER_WORD;
7397 /* If we have a variable-sized parameter, we have no choice. */
7398 return targetm.calls.must_pass_in_stack (mode, type);
7403 mips_callee_copies (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
7404 enum machine_mode mode ATTRIBUTE_UNUSED,
7405 tree type ATTRIBUTE_UNUSED, bool named)
7407 return mips_abi == ABI_EABI && named;
7410 /* Return true if registers of class CLASS cannot change from mode FROM
7414 mips_cannot_change_mode_class (enum machine_mode from,
7415 enum machine_mode to, enum reg_class class)
7417 if (MIN (GET_MODE_SIZE (from), GET_MODE_SIZE (to)) <= UNITS_PER_WORD
7418 && MAX (GET_MODE_SIZE (from), GET_MODE_SIZE (to)) > UNITS_PER_WORD)
7420 if (TARGET_BIG_ENDIAN)
7422 /* When a multi-word value is stored in paired floating-point
7423 registers, the first register always holds the low word.
7424 We therefore can't allow FPRs to change between single-word
7425 and multi-word modes. */
7426 if (FP_INC > 1 && reg_classes_intersect_p (FP_REGS, class))
7431 /* LO_REGNO == HI_REGNO + 1, so if a multi-word value is stored
7432 in LO and HI, the high word always comes first. We therefore
7433 can't allow values stored in HI to change between single-word
7434 and multi-word modes.
7435 This rule applies to both the original HI/LO pair and the new
7436 DSP accumulators. */
7437 if (reg_classes_intersect_p (ACC_REGS, class))
7441 /* Loading a 32-bit value into a 64-bit floating-point register
7442 will not sign-extend the value, despite what LOAD_EXTEND_OP says.
7443 We can't allow 64-bit float registers to change from SImode to
7447 && GET_MODE_SIZE (to) >= UNITS_PER_WORD
7448 && reg_classes_intersect_p (FP_REGS, class))
7453 /* Return true if X should not be moved directly into register $25.
7454 We need this because many versions of GAS will treat "la $25,foo" as
7455 part of a call sequence and so allow a global "foo" to be lazily bound. */
7458 mips_dangerous_for_la25_p (rtx x)
7460 HOST_WIDE_INT offset;
7462 if (TARGET_EXPLICIT_RELOCS)
7465 mips_split_const (x, &x, &offset);
7466 return global_got_operand (x, VOIDmode);
7469 /* Implement PREFERRED_RELOAD_CLASS. */
7472 mips_preferred_reload_class (rtx x, enum reg_class class)
7474 if (mips_dangerous_for_la25_p (x) && reg_class_subset_p (LEA_REGS, class))
7477 if (TARGET_HARD_FLOAT
7478 && FLOAT_MODE_P (GET_MODE (x))
7479 && reg_class_subset_p (FP_REGS, class))
7482 if (reg_class_subset_p (GR_REGS, class))
7485 if (TARGET_MIPS16 && reg_class_subset_p (M16_REGS, class))
7491 /* This function returns the register class required for a secondary
7492 register when copying between one of the registers in CLASS, and X,
7493 using MODE. If IN_P is nonzero, the copy is going from X to the
7494 register, otherwise the register is the source. A return value of
7495 NO_REGS means that no secondary register is required. */
7498 mips_secondary_reload_class (enum reg_class class,
7499 enum machine_mode mode, rtx x, int in_p)
7501 enum reg_class gr_regs = TARGET_MIPS16 ? M16_REGS : GR_REGS;
7505 if (REG_P (x)|| GET_CODE (x) == SUBREG)
7506 regno = true_regnum (x);
7508 gp_reg_p = TARGET_MIPS16 ? M16_REG_P (regno) : GP_REG_P (regno);
7510 if (mips_dangerous_for_la25_p (x))
7513 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], 25))
7517 /* Copying from HI or LO to anywhere other than a general register
7518 requires a general register.
7519 This rule applies to both the original HI/LO pair and the new
7520 DSP accumulators. */
7521 if (reg_class_subset_p (class, ACC_REGS))
7523 if (TARGET_MIPS16 && in_p)
7525 /* We can't really copy to HI or LO at all in mips16 mode. */
7528 return gp_reg_p ? NO_REGS : gr_regs;
7530 if (ACC_REG_P (regno))
7532 if (TARGET_MIPS16 && ! in_p)
7534 /* We can't really copy to HI or LO at all in mips16 mode. */
7537 return class == gr_regs ? NO_REGS : gr_regs;
7540 /* We can only copy a value to a condition code register from a
7541 floating point register, and even then we require a scratch
7542 floating point register. We can only copy a value out of a
7543 condition code register into a general register. */
7544 if (class == ST_REGS)
7548 return gp_reg_p ? NO_REGS : gr_regs;
7550 if (ST_REG_P (regno))
7554 return class == gr_regs ? NO_REGS : gr_regs;
7557 if (class == FP_REGS)
7561 /* In this case we can use lwc1, swc1, ldc1 or sdc1. */
7564 else if (CONSTANT_P (x) && GET_MODE_CLASS (mode) == MODE_FLOAT)
7566 /* We can use the l.s and l.d macros to load floating-point
7567 constants. ??? For l.s, we could probably get better
7568 code by returning GR_REGS here. */
7571 else if (gp_reg_p || x == CONST0_RTX (mode))
7573 /* In this case we can use mtc1, mfc1, dmtc1 or dmfc1. */
7576 else if (FP_REG_P (regno))
7578 /* In this case we can use mov.s or mov.d. */
7583 /* Otherwise, we need to reload through an integer register. */
7588 /* In mips16 mode, going between memory and anything but M16_REGS
7589 requires an M16_REG. */
7592 if (class != M16_REGS && class != M16_NA_REGS)
7600 if (class == M16_REGS || class == M16_NA_REGS)
7609 /* Implement CLASS_MAX_NREGS.
7611 Usually all registers are word-sized. The only supported exception
7612 is -mgp64 -msingle-float, which has 64-bit words but 32-bit float
7613 registers. A word-based calculation is correct even in that case,
7614 since -msingle-float disallows multi-FPR values.
7616 The FP status registers are an exception to this rule. They are always
7617 4 bytes wide as they only hold condition code modes, and CCmode is always
7618 considered to be 4 bytes wide. */
7621 mips_class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,
7622 enum machine_mode mode)
7624 if (class == ST_REGS)
7625 return (GET_MODE_SIZE (mode) + 3) / 4;
7627 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
7631 mips_valid_pointer_mode (enum machine_mode mode)
7633 return (mode == SImode || (TARGET_64BIT && mode == DImode));
7636 /* Target hook for vector_mode_supported_p. */
7639 mips_vector_mode_supported_p (enum machine_mode mode)
7644 return TARGET_PAIRED_SINGLE_FLOAT;
7655 /* If we can access small data directly (using gp-relative relocation
7656 operators) return the small data pointer, otherwise return null.
7658 For each mips16 function which refers to GP relative symbols, we
7659 use a pseudo register, initialized at the start of the function, to
7660 hold the $gp value. */
7663 mips16_gp_pseudo_reg (void)
7665 if (cfun->machine->mips16_gp_pseudo_rtx == NULL_RTX)
7670 cfun->machine->mips16_gp_pseudo_rtx = gen_reg_rtx (Pmode);
7672 /* We want to initialize this to a value which gcc will believe
7675 unspec = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, const0_rtx), UNSPEC_GP);
7676 emit_move_insn (cfun->machine->mips16_gp_pseudo_rtx,
7677 gen_rtx_CONST (Pmode, unspec));
7678 insn = get_insns ();
7681 push_topmost_sequence ();
7682 /* We need to emit the initialization after the FUNCTION_BEG
7683 note, so that it will be integrated. */
7684 for (scan = get_insns (); scan != NULL_RTX; scan = NEXT_INSN (scan))
7686 && NOTE_LINE_NUMBER (scan) == NOTE_INSN_FUNCTION_BEG)
7688 if (scan == NULL_RTX)
7689 scan = get_insns ();
7690 insn = emit_insn_after (insn, scan);
7691 pop_topmost_sequence ();
7694 return cfun->machine->mips16_gp_pseudo_rtx;
7697 /* Write out code to move floating point arguments in or out of
7698 general registers. Output the instructions to FILE. FP_CODE is
7699 the code describing which arguments are present (see the comment at
7700 the definition of CUMULATIVE_ARGS in mips.h). FROM_FP_P is nonzero if
7701 we are copying from the floating point registers. */
7704 mips16_fp_args (FILE *file, int fp_code, int from_fp_p)
7710 /* This code only works for the original 32 bit ABI and the O64 ABI. */
7711 gcc_assert (TARGET_OLDABI);
7717 gparg = GP_ARG_FIRST;
7718 fparg = FP_ARG_FIRST;
7719 for (f = (unsigned int) fp_code; f != 0; f >>= 2)
7723 if ((fparg & 1) != 0)
7725 fprintf (file, "\t%s\t%s,%s\n", s,
7726 reg_names[gparg], reg_names[fparg]);
7728 else if ((f & 3) == 2)
7731 fprintf (file, "\td%s\t%s,%s\n", s,
7732 reg_names[gparg], reg_names[fparg]);
7735 if ((fparg & 1) != 0)
7737 if (TARGET_BIG_ENDIAN)
7738 fprintf (file, "\t%s\t%s,%s\n\t%s\t%s,%s\n", s,
7739 reg_names[gparg], reg_names[fparg + 1], s,
7740 reg_names[gparg + 1], reg_names[fparg]);
7742 fprintf (file, "\t%s\t%s,%s\n\t%s\t%s,%s\n", s,
7743 reg_names[gparg], reg_names[fparg], s,
7744 reg_names[gparg + 1], reg_names[fparg + 1]);
7757 /* Build a mips16 function stub. This is used for functions which
7758 take arguments in the floating point registers. It is 32 bit code
7759 that moves the floating point args into the general registers, and
7760 then jumps to the 16 bit code. */
7763 build_mips16_function_stub (FILE *file)
7766 char *secname, *stubname;
7767 tree stubid, stubdecl;
7771 fnname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
7772 secname = (char *) alloca (strlen (fnname) + 20);
7773 sprintf (secname, ".mips16.fn.%s", fnname);
7774 stubname = (char *) alloca (strlen (fnname) + 20);
7775 sprintf (stubname, "__fn_stub_%s", fnname);
7776 stubid = get_identifier (stubname);
7777 stubdecl = build_decl (FUNCTION_DECL, stubid,
7778 build_function_type (void_type_node, NULL_TREE));
7779 DECL_SECTION_NAME (stubdecl) = build_string (strlen (secname), secname);
7781 fprintf (file, "\t# Stub function for %s (", current_function_name ());
7783 for (f = (unsigned int) current_function_args_info.fp_code; f != 0; f >>= 2)
7785 fprintf (file, "%s%s",
7786 need_comma ? ", " : "",
7787 (f & 3) == 1 ? "float" : "double");
7790 fprintf (file, ")\n");
7792 fprintf (file, "\t.set\tnomips16\n");
7793 switch_to_section (function_section (stubdecl));
7794 ASM_OUTPUT_ALIGN (file, floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT));
7796 /* ??? If FUNCTION_NAME_ALREADY_DECLARED is defined, then we are
7797 within a .ent, and we cannot emit another .ent. */
7798 if (!FUNCTION_NAME_ALREADY_DECLARED)
7800 fputs ("\t.ent\t", file);
7801 assemble_name (file, stubname);
7805 assemble_name (file, stubname);
7806 fputs (":\n", file);
7808 /* We don't want the assembler to insert any nops here. */
7809 fprintf (file, "\t.set\tnoreorder\n");
7811 mips16_fp_args (file, current_function_args_info.fp_code, 1);
7813 fprintf (asm_out_file, "\t.set\tnoat\n");
7814 fprintf (asm_out_file, "\tla\t%s,", reg_names[GP_REG_FIRST + 1]);
7815 assemble_name (file, fnname);
7816 fprintf (file, "\n");
7817 fprintf (asm_out_file, "\tjr\t%s\n", reg_names[GP_REG_FIRST + 1]);
7818 fprintf (asm_out_file, "\t.set\tat\n");
7820 /* Unfortunately, we can't fill the jump delay slot. We can't fill
7821 with one of the mfc1 instructions, because the result is not
7822 available for one instruction, so if the very first instruction
7823 in the function refers to the register, it will see the wrong
7825 fprintf (file, "\tnop\n");
7827 fprintf (file, "\t.set\treorder\n");
7829 if (!FUNCTION_NAME_ALREADY_DECLARED)
7831 fputs ("\t.end\t", file);
7832 assemble_name (file, stubname);
7836 fprintf (file, "\t.set\tmips16\n");
7838 switch_to_section (function_section (current_function_decl));
7841 /* We keep a list of functions for which we have already built stubs
7842 in build_mips16_call_stub. */
7846 struct mips16_stub *next;
7851 static struct mips16_stub *mips16_stubs;
7853 /* Build a call stub for a mips16 call. A stub is needed if we are
7854 passing any floating point values which should go into the floating
7855 point registers. If we are, and the call turns out to be to a 32
7856 bit function, the stub will be used to move the values into the
7857 floating point registers before calling the 32 bit function. The
7858 linker will magically adjust the function call to either the 16 bit
7859 function or the 32 bit stub, depending upon where the function call
7860 is actually defined.
7862 Similarly, we need a stub if the return value might come back in a
7863 floating point register.
7865 RETVAL is the location of the return value, or null if this is
7866 a call rather than a call_value. FN is the address of the
7867 function and ARG_SIZE is the size of the arguments. FP_CODE
7868 is the code built by function_arg. This function returns a nonzero
7869 value if it builds the call instruction itself. */
7872 build_mips16_call_stub (rtx retval, rtx fn, rtx arg_size, int fp_code)
7876 char *secname, *stubname;
7877 struct mips16_stub *l;
7878 tree stubid, stubdecl;
7882 /* We don't need to do anything if we aren't in mips16 mode, or if
7883 we were invoked with the -msoft-float option. */
7884 if (! TARGET_MIPS16 || ! mips16_hard_float)
7887 /* Figure out whether the value might come back in a floating point
7889 fpret = (retval != 0
7890 && GET_MODE_CLASS (GET_MODE (retval)) == MODE_FLOAT
7891 && GET_MODE_SIZE (GET_MODE (retval)) <= UNITS_PER_FPVALUE);
7893 /* We don't need to do anything if there were no floating point
7894 arguments and the value will not be returned in a floating point
7896 if (fp_code == 0 && ! fpret)
7899 /* We don't need to do anything if this is a call to a special
7900 mips16 support function. */
7901 if (GET_CODE (fn) == SYMBOL_REF
7902 && strncmp (XSTR (fn, 0), "__mips16_", 9) == 0)
7905 /* This code will only work for o32 and o64 abis. The other ABI's
7906 require more sophisticated support. */
7907 gcc_assert (TARGET_OLDABI);
7909 /* We can only handle SFmode and DFmode floating point return
7912 gcc_assert (GET_MODE (retval) == SFmode || GET_MODE (retval) == DFmode);
7914 /* If we're calling via a function pointer, then we must always call
7915 via a stub. There are magic stubs provided in libgcc.a for each
7916 of the required cases. Each of them expects the function address
7917 to arrive in register $2. */
7919 if (GET_CODE (fn) != SYMBOL_REF)
7925 /* ??? If this code is modified to support other ABI's, we need
7926 to handle PARALLEL return values here. */
7928 sprintf (buf, "__mips16_call_stub_%s%d",
7930 ? (GET_MODE (retval) == SFmode ? "sf_" : "df_")
7933 id = get_identifier (buf);
7934 stub_fn = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (id));
7936 emit_move_insn (gen_rtx_REG (Pmode, 2), fn);
7938 if (retval == NULL_RTX)
7939 insn = gen_call_internal (stub_fn, arg_size);
7941 insn = gen_call_value_internal (retval, stub_fn, arg_size);
7942 insn = emit_call_insn (insn);
7944 /* Put the register usage information on the CALL. */
7945 CALL_INSN_FUNCTION_USAGE (insn) =
7946 gen_rtx_EXPR_LIST (VOIDmode,
7947 gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 2)),
7948 CALL_INSN_FUNCTION_USAGE (insn));
7950 /* If we are handling a floating point return value, we need to
7951 save $18 in the function prologue. Putting a note on the
7952 call will mean that regs_ever_live[$18] will be true if the
7953 call is not eliminated, and we can check that in the prologue
7956 CALL_INSN_FUNCTION_USAGE (insn) =
7957 gen_rtx_EXPR_LIST (VOIDmode,
7958 gen_rtx_USE (VOIDmode,
7959 gen_rtx_REG (word_mode, 18)),
7960 CALL_INSN_FUNCTION_USAGE (insn));
7962 /* Return 1 to tell the caller that we've generated the call
7967 /* We know the function we are going to call. If we have already
7968 built a stub, we don't need to do anything further. */
7970 fnname = XSTR (fn, 0);
7971 for (l = mips16_stubs; l != NULL; l = l->next)
7972 if (strcmp (l->name, fnname) == 0)
7977 /* Build a special purpose stub. When the linker sees a
7978 function call in mips16 code, it will check where the target
7979 is defined. If the target is a 32 bit call, the linker will
7980 search for the section defined here. It can tell which
7981 symbol this section is associated with by looking at the
7982 relocation information (the name is unreliable, since this
7983 might be a static function). If such a section is found, the
7984 linker will redirect the call to the start of the magic
7987 If the function does not return a floating point value, the
7988 special stub section is named
7991 If the function does return a floating point value, the stub
7993 .mips16.call.fp.FNNAME
7996 secname = (char *) alloca (strlen (fnname) + 40);
7997 sprintf (secname, ".mips16.call.%s%s",
8000 stubname = (char *) alloca (strlen (fnname) + 20);
8001 sprintf (stubname, "__call_stub_%s%s",
8004 stubid = get_identifier (stubname);
8005 stubdecl = build_decl (FUNCTION_DECL, stubid,
8006 build_function_type (void_type_node, NULL_TREE));
8007 DECL_SECTION_NAME (stubdecl) = build_string (strlen (secname), secname);
8009 fprintf (asm_out_file, "\t# Stub function to call %s%s (",
8011 ? (GET_MODE (retval) == SFmode ? "float " : "double ")
8015 for (f = (unsigned int) fp_code; f != 0; f >>= 2)
8017 fprintf (asm_out_file, "%s%s",
8018 need_comma ? ", " : "",
8019 (f & 3) == 1 ? "float" : "double");
8022 fprintf (asm_out_file, ")\n");
8024 fprintf (asm_out_file, "\t.set\tnomips16\n");
8025 assemble_start_function (stubdecl, stubname);
8027 if (!FUNCTION_NAME_ALREADY_DECLARED)
8029 fputs ("\t.ent\t", asm_out_file);
8030 assemble_name (asm_out_file, stubname);
8031 fputs ("\n", asm_out_file);
8033 assemble_name (asm_out_file, stubname);
8034 fputs (":\n", asm_out_file);
8037 /* We build the stub code by hand. That's the only way we can
8038 do it, since we can't generate 32 bit code during a 16 bit
8041 /* We don't want the assembler to insert any nops here. */
8042 fprintf (asm_out_file, "\t.set\tnoreorder\n");
8044 mips16_fp_args (asm_out_file, fp_code, 0);
8048 fprintf (asm_out_file, "\t.set\tnoat\n");
8049 fprintf (asm_out_file, "\tla\t%s,%s\n", reg_names[GP_REG_FIRST + 1],
8051 fprintf (asm_out_file, "\tjr\t%s\n", reg_names[GP_REG_FIRST + 1]);
8052 fprintf (asm_out_file, "\t.set\tat\n");
8053 /* Unfortunately, we can't fill the jump delay slot. We
8054 can't fill with one of the mtc1 instructions, because the
8055 result is not available for one instruction, so if the
8056 very first instruction in the function refers to the
8057 register, it will see the wrong value. */
8058 fprintf (asm_out_file, "\tnop\n");
8062 fprintf (asm_out_file, "\tmove\t%s,%s\n",
8063 reg_names[GP_REG_FIRST + 18], reg_names[GP_REG_FIRST + 31]);
8064 fprintf (asm_out_file, "\tjal\t%s\n", fnname);
8065 /* As above, we can't fill the delay slot. */
8066 fprintf (asm_out_file, "\tnop\n");
8067 if (GET_MODE (retval) == SFmode)
8068 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
8069 reg_names[GP_REG_FIRST + 2], reg_names[FP_REG_FIRST + 0]);
8072 if (TARGET_BIG_ENDIAN)
8074 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
8075 reg_names[GP_REG_FIRST + 2],
8076 reg_names[FP_REG_FIRST + 1]);
8077 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
8078 reg_names[GP_REG_FIRST + 3],
8079 reg_names[FP_REG_FIRST + 0]);
8083 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
8084 reg_names[GP_REG_FIRST + 2],
8085 reg_names[FP_REG_FIRST + 0]);
8086 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
8087 reg_names[GP_REG_FIRST + 3],
8088 reg_names[FP_REG_FIRST + 1]);
8091 fprintf (asm_out_file, "\tj\t%s\n", reg_names[GP_REG_FIRST + 18]);
8092 /* As above, we can't fill the delay slot. */
8093 fprintf (asm_out_file, "\tnop\n");
8096 fprintf (asm_out_file, "\t.set\treorder\n");
8098 #ifdef ASM_DECLARE_FUNCTION_SIZE
8099 ASM_DECLARE_FUNCTION_SIZE (asm_out_file, stubname, stubdecl);
8102 if (!FUNCTION_NAME_ALREADY_DECLARED)
8104 fputs ("\t.end\t", asm_out_file);
8105 assemble_name (asm_out_file, stubname);
8106 fputs ("\n", asm_out_file);
8109 fprintf (asm_out_file, "\t.set\tmips16\n");
8111 /* Record this stub. */
8112 l = (struct mips16_stub *) xmalloc (sizeof *l);
8113 l->name = xstrdup (fnname);
8115 l->next = mips16_stubs;
8119 /* If we expect a floating point return value, but we've built a
8120 stub which does not expect one, then we're in trouble. We can't
8121 use the existing stub, because it won't handle the floating point
8122 value. We can't build a new stub, because the linker won't know
8123 which stub to use for the various calls in this object file.
8124 Fortunately, this case is illegal, since it means that a function
8125 was declared in two different ways in a single compilation. */
8126 if (fpret && ! l->fpret)
8127 error ("cannot handle inconsistent calls to %qs", fnname);
8129 /* If we are calling a stub which handles a floating point return
8130 value, we need to arrange to save $18 in the prologue. We do
8131 this by marking the function call as using the register. The
8132 prologue will later see that it is used, and emit code to save
8139 if (retval == NULL_RTX)
8140 insn = gen_call_internal (fn, arg_size);
8142 insn = gen_call_value_internal (retval, fn, arg_size);
8143 insn = emit_call_insn (insn);
8145 CALL_INSN_FUNCTION_USAGE (insn) =
8146 gen_rtx_EXPR_LIST (VOIDmode,
8147 gen_rtx_USE (VOIDmode, gen_rtx_REG (word_mode, 18)),
8148 CALL_INSN_FUNCTION_USAGE (insn));
8150 /* Return 1 to tell the caller that we've generated the call
8155 /* Return 0 to let the caller generate the call insn. */
8159 /* An entry in the mips16 constant pool. VALUE is the pool constant,
8160 MODE is its mode, and LABEL is the CODE_LABEL associated with it. */
8162 struct mips16_constant {
8163 struct mips16_constant *next;
8166 enum machine_mode mode;
8169 /* Information about an incomplete mips16 constant pool. FIRST is the
8170 first constant, HIGHEST_ADDRESS is the highest address that the first
8171 byte of the pool can have, and INSN_ADDRESS is the current instruction
8174 struct mips16_constant_pool {
8175 struct mips16_constant *first;
8176 int highest_address;
8180 /* Add constant VALUE to POOL and return its label. MODE is the
8181 value's mode (used for CONST_INTs, etc.). */
8184 add_constant (struct mips16_constant_pool *pool,
8185 rtx value, enum machine_mode mode)
8187 struct mips16_constant **p, *c;
8188 bool first_of_size_p;
8190 /* See whether the constant is already in the pool. If so, return the
8191 existing label, otherwise leave P pointing to the place where the
8192 constant should be added.
8194 Keep the pool sorted in increasing order of mode size so that we can
8195 reduce the number of alignments needed. */
8196 first_of_size_p = true;
8197 for (p = &pool->first; *p != 0; p = &(*p)->next)
8199 if (mode == (*p)->mode && rtx_equal_p (value, (*p)->value))
8201 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE ((*p)->mode))
8203 if (GET_MODE_SIZE (mode) == GET_MODE_SIZE ((*p)->mode))
8204 first_of_size_p = false;
8207 /* In the worst case, the constant needed by the earliest instruction
8208 will end up at the end of the pool. The entire pool must then be
8209 accessible from that instruction.
8211 When adding the first constant, set the pool's highest address to
8212 the address of the first out-of-range byte. Adjust this address
8213 downwards each time a new constant is added. */
8214 if (pool->first == 0)
8215 /* For pc-relative lw, addiu and daddiu instructions, the base PC value
8216 is the address of the instruction with the lowest two bits clear.
8217 The base PC value for ld has the lowest three bits clear. Assume
8218 the worst case here. */
8219 pool->highest_address = pool->insn_address - (UNITS_PER_WORD - 2) + 0x8000;
8220 pool->highest_address -= GET_MODE_SIZE (mode);
8221 if (first_of_size_p)
8222 /* Take into account the worst possible padding due to alignment. */
8223 pool->highest_address -= GET_MODE_SIZE (mode) - 1;
8225 /* Create a new entry. */
8226 c = (struct mips16_constant *) xmalloc (sizeof *c);
8229 c->label = gen_label_rtx ();
8236 /* Output constant VALUE after instruction INSN and return the last
8237 instruction emitted. MODE is the mode of the constant. */
8240 dump_constants_1 (enum machine_mode mode, rtx value, rtx insn)
8242 switch (GET_MODE_CLASS (mode))
8246 rtx size = GEN_INT (GET_MODE_SIZE (mode));
8247 return emit_insn_after (gen_consttable_int (value, size), insn);
8251 return emit_insn_after (gen_consttable_float (value), insn);
8253 case MODE_VECTOR_FLOAT:
8254 case MODE_VECTOR_INT:
8257 for (i = 0; i < CONST_VECTOR_NUNITS (value); i++)
8258 insn = dump_constants_1 (GET_MODE_INNER (mode),
8259 CONST_VECTOR_ELT (value, i), insn);
8269 /* Dump out the constants in CONSTANTS after INSN. */
8272 dump_constants (struct mips16_constant *constants, rtx insn)
8274 struct mips16_constant *c, *next;
8278 for (c = constants; c != NULL; c = next)
8280 /* If necessary, increase the alignment of PC. */
8281 if (align < GET_MODE_SIZE (c->mode))
8283 int align_log = floor_log2 (GET_MODE_SIZE (c->mode));
8284 insn = emit_insn_after (gen_align (GEN_INT (align_log)), insn);
8286 align = GET_MODE_SIZE (c->mode);
8288 insn = emit_label_after (c->label, insn);
8289 insn = dump_constants_1 (c->mode, c->value, insn);
8295 emit_barrier_after (insn);
8298 /* Return the length of instruction INSN. */
8301 mips16_insn_length (rtx insn)
8305 rtx body = PATTERN (insn);
8306 if (GET_CODE (body) == ADDR_VEC)
8307 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, 0);
8308 if (GET_CODE (body) == ADDR_DIFF_VEC)
8309 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, 1);
8311 return get_attr_length (insn);
8314 /* Rewrite *X so that constant pool references refer to the constant's
8315 label instead. DATA points to the constant pool structure. */
8318 mips16_rewrite_pool_refs (rtx *x, void *data)
8320 struct mips16_constant_pool *pool = data;
8321 if (GET_CODE (*x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (*x))
8322 *x = gen_rtx_LABEL_REF (Pmode, add_constant (pool,
8323 get_pool_constant (*x),
8324 get_pool_mode (*x)));
8328 /* Build MIPS16 constant pools. */
8331 mips16_lay_out_constants (void)
8333 struct mips16_constant_pool pool;
8337 memset (&pool, 0, sizeof (pool));
8338 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8340 /* Rewrite constant pool references in INSN. */
8342 for_each_rtx (&PATTERN (insn), mips16_rewrite_pool_refs, &pool);
8344 pool.insn_address += mips16_insn_length (insn);
8346 if (pool.first != NULL)
8348 /* If there are no natural barriers between the first user of
8349 the pool and the highest acceptable address, we'll need to
8350 create a new instruction to jump around the constant pool.
8351 In the worst case, this instruction will be 4 bytes long.
8353 If it's too late to do this transformation after INSN,
8354 do it immediately before INSN. */
8355 if (barrier == 0 && pool.insn_address + 4 > pool.highest_address)
8359 label = gen_label_rtx ();
8361 jump = emit_jump_insn_before (gen_jump (label), insn);
8362 JUMP_LABEL (jump) = label;
8363 LABEL_NUSES (label) = 1;
8364 barrier = emit_barrier_after (jump);
8366 emit_label_after (label, barrier);
8367 pool.insn_address += 4;
8370 /* See whether the constant pool is now out of range of the first
8371 user. If so, output the constants after the previous barrier.
8372 Note that any instructions between BARRIER and INSN (inclusive)
8373 will use negative offsets to refer to the pool. */
8374 if (pool.insn_address > pool.highest_address)
8376 dump_constants (pool.first, barrier);
8380 else if (BARRIER_P (insn))
8384 dump_constants (pool.first, get_last_insn ());
8387 /* A temporary variable used by for_each_rtx callbacks, etc. */
8388 static rtx mips_sim_insn;
8390 /* A structure representing the state of the processor pipeline.
8391 Used by the mips_sim_* family of functions. */
8393 /* The maximum number of instructions that can be issued in a cycle.
8394 (Caches mips_issue_rate.) */
8395 unsigned int issue_rate;
8397 /* The current simulation time. */
8400 /* How many more instructions can be issued in the current cycle. */
8401 unsigned int insns_left;
8403 /* LAST_SET[X].INSN is the last instruction to set register X.
8404 LAST_SET[X].TIME is the time at which that instruction was issued.
8405 INSN is null if no instruction has yet set register X. */
8409 } last_set[FIRST_PSEUDO_REGISTER];
8411 /* The pipeline's current DFA state. */
8415 /* Reset STATE to the initial simulation state. */
8418 mips_sim_reset (struct mips_sim *state)
8421 state->insns_left = state->issue_rate;
8422 memset (&state->last_set, 0, sizeof (state->last_set));
8423 state_reset (state->dfa_state);
8426 /* Initialize STATE before its first use. DFA_STATE points to an
8427 allocated but uninitialized DFA state. */
8430 mips_sim_init (struct mips_sim *state, state_t dfa_state)
8432 state->issue_rate = mips_issue_rate ();
8433 state->dfa_state = dfa_state;
8434 mips_sim_reset (state);
8437 /* Advance STATE by one clock cycle. */
8440 mips_sim_next_cycle (struct mips_sim *state)
8443 state->insns_left = state->issue_rate;
8444 state_transition (state->dfa_state, 0);
8447 /* Advance simulation state STATE until instruction INSN can read
8451 mips_sim_wait_reg (struct mips_sim *state, rtx insn, rtx reg)
8455 for (i = 0; i < HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)); i++)
8456 if (state->last_set[REGNO (reg) + i].insn != 0)
8460 t = state->last_set[REGNO (reg) + i].time;
8461 t += insn_latency (state->last_set[REGNO (reg) + i].insn, insn);
8462 while (state->time < t)
8463 mips_sim_next_cycle (state);
8467 /* A for_each_rtx callback. If *X is a register, advance simulation state
8468 DATA until mips_sim_insn can read the register's value. */
8471 mips_sim_wait_regs_2 (rtx *x, void *data)
8474 mips_sim_wait_reg (data, mips_sim_insn, *x);
8478 /* Call mips_sim_wait_regs_2 (R, DATA) for each register R mentioned in *X. */
8481 mips_sim_wait_regs_1 (rtx *x, void *data)
8483 for_each_rtx (x, mips_sim_wait_regs_2, data);
8486 /* Advance simulation state STATE until all of INSN's register
8487 dependencies are satisfied. */
8490 mips_sim_wait_regs (struct mips_sim *state, rtx insn)
8492 mips_sim_insn = insn;
8493 note_uses (&PATTERN (insn), mips_sim_wait_regs_1, state);
8496 /* Advance simulation state STATE until the units required by
8497 instruction INSN are available. */
8500 mips_sim_wait_units (struct mips_sim *state, rtx insn)
8504 tmp_state = alloca (state_size ());
8505 while (state->insns_left == 0
8506 || (memcpy (tmp_state, state->dfa_state, state_size ()),
8507 state_transition (tmp_state, insn) >= 0))
8508 mips_sim_next_cycle (state);
8511 /* Advance simulation state STATE until INSN is ready to issue. */
8514 mips_sim_wait_insn (struct mips_sim *state, rtx insn)
8516 mips_sim_wait_regs (state, insn);
8517 mips_sim_wait_units (state, insn);
8520 /* mips_sim_insn has just set X. Update the LAST_SET array
8521 in simulation state DATA. */
8524 mips_sim_record_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
8526 struct mips_sim *state;
8531 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
8533 state->last_set[REGNO (x) + i].insn = mips_sim_insn;
8534 state->last_set[REGNO (x) + i].time = state->time;
8538 /* Issue instruction INSN in scheduler state STATE. Assume that INSN
8539 can issue immediately (i.e., that mips_sim_wait_insn has already
8543 mips_sim_issue_insn (struct mips_sim *state, rtx insn)
8545 state_transition (state->dfa_state, insn);
8546 state->insns_left--;
8548 mips_sim_insn = insn;
8549 note_stores (PATTERN (insn), mips_sim_record_set, state);
8552 /* Simulate issuing a NOP in state STATE. */
8555 mips_sim_issue_nop (struct mips_sim *state)
8557 if (state->insns_left == 0)
8558 mips_sim_next_cycle (state);
8559 state->insns_left--;
8562 /* Update simulation state STATE so that it's ready to accept the instruction
8563 after INSN. INSN should be part of the main rtl chain, not a member of a
8567 mips_sim_finish_insn (struct mips_sim *state, rtx insn)
8569 /* If INSN is a jump with an implicit delay slot, simulate a nop. */
8571 mips_sim_issue_nop (state);
8573 switch (GET_CODE (SEQ_BEGIN (insn)))
8577 /* We can't predict the processor state after a call or label. */
8578 mips_sim_reset (state);
8582 /* The delay slots of branch likely instructions are only executed
8583 when the branch is taken. Therefore, if the caller has simulated
8584 the delay slot instruction, STATE does not really reflect the state
8585 of the pipeline for the instruction after the delay slot. Also,
8586 branch likely instructions tend to incur a penalty when not taken,
8587 so there will probably be an extra delay between the branch and
8588 the instruction after the delay slot. */
8589 if (INSN_ANNULLED_BRANCH_P (SEQ_BEGIN (insn)))
8590 mips_sim_reset (state);
8598 /* The VR4130 pipeline issues aligned pairs of instructions together,
8599 but it stalls the second instruction if it depends on the first.
8600 In order to cut down the amount of logic required, this dependence
8601 check is not based on a full instruction decode. Instead, any non-SPECIAL
8602 instruction is assumed to modify the register specified by bits 20-16
8603 (which is usually the "rt" field).
8605 In beq, beql, bne and bnel instructions, the rt field is actually an
8606 input, so we can end up with a false dependence between the branch
8607 and its delay slot. If this situation occurs in instruction INSN,
8608 try to avoid it by swapping rs and rt. */
8611 vr4130_avoid_branch_rt_conflict (rtx insn)
8615 first = SEQ_BEGIN (insn);
8616 second = SEQ_END (insn);
8618 && NONJUMP_INSN_P (second)
8619 && GET_CODE (PATTERN (first)) == SET
8620 && GET_CODE (SET_DEST (PATTERN (first))) == PC
8621 && GET_CODE (SET_SRC (PATTERN (first))) == IF_THEN_ELSE)
8623 /* Check for the right kind of condition. */
8624 rtx cond = XEXP (SET_SRC (PATTERN (first)), 0);
8625 if ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
8626 && REG_P (XEXP (cond, 0))
8627 && REG_P (XEXP (cond, 1))
8628 && reg_referenced_p (XEXP (cond, 1), PATTERN (second))
8629 && !reg_referenced_p (XEXP (cond, 0), PATTERN (second)))
8631 /* SECOND mentions the rt register but not the rs register. */
8632 rtx tmp = XEXP (cond, 0);
8633 XEXP (cond, 0) = XEXP (cond, 1);
8634 XEXP (cond, 1) = tmp;
8639 /* Implement -mvr4130-align. Go through each basic block and simulate the
8640 processor pipeline. If we find that a pair of instructions could execute
8641 in parallel, and the first of those instruction is not 8-byte aligned,
8642 insert a nop to make it aligned. */
8645 vr4130_align_insns (void)
8647 struct mips_sim state;
8648 rtx insn, subinsn, last, last2, next;
8653 /* LAST is the last instruction before INSN to have a nonzero length.
8654 LAST2 is the last such instruction before LAST. */
8658 /* ALIGNED_P is true if INSN is known to be at an aligned address. */
8661 mips_sim_init (&state, alloca (state_size ()));
8662 for (insn = get_insns (); insn != 0; insn = next)
8664 unsigned int length;
8666 next = NEXT_INSN (insn);
8668 /* See the comment above vr4130_avoid_branch_rt_conflict for details.
8669 This isn't really related to the alignment pass, but we do it on
8670 the fly to avoid a separate instruction walk. */
8671 vr4130_avoid_branch_rt_conflict (insn);
8673 if (USEFUL_INSN_P (insn))
8674 FOR_EACH_SUBINSN (subinsn, insn)
8676 mips_sim_wait_insn (&state, subinsn);
8678 /* If we want this instruction to issue in parallel with the
8679 previous one, make sure that the previous instruction is
8680 aligned. There are several reasons why this isn't worthwhile
8681 when the second instruction is a call:
8683 - Calls are less likely to be performance critical,
8684 - There's a good chance that the delay slot can execute
8685 in parallel with the call.
8686 - The return address would then be unaligned.
8688 In general, if we're going to insert a nop between instructions
8689 X and Y, it's better to insert it immediately after X. That
8690 way, if the nop makes Y aligned, it will also align any labels
8692 if (state.insns_left != state.issue_rate
8693 && !CALL_P (subinsn))
8695 if (subinsn == SEQ_BEGIN (insn) && aligned_p)
8697 /* SUBINSN is the first instruction in INSN and INSN is
8698 aligned. We want to align the previous instruction
8699 instead, so insert a nop between LAST2 and LAST.
8701 Note that LAST could be either a single instruction
8702 or a branch with a delay slot. In the latter case,
8703 LAST, like INSN, is already aligned, but the delay
8704 slot must have some extra delay that stops it from
8705 issuing at the same time as the branch. We therefore
8706 insert a nop before the branch in order to align its
8708 emit_insn_after (gen_nop (), last2);
8711 else if (subinsn != SEQ_BEGIN (insn) && !aligned_p)
8713 /* SUBINSN is the delay slot of INSN, but INSN is
8714 currently unaligned. Insert a nop between
8715 LAST and INSN to align it. */
8716 emit_insn_after (gen_nop (), last);
8720 mips_sim_issue_insn (&state, subinsn);
8722 mips_sim_finish_insn (&state, insn);
8724 /* Update LAST, LAST2 and ALIGNED_P for the next instruction. */
8725 length = get_attr_length (insn);
8728 /* If the instruction is an asm statement or multi-instruction
8729 mips.md patern, the length is only an estimate. Insert an
8730 8 byte alignment after it so that the following instructions
8731 can be handled correctly. */
8732 if (NONJUMP_INSN_P (SEQ_BEGIN (insn))
8733 && (recog_memoized (insn) < 0 || length >= 8))
8735 next = emit_insn_after (gen_align (GEN_INT (3)), insn);
8736 next = NEXT_INSN (next);
8737 mips_sim_next_cycle (&state);
8740 else if (length & 4)
8741 aligned_p = !aligned_p;
8746 /* See whether INSN is an aligned label. */
8747 if (LABEL_P (insn) && label_to_alignment (insn) >= 3)
8753 /* Subroutine of mips_reorg. If there is a hazard between INSN
8754 and a previous instruction, avoid it by inserting nops after
8757 *DELAYED_REG and *HILO_DELAY describe the hazards that apply at
8758 this point. If *DELAYED_REG is non-null, INSN must wait a cycle
8759 before using the value of that register. *HILO_DELAY counts the
8760 number of instructions since the last hilo hazard (that is,
8761 the number of instructions since the last mflo or mfhi).
8763 After inserting nops for INSN, update *DELAYED_REG and *HILO_DELAY
8764 for the next instruction.
8766 LO_REG is an rtx for the LO register, used in dependence checking. */
8769 mips_avoid_hazard (rtx after, rtx insn, int *hilo_delay,
8770 rtx *delayed_reg, rtx lo_reg)
8778 pattern = PATTERN (insn);
8780 /* Do not put the whole function in .set noreorder if it contains
8781 an asm statement. We don't know whether there will be hazards
8782 between the asm statement and the gcc-generated code. */
8783 if (GET_CODE (pattern) == ASM_INPUT || asm_noperands (pattern) >= 0)
8784 cfun->machine->all_noreorder_p = false;
8786 /* Ignore zero-length instructions (barriers and the like). */
8787 ninsns = get_attr_length (insn) / 4;
8791 /* Work out how many nops are needed. Note that we only care about
8792 registers that are explicitly mentioned in the instruction's pattern.
8793 It doesn't matter that calls use the argument registers or that they
8794 clobber hi and lo. */
8795 if (*hilo_delay < 2 && reg_set_p (lo_reg, pattern))
8796 nops = 2 - *hilo_delay;
8797 else if (*delayed_reg != 0 && reg_referenced_p (*delayed_reg, pattern))
8802 /* Insert the nops between this instruction and the previous one.
8803 Each new nop takes us further from the last hilo hazard. */
8804 *hilo_delay += nops;
8806 emit_insn_after (gen_hazard_nop (), after);
8808 /* Set up the state for the next instruction. */
8809 *hilo_delay += ninsns;
8811 if (INSN_CODE (insn) >= 0)
8812 switch (get_attr_hazard (insn))
8822 set = single_set (insn);
8823 gcc_assert (set != 0);
8824 *delayed_reg = SET_DEST (set);
8830 /* Go through the instruction stream and insert nops where necessary.
8831 See if the whole function can then be put into .set noreorder &
8835 mips_avoid_hazards (void)
8837 rtx insn, last_insn, lo_reg, delayed_reg;
8840 /* Force all instructions to be split into their final form. */
8841 split_all_insns_noflow ();
8843 /* Recalculate instruction lengths without taking nops into account. */
8844 cfun->machine->ignore_hazard_length_p = true;
8845 shorten_branches (get_insns ());
8847 cfun->machine->all_noreorder_p = true;
8849 /* Profiled functions can't be all noreorder because the profiler
8850 support uses assembler macros. */
8851 if (current_function_profile)
8852 cfun->machine->all_noreorder_p = false;
8854 /* Code compiled with -mfix-vr4120 can't be all noreorder because
8855 we rely on the assembler to work around some errata. */
8856 if (TARGET_FIX_VR4120)
8857 cfun->machine->all_noreorder_p = false;
8859 /* The same is true for -mfix-vr4130 if we might generate mflo or
8860 mfhi instructions. Note that we avoid using mflo and mfhi if
8861 the VR4130 macc and dmacc instructions are available instead;
8862 see the *mfhilo_{si,di}_macc patterns. */
8863 if (TARGET_FIX_VR4130 && !ISA_HAS_MACCHI)
8864 cfun->machine->all_noreorder_p = false;
8869 lo_reg = gen_rtx_REG (SImode, LO_REGNUM);
8871 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
8874 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
8875 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8876 mips_avoid_hazard (last_insn, XVECEXP (PATTERN (insn), 0, i),
8877 &hilo_delay, &delayed_reg, lo_reg);
8879 mips_avoid_hazard (last_insn, insn, &hilo_delay,
8880 &delayed_reg, lo_reg);
8887 /* Implement TARGET_MACHINE_DEPENDENT_REORG. */
8893 mips16_lay_out_constants ();
8894 else if (TARGET_EXPLICIT_RELOCS)
8896 if (mips_flag_delayed_branch)
8897 dbr_schedule (get_insns ());
8898 mips_avoid_hazards ();
8899 if (TUNE_MIPS4130 && TARGET_VR4130_ALIGN)
8900 vr4130_align_insns ();
8904 /* This function does three things:
8906 - Register the special divsi3 and modsi3 functions if -mfix-vr4120.
8907 - Register the mips16 hardware floating point stubs.
8908 - Register the gofast functions if selected using --enable-gofast. */
8910 #include "config/gofast.h"
8913 mips_init_libfuncs (void)
8915 if (TARGET_FIX_VR4120)
8917 set_optab_libfunc (sdiv_optab, SImode, "__vr4120_divsi3");
8918 set_optab_libfunc (smod_optab, SImode, "__vr4120_modsi3");
8921 if (TARGET_MIPS16 && mips16_hard_float)
8923 set_optab_libfunc (add_optab, SFmode, "__mips16_addsf3");
8924 set_optab_libfunc (sub_optab, SFmode, "__mips16_subsf3");
8925 set_optab_libfunc (smul_optab, SFmode, "__mips16_mulsf3");
8926 set_optab_libfunc (sdiv_optab, SFmode, "__mips16_divsf3");
8928 set_optab_libfunc (eq_optab, SFmode, "__mips16_eqsf2");
8929 set_optab_libfunc (ne_optab, SFmode, "__mips16_nesf2");
8930 set_optab_libfunc (gt_optab, SFmode, "__mips16_gtsf2");
8931 set_optab_libfunc (ge_optab, SFmode, "__mips16_gesf2");
8932 set_optab_libfunc (lt_optab, SFmode, "__mips16_ltsf2");
8933 set_optab_libfunc (le_optab, SFmode, "__mips16_lesf2");
8935 set_conv_libfunc (sfix_optab, SImode, SFmode, "__mips16_fix_truncsfsi");
8936 set_conv_libfunc (sfloat_optab, SFmode, SImode, "__mips16_floatsisf");
8938 if (TARGET_DOUBLE_FLOAT)
8940 set_optab_libfunc (add_optab, DFmode, "__mips16_adddf3");
8941 set_optab_libfunc (sub_optab, DFmode, "__mips16_subdf3");
8942 set_optab_libfunc (smul_optab, DFmode, "__mips16_muldf3");
8943 set_optab_libfunc (sdiv_optab, DFmode, "__mips16_divdf3");
8945 set_optab_libfunc (eq_optab, DFmode, "__mips16_eqdf2");
8946 set_optab_libfunc (ne_optab, DFmode, "__mips16_nedf2");
8947 set_optab_libfunc (gt_optab, DFmode, "__mips16_gtdf2");
8948 set_optab_libfunc (ge_optab, DFmode, "__mips16_gedf2");
8949 set_optab_libfunc (lt_optab, DFmode, "__mips16_ltdf2");
8950 set_optab_libfunc (le_optab, DFmode, "__mips16_ledf2");
8952 set_conv_libfunc (sext_optab, DFmode, SFmode, "__mips16_extendsfdf2");
8953 set_conv_libfunc (trunc_optab, SFmode, DFmode, "__mips16_truncdfsf2");
8955 set_conv_libfunc (sfix_optab, SImode, DFmode, "__mips16_fix_truncdfsi");
8956 set_conv_libfunc (sfloat_optab, DFmode, SImode, "__mips16_floatsidf");
8960 gofast_maybe_init_libfuncs ();
8963 /* Return a number assessing the cost of moving a register in class
8964 FROM to class TO. The classes are expressed using the enumeration
8965 values such as `GENERAL_REGS'. A value of 2 is the default; other
8966 values are interpreted relative to that.
8968 It is not required that the cost always equal 2 when FROM is the
8969 same as TO; on some machines it is expensive to move between
8970 registers if they are not general registers.
8972 If reload sees an insn consisting of a single `set' between two
8973 hard registers, and if `REGISTER_MOVE_COST' applied to their
8974 classes returns a value of 2, reload does not check to ensure that
8975 the constraints of the insn are met. Setting a cost of other than
8976 2 will allow reload to verify that the constraints are met. You
8977 should do this if the `movM' pattern's constraints do not allow
8980 ??? We make the cost of moving from HI/LO into general
8981 registers the same as for one of moving general registers to
8982 HI/LO for TARGET_MIPS16 in order to prevent allocating a
8983 pseudo to HI/LO. This might hurt optimizations though, it
8984 isn't clear if it is wise. And it might not work in all cases. We
8985 could solve the DImode LO reg problem by using a multiply, just
8986 like reload_{in,out}si. We could solve the SImode/HImode HI reg
8987 problem by using divide instructions. divu puts the remainder in
8988 the HI reg, so doing a divide by -1 will move the value in the HI
8989 reg for all values except -1. We could handle that case by using a
8990 signed divide, e.g. -1 / 2 (or maybe 1 / -2?). We'd have to emit
8991 a compare/branch to test the input value to see which instruction
8992 we need to use. This gets pretty messy, but it is feasible. */
8995 mips_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
8996 enum reg_class to, enum reg_class from)
8998 if (from == M16_REGS && GR_REG_CLASS_P (to))
9000 else if (from == M16_NA_REGS && GR_REG_CLASS_P (to))
9002 else if (GR_REG_CLASS_P (from))
9006 else if (to == M16_NA_REGS)
9008 else if (GR_REG_CLASS_P (to))
9015 else if (to == FP_REGS)
9017 else if (reg_class_subset_p (to, ACC_REGS))
9024 else if (COP_REG_CLASS_P (to))
9029 else if (from == FP_REGS)
9031 if (GR_REG_CLASS_P (to))
9033 else if (to == FP_REGS)
9035 else if (to == ST_REGS)
9038 else if (reg_class_subset_p (from, ACC_REGS))
9040 if (GR_REG_CLASS_P (to))
9048 else if (from == ST_REGS && GR_REG_CLASS_P (to))
9050 else if (COP_REG_CLASS_P (from))
9056 ??? What cases are these? Shouldn't we return 2 here? */
9061 /* Return the length of INSN. LENGTH is the initial length computed by
9062 attributes in the machine-description file. */
9065 mips_adjust_insn_length (rtx insn, int length)
9067 /* A unconditional jump has an unfilled delay slot if it is not part
9068 of a sequence. A conditional jump normally has a delay slot, but
9069 does not on MIPS16. */
9070 if (CALL_P (insn) || (TARGET_MIPS16 ? simplejump_p (insn) : JUMP_P (insn)))
9073 /* See how many nops might be needed to avoid hardware hazards. */
9074 if (!cfun->machine->ignore_hazard_length_p && INSN_CODE (insn) >= 0)
9075 switch (get_attr_hazard (insn))
9089 /* All MIPS16 instructions are a measly two bytes. */
9097 /* Return an asm sequence to start a noat block and load the address
9098 of a label into $1. */
9101 mips_output_load_label (void)
9103 if (TARGET_EXPLICIT_RELOCS)
9107 return "%[lw\t%@,%%got_page(%0)(%+)\n\taddiu\t%@,%@,%%got_ofst(%0)";
9110 return "%[ld\t%@,%%got_page(%0)(%+)\n\tdaddiu\t%@,%@,%%got_ofst(%0)";
9113 if (ISA_HAS_LOAD_DELAY)
9114 return "%[lw\t%@,%%got(%0)(%+)%#\n\taddiu\t%@,%@,%%lo(%0)";
9115 return "%[lw\t%@,%%got(%0)(%+)\n\taddiu\t%@,%@,%%lo(%0)";
9119 if (Pmode == DImode)
9120 return "%[dla\t%@,%0";
9122 return "%[la\t%@,%0";
9126 /* Return the assembly code for INSN, which has the operands given by
9127 OPERANDS, and which branches to OPERANDS[1] if some condition is true.
9128 BRANCH_IF_TRUE is the asm template that should be used if OPERANDS[1]
9129 is in range of a direct branch. BRANCH_IF_FALSE is an inverted
9130 version of BRANCH_IF_TRUE. */
9133 mips_output_conditional_branch (rtx insn, rtx *operands,
9134 const char *branch_if_true,
9135 const char *branch_if_false)
9137 unsigned int length;
9138 rtx taken, not_taken;
9140 length = get_attr_length (insn);
9143 /* Just a simple conditional branch. */
9144 mips_branch_likely = (final_sequence && INSN_ANNULLED_BRANCH_P (insn));
9145 return branch_if_true;
9148 /* Generate a reversed branch around a direct jump. This fallback does
9149 not use branch-likely instructions. */
9150 mips_branch_likely = false;
9151 not_taken = gen_label_rtx ();
9152 taken = operands[1];
9154 /* Generate the reversed branch to NOT_TAKEN. */
9155 operands[1] = not_taken;
9156 output_asm_insn (branch_if_false, operands);
9158 /* If INSN has a delay slot, we must provide delay slots for both the
9159 branch to NOT_TAKEN and the conditional jump. We must also ensure
9160 that INSN's delay slot is executed in the appropriate cases. */
9163 /* This first delay slot will always be executed, so use INSN's
9164 delay slot if is not annulled. */
9165 if (!INSN_ANNULLED_BRANCH_P (insn))
9167 final_scan_insn (XVECEXP (final_sequence, 0, 1),
9168 asm_out_file, optimize, 1, NULL);
9169 INSN_DELETED_P (XVECEXP (final_sequence, 0, 1)) = 1;
9172 output_asm_insn ("nop", 0);
9173 fprintf (asm_out_file, "\n");
9176 /* Output the unconditional branch to TAKEN. */
9178 output_asm_insn ("j\t%0%/", &taken);
9181 output_asm_insn (mips_output_load_label (), &taken);
9182 output_asm_insn ("jr\t%@%]%/", 0);
9185 /* Now deal with its delay slot; see above. */
9188 /* This delay slot will only be executed if the branch is taken.
9189 Use INSN's delay slot if is annulled. */
9190 if (INSN_ANNULLED_BRANCH_P (insn))
9192 final_scan_insn (XVECEXP (final_sequence, 0, 1),
9193 asm_out_file, optimize, 1, NULL);
9194 INSN_DELETED_P (XVECEXP (final_sequence, 0, 1)) = 1;
9197 output_asm_insn ("nop", 0);
9198 fprintf (asm_out_file, "\n");
9201 /* Output NOT_TAKEN. */
9202 (*targetm.asm_out.internal_label) (asm_out_file, "L",
9203 CODE_LABEL_NUMBER (not_taken));
9207 /* Return the assembly code for INSN, which branches to OPERANDS[1]
9208 if some ordered condition is true. The condition is given by
9209 OPERANDS[0] if !INVERTED_P, otherwise it is the inverse of
9210 OPERANDS[0]. OPERANDS[2] is the comparison's first operand;
9211 its second is always zero. */
9214 mips_output_order_conditional_branch (rtx insn, rtx *operands, bool inverted_p)
9216 const char *branch[2];
9218 /* Make BRANCH[1] branch to OPERANDS[1] when the condition is true.
9219 Make BRANCH[0] branch on the inverse condition. */
9220 switch (GET_CODE (operands[0]))
9222 /* These cases are equivalent to comparisons against zero. */
9224 inverted_p = !inverted_p;
9227 branch[!inverted_p] = MIPS_BRANCH ("bne", "%2,%.,%1");
9228 branch[inverted_p] = MIPS_BRANCH ("beq", "%2,%.,%1");
9231 /* These cases are always true or always false. */
9233 inverted_p = !inverted_p;
9236 branch[!inverted_p] = MIPS_BRANCH ("beq", "%.,%.,%1");
9237 branch[inverted_p] = MIPS_BRANCH ("bne", "%.,%.,%1");
9241 branch[!inverted_p] = MIPS_BRANCH ("b%C0z", "%2,%1");
9242 branch[inverted_p] = MIPS_BRANCH ("b%N0z", "%2,%1");
9245 return mips_output_conditional_branch (insn, operands, branch[1], branch[0]);
9248 /* Used to output div or ddiv instruction DIVISION, which has the operands
9249 given by OPERANDS. Add in a divide-by-zero check if needed.
9251 When working around R4000 and R4400 errata, we need to make sure that
9252 the division is not immediately followed by a shift[1][2]. We also
9253 need to stop the division from being put into a branch delay slot[3].
9254 The easiest way to avoid both problems is to add a nop after the
9255 division. When a divide-by-zero check is needed, this nop can be
9256 used to fill the branch delay slot.
9258 [1] If a double-word or a variable shift executes immediately
9259 after starting an integer division, the shift may give an
9260 incorrect result. See quotations of errata #16 and #28 from
9261 "MIPS R4000PC/SC Errata, Processor Revision 2.2 and 3.0"
9262 in mips.md for details.
9264 [2] A similar bug to [1] exists for all revisions of the
9265 R4000 and the R4400 when run in an MC configuration.
9266 From "MIPS R4000MC Errata, Processor Revision 2.2 and 3.0":
9268 "19. In this following sequence:
9270 ddiv (or ddivu or div or divu)
9271 dsll32 (or dsrl32, dsra32)
9273 if an MPT stall occurs, while the divide is slipping the cpu
9274 pipeline, then the following double shift would end up with an
9277 Workaround: The compiler needs to avoid generating any
9278 sequence with divide followed by extended double shift."
9280 This erratum is also present in "MIPS R4400MC Errata, Processor
9281 Revision 1.0" and "MIPS R4400MC Errata, Processor Revision 2.0
9282 & 3.0" as errata #10 and #4, respectively.
9284 [3] From "MIPS R4000PC/SC Errata, Processor Revision 2.2 and 3.0"
9285 (also valid for MIPS R4000MC processors):
9287 "52. R4000SC: This bug does not apply for the R4000PC.
9289 There are two flavors of this bug:
9291 1) If the instruction just after divide takes an RF exception
9292 (tlb-refill, tlb-invalid) and gets an instruction cache
9293 miss (both primary and secondary) and the line which is
9294 currently in secondary cache at this index had the first
9295 data word, where the bits 5..2 are set, then R4000 would
9296 get a wrong result for the div.
9301 ------------------- # end-of page. -tlb-refill
9306 ------------------- # end-of page. -tlb-invalid
9309 2) If the divide is in the taken branch delay slot, where the
9310 target takes RF exception and gets an I-cache miss for the
9311 exception vector or where I-cache miss occurs for the
9312 target address, under the above mentioned scenarios, the
9313 div would get wrong results.
9316 j r2 # to next page mapped or unmapped
9317 div r8,r9 # this bug would be there as long
9318 # as there is an ICache miss and
9319 nop # the "data pattern" is present
9322 beq r0, r0, NextPage # to Next page
9326 This bug is present for div, divu, ddiv, and ddivu
9329 Workaround: For item 1), OS could make sure that the next page
9330 after the divide instruction is also mapped. For item 2), the
9331 compiler could make sure that the divide instruction is not in
9332 the branch delay slot."
9334 These processors have PRId values of 0x00004220 and 0x00004300 for
9335 the R4000 and 0x00004400, 0x00004500 and 0x00004600 for the R4400. */
9338 mips_output_division (const char *division, rtx *operands)
9343 if (TARGET_FIX_R4000 || TARGET_FIX_R4400)
9345 output_asm_insn (s, operands);
9348 if (TARGET_CHECK_ZERO_DIV)
9352 output_asm_insn (s, operands);
9353 s = "bnez\t%2,1f\n\tbreak\t7\n1:";
9355 else if (GENERATE_DIVIDE_TRAPS)
9357 output_asm_insn (s, operands);
9362 output_asm_insn ("%(bne\t%2,%.,1f", operands);
9363 output_asm_insn (s, operands);
9364 s = "break\t7%)\n1:";
9370 /* Return true if GIVEN is the same as CANONICAL, or if it is CANONICAL
9371 with a final "000" replaced by "k". Ignore case.
9373 Note: this function is shared between GCC and GAS. */
9376 mips_strict_matching_cpu_name_p (const char *canonical, const char *given)
9378 while (*given != 0 && TOLOWER (*given) == TOLOWER (*canonical))
9379 given++, canonical++;
9381 return ((*given == 0 && *canonical == 0)
9382 || (strcmp (canonical, "000") == 0 && strcasecmp (given, "k") == 0));
9386 /* Return true if GIVEN matches CANONICAL, where GIVEN is a user-supplied
9387 CPU name. We've traditionally allowed a lot of variation here.
9389 Note: this function is shared between GCC and GAS. */
9392 mips_matching_cpu_name_p (const char *canonical, const char *given)
9394 /* First see if the name matches exactly, or with a final "000"
9396 if (mips_strict_matching_cpu_name_p (canonical, given))
9399 /* If not, try comparing based on numerical designation alone.
9400 See if GIVEN is an unadorned number, or 'r' followed by a number. */
9401 if (TOLOWER (*given) == 'r')
9403 if (!ISDIGIT (*given))
9406 /* Skip over some well-known prefixes in the canonical name,
9407 hoping to find a number there too. */
9408 if (TOLOWER (canonical[0]) == 'v' && TOLOWER (canonical[1]) == 'r')
9410 else if (TOLOWER (canonical[0]) == 'r' && TOLOWER (canonical[1]) == 'm')
9412 else if (TOLOWER (canonical[0]) == 'r')
9415 return mips_strict_matching_cpu_name_p (canonical, given);
9419 /* Return the mips_cpu_info entry for the processor or ISA given
9420 by CPU_STRING. Return null if the string isn't recognized.
9422 A similar function exists in GAS. */
9424 static const struct mips_cpu_info *
9425 mips_parse_cpu (const char *cpu_string)
9427 const struct mips_cpu_info *p;
9430 /* In the past, we allowed upper-case CPU names, but it doesn't
9431 work well with the multilib machinery. */
9432 for (s = cpu_string; *s != 0; s++)
9435 warning (0, "the cpu name must be lower case");
9439 /* 'from-abi' selects the most compatible architecture for the given
9440 ABI: MIPS I for 32-bit ABIs and MIPS III for 64-bit ABIs. For the
9441 EABIs, we have to decide whether we're using the 32-bit or 64-bit
9442 version. Look first at the -mgp options, if given, otherwise base
9443 the choice on MASK_64BIT in TARGET_DEFAULT. */
9444 if (strcasecmp (cpu_string, "from-abi") == 0)
9445 return mips_cpu_info_from_isa (ABI_NEEDS_32BIT_REGS ? 1
9446 : ABI_NEEDS_64BIT_REGS ? 3
9447 : (TARGET_64BIT ? 3 : 1));
9449 /* 'default' has traditionally been a no-op. Probably not very useful. */
9450 if (strcasecmp (cpu_string, "default") == 0)
9453 for (p = mips_cpu_info_table; p->name != 0; p++)
9454 if (mips_matching_cpu_name_p (p->name, cpu_string))
9461 /* Return the processor associated with the given ISA level, or null
9462 if the ISA isn't valid. */
9464 static const struct mips_cpu_info *
9465 mips_cpu_info_from_isa (int isa)
9467 const struct mips_cpu_info *p;
9469 for (p = mips_cpu_info_table; p->name != 0; p++)
9476 /* Implement HARD_REGNO_NREGS. The size of FP registers is controlled
9477 by UNITS_PER_FPREG. The size of FP status registers is always 4, because
9478 they only hold condition code modes, and CCmode is always considered to
9479 be 4 bytes wide. All other registers are word sized. */
9482 mips_hard_regno_nregs (int regno, enum machine_mode mode)
9484 if (ST_REG_P (regno))
9485 return ((GET_MODE_SIZE (mode) + 3) / 4);
9486 else if (! FP_REG_P (regno))
9487 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
9489 return ((GET_MODE_SIZE (mode) + UNITS_PER_FPREG - 1) / UNITS_PER_FPREG);
9492 /* Implement TARGET_RETURN_IN_MEMORY. Under the old (i.e., 32 and O64 ABIs)
9493 all BLKmode objects are returned in memory. Under the new (N32 and
9494 64-bit MIPS ABIs) small structures are returned in a register.
9495 Objects with varying size must still be returned in memory, of
9499 mips_return_in_memory (tree type, tree fndecl ATTRIBUTE_UNUSED)
9502 return (TYPE_MODE (type) == BLKmode);
9504 return ((int_size_in_bytes (type) > (2 * UNITS_PER_WORD))
9505 || (int_size_in_bytes (type) == -1));
9509 mips_strict_argument_naming (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
9511 return !TARGET_OLDABI;
9514 /* Return true if INSN is a multiply-add or multiply-subtract
9515 instruction and PREV assigns to the accumulator operand. */
9518 mips_linked_madd_p (rtx prev, rtx insn)
9522 x = single_set (insn);
9528 if (GET_CODE (x) == PLUS
9529 && GET_CODE (XEXP (x, 0)) == MULT
9530 && reg_set_p (XEXP (x, 1), prev))
9533 if (GET_CODE (x) == MINUS
9534 && GET_CODE (XEXP (x, 1)) == MULT
9535 && reg_set_p (XEXP (x, 0), prev))
9541 /* Used by TUNE_MACC_CHAINS to record the last scheduled instruction
9542 that may clobber hi or lo. */
9544 static rtx mips_macc_chains_last_hilo;
9546 /* A TUNE_MACC_CHAINS helper function. Record that instruction INSN has
9547 been scheduled, updating mips_macc_chains_last_hilo appropriately. */
9550 mips_macc_chains_record (rtx insn)
9552 if (get_attr_may_clobber_hilo (insn))
9553 mips_macc_chains_last_hilo = insn;
9556 /* A TUNE_MACC_CHAINS helper function. Search ready queue READY, which
9557 has NREADY elements, looking for a multiply-add or multiply-subtract
9558 instruction that is cumulative with mips_macc_chains_last_hilo.
9559 If there is one, promote it ahead of anything else that might
9560 clobber hi or lo. */
9563 mips_macc_chains_reorder (rtx *ready, int nready)
9567 if (mips_macc_chains_last_hilo != 0)
9568 for (i = nready - 1; i >= 0; i--)
9569 if (mips_linked_madd_p (mips_macc_chains_last_hilo, ready[i]))
9571 for (j = nready - 1; j > i; j--)
9572 if (recog_memoized (ready[j]) >= 0
9573 && get_attr_may_clobber_hilo (ready[j]))
9575 mips_promote_ready (ready, i, j);
9582 /* The last instruction to be scheduled. */
9584 static rtx vr4130_last_insn;
9586 /* A note_stores callback used by vr4130_true_reg_dependence_p. DATA
9587 points to an rtx that is initially an instruction. Nullify the rtx
9588 if the instruction uses the value of register X. */
9591 vr4130_true_reg_dependence_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
9593 rtx *insn_ptr = data;
9596 && reg_referenced_p (x, PATTERN (*insn_ptr)))
9600 /* Return true if there is true register dependence between vr4130_last_insn
9604 vr4130_true_reg_dependence_p (rtx insn)
9606 note_stores (PATTERN (vr4130_last_insn),
9607 vr4130_true_reg_dependence_p_1, &insn);
9611 /* A TUNE_MIPS4130 helper function. Given that INSN1 is at the head of
9612 the ready queue and that INSN2 is the instruction after it, return
9613 true if it is worth promoting INSN2 ahead of INSN1. Look for cases
9614 in which INSN1 and INSN2 can probably issue in parallel, but for
9615 which (INSN2, INSN1) should be less sensitive to instruction
9616 alignment than (INSN1, INSN2). See 4130.md for more details. */
9619 vr4130_swap_insns_p (rtx insn1, rtx insn2)
9623 /* Check for the following case:
9625 1) there is some other instruction X with an anti dependence on INSN1;
9626 2) X has a higher priority than INSN2; and
9627 3) X is an arithmetic instruction (and thus has no unit restrictions).
9629 If INSN1 is the last instruction blocking X, it would better to
9630 choose (INSN1, X) over (INSN2, INSN1). */
9631 for (dep = INSN_DEPEND (insn1); dep != 0; dep = XEXP (dep, 1))
9632 if (REG_NOTE_KIND (dep) == REG_DEP_ANTI
9633 && INSN_PRIORITY (XEXP (dep, 0)) > INSN_PRIORITY (insn2)
9634 && recog_memoized (XEXP (dep, 0)) >= 0
9635 && get_attr_vr4130_class (XEXP (dep, 0)) == VR4130_CLASS_ALU)
9638 if (vr4130_last_insn != 0
9639 && recog_memoized (insn1) >= 0
9640 && recog_memoized (insn2) >= 0)
9642 /* See whether INSN1 and INSN2 use different execution units,
9643 or if they are both ALU-type instructions. If so, they can
9644 probably execute in parallel. */
9645 enum attr_vr4130_class class1 = get_attr_vr4130_class (insn1);
9646 enum attr_vr4130_class class2 = get_attr_vr4130_class (insn2);
9647 if (class1 != class2 || class1 == VR4130_CLASS_ALU)
9649 /* If only one of the instructions has a dependence on
9650 vr4130_last_insn, prefer to schedule the other one first. */
9651 bool dep1 = vr4130_true_reg_dependence_p (insn1);
9652 bool dep2 = vr4130_true_reg_dependence_p (insn2);
9656 /* Prefer to schedule INSN2 ahead of INSN1 if vr4130_last_insn
9657 is not an ALU-type instruction and if INSN1 uses the same
9658 execution unit. (Note that if this condition holds, we already
9659 know that INSN2 uses a different execution unit.) */
9660 if (class1 != VR4130_CLASS_ALU
9661 && recog_memoized (vr4130_last_insn) >= 0
9662 && class1 == get_attr_vr4130_class (vr4130_last_insn))
9669 /* A TUNE_MIPS4130 helper function. (READY, NREADY) describes a ready
9670 queue with at least two instructions. Swap the first two if
9671 vr4130_swap_insns_p says that it could be worthwhile. */
9674 vr4130_reorder (rtx *ready, int nready)
9676 if (vr4130_swap_insns_p (ready[nready - 1], ready[nready - 2]))
9677 mips_promote_ready (ready, nready - 2, nready - 1);
9680 /* Remove the instruction at index LOWER from ready queue READY and
9681 reinsert it in front of the instruction at index HIGHER. LOWER must
9685 mips_promote_ready (rtx *ready, int lower, int higher)
9690 new_head = ready[lower];
9691 for (i = lower; i < higher; i++)
9692 ready[i] = ready[i + 1];
9693 ready[i] = new_head;
9696 /* Implement TARGET_SCHED_REORDER. */
9699 mips_sched_reorder (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
9700 rtx *ready, int *nreadyp, int cycle)
9702 if (!reload_completed && TUNE_MACC_CHAINS)
9705 mips_macc_chains_last_hilo = 0;
9707 mips_macc_chains_reorder (ready, *nreadyp);
9709 if (reload_completed && TUNE_MIPS4130 && !TARGET_VR4130_ALIGN)
9712 vr4130_last_insn = 0;
9714 vr4130_reorder (ready, *nreadyp);
9716 return mips_issue_rate ();
9719 /* Implement TARGET_SCHED_VARIABLE_ISSUE. */
9722 mips_variable_issue (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
9725 switch (GET_CODE (PATTERN (insn)))
9729 /* Don't count USEs and CLOBBERs against the issue rate. */
9734 if (!reload_completed && TUNE_MACC_CHAINS)
9735 mips_macc_chains_record (insn);
9736 vr4130_last_insn = insn;
9742 /* Implement TARGET_SCHED_ADJUST_COST. We assume that anti and output
9743 dependencies have no cost. */
9746 mips_adjust_cost (rtx insn ATTRIBUTE_UNUSED, rtx link,
9747 rtx dep ATTRIBUTE_UNUSED, int cost)
9749 if (REG_NOTE_KIND (link) != 0)
9754 /* Return the number of instructions that can be issued per cycle. */
9757 mips_issue_rate (void)
9761 case PROCESSOR_R4130:
9762 case PROCESSOR_R5400:
9763 case PROCESSOR_R5500:
9764 case PROCESSOR_R7000:
9765 case PROCESSOR_R9000:
9769 /* This is actually 4, but we get better performance if we claim 3.
9770 This is partly because of unwanted speculative code motion with the
9771 larger number, and partly because in most common cases we can't
9772 reach the theoretical max of 4. */
9780 /* Implements TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD. This should
9781 be as wide as the scheduling freedom in the DFA. */
9784 mips_multipass_dfa_lookahead (void)
9786 /* Can schedule up to 4 of the 6 function units in any one cycle. */
9787 if (mips_tune == PROCESSOR_SB1)
9793 /* Given that we have an rtx of the form (prefetch ... WRITE LOCALITY),
9794 return the first operand of the associated "pref" or "prefx" insn. */
9797 mips_prefetch_cookie (rtx write, rtx locality)
9799 /* store_streamed / load_streamed. */
9800 if (INTVAL (locality) <= 0)
9801 return GEN_INT (INTVAL (write) + 4);
9804 if (INTVAL (locality) <= 2)
9807 /* store_retained / load_retained. */
9808 return GEN_INT (INTVAL (write) + 6);
9811 /* MIPS builtin function support. */
9813 struct builtin_description
9815 /* The code of the main .md file instruction. See mips_builtin_type
9816 for more information. */
9817 enum insn_code icode;
9819 /* The floating-point comparison code to use with ICODE, if any. */
9820 enum mips_fp_condition cond;
9822 /* The name of the builtin function. */
9825 /* Specifies how the function should be expanded. */
9826 enum mips_builtin_type builtin_type;
9828 /* The function's prototype. */
9829 enum mips_function_type function_type;
9831 /* The target flags required for this function. */
9835 /* Define a MIPS_BUILTIN_DIRECT function for instruction CODE_FOR_mips_<INSN>.
9836 FUNCTION_TYPE and TARGET_FLAGS are builtin_description fields. */
9837 #define DIRECT_BUILTIN(INSN, FUNCTION_TYPE, TARGET_FLAGS) \
9838 { CODE_FOR_mips_ ## INSN, 0, "__builtin_mips_" #INSN, \
9839 MIPS_BUILTIN_DIRECT, FUNCTION_TYPE, TARGET_FLAGS }
9841 /* Define __builtin_mips_<INSN>_<COND>_{s,d}, both of which require
9843 #define CMP_SCALAR_BUILTINS(INSN, COND, TARGET_FLAGS) \
9844 { CODE_FOR_mips_ ## INSN ## _cond_s, MIPS_FP_COND_ ## COND, \
9845 "__builtin_mips_" #INSN "_" #COND "_s", \
9846 MIPS_BUILTIN_CMP_SINGLE, MIPS_INT_FTYPE_SF_SF, TARGET_FLAGS }, \
9847 { CODE_FOR_mips_ ## INSN ## _cond_d, MIPS_FP_COND_ ## COND, \
9848 "__builtin_mips_" #INSN "_" #COND "_d", \
9849 MIPS_BUILTIN_CMP_SINGLE, MIPS_INT_FTYPE_DF_DF, TARGET_FLAGS }
9851 /* Define __builtin_mips_{any,all,upper,lower}_<INSN>_<COND>_ps.
9852 The lower and upper forms require TARGET_FLAGS while the any and all
9853 forms require MASK_MIPS3D. */
9854 #define CMP_PS_BUILTINS(INSN, COND, TARGET_FLAGS) \
9855 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
9856 "__builtin_mips_any_" #INSN "_" #COND "_ps", \
9857 MIPS_BUILTIN_CMP_ANY, MIPS_INT_FTYPE_V2SF_V2SF, MASK_MIPS3D }, \
9858 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
9859 "__builtin_mips_all_" #INSN "_" #COND "_ps", \
9860 MIPS_BUILTIN_CMP_ALL, MIPS_INT_FTYPE_V2SF_V2SF, MASK_MIPS3D }, \
9861 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
9862 "__builtin_mips_lower_" #INSN "_" #COND "_ps", \
9863 MIPS_BUILTIN_CMP_LOWER, MIPS_INT_FTYPE_V2SF_V2SF, TARGET_FLAGS }, \
9864 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
9865 "__builtin_mips_upper_" #INSN "_" #COND "_ps", \
9866 MIPS_BUILTIN_CMP_UPPER, MIPS_INT_FTYPE_V2SF_V2SF, TARGET_FLAGS }
9868 /* Define __builtin_mips_{any,all}_<INSN>_<COND>_4s. The functions
9869 require MASK_MIPS3D. */
9870 #define CMP_4S_BUILTINS(INSN, COND) \
9871 { CODE_FOR_mips_ ## INSN ## _cond_4s, MIPS_FP_COND_ ## COND, \
9872 "__builtin_mips_any_" #INSN "_" #COND "_4s", \
9873 MIPS_BUILTIN_CMP_ANY, MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF, \
9875 { CODE_FOR_mips_ ## INSN ## _cond_4s, MIPS_FP_COND_ ## COND, \
9876 "__builtin_mips_all_" #INSN "_" #COND "_4s", \
9877 MIPS_BUILTIN_CMP_ALL, MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF, \
9880 /* Define __builtin_mips_mov{t,f}_<INSN>_<COND>_ps. The comparison
9881 instruction requires TARGET_FLAGS. */
9882 #define MOVTF_BUILTINS(INSN, COND, TARGET_FLAGS) \
9883 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
9884 "__builtin_mips_movt_" #INSN "_" #COND "_ps", \
9885 MIPS_BUILTIN_MOVT, MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF, \
9887 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
9888 "__builtin_mips_movf_" #INSN "_" #COND "_ps", \
9889 MIPS_BUILTIN_MOVF, MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF, \
9892 /* Define all the builtins related to c.cond.fmt condition COND. */
9893 #define CMP_BUILTINS(COND) \
9894 MOVTF_BUILTINS (c, COND, MASK_PAIRED_SINGLE_FLOAT), \
9895 MOVTF_BUILTINS (cabs, COND, MASK_MIPS3D), \
9896 CMP_SCALAR_BUILTINS (cabs, COND, MASK_MIPS3D), \
9897 CMP_PS_BUILTINS (c, COND, MASK_PAIRED_SINGLE_FLOAT), \
9898 CMP_PS_BUILTINS (cabs, COND, MASK_MIPS3D), \
9899 CMP_4S_BUILTINS (c, COND), \
9900 CMP_4S_BUILTINS (cabs, COND)
9902 /* __builtin_mips_abs_ps() maps to the standard absM2 pattern. */
9903 #define CODE_FOR_mips_abs_ps CODE_FOR_absv2sf2
9905 static const struct builtin_description mips_bdesc[] =
9907 DIRECT_BUILTIN (pll_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
9908 DIRECT_BUILTIN (pul_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
9909 DIRECT_BUILTIN (plu_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
9910 DIRECT_BUILTIN (puu_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
9911 DIRECT_BUILTIN (cvt_ps_s, MIPS_V2SF_FTYPE_SF_SF, MASK_PAIRED_SINGLE_FLOAT),
9912 DIRECT_BUILTIN (cvt_s_pl, MIPS_SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT),
9913 DIRECT_BUILTIN (cvt_s_pu, MIPS_SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT),
9914 DIRECT_BUILTIN (abs_ps, MIPS_V2SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT),
9916 DIRECT_BUILTIN (alnv_ps, MIPS_V2SF_FTYPE_V2SF_V2SF_INT,
9917 MASK_PAIRED_SINGLE_FLOAT),
9918 DIRECT_BUILTIN (addr_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
9919 DIRECT_BUILTIN (mulr_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
9920 DIRECT_BUILTIN (cvt_pw_ps, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
9921 DIRECT_BUILTIN (cvt_ps_pw, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
9923 DIRECT_BUILTIN (recip1_s, MIPS_SF_FTYPE_SF, MASK_MIPS3D),
9924 DIRECT_BUILTIN (recip1_d, MIPS_DF_FTYPE_DF, MASK_MIPS3D),
9925 DIRECT_BUILTIN (recip1_ps, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
9926 DIRECT_BUILTIN (recip2_s, MIPS_SF_FTYPE_SF_SF, MASK_MIPS3D),
9927 DIRECT_BUILTIN (recip2_d, MIPS_DF_FTYPE_DF_DF, MASK_MIPS3D),
9928 DIRECT_BUILTIN (recip2_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
9930 DIRECT_BUILTIN (rsqrt1_s, MIPS_SF_FTYPE_SF, MASK_MIPS3D),
9931 DIRECT_BUILTIN (rsqrt1_d, MIPS_DF_FTYPE_DF, MASK_MIPS3D),
9932 DIRECT_BUILTIN (rsqrt1_ps, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
9933 DIRECT_BUILTIN (rsqrt2_s, MIPS_SF_FTYPE_SF_SF, MASK_MIPS3D),
9934 DIRECT_BUILTIN (rsqrt2_d, MIPS_DF_FTYPE_DF_DF, MASK_MIPS3D),
9935 DIRECT_BUILTIN (rsqrt2_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
9937 MIPS_FP_CONDITIONS (CMP_BUILTINS)
9940 /* Builtin functions for the SB-1 processor. */
9942 #define CODE_FOR_mips_sqrt_ps CODE_FOR_sqrtv2sf2
9944 static const struct builtin_description sb1_bdesc[] =
9946 DIRECT_BUILTIN (sqrt_ps, MIPS_V2SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT)
9949 /* Builtin functions for DSP ASE. */
9951 #define CODE_FOR_mips_addq_ph CODE_FOR_addv2hi3
9952 #define CODE_FOR_mips_addu_qb CODE_FOR_addv4qi3
9953 #define CODE_FOR_mips_subq_ph CODE_FOR_subv2hi3
9954 #define CODE_FOR_mips_subu_qb CODE_FOR_subv4qi3
9956 /* Define a MIPS_BUILTIN_DIRECT_NO_TARGET function for instruction
9957 CODE_FOR_mips_<INSN>. FUNCTION_TYPE and TARGET_FLAGS are
9958 builtin_description fields. */
9959 #define DIRECT_NO_TARGET_BUILTIN(INSN, FUNCTION_TYPE, TARGET_FLAGS) \
9960 { CODE_FOR_mips_ ## INSN, 0, "__builtin_mips_" #INSN, \
9961 MIPS_BUILTIN_DIRECT_NO_TARGET, FUNCTION_TYPE, TARGET_FLAGS }
9963 /* Define __builtin_mips_bposge<VALUE>. <VALUE> is 32 for the MIPS32 DSP
9964 branch instruction. TARGET_FLAGS is a builtin_description field. */
9965 #define BPOSGE_BUILTIN(VALUE, TARGET_FLAGS) \
9966 { CODE_FOR_mips_bposge, 0, "__builtin_mips_bposge" #VALUE, \
9967 MIPS_BUILTIN_BPOSGE ## VALUE, MIPS_SI_FTYPE_VOID, TARGET_FLAGS }
9969 static const struct builtin_description dsp_bdesc[] =
9971 DIRECT_BUILTIN (addq_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
9972 DIRECT_BUILTIN (addq_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
9973 DIRECT_BUILTIN (addq_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
9974 DIRECT_BUILTIN (addu_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
9975 DIRECT_BUILTIN (addu_s_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
9976 DIRECT_BUILTIN (subq_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
9977 DIRECT_BUILTIN (subq_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
9978 DIRECT_BUILTIN (subq_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
9979 DIRECT_BUILTIN (subu_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
9980 DIRECT_BUILTIN (subu_s_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
9981 DIRECT_BUILTIN (addsc, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
9982 DIRECT_BUILTIN (addwc, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
9983 DIRECT_BUILTIN (modsub, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
9984 DIRECT_BUILTIN (raddu_w_qb, MIPS_SI_FTYPE_V4QI, MASK_DSP),
9985 DIRECT_BUILTIN (absq_s_ph, MIPS_V2HI_FTYPE_V2HI, MASK_DSP),
9986 DIRECT_BUILTIN (absq_s_w, MIPS_SI_FTYPE_SI, MASK_DSP),
9987 DIRECT_BUILTIN (precrq_qb_ph, MIPS_V4QI_FTYPE_V2HI_V2HI, MASK_DSP),
9988 DIRECT_BUILTIN (precrq_ph_w, MIPS_V2HI_FTYPE_SI_SI, MASK_DSP),
9989 DIRECT_BUILTIN (precrq_rs_ph_w, MIPS_V2HI_FTYPE_SI_SI, MASK_DSP),
9990 DIRECT_BUILTIN (precrqu_s_qb_ph, MIPS_V4QI_FTYPE_V2HI_V2HI, MASK_DSP),
9991 DIRECT_BUILTIN (preceq_w_phl, MIPS_SI_FTYPE_V2HI, MASK_DSP),
9992 DIRECT_BUILTIN (preceq_w_phr, MIPS_SI_FTYPE_V2HI, MASK_DSP),
9993 DIRECT_BUILTIN (precequ_ph_qbl, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
9994 DIRECT_BUILTIN (precequ_ph_qbr, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
9995 DIRECT_BUILTIN (precequ_ph_qbla, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
9996 DIRECT_BUILTIN (precequ_ph_qbra, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
9997 DIRECT_BUILTIN (preceu_ph_qbl, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
9998 DIRECT_BUILTIN (preceu_ph_qbr, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
9999 DIRECT_BUILTIN (preceu_ph_qbla, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
10000 DIRECT_BUILTIN (preceu_ph_qbra, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
10001 DIRECT_BUILTIN (shll_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSP),
10002 DIRECT_BUILTIN (shll_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
10003 DIRECT_BUILTIN (shll_s_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
10004 DIRECT_BUILTIN (shll_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
10005 DIRECT_BUILTIN (shrl_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSP),
10006 DIRECT_BUILTIN (shra_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
10007 DIRECT_BUILTIN (shra_r_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
10008 DIRECT_BUILTIN (shra_r_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
10009 DIRECT_BUILTIN (muleu_s_ph_qbl, MIPS_V2HI_FTYPE_V4QI_V2HI, MASK_DSP),
10010 DIRECT_BUILTIN (muleu_s_ph_qbr, MIPS_V2HI_FTYPE_V4QI_V2HI, MASK_DSP),
10011 DIRECT_BUILTIN (mulq_rs_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
10012 DIRECT_BUILTIN (muleq_s_w_phl, MIPS_SI_FTYPE_V2HI_V2HI, MASK_DSP),
10013 DIRECT_BUILTIN (muleq_s_w_phr, MIPS_SI_FTYPE_V2HI_V2HI, MASK_DSP),
10014 DIRECT_BUILTIN (dpau_h_qbl, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
10015 DIRECT_BUILTIN (dpau_h_qbr, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
10016 DIRECT_BUILTIN (dpsu_h_qbl, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
10017 DIRECT_BUILTIN (dpsu_h_qbr, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
10018 DIRECT_BUILTIN (dpaq_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
10019 DIRECT_BUILTIN (dpsq_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
10020 DIRECT_BUILTIN (mulsaq_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
10021 DIRECT_BUILTIN (dpaq_sa_l_w, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSP),
10022 DIRECT_BUILTIN (dpsq_sa_l_w, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSP),
10023 DIRECT_BUILTIN (maq_s_w_phl, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
10024 DIRECT_BUILTIN (maq_s_w_phr, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
10025 DIRECT_BUILTIN (maq_sa_w_phl, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
10026 DIRECT_BUILTIN (maq_sa_w_phr, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
10027 DIRECT_BUILTIN (bitrev, MIPS_SI_FTYPE_SI, MASK_DSP),
10028 DIRECT_BUILTIN (insv, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
10029 DIRECT_BUILTIN (repl_qb, MIPS_V4QI_FTYPE_SI, MASK_DSP),
10030 DIRECT_BUILTIN (repl_ph, MIPS_V2HI_FTYPE_SI, MASK_DSP),
10031 DIRECT_NO_TARGET_BUILTIN (cmpu_eq_qb, MIPS_VOID_FTYPE_V4QI_V4QI, MASK_DSP),
10032 DIRECT_NO_TARGET_BUILTIN (cmpu_lt_qb, MIPS_VOID_FTYPE_V4QI_V4QI, MASK_DSP),
10033 DIRECT_NO_TARGET_BUILTIN (cmpu_le_qb, MIPS_VOID_FTYPE_V4QI_V4QI, MASK_DSP),
10034 DIRECT_BUILTIN (cmpgu_eq_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSP),
10035 DIRECT_BUILTIN (cmpgu_lt_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSP),
10036 DIRECT_BUILTIN (cmpgu_le_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSP),
10037 DIRECT_NO_TARGET_BUILTIN (cmp_eq_ph, MIPS_VOID_FTYPE_V2HI_V2HI, MASK_DSP),
10038 DIRECT_NO_TARGET_BUILTIN (cmp_lt_ph, MIPS_VOID_FTYPE_V2HI_V2HI, MASK_DSP),
10039 DIRECT_NO_TARGET_BUILTIN (cmp_le_ph, MIPS_VOID_FTYPE_V2HI_V2HI, MASK_DSP),
10040 DIRECT_BUILTIN (pick_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
10041 DIRECT_BUILTIN (pick_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
10042 DIRECT_BUILTIN (packrl_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
10043 DIRECT_BUILTIN (extr_w, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
10044 DIRECT_BUILTIN (extr_r_w, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
10045 DIRECT_BUILTIN (extr_rs_w, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
10046 DIRECT_BUILTIN (extr_s_h, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
10047 DIRECT_BUILTIN (extp, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
10048 DIRECT_BUILTIN (extpdp, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
10049 DIRECT_BUILTIN (shilo, MIPS_DI_FTYPE_DI_SI, MASK_DSP),
10050 DIRECT_BUILTIN (mthlip, MIPS_DI_FTYPE_DI_SI, MASK_DSP),
10051 DIRECT_NO_TARGET_BUILTIN (wrdsp, MIPS_VOID_FTYPE_SI_SI, MASK_DSP),
10052 DIRECT_BUILTIN (rddsp, MIPS_SI_FTYPE_SI, MASK_DSP),
10053 DIRECT_BUILTIN (lbux, MIPS_SI_FTYPE_PTR_SI, MASK_DSP),
10054 DIRECT_BUILTIN (lhx, MIPS_SI_FTYPE_PTR_SI, MASK_DSP),
10055 DIRECT_BUILTIN (lwx, MIPS_SI_FTYPE_PTR_SI, MASK_DSP),
10056 BPOSGE_BUILTIN (32, MASK_DSP)
10059 /* This helps provide a mapping from builtin function codes to bdesc
10064 /* The builtin function table that this entry describes. */
10065 const struct builtin_description *bdesc;
10067 /* The number of entries in the builtin function table. */
10070 /* The target processor that supports these builtin functions.
10071 PROCESSOR_MAX means we enable them for all processors. */
10072 enum processor_type proc;
10075 static const struct bdesc_map bdesc_arrays[] =
10077 { mips_bdesc, ARRAY_SIZE (mips_bdesc), PROCESSOR_MAX },
10078 { sb1_bdesc, ARRAY_SIZE (sb1_bdesc), PROCESSOR_SB1 },
10079 { dsp_bdesc, ARRAY_SIZE (dsp_bdesc), PROCESSOR_MAX }
10082 /* Take the head of argument list *ARGLIST and convert it into a form
10083 suitable for input operand OP of instruction ICODE. Return the value
10084 and point *ARGLIST at the next element of the list. */
10087 mips_prepare_builtin_arg (enum insn_code icode,
10088 unsigned int op, tree *arglist)
10091 enum machine_mode mode;
10093 value = expand_normal (TREE_VALUE (*arglist));
10094 mode = insn_data[icode].operand[op].mode;
10095 if (!insn_data[icode].operand[op].predicate (value, mode))
10097 value = copy_to_mode_reg (mode, value);
10098 /* Check the predicate again. */
10099 if (!insn_data[icode].operand[op].predicate (value, mode))
10101 error ("invalid argument to builtin function");
10106 *arglist = TREE_CHAIN (*arglist);
10110 /* Return an rtx suitable for output operand OP of instruction ICODE.
10111 If TARGET is non-null, try to use it where possible. */
10114 mips_prepare_builtin_target (enum insn_code icode, unsigned int op, rtx target)
10116 enum machine_mode mode;
10118 mode = insn_data[icode].operand[op].mode;
10119 if (target == 0 || !insn_data[icode].operand[op].predicate (target, mode))
10120 target = gen_reg_rtx (mode);
10125 /* Expand builtin functions. This is called from TARGET_EXPAND_BUILTIN. */
10128 mips_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
10129 enum machine_mode mode ATTRIBUTE_UNUSED,
10130 int ignore ATTRIBUTE_UNUSED)
10132 enum insn_code icode;
10133 enum mips_builtin_type type;
10134 tree fndecl, arglist;
10135 unsigned int fcode;
10136 const struct builtin_description *bdesc;
10137 const struct bdesc_map *m;
10139 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10140 arglist = TREE_OPERAND (exp, 1);
10141 fcode = DECL_FUNCTION_CODE (fndecl);
10144 for (m = bdesc_arrays; m < &bdesc_arrays[ARRAY_SIZE (bdesc_arrays)]; m++)
10146 if (fcode < m->size)
10149 icode = bdesc[fcode].icode;
10150 type = bdesc[fcode].builtin_type;
10160 case MIPS_BUILTIN_DIRECT:
10161 return mips_expand_builtin_direct (icode, target, arglist, true);
10163 case MIPS_BUILTIN_DIRECT_NO_TARGET:
10164 return mips_expand_builtin_direct (icode, target, arglist, false);
10166 case MIPS_BUILTIN_MOVT:
10167 case MIPS_BUILTIN_MOVF:
10168 return mips_expand_builtin_movtf (type, icode, bdesc[fcode].cond,
10171 case MIPS_BUILTIN_CMP_ANY:
10172 case MIPS_BUILTIN_CMP_ALL:
10173 case MIPS_BUILTIN_CMP_UPPER:
10174 case MIPS_BUILTIN_CMP_LOWER:
10175 case MIPS_BUILTIN_CMP_SINGLE:
10176 return mips_expand_builtin_compare (type, icode, bdesc[fcode].cond,
10179 case MIPS_BUILTIN_BPOSGE32:
10180 return mips_expand_builtin_bposge (type, target);
10187 /* Init builtin functions. This is called from TARGET_INIT_BUILTIN. */
10190 mips_init_builtins (void)
10192 const struct builtin_description *d;
10193 const struct bdesc_map *m;
10194 tree types[(int) MIPS_MAX_FTYPE_MAX];
10195 tree V2SF_type_node;
10196 tree V2HI_type_node;
10197 tree V4QI_type_node;
10198 unsigned int offset;
10200 /* We have only builtins for -mpaired-single, -mips3d and -mdsp. */
10201 if (!TARGET_PAIRED_SINGLE_FLOAT && !TARGET_DSP)
10204 if (TARGET_PAIRED_SINGLE_FLOAT)
10206 V2SF_type_node = build_vector_type_for_mode (float_type_node, V2SFmode);
10208 types[MIPS_V2SF_FTYPE_V2SF]
10209 = build_function_type_list (V2SF_type_node, V2SF_type_node, NULL_TREE);
10211 types[MIPS_V2SF_FTYPE_V2SF_V2SF]
10212 = build_function_type_list (V2SF_type_node,
10213 V2SF_type_node, V2SF_type_node, NULL_TREE);
10215 types[MIPS_V2SF_FTYPE_V2SF_V2SF_INT]
10216 = build_function_type_list (V2SF_type_node,
10217 V2SF_type_node, V2SF_type_node,
10218 integer_type_node, NULL_TREE);
10220 types[MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF]
10221 = build_function_type_list (V2SF_type_node,
10222 V2SF_type_node, V2SF_type_node,
10223 V2SF_type_node, V2SF_type_node, NULL_TREE);
10225 types[MIPS_V2SF_FTYPE_SF_SF]
10226 = build_function_type_list (V2SF_type_node,
10227 float_type_node, float_type_node, NULL_TREE);
10229 types[MIPS_INT_FTYPE_V2SF_V2SF]
10230 = build_function_type_list (integer_type_node,
10231 V2SF_type_node, V2SF_type_node, NULL_TREE);
10233 types[MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF]
10234 = build_function_type_list (integer_type_node,
10235 V2SF_type_node, V2SF_type_node,
10236 V2SF_type_node, V2SF_type_node, NULL_TREE);
10238 types[MIPS_INT_FTYPE_SF_SF]
10239 = build_function_type_list (integer_type_node,
10240 float_type_node, float_type_node, NULL_TREE);
10242 types[MIPS_INT_FTYPE_DF_DF]
10243 = build_function_type_list (integer_type_node,
10244 double_type_node, double_type_node, NULL_TREE);
10246 types[MIPS_SF_FTYPE_V2SF]
10247 = build_function_type_list (float_type_node, V2SF_type_node, NULL_TREE);
10249 types[MIPS_SF_FTYPE_SF]
10250 = build_function_type_list (float_type_node,
10251 float_type_node, NULL_TREE);
10253 types[MIPS_SF_FTYPE_SF_SF]
10254 = build_function_type_list (float_type_node,
10255 float_type_node, float_type_node, NULL_TREE);
10257 types[MIPS_DF_FTYPE_DF]
10258 = build_function_type_list (double_type_node,
10259 double_type_node, NULL_TREE);
10261 types[MIPS_DF_FTYPE_DF_DF]
10262 = build_function_type_list (double_type_node,
10263 double_type_node, double_type_node, NULL_TREE);
10268 V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
10269 V4QI_type_node = build_vector_type_for_mode (intQI_type_node, V4QImode);
10271 types[MIPS_V2HI_FTYPE_V2HI_V2HI]
10272 = build_function_type_list (V2HI_type_node,
10273 V2HI_type_node, V2HI_type_node,
10276 types[MIPS_SI_FTYPE_SI_SI]
10277 = build_function_type_list (intSI_type_node,
10278 intSI_type_node, intSI_type_node,
10281 types[MIPS_V4QI_FTYPE_V4QI_V4QI]
10282 = build_function_type_list (V4QI_type_node,
10283 V4QI_type_node, V4QI_type_node,
10286 types[MIPS_SI_FTYPE_V4QI]
10287 = build_function_type_list (intSI_type_node,
10291 types[MIPS_V2HI_FTYPE_V2HI]
10292 = build_function_type_list (V2HI_type_node,
10296 types[MIPS_SI_FTYPE_SI]
10297 = build_function_type_list (intSI_type_node,
10301 types[MIPS_V4QI_FTYPE_V2HI_V2HI]
10302 = build_function_type_list (V4QI_type_node,
10303 V2HI_type_node, V2HI_type_node,
10306 types[MIPS_V2HI_FTYPE_SI_SI]
10307 = build_function_type_list (V2HI_type_node,
10308 intSI_type_node, intSI_type_node,
10311 types[MIPS_SI_FTYPE_V2HI]
10312 = build_function_type_list (intSI_type_node,
10316 types[MIPS_V2HI_FTYPE_V4QI]
10317 = build_function_type_list (V2HI_type_node,
10321 types[MIPS_V4QI_FTYPE_V4QI_SI]
10322 = build_function_type_list (V4QI_type_node,
10323 V4QI_type_node, intSI_type_node,
10326 types[MIPS_V2HI_FTYPE_V2HI_SI]
10327 = build_function_type_list (V2HI_type_node,
10328 V2HI_type_node, intSI_type_node,
10331 types[MIPS_V2HI_FTYPE_V4QI_V2HI]
10332 = build_function_type_list (V2HI_type_node,
10333 V4QI_type_node, V2HI_type_node,
10336 types[MIPS_SI_FTYPE_V2HI_V2HI]
10337 = build_function_type_list (intSI_type_node,
10338 V2HI_type_node, V2HI_type_node,
10341 types[MIPS_DI_FTYPE_DI_V4QI_V4QI]
10342 = build_function_type_list (intDI_type_node,
10343 intDI_type_node, V4QI_type_node, V4QI_type_node,
10346 types[MIPS_DI_FTYPE_DI_V2HI_V2HI]
10347 = build_function_type_list (intDI_type_node,
10348 intDI_type_node, V2HI_type_node, V2HI_type_node,
10351 types[MIPS_DI_FTYPE_DI_SI_SI]
10352 = build_function_type_list (intDI_type_node,
10353 intDI_type_node, intSI_type_node, intSI_type_node,
10356 types[MIPS_V4QI_FTYPE_SI]
10357 = build_function_type_list (V4QI_type_node,
10361 types[MIPS_V2HI_FTYPE_SI]
10362 = build_function_type_list (V2HI_type_node,
10366 types[MIPS_VOID_FTYPE_V4QI_V4QI]
10367 = build_function_type_list (void_type_node,
10368 V4QI_type_node, V4QI_type_node,
10371 types[MIPS_SI_FTYPE_V4QI_V4QI]
10372 = build_function_type_list (intSI_type_node,
10373 V4QI_type_node, V4QI_type_node,
10376 types[MIPS_VOID_FTYPE_V2HI_V2HI]
10377 = build_function_type_list (void_type_node,
10378 V2HI_type_node, V2HI_type_node,
10381 types[MIPS_SI_FTYPE_DI_SI]
10382 = build_function_type_list (intSI_type_node,
10383 intDI_type_node, intSI_type_node,
10386 types[MIPS_DI_FTYPE_DI_SI]
10387 = build_function_type_list (intDI_type_node,
10388 intDI_type_node, intSI_type_node,
10391 types[MIPS_VOID_FTYPE_SI_SI]
10392 = build_function_type_list (void_type_node,
10393 intSI_type_node, intSI_type_node,
10396 types[MIPS_SI_FTYPE_PTR_SI]
10397 = build_function_type_list (intSI_type_node,
10398 ptr_type_node, intSI_type_node,
10401 types[MIPS_SI_FTYPE_VOID]
10402 = build_function_type (intSI_type_node, void_list_node);
10405 /* Iterate through all of the bdesc arrays, initializing all of the
10406 builtin functions. */
10409 for (m = bdesc_arrays; m < &bdesc_arrays[ARRAY_SIZE (bdesc_arrays)]; m++)
10411 if (m->proc == PROCESSOR_MAX || (m->proc == mips_arch))
10412 for (d = m->bdesc; d < &m->bdesc[m->size]; d++)
10413 if ((d->target_flags & target_flags) == d->target_flags)
10414 lang_hooks.builtin_function (d->name, types[d->function_type],
10415 d - m->bdesc + offset,
10416 BUILT_IN_MD, NULL, NULL);
10421 /* Expand a MIPS_BUILTIN_DIRECT function. ICODE is the code of the
10422 .md pattern and ARGLIST is the list of function arguments. TARGET,
10423 if nonnull, suggests a good place to put the result.
10424 HAS_TARGET indicates the function must return something. */
10427 mips_expand_builtin_direct (enum insn_code icode, rtx target, tree arglist,
10430 rtx ops[MAX_RECOG_OPERANDS];
10435 /* We save target to ops[0]. */
10436 ops[0] = mips_prepare_builtin_target (icode, 0, target);
10440 /* We need to test if arglist is not zero. Some instructions have extra
10441 clobber registers. */
10442 for (; i < insn_data[icode].n_operands && arglist != 0; i++)
10443 ops[i] = mips_prepare_builtin_arg (icode, i, &arglist);
10448 emit_insn (GEN_FCN (icode) (ops[0], ops[1]));
10452 emit_insn (GEN_FCN (icode) (ops[0], ops[1], ops[2]));
10456 emit_insn (GEN_FCN (icode) (ops[0], ops[1], ops[2], ops[3]));
10460 gcc_unreachable ();
10465 /* Expand a __builtin_mips_movt_*_ps() or __builtin_mips_movf_*_ps()
10466 function (TYPE says which). ARGLIST is the list of arguments to the
10467 function, ICODE is the instruction that should be used to compare
10468 the first two arguments, and COND is the condition it should test.
10469 TARGET, if nonnull, suggests a good place to put the result. */
10472 mips_expand_builtin_movtf (enum mips_builtin_type type,
10473 enum insn_code icode, enum mips_fp_condition cond,
10474 rtx target, tree arglist)
10476 rtx cmp_result, op0, op1;
10478 cmp_result = mips_prepare_builtin_target (icode, 0, 0);
10479 op0 = mips_prepare_builtin_arg (icode, 1, &arglist);
10480 op1 = mips_prepare_builtin_arg (icode, 2, &arglist);
10481 emit_insn (GEN_FCN (icode) (cmp_result, op0, op1, GEN_INT (cond)));
10483 icode = CODE_FOR_mips_cond_move_tf_ps;
10484 target = mips_prepare_builtin_target (icode, 0, target);
10485 if (type == MIPS_BUILTIN_MOVT)
10487 op1 = mips_prepare_builtin_arg (icode, 2, &arglist);
10488 op0 = mips_prepare_builtin_arg (icode, 1, &arglist);
10492 op0 = mips_prepare_builtin_arg (icode, 1, &arglist);
10493 op1 = mips_prepare_builtin_arg (icode, 2, &arglist);
10495 emit_insn (gen_mips_cond_move_tf_ps (target, op0, op1, cmp_result));
10499 /* Move VALUE_IF_TRUE into TARGET if CONDITION is true; move VALUE_IF_FALSE
10500 into TARGET otherwise. Return TARGET. */
10503 mips_builtin_branch_and_move (rtx condition, rtx target,
10504 rtx value_if_true, rtx value_if_false)
10506 rtx true_label, done_label;
10508 true_label = gen_label_rtx ();
10509 done_label = gen_label_rtx ();
10511 /* First assume that CONDITION is false. */
10512 emit_move_insn (target, value_if_false);
10514 /* Branch to TRUE_LABEL if CONDITION is true and DONE_LABEL otherwise. */
10515 emit_jump_insn (gen_condjump (condition, true_label));
10516 emit_jump_insn (gen_jump (done_label));
10519 /* Fix TARGET if CONDITION is true. */
10520 emit_label (true_label);
10521 emit_move_insn (target, value_if_true);
10523 emit_label (done_label);
10527 /* Expand a comparison builtin of type BUILTIN_TYPE. ICODE is the code
10528 of the comparison instruction and COND is the condition it should test.
10529 ARGLIST is the list of function arguments and TARGET, if nonnull,
10530 suggests a good place to put the boolean result. */
10533 mips_expand_builtin_compare (enum mips_builtin_type builtin_type,
10534 enum insn_code icode, enum mips_fp_condition cond,
10535 rtx target, tree arglist)
10537 rtx offset, condition, cmp_result, ops[MAX_RECOG_OPERANDS];
10540 if (target == 0 || GET_MODE (target) != SImode)
10541 target = gen_reg_rtx (SImode);
10543 /* Prepare the operands to the comparison. */
10544 cmp_result = mips_prepare_builtin_target (icode, 0, 0);
10545 for (i = 1; i < insn_data[icode].n_operands - 1; i++)
10546 ops[i] = mips_prepare_builtin_arg (icode, i, &arglist);
10548 switch (insn_data[icode].n_operands)
10551 emit_insn (GEN_FCN (icode) (cmp_result, ops[1], ops[2], GEN_INT (cond)));
10555 emit_insn (GEN_FCN (icode) (cmp_result, ops[1], ops[2],
10556 ops[3], ops[4], GEN_INT (cond)));
10560 gcc_unreachable ();
10563 /* If the comparison sets more than one register, we define the result
10564 to be 0 if all registers are false and -1 if all registers are true.
10565 The value of the complete result is indeterminate otherwise. */
10566 switch (builtin_type)
10568 case MIPS_BUILTIN_CMP_ALL:
10569 condition = gen_rtx_NE (VOIDmode, cmp_result, constm1_rtx);
10570 return mips_builtin_branch_and_move (condition, target,
10571 const0_rtx, const1_rtx);
10573 case MIPS_BUILTIN_CMP_UPPER:
10574 case MIPS_BUILTIN_CMP_LOWER:
10575 offset = GEN_INT (builtin_type == MIPS_BUILTIN_CMP_UPPER);
10576 condition = gen_single_cc (cmp_result, offset);
10577 return mips_builtin_branch_and_move (condition, target,
10578 const1_rtx, const0_rtx);
10581 condition = gen_rtx_NE (VOIDmode, cmp_result, const0_rtx);
10582 return mips_builtin_branch_and_move (condition, target,
10583 const1_rtx, const0_rtx);
10587 /* Expand a bposge builtin of type BUILTIN_TYPE. TARGET, if nonnull,
10588 suggests a good place to put the boolean result. */
10591 mips_expand_builtin_bposge (enum mips_builtin_type builtin_type, rtx target)
10593 rtx condition, cmp_result;
10596 if (target == 0 || GET_MODE (target) != SImode)
10597 target = gen_reg_rtx (SImode);
10599 cmp_result = gen_rtx_REG (CCDSPmode, CCDSP_PO_REGNUM);
10601 if (builtin_type == MIPS_BUILTIN_BPOSGE32)
10606 condition = gen_rtx_GE (VOIDmode, cmp_result, GEN_INT (cmp_value));
10607 return mips_builtin_branch_and_move (condition, target,
10608 const1_rtx, const0_rtx);
10611 /* Set SYMBOL_REF_FLAGS for the SYMBOL_REF inside RTL, which belongs to DECL.
10612 FIRST is true if this is the first time handling this decl. */
10615 mips_encode_section_info (tree decl, rtx rtl, int first)
10617 default_encode_section_info (decl, rtl, first);
10619 if (TREE_CODE (decl) == FUNCTION_DECL
10620 && lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
10622 rtx symbol = XEXP (rtl, 0);
10623 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LONG_CALL;
10627 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. TARGET_ABICALLS makes
10628 PIC_FUNCTION_ADDR_REGNUM live on entry to a function. */
10631 mips_extra_live_on_entry (bitmap regs)
10633 if (!TARGET_ABICALLS)
10634 bitmap_set_bit (regs, PIC_FUNCTION_ADDR_REGNUM);
10638 #include "gt-mips.h"