1 /* Subroutines used for MIPS code generation.
2 Copyright (C) 1989, 1990, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5 Contributed by A. Lichnewsky, lich@inria.inria.fr.
6 Changes by Michael Meissner, meissner@osf.org.
7 64-bit r4000 support by Ian Lance Taylor, ian@cygnus.com, and
8 Brendan Eich, brendan@microunity.com.
10 This file is part of GCC.
12 GCC is free software; you can redistribute it and/or modify
13 it under the terms of the GNU General Public License as published by
14 the Free Software Foundation; either version 3, or (at your option)
17 GCC is distributed in the hope that it will be useful,
18 but WITHOUT ANY WARRANTY; without even the implied warranty of
19 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 GNU General Public License for more details.
22 You should have received a copy of the GNU General Public License
23 along with GCC; see the file COPYING3. If not see
24 <http://www.gnu.org/licenses/>. */
28 #include "coretypes.h"
33 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "conditions.h"
37 #include "insn-attr.h"
53 #include "target-def.h"
54 #include "integrate.h"
55 #include "langhooks.h"
56 #include "cfglayout.h"
57 #include "sched-int.h"
58 #include "tree-gimple.h"
61 /* True if X is an unspec wrapper around a SYMBOL_REF or LABEL_REF. */
62 #define UNSPEC_ADDRESS_P(X) \
63 (GET_CODE (X) == UNSPEC \
64 && XINT (X, 1) >= UNSPEC_ADDRESS_FIRST \
65 && XINT (X, 1) < UNSPEC_ADDRESS_FIRST + NUM_SYMBOL_TYPES)
67 /* Extract the symbol or label from UNSPEC wrapper X. */
68 #define UNSPEC_ADDRESS(X) \
71 /* Extract the symbol type from UNSPEC wrapper X. */
72 #define UNSPEC_ADDRESS_TYPE(X) \
73 ((enum mips_symbol_type) (XINT (X, 1) - UNSPEC_ADDRESS_FIRST))
75 /* The maximum distance between the top of the stack frame and the
76 value $sp has when we save and restore registers.
78 The value for normal-mode code must be a SMALL_OPERAND and must
79 preserve the maximum stack alignment. We therefore use a value
80 of 0x7ff0 in this case.
82 MIPS16e SAVE and RESTORE instructions can adjust the stack pointer by
83 up to 0x7f8 bytes and can usually save or restore all the registers
84 that we need to save or restore. (Note that we can only use these
85 instructions for o32, for which the stack alignment is 8 bytes.)
87 We use a maximum gap of 0x100 or 0x400 for MIPS16 code when SAVE and
88 RESTORE are not available. We can then use unextended instructions
89 to save and restore registers, and to allocate and deallocate the top
91 #define MIPS_MAX_FIRST_STACK_STEP \
92 (!TARGET_MIPS16 ? 0x7ff0 \
93 : GENERATE_MIPS16E_SAVE_RESTORE ? 0x7f8 \
94 : TARGET_64BIT ? 0x100 : 0x400)
96 /* True if INSN is a mips.md pattern or asm statement. */
97 #define USEFUL_INSN_P(INSN) \
99 && GET_CODE (PATTERN (INSN)) != USE \
100 && GET_CODE (PATTERN (INSN)) != CLOBBER \
101 && GET_CODE (PATTERN (INSN)) != ADDR_VEC \
102 && GET_CODE (PATTERN (INSN)) != ADDR_DIFF_VEC)
104 /* If INSN is a delayed branch sequence, return the first instruction
105 in the sequence, otherwise return INSN itself. */
106 #define SEQ_BEGIN(INSN) \
107 (INSN_P (INSN) && GET_CODE (PATTERN (INSN)) == SEQUENCE \
108 ? XVECEXP (PATTERN (INSN), 0, 0) \
111 /* Likewise for the last instruction in a delayed branch sequence. */
112 #define SEQ_END(INSN) \
113 (INSN_P (INSN) && GET_CODE (PATTERN (INSN)) == SEQUENCE \
114 ? XVECEXP (PATTERN (INSN), 0, XVECLEN (PATTERN (INSN), 0) - 1) \
117 /* Execute the following loop body with SUBINSN set to each instruction
118 between SEQ_BEGIN (INSN) and SEQ_END (INSN) inclusive. */
119 #define FOR_EACH_SUBINSN(SUBINSN, INSN) \
120 for ((SUBINSN) = SEQ_BEGIN (INSN); \
121 (SUBINSN) != NEXT_INSN (SEQ_END (INSN)); \
122 (SUBINSN) = NEXT_INSN (SUBINSN))
124 /* True if bit BIT is set in VALUE. */
125 #define BITSET_P(VALUE, BIT) (((VALUE) & (1 << (BIT))) != 0)
127 /* Classifies an address.
130 A natural register + offset address. The register satisfies
131 mips_valid_base_register_p and the offset is a const_arith_operand.
134 A LO_SUM rtx. The first operand is a valid base register and
135 the second operand is a symbolic address.
138 A signed 16-bit constant address.
141 A constant symbolic address (equivalent to CONSTANT_SYMBOLIC). */
142 enum mips_address_type {
149 /* Classifies the prototype of a builtin function. */
150 enum mips_function_type
152 MIPS_V2SF_FTYPE_V2SF,
153 MIPS_V2SF_FTYPE_V2SF_V2SF,
154 MIPS_V2SF_FTYPE_V2SF_V2SF_INT,
155 MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF,
156 MIPS_V2SF_FTYPE_SF_SF,
157 MIPS_INT_FTYPE_V2SF_V2SF,
158 MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF,
159 MIPS_INT_FTYPE_SF_SF,
160 MIPS_INT_FTYPE_DF_DF,
167 /* For MIPS DSP ASE */
169 MIPS_DI_FTYPE_DI_SI_SI,
170 MIPS_DI_FTYPE_DI_V2HI_V2HI,
171 MIPS_DI_FTYPE_DI_V4QI_V4QI,
173 MIPS_SI_FTYPE_PTR_SI,
177 MIPS_SI_FTYPE_V2HI_V2HI,
179 MIPS_SI_FTYPE_V4QI_V4QI,
182 MIPS_V2HI_FTYPE_SI_SI,
183 MIPS_V2HI_FTYPE_V2HI,
184 MIPS_V2HI_FTYPE_V2HI_SI,
185 MIPS_V2HI_FTYPE_V2HI_V2HI,
186 MIPS_V2HI_FTYPE_V4QI,
187 MIPS_V2HI_FTYPE_V4QI_V2HI,
189 MIPS_V4QI_FTYPE_V2HI_V2HI,
190 MIPS_V4QI_FTYPE_V4QI_SI,
191 MIPS_V4QI_FTYPE_V4QI_V4QI,
192 MIPS_VOID_FTYPE_SI_SI,
193 MIPS_VOID_FTYPE_V2HI_V2HI,
194 MIPS_VOID_FTYPE_V4QI_V4QI,
196 /* For MIPS DSP REV 2 ASE. */
197 MIPS_V4QI_FTYPE_V4QI,
198 MIPS_SI_FTYPE_SI_SI_SI,
199 MIPS_DI_FTYPE_DI_USI_USI,
201 MIPS_DI_FTYPE_USI_USI,
202 MIPS_V2HI_FTYPE_SI_SI_SI,
208 /* Specifies how a builtin function should be converted into rtl. */
209 enum mips_builtin_type
211 /* The builtin corresponds directly to an .md pattern. The return
212 value is mapped to operand 0 and the arguments are mapped to
213 operands 1 and above. */
216 /* The builtin corresponds directly to an .md pattern. There is no return
217 value and the arguments are mapped to operands 0 and above. */
218 MIPS_BUILTIN_DIRECT_NO_TARGET,
220 /* The builtin corresponds to a comparison instruction followed by
221 a mips_cond_move_tf_ps pattern. The first two arguments are the
222 values to compare and the second two arguments are the vector
223 operands for the movt.ps or movf.ps instruction (in assembly order). */
227 /* The builtin corresponds to a V2SF comparison instruction. Operand 0
228 of this instruction is the result of the comparison, which has mode
229 CCV2 or CCV4. The function arguments are mapped to operands 1 and
230 above. The function's return value is an SImode boolean that is
231 true under the following conditions:
233 MIPS_BUILTIN_CMP_ANY: one of the registers is true
234 MIPS_BUILTIN_CMP_ALL: all of the registers are true
235 MIPS_BUILTIN_CMP_LOWER: the first register is true
236 MIPS_BUILTIN_CMP_UPPER: the second register is true. */
237 MIPS_BUILTIN_CMP_ANY,
238 MIPS_BUILTIN_CMP_ALL,
239 MIPS_BUILTIN_CMP_UPPER,
240 MIPS_BUILTIN_CMP_LOWER,
242 /* As above, but the instruction only sets a single $fcc register. */
243 MIPS_BUILTIN_CMP_SINGLE,
245 /* For generating bposge32 branch instructions in MIPS32 DSP ASE. */
246 MIPS_BUILTIN_BPOSGE32
249 /* Invokes MACRO (COND) for each c.cond.fmt condition. */
250 #define MIPS_FP_CONDITIONS(MACRO) \
268 /* Enumerates the codes above as MIPS_FP_COND_<X>. */
269 #define DECLARE_MIPS_COND(X) MIPS_FP_COND_ ## X
270 enum mips_fp_condition {
271 MIPS_FP_CONDITIONS (DECLARE_MIPS_COND)
274 /* Index X provides the string representation of MIPS_FP_COND_<X>. */
275 #define STRINGIFY(X) #X
276 static const char *const mips_fp_conditions[] = {
277 MIPS_FP_CONDITIONS (STRINGIFY)
280 /* A function to save or store a register. The first argument is the
281 register and the second is the stack slot. */
282 typedef void (*mips_save_restore_fn) (rtx, rtx);
284 struct mips16_constant;
285 struct mips_arg_info;
286 struct mips_address_info;
287 struct mips_integer_op;
290 static bool mips_valid_base_register_p (rtx, enum machine_mode, int);
291 static bool mips_classify_address (struct mips_address_info *, rtx,
292 enum machine_mode, int);
293 static bool mips_cannot_force_const_mem (rtx);
294 static bool mips_use_blocks_for_constant_p (enum machine_mode, const_rtx);
295 static int mips_symbol_insns (enum mips_symbol_type, enum machine_mode);
296 static bool mips16_unextended_reference_p (enum machine_mode mode, rtx, rtx);
297 static rtx mips_force_temporary (rtx, rtx);
298 static rtx mips_unspec_offset_high (rtx, rtx, rtx, enum mips_symbol_type);
299 static rtx mips_add_offset (rtx, rtx, HOST_WIDE_INT);
300 static unsigned int mips_build_shift (struct mips_integer_op *, HOST_WIDE_INT);
301 static unsigned int mips_build_lower (struct mips_integer_op *,
302 unsigned HOST_WIDE_INT);
303 static unsigned int mips_build_integer (struct mips_integer_op *,
304 unsigned HOST_WIDE_INT);
305 static void mips_legitimize_const_move (enum machine_mode, rtx, rtx);
306 static int m16_check_op (rtx, int, int, int);
307 static bool mips_rtx_costs (rtx, int, int, int *);
308 static int mips_address_cost (rtx);
309 static void mips_emit_compare (enum rtx_code *, rtx *, rtx *, bool);
310 static void mips_load_call_address (rtx, rtx, int);
311 static bool mips_function_ok_for_sibcall (tree, tree);
312 static void mips_block_move_straight (rtx, rtx, HOST_WIDE_INT);
313 static void mips_adjust_block_mem (rtx, HOST_WIDE_INT, rtx *, rtx *);
314 static void mips_block_move_loop (rtx, rtx, HOST_WIDE_INT);
315 static void mips_arg_info (const CUMULATIVE_ARGS *, enum machine_mode,
316 tree, int, struct mips_arg_info *);
317 static bool mips_get_unaligned_mem (rtx *, unsigned int, int, rtx *, rtx *);
318 static void mips_set_architecture (const struct mips_cpu_info *);
319 static void mips_set_tune (const struct mips_cpu_info *);
320 static bool mips_handle_option (size_t, const char *, int);
321 static struct machine_function *mips_init_machine_status (void);
322 static void print_operand_reloc (FILE *, rtx, enum mips_symbol_context,
324 static void mips_file_start (void);
325 static int mips_small_data_pattern_1 (rtx *, void *);
326 static int mips_rewrite_small_data_1 (rtx *, void *);
327 static bool mips_function_has_gp_insn (void);
328 static unsigned int mips_global_pointer (void);
329 static bool mips_save_reg_p (unsigned int);
330 static void mips_save_restore_reg (enum machine_mode, int, HOST_WIDE_INT,
331 mips_save_restore_fn);
332 static void mips_for_each_saved_reg (HOST_WIDE_INT, mips_save_restore_fn);
333 static void mips_output_cplocal (void);
334 static void mips_emit_loadgp (void);
335 static void mips_output_function_prologue (FILE *, HOST_WIDE_INT);
336 static void mips_set_frame_expr (rtx);
337 static rtx mips_frame_set (rtx, rtx);
338 static void mips_save_reg (rtx, rtx);
339 static void mips_output_function_epilogue (FILE *, HOST_WIDE_INT);
340 static void mips_restore_reg (rtx, rtx);
341 static void mips_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
342 HOST_WIDE_INT, tree);
343 static int symbolic_expression_p (rtx);
344 static section *mips_select_rtx_section (enum machine_mode, rtx,
345 unsigned HOST_WIDE_INT);
346 static section *mips_function_rodata_section (tree);
347 static bool mips_in_small_data_p (const_tree);
348 static bool mips_use_anchors_for_symbol_p (const_rtx);
349 static int mips_fpr_return_fields (const_tree, tree *);
350 static bool mips_return_in_msb (const_tree);
351 static rtx mips_return_fpr_pair (enum machine_mode mode,
352 enum machine_mode mode1, HOST_WIDE_INT,
353 enum machine_mode mode2, HOST_WIDE_INT);
354 static rtx mips16_gp_pseudo_reg (void);
355 static void mips16_fp_args (FILE *, int, int);
356 static void build_mips16_function_stub (FILE *);
357 static rtx dump_constants_1 (enum machine_mode, rtx, rtx);
358 static void dump_constants (struct mips16_constant *, rtx);
359 static int mips16_insn_length (rtx);
360 static int mips16_rewrite_pool_refs (rtx *, void *);
361 static void mips16_lay_out_constants (void);
362 static void mips_sim_reset (struct mips_sim *);
363 static void mips_sim_init (struct mips_sim *, state_t);
364 static void mips_sim_next_cycle (struct mips_sim *);
365 static void mips_sim_wait_reg (struct mips_sim *, rtx, rtx);
366 static int mips_sim_wait_regs_2 (rtx *, void *);
367 static void mips_sim_wait_regs_1 (rtx *, void *);
368 static void mips_sim_wait_regs (struct mips_sim *, rtx);
369 static void mips_sim_wait_units (struct mips_sim *, rtx);
370 static void mips_sim_wait_insn (struct mips_sim *, rtx);
371 static void mips_sim_record_set (rtx, const_rtx, void *);
372 static void mips_sim_issue_insn (struct mips_sim *, rtx);
373 static void mips_sim_issue_nop (struct mips_sim *);
374 static void mips_sim_finish_insn (struct mips_sim *, rtx);
375 static void vr4130_avoid_branch_rt_conflict (rtx);
376 static void vr4130_align_insns (void);
377 static void mips_avoid_hazard (rtx, rtx, int *, rtx *, rtx);
378 static void mips_avoid_hazards (void);
379 static void mips_reorg (void);
380 static bool mips_strict_matching_cpu_name_p (const char *, const char *);
381 static bool mips_matching_cpu_name_p (const char *, const char *);
382 static const struct mips_cpu_info *mips_parse_cpu (const char *);
383 static const struct mips_cpu_info *mips_cpu_info_from_isa (int);
384 static bool mips_return_in_memory (const_tree, const_tree);
385 static bool mips_strict_argument_naming (CUMULATIVE_ARGS *);
386 static void mips_macc_chains_record (rtx);
387 static void mips_macc_chains_reorder (rtx *, int);
388 static void vr4130_true_reg_dependence_p_1 (rtx, const_rtx, void *);
389 static bool vr4130_true_reg_dependence_p (rtx);
390 static bool vr4130_swap_insns_p (rtx, rtx);
391 static void vr4130_reorder (rtx *, int);
392 static void mips_promote_ready (rtx *, int, int);
393 static void mips_sched_init (FILE *, int, int);
394 static int mips_sched_reorder (FILE *, int, rtx *, int *, int);
395 static int mips_variable_issue (FILE *, int, rtx, int);
396 static int mips_adjust_cost (rtx, rtx, rtx, int);
397 static int mips_issue_rate (void);
398 static int mips_multipass_dfa_lookahead (void);
399 static void mips_init_libfuncs (void);
400 static void mips_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
402 static tree mips_build_builtin_va_list (void);
403 static tree mips_gimplify_va_arg_expr (tree, tree, tree *, tree *);
404 static bool mips_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode mode,
406 static bool mips_callee_copies (CUMULATIVE_ARGS *, enum machine_mode mode,
408 static int mips_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode mode,
410 static bool mips_valid_pointer_mode (enum machine_mode);
411 static bool mips_vector_mode_supported_p (enum machine_mode);
412 static rtx mips_prepare_builtin_arg (enum insn_code, unsigned int, tree, unsigned int);
413 static rtx mips_prepare_builtin_target (enum insn_code, unsigned int, rtx);
414 static rtx mips_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
415 static void mips_init_builtins (void);
416 static rtx mips_expand_builtin_direct (enum insn_code, rtx, tree, bool);
417 static rtx mips_expand_builtin_movtf (enum mips_builtin_type,
418 enum insn_code, enum mips_fp_condition,
420 static rtx mips_expand_builtin_compare (enum mips_builtin_type,
421 enum insn_code, enum mips_fp_condition,
423 static rtx mips_expand_builtin_bposge (enum mips_builtin_type, rtx);
424 static void mips_encode_section_info (tree, rtx, int);
425 static void mips_extra_live_on_entry (bitmap);
426 static int mips_comp_type_attributes (const_tree, const_tree);
427 static void mips_set_mips16_mode (int);
428 static void mips_set_current_function (tree);
429 static int mips_mode_rep_extended (enum machine_mode, enum machine_mode);
430 static bool mips_offset_within_alignment_p (rtx, HOST_WIDE_INT);
431 static void mips_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
433 /* Structure to be filled in by compute_frame_size with register
434 save masks, and offsets for the current function. */
436 struct mips_frame_info GTY(())
438 HOST_WIDE_INT total_size; /* # bytes that the entire frame takes up */
439 HOST_WIDE_INT var_size; /* # bytes that variables take up */
440 HOST_WIDE_INT args_size; /* # bytes that outgoing arguments take up */
441 HOST_WIDE_INT cprestore_size; /* # bytes that the .cprestore slot takes up */
442 HOST_WIDE_INT gp_reg_size; /* # bytes needed to store gp regs */
443 HOST_WIDE_INT fp_reg_size; /* # bytes needed to store fp regs */
444 unsigned int mask; /* mask of saved gp registers */
445 unsigned int fmask; /* mask of saved fp registers */
446 HOST_WIDE_INT gp_save_offset; /* offset from vfp to store gp registers */
447 HOST_WIDE_INT fp_save_offset; /* offset from vfp to store fp registers */
448 HOST_WIDE_INT gp_sp_offset; /* offset from new sp to store gp registers */
449 HOST_WIDE_INT fp_sp_offset; /* offset from new sp to store fp registers */
450 bool initialized; /* true if frame size already calculated */
451 int num_gp; /* number of gp registers saved */
452 int num_fp; /* number of fp registers saved */
455 struct machine_function GTY(()) {
456 /* Pseudo-reg holding the value of $28 in a mips16 function which
457 refers to GP relative global variables. */
458 rtx mips16_gp_pseudo_rtx;
460 /* The number of extra stack bytes taken up by register varargs.
461 This area is allocated by the callee at the very top of the frame. */
464 /* Current frame information, calculated by compute_frame_size. */
465 struct mips_frame_info frame;
467 /* The register to use as the global pointer within this function. */
468 unsigned int global_pointer;
470 /* True if mips_adjust_insn_length should ignore an instruction's
472 bool ignore_hazard_length_p;
474 /* True if the whole function is suitable for .set noreorder and
476 bool all_noreorder_p;
478 /* True if the function is known to have an instruction that needs $gp. */
481 /* True if we have emitted an instruction to initialize
482 mips16_gp_pseudo_rtx. */
483 bool initialized_mips16_gp_pseudo_p;
486 /* Information about a single argument. */
489 /* True if the argument is passed in a floating-point register, or
490 would have been if we hadn't run out of registers. */
493 /* The number of words passed in registers, rounded up. */
494 unsigned int reg_words;
496 /* For EABI, the offset of the first register from GP_ARG_FIRST or
497 FP_ARG_FIRST. For other ABIs, the offset of the first register from
498 the start of the ABI's argument structure (see the CUMULATIVE_ARGS
499 comment for details).
501 The value is MAX_ARGS_IN_REGISTERS if the argument is passed entirely
503 unsigned int reg_offset;
505 /* The number of words that must be passed on the stack, rounded up. */
506 unsigned int stack_words;
508 /* The offset from the start of the stack overflow area of the argument's
509 first stack word. Only meaningful when STACK_WORDS is nonzero. */
510 unsigned int stack_offset;
514 /* Information about an address described by mips_address_type.
520 REG is the base register and OFFSET is the constant offset.
523 REG is the register that contains the high part of the address,
524 OFFSET is the symbolic address being referenced and SYMBOL_TYPE
525 is the type of OFFSET's symbol.
528 SYMBOL_TYPE is the type of symbol being referenced. */
530 struct mips_address_info
532 enum mips_address_type type;
535 enum mips_symbol_type symbol_type;
539 /* One stage in a constant building sequence. These sequences have
543 A = A CODE[1] VALUE[1]
544 A = A CODE[2] VALUE[2]
547 where A is an accumulator, each CODE[i] is a binary rtl operation
548 and each VALUE[i] is a constant integer. */
549 struct mips_integer_op {
551 unsigned HOST_WIDE_INT value;
555 /* The largest number of operations needed to load an integer constant.
556 The worst accepted case for 64-bit constants is LUI,ORI,SLL,ORI,SLL,ORI.
557 When the lowest bit is clear, we can try, but reject a sequence with
558 an extra SLL at the end. */
559 #define MIPS_MAX_INTEGER_OPS 7
561 /* Information about a MIPS16e SAVE or RESTORE instruction. */
562 struct mips16e_save_restore_info {
563 /* The number of argument registers saved by a SAVE instruction.
564 0 for RESTORE instructions. */
567 /* Bit X is set if the instruction saves or restores GPR X. */
570 /* The total number of bytes to allocate. */
574 /* Global variables for machine-dependent things. */
576 /* Threshold for data being put into the small data/bss area, instead
577 of the normal data area. */
578 int mips_section_threshold = -1;
580 /* Count the number of .file directives, so that .loc is up to date. */
581 int num_source_filenames = 0;
583 /* Count the number of sdb related labels are generated (to find block
584 start and end boundaries). */
585 int sdb_label_count = 0;
587 /* Next label # for each statement for Silicon Graphics IRIS systems. */
590 /* Name of the file containing the current function. */
591 const char *current_function_file = "";
593 /* Number of nested .set noreorder, noat, nomacro, and volatile requests. */
599 /* The next branch instruction is a branch likely, not branch normal. */
600 int mips_branch_likely;
602 /* The operands passed to the last cmpMM expander. */
605 /* The target cpu for code generation. */
606 enum processor_type mips_arch;
607 const struct mips_cpu_info *mips_arch_info;
609 /* The target cpu for optimization and scheduling. */
610 enum processor_type mips_tune;
611 const struct mips_cpu_info *mips_tune_info;
613 /* Which instruction set architecture to use. */
616 /* Which ABI to use. */
617 int mips_abi = MIPS_ABI_DEFAULT;
619 /* Cost information to use. */
620 const struct mips_rtx_cost_data *mips_cost;
622 /* Remember the ambient target flags, excluding mips16. */
623 static int mips_base_target_flags;
624 /* The mips16 command-line target flags only. */
625 static bool mips_base_mips16;
626 /* Similar copies of option settings. */
627 static int mips_base_schedule_insns; /* flag_schedule_insns */
628 static int mips_base_reorder_blocks_and_partition; /* flag_reorder... */
629 static int mips_base_align_loops; /* align_loops */
630 static int mips_base_align_jumps; /* align_jumps */
631 static int mips_base_align_functions; /* align_functions */
632 static GTY(()) int mips16_flipper;
634 /* The -mtext-loads setting. */
635 enum mips_code_readable_setting mips_code_readable = CODE_READABLE_YES;
637 /* The architecture selected by -mipsN. */
638 static const struct mips_cpu_info *mips_isa_info;
640 /* If TRUE, we split addresses into their high and low parts in the RTL. */
641 int mips_split_addresses;
643 /* Mode used for saving/restoring general purpose registers. */
644 static enum machine_mode gpr_mode;
646 /* Array giving truth value on whether or not a given hard register
647 can support a given mode. */
648 char mips_hard_regno_mode_ok[(int)MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
650 /* List of all MIPS punctuation characters used by print_operand. */
651 char mips_print_operand_punct[256];
653 /* Map GCC register number to debugger register number. */
654 int mips_dbx_regno[FIRST_PSEUDO_REGISTER];
655 int mips_dwarf_regno[FIRST_PSEUDO_REGISTER];
657 /* A copy of the original flag_delayed_branch: see override_options. */
658 static int mips_flag_delayed_branch;
660 static GTY (()) int mips_output_filename_first_time = 1;
662 /* mips_split_p[X] is true if symbols of type X can be split by
663 mips_split_symbol(). */
664 bool mips_split_p[NUM_SYMBOL_TYPES];
666 /* mips_lo_relocs[X] is the relocation to use when a symbol of type X
667 appears in a LO_SUM. It can be null if such LO_SUMs aren't valid or
668 if they are matched by a special .md file pattern. */
669 static const char *mips_lo_relocs[NUM_SYMBOL_TYPES];
671 /* Likewise for HIGHs. */
672 static const char *mips_hi_relocs[NUM_SYMBOL_TYPES];
674 /* Map hard register number to register class */
675 const enum reg_class mips_regno_to_class[] =
677 LEA_REGS, LEA_REGS, M16_NA_REGS, V1_REG,
678 M16_REGS, M16_REGS, M16_REGS, M16_REGS,
679 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
680 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
681 M16_NA_REGS, M16_NA_REGS, LEA_REGS, LEA_REGS,
682 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
683 T_REG, PIC_FN_ADDR_REG, LEA_REGS, LEA_REGS,
684 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
685 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
686 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
687 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
688 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
689 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
690 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
691 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
692 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
693 MD0_REG, MD1_REG, NO_REGS, ST_REGS,
694 ST_REGS, ST_REGS, ST_REGS, ST_REGS,
695 ST_REGS, ST_REGS, ST_REGS, NO_REGS,
696 NO_REGS, ALL_REGS, ALL_REGS, NO_REGS,
697 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
698 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
699 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
700 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
701 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
702 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
703 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
704 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
705 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
706 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
707 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
708 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
709 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
710 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
711 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
712 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
713 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
714 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
715 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
716 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
717 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
718 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
719 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
720 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
721 DSP_ACC_REGS, DSP_ACC_REGS, DSP_ACC_REGS, DSP_ACC_REGS,
722 DSP_ACC_REGS, DSP_ACC_REGS, ALL_REGS, ALL_REGS,
723 ALL_REGS, ALL_REGS, ALL_REGS, ALL_REGS
726 /* Table of machine dependent attributes. */
727 const struct attribute_spec mips_attribute_table[] =
729 { "long_call", 0, 0, false, true, true, NULL },
730 { "far", 0, 0, false, true, true, NULL },
731 { "near", 0, 0, false, true, true, NULL },
732 /* Switch MIPS16 ASE on and off per-function. */
733 { "mips16", 0, 0, false, true, true, NULL },
734 { "nomips16", 0, 0, false, true, true, NULL },
735 { NULL, 0, 0, false, false, false, NULL }
738 /* A table describing all the processors gcc knows about. Names are
739 matched in the order listed. The first mention of an ISA level is
740 taken as the canonical name for that ISA.
742 To ease comparison, please keep this table in the same order as
743 gas's mips_cpu_info_table[]. Please also make sure that
744 MIPS_ISA_LEVEL_SPEC handles all -march options correctly. */
745 const struct mips_cpu_info mips_cpu_info_table[] = {
746 /* Entries for generic ISAs */
747 { "mips1", PROCESSOR_R3000, 1 },
748 { "mips2", PROCESSOR_R6000, 2 },
749 { "mips3", PROCESSOR_R4000, 3 },
750 { "mips4", PROCESSOR_R8000, 4 },
751 { "mips32", PROCESSOR_4KC, 32 },
752 { "mips32r2", PROCESSOR_M4K, 33 },
753 { "mips64", PROCESSOR_5KC, 64 },
756 { "r3000", PROCESSOR_R3000, 1 },
757 { "r2000", PROCESSOR_R3000, 1 }, /* = r3000 */
758 { "r3900", PROCESSOR_R3900, 1 },
761 { "r6000", PROCESSOR_R6000, 2 },
764 { "r4000", PROCESSOR_R4000, 3 },
765 { "vr4100", PROCESSOR_R4100, 3 },
766 { "vr4111", PROCESSOR_R4111, 3 },
767 { "vr4120", PROCESSOR_R4120, 3 },
768 { "vr4130", PROCESSOR_R4130, 3 },
769 { "vr4300", PROCESSOR_R4300, 3 },
770 { "r4400", PROCESSOR_R4000, 3 }, /* = r4000 */
771 { "r4600", PROCESSOR_R4600, 3 },
772 { "orion", PROCESSOR_R4600, 3 }, /* = r4600 */
773 { "r4650", PROCESSOR_R4650, 3 },
776 { "r8000", PROCESSOR_R8000, 4 },
777 { "vr5000", PROCESSOR_R5000, 4 },
778 { "vr5400", PROCESSOR_R5400, 4 },
779 { "vr5500", PROCESSOR_R5500, 4 },
780 { "rm7000", PROCESSOR_R7000, 4 },
781 { "rm9000", PROCESSOR_R9000, 4 },
784 { "4kc", PROCESSOR_4KC, 32 },
785 { "4km", PROCESSOR_4KC, 32 }, /* = 4kc */
786 { "4kp", PROCESSOR_4KP, 32 },
787 { "4ksc", PROCESSOR_4KC, 32 },
789 /* MIPS32 Release 2 */
790 { "m4k", PROCESSOR_M4K, 33 },
791 { "4kec", PROCESSOR_4KC, 33 },
792 { "4kem", PROCESSOR_4KC, 33 },
793 { "4kep", PROCESSOR_4KP, 33 },
794 { "4ksd", PROCESSOR_4KC, 33 },
796 { "24kc", PROCESSOR_24KC, 33 },
797 { "24kf2_1", PROCESSOR_24KF2_1, 33 },
798 { "24kf", PROCESSOR_24KF2_1, 33 },
799 { "24kf1_1", PROCESSOR_24KF1_1, 33 },
800 { "24kfx", PROCESSOR_24KF1_1, 33 },
801 { "24kx", PROCESSOR_24KF1_1, 33 },
803 { "24kec", PROCESSOR_24KC, 33 }, /* 24K with DSP */
804 { "24kef2_1", PROCESSOR_24KF2_1, 33 },
805 { "24kef", PROCESSOR_24KF2_1, 33 },
806 { "24kef1_1", PROCESSOR_24KF1_1, 33 },
807 { "24kefx", PROCESSOR_24KF1_1, 33 },
808 { "24kex", PROCESSOR_24KF1_1, 33 },
810 { "34kc", PROCESSOR_24KC, 33 }, /* 34K with MT/DSP */
811 { "34kf2_1", PROCESSOR_24KF2_1, 33 },
812 { "34kf", PROCESSOR_24KF2_1, 33 },
813 { "34kf1_1", PROCESSOR_24KF1_1, 33 },
814 { "34kfx", PROCESSOR_24KF1_1, 33 },
815 { "34kx", PROCESSOR_24KF1_1, 33 },
817 { "74kc", PROCESSOR_74KC, 33 }, /* 74K with DSPr2 */
818 { "74kf2_1", PROCESSOR_74KF2_1, 33 },
819 { "74kf", PROCESSOR_74KF2_1, 33 },
820 { "74kf1_1", PROCESSOR_74KF1_1, 33 },
821 { "74kfx", PROCESSOR_74KF1_1, 33 },
822 { "74kx", PROCESSOR_74KF1_1, 33 },
823 { "74kf3_2", PROCESSOR_74KF3_2, 33 },
826 { "5kc", PROCESSOR_5KC, 64 },
827 { "5kf", PROCESSOR_5KF, 64 },
828 { "20kc", PROCESSOR_20KC, 64 },
829 { "sb1", PROCESSOR_SB1, 64 },
830 { "sb1a", PROCESSOR_SB1A, 64 },
831 { "sr71000", PROCESSOR_SR71000, 64 },
837 /* Default costs. If these are used for a processor we should look
838 up the actual costs. */
839 #define DEFAULT_COSTS COSTS_N_INSNS (6), /* fp_add */ \
840 COSTS_N_INSNS (7), /* fp_mult_sf */ \
841 COSTS_N_INSNS (8), /* fp_mult_df */ \
842 COSTS_N_INSNS (23), /* fp_div_sf */ \
843 COSTS_N_INSNS (36), /* fp_div_df */ \
844 COSTS_N_INSNS (10), /* int_mult_si */ \
845 COSTS_N_INSNS (10), /* int_mult_di */ \
846 COSTS_N_INSNS (69), /* int_div_si */ \
847 COSTS_N_INSNS (69), /* int_div_di */ \
848 2, /* branch_cost */ \
849 4 /* memory_latency */
851 /* Need to replace these with the costs of calling the appropriate
853 #define SOFT_FP_COSTS COSTS_N_INSNS (256), /* fp_add */ \
854 COSTS_N_INSNS (256), /* fp_mult_sf */ \
855 COSTS_N_INSNS (256), /* fp_mult_df */ \
856 COSTS_N_INSNS (256), /* fp_div_sf */ \
857 COSTS_N_INSNS (256) /* fp_div_df */
859 static struct mips_rtx_cost_data const mips_rtx_cost_optimize_size =
861 COSTS_N_INSNS (1), /* fp_add */
862 COSTS_N_INSNS (1), /* fp_mult_sf */
863 COSTS_N_INSNS (1), /* fp_mult_df */
864 COSTS_N_INSNS (1), /* fp_div_sf */
865 COSTS_N_INSNS (1), /* fp_div_df */
866 COSTS_N_INSNS (1), /* int_mult_si */
867 COSTS_N_INSNS (1), /* int_mult_di */
868 COSTS_N_INSNS (1), /* int_div_si */
869 COSTS_N_INSNS (1), /* int_div_di */
871 4 /* memory_latency */
874 static struct mips_rtx_cost_data const mips_rtx_cost_data[PROCESSOR_MAX] =
877 COSTS_N_INSNS (2), /* fp_add */
878 COSTS_N_INSNS (4), /* fp_mult_sf */
879 COSTS_N_INSNS (5), /* fp_mult_df */
880 COSTS_N_INSNS (12), /* fp_div_sf */
881 COSTS_N_INSNS (19), /* fp_div_df */
882 COSTS_N_INSNS (12), /* int_mult_si */
883 COSTS_N_INSNS (12), /* int_mult_di */
884 COSTS_N_INSNS (35), /* int_div_si */
885 COSTS_N_INSNS (35), /* int_div_di */
887 4 /* memory_latency */
892 COSTS_N_INSNS (6), /* int_mult_si */
893 COSTS_N_INSNS (6), /* int_mult_di */
894 COSTS_N_INSNS (36), /* int_div_si */
895 COSTS_N_INSNS (36), /* int_div_di */
897 4 /* memory_latency */
901 COSTS_N_INSNS (36), /* int_mult_si */
902 COSTS_N_INSNS (36), /* int_mult_di */
903 COSTS_N_INSNS (37), /* int_div_si */
904 COSTS_N_INSNS (37), /* int_div_di */
906 4 /* memory_latency */
910 COSTS_N_INSNS (4), /* int_mult_si */
911 COSTS_N_INSNS (11), /* int_mult_di */
912 COSTS_N_INSNS (36), /* int_div_si */
913 COSTS_N_INSNS (68), /* int_div_di */
915 4 /* memory_latency */
918 COSTS_N_INSNS (4), /* fp_add */
919 COSTS_N_INSNS (4), /* fp_mult_sf */
920 COSTS_N_INSNS (5), /* fp_mult_df */
921 COSTS_N_INSNS (17), /* fp_div_sf */
922 COSTS_N_INSNS (32), /* fp_div_df */
923 COSTS_N_INSNS (4), /* int_mult_si */
924 COSTS_N_INSNS (11), /* int_mult_di */
925 COSTS_N_INSNS (36), /* int_div_si */
926 COSTS_N_INSNS (68), /* int_div_di */
928 4 /* memory_latency */
931 COSTS_N_INSNS (4), /* fp_add */
932 COSTS_N_INSNS (4), /* fp_mult_sf */
933 COSTS_N_INSNS (5), /* fp_mult_df */
934 COSTS_N_INSNS (17), /* fp_div_sf */
935 COSTS_N_INSNS (32), /* fp_div_df */
936 COSTS_N_INSNS (4), /* int_mult_si */
937 COSTS_N_INSNS (7), /* int_mult_di */
938 COSTS_N_INSNS (42), /* int_div_si */
939 COSTS_N_INSNS (72), /* int_div_di */
941 4 /* memory_latency */
945 COSTS_N_INSNS (5), /* int_mult_si */
946 COSTS_N_INSNS (5), /* int_mult_di */
947 COSTS_N_INSNS (41), /* int_div_si */
948 COSTS_N_INSNS (41), /* int_div_di */
950 4 /* memory_latency */
953 COSTS_N_INSNS (8), /* fp_add */
954 COSTS_N_INSNS (8), /* fp_mult_sf */
955 COSTS_N_INSNS (10), /* fp_mult_df */
956 COSTS_N_INSNS (34), /* fp_div_sf */
957 COSTS_N_INSNS (64), /* fp_div_df */
958 COSTS_N_INSNS (5), /* int_mult_si */
959 COSTS_N_INSNS (5), /* int_mult_di */
960 COSTS_N_INSNS (41), /* int_div_si */
961 COSTS_N_INSNS (41), /* int_div_di */
963 4 /* memory_latency */
966 COSTS_N_INSNS (4), /* fp_add */
967 COSTS_N_INSNS (4), /* fp_mult_sf */
968 COSTS_N_INSNS (5), /* fp_mult_df */
969 COSTS_N_INSNS (17), /* fp_div_sf */
970 COSTS_N_INSNS (32), /* fp_div_df */
971 COSTS_N_INSNS (5), /* int_mult_si */
972 COSTS_N_INSNS (5), /* int_mult_di */
973 COSTS_N_INSNS (41), /* int_div_si */
974 COSTS_N_INSNS (41), /* int_div_di */
976 4 /* memory_latency */
980 COSTS_N_INSNS (5), /* int_mult_si */
981 COSTS_N_INSNS (5), /* int_mult_di */
982 COSTS_N_INSNS (41), /* int_div_si */
983 COSTS_N_INSNS (41), /* int_div_di */
985 4 /* memory_latency */
988 COSTS_N_INSNS (8), /* fp_add */
989 COSTS_N_INSNS (8), /* fp_mult_sf */
990 COSTS_N_INSNS (10), /* fp_mult_df */
991 COSTS_N_INSNS (34), /* fp_div_sf */
992 COSTS_N_INSNS (64), /* fp_div_df */
993 COSTS_N_INSNS (5), /* int_mult_si */
994 COSTS_N_INSNS (5), /* int_mult_di */
995 COSTS_N_INSNS (41), /* int_div_si */
996 COSTS_N_INSNS (41), /* int_div_di */
998 4 /* memory_latency */
1001 COSTS_N_INSNS (4), /* fp_add */
1002 COSTS_N_INSNS (4), /* fp_mult_sf */
1003 COSTS_N_INSNS (5), /* fp_mult_df */
1004 COSTS_N_INSNS (17), /* fp_div_sf */
1005 COSTS_N_INSNS (32), /* fp_div_df */
1006 COSTS_N_INSNS (5), /* int_mult_si */
1007 COSTS_N_INSNS (5), /* int_mult_di */
1008 COSTS_N_INSNS (41), /* int_div_si */
1009 COSTS_N_INSNS (41), /* int_div_di */
1010 1, /* branch_cost */
1011 4 /* memory_latency */
1014 COSTS_N_INSNS (6), /* fp_add */
1015 COSTS_N_INSNS (6), /* fp_mult_sf */
1016 COSTS_N_INSNS (7), /* fp_mult_df */
1017 COSTS_N_INSNS (25), /* fp_div_sf */
1018 COSTS_N_INSNS (48), /* fp_div_df */
1019 COSTS_N_INSNS (5), /* int_mult_si */
1020 COSTS_N_INSNS (5), /* int_mult_di */
1021 COSTS_N_INSNS (41), /* int_div_si */
1022 COSTS_N_INSNS (41), /* int_div_di */
1023 1, /* branch_cost */
1024 4 /* memory_latency */
1030 COSTS_N_INSNS (2), /* fp_add */
1031 COSTS_N_INSNS (4), /* fp_mult_sf */
1032 COSTS_N_INSNS (5), /* fp_mult_df */
1033 COSTS_N_INSNS (12), /* fp_div_sf */
1034 COSTS_N_INSNS (19), /* fp_div_df */
1035 COSTS_N_INSNS (2), /* int_mult_si */
1036 COSTS_N_INSNS (2), /* int_mult_di */
1037 COSTS_N_INSNS (35), /* int_div_si */
1038 COSTS_N_INSNS (35), /* int_div_di */
1039 1, /* branch_cost */
1040 4 /* memory_latency */
1043 COSTS_N_INSNS (3), /* fp_add */
1044 COSTS_N_INSNS (5), /* fp_mult_sf */
1045 COSTS_N_INSNS (6), /* fp_mult_df */
1046 COSTS_N_INSNS (15), /* fp_div_sf */
1047 COSTS_N_INSNS (16), /* fp_div_df */
1048 COSTS_N_INSNS (17), /* int_mult_si */
1049 COSTS_N_INSNS (17), /* int_mult_di */
1050 COSTS_N_INSNS (38), /* int_div_si */
1051 COSTS_N_INSNS (38), /* int_div_di */
1052 2, /* branch_cost */
1053 6 /* memory_latency */
1056 COSTS_N_INSNS (6), /* fp_add */
1057 COSTS_N_INSNS (7), /* fp_mult_sf */
1058 COSTS_N_INSNS (8), /* fp_mult_df */
1059 COSTS_N_INSNS (23), /* fp_div_sf */
1060 COSTS_N_INSNS (36), /* fp_div_df */
1061 COSTS_N_INSNS (10), /* int_mult_si */
1062 COSTS_N_INSNS (10), /* int_mult_di */
1063 COSTS_N_INSNS (69), /* int_div_si */
1064 COSTS_N_INSNS (69), /* int_div_di */
1065 2, /* branch_cost */
1066 6 /* memory_latency */
1078 /* The only costs that appear to be updated here are
1079 integer multiplication. */
1081 COSTS_N_INSNS (4), /* int_mult_si */
1082 COSTS_N_INSNS (6), /* int_mult_di */
1083 COSTS_N_INSNS (69), /* int_div_si */
1084 COSTS_N_INSNS (69), /* int_div_di */
1085 1, /* branch_cost */
1086 4 /* memory_latency */
1098 COSTS_N_INSNS (6), /* fp_add */
1099 COSTS_N_INSNS (4), /* fp_mult_sf */
1100 COSTS_N_INSNS (5), /* fp_mult_df */
1101 COSTS_N_INSNS (23), /* fp_div_sf */
1102 COSTS_N_INSNS (36), /* fp_div_df */
1103 COSTS_N_INSNS (5), /* int_mult_si */
1104 COSTS_N_INSNS (5), /* int_mult_di */
1105 COSTS_N_INSNS (36), /* int_div_si */
1106 COSTS_N_INSNS (36), /* int_div_di */
1107 1, /* branch_cost */
1108 4 /* memory_latency */
1111 COSTS_N_INSNS (6), /* fp_add */
1112 COSTS_N_INSNS (5), /* fp_mult_sf */
1113 COSTS_N_INSNS (6), /* fp_mult_df */
1114 COSTS_N_INSNS (30), /* fp_div_sf */
1115 COSTS_N_INSNS (59), /* fp_div_df */
1116 COSTS_N_INSNS (3), /* int_mult_si */
1117 COSTS_N_INSNS (4), /* int_mult_di */
1118 COSTS_N_INSNS (42), /* int_div_si */
1119 COSTS_N_INSNS (74), /* int_div_di */
1120 1, /* branch_cost */
1121 4 /* memory_latency */
1124 COSTS_N_INSNS (6), /* fp_add */
1125 COSTS_N_INSNS (5), /* fp_mult_sf */
1126 COSTS_N_INSNS (6), /* fp_mult_df */
1127 COSTS_N_INSNS (30), /* fp_div_sf */
1128 COSTS_N_INSNS (59), /* fp_div_df */
1129 COSTS_N_INSNS (5), /* int_mult_si */
1130 COSTS_N_INSNS (9), /* int_mult_di */
1131 COSTS_N_INSNS (42), /* int_div_si */
1132 COSTS_N_INSNS (74), /* int_div_di */
1133 1, /* branch_cost */
1134 4 /* memory_latency */
1137 /* The only costs that are changed here are
1138 integer multiplication. */
1139 COSTS_N_INSNS (6), /* fp_add */
1140 COSTS_N_INSNS (7), /* fp_mult_sf */
1141 COSTS_N_INSNS (8), /* fp_mult_df */
1142 COSTS_N_INSNS (23), /* fp_div_sf */
1143 COSTS_N_INSNS (36), /* fp_div_df */
1144 COSTS_N_INSNS (5), /* int_mult_si */
1145 COSTS_N_INSNS (9), /* int_mult_di */
1146 COSTS_N_INSNS (69), /* int_div_si */
1147 COSTS_N_INSNS (69), /* int_div_di */
1148 1, /* branch_cost */
1149 4 /* memory_latency */
1155 /* The only costs that are changed here are
1156 integer multiplication. */
1157 COSTS_N_INSNS (6), /* fp_add */
1158 COSTS_N_INSNS (7), /* fp_mult_sf */
1159 COSTS_N_INSNS (8), /* fp_mult_df */
1160 COSTS_N_INSNS (23), /* fp_div_sf */
1161 COSTS_N_INSNS (36), /* fp_div_df */
1162 COSTS_N_INSNS (3), /* int_mult_si */
1163 COSTS_N_INSNS (8), /* int_mult_di */
1164 COSTS_N_INSNS (69), /* int_div_si */
1165 COSTS_N_INSNS (69), /* int_div_di */
1166 1, /* branch_cost */
1167 4 /* memory_latency */
1170 /* These costs are the same as the SB-1A below. */
1171 COSTS_N_INSNS (4), /* fp_add */
1172 COSTS_N_INSNS (4), /* fp_mult_sf */
1173 COSTS_N_INSNS (4), /* fp_mult_df */
1174 COSTS_N_INSNS (24), /* fp_div_sf */
1175 COSTS_N_INSNS (32), /* fp_div_df */
1176 COSTS_N_INSNS (3), /* int_mult_si */
1177 COSTS_N_INSNS (4), /* int_mult_di */
1178 COSTS_N_INSNS (36), /* int_div_si */
1179 COSTS_N_INSNS (68), /* int_div_di */
1180 1, /* branch_cost */
1181 4 /* memory_latency */
1184 /* These costs are the same as the SB-1 above. */
1185 COSTS_N_INSNS (4), /* fp_add */
1186 COSTS_N_INSNS (4), /* fp_mult_sf */
1187 COSTS_N_INSNS (4), /* fp_mult_df */
1188 COSTS_N_INSNS (24), /* fp_div_sf */
1189 COSTS_N_INSNS (32), /* fp_div_df */
1190 COSTS_N_INSNS (3), /* int_mult_si */
1191 COSTS_N_INSNS (4), /* int_mult_di */
1192 COSTS_N_INSNS (36), /* int_div_si */
1193 COSTS_N_INSNS (68), /* int_div_di */
1194 1, /* branch_cost */
1195 4 /* memory_latency */
1202 /* If a MIPS16e SAVE or RESTORE instruction saves or restores register
1203 mips16e_s2_s8_regs[X], it must also save the registers in indexes
1204 X + 1 onwards. Likewise mips16e_a0_a3_regs. */
1205 static const unsigned char mips16e_s2_s8_regs[] = {
1206 30, 23, 22, 21, 20, 19, 18
1208 static const unsigned char mips16e_a0_a3_regs[] = {
1212 /* A list of the registers that can be saved by the MIPS16e SAVE instruction,
1213 ordered from the uppermost in memory to the lowest in memory. */
1214 static const unsigned char mips16e_save_restore_regs[] = {
1215 31, 30, 23, 22, 21, 20, 19, 18, 17, 16, 7, 6, 5, 4
1218 /* Nonzero if -march should decide the default value of
1219 MASK_SOFT_FLOAT_ABI. */
1220 #ifndef MIPS_MARCH_CONTROLS_SOFT_FLOAT
1221 #define MIPS_MARCH_CONTROLS_SOFT_FLOAT 0
1224 /* Initialize the GCC target structure. */
1225 #undef TARGET_ASM_ALIGNED_HI_OP
1226 #define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
1227 #undef TARGET_ASM_ALIGNED_SI_OP
1228 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
1229 #undef TARGET_ASM_ALIGNED_DI_OP
1230 #define TARGET_ASM_ALIGNED_DI_OP "\t.dword\t"
1232 #undef TARGET_ASM_FUNCTION_PROLOGUE
1233 #define TARGET_ASM_FUNCTION_PROLOGUE mips_output_function_prologue
1234 #undef TARGET_ASM_FUNCTION_EPILOGUE
1235 #define TARGET_ASM_FUNCTION_EPILOGUE mips_output_function_epilogue
1236 #undef TARGET_ASM_SELECT_RTX_SECTION
1237 #define TARGET_ASM_SELECT_RTX_SECTION mips_select_rtx_section
1238 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
1239 #define TARGET_ASM_FUNCTION_RODATA_SECTION mips_function_rodata_section
1241 #undef TARGET_SCHED_INIT
1242 #define TARGET_SCHED_INIT mips_sched_init
1243 #undef TARGET_SCHED_REORDER
1244 #define TARGET_SCHED_REORDER mips_sched_reorder
1245 #undef TARGET_SCHED_REORDER2
1246 #define TARGET_SCHED_REORDER2 mips_sched_reorder
1247 #undef TARGET_SCHED_VARIABLE_ISSUE
1248 #define TARGET_SCHED_VARIABLE_ISSUE mips_variable_issue
1249 #undef TARGET_SCHED_ADJUST_COST
1250 #define TARGET_SCHED_ADJUST_COST mips_adjust_cost
1251 #undef TARGET_SCHED_ISSUE_RATE
1252 #define TARGET_SCHED_ISSUE_RATE mips_issue_rate
1253 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1254 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD \
1255 mips_multipass_dfa_lookahead
1257 #undef TARGET_DEFAULT_TARGET_FLAGS
1258 #define TARGET_DEFAULT_TARGET_FLAGS \
1260 | TARGET_CPU_DEFAULT \
1261 | TARGET_ENDIAN_DEFAULT \
1262 | TARGET_FP_EXCEPTIONS_DEFAULT \
1263 | MASK_CHECK_ZERO_DIV \
1265 #undef TARGET_HANDLE_OPTION
1266 #define TARGET_HANDLE_OPTION mips_handle_option
1268 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
1269 #define TARGET_FUNCTION_OK_FOR_SIBCALL mips_function_ok_for_sibcall
1271 #undef TARGET_SET_CURRENT_FUNCTION
1272 #define TARGET_SET_CURRENT_FUNCTION mips_set_current_function
1274 #undef TARGET_VALID_POINTER_MODE
1275 #define TARGET_VALID_POINTER_MODE mips_valid_pointer_mode
1276 #undef TARGET_RTX_COSTS
1277 #define TARGET_RTX_COSTS mips_rtx_costs
1278 #undef TARGET_ADDRESS_COST
1279 #define TARGET_ADDRESS_COST mips_address_cost
1281 #undef TARGET_IN_SMALL_DATA_P
1282 #define TARGET_IN_SMALL_DATA_P mips_in_small_data_p
1284 #undef TARGET_MACHINE_DEPENDENT_REORG
1285 #define TARGET_MACHINE_DEPENDENT_REORG mips_reorg
1287 #undef TARGET_ASM_FILE_START
1288 #define TARGET_ASM_FILE_START mips_file_start
1289 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
1290 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
1292 #undef TARGET_INIT_LIBFUNCS
1293 #define TARGET_INIT_LIBFUNCS mips_init_libfuncs
1295 #undef TARGET_BUILD_BUILTIN_VA_LIST
1296 #define TARGET_BUILD_BUILTIN_VA_LIST mips_build_builtin_va_list
1297 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
1298 #define TARGET_GIMPLIFY_VA_ARG_EXPR mips_gimplify_va_arg_expr
1300 #undef TARGET_PROMOTE_FUNCTION_ARGS
1301 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
1302 #undef TARGET_PROMOTE_FUNCTION_RETURN
1303 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
1304 #undef TARGET_PROMOTE_PROTOTYPES
1305 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
1307 #undef TARGET_RETURN_IN_MEMORY
1308 #define TARGET_RETURN_IN_MEMORY mips_return_in_memory
1309 #undef TARGET_RETURN_IN_MSB
1310 #define TARGET_RETURN_IN_MSB mips_return_in_msb
1312 #undef TARGET_ASM_OUTPUT_MI_THUNK
1313 #define TARGET_ASM_OUTPUT_MI_THUNK mips_output_mi_thunk
1314 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
1315 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
1317 #undef TARGET_SETUP_INCOMING_VARARGS
1318 #define TARGET_SETUP_INCOMING_VARARGS mips_setup_incoming_varargs
1319 #undef TARGET_STRICT_ARGUMENT_NAMING
1320 #define TARGET_STRICT_ARGUMENT_NAMING mips_strict_argument_naming
1321 #undef TARGET_MUST_PASS_IN_STACK
1322 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
1323 #undef TARGET_PASS_BY_REFERENCE
1324 #define TARGET_PASS_BY_REFERENCE mips_pass_by_reference
1325 #undef TARGET_CALLEE_COPIES
1326 #define TARGET_CALLEE_COPIES mips_callee_copies
1327 #undef TARGET_ARG_PARTIAL_BYTES
1328 #define TARGET_ARG_PARTIAL_BYTES mips_arg_partial_bytes
1330 #undef TARGET_MODE_REP_EXTENDED
1331 #define TARGET_MODE_REP_EXTENDED mips_mode_rep_extended
1333 #undef TARGET_VECTOR_MODE_SUPPORTED_P
1334 #define TARGET_VECTOR_MODE_SUPPORTED_P mips_vector_mode_supported_p
1336 #undef TARGET_INIT_BUILTINS
1337 #define TARGET_INIT_BUILTINS mips_init_builtins
1338 #undef TARGET_EXPAND_BUILTIN
1339 #define TARGET_EXPAND_BUILTIN mips_expand_builtin
1341 #undef TARGET_HAVE_TLS
1342 #define TARGET_HAVE_TLS HAVE_AS_TLS
1344 #undef TARGET_CANNOT_FORCE_CONST_MEM
1345 #define TARGET_CANNOT_FORCE_CONST_MEM mips_cannot_force_const_mem
1347 #undef TARGET_ENCODE_SECTION_INFO
1348 #define TARGET_ENCODE_SECTION_INFO mips_encode_section_info
1350 #undef TARGET_ATTRIBUTE_TABLE
1351 #define TARGET_ATTRIBUTE_TABLE mips_attribute_table
1353 #undef TARGET_EXTRA_LIVE_ON_ENTRY
1354 #define TARGET_EXTRA_LIVE_ON_ENTRY mips_extra_live_on_entry
1356 #undef TARGET_MIN_ANCHOR_OFFSET
1357 #define TARGET_MIN_ANCHOR_OFFSET -32768
1358 #undef TARGET_MAX_ANCHOR_OFFSET
1359 #define TARGET_MAX_ANCHOR_OFFSET 32767
1360 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1361 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P mips_use_blocks_for_constant_p
1362 #undef TARGET_USE_ANCHORS_FOR_SYMBOL_P
1363 #define TARGET_USE_ANCHORS_FOR_SYMBOL_P mips_use_anchors_for_symbol_p
1365 #undef TARGET_COMP_TYPE_ATTRIBUTES
1366 #define TARGET_COMP_TYPE_ATTRIBUTES mips_comp_type_attributes
1368 #ifdef HAVE_AS_DTPRELWORD
1369 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1370 #define TARGET_ASM_OUTPUT_DWARF_DTPREL mips_output_dwarf_dtprel
1373 struct gcc_target targetm = TARGET_INITIALIZER;
1376 /* Predicates to test for presence of "near" and "far"/"long_call"
1377 attributes on the given TYPE. */
1380 mips_near_type_p (const_tree type)
1382 return lookup_attribute ("near", TYPE_ATTRIBUTES (type)) != NULL;
1386 mips_far_type_p (const_tree type)
1388 return (lookup_attribute ("long_call", TYPE_ATTRIBUTES (type)) != NULL
1389 || lookup_attribute ("far", TYPE_ATTRIBUTES (type)) != NULL);
1392 /* Similar predicates for "mips16"/"nomips16" attributes. */
1395 mips_mips16_type_p (const_tree type)
1397 return lookup_attribute ("mips16", TYPE_ATTRIBUTES (type)) != NULL;
1401 mips_nomips16_type_p (const_tree type)
1403 return lookup_attribute ("nomips16", TYPE_ATTRIBUTES (type)) != NULL;
1406 /* Return 0 if the attributes for two types are incompatible, 1 if they
1407 are compatible, and 2 if they are nearly compatible (which causes a
1408 warning to be generated). */
1411 mips_comp_type_attributes (const_tree type1, const_tree type2)
1413 /* Check for mismatch of non-default calling convention. */
1414 if (TREE_CODE (type1) != FUNCTION_TYPE)
1417 /* Disallow mixed near/far attributes. */
1418 if (mips_far_type_p (type1) && mips_near_type_p (type2))
1420 if (mips_near_type_p (type1) && mips_far_type_p (type2))
1423 /* Mips16/nomips16 attributes must match exactly. */
1424 if (mips_nomips16_type_p (type1) != mips_nomips16_type_p (type2)
1425 || mips_mips16_type_p (type1) != mips_mips16_type_p (type2))
1431 /* If X is a PLUS of a CONST_INT, return the two terms in *BASE_PTR
1432 and *OFFSET_PTR. Return X in *BASE_PTR and 0 in *OFFSET_PTR otherwise. */
1435 mips_split_plus (rtx x, rtx *base_ptr, HOST_WIDE_INT *offset_ptr)
1437 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
1439 *base_ptr = XEXP (x, 0);
1440 *offset_ptr = INTVAL (XEXP (x, 1));
1449 /* Return true if SYMBOL_REF X is associated with a global symbol
1450 (in the STB_GLOBAL sense). */
1453 mips_global_symbol_p (const_rtx x)
1455 const_tree const decl = SYMBOL_REF_DECL (x);
1458 return !SYMBOL_REF_LOCAL_P (x);
1460 /* Weakref symbols are not TREE_PUBLIC, but their targets are global
1461 or weak symbols. Relocations in the object file will be against
1462 the target symbol, so it's that symbol's binding that matters here. */
1463 return DECL_P (decl) && (TREE_PUBLIC (decl) || DECL_WEAK (decl));
1466 /* Return true if SYMBOL_REF X binds locally. */
1469 mips_symbol_binds_local_p (const_rtx x)
1471 return (SYMBOL_REF_DECL (x)
1472 ? targetm.binds_local_p (SYMBOL_REF_DECL (x))
1473 : SYMBOL_REF_LOCAL_P (x));
1476 /* Return the method that should be used to access SYMBOL_REF or
1477 LABEL_REF X in context CONTEXT. */
1479 static enum mips_symbol_type
1480 mips_classify_symbol (const_rtx x, enum mips_symbol_context context)
1483 return SYMBOL_GOT_DISP;
1485 if (GET_CODE (x) == LABEL_REF)
1487 /* LABEL_REFs are used for jump tables as well as text labels.
1488 Only return SYMBOL_PC_RELATIVE if we know the label is in
1489 the text section. */
1490 if (TARGET_MIPS16_SHORT_JUMP_TABLES)
1491 return SYMBOL_PC_RELATIVE;
1492 if (TARGET_ABICALLS && !TARGET_ABSOLUTE_ABICALLS)
1493 return SYMBOL_GOT_PAGE_OFST;
1494 return SYMBOL_ABSOLUTE;
1497 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1499 if (SYMBOL_REF_TLS_MODEL (x))
1502 if (CONSTANT_POOL_ADDRESS_P (x))
1504 if (TARGET_MIPS16_TEXT_LOADS)
1505 return SYMBOL_PC_RELATIVE;
1507 if (TARGET_MIPS16_PCREL_LOADS && context == SYMBOL_CONTEXT_MEM)
1508 return SYMBOL_PC_RELATIVE;
1510 if (!TARGET_EMBEDDED_DATA
1511 && GET_MODE_SIZE (get_pool_mode (x)) <= mips_section_threshold)
1512 return SYMBOL_GP_RELATIVE;
1515 /* Do not use small-data accesses for weak symbols; they may end up
1517 if (SYMBOL_REF_SMALL_P (x)
1518 && !SYMBOL_REF_WEAK (x))
1519 return SYMBOL_GP_RELATIVE;
1521 /* Don't use GOT accesses for locally-binding symbols when -mno-shared
1524 && !(TARGET_ABSOLUTE_ABICALLS && mips_symbol_binds_local_p (x)))
1526 /* There are three cases to consider:
1528 - o32 PIC (either with or without explicit relocs)
1529 - n32/n64 PIC without explicit relocs
1530 - n32/n64 PIC with explicit relocs
1532 In the first case, both local and global accesses will use an
1533 R_MIPS_GOT16 relocation. We must correctly predict which of
1534 the two semantics (local or global) the assembler and linker
1535 will apply. The choice depends on the symbol's binding rather
1536 than its visibility.
1538 In the second case, the assembler will not use R_MIPS_GOT16
1539 relocations, but it chooses between local and global accesses
1540 in the same way as for o32 PIC.
1542 In the third case we have more freedom since both forms of
1543 access will work for any kind of symbol. However, there seems
1544 little point in doing things differently. */
1545 if (mips_global_symbol_p (x))
1546 return SYMBOL_GOT_DISP;
1548 return SYMBOL_GOT_PAGE_OFST;
1551 if (TARGET_MIPS16_PCREL_LOADS && context != SYMBOL_CONTEXT_CALL)
1552 return SYMBOL_FORCE_TO_MEM;
1553 return SYMBOL_ABSOLUTE;
1556 /* Return true if OFFSET is within the range [0, ALIGN), where ALIGN
1557 is the alignment (in bytes) of SYMBOL_REF X. */
1560 mips_offset_within_alignment_p (rtx x, HOST_WIDE_INT offset)
1562 /* If for some reason we can't get the alignment for the
1563 symbol, initializing this to one means we will only accept
1565 HOST_WIDE_INT align = 1;
1568 /* Get the alignment of the symbol we're referring to. */
1569 t = SYMBOL_REF_DECL (x);
1571 align = DECL_ALIGN_UNIT (t);
1573 return offset >= 0 && offset < align;
1576 /* Return true if X is a symbolic constant that can be used in context
1577 CONTEXT. If it is, store the type of the symbol in *SYMBOL_TYPE. */
1580 mips_symbolic_constant_p (rtx x, enum mips_symbol_context context,
1581 enum mips_symbol_type *symbol_type)
1585 split_const (x, &x, &offset);
1586 if (UNSPEC_ADDRESS_P (x))
1588 *symbol_type = UNSPEC_ADDRESS_TYPE (x);
1589 x = UNSPEC_ADDRESS (x);
1591 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1593 *symbol_type = mips_classify_symbol (x, context);
1594 if (*symbol_type == SYMBOL_TLS)
1600 if (offset == const0_rtx)
1603 /* Check whether a nonzero offset is valid for the underlying
1605 switch (*symbol_type)
1607 case SYMBOL_ABSOLUTE:
1608 case SYMBOL_FORCE_TO_MEM:
1609 case SYMBOL_32_HIGH:
1610 case SYMBOL_64_HIGH:
1613 /* If the target has 64-bit pointers and the object file only
1614 supports 32-bit symbols, the values of those symbols will be
1615 sign-extended. In this case we can't allow an arbitrary offset
1616 in case the 32-bit value X + OFFSET has a different sign from X. */
1617 if (Pmode == DImode && !ABI_HAS_64BIT_SYMBOLS)
1618 return offset_within_block_p (x, INTVAL (offset));
1620 /* In other cases the relocations can handle any offset. */
1623 case SYMBOL_PC_RELATIVE:
1624 /* Allow constant pool references to be converted to LABEL+CONSTANT.
1625 In this case, we no longer have access to the underlying constant,
1626 but the original symbol-based access was known to be valid. */
1627 if (GET_CODE (x) == LABEL_REF)
1632 case SYMBOL_GP_RELATIVE:
1633 /* Make sure that the offset refers to something within the
1634 same object block. This should guarantee that the final
1635 PC- or GP-relative offset is within the 16-bit limit. */
1636 return offset_within_block_p (x, INTVAL (offset));
1638 case SYMBOL_GOT_PAGE_OFST:
1639 case SYMBOL_GOTOFF_PAGE:
1640 /* If the symbol is global, the GOT entry will contain the symbol's
1641 address, and we will apply a 16-bit offset after loading it.
1642 If the symbol is local, the linker should provide enough local
1643 GOT entries for a 16-bit offset, but larger offsets may lead
1645 return SMALL_INT (offset);
1649 /* There is no carry between the HI and LO REL relocations, so the
1650 offset is only valid if we know it won't lead to such a carry. */
1651 return mips_offset_within_alignment_p (x, INTVAL (offset));
1653 case SYMBOL_GOT_DISP:
1654 case SYMBOL_GOTOFF_DISP:
1655 case SYMBOL_GOTOFF_CALL:
1656 case SYMBOL_GOTOFF_LOADGP:
1659 case SYMBOL_GOTTPREL:
1668 /* This function is used to implement REG_MODE_OK_FOR_BASE_P. */
1671 mips_regno_mode_ok_for_base_p (int regno, enum machine_mode mode, int strict)
1673 if (!HARD_REGISTER_NUM_P (regno))
1677 regno = reg_renumber[regno];
1680 /* These fake registers will be eliminated to either the stack or
1681 hard frame pointer, both of which are usually valid base registers.
1682 Reload deals with the cases where the eliminated form isn't valid. */
1683 if (regno == ARG_POINTER_REGNUM || regno == FRAME_POINTER_REGNUM)
1686 /* In mips16 mode, the stack pointer can only address word and doubleword
1687 values, nothing smaller. There are two problems here:
1689 (a) Instantiating virtual registers can introduce new uses of the
1690 stack pointer. If these virtual registers are valid addresses,
1691 the stack pointer should be too.
1693 (b) Most uses of the stack pointer are not made explicit until
1694 FRAME_POINTER_REGNUM and ARG_POINTER_REGNUM have been eliminated.
1695 We don't know until that stage whether we'll be eliminating to the
1696 stack pointer (which needs the restriction) or the hard frame
1697 pointer (which doesn't).
1699 All in all, it seems more consistent to only enforce this restriction
1700 during and after reload. */
1701 if (TARGET_MIPS16 && regno == STACK_POINTER_REGNUM)
1702 return !strict || GET_MODE_SIZE (mode) == 4 || GET_MODE_SIZE (mode) == 8;
1704 return TARGET_MIPS16 ? M16_REG_P (regno) : GP_REG_P (regno);
1708 /* Return true if X is a valid base register for the given mode.
1709 Allow only hard registers if STRICT. */
1712 mips_valid_base_register_p (rtx x, enum machine_mode mode, int strict)
1714 if (!strict && GET_CODE (x) == SUBREG)
1718 && mips_regno_mode_ok_for_base_p (REGNO (x), mode, strict));
1722 /* Return true if X is a valid address for machine mode MODE. If it is,
1723 fill in INFO appropriately. STRICT is true if we should only accept
1724 hard base registers. */
1727 mips_classify_address (struct mips_address_info *info, rtx x,
1728 enum machine_mode mode, int strict)
1730 switch (GET_CODE (x))
1734 info->type = ADDRESS_REG;
1736 info->offset = const0_rtx;
1737 return mips_valid_base_register_p (info->reg, mode, strict);
1740 info->type = ADDRESS_REG;
1741 info->reg = XEXP (x, 0);
1742 info->offset = XEXP (x, 1);
1743 return (mips_valid_base_register_p (info->reg, mode, strict)
1744 && const_arith_operand (info->offset, VOIDmode));
1747 info->type = ADDRESS_LO_SUM;
1748 info->reg = XEXP (x, 0);
1749 info->offset = XEXP (x, 1);
1750 return (mips_valid_base_register_p (info->reg, mode, strict)
1751 && mips_symbolic_constant_p (info->offset, SYMBOL_CONTEXT_MEM,
1753 && mips_symbol_insns (info->symbol_type, mode) > 0
1754 && mips_lo_relocs[info->symbol_type] != 0);
1757 /* Small-integer addresses don't occur very often, but they
1758 are legitimate if $0 is a valid base register. */
1759 info->type = ADDRESS_CONST_INT;
1760 return !TARGET_MIPS16 && SMALL_INT (x);
1765 info->type = ADDRESS_SYMBOLIC;
1766 return (mips_symbolic_constant_p (x, SYMBOL_CONTEXT_MEM,
1768 && mips_symbol_insns (info->symbol_type, mode) > 0
1769 && !mips_split_p[info->symbol_type]);
1776 /* Return true if X is a thread-local symbol. */
1779 mips_tls_operand_p (rtx x)
1781 return GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0;
1784 /* Return true if X can not be forced into a constant pool. */
1787 mips_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
1789 return mips_tls_operand_p (*x);
1792 /* Return true if X can not be forced into a constant pool. */
1795 mips_cannot_force_const_mem (rtx x)
1801 /* As an optimization, reject constants that mips_legitimize_move
1804 Suppose we have a multi-instruction sequence that loads constant C
1805 into register R. If R does not get allocated a hard register, and
1806 R is used in an operand that allows both registers and memory
1807 references, reload will consider forcing C into memory and using
1808 one of the instruction's memory alternatives. Returning false
1809 here will force it to use an input reload instead. */
1810 if (GET_CODE (x) == CONST_INT)
1813 split_const (x, &base, &offset);
1814 if (symbolic_operand (base, VOIDmode) && SMALL_INT (offset))
1818 if (TARGET_HAVE_TLS && for_each_rtx (&x, &mips_tls_symbol_ref_1, 0))
1824 /* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. We can't use blocks for
1825 constants when we're using a per-function constant pool. */
1828 mips_use_blocks_for_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED,
1829 const_rtx x ATTRIBUTE_UNUSED)
1831 return !TARGET_MIPS16_PCREL_LOADS;
1834 /* Like mips_symbol_insns, but treat extended MIPS16 instructions as a
1835 single instruction. We rely on the fact that, in the worst case,
1836 all instructions involved in a MIPS16 address calculation are usually
1840 mips_symbol_insns_1 (enum mips_symbol_type type, enum machine_mode mode)
1844 case SYMBOL_ABSOLUTE:
1845 /* When using 64-bit symbols, we need 5 preparatory instructions,
1848 lui $at,%highest(symbol)
1849 daddiu $at,$at,%higher(symbol)
1851 daddiu $at,$at,%hi(symbol)
1854 The final address is then $at + %lo(symbol). With 32-bit
1855 symbols we just need a preparatory lui for normal mode and
1856 a preparatory "li; sll" for MIPS16. */
1857 return ABI_HAS_64BIT_SYMBOLS ? 6 : TARGET_MIPS16 ? 3 : 2;
1859 case SYMBOL_GP_RELATIVE:
1860 /* Treat GP-relative accesses as taking a single instruction on
1861 MIPS16 too; the copy of $gp can often be shared. */
1864 case SYMBOL_PC_RELATIVE:
1865 /* PC-relative constants can be only be used with addiupc,
1867 if (mode == MAX_MACHINE_MODE
1868 || GET_MODE_SIZE (mode) == 4
1869 || GET_MODE_SIZE (mode) == 8)
1872 /* The constant must be loaded using addiupc first. */
1875 case SYMBOL_FORCE_TO_MEM:
1876 /* The constant must be loaded from the constant pool. */
1879 case SYMBOL_GOT_DISP:
1880 /* The constant will have to be loaded from the GOT before it
1881 is used in an address. */
1882 if (mode != MAX_MACHINE_MODE)
1887 case SYMBOL_GOT_PAGE_OFST:
1888 /* Unless -funit-at-a-time is in effect, we can't be sure whether
1889 the local/global classification is accurate. See override_options
1892 The worst cases are:
1894 (1) For local symbols when generating o32 or o64 code. The assembler
1900 ...and the final address will be $at + %lo(symbol).
1902 (2) For global symbols when -mxgot. The assembler will use:
1904 lui $at,%got_hi(symbol)
1907 ...and the final address will be $at + %got_lo(symbol). */
1910 case SYMBOL_GOTOFF_PAGE:
1911 case SYMBOL_GOTOFF_DISP:
1912 case SYMBOL_GOTOFF_CALL:
1913 case SYMBOL_GOTOFF_LOADGP:
1914 case SYMBOL_32_HIGH:
1915 case SYMBOL_64_HIGH:
1921 case SYMBOL_GOTTPREL:
1924 /* A 16-bit constant formed by a single relocation, or a 32-bit
1925 constant formed from a high 16-bit relocation and a low 16-bit
1926 relocation. Use mips_split_p to determine which. */
1927 return !mips_split_p[type] ? 1 : TARGET_MIPS16 ? 3 : 2;
1930 /* We don't treat a bare TLS symbol as a constant. */
1936 /* If MODE is MAX_MACHINE_MODE, return the number of instructions needed
1937 to load symbols of type TYPE into a register. Return 0 if the given
1938 type of symbol cannot be used as an immediate operand.
1940 Otherwise, return the number of instructions needed to load or store
1941 values of mode MODE to or from addresses of type TYPE. Return 0 if
1942 the given type of symbol is not valid in addresses.
1944 In both cases, treat extended MIPS16 instructions as two instructions. */
1947 mips_symbol_insns (enum mips_symbol_type type, enum machine_mode mode)
1949 return mips_symbol_insns_1 (type, mode) * (TARGET_MIPS16 ? 2 : 1);
1952 /* Return true if X is a legitimate $sp-based address for mode MDOE. */
1955 mips_stack_address_p (rtx x, enum machine_mode mode)
1957 struct mips_address_info addr;
1959 return (mips_classify_address (&addr, x, mode, false)
1960 && addr.type == ADDRESS_REG
1961 && addr.reg == stack_pointer_rtx);
1964 /* Return true if a value at OFFSET bytes from BASE can be accessed
1965 using an unextended mips16 instruction. MODE is the mode of the
1968 Usually the offset in an unextended instruction is a 5-bit field.
1969 The offset is unsigned and shifted left once for HIs, twice
1970 for SIs, and so on. An exception is SImode accesses off the
1971 stack pointer, which have an 8-bit immediate field. */
1974 mips16_unextended_reference_p (enum machine_mode mode, rtx base, rtx offset)
1977 && GET_CODE (offset) == CONST_INT
1978 && INTVAL (offset) >= 0
1979 && (INTVAL (offset) & (GET_MODE_SIZE (mode) - 1)) == 0)
1981 if (GET_MODE_SIZE (mode) == 4 && base == stack_pointer_rtx)
1982 return INTVAL (offset) < 256 * GET_MODE_SIZE (mode);
1983 return INTVAL (offset) < 32 * GET_MODE_SIZE (mode);
1989 /* Return the number of instructions needed to load or store a value
1990 of mode MODE at X. Return 0 if X isn't valid for MODE. Assume that
1991 multiword moves may need to be split into word moves if MIGHT_SPLIT_P,
1992 otherwise assume that a single load or store is enough.
1994 For mips16 code, count extended instructions as two instructions. */
1997 mips_address_insns (rtx x, enum machine_mode mode, bool might_split_p)
1999 struct mips_address_info addr;
2002 /* BLKmode is used for single unaligned loads and stores and should
2003 not count as a multiword mode. (GET_MODE_SIZE (BLKmode) is pretty
2004 meaningless, so we have to single it out as a special case one way
2006 if (mode != BLKmode && might_split_p)
2007 factor = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2011 if (mips_classify_address (&addr, x, mode, false))
2016 && !mips16_unextended_reference_p (mode, addr.reg, addr.offset))
2020 case ADDRESS_LO_SUM:
2021 return (TARGET_MIPS16 ? factor * 2 : factor);
2023 case ADDRESS_CONST_INT:
2026 case ADDRESS_SYMBOLIC:
2027 return factor * mips_symbol_insns (addr.symbol_type, mode);
2033 /* Likewise for constant X. */
2036 mips_const_insns (rtx x)
2038 struct mips_integer_op codes[MIPS_MAX_INTEGER_OPS];
2039 enum mips_symbol_type symbol_type;
2042 switch (GET_CODE (x))
2045 if (!mips_symbolic_constant_p (XEXP (x, 0), SYMBOL_CONTEXT_LEA,
2047 || !mips_split_p[symbol_type])
2050 /* This is simply an lui for normal mode. It is an extended
2051 "li" followed by an extended "sll" for MIPS16. */
2052 return TARGET_MIPS16 ? 4 : 1;
2056 /* Unsigned 8-bit constants can be loaded using an unextended
2057 LI instruction. Unsigned 16-bit constants can be loaded
2058 using an extended LI. Negative constants must be loaded
2059 using LI and then negated. */
2060 return (INTVAL (x) >= 0 && INTVAL (x) < 256 ? 1
2061 : SMALL_OPERAND_UNSIGNED (INTVAL (x)) ? 2
2062 : INTVAL (x) > -256 && INTVAL (x) < 0 ? 2
2063 : SMALL_OPERAND_UNSIGNED (-INTVAL (x)) ? 3
2066 return mips_build_integer (codes, INTVAL (x));
2070 return (!TARGET_MIPS16 && x == CONST0_RTX (GET_MODE (x)) ? 1 : 0);
2076 /* See if we can refer to X directly. */
2077 if (mips_symbolic_constant_p (x, SYMBOL_CONTEXT_LEA, &symbol_type))
2078 return mips_symbol_insns (symbol_type, MAX_MACHINE_MODE);
2080 /* Otherwise try splitting the constant into a base and offset.
2081 16-bit offsets can be added using an extra addiu. Larger offsets
2082 must be calculated separately and then added to the base. */
2083 split_const (x, &x, &offset);
2086 int n = mips_const_insns (x);
2089 if (SMALL_INT (offset))
2092 return n + 1 + mips_build_integer (codes, INTVAL (offset));
2099 return mips_symbol_insns (mips_classify_symbol (x, SYMBOL_CONTEXT_LEA),
2108 /* Return the number of instructions needed to implement INSN,
2109 given that it loads from or stores to MEM. Count extended
2110 mips16 instructions as two instructions. */
2113 mips_load_store_insns (rtx mem, rtx insn)
2115 enum machine_mode mode;
2119 gcc_assert (MEM_P (mem));
2120 mode = GET_MODE (mem);
2122 /* Try to prove that INSN does not need to be split. */
2123 might_split_p = true;
2124 if (GET_MODE_BITSIZE (mode) == 64)
2126 set = single_set (insn);
2127 if (set && !mips_split_64bit_move_p (SET_DEST (set), SET_SRC (set)))
2128 might_split_p = false;
2131 return mips_address_insns (XEXP (mem, 0), mode, might_split_p);
2135 /* Return the number of instructions needed for an integer division. */
2138 mips_idiv_insns (void)
2143 if (TARGET_CHECK_ZERO_DIV)
2145 if (GENERATE_DIVIDE_TRAPS)
2151 if (TARGET_FIX_R4000 || TARGET_FIX_R4400)
2156 /* This function is used to implement GO_IF_LEGITIMATE_ADDRESS. It
2157 returns a nonzero value if X is a legitimate address for a memory
2158 operand of the indicated MODE. STRICT is nonzero if this function
2159 is called during reload. */
2162 mips_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2164 struct mips_address_info addr;
2166 return mips_classify_address (&addr, x, mode, strict);
2169 /* Emit a move from SRC to DEST. Assume that the move expanders can
2170 handle all moves if !can_create_pseudo_p (). The distinction is
2171 important because, unlike emit_move_insn, the move expanders know
2172 how to force Pmode objects into the constant pool even when the
2173 constant pool address is not itself legitimate. */
2176 mips_emit_move (rtx dest, rtx src)
2178 return (can_create_pseudo_p ()
2179 ? emit_move_insn (dest, src)
2180 : emit_move_insn_1 (dest, src));
2183 /* Copy VALUE to a register and return that register. If new psuedos
2184 are allowed, copy it into a new register, otherwise use DEST. */
2187 mips_force_temporary (rtx dest, rtx value)
2189 if (can_create_pseudo_p ())
2190 return force_reg (Pmode, value);
2193 mips_emit_move (copy_rtx (dest), value);
2199 /* If MODE is MAX_MACHINE_MODE, ADDR appears as a move operand, otherwise
2200 it appears in a MEM of that mode. Return true if ADDR is a legitimate
2201 constant in that context and can be split into a high part and a LO_SUM.
2202 If so, and if LO_SUM_OUT is nonnull, emit the high part and return
2203 the LO_SUM in *LO_SUM_OUT. Leave *LO_SUM_OUT unchanged otherwise.
2205 TEMP is as for mips_force_temporary and is used to load the high
2206 part into a register. */
2209 mips_split_symbol (rtx temp, rtx addr, enum machine_mode mode, rtx *lo_sum_out)
2211 enum mips_symbol_context context;
2212 enum mips_symbol_type symbol_type;
2215 context = (mode == MAX_MACHINE_MODE
2216 ? SYMBOL_CONTEXT_LEA
2217 : SYMBOL_CONTEXT_MEM);
2218 if (!mips_symbolic_constant_p (addr, context, &symbol_type)
2219 || mips_symbol_insns (symbol_type, mode) == 0
2220 || !mips_split_p[symbol_type])
2225 if (symbol_type == SYMBOL_GP_RELATIVE)
2227 if (!can_create_pseudo_p ())
2229 emit_insn (gen_load_const_gp (copy_rtx (temp)));
2233 high = mips16_gp_pseudo_reg ();
2237 high = gen_rtx_HIGH (Pmode, copy_rtx (addr));
2238 high = mips_force_temporary (temp, high);
2240 *lo_sum_out = gen_rtx_LO_SUM (Pmode, high, addr);
2246 /* Wrap symbol or label BASE in an unspec address of type SYMBOL_TYPE
2247 and add CONST_INT OFFSET to the result. */
2250 mips_unspec_address_offset (rtx base, rtx offset,
2251 enum mips_symbol_type symbol_type)
2253 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base),
2254 UNSPEC_ADDRESS_FIRST + symbol_type);
2255 if (offset != const0_rtx)
2256 base = gen_rtx_PLUS (Pmode, base, offset);
2257 return gen_rtx_CONST (Pmode, base);
2260 /* Return an UNSPEC address with underlying address ADDRESS and symbol
2261 type SYMBOL_TYPE. */
2264 mips_unspec_address (rtx address, enum mips_symbol_type symbol_type)
2268 split_const (address, &base, &offset);
2269 return mips_unspec_address_offset (base, offset, symbol_type);
2273 /* If mips_unspec_address (ADDR, SYMBOL_TYPE) is a 32-bit value, add the
2274 high part to BASE and return the result. Just return BASE otherwise.
2275 TEMP is available as a temporary register if needed.
2277 The returned expression can be used as the first operand to a LO_SUM. */
2280 mips_unspec_offset_high (rtx temp, rtx base, rtx addr,
2281 enum mips_symbol_type symbol_type)
2283 if (mips_split_p[symbol_type])
2285 addr = gen_rtx_HIGH (Pmode, mips_unspec_address (addr, symbol_type));
2286 addr = mips_force_temporary (temp, addr);
2287 return mips_force_temporary (temp, gen_rtx_PLUS (Pmode, addr, base));
2293 /* Return a legitimate address for REG + OFFSET. TEMP is as for
2294 mips_force_temporary; it is only needed when OFFSET is not a
2298 mips_add_offset (rtx temp, rtx reg, HOST_WIDE_INT offset)
2300 if (!SMALL_OPERAND (offset))
2305 /* Load the full offset into a register so that we can use
2306 an unextended instruction for the address itself. */
2307 high = GEN_INT (offset);
2312 /* Leave OFFSET as a 16-bit offset and put the excess in HIGH. */
2313 high = GEN_INT (CONST_HIGH_PART (offset));
2314 offset = CONST_LOW_PART (offset);
2316 high = mips_force_temporary (temp, high);
2317 reg = mips_force_temporary (temp, gen_rtx_PLUS (Pmode, high, reg));
2319 return plus_constant (reg, offset);
2322 /* Emit a call to __tls_get_addr. SYM is the TLS symbol we are
2323 referencing, and TYPE is the symbol type to use (either global
2324 dynamic or local dynamic). V0 is an RTX for the return value
2325 location. The entire insn sequence is returned. */
2327 static GTY(()) rtx mips_tls_symbol;
2330 mips_call_tls_get_addr (rtx sym, enum mips_symbol_type type, rtx v0)
2332 rtx insn, loc, tga, a0;
2334 a0 = gen_rtx_REG (Pmode, GP_ARG_FIRST);
2336 if (!mips_tls_symbol)
2337 mips_tls_symbol = init_one_libfunc ("__tls_get_addr");
2339 loc = mips_unspec_address (sym, type);
2343 emit_insn (gen_rtx_SET (Pmode, a0,
2344 gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, loc)));
2345 tga = gen_rtx_MEM (Pmode, mips_tls_symbol);
2346 insn = emit_call_insn (gen_call_value (v0, tga, const0_rtx, const0_rtx));
2347 CONST_OR_PURE_CALL_P (insn) = 1;
2348 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), v0);
2349 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), a0);
2350 insn = get_insns ();
2357 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
2358 return value will be a valid address and move_operand (either a REG
2362 mips_legitimize_tls_address (rtx loc)
2364 rtx dest, insn, v0, v1, tmp1, tmp2, eqv;
2365 enum tls_model model;
2369 sorry ("MIPS16 TLS");
2370 return gen_reg_rtx (Pmode);
2373 v0 = gen_rtx_REG (Pmode, GP_RETURN);
2374 v1 = gen_rtx_REG (Pmode, GP_RETURN + 1);
2376 model = SYMBOL_REF_TLS_MODEL (loc);
2377 /* Only TARGET_ABICALLS code can have more than one module; other
2378 code must be be static and should not use a GOT. All TLS models
2379 reduce to local exec in this situation. */
2380 if (!TARGET_ABICALLS)
2381 model = TLS_MODEL_LOCAL_EXEC;
2385 case TLS_MODEL_GLOBAL_DYNAMIC:
2386 insn = mips_call_tls_get_addr (loc, SYMBOL_TLSGD, v0);
2387 dest = gen_reg_rtx (Pmode);
2388 emit_libcall_block (insn, dest, v0, loc);
2391 case TLS_MODEL_LOCAL_DYNAMIC:
2392 insn = mips_call_tls_get_addr (loc, SYMBOL_TLSLDM, v0);
2393 tmp1 = gen_reg_rtx (Pmode);
2395 /* Attach a unique REG_EQUIV, to allow the RTL optimizers to
2396 share the LDM result with other LD model accesses. */
2397 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2399 emit_libcall_block (insn, tmp1, v0, eqv);
2401 tmp2 = mips_unspec_offset_high (NULL, tmp1, loc, SYMBOL_DTPREL);
2402 dest = gen_rtx_LO_SUM (Pmode, tmp2,
2403 mips_unspec_address (loc, SYMBOL_DTPREL));
2406 case TLS_MODEL_INITIAL_EXEC:
2407 tmp1 = gen_reg_rtx (Pmode);
2408 tmp2 = mips_unspec_address (loc, SYMBOL_GOTTPREL);
2409 if (Pmode == DImode)
2411 emit_insn (gen_tls_get_tp_di (v1));
2412 emit_insn (gen_load_gotdi (tmp1, pic_offset_table_rtx, tmp2));
2416 emit_insn (gen_tls_get_tp_si (v1));
2417 emit_insn (gen_load_gotsi (tmp1, pic_offset_table_rtx, tmp2));
2419 dest = gen_reg_rtx (Pmode);
2420 emit_insn (gen_add3_insn (dest, tmp1, v1));
2423 case TLS_MODEL_LOCAL_EXEC:
2424 if (Pmode == DImode)
2425 emit_insn (gen_tls_get_tp_di (v1));
2427 emit_insn (gen_tls_get_tp_si (v1));
2429 tmp1 = mips_unspec_offset_high (NULL, v1, loc, SYMBOL_TPREL);
2430 dest = gen_rtx_LO_SUM (Pmode, tmp1,
2431 mips_unspec_address (loc, SYMBOL_TPREL));
2441 /* This function is used to implement LEGITIMIZE_ADDRESS. If *XLOC can
2442 be legitimized in a way that the generic machinery might not expect,
2443 put the new address in *XLOC and return true. MODE is the mode of
2444 the memory being accessed. */
2447 mips_legitimize_address (rtx *xloc, enum machine_mode mode)
2449 if (mips_tls_operand_p (*xloc))
2451 *xloc = mips_legitimize_tls_address (*xloc);
2455 /* See if the address can split into a high part and a LO_SUM. */
2456 if (mips_split_symbol (NULL, *xloc, mode, xloc))
2459 if (GET_CODE (*xloc) == PLUS && GET_CODE (XEXP (*xloc, 1)) == CONST_INT)
2461 /* Handle REG + CONSTANT using mips_add_offset. */
2464 reg = XEXP (*xloc, 0);
2465 if (!mips_valid_base_register_p (reg, mode, 0))
2466 reg = copy_to_mode_reg (Pmode, reg);
2467 *xloc = mips_add_offset (0, reg, INTVAL (XEXP (*xloc, 1)));
2475 /* Subroutine of mips_build_integer (with the same interface).
2476 Assume that the final action in the sequence should be a left shift. */
2479 mips_build_shift (struct mips_integer_op *codes, HOST_WIDE_INT value)
2481 unsigned int i, shift;
2483 /* Shift VALUE right until its lowest bit is set. Shift arithmetically
2484 since signed numbers are easier to load than unsigned ones. */
2486 while ((value & 1) == 0)
2487 value /= 2, shift++;
2489 i = mips_build_integer (codes, value);
2490 codes[i].code = ASHIFT;
2491 codes[i].value = shift;
2496 /* As for mips_build_shift, but assume that the final action will be
2497 an IOR or PLUS operation. */
2500 mips_build_lower (struct mips_integer_op *codes, unsigned HOST_WIDE_INT value)
2502 unsigned HOST_WIDE_INT high;
2505 high = value & ~(unsigned HOST_WIDE_INT) 0xffff;
2506 if (!LUI_OPERAND (high) && (value & 0x18000) == 0x18000)
2508 /* The constant is too complex to load with a simple lui/ori pair
2509 so our goal is to clear as many trailing zeros as possible.
2510 In this case, we know bit 16 is set and that the low 16 bits
2511 form a negative number. If we subtract that number from VALUE,
2512 we will clear at least the lowest 17 bits, maybe more. */
2513 i = mips_build_integer (codes, CONST_HIGH_PART (value));
2514 codes[i].code = PLUS;
2515 codes[i].value = CONST_LOW_PART (value);
2519 i = mips_build_integer (codes, high);
2520 codes[i].code = IOR;
2521 codes[i].value = value & 0xffff;
2527 /* Fill CODES with a sequence of rtl operations to load VALUE.
2528 Return the number of operations needed. */
2531 mips_build_integer (struct mips_integer_op *codes,
2532 unsigned HOST_WIDE_INT value)
2534 if (SMALL_OPERAND (value)
2535 || SMALL_OPERAND_UNSIGNED (value)
2536 || LUI_OPERAND (value))
2538 /* The value can be loaded with a single instruction. */
2539 codes[0].code = UNKNOWN;
2540 codes[0].value = value;
2543 else if ((value & 1) != 0 || LUI_OPERAND (CONST_HIGH_PART (value)))
2545 /* Either the constant is a simple LUI/ORI combination or its
2546 lowest bit is set. We don't want to shift in this case. */
2547 return mips_build_lower (codes, value);
2549 else if ((value & 0xffff) == 0)
2551 /* The constant will need at least three actions. The lowest
2552 16 bits are clear, so the final action will be a shift. */
2553 return mips_build_shift (codes, value);
2557 /* The final action could be a shift, add or inclusive OR.
2558 Rather than use a complex condition to select the best
2559 approach, try both mips_build_shift and mips_build_lower
2560 and pick the one that gives the shortest sequence.
2561 Note that this case is only used once per constant. */
2562 struct mips_integer_op alt_codes[MIPS_MAX_INTEGER_OPS];
2563 unsigned int cost, alt_cost;
2565 cost = mips_build_shift (codes, value);
2566 alt_cost = mips_build_lower (alt_codes, value);
2567 if (alt_cost < cost)
2569 memcpy (codes, alt_codes, alt_cost * sizeof (codes[0]));
2577 /* Load VALUE into DEST, using TEMP as a temporary register if need be. */
2580 mips_move_integer (rtx dest, rtx temp, unsigned HOST_WIDE_INT value)
2582 struct mips_integer_op codes[MIPS_MAX_INTEGER_OPS];
2583 enum machine_mode mode;
2584 unsigned int i, cost;
2587 mode = GET_MODE (dest);
2588 cost = mips_build_integer (codes, value);
2590 /* Apply each binary operation to X. Invariant: X is a legitimate
2591 source operand for a SET pattern. */
2592 x = GEN_INT (codes[0].value);
2593 for (i = 1; i < cost; i++)
2595 if (!can_create_pseudo_p ())
2597 emit_insn (gen_rtx_SET (VOIDmode, temp, x));
2601 x = force_reg (mode, x);
2602 x = gen_rtx_fmt_ee (codes[i].code, mode, x, GEN_INT (codes[i].value));
2605 emit_insn (gen_rtx_SET (VOIDmode, dest, x));
2609 /* Subroutine of mips_legitimize_move. Move constant SRC into register
2610 DEST given that SRC satisfies immediate_operand but doesn't satisfy
2614 mips_legitimize_const_move (enum machine_mode mode, rtx dest, rtx src)
2618 /* Split moves of big integers into smaller pieces. */
2619 if (splittable_const_int_operand (src, mode))
2621 mips_move_integer (dest, dest, INTVAL (src));
2625 /* Split moves of symbolic constants into high/low pairs. */
2626 if (mips_split_symbol (dest, src, MAX_MACHINE_MODE, &src))
2628 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
2632 if (mips_tls_operand_p (src))
2634 mips_emit_move (dest, mips_legitimize_tls_address (src));
2638 /* If we have (const (plus symbol offset)), and that expression cannot
2639 be forced into memory, load the symbol first and add in the offset.
2640 In non-MIPS16 mode, prefer to do this even if the constant _can_ be
2641 forced into memory, as it usually produces better code. */
2642 split_const (src, &base, &offset);
2643 if (offset != const0_rtx
2644 && (targetm.cannot_force_const_mem (src)
2645 || (!TARGET_MIPS16 && can_create_pseudo_p ())))
2647 base = mips_force_temporary (dest, base);
2648 mips_emit_move (dest, mips_add_offset (0, base, INTVAL (offset)));
2652 src = force_const_mem (mode, src);
2654 /* When using explicit relocs, constant pool references are sometimes
2655 not legitimate addresses. */
2656 mips_split_symbol (dest, XEXP (src, 0), mode, &XEXP (src, 0));
2657 mips_emit_move (dest, src);
2661 /* If (set DEST SRC) is not a valid instruction, emit an equivalent
2662 sequence that is valid. */
2665 mips_legitimize_move (enum machine_mode mode, rtx dest, rtx src)
2667 if (!register_operand (dest, mode) && !reg_or_0_operand (src, mode))
2669 mips_emit_move (dest, force_reg (mode, src));
2673 /* Check for individual, fully-reloaded mflo and mfhi instructions. */
2674 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
2675 && REG_P (src) && MD_REG_P (REGNO (src))
2676 && REG_P (dest) && GP_REG_P (REGNO (dest)))
2678 int other_regno = REGNO (src) == HI_REGNUM ? LO_REGNUM : HI_REGNUM;
2679 if (GET_MODE_SIZE (mode) <= 4)
2680 emit_insn (gen_mfhilo_si (gen_rtx_REG (SImode, REGNO (dest)),
2681 gen_rtx_REG (SImode, REGNO (src)),
2682 gen_rtx_REG (SImode, other_regno)));
2684 emit_insn (gen_mfhilo_di (gen_rtx_REG (DImode, REGNO (dest)),
2685 gen_rtx_REG (DImode, REGNO (src)),
2686 gen_rtx_REG (DImode, other_regno)));
2690 /* We need to deal with constants that would be legitimate
2691 immediate_operands but not legitimate move_operands. */
2692 if (CONSTANT_P (src) && !move_operand (src, mode))
2694 mips_legitimize_const_move (mode, dest, src);
2695 set_unique_reg_note (get_last_insn (), REG_EQUAL, copy_rtx (src));
2701 /* We need a lot of little routines to check constant values on the
2702 mips16. These are used to figure out how long the instruction will
2703 be. It would be much better to do this using constraints, but
2704 there aren't nearly enough letters available. */
2707 m16_check_op (rtx op, int low, int high, int mask)
2709 return (GET_CODE (op) == CONST_INT
2710 && INTVAL (op) >= low
2711 && INTVAL (op) <= high
2712 && (INTVAL (op) & mask) == 0);
2716 m16_uimm3_b (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2718 return m16_check_op (op, 0x1, 0x8, 0);
2722 m16_simm4_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2724 return m16_check_op (op, - 0x8, 0x7, 0);
2728 m16_nsimm4_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2730 return m16_check_op (op, - 0x7, 0x8, 0);
2734 m16_simm5_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2736 return m16_check_op (op, - 0x10, 0xf, 0);
2740 m16_nsimm5_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2742 return m16_check_op (op, - 0xf, 0x10, 0);
2746 m16_uimm5_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2748 return m16_check_op (op, (- 0x10) << 2, 0xf << 2, 3);
2752 m16_nuimm5_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2754 return m16_check_op (op, (- 0xf) << 2, 0x10 << 2, 3);
2758 m16_simm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2760 return m16_check_op (op, - 0x80, 0x7f, 0);
2764 m16_nsimm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2766 return m16_check_op (op, - 0x7f, 0x80, 0);
2770 m16_uimm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2772 return m16_check_op (op, 0x0, 0xff, 0);
2776 m16_nuimm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2778 return m16_check_op (op, - 0xff, 0x0, 0);
2782 m16_uimm8_m1_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2784 return m16_check_op (op, - 0x1, 0xfe, 0);
2788 m16_uimm8_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2790 return m16_check_op (op, 0x0, 0xff << 2, 3);
2794 m16_nuimm8_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2796 return m16_check_op (op, (- 0xff) << 2, 0x0, 3);
2800 m16_simm8_8 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2802 return m16_check_op (op, (- 0x80) << 3, 0x7f << 3, 7);
2806 m16_nsimm8_8 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2808 return m16_check_op (op, (- 0x7f) << 3, 0x80 << 3, 7);
2811 /* Return true if ADDR matches the pattern for the lwxs load scaled indexed
2812 address instruction. */
2815 mips_lwxs_address_p (rtx addr)
2818 && GET_CODE (addr) == PLUS
2819 && REG_P (XEXP (addr, 1)))
2821 rtx offset = XEXP (addr, 0);
2822 if (GET_CODE (offset) == MULT
2823 && REG_P (XEXP (offset, 0))
2824 && GET_CODE (XEXP (offset, 1)) == CONST_INT
2825 && INTVAL (XEXP (offset, 1)) == 4)
2832 mips_rtx_costs (rtx x, int code, int outer_code, int *total)
2834 enum machine_mode mode = GET_MODE (x);
2835 bool float_mode_p = FLOAT_MODE_P (mode);
2842 /* A number between 1 and 8 inclusive is efficient for a shift.
2843 Otherwise, we will need an extended instruction. */
2844 if ((outer_code) == ASHIFT || (outer_code) == ASHIFTRT
2845 || (outer_code) == LSHIFTRT)
2847 if (INTVAL (x) >= 1 && INTVAL (x) <= 8)
2850 *total = COSTS_N_INSNS (1);
2854 /* We can use cmpi for an xor with an unsigned 16-bit value. */
2855 if ((outer_code) == XOR
2856 && INTVAL (x) >= 0 && INTVAL (x) < 0x10000)
2862 /* We may be able to use slt or sltu for a comparison with a
2863 signed 16-bit value. (The boundary conditions aren't quite
2864 right, but this is just a heuristic anyhow.) */
2865 if (((outer_code) == LT || (outer_code) == LE
2866 || (outer_code) == GE || (outer_code) == GT
2867 || (outer_code) == LTU || (outer_code) == LEU
2868 || (outer_code) == GEU || (outer_code) == GTU)
2869 && INTVAL (x) >= -0x8000 && INTVAL (x) < 0x8000)
2875 /* Equality comparisons with 0 are cheap. */
2876 if (((outer_code) == EQ || (outer_code) == NE)
2883 /* Constants in the range 0...255 can be loaded with an unextended
2884 instruction. They are therefore as cheap as a register move.
2886 Given the choice between "li R1,0...255" and "move R1,R2"
2887 (where R2 is a known constant), it is usually better to use "li",
2888 since we do not want to unnecessarily extend the lifetime
2890 if (outer_code == SET
2892 && INTVAL (x) < 256)
2900 /* These can be used anywhere. */
2905 /* Otherwise fall through to the handling below because
2906 we'll need to construct the constant. */
2912 if (LEGITIMATE_CONSTANT_P (x))
2914 *total = COSTS_N_INSNS (1);
2919 /* The value will need to be fetched from the constant pool. */
2920 *total = CONSTANT_POOL_COST;
2926 /* If the address is legitimate, return the number of
2927 instructions it needs. */
2928 rtx addr = XEXP (x, 0);
2929 int n = mips_address_insns (addr, GET_MODE (x), true);
2932 *total = COSTS_N_INSNS (n + 1);
2935 /* Check for scaled indexed address. */
2936 if (mips_lwxs_address_p (addr))
2938 *total = COSTS_N_INSNS (2);
2941 /* Otherwise use the default handling. */
2946 *total = COSTS_N_INSNS (6);
2950 *total = COSTS_N_INSNS ((mode == DImode && !TARGET_64BIT) ? 2 : 1);
2956 if (mode == DImode && !TARGET_64BIT)
2958 *total = COSTS_N_INSNS (2);
2966 if (mode == DImode && !TARGET_64BIT)
2968 *total = COSTS_N_INSNS ((GET_CODE (XEXP (x, 1)) == CONST_INT)
2976 *total = COSTS_N_INSNS (1);
2978 *total = COSTS_N_INSNS (4);
2982 *total = COSTS_N_INSNS (1);
2989 *total = mips_cost->fp_add;
2993 else if (mode == DImode && !TARGET_64BIT)
2995 *total = COSTS_N_INSNS (4);
3001 if (mode == DImode && !TARGET_64BIT)
3003 *total = COSTS_N_INSNS (4);
3010 *total = mips_cost->fp_mult_sf;
3012 else if (mode == DFmode)
3013 *total = mips_cost->fp_mult_df;
3015 else if (mode == SImode)
3016 *total = mips_cost->int_mult_si;
3019 *total = mips_cost->int_mult_di;
3028 *total = mips_cost->fp_div_sf;
3030 *total = mips_cost->fp_div_df;
3039 *total = mips_cost->int_div_di;
3041 *total = mips_cost->int_div_si;
3046 /* A sign extend from SImode to DImode in 64-bit mode is often
3047 zero instructions, because the result can often be used
3048 directly by another instruction; we'll call it one. */
3049 if (TARGET_64BIT && mode == DImode
3050 && GET_MODE (XEXP (x, 0)) == SImode)
3051 *total = COSTS_N_INSNS (1);
3053 *total = COSTS_N_INSNS (2);
3057 if (TARGET_64BIT && mode == DImode
3058 && GET_MODE (XEXP (x, 0)) == SImode)
3059 *total = COSTS_N_INSNS (2);
3061 *total = COSTS_N_INSNS (1);
3065 case UNSIGNED_FLOAT:
3068 case FLOAT_TRUNCATE:
3070 *total = mips_cost->fp_add;
3078 /* Provide the costs of an addressing mode that contains ADDR.
3079 If ADDR is not a valid address, its cost is irrelevant. */
3082 mips_address_cost (rtx addr)
3084 return mips_address_insns (addr, SImode, false);
3087 /* Return one word of double-word value OP, taking into account the fixed
3088 endianness of certain registers. HIGH_P is true to select the high part,
3089 false to select the low part. */
3092 mips_subword (rtx op, int high_p)
3095 enum machine_mode mode;
3097 mode = GET_MODE (op);
3098 if (mode == VOIDmode)
3101 if (TARGET_BIG_ENDIAN ? !high_p : high_p)
3102 byte = UNITS_PER_WORD;
3106 if (FP_REG_RTX_P (op))
3107 return gen_rtx_REG (word_mode, high_p ? REGNO (op) + 1 : REGNO (op));
3110 return mips_rewrite_small_data (adjust_address (op, word_mode, byte));
3112 return simplify_gen_subreg (word_mode, op, mode, byte);
3116 /* Return true if a 64-bit move from SRC to DEST should be split into two. */
3119 mips_split_64bit_move_p (rtx dest, rtx src)
3124 /* FP->FP moves can be done in a single instruction. */
3125 if (FP_REG_RTX_P (src) && FP_REG_RTX_P (dest))
3128 /* Check for floating-point loads and stores. They can be done using
3129 ldc1 and sdc1 on MIPS II and above. */
3132 if (FP_REG_RTX_P (dest) && MEM_P (src))
3134 if (FP_REG_RTX_P (src) && MEM_P (dest))
3141 /* Split a 64-bit move from SRC to DEST assuming that
3142 mips_split_64bit_move_p holds.
3144 Moves into and out of FPRs cause some difficulty here. Such moves
3145 will always be DFmode, since paired FPRs are not allowed to store
3146 DImode values. The most natural representation would be two separate
3147 32-bit moves, such as:
3149 (set (reg:SI $f0) (mem:SI ...))
3150 (set (reg:SI $f1) (mem:SI ...))
3152 However, the second insn is invalid because odd-numbered FPRs are
3153 not allowed to store independent values. Use the patterns load_df_low,
3154 load_df_high and store_df_high instead. */
3157 mips_split_64bit_move (rtx dest, rtx src)
3159 if (FP_REG_RTX_P (dest))
3161 /* Loading an FPR from memory or from GPRs. */
3164 dest = gen_lowpart (DFmode, dest);
3165 emit_insn (gen_load_df_low (dest, mips_subword (src, 0)));
3166 emit_insn (gen_mthc1 (dest, mips_subword (src, 1),
3171 emit_insn (gen_load_df_low (copy_rtx (dest),
3172 mips_subword (src, 0)));
3173 emit_insn (gen_load_df_high (dest, mips_subword (src, 1),
3177 else if (FP_REG_RTX_P (src))
3179 /* Storing an FPR into memory or GPRs. */
3182 src = gen_lowpart (DFmode, src);
3183 mips_emit_move (mips_subword (dest, 0), mips_subword (src, 0));
3184 emit_insn (gen_mfhc1 (mips_subword (dest, 1), src));
3188 mips_emit_move (mips_subword (dest, 0), mips_subword (src, 0));
3189 emit_insn (gen_store_df_high (mips_subword (dest, 1), src));
3194 /* The operation can be split into two normal moves. Decide in
3195 which order to do them. */
3198 low_dest = mips_subword (dest, 0);
3199 if (REG_P (low_dest)
3200 && reg_overlap_mentioned_p (low_dest, src))
3202 mips_emit_move (mips_subword (dest, 1), mips_subword (src, 1));
3203 mips_emit_move (low_dest, mips_subword (src, 0));
3207 mips_emit_move (low_dest, mips_subword (src, 0));
3208 mips_emit_move (mips_subword (dest, 1), mips_subword (src, 1));
3213 /* Return the appropriate instructions to move SRC into DEST. Assume
3214 that SRC is operand 1 and DEST is operand 0. */
3217 mips_output_move (rtx dest, rtx src)
3219 enum rtx_code dest_code, src_code;
3220 enum mips_symbol_type symbol_type;
3223 dest_code = GET_CODE (dest);
3224 src_code = GET_CODE (src);
3225 dbl_p = (GET_MODE_SIZE (GET_MODE (dest)) == 8);
3227 if (dbl_p && mips_split_64bit_move_p (dest, src))
3230 if ((src_code == REG && GP_REG_P (REGNO (src)))
3231 || (!TARGET_MIPS16 && src == CONST0_RTX (GET_MODE (dest))))
3233 if (dest_code == REG)
3235 if (GP_REG_P (REGNO (dest)))
3236 return "move\t%0,%z1";
3238 if (MD_REG_P (REGNO (dest)))
3241 if (DSP_ACC_REG_P (REGNO (dest)))
3243 static char retval[] = "mt__\t%z1,%q0";
3244 retval[2] = reg_names[REGNO (dest)][4];
3245 retval[3] = reg_names[REGNO (dest)][5];
3249 if (FP_REG_P (REGNO (dest)))
3250 return (dbl_p ? "dmtc1\t%z1,%0" : "mtc1\t%z1,%0");
3252 if (ALL_COP_REG_P (REGNO (dest)))
3254 static char retval[] = "dmtc_\t%z1,%0";
3256 retval[4] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (dest));
3257 return (dbl_p ? retval : retval + 1);
3260 if (dest_code == MEM)
3261 return (dbl_p ? "sd\t%z1,%0" : "sw\t%z1,%0");
3263 if (dest_code == REG && GP_REG_P (REGNO (dest)))
3265 if (src_code == REG)
3267 if (DSP_ACC_REG_P (REGNO (src)))
3269 static char retval[] = "mf__\t%0,%q1";
3270 retval[2] = reg_names[REGNO (src)][4];
3271 retval[3] = reg_names[REGNO (src)][5];
3275 if (ST_REG_P (REGNO (src)) && ISA_HAS_8CC)
3276 return "lui\t%0,0x3f80\n\tmovf\t%0,%.,%1";
3278 if (FP_REG_P (REGNO (src)))
3279 return (dbl_p ? "dmfc1\t%0,%1" : "mfc1\t%0,%1");
3281 if (ALL_COP_REG_P (REGNO (src)))
3283 static char retval[] = "dmfc_\t%0,%1";
3285 retval[4] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (src));
3286 return (dbl_p ? retval : retval + 1);
3290 if (src_code == MEM)
3291 return (dbl_p ? "ld\t%0,%1" : "lw\t%0,%1");
3293 if (src_code == CONST_INT)
3295 /* Don't use the X format, because that will give out of
3296 range numbers for 64-bit hosts and 32-bit targets. */
3298 return "li\t%0,%1\t\t\t# %X1";
3300 if (INTVAL (src) >= 0 && INTVAL (src) <= 0xffff)
3303 if (INTVAL (src) < 0 && INTVAL (src) >= -0xffff)
3307 if (src_code == HIGH)
3308 return TARGET_MIPS16 ? "#" : "lui\t%0,%h1";
3310 if (CONST_GP_P (src))
3311 return "move\t%0,%1";
3313 if (mips_symbolic_constant_p (src, SYMBOL_CONTEXT_LEA, &symbol_type)
3314 && mips_lo_relocs[symbol_type] != 0)
3316 /* A signed 16-bit constant formed by applying a relocation
3317 operator to a symbolic address. */
3318 gcc_assert (!mips_split_p[symbol_type]);
3319 return "li\t%0,%R1";
3322 if (symbolic_operand (src, VOIDmode))
3324 gcc_assert (TARGET_MIPS16
3325 ? TARGET_MIPS16_TEXT_LOADS
3326 : !TARGET_EXPLICIT_RELOCS);
3327 return (dbl_p ? "dla\t%0,%1" : "la\t%0,%1");
3330 if (src_code == REG && FP_REG_P (REGNO (src)))
3332 if (dest_code == REG && FP_REG_P (REGNO (dest)))
3334 if (GET_MODE (dest) == V2SFmode)
3335 return "mov.ps\t%0,%1";
3337 return (dbl_p ? "mov.d\t%0,%1" : "mov.s\t%0,%1");
3340 if (dest_code == MEM)
3341 return (dbl_p ? "sdc1\t%1,%0" : "swc1\t%1,%0");
3343 if (dest_code == REG && FP_REG_P (REGNO (dest)))
3345 if (src_code == MEM)
3346 return (dbl_p ? "ldc1\t%0,%1" : "lwc1\t%0,%1");
3348 if (dest_code == REG && ALL_COP_REG_P (REGNO (dest)) && src_code == MEM)
3350 static char retval[] = "l_c_\t%0,%1";
3352 retval[1] = (dbl_p ? 'd' : 'w');
3353 retval[3] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (dest));
3356 if (dest_code == MEM && src_code == REG && ALL_COP_REG_P (REGNO (src)))
3358 static char retval[] = "s_c_\t%1,%0";
3360 retval[1] = (dbl_p ? 'd' : 'w');
3361 retval[3] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (src));
3367 /* Restore $gp from its save slot. Valid only when using o32 or
3371 mips_restore_gp (void)
3375 gcc_assert (TARGET_ABICALLS && TARGET_OLDABI);
3377 address = mips_add_offset (pic_offset_table_rtx,
3378 frame_pointer_needed
3379 ? hard_frame_pointer_rtx
3380 : stack_pointer_rtx,
3381 current_function_outgoing_args_size);
3382 slot = gen_rtx_MEM (Pmode, address);
3384 mips_emit_move (pic_offset_table_rtx, slot);
3385 if (!TARGET_EXPLICIT_RELOCS)
3386 emit_insn (gen_blockage ());
3389 /* Emit an instruction of the form (set TARGET (CODE OP0 OP1)). */
3392 mips_emit_binary (enum rtx_code code, rtx target, rtx op0, rtx op1)
3394 emit_insn (gen_rtx_SET (VOIDmode, target,
3395 gen_rtx_fmt_ee (code, GET_MODE (target), op0, op1)));
3398 /* Return true if CMP1 is a suitable second operand for relational
3399 operator CODE. See also the *sCC patterns in mips.md. */
3402 mips_relational_operand_ok_p (enum rtx_code code, rtx cmp1)
3408 return reg_or_0_operand (cmp1, VOIDmode);
3412 return !TARGET_MIPS16 && cmp1 == const1_rtx;
3416 return arith_operand (cmp1, VOIDmode);
3419 return sle_operand (cmp1, VOIDmode);
3422 return sleu_operand (cmp1, VOIDmode);
3429 /* Canonicalize LE or LEU comparisons into LT comparisons when
3430 possible to avoid extra instructions or inverting the
3434 mips_canonicalize_comparison (enum rtx_code *code, rtx *cmp1,
3435 enum machine_mode mode)
3437 HOST_WIDE_INT original, plus_one;
3439 if (GET_CODE (*cmp1) != CONST_INT)
3442 original = INTVAL (*cmp1);
3443 plus_one = trunc_int_for_mode ((unsigned HOST_WIDE_INT) original + 1, mode);
3448 if (original < plus_one)
3451 *cmp1 = force_reg (mode, GEN_INT (plus_one));
3460 *cmp1 = force_reg (mode, GEN_INT (plus_one));
3473 /* Compare CMP0 and CMP1 using relational operator CODE and store the
3474 result in TARGET. CMP0 and TARGET are register_operands that have
3475 the same integer mode. If INVERT_PTR is nonnull, it's OK to set
3476 TARGET to the inverse of the result and flip *INVERT_PTR instead. */
3479 mips_emit_int_relational (enum rtx_code code, bool *invert_ptr,
3480 rtx target, rtx cmp0, rtx cmp1)
3482 /* First see if there is a MIPS instruction that can do this operation
3483 with CMP1 in its current form. If not, try to canonicalize the
3484 comparison to LT. If that fails, try doing the same for the
3485 inverse operation. If that also fails, force CMP1 into a register
3487 if (mips_relational_operand_ok_p (code, cmp1))
3488 mips_emit_binary (code, target, cmp0, cmp1);
3489 else if (mips_canonicalize_comparison (&code, &cmp1, GET_MODE (target)))
3490 mips_emit_binary (code, target, cmp0, cmp1);
3493 enum rtx_code inv_code = reverse_condition (code);
3494 if (!mips_relational_operand_ok_p (inv_code, cmp1))
3496 cmp1 = force_reg (GET_MODE (cmp0), cmp1);
3497 mips_emit_int_relational (code, invert_ptr, target, cmp0, cmp1);
3499 else if (invert_ptr == 0)
3501 rtx inv_target = gen_reg_rtx (GET_MODE (target));
3502 mips_emit_binary (inv_code, inv_target, cmp0, cmp1);
3503 mips_emit_binary (XOR, target, inv_target, const1_rtx);
3507 *invert_ptr = !*invert_ptr;
3508 mips_emit_binary (inv_code, target, cmp0, cmp1);
3513 /* Return a register that is zero iff CMP0 and CMP1 are equal.
3514 The register will have the same mode as CMP0. */
3517 mips_zero_if_equal (rtx cmp0, rtx cmp1)
3519 if (cmp1 == const0_rtx)
3522 if (uns_arith_operand (cmp1, VOIDmode))
3523 return expand_binop (GET_MODE (cmp0), xor_optab,
3524 cmp0, cmp1, 0, 0, OPTAB_DIRECT);
3526 return expand_binop (GET_MODE (cmp0), sub_optab,
3527 cmp0, cmp1, 0, 0, OPTAB_DIRECT);
3530 /* Convert *CODE into a code that can be used in a floating-point
3531 scc instruction (c.<cond>.<fmt>). Return true if the values of
3532 the condition code registers will be inverted, with 0 indicating
3533 that the condition holds. */
3536 mips_reverse_fp_cond_p (enum rtx_code *code)
3543 *code = reverse_condition_maybe_unordered (*code);
3551 /* Convert a comparison into something that can be used in a branch or
3552 conditional move. cmp_operands[0] and cmp_operands[1] are the values
3553 being compared and *CODE is the code used to compare them.
3555 Update *CODE, *OP0 and *OP1 so that they describe the final comparison.
3556 If NEED_EQ_NE_P, then only EQ/NE comparisons against zero are possible,
3557 otherwise any standard branch condition can be used. The standard branch
3560 - EQ/NE between two registers.
3561 - any comparison between a register and zero. */
3564 mips_emit_compare (enum rtx_code *code, rtx *op0, rtx *op1, bool need_eq_ne_p)
3566 if (GET_MODE_CLASS (GET_MODE (cmp_operands[0])) == MODE_INT)
3568 if (!need_eq_ne_p && cmp_operands[1] == const0_rtx)
3570 *op0 = cmp_operands[0];
3571 *op1 = cmp_operands[1];
3573 else if (*code == EQ || *code == NE)
3577 *op0 = mips_zero_if_equal (cmp_operands[0], cmp_operands[1]);
3582 *op0 = cmp_operands[0];
3583 *op1 = force_reg (GET_MODE (*op0), cmp_operands[1]);
3588 /* The comparison needs a separate scc instruction. Store the
3589 result of the scc in *OP0 and compare it against zero. */
3590 bool invert = false;
3591 *op0 = gen_reg_rtx (GET_MODE (cmp_operands[0]));
3593 mips_emit_int_relational (*code, &invert, *op0,
3594 cmp_operands[0], cmp_operands[1]);
3595 *code = (invert ? EQ : NE);
3600 enum rtx_code cmp_code;
3602 /* Floating-point tests use a separate c.cond.fmt comparison to
3603 set a condition code register. The branch or conditional move
3604 will then compare that register against zero.
3606 Set CMP_CODE to the code of the comparison instruction and
3607 *CODE to the code that the branch or move should use. */
3609 *code = mips_reverse_fp_cond_p (&cmp_code) ? EQ : NE;
3611 ? gen_reg_rtx (CCmode)
3612 : gen_rtx_REG (CCmode, FPSW_REGNUM));
3614 mips_emit_binary (cmp_code, *op0, cmp_operands[0], cmp_operands[1]);
3618 /* Try comparing cmp_operands[0] and cmp_operands[1] using rtl code CODE.
3619 Store the result in TARGET and return true if successful.
3621 On 64-bit targets, TARGET may be wider than cmp_operands[0]. */
3624 mips_emit_scc (enum rtx_code code, rtx target)
3626 if (GET_MODE_CLASS (GET_MODE (cmp_operands[0])) != MODE_INT)
3629 target = gen_lowpart (GET_MODE (cmp_operands[0]), target);
3630 if (code == EQ || code == NE)
3632 rtx zie = mips_zero_if_equal (cmp_operands[0], cmp_operands[1]);
3633 mips_emit_binary (code, target, zie, const0_rtx);
3636 mips_emit_int_relational (code, 0, target,
3637 cmp_operands[0], cmp_operands[1]);
3641 /* Emit the common code for doing conditional branches.
3642 operand[0] is the label to jump to.
3643 The comparison operands are saved away by cmp{si,di,sf,df}. */
3646 gen_conditional_branch (rtx *operands, enum rtx_code code)
3648 rtx op0, op1, condition;
3650 mips_emit_compare (&code, &op0, &op1, TARGET_MIPS16);
3651 condition = gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
3652 emit_jump_insn (gen_condjump (condition, operands[0]));
3657 (set temp (COND:CCV2 CMP_OP0 CMP_OP1))
3658 (set DEST (unspec [TRUE_SRC FALSE_SRC temp] UNSPEC_MOVE_TF_PS)) */
3661 mips_expand_vcondv2sf (rtx dest, rtx true_src, rtx false_src,
3662 enum rtx_code cond, rtx cmp_op0, rtx cmp_op1)
3667 reversed_p = mips_reverse_fp_cond_p (&cond);
3668 cmp_result = gen_reg_rtx (CCV2mode);
3669 emit_insn (gen_scc_ps (cmp_result,
3670 gen_rtx_fmt_ee (cond, VOIDmode, cmp_op0, cmp_op1)));
3672 emit_insn (gen_mips_cond_move_tf_ps (dest, false_src, true_src,
3675 emit_insn (gen_mips_cond_move_tf_ps (dest, true_src, false_src,
3679 /* Emit the common code for conditional moves. OPERANDS is the array
3680 of operands passed to the conditional move define_expand. */
3683 gen_conditional_move (rtx *operands)
3688 code = GET_CODE (operands[1]);
3689 mips_emit_compare (&code, &op0, &op1, true);
3690 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3691 gen_rtx_IF_THEN_ELSE (GET_MODE (operands[0]),
3692 gen_rtx_fmt_ee (code,
3695 operands[2], operands[3])));
3698 /* Emit a conditional trap. OPERANDS is the array of operands passed to
3699 the conditional_trap expander. */
3702 mips_gen_conditional_trap (rtx *operands)
3705 enum rtx_code cmp_code = GET_CODE (operands[0]);
3706 enum machine_mode mode = GET_MODE (cmp_operands[0]);
3708 /* MIPS conditional trap machine instructions don't have GT or LE
3709 flavors, so we must invert the comparison and convert to LT and
3710 GE, respectively. */
3713 case GT: cmp_code = LT; break;
3714 case LE: cmp_code = GE; break;
3715 case GTU: cmp_code = LTU; break;
3716 case LEU: cmp_code = GEU; break;
3719 if (cmp_code == GET_CODE (operands[0]))
3721 op0 = cmp_operands[0];
3722 op1 = cmp_operands[1];
3726 op0 = cmp_operands[1];
3727 op1 = cmp_operands[0];
3729 op0 = force_reg (mode, op0);
3730 if (!arith_operand (op1, mode))
3731 op1 = force_reg (mode, op1);
3733 emit_insn (gen_rtx_TRAP_IF (VOIDmode,
3734 gen_rtx_fmt_ee (cmp_code, mode, op0, op1),
3738 /* Return true if calls to X can use R_MIPS_CALL* relocations. */
3741 mips_ok_for_lazy_binding_p (rtx x)
3743 return (TARGET_USE_GOT
3744 && GET_CODE (x) == SYMBOL_REF
3745 && !mips_symbol_binds_local_p (x));
3748 /* Load function address ADDR into register DEST. SIBCALL_P is true
3749 if the address is needed for a sibling call. */
3752 mips_load_call_address (rtx dest, rtx addr, int sibcall_p)
3754 /* If we're generating PIC, and this call is to a global function,
3755 try to allow its address to be resolved lazily. This isn't
3756 possible if TARGET_CALL_SAVED_GP since the value of $gp on entry
3757 to the stub would be our caller's gp, not ours. */
3758 if (TARGET_EXPLICIT_RELOCS
3759 && !(sibcall_p && TARGET_CALL_SAVED_GP)
3760 && mips_ok_for_lazy_binding_p (addr))
3762 rtx high, lo_sum_symbol;
3764 high = mips_unspec_offset_high (dest, pic_offset_table_rtx,
3765 addr, SYMBOL_GOTOFF_CALL);
3766 lo_sum_symbol = mips_unspec_address (addr, SYMBOL_GOTOFF_CALL);
3767 if (Pmode == SImode)
3768 emit_insn (gen_load_callsi (dest, high, lo_sum_symbol));
3770 emit_insn (gen_load_calldi (dest, high, lo_sum_symbol));
3773 mips_emit_move (dest, addr);
3777 /* Expand a call or call_value instruction. RESULT is where the
3778 result will go (null for calls), ADDR is the address of the
3779 function, ARGS_SIZE is the size of the arguments and AUX is
3780 the value passed to us by mips_function_arg. SIBCALL_P is true
3781 if we are expanding a sibling call, false if we're expanding
3785 mips_expand_call (rtx result, rtx addr, rtx args_size, rtx aux, int sibcall_p)
3787 rtx orig_addr, pattern, insn;
3790 if (!call_insn_operand (addr, VOIDmode))
3792 addr = gen_reg_rtx (Pmode);
3793 mips_load_call_address (addr, orig_addr, sibcall_p);
3797 && TARGET_HARD_FLOAT_ABI
3798 && build_mips16_call_stub (result, addr, args_size,
3799 aux == 0 ? 0 : (int) GET_MODE (aux)))
3803 pattern = (sibcall_p
3804 ? gen_sibcall_internal (addr, args_size)
3805 : gen_call_internal (addr, args_size));
3806 else if (GET_CODE (result) == PARALLEL && XVECLEN (result, 0) == 2)
3810 reg1 = XEXP (XVECEXP (result, 0, 0), 0);
3811 reg2 = XEXP (XVECEXP (result, 0, 1), 0);
3814 ? gen_sibcall_value_multiple_internal (reg1, addr, args_size, reg2)
3815 : gen_call_value_multiple_internal (reg1, addr, args_size, reg2));
3818 pattern = (sibcall_p
3819 ? gen_sibcall_value_internal (result, addr, args_size)
3820 : gen_call_value_internal (result, addr, args_size));
3822 insn = emit_call_insn (pattern);
3824 /* Lazy-binding stubs require $gp to be valid on entry. */
3825 if (mips_ok_for_lazy_binding_p (orig_addr))
3826 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
3830 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
3833 mips_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3835 if (!TARGET_SIBCALLS)
3838 /* We can't do a sibcall if the called function is a MIPS16 function
3839 because there is no direct "jx" instruction equivalent to "jalx" to
3840 switch the ISA mode. */
3841 if (decl && SYMBOL_REF_MIPS16_FUNC_P (XEXP (DECL_RTL (decl), 0)))
3848 /* Emit code to move general operand SRC into condition-code
3849 register DEST. SCRATCH is a scratch TFmode float register.
3856 where FP1 and FP2 are single-precision float registers
3857 taken from SCRATCH. */
3860 mips_emit_fcc_reload (rtx dest, rtx src, rtx scratch)
3864 /* Change the source to SFmode. */
3866 src = adjust_address (src, SFmode, 0);
3867 else if (REG_P (src) || GET_CODE (src) == SUBREG)
3868 src = gen_rtx_REG (SFmode, true_regnum (src));
3870 fp1 = gen_rtx_REG (SFmode, REGNO (scratch));
3871 fp2 = gen_rtx_REG (SFmode, REGNO (scratch) + MAX_FPRS_PER_FMT);
3873 mips_emit_move (copy_rtx (fp1), src);
3874 mips_emit_move (copy_rtx (fp2), CONST0_RTX (SFmode));
3875 emit_insn (gen_slt_sf (dest, fp2, fp1));
3878 /* Emit code to change the current function's return address to
3879 ADDRESS. SCRATCH is available as a scratch register, if needed.
3880 ADDRESS and SCRATCH are both word-mode GPRs. */
3883 mips_set_return_address (rtx address, rtx scratch)
3887 compute_frame_size (get_frame_size ());
3888 gcc_assert ((cfun->machine->frame.mask >> 31) & 1);
3889 slot_address = mips_add_offset (scratch, stack_pointer_rtx,
3890 cfun->machine->frame.gp_sp_offset);
3892 mips_emit_move (gen_rtx_MEM (GET_MODE (address), slot_address), address);
3895 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
3896 Assume that the areas do not overlap. */
3899 mips_block_move_straight (rtx dest, rtx src, HOST_WIDE_INT length)
3901 HOST_WIDE_INT offset, delta;
3902 unsigned HOST_WIDE_INT bits;
3904 enum machine_mode mode;
3907 /* Work out how many bits to move at a time. If both operands have
3908 half-word alignment, it is usually better to move in half words.
3909 For instance, lh/lh/sh/sh is usually better than lwl/lwr/swl/swr
3910 and lw/lw/sw/sw is usually better than ldl/ldr/sdl/sdr.
3911 Otherwise move word-sized chunks. */
3912 if (MEM_ALIGN (src) == BITS_PER_WORD / 2
3913 && MEM_ALIGN (dest) == BITS_PER_WORD / 2)
3914 bits = BITS_PER_WORD / 2;
3916 bits = BITS_PER_WORD;
3918 mode = mode_for_size (bits, MODE_INT, 0);
3919 delta = bits / BITS_PER_UNIT;
3921 /* Allocate a buffer for the temporary registers. */
3922 regs = alloca (sizeof (rtx) * length / delta);
3924 /* Load as many BITS-sized chunks as possible. Use a normal load if
3925 the source has enough alignment, otherwise use left/right pairs. */
3926 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
3928 regs[i] = gen_reg_rtx (mode);
3929 if (MEM_ALIGN (src) >= bits)
3930 mips_emit_move (regs[i], adjust_address (src, mode, offset));
3933 rtx part = adjust_address (src, BLKmode, offset);
3934 if (!mips_expand_unaligned_load (regs[i], part, bits, 0))
3939 /* Copy the chunks to the destination. */
3940 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
3941 if (MEM_ALIGN (dest) >= bits)
3942 mips_emit_move (adjust_address (dest, mode, offset), regs[i]);
3945 rtx part = adjust_address (dest, BLKmode, offset);
3946 if (!mips_expand_unaligned_store (part, regs[i], bits, 0))
3950 /* Mop up any left-over bytes. */
3951 if (offset < length)
3953 src = adjust_address (src, BLKmode, offset);
3954 dest = adjust_address (dest, BLKmode, offset);
3955 move_by_pieces (dest, src, length - offset,
3956 MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
3960 #define MAX_MOVE_REGS 4
3961 #define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD)
3964 /* Helper function for doing a loop-based block operation on memory
3965 reference MEM. Each iteration of the loop will operate on LENGTH
3968 Create a new base register for use within the loop and point it to
3969 the start of MEM. Create a new memory reference that uses this
3970 register. Store them in *LOOP_REG and *LOOP_MEM respectively. */
3973 mips_adjust_block_mem (rtx mem, HOST_WIDE_INT length,
3974 rtx *loop_reg, rtx *loop_mem)
3976 *loop_reg = copy_addr_to_reg (XEXP (mem, 0));
3978 /* Although the new mem does not refer to a known location,
3979 it does keep up to LENGTH bytes of alignment. */
3980 *loop_mem = change_address (mem, BLKmode, *loop_reg);
3981 set_mem_align (*loop_mem, MIN (MEM_ALIGN (mem), length * BITS_PER_UNIT));
3985 /* Move LENGTH bytes from SRC to DEST using a loop that moves MAX_MOVE_BYTES
3986 per iteration. LENGTH must be at least MAX_MOVE_BYTES. Assume that the
3987 memory regions do not overlap. */
3990 mips_block_move_loop (rtx dest, rtx src, HOST_WIDE_INT length)
3992 rtx label, src_reg, dest_reg, final_src;
3993 HOST_WIDE_INT leftover;
3995 leftover = length % MAX_MOVE_BYTES;
3998 /* Create registers and memory references for use within the loop. */
3999 mips_adjust_block_mem (src, MAX_MOVE_BYTES, &src_reg, &src);
4000 mips_adjust_block_mem (dest, MAX_MOVE_BYTES, &dest_reg, &dest);
4002 /* Calculate the value that SRC_REG should have after the last iteration
4004 final_src = expand_simple_binop (Pmode, PLUS, src_reg, GEN_INT (length),
4007 /* Emit the start of the loop. */
4008 label = gen_label_rtx ();
4011 /* Emit the loop body. */
4012 mips_block_move_straight (dest, src, MAX_MOVE_BYTES);
4014 /* Move on to the next block. */
4015 mips_emit_move (src_reg, plus_constant (src_reg, MAX_MOVE_BYTES));
4016 mips_emit_move (dest_reg, plus_constant (dest_reg, MAX_MOVE_BYTES));
4018 /* Emit the loop condition. */
4019 if (Pmode == DImode)
4020 emit_insn (gen_cmpdi (src_reg, final_src));
4022 emit_insn (gen_cmpsi (src_reg, final_src));
4023 emit_jump_insn (gen_bne (label));
4025 /* Mop up any left-over bytes. */
4027 mips_block_move_straight (dest, src, leftover);
4031 /* Expand a loop of synci insns for the address range [BEGIN, END). */
4034 mips_expand_synci_loop (rtx begin, rtx end)
4036 rtx inc, label, cmp, cmp_result;
4038 /* Load INC with the cache line size (rdhwr INC,$1). */
4039 inc = gen_reg_rtx (SImode);
4040 emit_insn (gen_rdhwr (inc, const1_rtx));
4042 /* Loop back to here. */
4043 label = gen_label_rtx ();
4046 emit_insn (gen_synci (begin));
4048 cmp = gen_reg_rtx (Pmode);
4049 mips_emit_binary (GTU, cmp, begin, end);
4051 mips_emit_binary (PLUS, begin, begin, inc);
4053 cmp_result = gen_rtx_EQ (VOIDmode, cmp, const0_rtx);
4054 emit_jump_insn (gen_condjump (cmp_result, label));
4057 /* Expand a movmemsi instruction. */
4060 mips_expand_block_move (rtx dest, rtx src, rtx length)
4062 if (GET_CODE (length) == CONST_INT)
4064 if (INTVAL (length) <= 2 * MAX_MOVE_BYTES)
4066 mips_block_move_straight (dest, src, INTVAL (length));
4071 mips_block_move_loop (dest, src, INTVAL (length));
4078 /* Argument support functions. */
4080 /* Initialize CUMULATIVE_ARGS for a function. */
4083 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4084 rtx libname ATTRIBUTE_UNUSED)
4086 static CUMULATIVE_ARGS zero_cum;
4087 tree param, next_param;
4090 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4092 /* Determine if this function has variable arguments. This is
4093 indicated by the last argument being 'void_type_mode' if there
4094 are no variable arguments. The standard MIPS calling sequence
4095 passes all arguments in the general purpose registers in this case. */
4097 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
4098 param != 0; param = next_param)
4100 next_param = TREE_CHAIN (param);
4101 if (next_param == 0 && TREE_VALUE (param) != void_type_node)
4102 cum->gp_reg_found = 1;
4107 /* Fill INFO with information about a single argument. CUM is the
4108 cumulative state for earlier arguments. MODE is the mode of this
4109 argument and TYPE is its type (if known). NAMED is true if this
4110 is a named (fixed) argument rather than a variable one. */
4113 mips_arg_info (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
4114 tree type, int named, struct mips_arg_info *info)
4116 bool doubleword_aligned_p;
4117 unsigned int num_bytes, num_words, max_regs;
4119 /* Work out the size of the argument. */
4120 num_bytes = type ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
4121 num_words = (num_bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4123 /* Decide whether it should go in a floating-point register, assuming
4124 one is free. Later code checks for availability.
4126 The checks against UNITS_PER_FPVALUE handle the soft-float and
4127 single-float cases. */
4131 /* The EABI conventions have traditionally been defined in terms
4132 of TYPE_MODE, regardless of the actual type. */
4133 info->fpr_p = ((GET_MODE_CLASS (mode) == MODE_FLOAT
4134 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
4135 && GET_MODE_SIZE (mode) <= UNITS_PER_FPVALUE);
4140 /* Only leading floating-point scalars are passed in
4141 floating-point registers. We also handle vector floats the same
4142 say, which is OK because they are not covered by the standard ABI. */
4143 info->fpr_p = (!cum->gp_reg_found
4144 && cum->arg_number < 2
4145 && (type == 0 || SCALAR_FLOAT_TYPE_P (type)
4146 || VECTOR_FLOAT_TYPE_P (type))
4147 && (GET_MODE_CLASS (mode) == MODE_FLOAT
4148 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
4149 && GET_MODE_SIZE (mode) <= UNITS_PER_FPVALUE);
4154 /* Scalar and complex floating-point types are passed in
4155 floating-point registers. */
4156 info->fpr_p = (named
4157 && (type == 0 || FLOAT_TYPE_P (type))
4158 && (GET_MODE_CLASS (mode) == MODE_FLOAT
4159 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
4160 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
4161 && GET_MODE_UNIT_SIZE (mode) <= UNITS_PER_FPVALUE);
4163 /* ??? According to the ABI documentation, the real and imaginary
4164 parts of complex floats should be passed in individual registers.
4165 The real and imaginary parts of stack arguments are supposed
4166 to be contiguous and there should be an extra word of padding
4169 This has two problems. First, it makes it impossible to use a
4170 single "void *" va_list type, since register and stack arguments
4171 are passed differently. (At the time of writing, MIPSpro cannot
4172 handle complex float varargs correctly.) Second, it's unclear
4173 what should happen when there is only one register free.
4175 For now, we assume that named complex floats should go into FPRs
4176 if there are two FPRs free, otherwise they should be passed in the
4177 same way as a struct containing two floats. */
4179 && GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
4180 && GET_MODE_UNIT_SIZE (mode) < UNITS_PER_FPVALUE)
4182 if (cum->num_gprs >= MAX_ARGS_IN_REGISTERS - 1)
4183 info->fpr_p = false;
4193 /* See whether the argument has doubleword alignment. */
4194 doubleword_aligned_p = FUNCTION_ARG_BOUNDARY (mode, type) > BITS_PER_WORD;
4196 /* Set REG_OFFSET to the register count we're interested in.
4197 The EABI allocates the floating-point registers separately,
4198 but the other ABIs allocate them like integer registers. */
4199 info->reg_offset = (mips_abi == ABI_EABI && info->fpr_p
4203 /* Advance to an even register if the argument is doubleword-aligned. */
4204 if (doubleword_aligned_p)
4205 info->reg_offset += info->reg_offset & 1;
4207 /* Work out the offset of a stack argument. */
4208 info->stack_offset = cum->stack_words;
4209 if (doubleword_aligned_p)
4210 info->stack_offset += info->stack_offset & 1;
4212 max_regs = MAX_ARGS_IN_REGISTERS - info->reg_offset;
4214 /* Partition the argument between registers and stack. */
4215 info->reg_words = MIN (num_words, max_regs);
4216 info->stack_words = num_words - info->reg_words;
4220 /* INFO describes an argument that is passed in a single-register value.
4221 Return the register it uses, assuming that FPRs are available if
4225 mips_arg_regno (const struct mips_arg_info *info, bool hard_float_p)
4227 if (!info->fpr_p || !hard_float_p)
4228 return GP_ARG_FIRST + info->reg_offset;
4229 else if (mips_abi == ABI_32 && TARGET_DOUBLE_FLOAT && info->reg_offset > 0)
4230 /* In o32, the second argument is always passed in $f14
4231 for TARGET_DOUBLE_FLOAT, regardless of whether the
4232 first argument was a word or doubleword. */
4233 return FP_ARG_FIRST + 2;
4235 return FP_ARG_FIRST + info->reg_offset;
4238 /* Implement FUNCTION_ARG_ADVANCE. */
4241 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4242 tree type, int named)
4244 struct mips_arg_info info;
4246 mips_arg_info (cum, mode, type, named, &info);
4249 cum->gp_reg_found = true;
4251 /* See the comment above the cumulative args structure in mips.h
4252 for an explanation of what this code does. It assumes the O32
4253 ABI, which passes at most 2 arguments in float registers. */
4254 if (cum->arg_number < 2 && info.fpr_p)
4255 cum->fp_code += (mode == SFmode ? 1 : 2) << (cum->arg_number * 2);
4257 if (mips_abi != ABI_EABI || !info.fpr_p)
4258 cum->num_gprs = info.reg_offset + info.reg_words;
4259 else if (info.reg_words > 0)
4260 cum->num_fprs += MAX_FPRS_PER_FMT;
4262 if (info.stack_words > 0)
4263 cum->stack_words = info.stack_offset + info.stack_words;
4268 /* Implement FUNCTION_ARG. */
4271 function_arg (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
4272 tree type, int named)
4274 struct mips_arg_info info;
4276 /* We will be called with a mode of VOIDmode after the last argument
4277 has been seen. Whatever we return will be passed to the call
4278 insn. If we need a mips16 fp_code, return a REG with the code
4279 stored as the mode. */
4280 if (mode == VOIDmode)
4282 if (TARGET_MIPS16 && cum->fp_code != 0)
4283 return gen_rtx_REG ((enum machine_mode) cum->fp_code, 0);
4289 mips_arg_info (cum, mode, type, named, &info);
4291 /* Return straight away if the whole argument is passed on the stack. */
4292 if (info.reg_offset == MAX_ARGS_IN_REGISTERS)
4296 && TREE_CODE (type) == RECORD_TYPE
4298 && TYPE_SIZE_UNIT (type)
4299 && host_integerp (TYPE_SIZE_UNIT (type), 1)
4302 /* The Irix 6 n32/n64 ABIs say that if any 64-bit chunk of the
4303 structure contains a double in its entirety, then that 64-bit
4304 chunk is passed in a floating point register. */
4307 /* First check to see if there is any such field. */
4308 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4309 if (TREE_CODE (field) == FIELD_DECL
4310 && TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
4311 && TYPE_PRECISION (TREE_TYPE (field)) == BITS_PER_WORD
4312 && host_integerp (bit_position (field), 0)
4313 && int_bit_position (field) % BITS_PER_WORD == 0)
4318 /* Now handle the special case by returning a PARALLEL
4319 indicating where each 64-bit chunk goes. INFO.REG_WORDS
4320 chunks are passed in registers. */
4322 HOST_WIDE_INT bitpos;
4325 /* assign_parms checks the mode of ENTRY_PARM, so we must
4326 use the actual mode here. */
4327 ret = gen_rtx_PARALLEL (mode, rtvec_alloc (info.reg_words));
4330 field = TYPE_FIELDS (type);
4331 for (i = 0; i < info.reg_words; i++)
4335 for (; field; field = TREE_CHAIN (field))
4336 if (TREE_CODE (field) == FIELD_DECL
4337 && int_bit_position (field) >= bitpos)
4341 && int_bit_position (field) == bitpos
4342 && TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
4343 && !TARGET_SOFT_FLOAT
4344 && TYPE_PRECISION (TREE_TYPE (field)) == BITS_PER_WORD)
4345 reg = gen_rtx_REG (DFmode, FP_ARG_FIRST + info.reg_offset + i);
4347 reg = gen_rtx_REG (DImode, GP_ARG_FIRST + info.reg_offset + i);
4350 = gen_rtx_EXPR_LIST (VOIDmode, reg,
4351 GEN_INT (bitpos / BITS_PER_UNIT));
4353 bitpos += BITS_PER_WORD;
4359 /* Handle the n32/n64 conventions for passing complex floating-point
4360 arguments in FPR pairs. The real part goes in the lower register
4361 and the imaginary part goes in the upper register. */
4364 && GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
4367 enum machine_mode inner;
4370 inner = GET_MODE_INNER (mode);
4371 reg = FP_ARG_FIRST + info.reg_offset;
4372 if (info.reg_words * UNITS_PER_WORD == GET_MODE_SIZE (inner))
4374 /* Real part in registers, imaginary part on stack. */
4375 gcc_assert (info.stack_words == info.reg_words);
4376 return gen_rtx_REG (inner, reg);
4380 gcc_assert (info.stack_words == 0);
4381 real = gen_rtx_EXPR_LIST (VOIDmode,
4382 gen_rtx_REG (inner, reg),
4384 imag = gen_rtx_EXPR_LIST (VOIDmode,
4386 reg + info.reg_words / 2),
4387 GEN_INT (GET_MODE_SIZE (inner)));
4388 return gen_rtx_PARALLEL (mode, gen_rtvec (2, real, imag));
4392 return gen_rtx_REG (mode, mips_arg_regno (&info, TARGET_HARD_FLOAT));
4396 /* Implement TARGET_ARG_PARTIAL_BYTES. */
4399 mips_arg_partial_bytes (CUMULATIVE_ARGS *cum,
4400 enum machine_mode mode, tree type, bool named)
4402 struct mips_arg_info info;
4404 mips_arg_info (cum, mode, type, named, &info);
4405 return info.stack_words > 0 ? info.reg_words * UNITS_PER_WORD : 0;
4409 /* Implement FUNCTION_ARG_BOUNDARY. Every parameter gets at least
4410 PARM_BOUNDARY bits of alignment, but will be given anything up
4411 to STACK_BOUNDARY bits if the type requires it. */
4414 function_arg_boundary (enum machine_mode mode, tree type)
4416 unsigned int alignment;
4418 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
4419 if (alignment < PARM_BOUNDARY)
4420 alignment = PARM_BOUNDARY;
4421 if (alignment > STACK_BOUNDARY)
4422 alignment = STACK_BOUNDARY;
4426 /* Return true if FUNCTION_ARG_PADDING (MODE, TYPE) should return
4427 upward rather than downward. In other words, return true if the
4428 first byte of the stack slot has useful data, false if the last
4432 mips_pad_arg_upward (enum machine_mode mode, const_tree type)
4434 /* On little-endian targets, the first byte of every stack argument
4435 is passed in the first byte of the stack slot. */
4436 if (!BYTES_BIG_ENDIAN)
4439 /* Otherwise, integral types are padded downward: the last byte of a
4440 stack argument is passed in the last byte of the stack slot. */
4442 ? INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
4443 : GET_MODE_CLASS (mode) == MODE_INT)
4446 /* Big-endian o64 pads floating-point arguments downward. */
4447 if (mips_abi == ABI_O64)
4448 if (type != 0 ? FLOAT_TYPE_P (type) : GET_MODE_CLASS (mode) == MODE_FLOAT)
4451 /* Other types are padded upward for o32, o64, n32 and n64. */
4452 if (mips_abi != ABI_EABI)
4455 /* Arguments smaller than a stack slot are padded downward. */
4456 if (mode != BLKmode)
4457 return (GET_MODE_BITSIZE (mode) >= PARM_BOUNDARY);
4459 return (int_size_in_bytes (type) >= (PARM_BOUNDARY / BITS_PER_UNIT));
4463 /* Likewise BLOCK_REG_PADDING (MODE, TYPE, ...). Return !BYTES_BIG_ENDIAN
4464 if the least significant byte of the register has useful data. Return
4465 the opposite if the most significant byte does. */
4468 mips_pad_reg_upward (enum machine_mode mode, tree type)
4470 /* No shifting is required for floating-point arguments. */
4471 if (type != 0 ? FLOAT_TYPE_P (type) : GET_MODE_CLASS (mode) == MODE_FLOAT)
4472 return !BYTES_BIG_ENDIAN;
4474 /* Otherwise, apply the same padding to register arguments as we do
4475 to stack arguments. */
4476 return mips_pad_arg_upward (mode, type);
4480 mips_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4481 tree type, int *pretend_size ATTRIBUTE_UNUSED,
4484 CUMULATIVE_ARGS local_cum;
4485 int gp_saved, fp_saved;
4487 /* The caller has advanced CUM up to, but not beyond, the last named
4488 argument. Advance a local copy of CUM past the last "real" named
4489 argument, to find out how many registers are left over. */
4492 FUNCTION_ARG_ADVANCE (local_cum, mode, type, 1);
4494 /* Found out how many registers we need to save. */
4495 gp_saved = MAX_ARGS_IN_REGISTERS - local_cum.num_gprs;
4496 fp_saved = (EABI_FLOAT_VARARGS_P
4497 ? MAX_ARGS_IN_REGISTERS - local_cum.num_fprs
4506 ptr = plus_constant (virtual_incoming_args_rtx,
4507 REG_PARM_STACK_SPACE (cfun->decl)
4508 - gp_saved * UNITS_PER_WORD);
4509 mem = gen_rtx_MEM (BLKmode, ptr);
4510 set_mem_alias_set (mem, get_varargs_alias_set ());
4512 move_block_from_reg (local_cum.num_gprs + GP_ARG_FIRST,
4517 /* We can't use move_block_from_reg, because it will use
4519 enum machine_mode mode;
4522 /* Set OFF to the offset from virtual_incoming_args_rtx of
4523 the first float register. The FP save area lies below
4524 the integer one, and is aligned to UNITS_PER_FPVALUE bytes. */
4525 off = -gp_saved * UNITS_PER_WORD;
4526 off &= ~(UNITS_PER_FPVALUE - 1);
4527 off -= fp_saved * UNITS_PER_FPREG;
4529 mode = TARGET_SINGLE_FLOAT ? SFmode : DFmode;
4531 for (i = local_cum.num_fprs; i < MAX_ARGS_IN_REGISTERS;
4532 i += MAX_FPRS_PER_FMT)
4536 ptr = plus_constant (virtual_incoming_args_rtx, off);
4537 mem = gen_rtx_MEM (mode, ptr);
4538 set_mem_alias_set (mem, get_varargs_alias_set ());
4539 mips_emit_move (mem, gen_rtx_REG (mode, FP_ARG_FIRST + i));
4540 off += UNITS_PER_HWFPVALUE;
4544 if (REG_PARM_STACK_SPACE (cfun->decl) == 0)
4545 cfun->machine->varargs_size = (gp_saved * UNITS_PER_WORD
4546 + fp_saved * UNITS_PER_FPREG);
4549 /* Create the va_list data type.
4550 We keep 3 pointers, and two offsets.
4551 Two pointers are to the overflow area, which starts at the CFA.
4552 One of these is constant, for addressing into the GPR save area below it.
4553 The other is advanced up the stack through the overflow region.
4554 The third pointer is to the GPR save area. Since the FPR save area
4555 is just below it, we can address FPR slots off this pointer.
4556 We also keep two one-byte offsets, which are to be subtracted from the
4557 constant pointers to yield addresses in the GPR and FPR save areas.
4558 These are downcounted as float or non-float arguments are used,
4559 and when they get to zero, the argument must be obtained from the
4561 If !EABI_FLOAT_VARARGS_P, then no FPR save area exists, and a single
4562 pointer is enough. It's started at the GPR save area, and is
4564 Note that the GPR save area is not constant size, due to optimization
4565 in the prologue. Hence, we can't use a design with two pointers
4566 and two offsets, although we could have designed this with two pointers
4567 and three offsets. */
4570 mips_build_builtin_va_list (void)
4572 if (EABI_FLOAT_VARARGS_P)
4574 tree f_ovfl, f_gtop, f_ftop, f_goff, f_foff, f_res, record;
4577 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4579 f_ovfl = build_decl (FIELD_DECL, get_identifier ("__overflow_argptr"),
4581 f_gtop = build_decl (FIELD_DECL, get_identifier ("__gpr_top"),
4583 f_ftop = build_decl (FIELD_DECL, get_identifier ("__fpr_top"),
4585 f_goff = build_decl (FIELD_DECL, get_identifier ("__gpr_offset"),
4586 unsigned_char_type_node);
4587 f_foff = build_decl (FIELD_DECL, get_identifier ("__fpr_offset"),
4588 unsigned_char_type_node);
4589 /* Explicitly pad to the size of a pointer, so that -Wpadded won't
4590 warn on every user file. */
4591 index = build_int_cst (NULL_TREE, GET_MODE_SIZE (ptr_mode) - 2 - 1);
4592 array = build_array_type (unsigned_char_type_node,
4593 build_index_type (index));
4594 f_res = build_decl (FIELD_DECL, get_identifier ("__reserved"), array);
4596 DECL_FIELD_CONTEXT (f_ovfl) = record;
4597 DECL_FIELD_CONTEXT (f_gtop) = record;
4598 DECL_FIELD_CONTEXT (f_ftop) = record;
4599 DECL_FIELD_CONTEXT (f_goff) = record;
4600 DECL_FIELD_CONTEXT (f_foff) = record;
4601 DECL_FIELD_CONTEXT (f_res) = record;
4603 TYPE_FIELDS (record) = f_ovfl;
4604 TREE_CHAIN (f_ovfl) = f_gtop;
4605 TREE_CHAIN (f_gtop) = f_ftop;
4606 TREE_CHAIN (f_ftop) = f_goff;
4607 TREE_CHAIN (f_goff) = f_foff;
4608 TREE_CHAIN (f_foff) = f_res;
4610 layout_type (record);
4613 else if (TARGET_IRIX && TARGET_IRIX6)
4614 /* On IRIX 6, this type is 'char *'. */
4615 return build_pointer_type (char_type_node);
4617 /* Otherwise, we use 'void *'. */
4618 return ptr_type_node;
4621 /* Implement va_start. */
4624 mips_va_start (tree valist, rtx nextarg)
4626 if (EABI_FLOAT_VARARGS_P)
4628 const CUMULATIVE_ARGS *cum;
4629 tree f_ovfl, f_gtop, f_ftop, f_goff, f_foff;
4630 tree ovfl, gtop, ftop, goff, foff;
4632 int gpr_save_area_size;
4633 int fpr_save_area_size;
4636 cum = ¤t_function_args_info;
4638 = (MAX_ARGS_IN_REGISTERS - cum->num_gprs) * UNITS_PER_WORD;
4640 = (MAX_ARGS_IN_REGISTERS - cum->num_fprs) * UNITS_PER_FPREG;
4642 f_ovfl = TYPE_FIELDS (va_list_type_node);
4643 f_gtop = TREE_CHAIN (f_ovfl);
4644 f_ftop = TREE_CHAIN (f_gtop);
4645 f_goff = TREE_CHAIN (f_ftop);
4646 f_foff = TREE_CHAIN (f_goff);
4648 ovfl = build3 (COMPONENT_REF, TREE_TYPE (f_ovfl), valist, f_ovfl,
4650 gtop = build3 (COMPONENT_REF, TREE_TYPE (f_gtop), valist, f_gtop,
4652 ftop = build3 (COMPONENT_REF, TREE_TYPE (f_ftop), valist, f_ftop,
4654 goff = build3 (COMPONENT_REF, TREE_TYPE (f_goff), valist, f_goff,
4656 foff = build3 (COMPONENT_REF, TREE_TYPE (f_foff), valist, f_foff,
4659 /* Emit code to initialize OVFL, which points to the next varargs
4660 stack argument. CUM->STACK_WORDS gives the number of stack
4661 words used by named arguments. */
4662 t = make_tree (TREE_TYPE (ovfl), virtual_incoming_args_rtx);
4663 if (cum->stack_words > 0)
4664 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovfl), t,
4665 size_int (cum->stack_words * UNITS_PER_WORD));
4666 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovfl), ovfl, t);
4667 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4669 /* Emit code to initialize GTOP, the top of the GPR save area. */
4670 t = make_tree (TREE_TYPE (gtop), virtual_incoming_args_rtx);
4671 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gtop), gtop, t);
4672 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4674 /* Emit code to initialize FTOP, the top of the FPR save area.
4675 This address is gpr_save_area_bytes below GTOP, rounded
4676 down to the next fp-aligned boundary. */
4677 t = make_tree (TREE_TYPE (ftop), virtual_incoming_args_rtx);
4678 fpr_offset = gpr_save_area_size + UNITS_PER_FPVALUE - 1;
4679 fpr_offset &= ~(UNITS_PER_FPVALUE - 1);
4681 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ftop), t,
4682 size_int (-fpr_offset));
4683 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ftop), ftop, t);
4684 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4686 /* Emit code to initialize GOFF, the offset from GTOP of the
4687 next GPR argument. */
4688 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (goff), goff,
4689 build_int_cst (NULL_TREE, gpr_save_area_size));
4690 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4692 /* Likewise emit code to initialize FOFF, the offset from FTOP
4693 of the next FPR argument. */
4694 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (foff), foff,
4695 build_int_cst (NULL_TREE, fpr_save_area_size));
4696 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4700 nextarg = plus_constant (nextarg, -cfun->machine->varargs_size);
4701 std_expand_builtin_va_start (valist, nextarg);
4705 /* Implement va_arg. */
4708 mips_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4710 HOST_WIDE_INT size, rsize;
4714 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, 0);
4717 type = build_pointer_type (type);
4719 size = int_size_in_bytes (type);
4720 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
4722 if (mips_abi != ABI_EABI || !EABI_FLOAT_VARARGS_P)
4723 addr = std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
4726 /* Not a simple merged stack. */
4728 tree f_ovfl, f_gtop, f_ftop, f_goff, f_foff;
4729 tree ovfl, top, off, align;
4730 HOST_WIDE_INT osize;
4733 f_ovfl = TYPE_FIELDS (va_list_type_node);
4734 f_gtop = TREE_CHAIN (f_ovfl);
4735 f_ftop = TREE_CHAIN (f_gtop);
4736 f_goff = TREE_CHAIN (f_ftop);
4737 f_foff = TREE_CHAIN (f_goff);
4739 /* We maintain separate pointers and offsets for floating-point
4740 and integer arguments, but we need similar code in both cases.
4743 TOP be the top of the register save area;
4744 OFF be the offset from TOP of the next register;
4745 ADDR_RTX be the address of the argument;
4746 RSIZE be the number of bytes used to store the argument
4747 when it's in the register save area;
4748 OSIZE be the number of bytes used to store it when it's
4749 in the stack overflow area; and
4750 PADDING be (BYTES_BIG_ENDIAN ? OSIZE - RSIZE : 0)
4752 The code we want is:
4754 1: off &= -rsize; // round down
4757 4: addr_rtx = top - off;
4762 9: ovfl += ((intptr_t) ovfl + osize - 1) & -osize;
4763 10: addr_rtx = ovfl + PADDING;
4767 [1] and [9] can sometimes be optimized away. */
4769 ovfl = build3 (COMPONENT_REF, TREE_TYPE (f_ovfl), valist, f_ovfl,
4772 if (GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT
4773 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_FPVALUE)
4775 top = build3 (COMPONENT_REF, TREE_TYPE (f_ftop), valist, f_ftop,
4777 off = build3 (COMPONENT_REF, TREE_TYPE (f_foff), valist, f_foff,
4780 /* When floating-point registers are saved to the stack,
4781 each one will take up UNITS_PER_HWFPVALUE bytes, regardless
4782 of the float's precision. */
4783 rsize = UNITS_PER_HWFPVALUE;
4785 /* Overflow arguments are padded to UNITS_PER_WORD bytes
4786 (= PARM_BOUNDARY bits). This can be different from RSIZE
4789 (1) On 32-bit targets when TYPE is a structure such as:
4791 struct s { float f; };
4793 Such structures are passed in paired FPRs, so RSIZE
4794 will be 8 bytes. However, the structure only takes
4795 up 4 bytes of memory, so OSIZE will only be 4.
4797 (2) In combinations such as -mgp64 -msingle-float
4798 -fshort-double. Doubles passed in registers
4799 will then take up 4 (UNITS_PER_HWFPVALUE) bytes,
4800 but those passed on the stack take up
4801 UNITS_PER_WORD bytes. */
4802 osize = MAX (GET_MODE_SIZE (TYPE_MODE (type)), UNITS_PER_WORD);
4806 top = build3 (COMPONENT_REF, TREE_TYPE (f_gtop), valist, f_gtop,
4808 off = build3 (COMPONENT_REF, TREE_TYPE (f_goff), valist, f_goff,
4810 if (rsize > UNITS_PER_WORD)
4812 /* [1] Emit code for: off &= -rsize. */
4813 t = build2 (BIT_AND_EXPR, TREE_TYPE (off), off,
4814 build_int_cst (NULL_TREE, -rsize));
4815 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (off), off, t);
4816 gimplify_and_add (t, pre_p);
4821 /* [2] Emit code to branch if off == 0. */
4822 t = build2 (NE_EXPR, boolean_type_node, off,
4823 build_int_cst (TREE_TYPE (off), 0));
4824 addr = build3 (COND_EXPR, ptr_type_node, t, NULL_TREE, NULL_TREE);
4826 /* [5] Emit code for: off -= rsize. We do this as a form of
4827 post-increment not available to C. Also widen for the
4828 coming pointer arithmetic. */
4829 t = fold_convert (TREE_TYPE (off), build_int_cst (NULL_TREE, rsize));
4830 t = build2 (POSTDECREMENT_EXPR, TREE_TYPE (off), off, t);
4831 t = fold_convert (sizetype, t);
4832 t = fold_build1 (NEGATE_EXPR, sizetype, t);
4834 /* [4] Emit code for: addr_rtx = top - off. On big endian machines,
4835 the argument has RSIZE - SIZE bytes of leading padding. */
4836 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (top), top, t);
4837 if (BYTES_BIG_ENDIAN && rsize > size)
4839 u = size_int (rsize - size);
4840 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, u);
4842 COND_EXPR_THEN (addr) = t;
4844 if (osize > UNITS_PER_WORD)
4846 /* [9] Emit: ovfl += ((intptr_t) ovfl + osize - 1) & -osize. */
4847 u = size_int (osize - 1);
4848 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovfl), ovfl, u);
4849 t = fold_convert (sizetype, t);
4850 u = size_int (-osize);
4851 t = build2 (BIT_AND_EXPR, sizetype, t, u);
4852 t = fold_convert (TREE_TYPE (ovfl), t);
4853 align = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovfl), ovfl, t);
4858 /* [10, 11]. Emit code to store ovfl in addr_rtx, then
4859 post-increment ovfl by osize. On big-endian machines,
4860 the argument has OSIZE - SIZE bytes of leading padding. */
4861 u = fold_convert (TREE_TYPE (ovfl),
4862 build_int_cst (NULL_TREE, osize));
4863 t = build2 (POSTINCREMENT_EXPR, TREE_TYPE (ovfl), ovfl, u);
4864 if (BYTES_BIG_ENDIAN && osize > size)
4866 u = size_int (osize - size);
4867 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, u);
4870 /* String [9] and [10,11] together. */
4872 t = build2 (COMPOUND_EXPR, TREE_TYPE (t), align, t);
4873 COND_EXPR_ELSE (addr) = t;
4875 addr = fold_convert (build_pointer_type (type), addr);
4876 addr = build_va_arg_indirect_ref (addr);
4880 addr = build_va_arg_indirect_ref (addr);
4885 /* Return true if it is possible to use left/right accesses for a
4886 bitfield of WIDTH bits starting BITPOS bits into *OP. When
4887 returning true, update *OP, *LEFT and *RIGHT as follows:
4889 *OP is a BLKmode reference to the whole field.
4891 *LEFT is a QImode reference to the first byte if big endian or
4892 the last byte if little endian. This address can be used in the
4893 left-side instructions (lwl, swl, ldl, sdl).
4895 *RIGHT is a QImode reference to the opposite end of the field and
4896 can be used in the patterning right-side instruction. */
4899 mips_get_unaligned_mem (rtx *op, unsigned int width, int bitpos,
4900 rtx *left, rtx *right)
4904 /* Check that the operand really is a MEM. Not all the extv and
4905 extzv predicates are checked. */
4909 /* Check that the size is valid. */
4910 if (width != 32 && (!TARGET_64BIT || width != 64))
4913 /* We can only access byte-aligned values. Since we are always passed
4914 a reference to the first byte of the field, it is not necessary to
4915 do anything with BITPOS after this check. */
4916 if (bitpos % BITS_PER_UNIT != 0)
4919 /* Reject aligned bitfields: we want to use a normal load or store
4920 instead of a left/right pair. */
4921 if (MEM_ALIGN (*op) >= width)
4924 /* Adjust *OP to refer to the whole field. This also has the effect
4925 of legitimizing *OP's address for BLKmode, possibly simplifying it. */
4926 *op = adjust_address (*op, BLKmode, 0);
4927 set_mem_size (*op, GEN_INT (width / BITS_PER_UNIT));
4929 /* Get references to both ends of the field. We deliberately don't
4930 use the original QImode *OP for FIRST since the new BLKmode one
4931 might have a simpler address. */
4932 first = adjust_address (*op, QImode, 0);
4933 last = adjust_address (*op, QImode, width / BITS_PER_UNIT - 1);
4935 /* Allocate to LEFT and RIGHT according to endianness. LEFT should
4936 be the upper word and RIGHT the lower word. */
4937 if (TARGET_BIG_ENDIAN)
4938 *left = first, *right = last;
4940 *left = last, *right = first;
4946 /* Try to emit the equivalent of (set DEST (zero_extract SRC WIDTH BITPOS)).
4947 Return true on success. We only handle cases where zero_extract is
4948 equivalent to sign_extract. */
4951 mips_expand_unaligned_load (rtx dest, rtx src, unsigned int width, int bitpos)
4953 rtx left, right, temp;
4955 /* If TARGET_64BIT, the destination of a 32-bit load will be a
4956 paradoxical word_mode subreg. This is the only case in which
4957 we allow the destination to be larger than the source. */
4958 if (GET_CODE (dest) == SUBREG
4959 && GET_MODE (dest) == DImode
4960 && SUBREG_BYTE (dest) == 0
4961 && GET_MODE (SUBREG_REG (dest)) == SImode)
4962 dest = SUBREG_REG (dest);
4964 /* After the above adjustment, the destination must be the same
4965 width as the source. */
4966 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4969 if (!mips_get_unaligned_mem (&src, width, bitpos, &left, &right))
4972 temp = gen_reg_rtx (GET_MODE (dest));
4973 if (GET_MODE (dest) == DImode)
4975 emit_insn (gen_mov_ldl (temp, src, left));
4976 emit_insn (gen_mov_ldr (dest, copy_rtx (src), right, temp));
4980 emit_insn (gen_mov_lwl (temp, src, left));
4981 emit_insn (gen_mov_lwr (dest, copy_rtx (src), right, temp));
4987 /* Try to expand (set (zero_extract DEST WIDTH BITPOS) SRC). Return
4991 mips_expand_unaligned_store (rtx dest, rtx src, unsigned int width, int bitpos)
4994 enum machine_mode mode;
4996 if (!mips_get_unaligned_mem (&dest, width, bitpos, &left, &right))
4999 mode = mode_for_size (width, MODE_INT, 0);
5000 src = gen_lowpart (mode, src);
5004 emit_insn (gen_mov_sdl (dest, src, left));
5005 emit_insn (gen_mov_sdr (copy_rtx (dest), copy_rtx (src), right));
5009 emit_insn (gen_mov_swl (dest, src, left));
5010 emit_insn (gen_mov_swr (copy_rtx (dest), copy_rtx (src), right));
5015 /* Return true if X is a MEM with the same size as MODE. */
5018 mips_mem_fits_mode_p (enum machine_mode mode, rtx x)
5025 size = MEM_SIZE (x);
5026 return size && INTVAL (size) == GET_MODE_SIZE (mode);
5029 /* Return true if (zero_extract OP SIZE POSITION) can be used as the
5030 source of an "ext" instruction or the destination of an "ins"
5031 instruction. OP must be a register operand and the following
5032 conditions must hold:
5034 0 <= POSITION < GET_MODE_BITSIZE (GET_MODE (op))
5035 0 < SIZE <= GET_MODE_BITSIZE (GET_MODE (op))
5036 0 < POSITION + SIZE <= GET_MODE_BITSIZE (GET_MODE (op))
5038 Also reject lengths equal to a word as they are better handled
5039 by the move patterns. */
5042 mips_use_ins_ext_p (rtx op, rtx size, rtx position)
5044 HOST_WIDE_INT len, pos;
5046 if (!ISA_HAS_EXT_INS
5047 || !register_operand (op, VOIDmode)
5048 || GET_MODE_BITSIZE (GET_MODE (op)) > BITS_PER_WORD)
5051 len = INTVAL (size);
5052 pos = INTVAL (position);
5054 if (len <= 0 || len >= GET_MODE_BITSIZE (GET_MODE (op))
5055 || pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (op)))
5061 /* Set up globals to generate code for the ISA or processor
5062 described by INFO. */
5065 mips_set_architecture (const struct mips_cpu_info *info)
5069 mips_arch_info = info;
5070 mips_arch = info->cpu;
5071 mips_isa = info->isa;
5076 /* Likewise for tuning. */
5079 mips_set_tune (const struct mips_cpu_info *info)
5083 mips_tune_info = info;
5084 mips_tune = info->cpu;
5088 /* Initialize mips_split_addresses from the associated command-line
5091 mips_split_addresses is a half-way house between explicit
5092 relocations and the traditional assembler macros. It can
5093 split absolute 32-bit symbolic constants into a high/lo_sum
5094 pair but uses macros for other sorts of access.
5096 Like explicit relocation support for REL targets, it relies
5097 on GNU extensions in the assembler and the linker.
5099 Although this code should work for -O0, it has traditionally
5100 been treated as an optimization. */
5103 mips_init_split_addresses (void)
5105 if (!TARGET_MIPS16 && TARGET_SPLIT_ADDRESSES
5106 && optimize && !flag_pic
5107 && !ABI_HAS_64BIT_SYMBOLS)
5108 mips_split_addresses = 1;
5110 mips_split_addresses = 0;
5113 /* (Re-)Initialize information about relocs. */
5116 mips_init_relocs (void)
5118 memset (mips_split_p, '\0', sizeof (mips_split_p));
5119 memset (mips_hi_relocs, '\0', sizeof (mips_hi_relocs));
5120 memset (mips_lo_relocs, '\0', sizeof (mips_lo_relocs));
5122 if (ABI_HAS_64BIT_SYMBOLS)
5124 if (TARGET_EXPLICIT_RELOCS)
5126 mips_split_p[SYMBOL_64_HIGH] = true;
5127 mips_hi_relocs[SYMBOL_64_HIGH] = "%highest(";
5128 mips_lo_relocs[SYMBOL_64_HIGH] = "%higher(";
5130 mips_split_p[SYMBOL_64_MID] = true;
5131 mips_hi_relocs[SYMBOL_64_MID] = "%higher(";
5132 mips_lo_relocs[SYMBOL_64_MID] = "%hi(";
5134 mips_split_p[SYMBOL_64_LOW] = true;
5135 mips_hi_relocs[SYMBOL_64_LOW] = "%hi(";
5136 mips_lo_relocs[SYMBOL_64_LOW] = "%lo(";
5138 mips_split_p[SYMBOL_ABSOLUTE] = true;
5139 mips_lo_relocs[SYMBOL_ABSOLUTE] = "%lo(";
5144 if (TARGET_EXPLICIT_RELOCS || mips_split_addresses || TARGET_MIPS16)
5146 mips_split_p[SYMBOL_ABSOLUTE] = true;
5147 mips_hi_relocs[SYMBOL_ABSOLUTE] = "%hi(";
5148 mips_lo_relocs[SYMBOL_ABSOLUTE] = "%lo(";
5150 mips_lo_relocs[SYMBOL_32_HIGH] = "%hi(";
5156 /* The high part is provided by a pseudo copy of $gp. */
5157 mips_split_p[SYMBOL_GP_RELATIVE] = true;
5158 mips_lo_relocs[SYMBOL_GP_RELATIVE] = "%gprel(";
5161 if (TARGET_EXPLICIT_RELOCS)
5163 /* Small data constants are kept whole until after reload,
5164 then lowered by mips_rewrite_small_data. */
5165 mips_lo_relocs[SYMBOL_GP_RELATIVE] = "%gp_rel(";
5167 mips_split_p[SYMBOL_GOT_PAGE_OFST] = true;
5170 mips_lo_relocs[SYMBOL_GOTOFF_PAGE] = "%got_page(";
5171 mips_lo_relocs[SYMBOL_GOT_PAGE_OFST] = "%got_ofst(";
5175 mips_lo_relocs[SYMBOL_GOTOFF_PAGE] = "%got(";
5176 mips_lo_relocs[SYMBOL_GOT_PAGE_OFST] = "%lo(";
5181 /* The HIGH and LO_SUM are matched by special .md patterns. */
5182 mips_split_p[SYMBOL_GOT_DISP] = true;
5184 mips_split_p[SYMBOL_GOTOFF_DISP] = true;
5185 mips_hi_relocs[SYMBOL_GOTOFF_DISP] = "%got_hi(";
5186 mips_lo_relocs[SYMBOL_GOTOFF_DISP] = "%got_lo(";
5188 mips_split_p[SYMBOL_GOTOFF_CALL] = true;
5189 mips_hi_relocs[SYMBOL_GOTOFF_CALL] = "%call_hi(";
5190 mips_lo_relocs[SYMBOL_GOTOFF_CALL] = "%call_lo(";
5195 mips_lo_relocs[SYMBOL_GOTOFF_DISP] = "%got_disp(";
5197 mips_lo_relocs[SYMBOL_GOTOFF_DISP] = "%got(";
5198 mips_lo_relocs[SYMBOL_GOTOFF_CALL] = "%call16(";
5204 mips_split_p[SYMBOL_GOTOFF_LOADGP] = true;
5205 mips_hi_relocs[SYMBOL_GOTOFF_LOADGP] = "%hi(%neg(%gp_rel(";
5206 mips_lo_relocs[SYMBOL_GOTOFF_LOADGP] = "%lo(%neg(%gp_rel(";
5209 /* Thread-local relocation operators. */
5210 mips_lo_relocs[SYMBOL_TLSGD] = "%tlsgd(";
5211 mips_lo_relocs[SYMBOL_TLSLDM] = "%tlsldm(";
5212 mips_split_p[SYMBOL_DTPREL] = 1;
5213 mips_hi_relocs[SYMBOL_DTPREL] = "%dtprel_hi(";
5214 mips_lo_relocs[SYMBOL_DTPREL] = "%dtprel_lo(";
5215 mips_lo_relocs[SYMBOL_GOTTPREL] = "%gottprel(";
5216 mips_split_p[SYMBOL_TPREL] = 1;
5217 mips_hi_relocs[SYMBOL_TPREL] = "%tprel_hi(";
5218 mips_lo_relocs[SYMBOL_TPREL] = "%tprel_lo(";
5220 mips_lo_relocs[SYMBOL_HALF] = "%half(";
5223 static GTY(()) int was_mips16_p = -1;
5225 /* Set up the target-dependent global state so that it matches the
5226 current function's ISA mode. */
5229 mips_set_mips16_mode (int mips16_p)
5231 if (mips16_p == was_mips16_p)
5234 /* Restore base settings of various flags. */
5235 target_flags = mips_base_target_flags;
5236 align_loops = mips_base_align_loops;
5237 align_jumps = mips_base_align_jumps;
5238 align_functions = mips_base_align_functions;
5239 flag_schedule_insns = mips_base_schedule_insns;
5240 flag_reorder_blocks_and_partition = mips_base_reorder_blocks_and_partition;
5241 flag_delayed_branch = mips_flag_delayed_branch;
5245 /* Select mips16 instruction set. */
5246 target_flags |= MASK_MIPS16;
5248 /* Don't run the scheduler before reload, since it tends to
5249 increase register pressure. */
5250 flag_schedule_insns = 0;
5252 /* Don't do hot/cold partitioning. The constant layout code expects
5253 the whole function to be in a single section. */
5254 flag_reorder_blocks_and_partition = 0;
5256 /* Silently disable -mexplicit-relocs since it doesn't apply
5257 to mips16 code. Even so, it would overly pedantic to warn
5258 about "-mips16 -mexplicit-relocs", especially given that
5259 we use a %gprel() operator. */
5260 target_flags &= ~MASK_EXPLICIT_RELOCS;
5262 /* Silently disable DSP extensions. */
5263 target_flags &= ~MASK_DSP;
5264 target_flags &= ~MASK_DSPR2;
5268 /* Reset to select base non-mips16 ISA. */
5269 target_flags &= ~MASK_MIPS16;
5271 /* When using explicit relocs, we call dbr_schedule from within
5273 if (TARGET_EXPLICIT_RELOCS)
5274 flag_delayed_branch = 0;
5276 /* Provide default values for align_* for 64-bit targets. */
5279 if (align_loops == 0)
5281 if (align_jumps == 0)
5283 if (align_functions == 0)
5284 align_functions = 8;
5288 /* (Re)initialize mips target internals for new ISA. */
5289 mips_init_split_addresses ();
5290 mips_init_relocs ();
5292 if (was_mips16_p >= 0)
5293 /* Reinitialize target-dependent state. */
5296 was_mips16_p = TARGET_MIPS16;
5299 /* Implement TARGET_SET_CURRENT_FUNCTION. Decide whether the current
5300 function should use the MIPS16 ISA and switch modes accordingly. */
5303 mips_set_current_function (tree fndecl)
5307 mips16p = SYMBOL_REF_MIPS16_FUNC_P (XEXP (DECL_RTL (fndecl), 0));
5309 mips16p = mips_base_mips16;
5310 mips_set_mips16_mode (mips16p);
5313 /* Implement TARGET_HANDLE_OPTION. */
5316 mips_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
5321 if (strcmp (arg, "32") == 0)
5323 else if (strcmp (arg, "o64") == 0)
5325 else if (strcmp (arg, "n32") == 0)
5327 else if (strcmp (arg, "64") == 0)
5329 else if (strcmp (arg, "eabi") == 0)
5330 mips_abi = ABI_EABI;
5337 return mips_parse_cpu (arg) != 0;
5340 mips_isa_info = mips_parse_cpu (ACONCAT (("mips", arg, NULL)));
5341 return mips_isa_info != 0;
5343 case OPT_mno_flush_func:
5344 mips_cache_flush_func = NULL;
5347 case OPT_mcode_readable_:
5348 if (strcmp (arg, "yes") == 0)
5349 mips_code_readable = CODE_READABLE_YES;
5350 else if (strcmp (arg, "pcrel") == 0)
5351 mips_code_readable = CODE_READABLE_PCREL;
5352 else if (strcmp (arg, "no") == 0)
5353 mips_code_readable = CODE_READABLE_NO;
5363 /* Set up the threshold for data to go into the small data area, instead
5364 of the normal data area, and detect any conflicts in the switches. */
5367 override_options (void)
5369 int i, start, regno;
5370 enum machine_mode mode;
5372 #ifdef SUBTARGET_OVERRIDE_OPTIONS
5373 SUBTARGET_OVERRIDE_OPTIONS;
5376 mips_section_threshold = g_switch_set ? g_switch_value : MIPS_DEFAULT_GVALUE;
5378 /* The following code determines the architecture and register size.
5379 Similar code was added to GAS 2.14 (see tc-mips.c:md_after_parse_args()).
5380 The GAS and GCC code should be kept in sync as much as possible. */
5382 if (mips_arch_string != 0)
5383 mips_set_architecture (mips_parse_cpu (mips_arch_string));
5385 if (mips_isa_info != 0)
5387 if (mips_arch_info == 0)
5388 mips_set_architecture (mips_isa_info);
5389 else if (mips_arch_info->isa != mips_isa_info->isa)
5390 error ("-%s conflicts with the other architecture options, "
5391 "which specify a %s processor",
5392 mips_isa_info->name,
5393 mips_cpu_info_from_isa (mips_arch_info->isa)->name);
5396 if (mips_arch_info == 0)
5398 #ifdef MIPS_CPU_STRING_DEFAULT
5399 mips_set_architecture (mips_parse_cpu (MIPS_CPU_STRING_DEFAULT));
5401 mips_set_architecture (mips_cpu_info_from_isa (MIPS_ISA_DEFAULT));
5405 if (ABI_NEEDS_64BIT_REGS && !ISA_HAS_64BIT_REGS)
5406 error ("-march=%s is not compatible with the selected ABI",
5407 mips_arch_info->name);
5409 /* Optimize for mips_arch, unless -mtune selects a different processor. */
5410 if (mips_tune_string != 0)
5411 mips_set_tune (mips_parse_cpu (mips_tune_string));
5413 if (mips_tune_info == 0)
5414 mips_set_tune (mips_arch_info);
5416 /* Set cost structure for the processor. */
5418 mips_cost = &mips_rtx_cost_optimize_size;
5420 mips_cost = &mips_rtx_cost_data[mips_tune];
5422 /* If the user hasn't specified a branch cost, use the processor's
5424 if (mips_branch_cost == 0)
5425 mips_branch_cost = mips_cost->branch_cost;
5427 if ((target_flags_explicit & MASK_64BIT) != 0)
5429 /* The user specified the size of the integer registers. Make sure
5430 it agrees with the ABI and ISA. */
5431 if (TARGET_64BIT && !ISA_HAS_64BIT_REGS)
5432 error ("-mgp64 used with a 32-bit processor");
5433 else if (!TARGET_64BIT && ABI_NEEDS_64BIT_REGS)
5434 error ("-mgp32 used with a 64-bit ABI");
5435 else if (TARGET_64BIT && ABI_NEEDS_32BIT_REGS)
5436 error ("-mgp64 used with a 32-bit ABI");
5440 /* Infer the integer register size from the ABI and processor.
5441 Restrict ourselves to 32-bit registers if that's all the
5442 processor has, or if the ABI cannot handle 64-bit registers. */
5443 if (ABI_NEEDS_32BIT_REGS || !ISA_HAS_64BIT_REGS)
5444 target_flags &= ~MASK_64BIT;
5446 target_flags |= MASK_64BIT;
5449 if ((target_flags_explicit & MASK_FLOAT64) != 0)
5451 /* Really, -mfp32 and -mfp64 are ornamental options. There's
5452 only one right answer here. */
5453 if (TARGET_64BIT && TARGET_DOUBLE_FLOAT && !TARGET_FLOAT64)
5454 error ("unsupported combination: %s", "-mgp64 -mfp32 -mdouble-float");
5455 else if (!TARGET_64BIT && TARGET_FLOAT64
5456 && !(ISA_HAS_MXHC1 && mips_abi == ABI_32))
5457 error ("-mgp32 and -mfp64 can only be combined if the target"
5458 " supports the mfhc1 and mthc1 instructions");
5459 else if (TARGET_SINGLE_FLOAT && TARGET_FLOAT64)
5460 error ("unsupported combination: %s", "-mfp64 -msingle-float");
5464 /* -msingle-float selects 32-bit float registers. Otherwise the
5465 float registers should be the same size as the integer ones. */
5466 if (TARGET_64BIT && TARGET_DOUBLE_FLOAT)
5467 target_flags |= MASK_FLOAT64;
5469 target_flags &= ~MASK_FLOAT64;
5472 /* End of code shared with GAS. */
5474 if ((target_flags_explicit & MASK_LONG64) == 0)
5476 if ((mips_abi == ABI_EABI && TARGET_64BIT) || mips_abi == ABI_64)
5477 target_flags |= MASK_LONG64;
5479 target_flags &= ~MASK_LONG64;
5482 if (MIPS_MARCH_CONTROLS_SOFT_FLOAT
5483 && (target_flags_explicit & MASK_SOFT_FLOAT_ABI) == 0)
5485 /* For some configurations, it is useful to have -march control
5486 the default setting of MASK_SOFT_FLOAT_ABI. */
5487 switch ((int) mips_arch)
5489 case PROCESSOR_R4100:
5490 case PROCESSOR_R4111:
5491 case PROCESSOR_R4120:
5492 case PROCESSOR_R4130:
5493 target_flags |= MASK_SOFT_FLOAT_ABI;
5497 target_flags &= ~MASK_SOFT_FLOAT_ABI;
5503 flag_pcc_struct_return = 0;
5505 if ((target_flags_explicit & MASK_BRANCHLIKELY) == 0)
5507 /* If neither -mbranch-likely nor -mno-branch-likely was given
5508 on the command line, set MASK_BRANCHLIKELY based on the target
5511 By default, we enable use of Branch Likely instructions on
5512 all architectures which support them with the following
5513 exceptions: when creating MIPS32 or MIPS64 code, and when
5514 tuning for architectures where their use tends to hurt
5517 The MIPS32 and MIPS64 architecture specifications say "Software
5518 is strongly encouraged to avoid use of Branch Likely
5519 instructions, as they will be removed from a future revision
5520 of the [MIPS32 and MIPS64] architecture." Therefore, we do not
5521 issue those instructions unless instructed to do so by
5523 if (ISA_HAS_BRANCHLIKELY
5524 && !(ISA_MIPS32 || ISA_MIPS32R2 || ISA_MIPS64)
5525 && !(TUNE_MIPS5500 || TUNE_SB1))
5526 target_flags |= MASK_BRANCHLIKELY;
5528 target_flags &= ~MASK_BRANCHLIKELY;
5530 if (TARGET_BRANCHLIKELY && !ISA_HAS_BRANCHLIKELY)
5531 warning (0, "generation of Branch Likely instructions enabled, but not supported by architecture");
5533 /* The effect of -mabicalls isn't defined for the EABI. */
5534 if (mips_abi == ABI_EABI && TARGET_ABICALLS)
5536 error ("unsupported combination: %s", "-mabicalls -mabi=eabi");
5537 target_flags &= ~MASK_ABICALLS;
5540 /* MIPS16 cannot generate PIC yet. */
5541 if (TARGET_MIPS16 && (flag_pic || TARGET_ABICALLS))
5543 sorry ("MIPS16 PIC");
5544 target_flags &= ~MASK_ABICALLS;
5545 flag_pic = flag_pie = flag_shlib = 0;
5548 if (TARGET_ABICALLS)
5550 /* We need to set flag_pic for executables as well as DSOs
5551 because we may reference symbols that are not defined in
5552 the final executable. (MIPS does not use things like
5553 copy relocs, for example.)
5555 Also, there is a body of code that uses __PIC__ to distinguish
5556 between -mabicalls and -mno-abicalls code. */
5558 if (mips_section_threshold > 0)
5559 warning (0, "%<-G%> is incompatible with %<-mabicalls%>");
5562 if (TARGET_VXWORKS_RTP && mips_section_threshold > 0)
5563 warning (0, "-G and -mrtp are incompatible");
5565 /* -mvr4130-align is a "speed over size" optimization: it usually produces
5566 faster code, but at the expense of more nops. Enable it at -O3 and
5568 if (optimize > 2 && (target_flags_explicit & MASK_VR4130_ALIGN) == 0)
5569 target_flags |= MASK_VR4130_ALIGN;
5571 /* Prefer a call to memcpy over inline code when optimizing for size,
5572 though see MOVE_RATIO in mips.h. */
5573 if (optimize_size && (target_flags_explicit & MASK_MEMCPY) == 0)
5574 target_flags |= MASK_MEMCPY;
5576 #ifdef MIPS_TFMODE_FORMAT
5577 REAL_MODE_FORMAT (TFmode) = &MIPS_TFMODE_FORMAT;
5580 /* Make sure that the user didn't turn off paired single support when
5581 MIPS-3D support is requested. */
5582 if (TARGET_MIPS3D && (target_flags_explicit & MASK_PAIRED_SINGLE_FLOAT)
5583 && !TARGET_PAIRED_SINGLE_FLOAT)
5584 error ("-mips3d requires -mpaired-single");
5586 /* If TARGET_MIPS3D, enable MASK_PAIRED_SINGLE_FLOAT. */
5588 target_flags |= MASK_PAIRED_SINGLE_FLOAT;
5590 /* Make sure that when TARGET_PAIRED_SINGLE_FLOAT is true, TARGET_FLOAT64
5591 and TARGET_HARD_FLOAT are both true. */
5592 if (TARGET_PAIRED_SINGLE_FLOAT && !(TARGET_FLOAT64 && TARGET_HARD_FLOAT))
5593 error ("-mips3d/-mpaired-single must be used with -mfp64 -mhard-float");
5595 /* Make sure that the ISA supports TARGET_PAIRED_SINGLE_FLOAT when it is
5597 if (TARGET_PAIRED_SINGLE_FLOAT && !ISA_MIPS64)
5598 error ("-mips3d/-mpaired-single must be used with -mips64");
5600 /* If TARGET_DSPR2, enable MASK_DSP. */
5602 target_flags |= MASK_DSP;
5604 mips_print_operand_punct['?'] = 1;
5605 mips_print_operand_punct['#'] = 1;
5606 mips_print_operand_punct['/'] = 1;
5607 mips_print_operand_punct['&'] = 1;
5608 mips_print_operand_punct['!'] = 1;
5609 mips_print_operand_punct['*'] = 1;
5610 mips_print_operand_punct['@'] = 1;
5611 mips_print_operand_punct['.'] = 1;
5612 mips_print_operand_punct['('] = 1;
5613 mips_print_operand_punct[')'] = 1;
5614 mips_print_operand_punct['['] = 1;
5615 mips_print_operand_punct[']'] = 1;
5616 mips_print_operand_punct['<'] = 1;
5617 mips_print_operand_punct['>'] = 1;
5618 mips_print_operand_punct['{'] = 1;
5619 mips_print_operand_punct['}'] = 1;
5620 mips_print_operand_punct['^'] = 1;
5621 mips_print_operand_punct['$'] = 1;
5622 mips_print_operand_punct['+'] = 1;
5623 mips_print_operand_punct['~'] = 1;
5625 /* Set up array to map GCC register number to debug register number.
5626 Ignore the special purpose register numbers. */
5628 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5630 mips_dbx_regno[i] = INVALID_REGNUM;
5631 if (GP_REG_P (i) || FP_REG_P (i) || ALL_COP_REG_P (i))
5632 mips_dwarf_regno[i] = i;
5634 mips_dwarf_regno[i] = INVALID_REGNUM;
5637 start = GP_DBX_FIRST - GP_REG_FIRST;
5638 for (i = GP_REG_FIRST; i <= GP_REG_LAST; i++)
5639 mips_dbx_regno[i] = i + start;
5641 start = FP_DBX_FIRST - FP_REG_FIRST;
5642 for (i = FP_REG_FIRST; i <= FP_REG_LAST; i++)
5643 mips_dbx_regno[i] = i + start;
5645 /* HI and LO debug registers use big-endian ordering. */
5646 mips_dbx_regno[HI_REGNUM] = MD_DBX_FIRST + 0;
5647 mips_dbx_regno[LO_REGNUM] = MD_DBX_FIRST + 1;
5648 mips_dwarf_regno[HI_REGNUM] = MD_REG_FIRST + 0;
5649 mips_dwarf_regno[LO_REGNUM] = MD_REG_FIRST + 1;
5650 for (i = DSP_ACC_REG_FIRST; i <= DSP_ACC_REG_LAST; i += 2)
5652 mips_dwarf_regno[i + TARGET_LITTLE_ENDIAN] = i;
5653 mips_dwarf_regno[i + TARGET_BIG_ENDIAN] = i + 1;
5656 /* Set up array giving whether a given register can hold a given mode. */
5658 for (mode = VOIDmode;
5659 mode != MAX_MACHINE_MODE;
5660 mode = (enum machine_mode) ((int)mode + 1))
5662 register int size = GET_MODE_SIZE (mode);
5663 register enum mode_class class = GET_MODE_CLASS (mode);
5665 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5669 if (mode == CCV2mode)
5672 && (regno - ST_REG_FIRST) % 2 == 0);
5674 else if (mode == CCV4mode)
5677 && (regno - ST_REG_FIRST) % 4 == 0);
5679 else if (mode == CCmode)
5682 temp = (regno == FPSW_REGNUM);
5684 temp = (ST_REG_P (regno) || GP_REG_P (regno)
5685 || FP_REG_P (regno));
5688 else if (GP_REG_P (regno))
5689 temp = ((regno & 1) == 0 || size <= UNITS_PER_WORD);
5691 else if (FP_REG_P (regno))
5692 temp = ((((regno % MAX_FPRS_PER_FMT) == 0)
5693 || (MIN_FPRS_PER_FMT == 1
5694 && size <= UNITS_PER_FPREG))
5695 && (((class == MODE_FLOAT || class == MODE_COMPLEX_FLOAT
5696 || class == MODE_VECTOR_FLOAT)
5697 && size <= UNITS_PER_FPVALUE)
5698 /* Allow integer modes that fit into a single
5699 register. We need to put integers into FPRs
5700 when using instructions like cvt and trunc.
5701 We can't allow sizes smaller than a word,
5702 the FPU has no appropriate load/store
5703 instructions for those. */
5704 || (class == MODE_INT
5705 && size >= MIN_UNITS_PER_WORD
5706 && size <= UNITS_PER_FPREG)
5707 /* Allow TFmode for CCmode reloads. */
5708 || (ISA_HAS_8CC && mode == TFmode)));
5710 else if (ACC_REG_P (regno))
5711 temp = (INTEGRAL_MODE_P (mode)
5712 && size <= UNITS_PER_WORD * 2
5713 && (size <= UNITS_PER_WORD
5714 || regno == MD_REG_FIRST
5715 || (DSP_ACC_REG_P (regno)
5716 && ((regno - DSP_ACC_REG_FIRST) & 1) == 0)));
5718 else if (ALL_COP_REG_P (regno))
5719 temp = (class == MODE_INT && size <= UNITS_PER_WORD);
5723 mips_hard_regno_mode_ok[(int)mode][regno] = temp;
5727 /* Save GPR registers in word_mode sized hunks. word_mode hasn't been
5728 initialized yet, so we can't use that here. */
5729 gpr_mode = TARGET_64BIT ? DImode : SImode;
5731 /* Function to allocate machine-dependent function status. */
5732 init_machine_status = &mips_init_machine_status;
5734 /* Default to working around R4000 errata only if the processor
5735 was selected explicitly. */
5736 if ((target_flags_explicit & MASK_FIX_R4000) == 0
5737 && mips_matching_cpu_name_p (mips_arch_info->name, "r4000"))
5738 target_flags |= MASK_FIX_R4000;
5740 /* Default to working around R4400 errata only if the processor
5741 was selected explicitly. */
5742 if ((target_flags_explicit & MASK_FIX_R4400) == 0
5743 && mips_matching_cpu_name_p (mips_arch_info->name, "r4400"))
5744 target_flags |= MASK_FIX_R4400;
5746 /* Save base state of options. */
5747 mips_base_mips16 = TARGET_MIPS16;
5748 mips_base_target_flags = target_flags;
5749 mips_base_schedule_insns = flag_schedule_insns;
5750 mips_base_reorder_blocks_and_partition = flag_reorder_blocks_and_partition;
5751 mips_base_align_loops = align_loops;
5752 mips_base_align_jumps = align_jumps;
5753 mips_base_align_functions = align_functions;
5754 mips_flag_delayed_branch = flag_delayed_branch;
5756 /* Now select the mips16 or 32-bit instruction set, as requested. */
5757 mips_set_mips16_mode (mips_base_mips16);
5760 /* Swap the register information for registers I and I + 1, which
5761 currently have the wrong endianness. Note that the registers'
5762 fixedness and call-clobberedness might have been set on the
5766 mips_swap_registers (unsigned int i)
5771 #define SWAP_INT(X, Y) (tmpi = (X), (X) = (Y), (Y) = tmpi)
5772 #define SWAP_STRING(X, Y) (tmps = (X), (X) = (Y), (Y) = tmps)
5774 SWAP_INT (fixed_regs[i], fixed_regs[i + 1]);
5775 SWAP_INT (call_used_regs[i], call_used_regs[i + 1]);
5776 SWAP_INT (call_really_used_regs[i], call_really_used_regs[i + 1]);
5777 SWAP_STRING (reg_names[i], reg_names[i + 1]);
5783 /* Implement CONDITIONAL_REGISTER_USAGE. */
5786 mips_conditional_register_usage (void)
5792 for (regno = DSP_ACC_REG_FIRST; regno <= DSP_ACC_REG_LAST; regno++)
5793 fixed_regs[regno] = call_used_regs[regno] = 1;
5795 if (!TARGET_HARD_FLOAT)
5799 for (regno = FP_REG_FIRST; regno <= FP_REG_LAST; regno++)
5800 fixed_regs[regno] = call_used_regs[regno] = 1;
5801 for (regno = ST_REG_FIRST; regno <= ST_REG_LAST; regno++)
5802 fixed_regs[regno] = call_used_regs[regno] = 1;
5804 else if (! ISA_HAS_8CC)
5808 /* We only have a single condition code register. We
5809 implement this by hiding all the condition code registers,
5810 and generating RTL that refers directly to ST_REG_FIRST. */
5811 for (regno = ST_REG_FIRST; regno <= ST_REG_LAST; regno++)
5812 fixed_regs[regno] = call_used_regs[regno] = 1;
5814 /* In mips16 mode, we permit the $t temporary registers to be used
5815 for reload. We prohibit the unused $s registers, since they
5816 are caller saved, and saving them via a mips16 register would
5817 probably waste more time than just reloading the value. */
5820 fixed_regs[18] = call_used_regs[18] = 1;
5821 fixed_regs[19] = call_used_regs[19] = 1;
5822 fixed_regs[20] = call_used_regs[20] = 1;
5823 fixed_regs[21] = call_used_regs[21] = 1;
5824 fixed_regs[22] = call_used_regs[22] = 1;
5825 fixed_regs[23] = call_used_regs[23] = 1;
5826 fixed_regs[26] = call_used_regs[26] = 1;
5827 fixed_regs[27] = call_used_regs[27] = 1;
5828 fixed_regs[30] = call_used_regs[30] = 1;
5830 /* fp20-23 are now caller saved. */
5831 if (mips_abi == ABI_64)
5834 for (regno = FP_REG_FIRST + 20; regno < FP_REG_FIRST + 24; regno++)
5835 call_really_used_regs[regno] = call_used_regs[regno] = 1;
5837 /* Odd registers from fp21 to fp31 are now caller saved. */
5838 if (mips_abi == ABI_N32)
5841 for (regno = FP_REG_FIRST + 21; regno <= FP_REG_FIRST + 31; regno+=2)
5842 call_really_used_regs[regno] = call_used_regs[regno] = 1;
5844 /* Make sure that double-register accumulator values are correctly
5845 ordered for the current endianness. */
5846 if (TARGET_LITTLE_ENDIAN)
5849 mips_swap_registers (MD_REG_FIRST);
5850 for (regno = DSP_ACC_REG_FIRST; regno <= DSP_ACC_REG_LAST; regno += 2)
5851 mips_swap_registers (regno);
5855 /* Allocate a chunk of memory for per-function machine-dependent data. */
5856 static struct machine_function *
5857 mips_init_machine_status (void)
5859 return ((struct machine_function *)
5860 ggc_alloc_cleared (sizeof (struct machine_function)));
5863 /* On the mips16, we want to allocate $24 (T_REG) before other
5864 registers for instructions for which it is possible. This helps
5865 avoid shuffling registers around in order to set up for an xor,
5866 encouraging the compiler to use a cmp instead. */
5869 mips_order_regs_for_local_alloc (void)
5873 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5874 reg_alloc_order[i] = i;
5878 /* It really doesn't matter where we put register 0, since it is
5879 a fixed register anyhow. */
5880 reg_alloc_order[0] = 24;
5881 reg_alloc_order[24] = 0;
5886 /* The MIPS debug format wants all automatic variables and arguments
5887 to be in terms of the virtual frame pointer (stack pointer before
5888 any adjustment in the function), while the MIPS 3.0 linker wants
5889 the frame pointer to be the stack pointer after the initial
5890 adjustment. So, we do the adjustment here. The arg pointer (which
5891 is eliminated) points to the virtual frame pointer, while the frame
5892 pointer (which may be eliminated) points to the stack pointer after
5893 the initial adjustments. */
5896 mips_debugger_offset (rtx addr, HOST_WIDE_INT offset)
5898 rtx offset2 = const0_rtx;
5899 rtx reg = eliminate_constant_term (addr, &offset2);
5902 offset = INTVAL (offset2);
5904 if (reg == stack_pointer_rtx || reg == frame_pointer_rtx
5905 || reg == hard_frame_pointer_rtx)
5907 HOST_WIDE_INT frame_size = (!cfun->machine->frame.initialized)
5908 ? compute_frame_size (get_frame_size ())
5909 : cfun->machine->frame.total_size;
5911 /* MIPS16 frame is smaller */
5912 if (frame_pointer_needed && TARGET_MIPS16)
5913 frame_size -= cfun->machine->frame.args_size;
5915 offset = offset - frame_size;
5918 /* sdbout_parms does not want this to crash for unrecognized cases. */
5920 else if (reg != arg_pointer_rtx)
5921 fatal_insn ("mips_debugger_offset called with non stack/frame/arg pointer",
5928 /* If OP is an UNSPEC address, return the address to which it refers,
5929 otherwise return OP itself. */
5932 mips_strip_unspec_address (rtx op)
5936 split_const (op, &base, &offset);
5937 if (UNSPEC_ADDRESS_P (base))
5938 op = plus_constant (UNSPEC_ADDRESS (base), INTVAL (offset));
5942 /* Implement the PRINT_OPERAND macro. The MIPS-specific operand codes are:
5944 'X' OP is CONST_INT, prints 32 bits in hexadecimal format = "0x%08x",
5945 'x' OP is CONST_INT, prints 16 bits in hexadecimal format = "0x%04x",
5946 'h' OP is HIGH, prints %hi(X),
5947 'd' output integer constant in decimal,
5948 'z' if the operand is 0, use $0 instead of normal operand.
5949 'D' print second part of double-word register or memory operand.
5950 'L' print low-order register of double-word register operand.
5951 'M' print high-order register of double-word register operand.
5952 'C' print part of opcode for a branch condition.
5953 'F' print part of opcode for a floating-point branch condition.
5954 'N' print part of opcode for a branch condition, inverted.
5955 'W' print part of opcode for a floating-point branch condition, inverted.
5956 'T' print 'f' for (eq:CC ...), 't' for (ne:CC ...),
5957 'z' for (eq:?I ...), 'n' for (ne:?I ...).
5958 't' like 'T', but with the EQ/NE cases reversed
5959 'Y' for a CONST_INT X, print mips_fp_conditions[X]
5960 'Z' print the operand and a comma for ISA_HAS_8CC, otherwise print nothing
5961 'R' print the reloc associated with LO_SUM
5962 'q' print DSP accumulator registers
5964 The punctuation characters are:
5966 '(' Turn on .set noreorder
5967 ')' Turn on .set reorder
5968 '[' Turn on .set noat
5970 '<' Turn on .set nomacro
5971 '>' Turn on .set macro
5972 '{' Turn on .set volatile (not GAS)
5973 '}' Turn on .set novolatile (not GAS)
5974 '&' Turn on .set noreorder if filling delay slots
5975 '*' Turn on both .set noreorder and .set nomacro if filling delay slots
5976 '!' Turn on .set nomacro if filling delay slots
5977 '#' Print nop if in a .set noreorder section.
5978 '/' Like '#', but does nothing within a delayed branch sequence
5979 '?' Print 'l' if we are to use a branch likely instead of normal branch.
5980 '@' Print the name of the assembler temporary register (at or $1).
5981 '.' Print the name of the register with a hard-wired zero (zero or $0).
5982 '^' Print the name of the pic call-through register (t9 or $25).
5983 '$' Print the name of the stack pointer register (sp or $29).
5984 '+' Print the name of the gp register (usually gp or $28).
5985 '~' Output a branch alignment to LABEL_ALIGN(NULL). */
5988 print_operand (FILE *file, rtx op, int letter)
5990 register enum rtx_code code;
5992 if (PRINT_OPERAND_PUNCT_VALID_P (letter))
5997 if (mips_branch_likely)
6002 fputs (reg_names [GP_REG_FIRST + 1], file);
6006 fputs (reg_names [PIC_FUNCTION_ADDR_REGNUM], file);
6010 fputs (reg_names [GP_REG_FIRST + 0], file);
6014 fputs (reg_names[STACK_POINTER_REGNUM], file);
6018 fputs (reg_names[PIC_OFFSET_TABLE_REGNUM], file);
6022 if (final_sequence != 0 && set_noreorder++ == 0)
6023 fputs (".set\tnoreorder\n\t", file);
6027 if (final_sequence != 0)
6029 if (set_noreorder++ == 0)
6030 fputs (".set\tnoreorder\n\t", file);
6032 if (set_nomacro++ == 0)
6033 fputs (".set\tnomacro\n\t", file);
6038 if (final_sequence != 0 && set_nomacro++ == 0)
6039 fputs ("\n\t.set\tnomacro", file);
6043 if (set_noreorder != 0)
6044 fputs ("\n\tnop", file);
6048 /* Print an extra newline so that the delayed insn is separated
6049 from the following ones. This looks neater and is consistent
6050 with non-nop delayed sequences. */
6051 if (set_noreorder != 0 && final_sequence == 0)
6052 fputs ("\n\tnop\n", file);
6056 if (set_noreorder++ == 0)
6057 fputs (".set\tnoreorder\n\t", file);
6061 if (set_noreorder == 0)
6062 error ("internal error: %%) found without a %%( in assembler pattern");
6064 else if (--set_noreorder == 0)
6065 fputs ("\n\t.set\treorder", file);
6070 if (set_noat++ == 0)
6071 fputs (".set\tnoat\n\t", file);
6076 error ("internal error: %%] found without a %%[ in assembler pattern");
6077 else if (--set_noat == 0)
6078 fputs ("\n\t.set\tat", file);
6083 if (set_nomacro++ == 0)
6084 fputs (".set\tnomacro\n\t", file);
6088 if (set_nomacro == 0)
6089 error ("internal error: %%> found without a %%< in assembler pattern");
6090 else if (--set_nomacro == 0)
6091 fputs ("\n\t.set\tmacro", file);
6096 if (set_volatile++ == 0)
6097 fputs ("#.set\tvolatile\n\t", file);
6101 if (set_volatile == 0)
6102 error ("internal error: %%} found without a %%{ in assembler pattern");
6103 else if (--set_volatile == 0)
6104 fputs ("\n\t#.set\tnovolatile", file);
6110 if (align_labels_log > 0)
6111 ASM_OUTPUT_ALIGN (file, align_labels_log);
6116 error ("PRINT_OPERAND: unknown punctuation '%c'", letter);
6125 error ("PRINT_OPERAND null pointer");
6129 code = GET_CODE (op);
6134 case EQ: fputs ("eq", file); break;
6135 case NE: fputs ("ne", file); break;
6136 case GT: fputs ("gt", file); break;
6137 case GE: fputs ("ge", file); break;
6138 case LT: fputs ("lt", file); break;
6139 case LE: fputs ("le", file); break;
6140 case GTU: fputs ("gtu", file); break;
6141 case GEU: fputs ("geu", file); break;
6142 case LTU: fputs ("ltu", file); break;
6143 case LEU: fputs ("leu", file); break;
6145 fatal_insn ("PRINT_OPERAND, invalid insn for %%C", op);
6148 else if (letter == 'N')
6151 case EQ: fputs ("ne", file); break;
6152 case NE: fputs ("eq", file); break;
6153 case GT: fputs ("le", file); break;
6154 case GE: fputs ("lt", file); break;
6155 case LT: fputs ("ge", file); break;
6156 case LE: fputs ("gt", file); break;
6157 case GTU: fputs ("leu", file); break;
6158 case GEU: fputs ("ltu", file); break;
6159 case LTU: fputs ("geu", file); break;
6160 case LEU: fputs ("gtu", file); break;
6162 fatal_insn ("PRINT_OPERAND, invalid insn for %%N", op);
6165 else if (letter == 'F')
6168 case EQ: fputs ("c1f", file); break;
6169 case NE: fputs ("c1t", file); break;
6171 fatal_insn ("PRINT_OPERAND, invalid insn for %%F", op);
6174 else if (letter == 'W')
6177 case EQ: fputs ("c1t", file); break;
6178 case NE: fputs ("c1f", file); break;
6180 fatal_insn ("PRINT_OPERAND, invalid insn for %%W", op);
6183 else if (letter == 'h')
6185 if (GET_CODE (op) == HIGH)
6188 print_operand_reloc (file, op, SYMBOL_CONTEXT_LEA, mips_hi_relocs);
6191 else if (letter == 'R')
6192 print_operand_reloc (file, op, SYMBOL_CONTEXT_LEA, mips_lo_relocs);
6194 else if (letter == 'Y')
6196 if (GET_CODE (op) == CONST_INT
6197 && ((unsigned HOST_WIDE_INT) INTVAL (op)
6198 < ARRAY_SIZE (mips_fp_conditions)))
6199 fputs (mips_fp_conditions[INTVAL (op)], file);
6201 output_operand_lossage ("invalid %%Y value");
6204 else if (letter == 'Z')
6208 print_operand (file, op, 0);
6213 else if (letter == 'q')
6218 fatal_insn ("PRINT_OPERAND, invalid insn for %%q", op);
6220 regnum = REGNO (op);
6221 if (MD_REG_P (regnum))
6222 fprintf (file, "$ac0");
6223 else if (DSP_ACC_REG_P (regnum))
6224 fprintf (file, "$ac%c", reg_names[regnum][3]);
6226 fatal_insn ("PRINT_OPERAND, invalid insn for %%q", op);
6229 else if (code == REG || code == SUBREG)
6231 register int regnum;
6234 regnum = REGNO (op);
6236 regnum = true_regnum (op);
6238 if ((letter == 'M' && ! WORDS_BIG_ENDIAN)
6239 || (letter == 'L' && WORDS_BIG_ENDIAN)
6243 fprintf (file, "%s", reg_names[regnum]);
6246 else if (code == MEM)
6249 output_address (plus_constant (XEXP (op, 0), 4));
6251 output_address (XEXP (op, 0));
6254 else if (letter == 'x' && GET_CODE (op) == CONST_INT)
6255 fprintf (file, HOST_WIDE_INT_PRINT_HEX, 0xffff & INTVAL(op));
6257 else if (letter == 'X' && GET_CODE(op) == CONST_INT)
6258 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (op));
6260 else if (letter == 'd' && GET_CODE(op) == CONST_INT)
6261 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (INTVAL(op)));
6263 else if (letter == 'z' && op == CONST0_RTX (GET_MODE (op)))
6264 fputs (reg_names[GP_REG_FIRST], file);
6266 else if (letter == 'd' || letter == 'x' || letter == 'X')
6267 output_operand_lossage ("invalid use of %%d, %%x, or %%X");
6269 else if (letter == 'T' || letter == 't')
6271 int truth = (code == NE) == (letter == 'T');
6272 fputc ("zfnt"[truth * 2 + (GET_MODE (op) == CCmode)], file);
6275 else if (CONST_GP_P (op))
6276 fputs (reg_names[GLOBAL_POINTER_REGNUM], file);
6279 output_addr_const (file, mips_strip_unspec_address (op));
6283 /* Print symbolic operand OP, which is part of a HIGH or LO_SUM
6284 in context CONTEXT. RELOCS is the array of relocations to use. */
6287 print_operand_reloc (FILE *file, rtx op, enum mips_symbol_context context,
6288 const char **relocs)
6290 enum mips_symbol_type symbol_type;
6293 if (!mips_symbolic_constant_p (op, context, &symbol_type)
6294 || relocs[symbol_type] == 0)
6295 fatal_insn ("PRINT_OPERAND, invalid operand for relocation", op);
6297 fputs (relocs[symbol_type], file);
6298 output_addr_const (file, mips_strip_unspec_address (op));
6299 for (p = relocs[symbol_type]; *p != 0; p++)
6304 /* Output address operand X to FILE. */
6307 print_operand_address (FILE *file, rtx x)
6309 struct mips_address_info addr;
6311 if (mips_classify_address (&addr, x, word_mode, true))
6315 print_operand (file, addr.offset, 0);
6316 fprintf (file, "(%s)", reg_names[REGNO (addr.reg)]);
6319 case ADDRESS_LO_SUM:
6320 print_operand_reloc (file, addr.offset, SYMBOL_CONTEXT_MEM,
6322 fprintf (file, "(%s)", reg_names[REGNO (addr.reg)]);
6325 case ADDRESS_CONST_INT:
6326 output_addr_const (file, x);
6327 fprintf (file, "(%s)", reg_names[0]);
6330 case ADDRESS_SYMBOLIC:
6331 output_addr_const (file, mips_strip_unspec_address (x));
6337 /* When using assembler macros, keep track of all of small-data externs
6338 so that mips_file_end can emit the appropriate declarations for them.
6340 In most cases it would be safe (though pointless) to emit .externs
6341 for other symbols too. One exception is when an object is within
6342 the -G limit but declared by the user to be in a section other
6343 than .sbss or .sdata. */
6346 mips_output_external (FILE *file, tree decl, const char *name)
6348 default_elf_asm_output_external (file, decl, name);
6350 /* We output the name if and only if TREE_SYMBOL_REFERENCED is
6351 set in order to avoid putting out names that are never really
6353 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
6355 if (!TARGET_EXPLICIT_RELOCS && mips_in_small_data_p (decl))
6357 fputs ("\t.extern\t", file);
6358 assemble_name (file, name);
6359 fprintf (file, ", " HOST_WIDE_INT_PRINT_DEC "\n",
6360 int_size_in_bytes (TREE_TYPE (decl)));
6362 else if (TARGET_IRIX
6363 && mips_abi == ABI_32
6364 && TREE_CODE (decl) == FUNCTION_DECL)
6366 /* In IRIX 5 or IRIX 6 for the O32 ABI, we must output a
6367 `.global name .text' directive for every used but
6368 undefined function. If we don't, the linker may perform
6369 an optimization (skipping over the insns that set $gp)
6370 when it is unsafe. */
6371 fputs ("\t.globl ", file);
6372 assemble_name (file, name);
6373 fputs (" .text\n", file);
6378 /* Emit a new filename to a stream. If we are smuggling stabs, try to
6379 put out a MIPS ECOFF file and a stab. */
6382 mips_output_filename (FILE *stream, const char *name)
6385 /* If we are emitting DWARF-2, let dwarf2out handle the ".file"
6387 if (write_symbols == DWARF2_DEBUG)
6389 else if (mips_output_filename_first_time)
6391 mips_output_filename_first_time = 0;
6392 num_source_filenames += 1;
6393 current_function_file = name;
6394 fprintf (stream, "\t.file\t%d ", num_source_filenames);
6395 output_quoted_string (stream, name);
6396 putc ('\n', stream);
6399 /* If we are emitting stabs, let dbxout.c handle this (except for
6400 the mips_output_filename_first_time case). */
6401 else if (write_symbols == DBX_DEBUG)
6404 else if (name != current_function_file
6405 && strcmp (name, current_function_file) != 0)
6407 num_source_filenames += 1;
6408 current_function_file = name;
6409 fprintf (stream, "\t.file\t%d ", num_source_filenames);
6410 output_quoted_string (stream, name);
6411 putc ('\n', stream);
6415 /* Output an ASCII string, in a space-saving way. PREFIX is the string
6416 that should be written before the opening quote, such as "\t.ascii\t"
6417 for real string data or "\t# " for a comment. */
6420 mips_output_ascii (FILE *stream, const char *string_param, size_t len,
6425 register const unsigned char *string =
6426 (const unsigned char *)string_param;
6428 fprintf (stream, "%s\"", prefix);
6429 for (i = 0; i < len; i++)
6431 register int c = string[i];
6435 if (c == '\\' || c == '\"')
6437 putc ('\\', stream);
6445 fprintf (stream, "\\%03o", c);
6449 if (cur_pos > 72 && i+1 < len)
6452 fprintf (stream, "\"\n%s\"", prefix);
6455 fprintf (stream, "\"\n");
6458 /* Implement TARGET_ASM_FILE_START. */
6461 mips_file_start (void)
6463 default_file_start ();
6467 /* Generate a special section to describe the ABI switches used to
6468 produce the resultant binary. This used to be done by the assembler
6469 setting bits in the ELF header's flags field, but we have run out of
6470 bits. GDB needs this information in order to be able to correctly
6471 debug these binaries. See the function mips_gdbarch_init() in
6472 gdb/mips-tdep.c. This is unnecessary for the IRIX 5/6 ABIs and
6473 causes unnecessary IRIX 6 ld warnings. */
6474 const char * abi_string = NULL;
6478 case ABI_32: abi_string = "abi32"; break;
6479 case ABI_N32: abi_string = "abiN32"; break;
6480 case ABI_64: abi_string = "abi64"; break;
6481 case ABI_O64: abi_string = "abiO64"; break;
6482 case ABI_EABI: abi_string = TARGET_64BIT ? "eabi64" : "eabi32"; break;
6486 /* Note - we use fprintf directly rather than calling switch_to_section
6487 because in this way we can avoid creating an allocated section. We
6488 do not want this section to take up any space in the running
6490 fprintf (asm_out_file, "\t.section .mdebug.%s\n", abi_string);
6492 /* There is no ELF header flag to distinguish long32 forms of the
6493 EABI from long64 forms. Emit a special section to help tools
6494 such as GDB. Do the same for o64, which is sometimes used with
6496 if (mips_abi == ABI_EABI || mips_abi == ABI_O64)
6497 fprintf (asm_out_file, "\t.section .gcc_compiled_long%d\n",
6498 TARGET_LONG64 ? 64 : 32);
6500 /* Restore the default section. */
6501 fprintf (asm_out_file, "\t.previous\n");
6503 #ifdef HAVE_AS_GNU_ATTRIBUTE
6504 fprintf (asm_out_file, "\t.gnu_attribute 4, %d\n",
6505 TARGET_HARD_FLOAT_ABI ? (TARGET_DOUBLE_FLOAT ? 1 : 2) : 3);
6509 /* Generate the pseudo ops that System V.4 wants. */
6510 if (TARGET_ABICALLS)
6511 fprintf (asm_out_file, "\t.abicalls\n");
6513 if (flag_verbose_asm)
6514 fprintf (asm_out_file, "\n%s -G value = %d, Arch = %s, ISA = %d\n",
6516 mips_section_threshold, mips_arch_info->name, mips_isa);
6519 #ifdef BSS_SECTION_ASM_OP
6520 /* Implement ASM_OUTPUT_ALIGNED_BSS. This differs from the default only
6521 in the use of sbss. */
6524 mips_output_aligned_bss (FILE *stream, tree decl, const char *name,
6525 unsigned HOST_WIDE_INT size, int align)
6527 extern tree last_assemble_variable_decl;
6529 if (mips_in_small_data_p (decl))
6530 switch_to_section (get_named_section (NULL, ".sbss", 0));
6532 switch_to_section (bss_section);
6533 ASM_OUTPUT_ALIGN (stream, floor_log2 (align / BITS_PER_UNIT));
6534 last_assemble_variable_decl = decl;
6535 ASM_DECLARE_OBJECT_NAME (stream, name, decl);
6536 ASM_OUTPUT_SKIP (stream, size != 0 ? size : 1);
6540 /* Implement ASM_OUTPUT_ALIGNED_DECL_COMMON. This is usually the same as the
6541 elfos.h version, but we also need to handle -muninit-const-in-rodata. */
6544 mips_output_aligned_decl_common (FILE *stream, tree decl, const char *name,
6545 unsigned HOST_WIDE_INT size,
6548 /* If the target wants uninitialized const declarations in
6549 .rdata then don't put them in .comm. */
6550 if (TARGET_EMBEDDED_DATA && TARGET_UNINIT_CONST_IN_RODATA
6551 && TREE_CODE (decl) == VAR_DECL && TREE_READONLY (decl)
6552 && (DECL_INITIAL (decl) == 0 || DECL_INITIAL (decl) == error_mark_node))
6554 if (TREE_PUBLIC (decl) && DECL_NAME (decl))
6555 targetm.asm_out.globalize_label (stream, name);
6557 switch_to_section (readonly_data_section);
6558 ASM_OUTPUT_ALIGN (stream, floor_log2 (align / BITS_PER_UNIT));
6559 mips_declare_object (stream, name, "",
6560 ":\n\t.space\t" HOST_WIDE_INT_PRINT_UNSIGNED "\n",
6564 mips_declare_common_object (stream, name, "\n\t.comm\t",
6568 /* Declare a common object of SIZE bytes using asm directive INIT_STRING.
6569 NAME is the name of the object and ALIGN is the required alignment
6570 in bytes. TAKES_ALIGNMENT_P is true if the directive takes a third
6571 alignment argument. */
6574 mips_declare_common_object (FILE *stream, const char *name,
6575 const char *init_string,
6576 unsigned HOST_WIDE_INT size,
6577 unsigned int align, bool takes_alignment_p)
6579 if (!takes_alignment_p)
6581 size += (align / BITS_PER_UNIT) - 1;
6582 size -= size % (align / BITS_PER_UNIT);
6583 mips_declare_object (stream, name, init_string,
6584 "," HOST_WIDE_INT_PRINT_UNSIGNED "\n", size);
6587 mips_declare_object (stream, name, init_string,
6588 "," HOST_WIDE_INT_PRINT_UNSIGNED ",%u\n",
6589 size, align / BITS_PER_UNIT);
6592 /* Emit either a label, .comm, or .lcomm directive. When using assembler
6593 macros, mark the symbol as written so that mips_file_end won't emit an
6594 .extern for it. STREAM is the output file, NAME is the name of the
6595 symbol, INIT_STRING is the string that should be written before the
6596 symbol and FINAL_STRING is the string that should be written after it.
6597 FINAL_STRING is a printf() format that consumes the remaining arguments. */
6600 mips_declare_object (FILE *stream, const char *name, const char *init_string,
6601 const char *final_string, ...)
6605 fputs (init_string, stream);
6606 assemble_name (stream, name);
6607 va_start (ap, final_string);
6608 vfprintf (stream, final_string, ap);
6611 if (!TARGET_EXPLICIT_RELOCS)
6613 tree name_tree = get_identifier (name);
6614 TREE_ASM_WRITTEN (name_tree) = 1;
6618 #ifdef ASM_OUTPUT_SIZE_DIRECTIVE
6619 extern int size_directive_output;
6621 /* Implement ASM_DECLARE_OBJECT_NAME. This is like most of the standard ELF
6622 definitions except that it uses mips_declare_object() to emit the label. */
6625 mips_declare_object_name (FILE *stream, const char *name,
6626 tree decl ATTRIBUTE_UNUSED)
6628 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
6629 ASM_OUTPUT_TYPE_DIRECTIVE (stream, name, "object");
6632 size_directive_output = 0;
6633 if (!flag_inhibit_size_directive && DECL_SIZE (decl))
6637 size_directive_output = 1;
6638 size = int_size_in_bytes (TREE_TYPE (decl));
6639 ASM_OUTPUT_SIZE_DIRECTIVE (stream, name, size);
6642 mips_declare_object (stream, name, "", ":\n");
6645 /* Implement ASM_FINISH_DECLARE_OBJECT. This is generic ELF stuff. */
6648 mips_finish_declare_object (FILE *stream, tree decl, int top_level, int at_end)
6652 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6653 if (!flag_inhibit_size_directive
6654 && DECL_SIZE (decl) != 0
6655 && !at_end && top_level
6656 && DECL_INITIAL (decl) == error_mark_node
6657 && !size_directive_output)
6661 size_directive_output = 1;
6662 size = int_size_in_bytes (TREE_TYPE (decl));
6663 ASM_OUTPUT_SIZE_DIRECTIVE (stream, name, size);
6668 /* Return true if X in context CONTEXT is a small data address that can
6669 be rewritten as a LO_SUM. */
6672 mips_rewrite_small_data_p (rtx x, enum mips_symbol_context context)
6674 enum mips_symbol_type symbol_type;
6676 return (TARGET_EXPLICIT_RELOCS
6677 && mips_symbolic_constant_p (x, context, &symbol_type)
6678 && symbol_type == SYMBOL_GP_RELATIVE);
6682 /* A for_each_rtx callback for mips_small_data_pattern_p. DATA is the
6683 containing MEM, or null if none. */
6686 mips_small_data_pattern_1 (rtx *loc, void *data)
6688 enum mips_symbol_context context;
6690 if (GET_CODE (*loc) == LO_SUM)
6695 if (for_each_rtx (&XEXP (*loc, 0), mips_small_data_pattern_1, *loc))
6700 context = data ? SYMBOL_CONTEXT_MEM : SYMBOL_CONTEXT_LEA;
6701 return mips_rewrite_small_data_p (*loc, context);
6704 /* Return true if OP refers to small data symbols directly, not through
6708 mips_small_data_pattern_p (rtx op)
6710 return for_each_rtx (&op, mips_small_data_pattern_1, 0);
6713 /* A for_each_rtx callback, used by mips_rewrite_small_data.
6714 DATA is the containing MEM, or null if none. */
6717 mips_rewrite_small_data_1 (rtx *loc, void *data)
6719 enum mips_symbol_context context;
6723 for_each_rtx (&XEXP (*loc, 0), mips_rewrite_small_data_1, *loc);
6727 context = data ? SYMBOL_CONTEXT_MEM : SYMBOL_CONTEXT_LEA;
6728 if (mips_rewrite_small_data_p (*loc, context))
6729 *loc = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, *loc);
6731 if (GET_CODE (*loc) == LO_SUM)
6737 /* If possible, rewrite OP so that it refers to small data using
6738 explicit relocations. */
6741 mips_rewrite_small_data (rtx op)
6743 op = copy_insn (op);
6744 for_each_rtx (&op, mips_rewrite_small_data_1, 0);
6748 /* Return true if the current function has an insn that implicitly
6752 mips_function_has_gp_insn (void)
6754 /* Don't bother rechecking if we found one last time. */
6755 if (!cfun->machine->has_gp_insn_p)
6759 push_topmost_sequence ();
6760 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6762 && GET_CODE (PATTERN (insn)) != USE
6763 && GET_CODE (PATTERN (insn)) != CLOBBER
6764 && (get_attr_got (insn) != GOT_UNSET
6765 || small_data_pattern (PATTERN (insn), VOIDmode)))
6767 pop_topmost_sequence ();
6769 cfun->machine->has_gp_insn_p = (insn != 0);
6771 return cfun->machine->has_gp_insn_p;
6775 /* Return the register that should be used as the global pointer
6776 within this function. Return 0 if the function doesn't need
6777 a global pointer. */
6780 mips_global_pointer (void)
6784 /* $gp is always available unless we're using a GOT. */
6785 if (!TARGET_USE_GOT)
6786 return GLOBAL_POINTER_REGNUM;
6788 /* We must always provide $gp when it is used implicitly. */
6789 if (!TARGET_EXPLICIT_RELOCS)
6790 return GLOBAL_POINTER_REGNUM;
6792 /* FUNCTION_PROFILER includes a jal macro, so we need to give it
6794 if (current_function_profile)
6795 return GLOBAL_POINTER_REGNUM;
6797 /* If the function has a nonlocal goto, $gp must hold the correct
6798 global pointer for the target function. */
6799 if (current_function_has_nonlocal_goto)
6800 return GLOBAL_POINTER_REGNUM;
6802 /* If the gp is never referenced, there's no need to initialize it.
6803 Note that reload can sometimes introduce constant pool references
6804 into a function that otherwise didn't need them. For example,
6805 suppose we have an instruction like:
6807 (set (reg:DF R1) (float:DF (reg:SI R2)))
6809 If R2 turns out to be constant such as 1, the instruction may have a
6810 REG_EQUAL note saying that R1 == 1.0. Reload then has the option of
6811 using this constant if R2 doesn't get allocated to a register.
6813 In cases like these, reload will have added the constant to the pool
6814 but no instruction will yet refer to it. */
6815 if (!df_regs_ever_live_p (GLOBAL_POINTER_REGNUM)
6816 && !current_function_uses_const_pool
6817 && !mips_function_has_gp_insn ())
6820 /* We need a global pointer, but perhaps we can use a call-clobbered
6821 register instead of $gp. */
6822 if (TARGET_CALL_SAVED_GP && current_function_is_leaf)
6823 for (regno = GP_REG_FIRST; regno <= GP_REG_LAST; regno++)
6824 if (!df_regs_ever_live_p (regno)
6825 && call_used_regs[regno]
6826 && !fixed_regs[regno]
6827 && regno != PIC_FUNCTION_ADDR_REGNUM)
6830 return GLOBAL_POINTER_REGNUM;
6834 /* Return true if the function return value MODE will get returned in a
6835 floating-point register. */
6838 mips_return_mode_in_fpr_p (enum machine_mode mode)
6840 return ((GET_MODE_CLASS (mode) == MODE_FLOAT
6841 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
6842 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6843 && GET_MODE_UNIT_SIZE (mode) <= UNITS_PER_HWFPVALUE);
6846 /* Return a two-character string representing a function floating-point
6847 return mode, used to name MIPS16 function stubs. */
6850 mips16_call_stub_mode_suffix (enum machine_mode mode)
6854 else if (mode == DFmode)
6856 else if (mode == SCmode)
6858 else if (mode == DCmode)
6860 else if (mode == V2SFmode)
6866 /* Return true if the current function returns its value in a floating-point
6867 register in MIPS16 mode. */
6870 mips16_cfun_returns_in_fpr_p (void)
6872 tree return_type = DECL_RESULT (current_function_decl);
6873 return (TARGET_MIPS16
6874 && TARGET_HARD_FLOAT_ABI
6875 && !aggregate_value_p (return_type, current_function_decl)
6876 && mips_return_mode_in_fpr_p (DECL_MODE (return_type)));
6880 /* Return true if the current function must save REGNO. */
6883 mips_save_reg_p (unsigned int regno)
6885 /* We only need to save $gp if TARGET_CALL_SAVED_GP and only then
6886 if we have not chosen a call-clobbered substitute. */
6887 if (regno == GLOBAL_POINTER_REGNUM)
6888 return TARGET_CALL_SAVED_GP && cfun->machine->global_pointer == regno;
6890 /* Check call-saved registers. */
6891 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
6894 /* Save both registers in an FPR pair if either one is used. This is
6895 needed for the case when MIN_FPRS_PER_FMT == 1, which allows the odd
6896 register to be used without the even register. */
6897 if (FP_REG_P (regno)
6898 && MAX_FPRS_PER_FMT == 2
6899 && df_regs_ever_live_p (regno + 1)
6900 && !call_used_regs[regno + 1])
6903 /* We need to save the old frame pointer before setting up a new one. */
6904 if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
6907 /* We need to save the incoming return address if it is ever clobbered
6908 within the function. */
6909 if (regno == GP_REG_FIRST + 31 && df_regs_ever_live_p (regno))
6914 /* $18 is a special case in mips16 code. It may be used to call
6915 a function which returns a floating point value, but it is
6916 marked in call_used_regs. */
6917 if (regno == GP_REG_FIRST + 18 && df_regs_ever_live_p (regno))
6920 /* $31 is also a special case. It will be used to copy a return
6921 value into the floating point registers if the return value is
6923 if (regno == GP_REG_FIRST + 31
6924 && mips16_cfun_returns_in_fpr_p ())
6931 /* Return the index of the lowest X in the range [0, SIZE) for which
6932 bit REGS[X] is set in MASK. Return SIZE if there is no such X. */
6935 mips16e_find_first_register (unsigned int mask, const unsigned char *regs,
6940 for (i = 0; i < size; i++)
6941 if (BITSET_P (mask, regs[i]))
6947 /* *MASK_PTR is a mask of general purpose registers and *GP_REG_SIZE_PTR
6948 is the number of bytes that they occupy. If *MASK_PTR contains REGS[X]
6949 for some X in [0, SIZE), adjust *MASK_PTR and *GP_REG_SIZE_PTR so that
6950 the same is true for all indexes (X, SIZE). */
6953 mips16e_mask_registers (unsigned int *mask_ptr, const unsigned char *regs,
6954 unsigned int size, HOST_WIDE_INT *gp_reg_size_ptr)
6958 i = mips16e_find_first_register (*mask_ptr, regs, size);
6959 for (i++; i < size; i++)
6960 if (!BITSET_P (*mask_ptr, regs[i]))
6962 *gp_reg_size_ptr += GET_MODE_SIZE (gpr_mode);
6963 *mask_ptr |= 1 << regs[i];
6967 /* Return the bytes needed to compute the frame pointer from the current
6968 stack pointer. SIZE is the size (in bytes) of the local variables.
6970 MIPS stack frames look like:
6972 Before call After call
6973 high +-----------------------+ +-----------------------+
6975 | caller's temps. | | caller's temps. |
6977 +-----------------------+ +-----------------------+
6979 | arguments on stack. | | arguments on stack. |
6981 +-----------------------+ +-----------------------+
6982 | 4 words to save | | 4 words to save |
6983 | arguments passed | | arguments passed |
6984 | in registers, even | | in registers, even |
6985 | if not passed. | | if not passed. |
6986 SP->+-----------------------+ VFP->+-----------------------+
6987 (VFP = SP+fp_sp_offset) | |\
6988 | fp register save | | fp_reg_size
6990 SP+gp_sp_offset->+-----------------------+
6992 | | gp register save | | gp_reg_size
6993 gp_reg_rounded | | |/
6994 | +-----------------------+
6995 \| alignment padding |
6996 +-----------------------+
6998 | local variables | | var_size
7000 +-----------------------+
7002 | alloca allocations |
7004 +-----------------------+
7006 cprestore_size | | GP save for V.4 abi |
7008 +-----------------------+
7010 | arguments on stack | |
7012 +-----------------------+ |
7013 | 4 words to save | | args_size
7014 | arguments passed | |
7015 | in registers, even | |
7016 | if not passed. | |
7017 low | (TARGET_OLDABI only) |/
7018 memory SP->+-----------------------+
7023 compute_frame_size (HOST_WIDE_INT size)
7026 HOST_WIDE_INT total_size; /* # bytes that the entire frame takes up */
7027 HOST_WIDE_INT var_size; /* # bytes that variables take up */
7028 HOST_WIDE_INT args_size; /* # bytes that outgoing arguments take up */
7029 HOST_WIDE_INT cprestore_size; /* # bytes that the cprestore slot takes up */
7030 HOST_WIDE_INT gp_reg_rounded; /* # bytes needed to store gp after rounding */
7031 HOST_WIDE_INT gp_reg_size; /* # bytes needed to store gp regs */
7032 HOST_WIDE_INT fp_reg_size; /* # bytes needed to store fp regs */
7033 unsigned int mask; /* mask of saved gp registers */
7034 unsigned int fmask; /* mask of saved fp registers */
7036 cfun->machine->global_pointer = mips_global_pointer ();
7042 var_size = MIPS_STACK_ALIGN (size);
7043 args_size = current_function_outgoing_args_size;
7044 cprestore_size = MIPS_STACK_ALIGN (STARTING_FRAME_OFFSET) - args_size;
7046 /* The space set aside by STARTING_FRAME_OFFSET isn't needed in leaf
7047 functions. If the function has local variables, we're committed
7048 to allocating it anyway. Otherwise reclaim it here. */
7049 if (var_size == 0 && current_function_is_leaf)
7050 cprestore_size = args_size = 0;
7052 /* The MIPS 3.0 linker does not like functions that dynamically
7053 allocate the stack and have 0 for STACK_DYNAMIC_OFFSET, since it
7054 looks like we are trying to create a second frame pointer to the
7055 function, so allocate some stack space to make it happy. */
7057 if (args_size == 0 && current_function_calls_alloca)
7058 args_size = 4 * UNITS_PER_WORD;
7060 total_size = var_size + args_size + cprestore_size;
7062 /* Calculate space needed for gp registers. */
7063 for (regno = GP_REG_FIRST; regno <= GP_REG_LAST; regno++)
7064 if (mips_save_reg_p (regno))
7066 gp_reg_size += GET_MODE_SIZE (gpr_mode);
7067 mask |= 1 << (regno - GP_REG_FIRST);
7070 /* We need to restore these for the handler. */
7071 if (current_function_calls_eh_return)
7076 regno = EH_RETURN_DATA_REGNO (i);
7077 if (regno == INVALID_REGNUM)
7079 gp_reg_size += GET_MODE_SIZE (gpr_mode);
7080 mask |= 1 << (regno - GP_REG_FIRST);
7084 /* The MIPS16e SAVE and RESTORE instructions have two ranges of registers:
7085 $a3-$a0 and $s2-$s8. If we save one register in the range, we must
7086 save all later registers too. */
7087 if (GENERATE_MIPS16E_SAVE_RESTORE)
7089 mips16e_mask_registers (&mask, mips16e_s2_s8_regs,
7090 ARRAY_SIZE (mips16e_s2_s8_regs), &gp_reg_size);
7091 mips16e_mask_registers (&mask, mips16e_a0_a3_regs,
7092 ARRAY_SIZE (mips16e_a0_a3_regs), &gp_reg_size);
7095 /* This loop must iterate over the same space as its companion in
7096 mips_for_each_saved_reg. */
7097 for (regno = (FP_REG_LAST - MAX_FPRS_PER_FMT + 1);
7098 regno >= FP_REG_FIRST;
7099 regno -= MAX_FPRS_PER_FMT)
7101 if (mips_save_reg_p (regno))
7103 fp_reg_size += MAX_FPRS_PER_FMT * UNITS_PER_FPREG;
7104 fmask |= ((1 << MAX_FPRS_PER_FMT) - 1) << (regno - FP_REG_FIRST);
7108 gp_reg_rounded = MIPS_STACK_ALIGN (gp_reg_size);
7109 total_size += gp_reg_rounded + MIPS_STACK_ALIGN (fp_reg_size);
7111 /* Add in the space required for saving incoming register arguments. */
7112 total_size += current_function_pretend_args_size;
7113 total_size += MIPS_STACK_ALIGN (cfun->machine->varargs_size);
7115 /* Save other computed information. */
7116 cfun->machine->frame.total_size = total_size;
7117 cfun->machine->frame.var_size = var_size;
7118 cfun->machine->frame.args_size = args_size;
7119 cfun->machine->frame.cprestore_size = cprestore_size;
7120 cfun->machine->frame.gp_reg_size = gp_reg_size;
7121 cfun->machine->frame.fp_reg_size = fp_reg_size;
7122 cfun->machine->frame.mask = mask;
7123 cfun->machine->frame.fmask = fmask;
7124 cfun->machine->frame.initialized = reload_completed;
7125 cfun->machine->frame.num_gp = gp_reg_size / UNITS_PER_WORD;
7126 cfun->machine->frame.num_fp = (fp_reg_size
7127 / (MAX_FPRS_PER_FMT * UNITS_PER_FPREG));
7131 HOST_WIDE_INT offset;
7133 if (GENERATE_MIPS16E_SAVE_RESTORE)
7134 /* MIPS16e SAVE and RESTORE instructions require the GP save area
7135 to be aligned at the high end with any padding at the low end.
7136 It is only safe to use this calculation for o32, where we never
7137 have pretend arguments, and where any varargs will be saved in
7138 the caller-allocated area rather than at the top of the frame. */
7139 offset = (total_size - GET_MODE_SIZE (gpr_mode));
7141 offset = (args_size + cprestore_size + var_size
7142 + gp_reg_size - GET_MODE_SIZE (gpr_mode));
7143 cfun->machine->frame.gp_sp_offset = offset;
7144 cfun->machine->frame.gp_save_offset = offset - total_size;
7148 cfun->machine->frame.gp_sp_offset = 0;
7149 cfun->machine->frame.gp_save_offset = 0;
7154 HOST_WIDE_INT offset;
7156 offset = (args_size + cprestore_size + var_size
7157 + gp_reg_rounded + fp_reg_size
7158 - MAX_FPRS_PER_FMT * UNITS_PER_FPREG);
7159 cfun->machine->frame.fp_sp_offset = offset;
7160 cfun->machine->frame.fp_save_offset = offset - total_size;
7164 cfun->machine->frame.fp_sp_offset = 0;
7165 cfun->machine->frame.fp_save_offset = 0;
7168 /* Ok, we're done. */
7172 /* Implement INITIAL_ELIMINATION_OFFSET. FROM is either the frame
7173 pointer or argument pointer. TO is either the stack pointer or
7174 hard frame pointer. */
7177 mips_initial_elimination_offset (int from, int to)
7179 HOST_WIDE_INT offset;
7181 compute_frame_size (get_frame_size ());
7183 /* Set OFFSET to the offset from the stack pointer. */
7186 case FRAME_POINTER_REGNUM:
7190 case ARG_POINTER_REGNUM:
7191 offset = (cfun->machine->frame.total_size
7192 - current_function_pretend_args_size);
7199 if (TARGET_MIPS16 && to == HARD_FRAME_POINTER_REGNUM)
7200 offset -= cfun->machine->frame.args_size;
7205 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
7206 back to a previous frame. */
7208 mips_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
7213 return get_hard_reg_initial_val (Pmode, GP_REG_FIRST + 31);
7216 /* Use FN to save or restore register REGNO. MODE is the register's
7217 mode and OFFSET is the offset of its save slot from the current
7221 mips_save_restore_reg (enum machine_mode mode, int regno,
7222 HOST_WIDE_INT offset, mips_save_restore_fn fn)
7226 mem = gen_frame_mem (mode, plus_constant (stack_pointer_rtx, offset));
7228 fn (gen_rtx_REG (mode, regno), mem);
7232 /* Call FN for each register that is saved by the current function.
7233 SP_OFFSET is the offset of the current stack pointer from the start
7237 mips_for_each_saved_reg (HOST_WIDE_INT sp_offset, mips_save_restore_fn fn)
7239 enum machine_mode fpr_mode;
7240 HOST_WIDE_INT offset;
7243 /* Save registers starting from high to low. The debuggers prefer at least
7244 the return register be stored at func+4, and also it allows us not to
7245 need a nop in the epilogue if at least one register is reloaded in
7246 addition to return address. */
7247 offset = cfun->machine->frame.gp_sp_offset - sp_offset;
7248 for (regno = GP_REG_LAST; regno >= GP_REG_FIRST; regno--)
7249 if (BITSET_P (cfun->machine->frame.mask, regno - GP_REG_FIRST))
7251 mips_save_restore_reg (gpr_mode, regno, offset, fn);
7252 offset -= GET_MODE_SIZE (gpr_mode);
7255 /* This loop must iterate over the same space as its companion in
7256 compute_frame_size. */
7257 offset = cfun->machine->frame.fp_sp_offset - sp_offset;
7258 fpr_mode = (TARGET_SINGLE_FLOAT ? SFmode : DFmode);
7259 for (regno = (FP_REG_LAST - MAX_FPRS_PER_FMT + 1);
7260 regno >= FP_REG_FIRST;
7261 regno -= MAX_FPRS_PER_FMT)
7262 if (BITSET_P (cfun->machine->frame.fmask, regno - FP_REG_FIRST))
7264 mips_save_restore_reg (fpr_mode, regno, offset, fn);
7265 offset -= GET_MODE_SIZE (fpr_mode);
7269 /* If we're generating n32 or n64 abicalls, and the current function
7270 does not use $28 as its global pointer, emit a cplocal directive.
7271 Use pic_offset_table_rtx as the argument to the directive. */
7274 mips_output_cplocal (void)
7276 if (!TARGET_EXPLICIT_RELOCS
7277 && cfun->machine->global_pointer > 0
7278 && cfun->machine->global_pointer != GLOBAL_POINTER_REGNUM)
7279 output_asm_insn (".cplocal %+", 0);
7282 /* Return the style of GP load sequence that is being used for the
7283 current function. */
7285 enum mips_loadgp_style
7286 mips_current_loadgp_style (void)
7288 if (!TARGET_USE_GOT || cfun->machine->global_pointer == 0)
7294 if (TARGET_ABSOLUTE_ABICALLS)
7295 return LOADGP_ABSOLUTE;
7297 return TARGET_NEWABI ? LOADGP_NEWABI : LOADGP_OLDABI;
7300 /* The __gnu_local_gp symbol. */
7302 static GTY(()) rtx mips_gnu_local_gp;
7304 /* If we're generating n32 or n64 abicalls, emit instructions
7305 to set up the global pointer. */
7308 mips_emit_loadgp (void)
7310 rtx addr, offset, incoming_address, base, index;
7312 switch (mips_current_loadgp_style ())
7314 case LOADGP_ABSOLUTE:
7315 if (mips_gnu_local_gp == NULL)
7317 mips_gnu_local_gp = gen_rtx_SYMBOL_REF (Pmode, "__gnu_local_gp");
7318 SYMBOL_REF_FLAGS (mips_gnu_local_gp) |= SYMBOL_FLAG_LOCAL;
7320 emit_insn (gen_loadgp_absolute (mips_gnu_local_gp));
7324 addr = XEXP (DECL_RTL (current_function_decl), 0);
7325 offset = mips_unspec_address (addr, SYMBOL_GOTOFF_LOADGP);
7326 incoming_address = gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM);
7327 emit_insn (gen_loadgp_newabi (offset, incoming_address));
7328 if (!TARGET_EXPLICIT_RELOCS)
7329 emit_insn (gen_loadgp_blockage ());
7333 base = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (VXWORKS_GOTT_BASE));
7334 index = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (VXWORKS_GOTT_INDEX));
7335 emit_insn (gen_loadgp_rtp (base, index));
7336 if (!TARGET_EXPLICIT_RELOCS)
7337 emit_insn (gen_loadgp_blockage ());
7345 /* Set up the stack and frame (if desired) for the function. */
7348 mips_output_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
7351 HOST_WIDE_INT tsize = cfun->machine->frame.total_size;
7353 #ifdef SDB_DEBUGGING_INFO
7354 if (debug_info_level != DINFO_LEVEL_TERSE && write_symbols == SDB_DEBUG)
7355 SDB_OUTPUT_SOURCE_LINE (file, DECL_SOURCE_LINE (current_function_decl));
7358 /* In mips16 mode, we may need to generate a 32 bit to handle
7359 floating point arguments. The linker will arrange for any 32-bit
7360 functions to call this stub, which will then jump to the 16-bit
7363 && TARGET_HARD_FLOAT_ABI
7364 && current_function_args_info.fp_code != 0)
7365 build_mips16_function_stub (file);
7367 /* Select the mips16 mode for this function. */
7369 fprintf (file, "\t.set\tmips16\n");
7371 fprintf (file, "\t.set\tnomips16\n");
7373 if (!FUNCTION_NAME_ALREADY_DECLARED)
7375 /* Get the function name the same way that toplev.c does before calling
7376 assemble_start_function. This is needed so that the name used here
7377 exactly matches the name used in ASM_DECLARE_FUNCTION_NAME. */
7378 fnname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
7380 if (!flag_inhibit_size_directive)
7382 fputs ("\t.ent\t", file);
7383 assemble_name (file, fnname);
7387 assemble_name (file, fnname);
7388 fputs (":\n", file);
7391 /* Stop mips_file_end from treating this function as external. */
7392 if (TARGET_IRIX && mips_abi == ABI_32)
7393 TREE_ASM_WRITTEN (DECL_NAME (cfun->decl)) = 1;
7395 if (!flag_inhibit_size_directive)
7397 /* .frame FRAMEREG, FRAMESIZE, RETREG */
7399 "\t.frame\t%s," HOST_WIDE_INT_PRINT_DEC ",%s\t\t"
7400 "# vars= " HOST_WIDE_INT_PRINT_DEC ", regs= %d/%d"
7401 ", args= " HOST_WIDE_INT_PRINT_DEC
7402 ", gp= " HOST_WIDE_INT_PRINT_DEC "\n",
7403 (reg_names[(frame_pointer_needed)
7404 ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM]),
7405 ((frame_pointer_needed && TARGET_MIPS16)
7406 ? tsize - cfun->machine->frame.args_size
7408 reg_names[GP_REG_FIRST + 31],
7409 cfun->machine->frame.var_size,
7410 cfun->machine->frame.num_gp,
7411 cfun->machine->frame.num_fp,
7412 cfun->machine->frame.args_size,
7413 cfun->machine->frame.cprestore_size);
7415 /* .mask MASK, GPOFFSET; .fmask FPOFFSET */
7416 fprintf (file, "\t.mask\t0x%08x," HOST_WIDE_INT_PRINT_DEC "\n",
7417 cfun->machine->frame.mask,
7418 cfun->machine->frame.gp_save_offset);
7419 fprintf (file, "\t.fmask\t0x%08x," HOST_WIDE_INT_PRINT_DEC "\n",
7420 cfun->machine->frame.fmask,
7421 cfun->machine->frame.fp_save_offset);
7424 OLD_SP == *FRAMEREG + FRAMESIZE => can find old_sp from nominated FP reg.
7425 HIGHEST_GP_SAVED == *FRAMEREG + FRAMESIZE + GPOFFSET => can find saved regs. */
7428 if (mips_current_loadgp_style () == LOADGP_OLDABI)
7430 /* Handle the initialization of $gp for SVR4 PIC. */
7431 if (!cfun->machine->all_noreorder_p)
7432 output_asm_insn ("%(.cpload\t%^%)", 0);
7434 output_asm_insn ("%(.cpload\t%^\n\t%<", 0);
7436 else if (cfun->machine->all_noreorder_p)
7437 output_asm_insn ("%(%<", 0);
7439 /* Tell the assembler which register we're using as the global
7440 pointer. This is needed for thunks, since they can use either
7441 explicit relocs or assembler macros. */
7442 mips_output_cplocal ();
7445 /* Make the last instruction frame related and note that it performs
7446 the operation described by FRAME_PATTERN. */
7449 mips_set_frame_expr (rtx frame_pattern)
7453 insn = get_last_insn ();
7454 RTX_FRAME_RELATED_P (insn) = 1;
7455 REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7461 /* Return a frame-related rtx that stores REG at MEM.
7462 REG must be a single register. */
7465 mips_frame_set (rtx mem, rtx reg)
7469 /* If we're saving the return address register and the dwarf return
7470 address column differs from the hard register number, adjust the
7471 note reg to refer to the former. */
7472 if (REGNO (reg) == GP_REG_FIRST + 31
7473 && DWARF_FRAME_RETURN_COLUMN != GP_REG_FIRST + 31)
7474 reg = gen_rtx_REG (GET_MODE (reg), DWARF_FRAME_RETURN_COLUMN);
7476 set = gen_rtx_SET (VOIDmode, mem, reg);
7477 RTX_FRAME_RELATED_P (set) = 1;
7483 /* Save register REG to MEM. Make the instruction frame-related. */
7486 mips_save_reg (rtx reg, rtx mem)
7488 if (GET_MODE (reg) == DFmode && !TARGET_FLOAT64)
7492 if (mips_split_64bit_move_p (mem, reg))
7493 mips_split_64bit_move (mem, reg);
7495 mips_emit_move (mem, reg);
7497 x1 = mips_frame_set (mips_subword (mem, 0), mips_subword (reg, 0));
7498 x2 = mips_frame_set (mips_subword (mem, 1), mips_subword (reg, 1));
7499 mips_set_frame_expr (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, x1, x2)));
7504 && REGNO (reg) != GP_REG_FIRST + 31
7505 && !M16_REG_P (REGNO (reg)))
7507 /* Save a non-mips16 register by moving it through a temporary.
7508 We don't need to do this for $31 since there's a special
7509 instruction for it. */
7510 mips_emit_move (MIPS_PROLOGUE_TEMP (GET_MODE (reg)), reg);
7511 mips_emit_move (mem, MIPS_PROLOGUE_TEMP (GET_MODE (reg)));
7514 mips_emit_move (mem, reg);
7516 mips_set_frame_expr (mips_frame_set (mem, reg));
7520 /* Return a move between register REGNO and memory location SP + OFFSET.
7521 Make the move a load if RESTORE_P, otherwise make it a frame-related
7525 mips16e_save_restore_reg (bool restore_p, HOST_WIDE_INT offset,
7530 mem = gen_frame_mem (SImode, plus_constant (stack_pointer_rtx, offset));
7531 reg = gen_rtx_REG (SImode, regno);
7533 ? gen_rtx_SET (VOIDmode, reg, mem)
7534 : mips_frame_set (mem, reg));
7537 /* Return RTL for a MIPS16e SAVE or RESTORE instruction; RESTORE_P says which.
7538 The instruction must:
7540 - Allocate or deallocate SIZE bytes in total; SIZE is known
7543 - Save or restore as many registers in *MASK_PTR as possible.
7544 The instruction saves the first registers at the top of the
7545 allocated area, with the other registers below it.
7547 - Save NARGS argument registers above the allocated area.
7549 (NARGS is always zero if RESTORE_P.)
7551 The SAVE and RESTORE instructions cannot save and restore all general
7552 registers, so there may be some registers left over for the caller to
7553 handle. Destructively modify *MASK_PTR so that it contains the registers
7554 that still need to be saved or restored. The caller can save these
7555 registers in the memory immediately below *OFFSET_PTR, which is a
7556 byte offset from the bottom of the allocated stack area. */
7559 mips16e_build_save_restore (bool restore_p, unsigned int *mask_ptr,
7560 HOST_WIDE_INT *offset_ptr, unsigned int nargs,
7564 HOST_WIDE_INT offset, top_offset;
7565 unsigned int i, regno;
7568 gcc_assert (cfun->machine->frame.fp_reg_size == 0);
7570 /* Calculate the number of elements in the PARALLEL. We need one element
7571 for the stack adjustment, one for each argument register save, and one
7572 for each additional register move. */
7574 for (i = 0; i < ARRAY_SIZE (mips16e_save_restore_regs); i++)
7575 if (BITSET_P (*mask_ptr, mips16e_save_restore_regs[i]))
7578 /* Create the final PARALLEL. */
7579 pattern = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (n));
7582 /* Add the stack pointer adjustment. */
7583 set = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7584 plus_constant (stack_pointer_rtx,
7585 restore_p ? size : -size));
7586 RTX_FRAME_RELATED_P (set) = 1;
7587 XVECEXP (pattern, 0, n++) = set;
7589 /* Stack offsets in the PARALLEL are relative to the old stack pointer. */
7590 top_offset = restore_p ? size : 0;
7592 /* Save the arguments. */
7593 for (i = 0; i < nargs; i++)
7595 offset = top_offset + i * GET_MODE_SIZE (gpr_mode);
7596 set = mips16e_save_restore_reg (restore_p, offset, GP_ARG_FIRST + i);
7597 XVECEXP (pattern, 0, n++) = set;
7600 /* Then fill in the other register moves. */
7601 offset = top_offset;
7602 for (i = 0; i < ARRAY_SIZE (mips16e_save_restore_regs); i++)
7604 regno = mips16e_save_restore_regs[i];
7605 if (BITSET_P (*mask_ptr, regno))
7607 offset -= UNITS_PER_WORD;
7608 set = mips16e_save_restore_reg (restore_p, offset, regno);
7609 XVECEXP (pattern, 0, n++) = set;
7610 *mask_ptr &= ~(1 << regno);
7614 /* Tell the caller what offset it should use for the remaining registers. */
7615 *offset_ptr = size + (offset - top_offset) + size;
7617 gcc_assert (n == XVECLEN (pattern, 0));
7622 /* PATTERN is a PARALLEL whose first element adds ADJUST to the stack
7623 pointer. Return true if PATTERN matches the kind of instruction
7624 generated by mips16e_build_save_restore. If INFO is nonnull,
7625 initialize it when returning true. */
7628 mips16e_save_restore_pattern_p (rtx pattern, HOST_WIDE_INT adjust,
7629 struct mips16e_save_restore_info *info)
7631 unsigned int i, nargs, mask;
7632 HOST_WIDE_INT top_offset, save_offset, offset, extra;
7633 rtx set, reg, mem, base;
7636 if (!GENERATE_MIPS16E_SAVE_RESTORE)
7639 /* Stack offsets in the PARALLEL are relative to the old stack pointer. */
7640 top_offset = adjust > 0 ? adjust : 0;
7642 /* Interpret all other members of the PARALLEL. */
7643 save_offset = top_offset - GET_MODE_SIZE (gpr_mode);
7647 for (n = 1; n < XVECLEN (pattern, 0); n++)
7649 /* Check that we have a SET. */
7650 set = XVECEXP (pattern, 0, n);
7651 if (GET_CODE (set) != SET)
7654 /* Check that the SET is a load (if restoring) or a store
7656 mem = adjust > 0 ? SET_SRC (set) : SET_DEST (set);
7660 /* Check that the address is the sum of the stack pointer and a
7661 possibly-zero constant offset. */
7662 mips_split_plus (XEXP (mem, 0), &base, &offset);
7663 if (base != stack_pointer_rtx)
7666 /* Check that SET's other operand is a register. */
7667 reg = adjust > 0 ? SET_DEST (set) : SET_SRC (set);
7671 /* Check for argument saves. */
7672 if (offset == top_offset + nargs * GET_MODE_SIZE (gpr_mode)
7673 && REGNO (reg) == GP_ARG_FIRST + nargs)
7675 else if (offset == save_offset)
7677 while (mips16e_save_restore_regs[i++] != REGNO (reg))
7678 if (i == ARRAY_SIZE (mips16e_save_restore_regs))
7681 mask |= 1 << REGNO (reg);
7682 save_offset -= GET_MODE_SIZE (gpr_mode);
7688 /* Check that the restrictions on register ranges are met. */
7690 mips16e_mask_registers (&mask, mips16e_s2_s8_regs,
7691 ARRAY_SIZE (mips16e_s2_s8_regs), &extra);
7692 mips16e_mask_registers (&mask, mips16e_a0_a3_regs,
7693 ARRAY_SIZE (mips16e_a0_a3_regs), &extra);
7697 /* Make sure that the topmost argument register is not saved twice.
7698 The checks above ensure that the same is then true for the other
7699 argument registers. */
7700 if (nargs > 0 && BITSET_P (mask, GP_ARG_FIRST + nargs - 1))
7703 /* Pass back information, if requested. */
7706 info->nargs = nargs;
7708 info->size = (adjust > 0 ? adjust : -adjust);
7714 /* Add a MIPS16e SAVE or RESTORE register-range argument to string S
7715 for the register range [MIN_REG, MAX_REG]. Return a pointer to
7716 the null terminator. */
7719 mips16e_add_register_range (char *s, unsigned int min_reg,
7720 unsigned int max_reg)
7722 if (min_reg != max_reg)
7723 s += sprintf (s, ",%s-%s", reg_names[min_reg], reg_names[max_reg]);
7725 s += sprintf (s, ",%s", reg_names[min_reg]);
7729 /* Return the assembly instruction for a MIPS16e SAVE or RESTORE instruction.
7730 PATTERN and ADJUST are as for mips16e_save_restore_pattern_p. */
7733 mips16e_output_save_restore (rtx pattern, HOST_WIDE_INT adjust)
7735 static char buffer[300];
7737 struct mips16e_save_restore_info info;
7738 unsigned int i, end;
7741 /* Parse the pattern. */
7742 if (!mips16e_save_restore_pattern_p (pattern, adjust, &info))
7745 /* Add the mnemonic. */
7746 s = strcpy (buffer, adjust > 0 ? "restore\t" : "save\t");
7749 /* Save the arguments. */
7751 s += sprintf (s, "%s-%s,", reg_names[GP_ARG_FIRST],
7752 reg_names[GP_ARG_FIRST + info.nargs - 1]);
7753 else if (info.nargs == 1)
7754 s += sprintf (s, "%s,", reg_names[GP_ARG_FIRST]);
7756 /* Emit the amount of stack space to allocate or deallocate. */
7757 s += sprintf (s, "%d", (int) info.size);
7759 /* Save or restore $16. */
7760 if (BITSET_P (info.mask, 16))
7761 s += sprintf (s, ",%s", reg_names[GP_REG_FIRST + 16]);
7763 /* Save or restore $17. */
7764 if (BITSET_P (info.mask, 17))
7765 s += sprintf (s, ",%s", reg_names[GP_REG_FIRST + 17]);
7767 /* Save or restore registers in the range $s2...$s8, which
7768 mips16e_s2_s8_regs lists in decreasing order. Note that this
7769 is a software register range; the hardware registers are not
7770 numbered consecutively. */
7771 end = ARRAY_SIZE (mips16e_s2_s8_regs);
7772 i = mips16e_find_first_register (info.mask, mips16e_s2_s8_regs, end);
7774 s = mips16e_add_register_range (s, mips16e_s2_s8_regs[end - 1],
7775 mips16e_s2_s8_regs[i]);
7777 /* Save or restore registers in the range $a0...$a3. */
7778 end = ARRAY_SIZE (mips16e_a0_a3_regs);
7779 i = mips16e_find_first_register (info.mask, mips16e_a0_a3_regs, end);
7781 s = mips16e_add_register_range (s, mips16e_a0_a3_regs[i],
7782 mips16e_a0_a3_regs[end - 1]);
7784 /* Save or restore $31. */
7785 if (BITSET_P (info.mask, 31))
7786 s += sprintf (s, ",%s", reg_names[GP_REG_FIRST + 31]);
7791 /* Return a simplified form of X using the register values in REG_VALUES.
7792 REG_VALUES[R] is the last value assigned to hard register R, or null
7793 if R has not been modified.
7795 This function is rather limited, but is good enough for our purposes. */
7798 mips16e_collect_propagate_value (rtx x, rtx *reg_values)
7802 x = avoid_constant_pool_reference (x);
7806 x0 = mips16e_collect_propagate_value (XEXP (x, 0), reg_values);
7807 return simplify_gen_unary (GET_CODE (x), GET_MODE (x),
7808 x0, GET_MODE (XEXP (x, 0)));
7811 if (ARITHMETIC_P (x))
7813 x0 = mips16e_collect_propagate_value (XEXP (x, 0), reg_values);
7814 x1 = mips16e_collect_propagate_value (XEXP (x, 1), reg_values);
7815 return simplify_gen_binary (GET_CODE (x), GET_MODE (x), x0, x1);
7819 && reg_values[REGNO (x)]
7820 && !rtx_unstable_p (reg_values[REGNO (x)]))
7821 return reg_values[REGNO (x)];
7826 /* Return true if (set DEST SRC) stores an argument register into its
7827 caller-allocated save slot, storing the number of that argument
7828 register in *REGNO_PTR if so. REG_VALUES is as for
7829 mips16e_collect_propagate_value. */
7832 mips16e_collect_argument_save_p (rtx dest, rtx src, rtx *reg_values,
7833 unsigned int *regno_ptr)
7835 unsigned int argno, regno;
7836 HOST_WIDE_INT offset, required_offset;
7839 /* Check that this is a word-mode store. */
7840 if (!MEM_P (dest) || !REG_P (src) || GET_MODE (dest) != word_mode)
7843 /* Check that the register being saved is an unmodified argument
7845 regno = REGNO (src);
7846 if (regno < GP_ARG_FIRST || regno > GP_ARG_LAST || reg_values[regno])
7848 argno = regno - GP_ARG_FIRST;
7850 /* Check whether the address is an appropriate stack pointer or
7851 frame pointer access. The frame pointer is offset from the
7852 stack pointer by the size of the outgoing arguments. */
7853 addr = mips16e_collect_propagate_value (XEXP (dest, 0), reg_values);
7854 mips_split_plus (addr, &base, &offset);
7855 required_offset = cfun->machine->frame.total_size + argno * UNITS_PER_WORD;
7856 if (base == hard_frame_pointer_rtx)
7857 required_offset -= cfun->machine->frame.args_size;
7858 else if (base != stack_pointer_rtx)
7860 if (offset != required_offset)
7867 /* A subroutine of mips_expand_prologue, called only when generating
7868 MIPS16e SAVE instructions. Search the start of the function for any
7869 instructions that save argument registers into their caller-allocated
7870 save slots. Delete such instructions and return a value N such that
7871 saving [GP_ARG_FIRST, GP_ARG_FIRST + N) would make all the deleted
7872 instructions redundant. */
7875 mips16e_collect_argument_saves (void)
7877 rtx reg_values[FIRST_PSEUDO_REGISTER];
7878 rtx insn, next, set, dest, src;
7879 unsigned int nargs, regno;
7881 push_topmost_sequence ();
7883 memset (reg_values, 0, sizeof (reg_values));
7884 for (insn = get_insns (); insn; insn = next)
7886 next = NEXT_INSN (insn);
7893 set = PATTERN (insn);
7894 if (GET_CODE (set) != SET)
7897 dest = SET_DEST (set);
7898 src = SET_SRC (set);
7899 if (mips16e_collect_argument_save_p (dest, src, reg_values, ®no))
7901 if (!BITSET_P (cfun->machine->frame.mask, regno))
7904 nargs = MAX (nargs, (regno - GP_ARG_FIRST) + 1);
7907 else if (REG_P (dest) && GET_MODE (dest) == word_mode)
7908 reg_values[REGNO (dest)]
7909 = mips16e_collect_propagate_value (src, reg_values);
7913 pop_topmost_sequence ();
7918 /* Expand the prologue into a bunch of separate insns. */
7921 mips_expand_prologue (void)
7927 if (cfun->machine->global_pointer > 0)
7928 SET_REGNO (pic_offset_table_rtx, cfun->machine->global_pointer);
7930 size = compute_frame_size (get_frame_size ());
7932 /* Save the registers. Allocate up to MIPS_MAX_FIRST_STACK_STEP
7933 bytes beforehand; this is enough to cover the register save area
7934 without going out of range. */
7935 if ((cfun->machine->frame.mask | cfun->machine->frame.fmask) != 0)
7937 HOST_WIDE_INT step1;
7939 step1 = MIN (size, MIPS_MAX_FIRST_STACK_STEP);
7941 if (GENERATE_MIPS16E_SAVE_RESTORE)
7943 HOST_WIDE_INT offset;
7944 unsigned int mask, regno;
7946 /* Try to merge argument stores into the save instruction. */
7947 nargs = mips16e_collect_argument_saves ();
7949 /* Build the save instruction. */
7950 mask = cfun->machine->frame.mask;
7951 insn = mips16e_build_save_restore (false, &mask, &offset,
7953 RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;
7956 /* Check if we need to save other registers. */
7957 for (regno = GP_REG_FIRST; regno < GP_REG_LAST; regno++)
7958 if (BITSET_P (mask, regno - GP_REG_FIRST))
7960 offset -= GET_MODE_SIZE (gpr_mode);
7961 mips_save_restore_reg (gpr_mode, regno, offset, mips_save_reg);
7966 insn = gen_add3_insn (stack_pointer_rtx,
7969 RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;
7971 mips_for_each_saved_reg (size, mips_save_reg);
7975 /* Allocate the rest of the frame. */
7978 if (SMALL_OPERAND (-size))
7979 RTX_FRAME_RELATED_P (emit_insn (gen_add3_insn (stack_pointer_rtx,
7981 GEN_INT (-size)))) = 1;
7984 mips_emit_move (MIPS_PROLOGUE_TEMP (Pmode), GEN_INT (size));
7987 /* There are no instructions to add or subtract registers
7988 from the stack pointer, so use the frame pointer as a
7989 temporary. We should always be using a frame pointer
7990 in this case anyway. */
7991 gcc_assert (frame_pointer_needed);
7992 mips_emit_move (hard_frame_pointer_rtx, stack_pointer_rtx);
7993 emit_insn (gen_sub3_insn (hard_frame_pointer_rtx,
7994 hard_frame_pointer_rtx,
7995 MIPS_PROLOGUE_TEMP (Pmode)));
7996 mips_emit_move (stack_pointer_rtx, hard_frame_pointer_rtx);
7999 emit_insn (gen_sub3_insn (stack_pointer_rtx,
8001 MIPS_PROLOGUE_TEMP (Pmode)));
8003 /* Describe the combined effect of the previous instructions. */
8005 (gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8006 plus_constant (stack_pointer_rtx, -size)));
8010 /* Set up the frame pointer, if we're using one. In mips16 code,
8011 we point the frame pointer ahead of the outgoing argument area.
8012 This should allow more variables & incoming arguments to be
8013 accessed with unextended instructions. */
8014 if (frame_pointer_needed)
8016 if (TARGET_MIPS16 && cfun->machine->frame.args_size != 0)
8018 rtx offset = GEN_INT (cfun->machine->frame.args_size);
8019 if (SMALL_OPERAND (cfun->machine->frame.args_size))
8021 (emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
8026 mips_emit_move (MIPS_PROLOGUE_TEMP (Pmode), offset);
8027 mips_emit_move (hard_frame_pointer_rtx, stack_pointer_rtx);
8028 emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
8029 hard_frame_pointer_rtx,
8030 MIPS_PROLOGUE_TEMP (Pmode)));
8032 (gen_rtx_SET (VOIDmode, hard_frame_pointer_rtx,
8033 plus_constant (stack_pointer_rtx,
8034 cfun->machine->frame.args_size)));
8038 RTX_FRAME_RELATED_P (mips_emit_move (hard_frame_pointer_rtx,
8039 stack_pointer_rtx)) = 1;
8042 mips_emit_loadgp ();
8044 /* If generating o32/o64 abicalls, save $gp on the stack. */
8045 if (TARGET_ABICALLS && TARGET_OLDABI && !current_function_is_leaf)
8046 emit_insn (gen_cprestore (GEN_INT (current_function_outgoing_args_size)));
8048 /* If we are profiling, make sure no instructions are scheduled before
8049 the call to mcount. */
8051 if (current_function_profile)
8052 emit_insn (gen_blockage ());
8055 /* Do any necessary cleanup after a function to restore stack, frame,
8058 #define RA_MASK BITMASK_HIGH /* 1 << 31 */
8061 mips_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
8062 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
8064 /* Reinstate the normal $gp. */
8065 SET_REGNO (pic_offset_table_rtx, GLOBAL_POINTER_REGNUM);
8066 mips_output_cplocal ();
8068 if (cfun->machine->all_noreorder_p)
8070 /* Avoid using %>%) since it adds excess whitespace. */
8071 output_asm_insn (".set\tmacro", 0);
8072 output_asm_insn (".set\treorder", 0);
8073 set_noreorder = set_nomacro = 0;
8076 if (!FUNCTION_NAME_ALREADY_DECLARED && !flag_inhibit_size_directive)
8080 /* Get the function name the same way that toplev.c does before calling
8081 assemble_start_function. This is needed so that the name used here
8082 exactly matches the name used in ASM_DECLARE_FUNCTION_NAME. */
8083 fnname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
8084 fputs ("\t.end\t", file);
8085 assemble_name (file, fnname);
8090 /* Emit instructions to restore register REG from slot MEM. */
8093 mips_restore_reg (rtx reg, rtx mem)
8095 /* There's no mips16 instruction to load $31 directly. Load into
8096 $7 instead and adjust the return insn appropriately. */
8097 if (TARGET_MIPS16 && REGNO (reg) == GP_REG_FIRST + 31)
8098 reg = gen_rtx_REG (GET_MODE (reg), 7);
8100 if (TARGET_MIPS16 && !M16_REG_P (REGNO (reg)))
8102 /* Can't restore directly; move through a temporary. */
8103 mips_emit_move (MIPS_EPILOGUE_TEMP (GET_MODE (reg)), mem);
8104 mips_emit_move (reg, MIPS_EPILOGUE_TEMP (GET_MODE (reg)));
8107 mips_emit_move (reg, mem);
8111 /* Expand the epilogue into a bunch of separate insns. SIBCALL_P is true
8112 if this epilogue precedes a sibling call, false if it is for a normal
8113 "epilogue" pattern. */
8116 mips_expand_epilogue (int sibcall_p)
8118 HOST_WIDE_INT step1, step2;
8121 if (!sibcall_p && mips_can_use_return_insn ())
8123 emit_jump_insn (gen_return ());
8127 /* In mips16 mode, if the return value should go into a floating-point
8128 register, we need to call a helper routine to copy it over. */
8129 if (mips16_cfun_returns_in_fpr_p ())
8138 enum machine_mode return_mode;
8140 return_type = DECL_RESULT (current_function_decl);
8141 return_mode = DECL_MODE (return_type);
8143 name = ACONCAT (("__mips16_ret_",
8144 mips16_call_stub_mode_suffix (return_mode),
8146 id = get_identifier (name);
8147 func = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (id));
8148 retval = gen_rtx_REG (return_mode, GP_RETURN);
8149 call = gen_call_value_internal (retval, func, const0_rtx);
8150 insn = emit_call_insn (call);
8151 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), retval);
8154 /* Split the frame into two. STEP1 is the amount of stack we should
8155 deallocate before restoring the registers. STEP2 is the amount we
8156 should deallocate afterwards.
8158 Start off by assuming that no registers need to be restored. */
8159 step1 = cfun->machine->frame.total_size;
8162 /* Work out which register holds the frame address. Account for the
8163 frame pointer offset used by mips16 code. */
8164 if (!frame_pointer_needed)
8165 base = stack_pointer_rtx;
8168 base = hard_frame_pointer_rtx;
8170 step1 -= cfun->machine->frame.args_size;
8173 /* If we need to restore registers, deallocate as much stack as
8174 possible in the second step without going out of range. */
8175 if ((cfun->machine->frame.mask | cfun->machine->frame.fmask) != 0)
8177 step2 = MIN (step1, MIPS_MAX_FIRST_STACK_STEP);
8181 /* Set TARGET to BASE + STEP1. */
8187 /* Get an rtx for STEP1 that we can add to BASE. */
8188 adjust = GEN_INT (step1);
8189 if (!SMALL_OPERAND (step1))
8191 mips_emit_move (MIPS_EPILOGUE_TEMP (Pmode), adjust);
8192 adjust = MIPS_EPILOGUE_TEMP (Pmode);
8195 /* Normal mode code can copy the result straight into $sp. */
8197 target = stack_pointer_rtx;
8199 emit_insn (gen_add3_insn (target, base, adjust));
8202 /* Copy TARGET into the stack pointer. */
8203 if (target != stack_pointer_rtx)
8204 mips_emit_move (stack_pointer_rtx, target);
8206 /* If we're using addressing macros, $gp is implicitly used by all
8207 SYMBOL_REFs. We must emit a blockage insn before restoring $gp
8209 if (TARGET_CALL_SAVED_GP && !TARGET_EXPLICIT_RELOCS)
8210 emit_insn (gen_blockage ());
8212 if (GENERATE_MIPS16E_SAVE_RESTORE && cfun->machine->frame.mask != 0)
8214 unsigned int regno, mask;
8215 HOST_WIDE_INT offset;
8218 /* Generate the restore instruction. */
8219 mask = cfun->machine->frame.mask;
8220 restore = mips16e_build_save_restore (true, &mask, &offset, 0, step2);
8222 /* Restore any other registers manually. */
8223 for (regno = GP_REG_FIRST; regno < GP_REG_LAST; regno++)
8224 if (BITSET_P (mask, regno - GP_REG_FIRST))
8226 offset -= GET_MODE_SIZE (gpr_mode);
8227 mips_save_restore_reg (gpr_mode, regno, offset, mips_restore_reg);
8230 /* Restore the remaining registers and deallocate the final bit
8232 emit_insn (restore);
8236 /* Restore the registers. */
8237 mips_for_each_saved_reg (cfun->machine->frame.total_size - step2,
8240 /* Deallocate the final bit of the frame. */
8242 emit_insn (gen_add3_insn (stack_pointer_rtx,
8247 /* Add in the __builtin_eh_return stack adjustment. We need to
8248 use a temporary in mips16 code. */
8249 if (current_function_calls_eh_return)
8253 mips_emit_move (MIPS_EPILOGUE_TEMP (Pmode), stack_pointer_rtx);
8254 emit_insn (gen_add3_insn (MIPS_EPILOGUE_TEMP (Pmode),
8255 MIPS_EPILOGUE_TEMP (Pmode),
8256 EH_RETURN_STACKADJ_RTX));
8257 mips_emit_move (stack_pointer_rtx, MIPS_EPILOGUE_TEMP (Pmode));
8260 emit_insn (gen_add3_insn (stack_pointer_rtx,
8262 EH_RETURN_STACKADJ_RTX));
8267 /* When generating MIPS16 code, the normal mips_for_each_saved_reg
8268 path will restore the return address into $7 rather than $31. */
8270 && !GENERATE_MIPS16E_SAVE_RESTORE
8271 && (cfun->machine->frame.mask & RA_MASK) != 0)
8272 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode,
8273 GP_REG_FIRST + 7)));
8275 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode,
8276 GP_REG_FIRST + 31)));
8280 /* Return nonzero if this function is known to have a null epilogue.
8281 This allows the optimizer to omit jumps to jumps if no stack
8285 mips_can_use_return_insn (void)
8287 if (! reload_completed)
8290 if (df_regs_ever_live_p (31) || current_function_profile)
8293 /* In mips16 mode, a function that returns a floating point value
8294 needs to arrange to copy the return value into the floating point
8296 if (mips16_cfun_returns_in_fpr_p ())
8299 if (cfun->machine->frame.initialized)
8300 return cfun->machine->frame.total_size == 0;
8302 return compute_frame_size (get_frame_size ()) == 0;
8305 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. Generate rtl rather than asm text
8306 in order to avoid duplicating too much logic from elsewhere. */
8309 mips_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
8310 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8313 rtx this, temp1, temp2, insn, fnaddr;
8315 /* Pretend to be a post-reload pass while generating rtl. */
8316 reload_completed = 1;
8318 /* Mark the end of the (empty) prologue. */
8319 emit_note (NOTE_INSN_PROLOGUE_END);
8321 /* Pick a global pointer. Use a call-clobbered register if
8322 TARGET_CALL_SAVED_GP, so that we can use a sibcall. */
8325 cfun->machine->global_pointer =
8326 TARGET_CALL_SAVED_GP ? 15 : GLOBAL_POINTER_REGNUM;
8328 SET_REGNO (pic_offset_table_rtx, cfun->machine->global_pointer);
8331 /* Set up the global pointer for n32 or n64 abicalls. If
8332 LOADGP_ABSOLUTE then the thunk does not use the gp and there is
8333 no need to load it.*/
8334 if (mips_current_loadgp_style () != LOADGP_ABSOLUTE
8335 || !targetm.binds_local_p (function))
8336 mips_emit_loadgp ();
8338 /* We need two temporary registers in some cases. */
8339 temp1 = gen_rtx_REG (Pmode, 2);
8340 temp2 = gen_rtx_REG (Pmode, 3);
8342 /* Find out which register contains the "this" pointer. */
8343 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8344 this = gen_rtx_REG (Pmode, GP_ARG_FIRST + 1);
8346 this = gen_rtx_REG (Pmode, GP_ARG_FIRST);
8348 /* Add DELTA to THIS. */
8351 rtx offset = GEN_INT (delta);
8352 if (!SMALL_OPERAND (delta))
8354 mips_emit_move (temp1, offset);
8357 emit_insn (gen_add3_insn (this, this, offset));
8360 /* If needed, add *(*THIS + VCALL_OFFSET) to THIS. */
8361 if (vcall_offset != 0)
8365 /* Set TEMP1 to *THIS. */
8366 mips_emit_move (temp1, gen_rtx_MEM (Pmode, this));
8368 /* Set ADDR to a legitimate address for *THIS + VCALL_OFFSET. */
8369 addr = mips_add_offset (temp2, temp1, vcall_offset);
8371 /* Load the offset and add it to THIS. */
8372 mips_emit_move (temp1, gen_rtx_MEM (Pmode, addr));
8373 emit_insn (gen_add3_insn (this, this, temp1));
8376 /* Jump to the target function. Use a sibcall if direct jumps are
8377 allowed, otherwise load the address into a register first. */
8378 fnaddr = XEXP (DECL_RTL (function), 0);
8379 if (TARGET_MIPS16 || TARGET_USE_GOT || SYMBOL_REF_LONG_CALL_P (fnaddr)
8380 || SYMBOL_REF_MIPS16_FUNC_P (fnaddr))
8382 /* This is messy. gas treats "la $25,foo" as part of a call
8383 sequence and may allow a global "foo" to be lazily bound.
8384 The general move patterns therefore reject this combination.
8386 In this context, lazy binding would actually be OK
8387 for TARGET_CALL_CLOBBERED_GP, but it's still wrong for
8388 TARGET_CALL_SAVED_GP; see mips_load_call_address.
8389 We must therefore load the address via a temporary
8390 register if mips_dangerous_for_la25_p.
8392 If we jump to the temporary register rather than $25, the assembler
8393 can use the move insn to fill the jump's delay slot. */
8394 if (TARGET_USE_PIC_FN_ADDR_REG
8395 && !mips_dangerous_for_la25_p (fnaddr))
8396 temp1 = gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM);
8397 mips_load_call_address (temp1, fnaddr, true);
8399 if (TARGET_USE_PIC_FN_ADDR_REG
8400 && REGNO (temp1) != PIC_FUNCTION_ADDR_REGNUM)
8401 mips_emit_move (gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM), temp1);
8402 emit_jump_insn (gen_indirect_jump (temp1));
8406 insn = emit_call_insn (gen_sibcall_internal (fnaddr, const0_rtx));
8407 SIBLING_CALL_P (insn) = 1;
8410 /* Run just enough of rest_of_compilation. This sequence was
8411 "borrowed" from alpha.c. */
8412 insn = get_insns ();
8413 insn_locators_alloc ();
8414 split_all_insns_noflow ();
8415 mips16_lay_out_constants ();
8416 shorten_branches (insn);
8417 final_start_function (insn, file, 1);
8418 final (insn, file, 1);
8419 final_end_function ();
8421 /* Clean up the vars set above. Note that final_end_function resets
8422 the global pointer for us. */
8423 reload_completed = 0;
8426 /* Returns nonzero if X contains a SYMBOL_REF. */
8429 symbolic_expression_p (rtx x)
8431 if (GET_CODE (x) == SYMBOL_REF)
8434 if (GET_CODE (x) == CONST)
8435 return symbolic_expression_p (XEXP (x, 0));
8438 return symbolic_expression_p (XEXP (x, 0));
8440 if (ARITHMETIC_P (x))
8441 return (symbolic_expression_p (XEXP (x, 0))
8442 || symbolic_expression_p (XEXP (x, 1)));
8447 /* Choose the section to use for the constant rtx expression X that has
8451 mips_select_rtx_section (enum machine_mode mode, rtx x,
8452 unsigned HOST_WIDE_INT align)
8454 if (TARGET_EMBEDDED_DATA)
8456 /* For embedded applications, always put constants in read-only data,
8457 in order to reduce RAM usage. */
8458 return mergeable_constant_section (mode, align, 0);
8462 /* For hosted applications, always put constants in small data if
8463 possible, as this gives the best performance. */
8464 /* ??? Consider using mergeable small data sections. */
8466 if (GET_MODE_SIZE (mode) <= (unsigned) mips_section_threshold
8467 && mips_section_threshold > 0)
8468 return get_named_section (NULL, ".sdata", 0);
8469 else if (flag_pic && symbolic_expression_p (x))
8470 return get_named_section (NULL, ".data.rel.ro", 3);
8472 return mergeable_constant_section (mode, align, 0);
8476 /* Implement TARGET_ASM_FUNCTION_RODATA_SECTION.
8478 The complication here is that, with the combination TARGET_ABICALLS
8479 && !TARGET_GPWORD, jump tables will use absolute addresses, and should
8480 therefore not be included in the read-only part of a DSO. Handle such
8481 cases by selecting a normal data section instead of a read-only one.
8482 The logic apes that in default_function_rodata_section. */
8485 mips_function_rodata_section (tree decl)
8487 if (!TARGET_ABICALLS || TARGET_GPWORD)
8488 return default_function_rodata_section (decl);
8490 if (decl && DECL_SECTION_NAME (decl))
8492 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
8493 if (DECL_ONE_ONLY (decl) && strncmp (name, ".gnu.linkonce.t.", 16) == 0)
8495 char *rname = ASTRDUP (name);
8497 return get_section (rname, SECTION_LINKONCE | SECTION_WRITE, decl);
8499 else if (flag_function_sections && flag_data_sections
8500 && strncmp (name, ".text.", 6) == 0)
8502 char *rname = ASTRDUP (name);
8503 memcpy (rname + 1, "data", 4);
8504 return get_section (rname, SECTION_WRITE, decl);
8507 return data_section;
8510 /* Implement TARGET_IN_SMALL_DATA_P. This function controls whether
8511 locally-defined objects go in a small data section. It also controls
8512 the setting of the SYMBOL_REF_SMALL_P flag, which in turn helps
8513 mips_classify_symbol decide when to use %gp_rel(...)($gp) accesses. */
8516 mips_in_small_data_p (const_tree decl)
8520 if (TREE_CODE (decl) == STRING_CST || TREE_CODE (decl) == FUNCTION_DECL)
8523 /* We don't yet generate small-data references for -mabicalls or
8524 VxWorks RTP code. See the related -G handling in override_options. */
8525 if (TARGET_ABICALLS || TARGET_VXWORKS_RTP)
8528 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl) != 0)
8532 /* Reject anything that isn't in a known small-data section. */
8533 name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
8534 if (strcmp (name, ".sdata") != 0 && strcmp (name, ".sbss") != 0)
8537 /* If a symbol is defined externally, the assembler will use the
8538 usual -G rules when deciding how to implement macros. */
8539 if (TARGET_EXPLICIT_RELOCS || !DECL_EXTERNAL (decl))
8542 else if (TARGET_EMBEDDED_DATA)
8544 /* Don't put constants into the small data section: we want them
8545 to be in ROM rather than RAM. */
8546 if (TREE_CODE (decl) != VAR_DECL)
8549 if (TREE_READONLY (decl)
8550 && !TREE_SIDE_EFFECTS (decl)
8551 && (!DECL_INITIAL (decl) || TREE_CONSTANT (DECL_INITIAL (decl))))
8555 size = int_size_in_bytes (TREE_TYPE (decl));
8556 return (size > 0 && size <= mips_section_threshold);
8559 /* Implement TARGET_USE_ANCHORS_FOR_SYMBOL_P. We don't want to use
8560 anchors for small data: the GP register acts as an anchor in that
8561 case. We also don't want to use them for PC-relative accesses,
8562 where the PC acts as an anchor. */
8565 mips_use_anchors_for_symbol_p (const_rtx symbol)
8567 switch (mips_classify_symbol (symbol, SYMBOL_CONTEXT_MEM))
8569 case SYMBOL_PC_RELATIVE:
8570 case SYMBOL_GP_RELATIVE:
8578 /* See whether VALTYPE is a record whose fields should be returned in
8579 floating-point registers. If so, return the number of fields and
8580 list them in FIELDS (which should have two elements). Return 0
8583 For n32 & n64, a structure with one or two fields is returned in
8584 floating-point registers as long as every field has a floating-point
8588 mips_fpr_return_fields (const_tree valtype, tree *fields)
8596 if (TREE_CODE (valtype) != RECORD_TYPE)
8600 for (field = TYPE_FIELDS (valtype); field != 0; field = TREE_CHAIN (field))
8602 if (TREE_CODE (field) != FIELD_DECL)
8605 if (TREE_CODE (TREE_TYPE (field)) != REAL_TYPE)
8611 fields[i++] = field;
8617 /* Implement TARGET_RETURN_IN_MSB. For n32 & n64, we should return
8618 a value in the most significant part of $2/$3 if:
8620 - the target is big-endian;
8622 - the value has a structure or union type (we generalize this to
8623 cover aggregates from other languages too); and
8625 - the structure is not returned in floating-point registers. */
8628 mips_return_in_msb (const_tree valtype)
8632 return (TARGET_NEWABI
8633 && TARGET_BIG_ENDIAN
8634 && AGGREGATE_TYPE_P (valtype)
8635 && mips_fpr_return_fields (valtype, fields) == 0);
8639 /* Return a composite value in a pair of floating-point registers.
8640 MODE1 and OFFSET1 are the mode and byte offset for the first value,
8641 likewise MODE2 and OFFSET2 for the second. MODE is the mode of the
8644 For n32 & n64, $f0 always holds the first value and $f2 the second.
8645 Otherwise the values are packed together as closely as possible. */
8648 mips_return_fpr_pair (enum machine_mode mode,
8649 enum machine_mode mode1, HOST_WIDE_INT offset1,
8650 enum machine_mode mode2, HOST_WIDE_INT offset2)
8654 inc = (TARGET_NEWABI ? 2 : MAX_FPRS_PER_FMT);
8655 return gen_rtx_PARALLEL
8658 gen_rtx_EXPR_LIST (VOIDmode,
8659 gen_rtx_REG (mode1, FP_RETURN),
8661 gen_rtx_EXPR_LIST (VOIDmode,
8662 gen_rtx_REG (mode2, FP_RETURN + inc),
8663 GEN_INT (offset2))));
8668 /* Implement FUNCTION_VALUE and LIBCALL_VALUE. For normal calls,
8669 VALTYPE is the return type and MODE is VOIDmode. For libcalls,
8670 VALTYPE is null and MODE is the mode of the return value. */
8673 mips_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED,
8674 enum machine_mode mode)
8681 mode = TYPE_MODE (valtype);
8682 unsignedp = TYPE_UNSIGNED (valtype);
8684 /* Since we define TARGET_PROMOTE_FUNCTION_RETURN that returns
8685 true, we must promote the mode just as PROMOTE_MODE does. */
8686 mode = promote_mode (valtype, mode, &unsignedp, 1);
8688 /* Handle structures whose fields are returned in $f0/$f2. */
8689 switch (mips_fpr_return_fields (valtype, fields))
8692 return gen_rtx_REG (mode, FP_RETURN);
8695 return mips_return_fpr_pair (mode,
8696 TYPE_MODE (TREE_TYPE (fields[0])),
8697 int_byte_position (fields[0]),
8698 TYPE_MODE (TREE_TYPE (fields[1])),
8699 int_byte_position (fields[1]));
8702 /* If a value is passed in the most significant part of a register, see
8703 whether we have to round the mode up to a whole number of words. */
8704 if (mips_return_in_msb (valtype))
8706 HOST_WIDE_INT size = int_size_in_bytes (valtype);
8707 if (size % UNITS_PER_WORD != 0)
8709 size += UNITS_PER_WORD - size % UNITS_PER_WORD;
8710 mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
8714 /* For EABI, the class of return register depends entirely on MODE.
8715 For example, "struct { some_type x; }" and "union { some_type x; }"
8716 are returned in the same way as a bare "some_type" would be.
8717 Other ABIs only use FPRs for scalar, complex or vector types. */
8718 if (mips_abi != ABI_EABI && !FLOAT_TYPE_P (valtype))
8719 return gen_rtx_REG (mode, GP_RETURN);
8724 /* Handle long doubles for n32 & n64. */
8726 return mips_return_fpr_pair (mode,
8728 DImode, GET_MODE_SIZE (mode) / 2);
8730 if (mips_return_mode_in_fpr_p (mode))
8732 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8733 return mips_return_fpr_pair (mode,
8734 GET_MODE_INNER (mode), 0,
8735 GET_MODE_INNER (mode),
8736 GET_MODE_SIZE (mode) / 2);
8738 return gen_rtx_REG (mode, FP_RETURN);
8742 return gen_rtx_REG (mode, GP_RETURN);
8745 /* Return nonzero when an argument must be passed by reference. */
8748 mips_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
8749 enum machine_mode mode, const_tree type,
8750 bool named ATTRIBUTE_UNUSED)
8752 if (mips_abi == ABI_EABI)
8756 /* ??? How should SCmode be handled? */
8757 if (mode == DImode || mode == DFmode)
8760 size = type ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
8761 return size == -1 || size > UNITS_PER_WORD;
8765 /* If we have a variable-sized parameter, we have no choice. */
8766 return targetm.calls.must_pass_in_stack (mode, type);
8771 mips_callee_copies (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
8772 enum machine_mode mode ATTRIBUTE_UNUSED,
8773 const_tree type ATTRIBUTE_UNUSED, bool named)
8775 return mips_abi == ABI_EABI && named;
8778 /* Return true if registers of class CLASS cannot change from mode FROM
8782 mips_cannot_change_mode_class (enum machine_mode from,
8783 enum machine_mode to, enum reg_class class)
8785 if (MIN (GET_MODE_SIZE (from), GET_MODE_SIZE (to)) <= UNITS_PER_WORD
8786 && MAX (GET_MODE_SIZE (from), GET_MODE_SIZE (to)) > UNITS_PER_WORD)
8788 if (TARGET_BIG_ENDIAN)
8790 /* When a multi-word value is stored in paired floating-point
8791 registers, the first register always holds the low word.
8792 We therefore can't allow FPRs to change between single-word
8793 and multi-word modes. */
8794 if (MAX_FPRS_PER_FMT > 1 && reg_classes_intersect_p (FP_REGS, class))
8799 /* gcc assumes that each word of a multiword register can be accessed
8800 individually using SUBREGs. This is not true for floating-point
8801 registers if they are bigger than a word. */
8802 if (UNITS_PER_FPREG > UNITS_PER_WORD
8803 && GET_MODE_SIZE (from) > UNITS_PER_WORD
8804 && GET_MODE_SIZE (to) < UNITS_PER_FPREG
8805 && reg_classes_intersect_p (FP_REGS, class))
8808 /* Loading a 32-bit value into a 64-bit floating-point register
8809 will not sign-extend the value, despite what LOAD_EXTEND_OP says.
8810 We can't allow 64-bit float registers to change from SImode to
8815 && GET_MODE_SIZE (to) >= UNITS_PER_WORD
8816 && reg_classes_intersect_p (FP_REGS, class))
8822 /* Return true if X should not be moved directly into register $25.
8823 We need this because many versions of GAS will treat "la $25,foo" as
8824 part of a call sequence and so allow a global "foo" to be lazily bound. */
8827 mips_dangerous_for_la25_p (rtx x)
8829 return (!TARGET_EXPLICIT_RELOCS
8831 && GET_CODE (x) == SYMBOL_REF
8832 && mips_global_symbol_p (x));
8835 /* Implement PREFERRED_RELOAD_CLASS. */
8838 mips_preferred_reload_class (rtx x, enum reg_class class)
8840 if (mips_dangerous_for_la25_p (x) && reg_class_subset_p (LEA_REGS, class))
8843 if (TARGET_HARD_FLOAT
8844 && FLOAT_MODE_P (GET_MODE (x))
8845 && reg_class_subset_p (FP_REGS, class))
8848 if (reg_class_subset_p (GR_REGS, class))
8851 if (TARGET_MIPS16 && reg_class_subset_p (M16_REGS, class))
8857 /* This function returns the register class required for a secondary
8858 register when copying between one of the registers in CLASS, and X,
8859 using MODE. If IN_P is nonzero, the copy is going from X to the
8860 register, otherwise the register is the source. A return value of
8861 NO_REGS means that no secondary register is required. */
8864 mips_secondary_reload_class (enum reg_class class,
8865 enum machine_mode mode, rtx x, int in_p)
8867 enum reg_class gr_regs = TARGET_MIPS16 ? M16_REGS : GR_REGS;
8871 if (REG_P (x)|| GET_CODE (x) == SUBREG)
8872 regno = true_regnum (x);
8874 gp_reg_p = TARGET_MIPS16 ? M16_REG_P (regno) : GP_REG_P (regno);
8876 if (mips_dangerous_for_la25_p (x))
8879 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], 25))
8883 /* Copying from HI or LO to anywhere other than a general register
8884 requires a general register.
8885 This rule applies to both the original HI/LO pair and the new
8886 DSP accumulators. */
8887 if (reg_class_subset_p (class, ACC_REGS))
8889 if (TARGET_MIPS16 && in_p)
8891 /* We can't really copy to HI or LO at all in mips16 mode. */
8894 return gp_reg_p ? NO_REGS : gr_regs;
8896 if (ACC_REG_P (regno))
8898 if (TARGET_MIPS16 && ! in_p)
8900 /* We can't really copy to HI or LO at all in mips16 mode. */
8903 return class == gr_regs ? NO_REGS : gr_regs;
8906 /* We can only copy a value to a condition code register from a
8907 floating point register, and even then we require a scratch
8908 floating point register. We can only copy a value out of a
8909 condition code register into a general register. */
8910 if (class == ST_REGS)
8914 return gp_reg_p ? NO_REGS : gr_regs;
8916 if (ST_REG_P (regno))
8920 return class == gr_regs ? NO_REGS : gr_regs;
8923 if (class == FP_REGS)
8927 /* In this case we can use lwc1, swc1, ldc1 or sdc1. */
8930 else if (CONSTANT_P (x) && GET_MODE_CLASS (mode) == MODE_FLOAT)
8932 /* We can use the l.s and l.d macros to load floating-point
8933 constants. ??? For l.s, we could probably get better
8934 code by returning GR_REGS here. */
8937 else if (gp_reg_p || x == CONST0_RTX (mode))
8939 /* In this case we can use mtc1, mfc1, dmtc1 or dmfc1. */
8942 else if (FP_REG_P (regno))
8944 /* In this case we can use mov.s or mov.d. */
8949 /* Otherwise, we need to reload through an integer register. */
8954 /* In mips16 mode, going between memory and anything but M16_REGS
8955 requires an M16_REG. */
8958 if (class != M16_REGS && class != M16_NA_REGS)
8966 if (class == M16_REGS || class == M16_NA_REGS)
8975 /* Implement CLASS_MAX_NREGS.
8977 - UNITS_PER_FPREG controls the number of registers needed by FP_REGS.
8979 - ST_REGS are always hold CCmode values, and CCmode values are
8980 considered to be 4 bytes wide.
8982 All other register classes are covered by UNITS_PER_WORD. Note that
8983 this is true even for unions of integer and float registers when the
8984 latter are smaller than the former. The only supported combination
8985 in which case this occurs is -mgp64 -msingle-float, which has 64-bit
8986 words but 32-bit float registers. A word-based calculation is correct
8987 in that case since -msingle-float disallows multi-FPR values. */
8990 mips_class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,
8991 enum machine_mode mode)
8993 if (class == ST_REGS)
8994 return (GET_MODE_SIZE (mode) + 3) / 4;
8995 else if (class == FP_REGS)
8996 return (GET_MODE_SIZE (mode) + UNITS_PER_FPREG - 1) / UNITS_PER_FPREG;
8998 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
9002 mips_valid_pointer_mode (enum machine_mode mode)
9004 return (mode == SImode || (TARGET_64BIT && mode == DImode));
9007 /* Target hook for vector_mode_supported_p. */
9010 mips_vector_mode_supported_p (enum machine_mode mode)
9015 return TARGET_PAIRED_SINGLE_FLOAT;
9026 /* If we can access small data directly (using gp-relative relocation
9027 operators) return the small data pointer, otherwise return null.
9029 For each mips16 function which refers to GP relative symbols, we
9030 use a pseudo register, initialized at the start of the function, to
9031 hold the $gp value. */
9034 mips16_gp_pseudo_reg (void)
9036 if (cfun->machine->mips16_gp_pseudo_rtx == NULL_RTX)
9037 cfun->machine->mips16_gp_pseudo_rtx = gen_reg_rtx (Pmode);
9039 /* Don't initialize the pseudo register if we are being called from
9040 the tree optimizers' cost-calculation routines. */
9041 if (!cfun->machine->initialized_mips16_gp_pseudo_p
9042 && (current_ir_type () != IR_GIMPLE || currently_expanding_to_rtl))
9046 /* We want to initialize this to a value which gcc will believe
9048 insn = gen_load_const_gp (cfun->machine->mips16_gp_pseudo_rtx);
9050 push_topmost_sequence ();
9051 /* We need to emit the initialization after the FUNCTION_BEG
9052 note, so that it will be integrated. */
9053 for (scan = get_insns (); scan != NULL_RTX; scan = NEXT_INSN (scan))
9055 && NOTE_KIND (scan) == NOTE_INSN_FUNCTION_BEG)
9057 if (scan == NULL_RTX)
9058 scan = get_insns ();
9059 insn = emit_insn_after (insn, scan);
9060 pop_topmost_sequence ();
9062 cfun->machine->initialized_mips16_gp_pseudo_p = true;
9065 return cfun->machine->mips16_gp_pseudo_rtx;
9068 /* Write out code to move floating point arguments in or out of
9069 general registers. Output the instructions to FILE. FP_CODE is
9070 the code describing which arguments are present (see the comment at
9071 the definition of CUMULATIVE_ARGS in mips.h). FROM_FP_P is nonzero if
9072 we are copying from the floating point registers. */
9075 mips16_fp_args (FILE *file, int fp_code, int from_fp_p)
9080 CUMULATIVE_ARGS cum;
9082 /* This code only works for the original 32-bit ABI and the O64 ABI. */
9083 gcc_assert (TARGET_OLDABI);
9090 init_cumulative_args (&cum, NULL, NULL);
9092 for (f = (unsigned int) fp_code; f != 0; f >>= 2)
9094 enum machine_mode mode;
9095 struct mips_arg_info info;
9099 else if ((f & 3) == 2)
9104 mips_arg_info (&cum, mode, NULL, true, &info);
9105 gparg = mips_arg_regno (&info, false);
9106 fparg = mips_arg_regno (&info, true);
9109 fprintf (file, "\t%s\t%s,%s\n", s,
9110 reg_names[gparg], reg_names[fparg]);
9111 else if (TARGET_64BIT)
9112 fprintf (file, "\td%s\t%s,%s\n", s,
9113 reg_names[gparg], reg_names[fparg]);
9114 else if (ISA_HAS_MXHC1)
9115 /* -mips32r2 -mfp64 */
9116 fprintf (file, "\t%s\t%s,%s\n\t%s\t%s,%s\n",
9118 reg_names[gparg + (WORDS_BIG_ENDIAN ? 1 : 0)],
9120 from_fp_p ? "mfhc1" : "mthc1",
9121 reg_names[gparg + (WORDS_BIG_ENDIAN ? 0 : 1)],
9123 else if (TARGET_BIG_ENDIAN)
9124 fprintf (file, "\t%s\t%s,%s\n\t%s\t%s,%s\n", s,
9125 reg_names[gparg], reg_names[fparg + 1], s,
9126 reg_names[gparg + 1], reg_names[fparg]);
9128 fprintf (file, "\t%s\t%s,%s\n\t%s\t%s,%s\n", s,
9129 reg_names[gparg], reg_names[fparg], s,
9130 reg_names[gparg + 1], reg_names[fparg + 1]);
9132 function_arg_advance (&cum, mode, NULL, true);
9136 /* Build a mips16 function stub. This is used for functions which
9137 take arguments in the floating point registers. It is 32-bit code
9138 that moves the floating point args into the general registers, and
9139 then jumps to the 16-bit code. */
9142 build_mips16_function_stub (FILE *file)
9145 char *secname, *stubname;
9146 tree stubid, stubdecl;
9150 fnname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9151 secname = (char *) alloca (strlen (fnname) + 20);
9152 sprintf (secname, ".mips16.fn.%s", fnname);
9153 stubname = (char *) alloca (strlen (fnname) + 20);
9154 sprintf (stubname, "__fn_stub_%s", fnname);
9155 stubid = get_identifier (stubname);
9156 stubdecl = build_decl (FUNCTION_DECL, stubid,
9157 build_function_type (void_type_node, NULL_TREE));
9158 DECL_SECTION_NAME (stubdecl) = build_string (strlen (secname), secname);
9159 DECL_RESULT (stubdecl) = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
9161 fprintf (file, "\t# Stub function for %s (", current_function_name ());
9163 for (f = (unsigned int) current_function_args_info.fp_code; f != 0; f >>= 2)
9165 fprintf (file, "%s%s",
9166 need_comma ? ", " : "",
9167 (f & 3) == 1 ? "float" : "double");
9170 fprintf (file, ")\n");
9172 fprintf (file, "\t.set\tnomips16\n");
9173 switch_to_section (function_section (stubdecl));
9174 ASM_OUTPUT_ALIGN (file, floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT));
9176 /* ??? If FUNCTION_NAME_ALREADY_DECLARED is defined, then we are
9177 within a .ent, and we cannot emit another .ent. */
9178 if (!FUNCTION_NAME_ALREADY_DECLARED)
9180 fputs ("\t.ent\t", file);
9181 assemble_name (file, stubname);
9185 assemble_name (file, stubname);
9186 fputs (":\n", file);
9188 /* We don't want the assembler to insert any nops here. */
9189 fprintf (file, "\t.set\tnoreorder\n");
9191 mips16_fp_args (file, current_function_args_info.fp_code, 1);
9193 fprintf (asm_out_file, "\t.set\tnoat\n");
9194 fprintf (asm_out_file, "\tla\t%s,", reg_names[GP_REG_FIRST + 1]);
9195 assemble_name (file, fnname);
9196 fprintf (file, "\n");
9197 fprintf (asm_out_file, "\tjr\t%s\n", reg_names[GP_REG_FIRST + 1]);
9198 fprintf (asm_out_file, "\t.set\tat\n");
9200 /* Unfortunately, we can't fill the jump delay slot. We can't fill
9201 with one of the mfc1 instructions, because the result is not
9202 available for one instruction, so if the very first instruction
9203 in the function refers to the register, it will see the wrong
9205 fprintf (file, "\tnop\n");
9207 fprintf (file, "\t.set\treorder\n");
9209 if (!FUNCTION_NAME_ALREADY_DECLARED)
9211 fputs ("\t.end\t", file);
9212 assemble_name (file, stubname);
9216 switch_to_section (function_section (current_function_decl));
9219 /* We keep a list of functions for which we have already built stubs
9220 in build_mips16_call_stub. */
9224 struct mips16_stub *next;
9229 static struct mips16_stub *mips16_stubs;
9231 /* Emit code to return a double value from a mips16 stub. GPREG is the
9232 first GP reg to use, FPREG is the first FP reg to use. */
9235 mips16_fpret_double (int gpreg, int fpreg)
9238 fprintf (asm_out_file, "\tdmfc1\t%s,%s\n",
9239 reg_names[gpreg], reg_names[fpreg]);
9240 else if (TARGET_FLOAT64)
9242 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9243 reg_names[gpreg + WORDS_BIG_ENDIAN],
9245 fprintf (asm_out_file, "\tmfhc1\t%s,%s\n",
9246 reg_names[gpreg + !WORDS_BIG_ENDIAN],
9251 if (TARGET_BIG_ENDIAN)
9253 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9254 reg_names[gpreg + 0],
9255 reg_names[fpreg + 1]);
9256 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9257 reg_names[gpreg + 1],
9258 reg_names[fpreg + 0]);
9262 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9263 reg_names[gpreg + 0],
9264 reg_names[fpreg + 0]);
9265 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9266 reg_names[gpreg + 1],
9267 reg_names[fpreg + 1]);
9272 /* Build a call stub for a mips16 call. A stub is needed if we are
9273 passing any floating point values which should go into the floating
9274 point registers. If we are, and the call turns out to be to a
9275 32-bit function, the stub will be used to move the values into the
9276 floating point registers before calling the 32-bit function. The
9277 linker will magically adjust the function call to either the 16-bit
9278 function or the 32-bit stub, depending upon where the function call
9279 is actually defined.
9281 Similarly, we need a stub if the return value might come back in a
9282 floating point register.
9284 RETVAL is the location of the return value, or null if this is
9285 a call rather than a call_value. FN is the address of the
9286 function and ARG_SIZE is the size of the arguments. FP_CODE
9287 is the code built by function_arg. This function returns a nonzero
9288 value if it builds the call instruction itself. */
9291 build_mips16_call_stub (rtx retval, rtx fn, rtx arg_size, int fp_code)
9295 char *secname, *stubname;
9296 struct mips16_stub *l;
9297 tree stubid, stubdecl;
9301 /* We don't need to do anything if we aren't in mips16 mode, or if
9302 we were invoked with the -msoft-float option. */
9303 if (!TARGET_MIPS16 || TARGET_SOFT_FLOAT_ABI)
9306 /* Figure out whether the value might come back in a floating point
9309 fpret = mips_return_mode_in_fpr_p (GET_MODE (retval));
9311 /* We don't need to do anything if there were no floating point
9312 arguments and the value will not be returned in a floating point
9314 if (fp_code == 0 && ! fpret)
9317 /* We don't need to do anything if this is a call to a special
9318 mips16 support function. */
9319 if (GET_CODE (fn) == SYMBOL_REF
9320 && strncmp (XSTR (fn, 0), "__mips16_", 9) == 0)
9323 /* This code will only work for o32 and o64 abis. The other ABI's
9324 require more sophisticated support. */
9325 gcc_assert (TARGET_OLDABI);
9327 /* If we're calling via a function pointer, then we must always call
9328 via a stub. There are magic stubs provided in libgcc.a for each
9329 of the required cases. Each of them expects the function address
9330 to arrive in register $2. */
9332 if (GET_CODE (fn) != SYMBOL_REF)
9338 /* ??? If this code is modified to support other ABI's, we need
9339 to handle PARALLEL return values here. */
9342 sprintf (buf, "__mips16_call_stub_%s_%d",
9343 mips16_call_stub_mode_suffix (GET_MODE (retval)),
9346 sprintf (buf, "__mips16_call_stub_%d",
9349 id = get_identifier (buf);
9350 stub_fn = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (id));
9352 mips_emit_move (gen_rtx_REG (Pmode, 2), fn);
9354 if (retval == NULL_RTX)
9355 insn = gen_call_internal (stub_fn, arg_size);
9357 insn = gen_call_value_internal (retval, stub_fn, arg_size);
9358 insn = emit_call_insn (insn);
9360 /* Put the register usage information on the CALL. */
9361 CALL_INSN_FUNCTION_USAGE (insn) =
9362 gen_rtx_EXPR_LIST (VOIDmode,
9363 gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 2)),
9364 CALL_INSN_FUNCTION_USAGE (insn));
9366 /* If we are handling a floating point return value, we need to
9367 save $18 in the function prologue. Putting a note on the
9368 call will mean that df_regs_ever_live_p ($18) will be true if the
9369 call is not eliminated, and we can check that in the prologue
9372 CALL_INSN_FUNCTION_USAGE (insn) =
9373 gen_rtx_EXPR_LIST (VOIDmode,
9374 gen_rtx_USE (VOIDmode,
9375 gen_rtx_REG (word_mode, 18)),
9376 CALL_INSN_FUNCTION_USAGE (insn));
9378 /* Return 1 to tell the caller that we've generated the call
9383 /* We know the function we are going to call. If we have already
9384 built a stub, we don't need to do anything further. */
9386 fnname = XSTR (fn, 0);
9387 for (l = mips16_stubs; l != NULL; l = l->next)
9388 if (strcmp (l->name, fnname) == 0)
9393 /* Build a special purpose stub. When the linker sees a
9394 function call in mips16 code, it will check where the target
9395 is defined. If the target is a 32-bit call, the linker will
9396 search for the section defined here. It can tell which
9397 symbol this section is associated with by looking at the
9398 relocation information (the name is unreliable, since this
9399 might be a static function). If such a section is found, the
9400 linker will redirect the call to the start of the magic
9403 If the function does not return a floating point value, the
9404 special stub section is named
9407 If the function does return a floating point value, the stub
9409 .mips16.call.fp.FNNAME
9412 secname = (char *) alloca (strlen (fnname) + 40);
9413 sprintf (secname, ".mips16.call.%s%s",
9416 stubname = (char *) alloca (strlen (fnname) + 20);
9417 sprintf (stubname, "__call_stub_%s%s",
9420 stubid = get_identifier (stubname);
9421 stubdecl = build_decl (FUNCTION_DECL, stubid,
9422 build_function_type (void_type_node, NULL_TREE));
9423 DECL_SECTION_NAME (stubdecl) = build_string (strlen (secname), secname);
9424 DECL_RESULT (stubdecl) = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
9426 fprintf (asm_out_file, "\t# Stub function to call %s%s (",
9428 ? (GET_MODE (retval) == SFmode ? "float " : "double ")
9432 for (f = (unsigned int) fp_code; f != 0; f >>= 2)
9434 fprintf (asm_out_file, "%s%s",
9435 need_comma ? ", " : "",
9436 (f & 3) == 1 ? "float" : "double");
9439 fprintf (asm_out_file, ")\n");
9441 fprintf (asm_out_file, "\t.set\tnomips16\n");
9442 assemble_start_function (stubdecl, stubname);
9444 if (!FUNCTION_NAME_ALREADY_DECLARED)
9446 fputs ("\t.ent\t", asm_out_file);
9447 assemble_name (asm_out_file, stubname);
9448 fputs ("\n", asm_out_file);
9450 assemble_name (asm_out_file, stubname);
9451 fputs (":\n", asm_out_file);
9454 /* We build the stub code by hand. That's the only way we can
9455 do it, since we can't generate 32-bit code during a 16-bit
9458 /* We don't want the assembler to insert any nops here. */
9459 fprintf (asm_out_file, "\t.set\tnoreorder\n");
9461 mips16_fp_args (asm_out_file, fp_code, 0);
9465 fprintf (asm_out_file, "\t.set\tnoat\n");
9466 fprintf (asm_out_file, "\tla\t%s,%s\n", reg_names[GP_REG_FIRST + 1],
9468 fprintf (asm_out_file, "\tjr\t%s\n", reg_names[GP_REG_FIRST + 1]);
9469 fprintf (asm_out_file, "\t.set\tat\n");
9470 /* Unfortunately, we can't fill the jump delay slot. We
9471 can't fill with one of the mtc1 instructions, because the
9472 result is not available for one instruction, so if the
9473 very first instruction in the function refers to the
9474 register, it will see the wrong value. */
9475 fprintf (asm_out_file, "\tnop\n");
9479 fprintf (asm_out_file, "\tmove\t%s,%s\n",
9480 reg_names[GP_REG_FIRST + 18], reg_names[GP_REG_FIRST + 31]);
9481 fprintf (asm_out_file, "\tjal\t%s\n", fnname);
9482 /* As above, we can't fill the delay slot. */
9483 fprintf (asm_out_file, "\tnop\n");
9484 if (GET_MODE (retval) == SFmode)
9485 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9486 reg_names[GP_REG_FIRST + 2], reg_names[FP_REG_FIRST + 0]);
9487 else if (GET_MODE (retval) == SCmode)
9489 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9490 reg_names[GP_REG_FIRST + 2],
9491 reg_names[FP_REG_FIRST + 0]);
9492 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9493 reg_names[GP_REG_FIRST + 3],
9494 reg_names[FP_REG_FIRST + MAX_FPRS_PER_FMT]);
9496 else if (GET_MODE (retval) == DFmode
9497 || GET_MODE (retval) == V2SFmode)
9499 mips16_fpret_double (GP_REG_FIRST + 2, FP_REG_FIRST + 0);
9501 else if (GET_MODE (retval) == DCmode)
9503 mips16_fpret_double (GP_REG_FIRST + 2,
9505 mips16_fpret_double (GP_REG_FIRST + 4,
9506 FP_REG_FIRST + MAX_FPRS_PER_FMT);
9510 if (TARGET_BIG_ENDIAN)
9512 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9513 reg_names[GP_REG_FIRST + 2],
9514 reg_names[FP_REG_FIRST + 1]);
9515 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9516 reg_names[GP_REG_FIRST + 3],
9517 reg_names[FP_REG_FIRST + 0]);
9521 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9522 reg_names[GP_REG_FIRST + 2],
9523 reg_names[FP_REG_FIRST + 0]);
9524 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9525 reg_names[GP_REG_FIRST + 3],
9526 reg_names[FP_REG_FIRST + 1]);
9529 fprintf (asm_out_file, "\tj\t%s\n", reg_names[GP_REG_FIRST + 18]);
9530 /* As above, we can't fill the delay slot. */
9531 fprintf (asm_out_file, "\tnop\n");
9534 fprintf (asm_out_file, "\t.set\treorder\n");
9536 #ifdef ASM_DECLARE_FUNCTION_SIZE
9537 ASM_DECLARE_FUNCTION_SIZE (asm_out_file, stubname, stubdecl);
9540 if (!FUNCTION_NAME_ALREADY_DECLARED)
9542 fputs ("\t.end\t", asm_out_file);
9543 assemble_name (asm_out_file, stubname);
9544 fputs ("\n", asm_out_file);
9547 /* Record this stub. */
9548 l = (struct mips16_stub *) xmalloc (sizeof *l);
9549 l->name = xstrdup (fnname);
9551 l->next = mips16_stubs;
9555 /* If we expect a floating point return value, but we've built a
9556 stub which does not expect one, then we're in trouble. We can't
9557 use the existing stub, because it won't handle the floating point
9558 value. We can't build a new stub, because the linker won't know
9559 which stub to use for the various calls in this object file.
9560 Fortunately, this case is illegal, since it means that a function
9561 was declared in two different ways in a single compilation. */
9562 if (fpret && ! l->fpret)
9563 error ("cannot handle inconsistent calls to %qs", fnname);
9565 /* If we are calling a stub which handles a floating point return
9566 value, we need to arrange to save $18 in the prologue. We do
9567 this by marking the function call as using the register. The
9568 prologue will later see that it is used, and emit code to save
9575 if (retval == NULL_RTX)
9576 insn = gen_call_internal (fn, arg_size);
9578 insn = gen_call_value_internal (retval, fn, arg_size);
9579 insn = emit_call_insn (insn);
9581 CALL_INSN_FUNCTION_USAGE (insn) =
9582 gen_rtx_EXPR_LIST (VOIDmode,
9583 gen_rtx_USE (VOIDmode, gen_rtx_REG (word_mode, 18)),
9584 CALL_INSN_FUNCTION_USAGE (insn));
9586 /* Return 1 to tell the caller that we've generated the call
9591 /* Return 0 to let the caller generate the call insn. */
9595 /* An entry in the mips16 constant pool. VALUE is the pool constant,
9596 MODE is its mode, and LABEL is the CODE_LABEL associated with it. */
9598 struct mips16_constant {
9599 struct mips16_constant *next;
9602 enum machine_mode mode;
9605 /* Information about an incomplete mips16 constant pool. FIRST is the
9606 first constant, HIGHEST_ADDRESS is the highest address that the first
9607 byte of the pool can have, and INSN_ADDRESS is the current instruction
9610 struct mips16_constant_pool {
9611 struct mips16_constant *first;
9612 int highest_address;
9616 /* Add constant VALUE to POOL and return its label. MODE is the
9617 value's mode (used for CONST_INTs, etc.). */
9620 add_constant (struct mips16_constant_pool *pool,
9621 rtx value, enum machine_mode mode)
9623 struct mips16_constant **p, *c;
9624 bool first_of_size_p;
9626 /* See whether the constant is already in the pool. If so, return the
9627 existing label, otherwise leave P pointing to the place where the
9628 constant should be added.
9630 Keep the pool sorted in increasing order of mode size so that we can
9631 reduce the number of alignments needed. */
9632 first_of_size_p = true;
9633 for (p = &pool->first; *p != 0; p = &(*p)->next)
9635 if (mode == (*p)->mode && rtx_equal_p (value, (*p)->value))
9637 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE ((*p)->mode))
9639 if (GET_MODE_SIZE (mode) == GET_MODE_SIZE ((*p)->mode))
9640 first_of_size_p = false;
9643 /* In the worst case, the constant needed by the earliest instruction
9644 will end up at the end of the pool. The entire pool must then be
9645 accessible from that instruction.
9647 When adding the first constant, set the pool's highest address to
9648 the address of the first out-of-range byte. Adjust this address
9649 downwards each time a new constant is added. */
9650 if (pool->first == 0)
9651 /* For pc-relative lw, addiu and daddiu instructions, the base PC value
9652 is the address of the instruction with the lowest two bits clear.
9653 The base PC value for ld has the lowest three bits clear. Assume
9654 the worst case here. */
9655 pool->highest_address = pool->insn_address - (UNITS_PER_WORD - 2) + 0x8000;
9656 pool->highest_address -= GET_MODE_SIZE (mode);
9657 if (first_of_size_p)
9658 /* Take into account the worst possible padding due to alignment. */
9659 pool->highest_address -= GET_MODE_SIZE (mode) - 1;
9661 /* Create a new entry. */
9662 c = (struct mips16_constant *) xmalloc (sizeof *c);
9665 c->label = gen_label_rtx ();
9672 /* Output constant VALUE after instruction INSN and return the last
9673 instruction emitted. MODE is the mode of the constant. */
9676 dump_constants_1 (enum machine_mode mode, rtx value, rtx insn)
9678 switch (GET_MODE_CLASS (mode))
9682 rtx size = GEN_INT (GET_MODE_SIZE (mode));
9683 return emit_insn_after (gen_consttable_int (value, size), insn);
9687 return emit_insn_after (gen_consttable_float (value), insn);
9689 case MODE_VECTOR_FLOAT:
9690 case MODE_VECTOR_INT:
9693 for (i = 0; i < CONST_VECTOR_NUNITS (value); i++)
9694 insn = dump_constants_1 (GET_MODE_INNER (mode),
9695 CONST_VECTOR_ELT (value, i), insn);
9705 /* Dump out the constants in CONSTANTS after INSN. */
9708 dump_constants (struct mips16_constant *constants, rtx insn)
9710 struct mips16_constant *c, *next;
9714 for (c = constants; c != NULL; c = next)
9716 /* If necessary, increase the alignment of PC. */
9717 if (align < GET_MODE_SIZE (c->mode))
9719 int align_log = floor_log2 (GET_MODE_SIZE (c->mode));
9720 insn = emit_insn_after (gen_align (GEN_INT (align_log)), insn);
9722 align = GET_MODE_SIZE (c->mode);
9724 insn = emit_label_after (c->label, insn);
9725 insn = dump_constants_1 (c->mode, c->value, insn);
9731 emit_barrier_after (insn);
9734 /* Return the length of instruction INSN. */
9737 mips16_insn_length (rtx insn)
9741 rtx body = PATTERN (insn);
9742 if (GET_CODE (body) == ADDR_VEC)
9743 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, 0);
9744 if (GET_CODE (body) == ADDR_DIFF_VEC)
9745 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, 1);
9747 return get_attr_length (insn);
9750 /* Rewrite *X so that constant pool references refer to the constant's
9751 label instead. DATA points to the constant pool structure. */
9754 mips16_rewrite_pool_refs (rtx *x, void *data)
9756 struct mips16_constant_pool *pool = data;
9757 rtx base, offset, label;
9761 else if (!TARGET_MIPS16_TEXT_LOADS)
9764 split_const (*x, &base, &offset);
9765 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
9767 label = add_constant (pool, get_pool_constant (base),
9768 get_pool_mode (base));
9769 base = gen_rtx_LABEL_REF (Pmode, label);
9770 *x = mips_unspec_address_offset (base, offset, SYMBOL_PC_RELATIVE);
9773 return GET_CODE (*x) == CONST ? -1 : 0;
9776 /* Build MIPS16 constant pools. */
9779 mips16_lay_out_constants (void)
9781 struct mips16_constant_pool pool;
9784 if (!TARGET_MIPS16_PCREL_LOADS)
9788 memset (&pool, 0, sizeof (pool));
9789 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9791 /* Rewrite constant pool references in INSN. */
9793 for_each_rtx (&PATTERN (insn), mips16_rewrite_pool_refs, &pool);
9795 pool.insn_address += mips16_insn_length (insn);
9797 if (pool.first != NULL)
9799 /* If there are no natural barriers between the first user of
9800 the pool and the highest acceptable address, we'll need to
9801 create a new instruction to jump around the constant pool.
9802 In the worst case, this instruction will be 4 bytes long.
9804 If it's too late to do this transformation after INSN,
9805 do it immediately before INSN. */
9806 if (barrier == 0 && pool.insn_address + 4 > pool.highest_address)
9810 label = gen_label_rtx ();
9812 jump = emit_jump_insn_before (gen_jump (label), insn);
9813 JUMP_LABEL (jump) = label;
9814 LABEL_NUSES (label) = 1;
9815 barrier = emit_barrier_after (jump);
9817 emit_label_after (label, barrier);
9818 pool.insn_address += 4;
9821 /* See whether the constant pool is now out of range of the first
9822 user. If so, output the constants after the previous barrier.
9823 Note that any instructions between BARRIER and INSN (inclusive)
9824 will use negative offsets to refer to the pool. */
9825 if (pool.insn_address > pool.highest_address)
9827 dump_constants (pool.first, barrier);
9831 else if (BARRIER_P (insn))
9835 dump_constants (pool.first, get_last_insn ());
9838 /* A temporary variable used by for_each_rtx callbacks, etc. */
9839 static rtx mips_sim_insn;
9841 /* A structure representing the state of the processor pipeline.
9842 Used by the mips_sim_* family of functions. */
9844 /* The maximum number of instructions that can be issued in a cycle.
9845 (Caches mips_issue_rate.) */
9846 unsigned int issue_rate;
9848 /* The current simulation time. */
9851 /* How many more instructions can be issued in the current cycle. */
9852 unsigned int insns_left;
9854 /* LAST_SET[X].INSN is the last instruction to set register X.
9855 LAST_SET[X].TIME is the time at which that instruction was issued.
9856 INSN is null if no instruction has yet set register X. */
9860 } last_set[FIRST_PSEUDO_REGISTER];
9862 /* The pipeline's current DFA state. */
9866 /* Reset STATE to the initial simulation state. */
9869 mips_sim_reset (struct mips_sim *state)
9872 state->insns_left = state->issue_rate;
9873 memset (&state->last_set, 0, sizeof (state->last_set));
9874 state_reset (state->dfa_state);
9877 /* Initialize STATE before its first use. DFA_STATE points to an
9878 allocated but uninitialized DFA state. */
9881 mips_sim_init (struct mips_sim *state, state_t dfa_state)
9883 state->issue_rate = mips_issue_rate ();
9884 state->dfa_state = dfa_state;
9885 mips_sim_reset (state);
9888 /* Advance STATE by one clock cycle. */
9891 mips_sim_next_cycle (struct mips_sim *state)
9894 state->insns_left = state->issue_rate;
9895 state_transition (state->dfa_state, 0);
9898 /* Advance simulation state STATE until instruction INSN can read
9902 mips_sim_wait_reg (struct mips_sim *state, rtx insn, rtx reg)
9906 for (i = 0; i < HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)); i++)
9907 if (state->last_set[REGNO (reg) + i].insn != 0)
9911 t = state->last_set[REGNO (reg) + i].time;
9912 t += insn_latency (state->last_set[REGNO (reg) + i].insn, insn);
9913 while (state->time < t)
9914 mips_sim_next_cycle (state);
9918 /* A for_each_rtx callback. If *X is a register, advance simulation state
9919 DATA until mips_sim_insn can read the register's value. */
9922 mips_sim_wait_regs_2 (rtx *x, void *data)
9925 mips_sim_wait_reg (data, mips_sim_insn, *x);
9929 /* Call mips_sim_wait_regs_2 (R, DATA) for each register R mentioned in *X. */
9932 mips_sim_wait_regs_1 (rtx *x, void *data)
9934 for_each_rtx (x, mips_sim_wait_regs_2, data);
9937 /* Advance simulation state STATE until all of INSN's register
9938 dependencies are satisfied. */
9941 mips_sim_wait_regs (struct mips_sim *state, rtx insn)
9943 mips_sim_insn = insn;
9944 note_uses (&PATTERN (insn), mips_sim_wait_regs_1, state);
9947 /* Advance simulation state STATE until the units required by
9948 instruction INSN are available. */
9951 mips_sim_wait_units (struct mips_sim *state, rtx insn)
9955 tmp_state = alloca (state_size ());
9956 while (state->insns_left == 0
9957 || (memcpy (tmp_state, state->dfa_state, state_size ()),
9958 state_transition (tmp_state, insn) >= 0))
9959 mips_sim_next_cycle (state);
9962 /* Advance simulation state STATE until INSN is ready to issue. */
9965 mips_sim_wait_insn (struct mips_sim *state, rtx insn)
9967 mips_sim_wait_regs (state, insn);
9968 mips_sim_wait_units (state, insn);
9971 /* mips_sim_insn has just set X. Update the LAST_SET array
9972 in simulation state DATA. */
9975 mips_sim_record_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
9977 struct mips_sim *state;
9982 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
9984 state->last_set[REGNO (x) + i].insn = mips_sim_insn;
9985 state->last_set[REGNO (x) + i].time = state->time;
9989 /* Issue instruction INSN in scheduler state STATE. Assume that INSN
9990 can issue immediately (i.e., that mips_sim_wait_insn has already
9994 mips_sim_issue_insn (struct mips_sim *state, rtx insn)
9996 state_transition (state->dfa_state, insn);
9997 state->insns_left--;
9999 mips_sim_insn = insn;
10000 note_stores (PATTERN (insn), mips_sim_record_set, state);
10003 /* Simulate issuing a NOP in state STATE. */
10006 mips_sim_issue_nop (struct mips_sim *state)
10008 if (state->insns_left == 0)
10009 mips_sim_next_cycle (state);
10010 state->insns_left--;
10013 /* Update simulation state STATE so that it's ready to accept the instruction
10014 after INSN. INSN should be part of the main rtl chain, not a member of a
10018 mips_sim_finish_insn (struct mips_sim *state, rtx insn)
10020 /* If INSN is a jump with an implicit delay slot, simulate a nop. */
10022 mips_sim_issue_nop (state);
10024 switch (GET_CODE (SEQ_BEGIN (insn)))
10028 /* We can't predict the processor state after a call or label. */
10029 mips_sim_reset (state);
10033 /* The delay slots of branch likely instructions are only executed
10034 when the branch is taken. Therefore, if the caller has simulated
10035 the delay slot instruction, STATE does not really reflect the state
10036 of the pipeline for the instruction after the delay slot. Also,
10037 branch likely instructions tend to incur a penalty when not taken,
10038 so there will probably be an extra delay between the branch and
10039 the instruction after the delay slot. */
10040 if (INSN_ANNULLED_BRANCH_P (SEQ_BEGIN (insn)))
10041 mips_sim_reset (state);
10049 /* The VR4130 pipeline issues aligned pairs of instructions together,
10050 but it stalls the second instruction if it depends on the first.
10051 In order to cut down the amount of logic required, this dependence
10052 check is not based on a full instruction decode. Instead, any non-SPECIAL
10053 instruction is assumed to modify the register specified by bits 20-16
10054 (which is usually the "rt" field).
10056 In beq, beql, bne and bnel instructions, the rt field is actually an
10057 input, so we can end up with a false dependence between the branch
10058 and its delay slot. If this situation occurs in instruction INSN,
10059 try to avoid it by swapping rs and rt. */
10062 vr4130_avoid_branch_rt_conflict (rtx insn)
10066 first = SEQ_BEGIN (insn);
10067 second = SEQ_END (insn);
10069 && NONJUMP_INSN_P (second)
10070 && GET_CODE (PATTERN (first)) == SET
10071 && GET_CODE (SET_DEST (PATTERN (first))) == PC
10072 && GET_CODE (SET_SRC (PATTERN (first))) == IF_THEN_ELSE)
10074 /* Check for the right kind of condition. */
10075 rtx cond = XEXP (SET_SRC (PATTERN (first)), 0);
10076 if ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
10077 && REG_P (XEXP (cond, 0))
10078 && REG_P (XEXP (cond, 1))
10079 && reg_referenced_p (XEXP (cond, 1), PATTERN (second))
10080 && !reg_referenced_p (XEXP (cond, 0), PATTERN (second)))
10082 /* SECOND mentions the rt register but not the rs register. */
10083 rtx tmp = XEXP (cond, 0);
10084 XEXP (cond, 0) = XEXP (cond, 1);
10085 XEXP (cond, 1) = tmp;
10090 /* Implement -mvr4130-align. Go through each basic block and simulate the
10091 processor pipeline. If we find that a pair of instructions could execute
10092 in parallel, and the first of those instruction is not 8-byte aligned,
10093 insert a nop to make it aligned. */
10096 vr4130_align_insns (void)
10098 struct mips_sim state;
10099 rtx insn, subinsn, last, last2, next;
10104 /* LAST is the last instruction before INSN to have a nonzero length.
10105 LAST2 is the last such instruction before LAST. */
10109 /* ALIGNED_P is true if INSN is known to be at an aligned address. */
10112 mips_sim_init (&state, alloca (state_size ()));
10113 for (insn = get_insns (); insn != 0; insn = next)
10115 unsigned int length;
10117 next = NEXT_INSN (insn);
10119 /* See the comment above vr4130_avoid_branch_rt_conflict for details.
10120 This isn't really related to the alignment pass, but we do it on
10121 the fly to avoid a separate instruction walk. */
10122 vr4130_avoid_branch_rt_conflict (insn);
10124 if (USEFUL_INSN_P (insn))
10125 FOR_EACH_SUBINSN (subinsn, insn)
10127 mips_sim_wait_insn (&state, subinsn);
10129 /* If we want this instruction to issue in parallel with the
10130 previous one, make sure that the previous instruction is
10131 aligned. There are several reasons why this isn't worthwhile
10132 when the second instruction is a call:
10134 - Calls are less likely to be performance critical,
10135 - There's a good chance that the delay slot can execute
10136 in parallel with the call.
10137 - The return address would then be unaligned.
10139 In general, if we're going to insert a nop between instructions
10140 X and Y, it's better to insert it immediately after X. That
10141 way, if the nop makes Y aligned, it will also align any labels
10142 between X and Y. */
10143 if (state.insns_left != state.issue_rate
10144 && !CALL_P (subinsn))
10146 if (subinsn == SEQ_BEGIN (insn) && aligned_p)
10148 /* SUBINSN is the first instruction in INSN and INSN is
10149 aligned. We want to align the previous instruction
10150 instead, so insert a nop between LAST2 and LAST.
10152 Note that LAST could be either a single instruction
10153 or a branch with a delay slot. In the latter case,
10154 LAST, like INSN, is already aligned, but the delay
10155 slot must have some extra delay that stops it from
10156 issuing at the same time as the branch. We therefore
10157 insert a nop before the branch in order to align its
10159 emit_insn_after (gen_nop (), last2);
10162 else if (subinsn != SEQ_BEGIN (insn) && !aligned_p)
10164 /* SUBINSN is the delay slot of INSN, but INSN is
10165 currently unaligned. Insert a nop between
10166 LAST and INSN to align it. */
10167 emit_insn_after (gen_nop (), last);
10171 mips_sim_issue_insn (&state, subinsn);
10173 mips_sim_finish_insn (&state, insn);
10175 /* Update LAST, LAST2 and ALIGNED_P for the next instruction. */
10176 length = get_attr_length (insn);
10179 /* If the instruction is an asm statement or multi-instruction
10180 mips.md patern, the length is only an estimate. Insert an
10181 8 byte alignment after it so that the following instructions
10182 can be handled correctly. */
10183 if (NONJUMP_INSN_P (SEQ_BEGIN (insn))
10184 && (recog_memoized (insn) < 0 || length >= 8))
10186 next = emit_insn_after (gen_align (GEN_INT (3)), insn);
10187 next = NEXT_INSN (next);
10188 mips_sim_next_cycle (&state);
10191 else if (length & 4)
10192 aligned_p = !aligned_p;
10197 /* See whether INSN is an aligned label. */
10198 if (LABEL_P (insn) && label_to_alignment (insn) >= 3)
10204 /* Subroutine of mips_reorg. If there is a hazard between INSN
10205 and a previous instruction, avoid it by inserting nops after
10208 *DELAYED_REG and *HILO_DELAY describe the hazards that apply at
10209 this point. If *DELAYED_REG is non-null, INSN must wait a cycle
10210 before using the value of that register. *HILO_DELAY counts the
10211 number of instructions since the last hilo hazard (that is,
10212 the number of instructions since the last mflo or mfhi).
10214 After inserting nops for INSN, update *DELAYED_REG and *HILO_DELAY
10215 for the next instruction.
10217 LO_REG is an rtx for the LO register, used in dependence checking. */
10220 mips_avoid_hazard (rtx after, rtx insn, int *hilo_delay,
10221 rtx *delayed_reg, rtx lo_reg)
10226 if (!INSN_P (insn))
10229 pattern = PATTERN (insn);
10231 /* Do not put the whole function in .set noreorder if it contains
10232 an asm statement. We don't know whether there will be hazards
10233 between the asm statement and the gcc-generated code. */
10234 if (GET_CODE (pattern) == ASM_INPUT || asm_noperands (pattern) >= 0)
10235 cfun->machine->all_noreorder_p = false;
10237 /* Ignore zero-length instructions (barriers and the like). */
10238 ninsns = get_attr_length (insn) / 4;
10242 /* Work out how many nops are needed. Note that we only care about
10243 registers that are explicitly mentioned in the instruction's pattern.
10244 It doesn't matter that calls use the argument registers or that they
10245 clobber hi and lo. */
10246 if (*hilo_delay < 2 && reg_set_p (lo_reg, pattern))
10247 nops = 2 - *hilo_delay;
10248 else if (*delayed_reg != 0 && reg_referenced_p (*delayed_reg, pattern))
10253 /* Insert the nops between this instruction and the previous one.
10254 Each new nop takes us further from the last hilo hazard. */
10255 *hilo_delay += nops;
10257 emit_insn_after (gen_hazard_nop (), after);
10259 /* Set up the state for the next instruction. */
10260 *hilo_delay += ninsns;
10262 if (INSN_CODE (insn) >= 0)
10263 switch (get_attr_hazard (insn))
10273 set = single_set (insn);
10274 gcc_assert (set != 0);
10275 *delayed_reg = SET_DEST (set);
10281 /* Go through the instruction stream and insert nops where necessary.
10282 See if the whole function can then be put into .set noreorder &
10286 mips_avoid_hazards (void)
10288 rtx insn, last_insn, lo_reg, delayed_reg;
10291 /* Force all instructions to be split into their final form. */
10292 split_all_insns_noflow ();
10294 /* Recalculate instruction lengths without taking nops into account. */
10295 cfun->machine->ignore_hazard_length_p = true;
10296 shorten_branches (get_insns ());
10298 cfun->machine->all_noreorder_p = true;
10300 /* Profiled functions can't be all noreorder because the profiler
10301 support uses assembler macros. */
10302 if (current_function_profile)
10303 cfun->machine->all_noreorder_p = false;
10305 /* Code compiled with -mfix-vr4120 can't be all noreorder because
10306 we rely on the assembler to work around some errata. */
10307 if (TARGET_FIX_VR4120)
10308 cfun->machine->all_noreorder_p = false;
10310 /* The same is true for -mfix-vr4130 if we might generate mflo or
10311 mfhi instructions. Note that we avoid using mflo and mfhi if
10312 the VR4130 macc and dmacc instructions are available instead;
10313 see the *mfhilo_{si,di}_macc patterns. */
10314 if (TARGET_FIX_VR4130 && !ISA_HAS_MACCHI)
10315 cfun->machine->all_noreorder_p = false;
10320 lo_reg = gen_rtx_REG (SImode, LO_REGNUM);
10322 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10325 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
10326 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
10327 mips_avoid_hazard (last_insn, XVECEXP (PATTERN (insn), 0, i),
10328 &hilo_delay, &delayed_reg, lo_reg);
10330 mips_avoid_hazard (last_insn, insn, &hilo_delay,
10331 &delayed_reg, lo_reg);
10338 /* Implement TARGET_MACHINE_DEPENDENT_REORG. */
10343 mips16_lay_out_constants ();
10344 if (TARGET_EXPLICIT_RELOCS)
10346 if (mips_flag_delayed_branch)
10347 dbr_schedule (get_insns ());
10348 mips_avoid_hazards ();
10349 if (TUNE_MIPS4130 && TARGET_VR4130_ALIGN)
10350 vr4130_align_insns ();
10354 /* This function does three things:
10356 - Register the special divsi3 and modsi3 functions if -mfix-vr4120.
10357 - Register the mips16 hardware floating point stubs.
10358 - Register the gofast functions if selected using --enable-gofast. */
10360 #include "config/gofast.h"
10363 mips_init_libfuncs (void)
10365 if (TARGET_FIX_VR4120)
10367 set_optab_libfunc (sdiv_optab, SImode, "__vr4120_divsi3");
10368 set_optab_libfunc (smod_optab, SImode, "__vr4120_modsi3");
10371 if (TARGET_MIPS16 && TARGET_HARD_FLOAT_ABI)
10373 set_optab_libfunc (add_optab, SFmode, "__mips16_addsf3");
10374 set_optab_libfunc (sub_optab, SFmode, "__mips16_subsf3");
10375 set_optab_libfunc (smul_optab, SFmode, "__mips16_mulsf3");
10376 set_optab_libfunc (sdiv_optab, SFmode, "__mips16_divsf3");
10378 set_optab_libfunc (eq_optab, SFmode, "__mips16_eqsf2");
10379 set_optab_libfunc (ne_optab, SFmode, "__mips16_nesf2");
10380 set_optab_libfunc (gt_optab, SFmode, "__mips16_gtsf2");
10381 set_optab_libfunc (ge_optab, SFmode, "__mips16_gesf2");
10382 set_optab_libfunc (lt_optab, SFmode, "__mips16_ltsf2");
10383 set_optab_libfunc (le_optab, SFmode, "__mips16_lesf2");
10384 set_optab_libfunc (unord_optab, SFmode, "__mips16_unordsf2");
10386 set_conv_libfunc (sfix_optab, SImode, SFmode, "__mips16_fix_truncsfsi");
10387 set_conv_libfunc (sfloat_optab, SFmode, SImode, "__mips16_floatsisf");
10388 set_conv_libfunc (ufloat_optab, SFmode, SImode, "__mips16_floatunsisf");
10390 if (TARGET_DOUBLE_FLOAT)
10392 set_optab_libfunc (add_optab, DFmode, "__mips16_adddf3");
10393 set_optab_libfunc (sub_optab, DFmode, "__mips16_subdf3");
10394 set_optab_libfunc (smul_optab, DFmode, "__mips16_muldf3");
10395 set_optab_libfunc (sdiv_optab, DFmode, "__mips16_divdf3");
10397 set_optab_libfunc (eq_optab, DFmode, "__mips16_eqdf2");
10398 set_optab_libfunc (ne_optab, DFmode, "__mips16_nedf2");
10399 set_optab_libfunc (gt_optab, DFmode, "__mips16_gtdf2");
10400 set_optab_libfunc (ge_optab, DFmode, "__mips16_gedf2");
10401 set_optab_libfunc (lt_optab, DFmode, "__mips16_ltdf2");
10402 set_optab_libfunc (le_optab, DFmode, "__mips16_ledf2");
10403 set_optab_libfunc (unord_optab, DFmode, "__mips16_unorddf2");
10405 set_conv_libfunc (sext_optab, DFmode, SFmode, "__mips16_extendsfdf2");
10406 set_conv_libfunc (trunc_optab, SFmode, DFmode, "__mips16_truncdfsf2");
10408 set_conv_libfunc (sfix_optab, SImode, DFmode, "__mips16_fix_truncdfsi");
10409 set_conv_libfunc (sfloat_optab, DFmode, SImode, "__mips16_floatsidf");
10410 set_conv_libfunc (ufloat_optab, DFmode, SImode, "__mips16_floatunsidf");
10414 gofast_maybe_init_libfuncs ();
10417 /* Return a number assessing the cost of moving a register in class
10418 FROM to class TO. The classes are expressed using the enumeration
10419 values such as `GENERAL_REGS'. A value of 2 is the default; other
10420 values are interpreted relative to that.
10422 It is not required that the cost always equal 2 when FROM is the
10423 same as TO; on some machines it is expensive to move between
10424 registers if they are not general registers.
10426 If reload sees an insn consisting of a single `set' between two
10427 hard registers, and if `REGISTER_MOVE_COST' applied to their
10428 classes returns a value of 2, reload does not check to ensure that
10429 the constraints of the insn are met. Setting a cost of other than
10430 2 will allow reload to verify that the constraints are met. You
10431 should do this if the `movM' pattern's constraints do not allow
10434 ??? We make the cost of moving from HI/LO into general
10435 registers the same as for one of moving general registers to
10436 HI/LO for TARGET_MIPS16 in order to prevent allocating a
10437 pseudo to HI/LO. This might hurt optimizations though, it
10438 isn't clear if it is wise. And it might not work in all cases. We
10439 could solve the DImode LO reg problem by using a multiply, just
10440 like reload_{in,out}si. We could solve the SImode/HImode HI reg
10441 problem by using divide instructions. divu puts the remainder in
10442 the HI reg, so doing a divide by -1 will move the value in the HI
10443 reg for all values except -1. We could handle that case by using a
10444 signed divide, e.g. -1 / 2 (or maybe 1 / -2?). We'd have to emit
10445 a compare/branch to test the input value to see which instruction
10446 we need to use. This gets pretty messy, but it is feasible. */
10449 mips_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
10450 enum reg_class to, enum reg_class from)
10452 if (from == M16_REGS && reg_class_subset_p (to, GENERAL_REGS))
10454 else if (from == M16_NA_REGS && reg_class_subset_p (to, GENERAL_REGS))
10456 else if (reg_class_subset_p (from, GENERAL_REGS))
10458 if (to == M16_REGS)
10460 else if (to == M16_NA_REGS)
10462 else if (reg_class_subset_p (to, GENERAL_REGS))
10469 else if (to == FP_REGS)
10471 else if (reg_class_subset_p (to, ACC_REGS))
10478 else if (reg_class_subset_p (to, ALL_COP_REGS))
10483 else if (from == FP_REGS)
10485 if (reg_class_subset_p (to, GENERAL_REGS))
10487 else if (to == FP_REGS)
10489 else if (to == ST_REGS)
10492 else if (reg_class_subset_p (from, ACC_REGS))
10494 if (reg_class_subset_p (to, GENERAL_REGS))
10502 else if (from == ST_REGS && reg_class_subset_p (to, GENERAL_REGS))
10504 else if (reg_class_subset_p (from, ALL_COP_REGS))
10510 ??? What cases are these? Shouldn't we return 2 here? */
10515 /* Return the length of INSN. LENGTH is the initial length computed by
10516 attributes in the machine-description file. */
10519 mips_adjust_insn_length (rtx insn, int length)
10521 /* A unconditional jump has an unfilled delay slot if it is not part
10522 of a sequence. A conditional jump normally has a delay slot, but
10523 does not on MIPS16. */
10524 if (CALL_P (insn) || (TARGET_MIPS16 ? simplejump_p (insn) : JUMP_P (insn)))
10527 /* See how many nops might be needed to avoid hardware hazards. */
10528 if (!cfun->machine->ignore_hazard_length_p && INSN_CODE (insn) >= 0)
10529 switch (get_attr_hazard (insn))
10543 /* All MIPS16 instructions are a measly two bytes. */
10551 /* Return an asm sequence to start a noat block and load the address
10552 of a label into $1. */
10555 mips_output_load_label (void)
10557 if (TARGET_EXPLICIT_RELOCS)
10561 return "%[lw\t%@,%%got_page(%0)(%+)\n\taddiu\t%@,%@,%%got_ofst(%0)";
10564 return "%[ld\t%@,%%got_page(%0)(%+)\n\tdaddiu\t%@,%@,%%got_ofst(%0)";
10567 if (ISA_HAS_LOAD_DELAY)
10568 return "%[lw\t%@,%%got(%0)(%+)%#\n\taddiu\t%@,%@,%%lo(%0)";
10569 return "%[lw\t%@,%%got(%0)(%+)\n\taddiu\t%@,%@,%%lo(%0)";
10573 if (Pmode == DImode)
10574 return "%[dla\t%@,%0";
10576 return "%[la\t%@,%0";
10580 /* Return the assembly code for INSN, which has the operands given by
10581 OPERANDS, and which branches to OPERANDS[1] if some condition is true.
10582 BRANCH_IF_TRUE is the asm template that should be used if OPERANDS[1]
10583 is in range of a direct branch. BRANCH_IF_FALSE is an inverted
10584 version of BRANCH_IF_TRUE. */
10587 mips_output_conditional_branch (rtx insn, rtx *operands,
10588 const char *branch_if_true,
10589 const char *branch_if_false)
10591 unsigned int length;
10592 rtx taken, not_taken;
10594 length = get_attr_length (insn);
10597 /* Just a simple conditional branch. */
10598 mips_branch_likely = (final_sequence && INSN_ANNULLED_BRANCH_P (insn));
10599 return branch_if_true;
10602 /* Generate a reversed branch around a direct jump. This fallback does
10603 not use branch-likely instructions. */
10604 mips_branch_likely = false;
10605 not_taken = gen_label_rtx ();
10606 taken = operands[1];
10608 /* Generate the reversed branch to NOT_TAKEN. */
10609 operands[1] = not_taken;
10610 output_asm_insn (branch_if_false, operands);
10612 /* If INSN has a delay slot, we must provide delay slots for both the
10613 branch to NOT_TAKEN and the conditional jump. We must also ensure
10614 that INSN's delay slot is executed in the appropriate cases. */
10615 if (final_sequence)
10617 /* This first delay slot will always be executed, so use INSN's
10618 delay slot if is not annulled. */
10619 if (!INSN_ANNULLED_BRANCH_P (insn))
10621 final_scan_insn (XVECEXP (final_sequence, 0, 1),
10622 asm_out_file, optimize, 1, NULL);
10623 INSN_DELETED_P (XVECEXP (final_sequence, 0, 1)) = 1;
10626 output_asm_insn ("nop", 0);
10627 fprintf (asm_out_file, "\n");
10630 /* Output the unconditional branch to TAKEN. */
10632 output_asm_insn ("j\t%0%/", &taken);
10635 output_asm_insn (mips_output_load_label (), &taken);
10636 output_asm_insn ("jr\t%@%]%/", 0);
10639 /* Now deal with its delay slot; see above. */
10640 if (final_sequence)
10642 /* This delay slot will only be executed if the branch is taken.
10643 Use INSN's delay slot if is annulled. */
10644 if (INSN_ANNULLED_BRANCH_P (insn))
10646 final_scan_insn (XVECEXP (final_sequence, 0, 1),
10647 asm_out_file, optimize, 1, NULL);
10648 INSN_DELETED_P (XVECEXP (final_sequence, 0, 1)) = 1;
10651 output_asm_insn ("nop", 0);
10652 fprintf (asm_out_file, "\n");
10655 /* Output NOT_TAKEN. */
10656 (*targetm.asm_out.internal_label) (asm_out_file, "L",
10657 CODE_LABEL_NUMBER (not_taken));
10661 /* Return the assembly code for INSN, which branches to OPERANDS[1]
10662 if some ordered condition is true. The condition is given by
10663 OPERANDS[0] if !INVERTED_P, otherwise it is the inverse of
10664 OPERANDS[0]. OPERANDS[2] is the comparison's first operand;
10665 its second is always zero. */
10668 mips_output_order_conditional_branch (rtx insn, rtx *operands, bool inverted_p)
10670 const char *branch[2];
10672 /* Make BRANCH[1] branch to OPERANDS[1] when the condition is true.
10673 Make BRANCH[0] branch on the inverse condition. */
10674 switch (GET_CODE (operands[0]))
10676 /* These cases are equivalent to comparisons against zero. */
10678 inverted_p = !inverted_p;
10679 /* Fall through. */
10681 branch[!inverted_p] = MIPS_BRANCH ("bne", "%2,%.,%1");
10682 branch[inverted_p] = MIPS_BRANCH ("beq", "%2,%.,%1");
10685 /* These cases are always true or always false. */
10687 inverted_p = !inverted_p;
10688 /* Fall through. */
10690 branch[!inverted_p] = MIPS_BRANCH ("beq", "%.,%.,%1");
10691 branch[inverted_p] = MIPS_BRANCH ("bne", "%.,%.,%1");
10695 branch[!inverted_p] = MIPS_BRANCH ("b%C0z", "%2,%1");
10696 branch[inverted_p] = MIPS_BRANCH ("b%N0z", "%2,%1");
10699 return mips_output_conditional_branch (insn, operands, branch[1], branch[0]);
10702 /* Used to output div or ddiv instruction DIVISION, which has the operands
10703 given by OPERANDS. Add in a divide-by-zero check if needed.
10705 When working around R4000 and R4400 errata, we need to make sure that
10706 the division is not immediately followed by a shift[1][2]. We also
10707 need to stop the division from being put into a branch delay slot[3].
10708 The easiest way to avoid both problems is to add a nop after the
10709 division. When a divide-by-zero check is needed, this nop can be
10710 used to fill the branch delay slot.
10712 [1] If a double-word or a variable shift executes immediately
10713 after starting an integer division, the shift may give an
10714 incorrect result. See quotations of errata #16 and #28 from
10715 "MIPS R4000PC/SC Errata, Processor Revision 2.2 and 3.0"
10716 in mips.md for details.
10718 [2] A similar bug to [1] exists for all revisions of the
10719 R4000 and the R4400 when run in an MC configuration.
10720 From "MIPS R4000MC Errata, Processor Revision 2.2 and 3.0":
10722 "19. In this following sequence:
10724 ddiv (or ddivu or div or divu)
10725 dsll32 (or dsrl32, dsra32)
10727 if an MPT stall occurs, while the divide is slipping the cpu
10728 pipeline, then the following double shift would end up with an
10731 Workaround: The compiler needs to avoid generating any
10732 sequence with divide followed by extended double shift."
10734 This erratum is also present in "MIPS R4400MC Errata, Processor
10735 Revision 1.0" and "MIPS R4400MC Errata, Processor Revision 2.0
10736 & 3.0" as errata #10 and #4, respectively.
10738 [3] From "MIPS R4000PC/SC Errata, Processor Revision 2.2 and 3.0"
10739 (also valid for MIPS R4000MC processors):
10741 "52. R4000SC: This bug does not apply for the R4000PC.
10743 There are two flavors of this bug:
10745 1) If the instruction just after divide takes an RF exception
10746 (tlb-refill, tlb-invalid) and gets an instruction cache
10747 miss (both primary and secondary) and the line which is
10748 currently in secondary cache at this index had the first
10749 data word, where the bits 5..2 are set, then R4000 would
10750 get a wrong result for the div.
10755 ------------------- # end-of page. -tlb-refill
10760 ------------------- # end-of page. -tlb-invalid
10763 2) If the divide is in the taken branch delay slot, where the
10764 target takes RF exception and gets an I-cache miss for the
10765 exception vector or where I-cache miss occurs for the
10766 target address, under the above mentioned scenarios, the
10767 div would get wrong results.
10770 j r2 # to next page mapped or unmapped
10771 div r8,r9 # this bug would be there as long
10772 # as there is an ICache miss and
10773 nop # the "data pattern" is present
10776 beq r0, r0, NextPage # to Next page
10780 This bug is present for div, divu, ddiv, and ddivu
10783 Workaround: For item 1), OS could make sure that the next page
10784 after the divide instruction is also mapped. For item 2), the
10785 compiler could make sure that the divide instruction is not in
10786 the branch delay slot."
10788 These processors have PRId values of 0x00004220 and 0x00004300 for
10789 the R4000 and 0x00004400, 0x00004500 and 0x00004600 for the R4400. */
10792 mips_output_division (const char *division, rtx *operands)
10797 if (TARGET_FIX_R4000 || TARGET_FIX_R4400)
10799 output_asm_insn (s, operands);
10802 if (TARGET_CHECK_ZERO_DIV)
10806 output_asm_insn (s, operands);
10807 s = "bnez\t%2,1f\n\tbreak\t7\n1:";
10809 else if (GENERATE_DIVIDE_TRAPS)
10811 output_asm_insn (s, operands);
10812 s = "teq\t%2,%.,7";
10816 output_asm_insn ("%(bne\t%2,%.,1f", operands);
10817 output_asm_insn (s, operands);
10818 s = "break\t7%)\n1:";
10824 /* Return true if GIVEN is the same as CANONICAL, or if it is CANONICAL
10825 with a final "000" replaced by "k". Ignore case.
10827 Note: this function is shared between GCC and GAS. */
10830 mips_strict_matching_cpu_name_p (const char *canonical, const char *given)
10832 while (*given != 0 && TOLOWER (*given) == TOLOWER (*canonical))
10833 given++, canonical++;
10835 return ((*given == 0 && *canonical == 0)
10836 || (strcmp (canonical, "000") == 0 && strcasecmp (given, "k") == 0));
10840 /* Return true if GIVEN matches CANONICAL, where GIVEN is a user-supplied
10841 CPU name. We've traditionally allowed a lot of variation here.
10843 Note: this function is shared between GCC and GAS. */
10846 mips_matching_cpu_name_p (const char *canonical, const char *given)
10848 /* First see if the name matches exactly, or with a final "000"
10849 turned into "k". */
10850 if (mips_strict_matching_cpu_name_p (canonical, given))
10853 /* If not, try comparing based on numerical designation alone.
10854 See if GIVEN is an unadorned number, or 'r' followed by a number. */
10855 if (TOLOWER (*given) == 'r')
10857 if (!ISDIGIT (*given))
10860 /* Skip over some well-known prefixes in the canonical name,
10861 hoping to find a number there too. */
10862 if (TOLOWER (canonical[0]) == 'v' && TOLOWER (canonical[1]) == 'r')
10864 else if (TOLOWER (canonical[0]) == 'r' && TOLOWER (canonical[1]) == 'm')
10866 else if (TOLOWER (canonical[0]) == 'r')
10869 return mips_strict_matching_cpu_name_p (canonical, given);
10873 /* Return the mips_cpu_info entry for the processor or ISA given
10874 by CPU_STRING. Return null if the string isn't recognized.
10876 A similar function exists in GAS. */
10878 static const struct mips_cpu_info *
10879 mips_parse_cpu (const char *cpu_string)
10881 const struct mips_cpu_info *p;
10884 /* In the past, we allowed upper-case CPU names, but it doesn't
10885 work well with the multilib machinery. */
10886 for (s = cpu_string; *s != 0; s++)
10889 warning (0, "the cpu name must be lower case");
10893 /* 'from-abi' selects the most compatible architecture for the given
10894 ABI: MIPS I for 32-bit ABIs and MIPS III for 64-bit ABIs. For the
10895 EABIs, we have to decide whether we're using the 32-bit or 64-bit
10896 version. Look first at the -mgp options, if given, otherwise base
10897 the choice on MASK_64BIT in TARGET_DEFAULT. */
10898 if (strcasecmp (cpu_string, "from-abi") == 0)
10899 return mips_cpu_info_from_isa (ABI_NEEDS_32BIT_REGS ? 1
10900 : ABI_NEEDS_64BIT_REGS ? 3
10901 : (TARGET_64BIT ? 3 : 1));
10903 /* 'default' has traditionally been a no-op. Probably not very useful. */
10904 if (strcasecmp (cpu_string, "default") == 0)
10907 for (p = mips_cpu_info_table; p->name != 0; p++)
10908 if (mips_matching_cpu_name_p (p->name, cpu_string))
10915 /* Return the processor associated with the given ISA level, or null
10916 if the ISA isn't valid. */
10918 static const struct mips_cpu_info *
10919 mips_cpu_info_from_isa (int isa)
10921 const struct mips_cpu_info *p;
10923 for (p = mips_cpu_info_table; p->name != 0; p++)
10930 /* Implement HARD_REGNO_NREGS. The size of FP registers is controlled
10931 by UNITS_PER_FPREG. The size of FP status registers is always 4, because
10932 they only hold condition code modes, and CCmode is always considered to
10933 be 4 bytes wide. All other registers are word sized. */
10936 mips_hard_regno_nregs (int regno, enum machine_mode mode)
10938 if (ST_REG_P (regno))
10939 return ((GET_MODE_SIZE (mode) + 3) / 4);
10940 else if (! FP_REG_P (regno))
10941 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
10943 return ((GET_MODE_SIZE (mode) + UNITS_PER_FPREG - 1) / UNITS_PER_FPREG);
10946 /* Implement TARGET_RETURN_IN_MEMORY. Under the old (i.e., 32 and O64 ABIs)
10947 all BLKmode objects are returned in memory. Under the new (N32 and
10948 64-bit MIPS ABIs) small structures are returned in a register.
10949 Objects with varying size must still be returned in memory, of
10953 mips_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
10956 return (TYPE_MODE (type) == BLKmode);
10958 return ((int_size_in_bytes (type) > (2 * UNITS_PER_WORD))
10959 || (int_size_in_bytes (type) == -1));
10963 mips_strict_argument_naming (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
10965 return !TARGET_OLDABI;
10968 /* Return true if INSN is a multiply-add or multiply-subtract
10969 instruction and PREV assigns to the accumulator operand. */
10972 mips_linked_madd_p (rtx prev, rtx insn)
10976 x = single_set (insn);
10982 if (GET_CODE (x) == PLUS
10983 && GET_CODE (XEXP (x, 0)) == MULT
10984 && reg_set_p (XEXP (x, 1), prev))
10987 if (GET_CODE (x) == MINUS
10988 && GET_CODE (XEXP (x, 1)) == MULT
10989 && reg_set_p (XEXP (x, 0), prev))
10995 /* Used by TUNE_MACC_CHAINS to record the last scheduled instruction
10996 that may clobber hi or lo. */
10998 static rtx mips_macc_chains_last_hilo;
11000 /* A TUNE_MACC_CHAINS helper function. Record that instruction INSN has
11001 been scheduled, updating mips_macc_chains_last_hilo appropriately. */
11004 mips_macc_chains_record (rtx insn)
11006 if (get_attr_may_clobber_hilo (insn))
11007 mips_macc_chains_last_hilo = insn;
11010 /* A TUNE_MACC_CHAINS helper function. Search ready queue READY, which
11011 has NREADY elements, looking for a multiply-add or multiply-subtract
11012 instruction that is cumulative with mips_macc_chains_last_hilo.
11013 If there is one, promote it ahead of anything else that might
11014 clobber hi or lo. */
11017 mips_macc_chains_reorder (rtx *ready, int nready)
11021 if (mips_macc_chains_last_hilo != 0)
11022 for (i = nready - 1; i >= 0; i--)
11023 if (mips_linked_madd_p (mips_macc_chains_last_hilo, ready[i]))
11025 for (j = nready - 1; j > i; j--)
11026 if (recog_memoized (ready[j]) >= 0
11027 && get_attr_may_clobber_hilo (ready[j]))
11029 mips_promote_ready (ready, i, j);
11036 /* The last instruction to be scheduled. */
11038 static rtx vr4130_last_insn;
11040 /* A note_stores callback used by vr4130_true_reg_dependence_p. DATA
11041 points to an rtx that is initially an instruction. Nullify the rtx
11042 if the instruction uses the value of register X. */
11045 vr4130_true_reg_dependence_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
11047 rtx *insn_ptr = data;
11050 && reg_referenced_p (x, PATTERN (*insn_ptr)))
11054 /* Return true if there is true register dependence between vr4130_last_insn
11058 vr4130_true_reg_dependence_p (rtx insn)
11060 note_stores (PATTERN (vr4130_last_insn),
11061 vr4130_true_reg_dependence_p_1, &insn);
11065 /* A TUNE_MIPS4130 helper function. Given that INSN1 is at the head of
11066 the ready queue and that INSN2 is the instruction after it, return
11067 true if it is worth promoting INSN2 ahead of INSN1. Look for cases
11068 in which INSN1 and INSN2 can probably issue in parallel, but for
11069 which (INSN2, INSN1) should be less sensitive to instruction
11070 alignment than (INSN1, INSN2). See 4130.md for more details. */
11073 vr4130_swap_insns_p (rtx insn1, rtx insn2)
11075 sd_iterator_def sd_it;
11078 /* Check for the following case:
11080 1) there is some other instruction X with an anti dependence on INSN1;
11081 2) X has a higher priority than INSN2; and
11082 3) X is an arithmetic instruction (and thus has no unit restrictions).
11084 If INSN1 is the last instruction blocking X, it would better to
11085 choose (INSN1, X) over (INSN2, INSN1). */
11086 FOR_EACH_DEP (insn1, SD_LIST_FORW, sd_it, dep)
11087 if (DEP_TYPE (dep) == REG_DEP_ANTI
11088 && INSN_PRIORITY (DEP_CON (dep)) > INSN_PRIORITY (insn2)
11089 && recog_memoized (DEP_CON (dep)) >= 0
11090 && get_attr_vr4130_class (DEP_CON (dep)) == VR4130_CLASS_ALU)
11093 if (vr4130_last_insn != 0
11094 && recog_memoized (insn1) >= 0
11095 && recog_memoized (insn2) >= 0)
11097 /* See whether INSN1 and INSN2 use different execution units,
11098 or if they are both ALU-type instructions. If so, they can
11099 probably execute in parallel. */
11100 enum attr_vr4130_class class1 = get_attr_vr4130_class (insn1);
11101 enum attr_vr4130_class class2 = get_attr_vr4130_class (insn2);
11102 if (class1 != class2 || class1 == VR4130_CLASS_ALU)
11104 /* If only one of the instructions has a dependence on
11105 vr4130_last_insn, prefer to schedule the other one first. */
11106 bool dep1 = vr4130_true_reg_dependence_p (insn1);
11107 bool dep2 = vr4130_true_reg_dependence_p (insn2);
11111 /* Prefer to schedule INSN2 ahead of INSN1 if vr4130_last_insn
11112 is not an ALU-type instruction and if INSN1 uses the same
11113 execution unit. (Note that if this condition holds, we already
11114 know that INSN2 uses a different execution unit.) */
11115 if (class1 != VR4130_CLASS_ALU
11116 && recog_memoized (vr4130_last_insn) >= 0
11117 && class1 == get_attr_vr4130_class (vr4130_last_insn))
11124 /* A TUNE_MIPS4130 helper function. (READY, NREADY) describes a ready
11125 queue with at least two instructions. Swap the first two if
11126 vr4130_swap_insns_p says that it could be worthwhile. */
11129 vr4130_reorder (rtx *ready, int nready)
11131 if (vr4130_swap_insns_p (ready[nready - 1], ready[nready - 2]))
11132 mips_promote_ready (ready, nready - 2, nready - 1);
11135 /* Remove the instruction at index LOWER from ready queue READY and
11136 reinsert it in front of the instruction at index HIGHER. LOWER must
11140 mips_promote_ready (rtx *ready, int lower, int higher)
11145 new_head = ready[lower];
11146 for (i = lower; i < higher; i++)
11147 ready[i] = ready[i + 1];
11148 ready[i] = new_head;
11151 /* If the priority of the instruction at POS2 in the ready queue READY
11152 is within LIMIT units of that of the instruction at POS1, swap the
11153 instructions if POS2 is not already less than POS1. */
11156 mips_maybe_swap_ready (rtx *ready, int pos1, int pos2, int limit)
11159 && INSN_PRIORITY (ready[pos1]) + limit >= INSN_PRIORITY (ready[pos2]))
11162 temp = ready[pos1];
11163 ready[pos1] = ready[pos2];
11164 ready[pos2] = temp;
11168 /* Record whether last 74k AGEN instruction was a load or store. */
11170 static enum attr_type mips_last_74k_agen_insn = TYPE_UNKNOWN;
11172 /* Initialize mips_last_74k_agen_insn from INSN. A null argument
11173 resets to TYPE_UNKNOWN state. */
11176 mips_74k_agen_init (rtx insn)
11178 if (!insn || !NONJUMP_INSN_P (insn))
11179 mips_last_74k_agen_insn = TYPE_UNKNOWN;
11180 else if (USEFUL_INSN_P (insn))
11182 enum attr_type type = get_attr_type (insn);
11183 if (type == TYPE_LOAD || type == TYPE_STORE)
11184 mips_last_74k_agen_insn = type;
11188 /* A TUNE_74K helper function. The 74K AGEN pipeline likes multiple
11189 loads to be grouped together, and multiple stores to be grouped
11190 together. Swap things around in the ready queue to make this happen. */
11193 mips_74k_agen_reorder (rtx *ready, int nready)
11196 int store_pos, load_pos;
11201 for (i = nready - 1; i >= 0; i--)
11203 rtx insn = ready[i];
11204 if (USEFUL_INSN_P (insn))
11205 switch (get_attr_type (insn))
11208 if (store_pos == -1)
11213 if (load_pos == -1)
11222 if (load_pos == -1 || store_pos == -1)
11225 switch (mips_last_74k_agen_insn)
11228 /* Prefer to schedule loads since they have a higher latency. */
11230 /* Swap loads to the front of the queue. */
11231 mips_maybe_swap_ready (ready, load_pos, store_pos, 4);
11234 /* Swap stores to the front of the queue. */
11235 mips_maybe_swap_ready (ready, store_pos, load_pos, 4);
11242 /* Implement TARGET_SCHED_INIT. */
11245 mips_sched_init (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
11246 int max_ready ATTRIBUTE_UNUSED)
11248 mips_macc_chains_last_hilo = 0;
11249 vr4130_last_insn = 0;
11250 mips_74k_agen_init (NULL_RTX);
11253 /* Implement TARGET_SCHED_REORDER and TARG_SCHED_REORDER2. */
11256 mips_sched_reorder (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
11257 rtx *ready, int *nreadyp, int cycle ATTRIBUTE_UNUSED)
11259 if (!reload_completed
11260 && TUNE_MACC_CHAINS
11262 mips_macc_chains_reorder (ready, *nreadyp);
11263 if (reload_completed
11265 && !TARGET_VR4130_ALIGN
11267 vr4130_reorder (ready, *nreadyp);
11269 mips_74k_agen_reorder (ready, *nreadyp);
11270 return mips_issue_rate ();
11273 /* Implement TARGET_SCHED_VARIABLE_ISSUE. */
11276 mips_variable_issue (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
11277 rtx insn, int more)
11280 mips_74k_agen_init (insn);
11281 switch (GET_CODE (PATTERN (insn)))
11285 /* Don't count USEs and CLOBBERs against the issue rate. */
11290 if (!reload_completed && TUNE_MACC_CHAINS)
11291 mips_macc_chains_record (insn);
11292 vr4130_last_insn = insn;
11298 /* Implement TARGET_SCHED_ADJUST_COST. We assume that anti and output
11299 dependencies have no cost, except on the 20Kc where output-dependence
11300 is treated like input-dependence. */
11303 mips_adjust_cost (rtx insn ATTRIBUTE_UNUSED, rtx link,
11304 rtx dep ATTRIBUTE_UNUSED, int cost)
11306 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
11309 if (REG_NOTE_KIND (link) != 0)
11314 /* Return the number of instructions that can be issued per cycle. */
11317 mips_issue_rate (void)
11321 case PROCESSOR_74KC:
11322 case PROCESSOR_74KF2_1:
11323 case PROCESSOR_74KF1_1:
11324 case PROCESSOR_74KF3_2:
11325 /* The 74k is not strictly quad-issue cpu, but can be seen as one
11326 by the scheduler. It can issue 1 ALU, 1 AGEN and 2 FPU insns,
11327 but in reality only a maximum of 3 insns can be issued as the
11328 floating point load/stores also require a slot in the AGEN pipe. */
11331 case PROCESSOR_20KC:
11332 case PROCESSOR_R4130:
11333 case PROCESSOR_R5400:
11334 case PROCESSOR_R5500:
11335 case PROCESSOR_R7000:
11336 case PROCESSOR_R9000:
11339 case PROCESSOR_SB1:
11340 case PROCESSOR_SB1A:
11341 /* This is actually 4, but we get better performance if we claim 3.
11342 This is partly because of unwanted speculative code motion with the
11343 larger number, and partly because in most common cases we can't
11344 reach the theoretical max of 4. */
11352 /* Implements TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD. This should
11353 be as wide as the scheduling freedom in the DFA. */
11356 mips_multipass_dfa_lookahead (void)
11358 /* Can schedule up to 4 of the 6 function units in any one cycle. */
11365 /* Implements a store data bypass check. We need this because the cprestore
11366 pattern is type store, but defined using an UNSPEC. This UNSPEC causes the
11367 default routine to abort. We just return false for that case. */
11368 /* ??? Should try to give a better result here than assuming false. */
11371 mips_store_data_bypass_p (rtx out_insn, rtx in_insn)
11373 if (GET_CODE (PATTERN (in_insn)) == UNSPEC_VOLATILE)
11376 return ! store_data_bypass_p (out_insn, in_insn);
11379 /* Given that we have an rtx of the form (prefetch ... WRITE LOCALITY),
11380 return the first operand of the associated "pref" or "prefx" insn. */
11383 mips_prefetch_cookie (rtx write, rtx locality)
11385 /* store_streamed / load_streamed. */
11386 if (INTVAL (locality) <= 0)
11387 return GEN_INT (INTVAL (write) + 4);
11389 /* store / load. */
11390 if (INTVAL (locality) <= 2)
11393 /* store_retained / load_retained. */
11394 return GEN_INT (INTVAL (write) + 6);
11397 /* MIPS builtin function support. */
11399 struct builtin_description
11401 /* The code of the main .md file instruction. See mips_builtin_type
11402 for more information. */
11403 enum insn_code icode;
11405 /* The floating-point comparison code to use with ICODE, if any. */
11406 enum mips_fp_condition cond;
11408 /* The name of the builtin function. */
11411 /* Specifies how the function should be expanded. */
11412 enum mips_builtin_type builtin_type;
11414 /* The function's prototype. */
11415 enum mips_function_type function_type;
11417 /* The target flags required for this function. */
11421 /* Define a MIPS_BUILTIN_DIRECT function for instruction CODE_FOR_mips_<INSN>.
11422 FUNCTION_TYPE and TARGET_FLAGS are builtin_description fields. */
11423 #define DIRECT_BUILTIN(INSN, FUNCTION_TYPE, TARGET_FLAGS) \
11424 { CODE_FOR_mips_ ## INSN, 0, "__builtin_mips_" #INSN, \
11425 MIPS_BUILTIN_DIRECT, FUNCTION_TYPE, TARGET_FLAGS }
11427 /* Define __builtin_mips_<INSN>_<COND>_{s,d}, both of which require
11429 #define CMP_SCALAR_BUILTINS(INSN, COND, TARGET_FLAGS) \
11430 { CODE_FOR_mips_ ## INSN ## _cond_s, MIPS_FP_COND_ ## COND, \
11431 "__builtin_mips_" #INSN "_" #COND "_s", \
11432 MIPS_BUILTIN_CMP_SINGLE, MIPS_INT_FTYPE_SF_SF, TARGET_FLAGS }, \
11433 { CODE_FOR_mips_ ## INSN ## _cond_d, MIPS_FP_COND_ ## COND, \
11434 "__builtin_mips_" #INSN "_" #COND "_d", \
11435 MIPS_BUILTIN_CMP_SINGLE, MIPS_INT_FTYPE_DF_DF, TARGET_FLAGS }
11437 /* Define __builtin_mips_{any,all,upper,lower}_<INSN>_<COND>_ps.
11438 The lower and upper forms require TARGET_FLAGS while the any and all
11439 forms require MASK_MIPS3D. */
11440 #define CMP_PS_BUILTINS(INSN, COND, TARGET_FLAGS) \
11441 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11442 "__builtin_mips_any_" #INSN "_" #COND "_ps", \
11443 MIPS_BUILTIN_CMP_ANY, MIPS_INT_FTYPE_V2SF_V2SF, MASK_MIPS3D }, \
11444 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11445 "__builtin_mips_all_" #INSN "_" #COND "_ps", \
11446 MIPS_BUILTIN_CMP_ALL, MIPS_INT_FTYPE_V2SF_V2SF, MASK_MIPS3D }, \
11447 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11448 "__builtin_mips_lower_" #INSN "_" #COND "_ps", \
11449 MIPS_BUILTIN_CMP_LOWER, MIPS_INT_FTYPE_V2SF_V2SF, TARGET_FLAGS }, \
11450 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11451 "__builtin_mips_upper_" #INSN "_" #COND "_ps", \
11452 MIPS_BUILTIN_CMP_UPPER, MIPS_INT_FTYPE_V2SF_V2SF, TARGET_FLAGS }
11454 /* Define __builtin_mips_{any,all}_<INSN>_<COND>_4s. The functions
11455 require MASK_MIPS3D. */
11456 #define CMP_4S_BUILTINS(INSN, COND) \
11457 { CODE_FOR_mips_ ## INSN ## _cond_4s, MIPS_FP_COND_ ## COND, \
11458 "__builtin_mips_any_" #INSN "_" #COND "_4s", \
11459 MIPS_BUILTIN_CMP_ANY, MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF, \
11461 { CODE_FOR_mips_ ## INSN ## _cond_4s, MIPS_FP_COND_ ## COND, \
11462 "__builtin_mips_all_" #INSN "_" #COND "_4s", \
11463 MIPS_BUILTIN_CMP_ALL, MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF, \
11466 /* Define __builtin_mips_mov{t,f}_<INSN>_<COND>_ps. The comparison
11467 instruction requires TARGET_FLAGS. */
11468 #define MOVTF_BUILTINS(INSN, COND, TARGET_FLAGS) \
11469 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11470 "__builtin_mips_movt_" #INSN "_" #COND "_ps", \
11471 MIPS_BUILTIN_MOVT, MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF, \
11473 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11474 "__builtin_mips_movf_" #INSN "_" #COND "_ps", \
11475 MIPS_BUILTIN_MOVF, MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF, \
11478 /* Define all the builtins related to c.cond.fmt condition COND. */
11479 #define CMP_BUILTINS(COND) \
11480 MOVTF_BUILTINS (c, COND, MASK_PAIRED_SINGLE_FLOAT), \
11481 MOVTF_BUILTINS (cabs, COND, MASK_MIPS3D), \
11482 CMP_SCALAR_BUILTINS (cabs, COND, MASK_MIPS3D), \
11483 CMP_PS_BUILTINS (c, COND, MASK_PAIRED_SINGLE_FLOAT), \
11484 CMP_PS_BUILTINS (cabs, COND, MASK_MIPS3D), \
11485 CMP_4S_BUILTINS (c, COND), \
11486 CMP_4S_BUILTINS (cabs, COND)
11488 static const struct builtin_description mips_bdesc[] =
11490 DIRECT_BUILTIN (pll_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11491 DIRECT_BUILTIN (pul_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11492 DIRECT_BUILTIN (plu_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11493 DIRECT_BUILTIN (puu_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11494 DIRECT_BUILTIN (cvt_ps_s, MIPS_V2SF_FTYPE_SF_SF, MASK_PAIRED_SINGLE_FLOAT),
11495 DIRECT_BUILTIN (cvt_s_pl, MIPS_SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11496 DIRECT_BUILTIN (cvt_s_pu, MIPS_SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11497 DIRECT_BUILTIN (abs_ps, MIPS_V2SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11499 DIRECT_BUILTIN (alnv_ps, MIPS_V2SF_FTYPE_V2SF_V2SF_INT,
11500 MASK_PAIRED_SINGLE_FLOAT),
11501 DIRECT_BUILTIN (addr_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
11502 DIRECT_BUILTIN (mulr_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
11503 DIRECT_BUILTIN (cvt_pw_ps, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
11504 DIRECT_BUILTIN (cvt_ps_pw, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
11506 DIRECT_BUILTIN (recip1_s, MIPS_SF_FTYPE_SF, MASK_MIPS3D),
11507 DIRECT_BUILTIN (recip1_d, MIPS_DF_FTYPE_DF, MASK_MIPS3D),
11508 DIRECT_BUILTIN (recip1_ps, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
11509 DIRECT_BUILTIN (recip2_s, MIPS_SF_FTYPE_SF_SF, MASK_MIPS3D),
11510 DIRECT_BUILTIN (recip2_d, MIPS_DF_FTYPE_DF_DF, MASK_MIPS3D),
11511 DIRECT_BUILTIN (recip2_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
11513 DIRECT_BUILTIN (rsqrt1_s, MIPS_SF_FTYPE_SF, MASK_MIPS3D),
11514 DIRECT_BUILTIN (rsqrt1_d, MIPS_DF_FTYPE_DF, MASK_MIPS3D),
11515 DIRECT_BUILTIN (rsqrt1_ps, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
11516 DIRECT_BUILTIN (rsqrt2_s, MIPS_SF_FTYPE_SF_SF, MASK_MIPS3D),
11517 DIRECT_BUILTIN (rsqrt2_d, MIPS_DF_FTYPE_DF_DF, MASK_MIPS3D),
11518 DIRECT_BUILTIN (rsqrt2_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
11520 MIPS_FP_CONDITIONS (CMP_BUILTINS)
11523 /* Builtin functions for the SB-1 processor. */
11525 #define CODE_FOR_mips_sqrt_ps CODE_FOR_sqrtv2sf2
11527 static const struct builtin_description sb1_bdesc[] =
11529 DIRECT_BUILTIN (sqrt_ps, MIPS_V2SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT)
11532 /* Builtin functions for DSP ASE. */
11534 #define CODE_FOR_mips_addq_ph CODE_FOR_addv2hi3
11535 #define CODE_FOR_mips_addu_qb CODE_FOR_addv4qi3
11536 #define CODE_FOR_mips_subq_ph CODE_FOR_subv2hi3
11537 #define CODE_FOR_mips_subu_qb CODE_FOR_subv4qi3
11538 #define CODE_FOR_mips_mul_ph CODE_FOR_mulv2hi3
11540 /* Define a MIPS_BUILTIN_DIRECT_NO_TARGET function for instruction
11541 CODE_FOR_mips_<INSN>. FUNCTION_TYPE and TARGET_FLAGS are
11542 builtin_description fields. */
11543 #define DIRECT_NO_TARGET_BUILTIN(INSN, FUNCTION_TYPE, TARGET_FLAGS) \
11544 { CODE_FOR_mips_ ## INSN, 0, "__builtin_mips_" #INSN, \
11545 MIPS_BUILTIN_DIRECT_NO_TARGET, FUNCTION_TYPE, TARGET_FLAGS }
11547 /* Define __builtin_mips_bposge<VALUE>. <VALUE> is 32 for the MIPS32 DSP
11548 branch instruction. TARGET_FLAGS is a builtin_description field. */
11549 #define BPOSGE_BUILTIN(VALUE, TARGET_FLAGS) \
11550 { CODE_FOR_mips_bposge, 0, "__builtin_mips_bposge" #VALUE, \
11551 MIPS_BUILTIN_BPOSGE ## VALUE, MIPS_SI_FTYPE_VOID, TARGET_FLAGS }
11553 static const struct builtin_description dsp_bdesc[] =
11555 DIRECT_BUILTIN (addq_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11556 DIRECT_BUILTIN (addq_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11557 DIRECT_BUILTIN (addq_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11558 DIRECT_BUILTIN (addu_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
11559 DIRECT_BUILTIN (addu_s_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
11560 DIRECT_BUILTIN (subq_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11561 DIRECT_BUILTIN (subq_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11562 DIRECT_BUILTIN (subq_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11563 DIRECT_BUILTIN (subu_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
11564 DIRECT_BUILTIN (subu_s_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
11565 DIRECT_BUILTIN (addsc, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11566 DIRECT_BUILTIN (addwc, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11567 DIRECT_BUILTIN (modsub, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11568 DIRECT_BUILTIN (raddu_w_qb, MIPS_SI_FTYPE_V4QI, MASK_DSP),
11569 DIRECT_BUILTIN (absq_s_ph, MIPS_V2HI_FTYPE_V2HI, MASK_DSP),
11570 DIRECT_BUILTIN (absq_s_w, MIPS_SI_FTYPE_SI, MASK_DSP),
11571 DIRECT_BUILTIN (precrq_qb_ph, MIPS_V4QI_FTYPE_V2HI_V2HI, MASK_DSP),
11572 DIRECT_BUILTIN (precrq_ph_w, MIPS_V2HI_FTYPE_SI_SI, MASK_DSP),
11573 DIRECT_BUILTIN (precrq_rs_ph_w, MIPS_V2HI_FTYPE_SI_SI, MASK_DSP),
11574 DIRECT_BUILTIN (precrqu_s_qb_ph, MIPS_V4QI_FTYPE_V2HI_V2HI, MASK_DSP),
11575 DIRECT_BUILTIN (preceq_w_phl, MIPS_SI_FTYPE_V2HI, MASK_DSP),
11576 DIRECT_BUILTIN (preceq_w_phr, MIPS_SI_FTYPE_V2HI, MASK_DSP),
11577 DIRECT_BUILTIN (precequ_ph_qbl, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11578 DIRECT_BUILTIN (precequ_ph_qbr, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11579 DIRECT_BUILTIN (precequ_ph_qbla, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11580 DIRECT_BUILTIN (precequ_ph_qbra, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11581 DIRECT_BUILTIN (preceu_ph_qbl, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11582 DIRECT_BUILTIN (preceu_ph_qbr, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11583 DIRECT_BUILTIN (preceu_ph_qbla, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11584 DIRECT_BUILTIN (preceu_ph_qbra, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11585 DIRECT_BUILTIN (shll_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSP),
11586 DIRECT_BUILTIN (shll_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
11587 DIRECT_BUILTIN (shll_s_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
11588 DIRECT_BUILTIN (shll_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11589 DIRECT_BUILTIN (shrl_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSP),
11590 DIRECT_BUILTIN (shra_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
11591 DIRECT_BUILTIN (shra_r_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
11592 DIRECT_BUILTIN (shra_r_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11593 DIRECT_BUILTIN (muleu_s_ph_qbl, MIPS_V2HI_FTYPE_V4QI_V2HI, MASK_DSP),
11594 DIRECT_BUILTIN (muleu_s_ph_qbr, MIPS_V2HI_FTYPE_V4QI_V2HI, MASK_DSP),
11595 DIRECT_BUILTIN (mulq_rs_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11596 DIRECT_BUILTIN (muleq_s_w_phl, MIPS_SI_FTYPE_V2HI_V2HI, MASK_DSP),
11597 DIRECT_BUILTIN (muleq_s_w_phr, MIPS_SI_FTYPE_V2HI_V2HI, MASK_DSP),
11598 DIRECT_BUILTIN (bitrev, MIPS_SI_FTYPE_SI, MASK_DSP),
11599 DIRECT_BUILTIN (insv, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11600 DIRECT_BUILTIN (repl_qb, MIPS_V4QI_FTYPE_SI, MASK_DSP),
11601 DIRECT_BUILTIN (repl_ph, MIPS_V2HI_FTYPE_SI, MASK_DSP),
11602 DIRECT_NO_TARGET_BUILTIN (cmpu_eq_qb, MIPS_VOID_FTYPE_V4QI_V4QI, MASK_DSP),
11603 DIRECT_NO_TARGET_BUILTIN (cmpu_lt_qb, MIPS_VOID_FTYPE_V4QI_V4QI, MASK_DSP),
11604 DIRECT_NO_TARGET_BUILTIN (cmpu_le_qb, MIPS_VOID_FTYPE_V4QI_V4QI, MASK_DSP),
11605 DIRECT_BUILTIN (cmpgu_eq_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSP),
11606 DIRECT_BUILTIN (cmpgu_lt_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSP),
11607 DIRECT_BUILTIN (cmpgu_le_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSP),
11608 DIRECT_NO_TARGET_BUILTIN (cmp_eq_ph, MIPS_VOID_FTYPE_V2HI_V2HI, MASK_DSP),
11609 DIRECT_NO_TARGET_BUILTIN (cmp_lt_ph, MIPS_VOID_FTYPE_V2HI_V2HI, MASK_DSP),
11610 DIRECT_NO_TARGET_BUILTIN (cmp_le_ph, MIPS_VOID_FTYPE_V2HI_V2HI, MASK_DSP),
11611 DIRECT_BUILTIN (pick_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
11612 DIRECT_BUILTIN (pick_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11613 DIRECT_BUILTIN (packrl_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11614 DIRECT_NO_TARGET_BUILTIN (wrdsp, MIPS_VOID_FTYPE_SI_SI, MASK_DSP),
11615 DIRECT_BUILTIN (rddsp, MIPS_SI_FTYPE_SI, MASK_DSP),
11616 DIRECT_BUILTIN (lbux, MIPS_SI_FTYPE_PTR_SI, MASK_DSP),
11617 DIRECT_BUILTIN (lhx, MIPS_SI_FTYPE_PTR_SI, MASK_DSP),
11618 DIRECT_BUILTIN (lwx, MIPS_SI_FTYPE_PTR_SI, MASK_DSP),
11619 BPOSGE_BUILTIN (32, MASK_DSP),
11621 /* The following are for the MIPS DSP ASE REV 2. */
11622 DIRECT_BUILTIN (absq_s_qb, MIPS_V4QI_FTYPE_V4QI, MASK_DSPR2),
11623 DIRECT_BUILTIN (addu_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11624 DIRECT_BUILTIN (addu_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11625 DIRECT_BUILTIN (adduh_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11626 DIRECT_BUILTIN (adduh_r_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11627 DIRECT_BUILTIN (append, MIPS_SI_FTYPE_SI_SI_SI, MASK_DSPR2),
11628 DIRECT_BUILTIN (balign, MIPS_SI_FTYPE_SI_SI_SI, MASK_DSPR2),
11629 DIRECT_BUILTIN (cmpgdu_eq_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11630 DIRECT_BUILTIN (cmpgdu_lt_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11631 DIRECT_BUILTIN (cmpgdu_le_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11632 DIRECT_BUILTIN (mul_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11633 DIRECT_BUILTIN (mul_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11634 DIRECT_BUILTIN (mulq_rs_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
11635 DIRECT_BUILTIN (mulq_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11636 DIRECT_BUILTIN (mulq_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
11637 DIRECT_BUILTIN (precr_qb_ph, MIPS_V4QI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11638 DIRECT_BUILTIN (precr_sra_ph_w, MIPS_V2HI_FTYPE_SI_SI_SI, MASK_DSPR2),
11639 DIRECT_BUILTIN (precr_sra_r_ph_w, MIPS_V2HI_FTYPE_SI_SI_SI, MASK_DSPR2),
11640 DIRECT_BUILTIN (prepend, MIPS_SI_FTYPE_SI_SI_SI, MASK_DSPR2),
11641 DIRECT_BUILTIN (shra_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSPR2),
11642 DIRECT_BUILTIN (shra_r_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSPR2),
11643 DIRECT_BUILTIN (shrl_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSPR2),
11644 DIRECT_BUILTIN (subu_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11645 DIRECT_BUILTIN (subu_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11646 DIRECT_BUILTIN (subuh_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11647 DIRECT_BUILTIN (subuh_r_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11648 DIRECT_BUILTIN (addqh_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11649 DIRECT_BUILTIN (addqh_r_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11650 DIRECT_BUILTIN (addqh_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
11651 DIRECT_BUILTIN (addqh_r_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
11652 DIRECT_BUILTIN (subqh_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11653 DIRECT_BUILTIN (subqh_r_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11654 DIRECT_BUILTIN (subqh_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
11655 DIRECT_BUILTIN (subqh_r_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2)
11658 static const struct builtin_description dsp_32only_bdesc[] =
11660 DIRECT_BUILTIN (dpau_h_qbl, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
11661 DIRECT_BUILTIN (dpau_h_qbr, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
11662 DIRECT_BUILTIN (dpsu_h_qbl, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
11663 DIRECT_BUILTIN (dpsu_h_qbr, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
11664 DIRECT_BUILTIN (dpaq_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11665 DIRECT_BUILTIN (dpsq_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11666 DIRECT_BUILTIN (mulsaq_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11667 DIRECT_BUILTIN (dpaq_sa_l_w, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSP),
11668 DIRECT_BUILTIN (dpsq_sa_l_w, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSP),
11669 DIRECT_BUILTIN (maq_s_w_phl, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11670 DIRECT_BUILTIN (maq_s_w_phr, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11671 DIRECT_BUILTIN (maq_sa_w_phl, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11672 DIRECT_BUILTIN (maq_sa_w_phr, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11673 DIRECT_BUILTIN (extr_w, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
11674 DIRECT_BUILTIN (extr_r_w, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
11675 DIRECT_BUILTIN (extr_rs_w, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
11676 DIRECT_BUILTIN (extr_s_h, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
11677 DIRECT_BUILTIN (extp, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
11678 DIRECT_BUILTIN (extpdp, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
11679 DIRECT_BUILTIN (shilo, MIPS_DI_FTYPE_DI_SI, MASK_DSP),
11680 DIRECT_BUILTIN (mthlip, MIPS_DI_FTYPE_DI_SI, MASK_DSP),
11682 /* The following are for the MIPS DSP ASE REV 2. */
11683 DIRECT_BUILTIN (dpa_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11684 DIRECT_BUILTIN (dps_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11685 DIRECT_BUILTIN (madd, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSPR2),
11686 DIRECT_BUILTIN (maddu, MIPS_DI_FTYPE_DI_USI_USI, MASK_DSPR2),
11687 DIRECT_BUILTIN (msub, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSPR2),
11688 DIRECT_BUILTIN (msubu, MIPS_DI_FTYPE_DI_USI_USI, MASK_DSPR2),
11689 DIRECT_BUILTIN (mulsa_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11690 DIRECT_BUILTIN (mult, MIPS_DI_FTYPE_SI_SI, MASK_DSPR2),
11691 DIRECT_BUILTIN (multu, MIPS_DI_FTYPE_USI_USI, MASK_DSPR2),
11692 DIRECT_BUILTIN (dpax_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11693 DIRECT_BUILTIN (dpsx_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11694 DIRECT_BUILTIN (dpaqx_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11695 DIRECT_BUILTIN (dpaqx_sa_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11696 DIRECT_BUILTIN (dpsqx_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11697 DIRECT_BUILTIN (dpsqx_sa_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2)
11700 /* This helps provide a mapping from builtin function codes to bdesc
11705 /* The builtin function table that this entry describes. */
11706 const struct builtin_description *bdesc;
11708 /* The number of entries in the builtin function table. */
11711 /* The target processor that supports these builtin functions.
11712 PROCESSOR_MAX means we enable them for all processors. */
11713 enum processor_type proc;
11715 /* If the target has these flags, this builtin function table
11716 will not be supported. */
11717 int unsupported_target_flags;
11720 static const struct bdesc_map bdesc_arrays[] =
11722 { mips_bdesc, ARRAY_SIZE (mips_bdesc), PROCESSOR_MAX, 0 },
11723 { sb1_bdesc, ARRAY_SIZE (sb1_bdesc), PROCESSOR_SB1, 0 },
11724 { dsp_bdesc, ARRAY_SIZE (dsp_bdesc), PROCESSOR_MAX, 0 },
11725 { dsp_32only_bdesc, ARRAY_SIZE (dsp_32only_bdesc), PROCESSOR_MAX,
11729 /* Take the argument ARGNUM of the arglist of EXP and convert it into a form
11730 suitable for input operand OP of instruction ICODE. Return the value. */
11733 mips_prepare_builtin_arg (enum insn_code icode,
11734 unsigned int op, tree exp, unsigned int argnum)
11737 enum machine_mode mode;
11739 value = expand_normal (CALL_EXPR_ARG (exp, argnum));
11740 mode = insn_data[icode].operand[op].mode;
11741 if (!insn_data[icode].operand[op].predicate (value, mode))
11743 value = copy_to_mode_reg (mode, value);
11744 /* Check the predicate again. */
11745 if (!insn_data[icode].operand[op].predicate (value, mode))
11747 error ("invalid argument to builtin function");
11755 /* Return an rtx suitable for output operand OP of instruction ICODE.
11756 If TARGET is non-null, try to use it where possible. */
11759 mips_prepare_builtin_target (enum insn_code icode, unsigned int op, rtx target)
11761 enum machine_mode mode;
11763 mode = insn_data[icode].operand[op].mode;
11764 if (target == 0 || !insn_data[icode].operand[op].predicate (target, mode))
11765 target = gen_reg_rtx (mode);
11770 /* Expand builtin functions. This is called from TARGET_EXPAND_BUILTIN. */
11773 mips_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
11774 enum machine_mode mode ATTRIBUTE_UNUSED,
11775 int ignore ATTRIBUTE_UNUSED)
11777 enum insn_code icode;
11778 enum mips_builtin_type type;
11780 unsigned int fcode;
11781 const struct builtin_description *bdesc;
11782 const struct bdesc_map *m;
11784 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
11785 fcode = DECL_FUNCTION_CODE (fndecl);
11789 error ("built-in function %qs not supported for MIPS16",
11790 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
11795 for (m = bdesc_arrays; m < &bdesc_arrays[ARRAY_SIZE (bdesc_arrays)]; m++)
11797 if (fcode < m->size)
11800 icode = bdesc[fcode].icode;
11801 type = bdesc[fcode].builtin_type;
11811 case MIPS_BUILTIN_DIRECT:
11812 return mips_expand_builtin_direct (icode, target, exp, true);
11814 case MIPS_BUILTIN_DIRECT_NO_TARGET:
11815 return mips_expand_builtin_direct (icode, target, exp, false);
11817 case MIPS_BUILTIN_MOVT:
11818 case MIPS_BUILTIN_MOVF:
11819 return mips_expand_builtin_movtf (type, icode, bdesc[fcode].cond,
11822 case MIPS_BUILTIN_CMP_ANY:
11823 case MIPS_BUILTIN_CMP_ALL:
11824 case MIPS_BUILTIN_CMP_UPPER:
11825 case MIPS_BUILTIN_CMP_LOWER:
11826 case MIPS_BUILTIN_CMP_SINGLE:
11827 return mips_expand_builtin_compare (type, icode, bdesc[fcode].cond,
11830 case MIPS_BUILTIN_BPOSGE32:
11831 return mips_expand_builtin_bposge (type, target);
11838 /* Init builtin functions. This is called from TARGET_INIT_BUILTIN. */
11841 mips_init_builtins (void)
11843 const struct builtin_description *d;
11844 const struct bdesc_map *m;
11845 tree types[(int) MIPS_MAX_FTYPE_MAX];
11846 tree V2SF_type_node;
11847 tree V2HI_type_node;
11848 tree V4QI_type_node;
11849 unsigned int offset;
11851 /* We have only builtins for -mpaired-single, -mips3d and -mdsp. */
11852 if (!TARGET_PAIRED_SINGLE_FLOAT && !TARGET_DSP)
11855 if (TARGET_PAIRED_SINGLE_FLOAT)
11857 V2SF_type_node = build_vector_type_for_mode (float_type_node, V2SFmode);
11859 types[MIPS_V2SF_FTYPE_V2SF]
11860 = build_function_type_list (V2SF_type_node, V2SF_type_node, NULL_TREE);
11862 types[MIPS_V2SF_FTYPE_V2SF_V2SF]
11863 = build_function_type_list (V2SF_type_node,
11864 V2SF_type_node, V2SF_type_node, NULL_TREE);
11866 types[MIPS_V2SF_FTYPE_V2SF_V2SF_INT]
11867 = build_function_type_list (V2SF_type_node,
11868 V2SF_type_node, V2SF_type_node,
11869 integer_type_node, NULL_TREE);
11871 types[MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF]
11872 = build_function_type_list (V2SF_type_node,
11873 V2SF_type_node, V2SF_type_node,
11874 V2SF_type_node, V2SF_type_node, NULL_TREE);
11876 types[MIPS_V2SF_FTYPE_SF_SF]
11877 = build_function_type_list (V2SF_type_node,
11878 float_type_node, float_type_node, NULL_TREE);
11880 types[MIPS_INT_FTYPE_V2SF_V2SF]
11881 = build_function_type_list (integer_type_node,
11882 V2SF_type_node, V2SF_type_node, NULL_TREE);
11884 types[MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF]
11885 = build_function_type_list (integer_type_node,
11886 V2SF_type_node, V2SF_type_node,
11887 V2SF_type_node, V2SF_type_node, NULL_TREE);
11889 types[MIPS_INT_FTYPE_SF_SF]
11890 = build_function_type_list (integer_type_node,
11891 float_type_node, float_type_node, NULL_TREE);
11893 types[MIPS_INT_FTYPE_DF_DF]
11894 = build_function_type_list (integer_type_node,
11895 double_type_node, double_type_node, NULL_TREE);
11897 types[MIPS_SF_FTYPE_V2SF]
11898 = build_function_type_list (float_type_node, V2SF_type_node, NULL_TREE);
11900 types[MIPS_SF_FTYPE_SF]
11901 = build_function_type_list (float_type_node,
11902 float_type_node, NULL_TREE);
11904 types[MIPS_SF_FTYPE_SF_SF]
11905 = build_function_type_list (float_type_node,
11906 float_type_node, float_type_node, NULL_TREE);
11908 types[MIPS_DF_FTYPE_DF]
11909 = build_function_type_list (double_type_node,
11910 double_type_node, NULL_TREE);
11912 types[MIPS_DF_FTYPE_DF_DF]
11913 = build_function_type_list (double_type_node,
11914 double_type_node, double_type_node, NULL_TREE);
11919 V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
11920 V4QI_type_node = build_vector_type_for_mode (intQI_type_node, V4QImode);
11922 types[MIPS_V2HI_FTYPE_V2HI_V2HI]
11923 = build_function_type_list (V2HI_type_node,
11924 V2HI_type_node, V2HI_type_node,
11927 types[MIPS_SI_FTYPE_SI_SI]
11928 = build_function_type_list (intSI_type_node,
11929 intSI_type_node, intSI_type_node,
11932 types[MIPS_V4QI_FTYPE_V4QI_V4QI]
11933 = build_function_type_list (V4QI_type_node,
11934 V4QI_type_node, V4QI_type_node,
11937 types[MIPS_SI_FTYPE_V4QI]
11938 = build_function_type_list (intSI_type_node,
11942 types[MIPS_V2HI_FTYPE_V2HI]
11943 = build_function_type_list (V2HI_type_node,
11947 types[MIPS_SI_FTYPE_SI]
11948 = build_function_type_list (intSI_type_node,
11952 types[MIPS_V4QI_FTYPE_V2HI_V2HI]
11953 = build_function_type_list (V4QI_type_node,
11954 V2HI_type_node, V2HI_type_node,
11957 types[MIPS_V2HI_FTYPE_SI_SI]
11958 = build_function_type_list (V2HI_type_node,
11959 intSI_type_node, intSI_type_node,
11962 types[MIPS_SI_FTYPE_V2HI]
11963 = build_function_type_list (intSI_type_node,
11967 types[MIPS_V2HI_FTYPE_V4QI]
11968 = build_function_type_list (V2HI_type_node,
11972 types[MIPS_V4QI_FTYPE_V4QI_SI]
11973 = build_function_type_list (V4QI_type_node,
11974 V4QI_type_node, intSI_type_node,
11977 types[MIPS_V2HI_FTYPE_V2HI_SI]
11978 = build_function_type_list (V2HI_type_node,
11979 V2HI_type_node, intSI_type_node,
11982 types[MIPS_V2HI_FTYPE_V4QI_V2HI]
11983 = build_function_type_list (V2HI_type_node,
11984 V4QI_type_node, V2HI_type_node,
11987 types[MIPS_SI_FTYPE_V2HI_V2HI]
11988 = build_function_type_list (intSI_type_node,
11989 V2HI_type_node, V2HI_type_node,
11992 types[MIPS_DI_FTYPE_DI_V4QI_V4QI]
11993 = build_function_type_list (intDI_type_node,
11994 intDI_type_node, V4QI_type_node, V4QI_type_node,
11997 types[MIPS_DI_FTYPE_DI_V2HI_V2HI]
11998 = build_function_type_list (intDI_type_node,
11999 intDI_type_node, V2HI_type_node, V2HI_type_node,
12002 types[MIPS_DI_FTYPE_DI_SI_SI]
12003 = build_function_type_list (intDI_type_node,
12004 intDI_type_node, intSI_type_node, intSI_type_node,
12007 types[MIPS_V4QI_FTYPE_SI]
12008 = build_function_type_list (V4QI_type_node,
12012 types[MIPS_V2HI_FTYPE_SI]
12013 = build_function_type_list (V2HI_type_node,
12017 types[MIPS_VOID_FTYPE_V4QI_V4QI]
12018 = build_function_type_list (void_type_node,
12019 V4QI_type_node, V4QI_type_node,
12022 types[MIPS_SI_FTYPE_V4QI_V4QI]
12023 = build_function_type_list (intSI_type_node,
12024 V4QI_type_node, V4QI_type_node,
12027 types[MIPS_VOID_FTYPE_V2HI_V2HI]
12028 = build_function_type_list (void_type_node,
12029 V2HI_type_node, V2HI_type_node,
12032 types[MIPS_SI_FTYPE_DI_SI]
12033 = build_function_type_list (intSI_type_node,
12034 intDI_type_node, intSI_type_node,
12037 types[MIPS_DI_FTYPE_DI_SI]
12038 = build_function_type_list (intDI_type_node,
12039 intDI_type_node, intSI_type_node,
12042 types[MIPS_VOID_FTYPE_SI_SI]
12043 = build_function_type_list (void_type_node,
12044 intSI_type_node, intSI_type_node,
12047 types[MIPS_SI_FTYPE_PTR_SI]
12048 = build_function_type_list (intSI_type_node,
12049 ptr_type_node, intSI_type_node,
12052 types[MIPS_SI_FTYPE_VOID]
12053 = build_function_type (intSI_type_node, void_list_node);
12057 types[MIPS_V4QI_FTYPE_V4QI]
12058 = build_function_type_list (V4QI_type_node,
12062 types[MIPS_SI_FTYPE_SI_SI_SI]
12063 = build_function_type_list (intSI_type_node,
12064 intSI_type_node, intSI_type_node,
12065 intSI_type_node, NULL_TREE);
12067 types[MIPS_DI_FTYPE_DI_USI_USI]
12068 = build_function_type_list (intDI_type_node,
12070 unsigned_intSI_type_node,
12071 unsigned_intSI_type_node, NULL_TREE);
12073 types[MIPS_DI_FTYPE_SI_SI]
12074 = build_function_type_list (intDI_type_node,
12075 intSI_type_node, intSI_type_node,
12078 types[MIPS_DI_FTYPE_USI_USI]
12079 = build_function_type_list (intDI_type_node,
12080 unsigned_intSI_type_node,
12081 unsigned_intSI_type_node, NULL_TREE);
12083 types[MIPS_V2HI_FTYPE_SI_SI_SI]
12084 = build_function_type_list (V2HI_type_node,
12085 intSI_type_node, intSI_type_node,
12086 intSI_type_node, NULL_TREE);
12091 /* Iterate through all of the bdesc arrays, initializing all of the
12092 builtin functions. */
12095 for (m = bdesc_arrays; m < &bdesc_arrays[ARRAY_SIZE (bdesc_arrays)]; m++)
12097 if ((m->proc == PROCESSOR_MAX || (m->proc == mips_arch))
12098 && (m->unsupported_target_flags & target_flags) == 0)
12099 for (d = m->bdesc; d < &m->bdesc[m->size]; d++)
12100 if ((d->target_flags & target_flags) == d->target_flags)
12101 add_builtin_function (d->name, types[d->function_type],
12102 d - m->bdesc + offset,
12103 BUILT_IN_MD, NULL, NULL);
12108 /* Expand a MIPS_BUILTIN_DIRECT function. ICODE is the code of the
12109 .md pattern and CALL is the function expr with arguments. TARGET,
12110 if nonnull, suggests a good place to put the result.
12111 HAS_TARGET indicates the function must return something. */
12114 mips_expand_builtin_direct (enum insn_code icode, rtx target, tree exp,
12117 rtx ops[MAX_RECOG_OPERANDS];
12123 /* We save target to ops[0]. */
12124 ops[0] = mips_prepare_builtin_target (icode, 0, target);
12128 /* We need to test if the arglist is not zero. Some instructions have extra
12129 clobber registers. */
12130 for (; i < insn_data[icode].n_operands && i <= call_expr_nargs (exp); i++, j++)
12131 ops[i] = mips_prepare_builtin_arg (icode, i, exp, j);
12136 emit_insn (GEN_FCN (icode) (ops[0], ops[1]));
12140 emit_insn (GEN_FCN (icode) (ops[0], ops[1], ops[2]));
12144 emit_insn (GEN_FCN (icode) (ops[0], ops[1], ops[2], ops[3]));
12148 gcc_unreachable ();
12153 /* Expand a __builtin_mips_movt_*_ps() or __builtin_mips_movf_*_ps()
12154 function (TYPE says which). EXP is the tree for the function
12155 function, ICODE is the instruction that should be used to compare
12156 the first two arguments, and COND is the condition it should test.
12157 TARGET, if nonnull, suggests a good place to put the result. */
12160 mips_expand_builtin_movtf (enum mips_builtin_type type,
12161 enum insn_code icode, enum mips_fp_condition cond,
12162 rtx target, tree exp)
12164 rtx cmp_result, op0, op1;
12166 cmp_result = mips_prepare_builtin_target (icode, 0, 0);
12167 op0 = mips_prepare_builtin_arg (icode, 1, exp, 0);
12168 op1 = mips_prepare_builtin_arg (icode, 2, exp, 1);
12169 emit_insn (GEN_FCN (icode) (cmp_result, op0, op1, GEN_INT (cond)));
12171 icode = CODE_FOR_mips_cond_move_tf_ps;
12172 target = mips_prepare_builtin_target (icode, 0, target);
12173 if (type == MIPS_BUILTIN_MOVT)
12175 op1 = mips_prepare_builtin_arg (icode, 2, exp, 2);
12176 op0 = mips_prepare_builtin_arg (icode, 1, exp, 3);
12180 op0 = mips_prepare_builtin_arg (icode, 1, exp, 2);
12181 op1 = mips_prepare_builtin_arg (icode, 2, exp, 3);
12183 emit_insn (gen_mips_cond_move_tf_ps (target, op0, op1, cmp_result));
12187 /* Move VALUE_IF_TRUE into TARGET if CONDITION is true; move VALUE_IF_FALSE
12188 into TARGET otherwise. Return TARGET. */
12191 mips_builtin_branch_and_move (rtx condition, rtx target,
12192 rtx value_if_true, rtx value_if_false)
12194 rtx true_label, done_label;
12196 true_label = gen_label_rtx ();
12197 done_label = gen_label_rtx ();
12199 /* First assume that CONDITION is false. */
12200 mips_emit_move (target, value_if_false);
12202 /* Branch to TRUE_LABEL if CONDITION is true and DONE_LABEL otherwise. */
12203 emit_jump_insn (gen_condjump (condition, true_label));
12204 emit_jump_insn (gen_jump (done_label));
12207 /* Fix TARGET if CONDITION is true. */
12208 emit_label (true_label);
12209 mips_emit_move (target, value_if_true);
12211 emit_label (done_label);
12215 /* Expand a comparison builtin of type BUILTIN_TYPE. ICODE is the code
12216 of the comparison instruction and COND is the condition it should test.
12217 EXP is the function call and arguments and TARGET, if nonnull,
12218 suggests a good place to put the boolean result. */
12221 mips_expand_builtin_compare (enum mips_builtin_type builtin_type,
12222 enum insn_code icode, enum mips_fp_condition cond,
12223 rtx target, tree exp)
12225 rtx offset, condition, cmp_result, ops[MAX_RECOG_OPERANDS];
12229 if (target == 0 || GET_MODE (target) != SImode)
12230 target = gen_reg_rtx (SImode);
12232 /* Prepare the operands to the comparison. */
12233 cmp_result = mips_prepare_builtin_target (icode, 0, 0);
12234 for (i = 1; i < insn_data[icode].n_operands - 1; i++, j++)
12235 ops[i] = mips_prepare_builtin_arg (icode, i, exp, j);
12237 switch (insn_data[icode].n_operands)
12240 emit_insn (GEN_FCN (icode) (cmp_result, ops[1], ops[2], GEN_INT (cond)));
12244 emit_insn (GEN_FCN (icode) (cmp_result, ops[1], ops[2],
12245 ops[3], ops[4], GEN_INT (cond)));
12249 gcc_unreachable ();
12252 /* If the comparison sets more than one register, we define the result
12253 to be 0 if all registers are false and -1 if all registers are true.
12254 The value of the complete result is indeterminate otherwise. */
12255 switch (builtin_type)
12257 case MIPS_BUILTIN_CMP_ALL:
12258 condition = gen_rtx_NE (VOIDmode, cmp_result, constm1_rtx);
12259 return mips_builtin_branch_and_move (condition, target,
12260 const0_rtx, const1_rtx);
12262 case MIPS_BUILTIN_CMP_UPPER:
12263 case MIPS_BUILTIN_CMP_LOWER:
12264 offset = GEN_INT (builtin_type == MIPS_BUILTIN_CMP_UPPER);
12265 condition = gen_single_cc (cmp_result, offset);
12266 return mips_builtin_branch_and_move (condition, target,
12267 const1_rtx, const0_rtx);
12270 condition = gen_rtx_NE (VOIDmode, cmp_result, const0_rtx);
12271 return mips_builtin_branch_and_move (condition, target,
12272 const1_rtx, const0_rtx);
12276 /* Expand a bposge builtin of type BUILTIN_TYPE. TARGET, if nonnull,
12277 suggests a good place to put the boolean result. */
12280 mips_expand_builtin_bposge (enum mips_builtin_type builtin_type, rtx target)
12282 rtx condition, cmp_result;
12285 if (target == 0 || GET_MODE (target) != SImode)
12286 target = gen_reg_rtx (SImode);
12288 cmp_result = gen_rtx_REG (CCDSPmode, CCDSP_PO_REGNUM);
12290 if (builtin_type == MIPS_BUILTIN_BPOSGE32)
12295 condition = gen_rtx_GE (VOIDmode, cmp_result, GEN_INT (cmp_value));
12296 return mips_builtin_branch_and_move (condition, target,
12297 const1_rtx, const0_rtx);
12300 /* Return true if we should force MIPS16 mode for the function named by
12301 the SYMBOL_REF SYMBOL, which belongs to DECL and has type TYPE.
12302 FIRST is true if this is the first time handling this decl. */
12305 mips_use_mips16_mode_p (rtx symbol, tree decl, int first, tree type)
12309 /* Explicit function attributes take precedence. */
12310 if (mips_mips16_type_p (type))
12312 if (mips_nomips16_type_p (type))
12315 /* A nested function should inherit the MIPS16 setting from its parent. */
12316 parent = decl_function_context (decl);
12318 return SYMBOL_REF_MIPS16_FUNC_P (XEXP (DECL_RTL (parent), 0));
12320 /* Handle -mflip-mips16. */
12321 if (TARGET_FLIP_MIPS16
12322 && !DECL_BUILT_IN (decl)
12323 && !DECL_ARTIFICIAL (decl))
12326 /* Use the setting we picked first time around. */
12327 return SYMBOL_REF_MIPS16_FUNC_P (symbol);
12329 mips16_flipper = !mips16_flipper;
12330 if (mips16_flipper)
12331 return !mips_base_mips16;
12334 return mips_base_mips16;
12337 /* Set SYMBOL_REF_FLAGS for the SYMBOL_REF inside RTL, which belongs to DECL.
12338 FIRST is true if this is the first time handling this decl. */
12341 mips_encode_section_info (tree decl, rtx rtl, int first)
12343 default_encode_section_info (decl, rtl, first);
12345 if (TREE_CODE (decl) == FUNCTION_DECL)
12347 rtx symbol = XEXP (rtl, 0);
12348 tree type = TREE_TYPE (decl);
12350 if ((TARGET_LONG_CALLS && !mips_near_type_p (type))
12351 || mips_far_type_p (type))
12352 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LONG_CALL;
12354 if (mips_use_mips16_mode_p (symbol, decl, first, type))
12356 if (flag_pic || TARGET_ABICALLS)
12357 sorry ("MIPS16 PIC");
12359 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_MIPS16_FUNC;
12364 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. Some code models use the incoming
12365 value of PIC_FUNCTION_ADDR_REGNUM to set up the global pointer. */
12368 mips_extra_live_on_entry (bitmap regs)
12370 if (TARGET_USE_GOT && !TARGET_ABSOLUTE_ABICALLS)
12371 bitmap_set_bit (regs, PIC_FUNCTION_ADDR_REGNUM);
12374 /* SImode values are represented as sign-extended to DImode. */
12377 mips_mode_rep_extended (enum machine_mode mode, enum machine_mode mode_rep)
12379 if (TARGET_64BIT && mode == SImode && mode_rep == DImode)
12380 return SIGN_EXTEND;
12385 /* MIPS implementation of TARGET_ASM_OUTPUT_DWARF_DTPREL. */
12388 mips_output_dwarf_dtprel (FILE *file, int size, rtx x)
12393 fputs ("\t.dtprelword\t", file);
12397 fputs ("\t.dtpreldword\t", file);
12401 gcc_unreachable ();
12403 output_addr_const (file, x);
12404 fputs ("+0x8000", file);
12407 #include "gt-mips.h"