1 /* Subroutines used for MIPS code generation.
2 Copyright (C) 1989, 1990, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5 Contributed by A. Lichnewsky, lich@inria.inria.fr.
6 Changes by Michael Meissner, meissner@osf.org.
7 64-bit r4000 support by Ian Lance Taylor, ian@cygnus.com, and
8 Brendan Eich, brendan@microunity.com.
10 This file is part of GCC.
12 GCC is free software; you can redistribute it and/or modify
13 it under the terms of the GNU General Public License as published by
14 the Free Software Foundation; either version 3, or (at your option)
17 GCC is distributed in the hope that it will be useful,
18 but WITHOUT ANY WARRANTY; without even the implied warranty of
19 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 GNU General Public License for more details.
22 You should have received a copy of the GNU General Public License
23 along with GCC; see the file COPYING3. If not see
24 <http://www.gnu.org/licenses/>. */
28 #include "coretypes.h"
33 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "conditions.h"
37 #include "insn-attr.h"
53 #include "target-def.h"
54 #include "integrate.h"
55 #include "langhooks.h"
56 #include "cfglayout.h"
57 #include "sched-int.h"
58 #include "tree-gimple.h"
61 /* True if X is an unspec wrapper around a SYMBOL_REF or LABEL_REF. */
62 #define UNSPEC_ADDRESS_P(X) \
63 (GET_CODE (X) == UNSPEC \
64 && XINT (X, 1) >= UNSPEC_ADDRESS_FIRST \
65 && XINT (X, 1) < UNSPEC_ADDRESS_FIRST + NUM_SYMBOL_TYPES)
67 /* Extract the symbol or label from UNSPEC wrapper X. */
68 #define UNSPEC_ADDRESS(X) \
71 /* Extract the symbol type from UNSPEC wrapper X. */
72 #define UNSPEC_ADDRESS_TYPE(X) \
73 ((enum mips_symbol_type) (XINT (X, 1) - UNSPEC_ADDRESS_FIRST))
75 /* The maximum distance between the top of the stack frame and the
76 value $sp has when we save and restore registers.
78 The value for normal-mode code must be a SMALL_OPERAND and must
79 preserve the maximum stack alignment. We therefore use a value
80 of 0x7ff0 in this case.
82 MIPS16e SAVE and RESTORE instructions can adjust the stack pointer by
83 up to 0x7f8 bytes and can usually save or restore all the registers
84 that we need to save or restore. (Note that we can only use these
85 instructions for o32, for which the stack alignment is 8 bytes.)
87 We use a maximum gap of 0x100 or 0x400 for MIPS16 code when SAVE and
88 RESTORE are not available. We can then use unextended instructions
89 to save and restore registers, and to allocate and deallocate the top
91 #define MIPS_MAX_FIRST_STACK_STEP \
92 (!TARGET_MIPS16 ? 0x7ff0 \
93 : GENERATE_MIPS16E_SAVE_RESTORE ? 0x7f8 \
94 : TARGET_64BIT ? 0x100 : 0x400)
96 /* True if INSN is a mips.md pattern or asm statement. */
97 #define USEFUL_INSN_P(INSN) \
99 && GET_CODE (PATTERN (INSN)) != USE \
100 && GET_CODE (PATTERN (INSN)) != CLOBBER \
101 && GET_CODE (PATTERN (INSN)) != ADDR_VEC \
102 && GET_CODE (PATTERN (INSN)) != ADDR_DIFF_VEC)
104 /* If INSN is a delayed branch sequence, return the first instruction
105 in the sequence, otherwise return INSN itself. */
106 #define SEQ_BEGIN(INSN) \
107 (INSN_P (INSN) && GET_CODE (PATTERN (INSN)) == SEQUENCE \
108 ? XVECEXP (PATTERN (INSN), 0, 0) \
111 /* Likewise for the last instruction in a delayed branch sequence. */
112 #define SEQ_END(INSN) \
113 (INSN_P (INSN) && GET_CODE (PATTERN (INSN)) == SEQUENCE \
114 ? XVECEXP (PATTERN (INSN), 0, XVECLEN (PATTERN (INSN), 0) - 1) \
117 /* Execute the following loop body with SUBINSN set to each instruction
118 between SEQ_BEGIN (INSN) and SEQ_END (INSN) inclusive. */
119 #define FOR_EACH_SUBINSN(SUBINSN, INSN) \
120 for ((SUBINSN) = SEQ_BEGIN (INSN); \
121 (SUBINSN) != NEXT_INSN (SEQ_END (INSN)); \
122 (SUBINSN) = NEXT_INSN (SUBINSN))
124 /* True if bit BIT is set in VALUE. */
125 #define BITSET_P(VALUE, BIT) (((VALUE) & (1 << (BIT))) != 0)
127 /* Classifies an address.
130 A natural register + offset address. The register satisfies
131 mips_valid_base_register_p and the offset is a const_arith_operand.
134 A LO_SUM rtx. The first operand is a valid base register and
135 the second operand is a symbolic address.
138 A signed 16-bit constant address.
141 A constant symbolic address (equivalent to CONSTANT_SYMBOLIC). */
142 enum mips_address_type {
149 /* Classifies the prototype of a builtin function. */
150 enum mips_function_type
152 MIPS_V2SF_FTYPE_V2SF,
153 MIPS_V2SF_FTYPE_V2SF_V2SF,
154 MIPS_V2SF_FTYPE_V2SF_V2SF_INT,
155 MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF,
156 MIPS_V2SF_FTYPE_SF_SF,
157 MIPS_INT_FTYPE_V2SF_V2SF,
158 MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF,
159 MIPS_INT_FTYPE_SF_SF,
160 MIPS_INT_FTYPE_DF_DF,
167 /* For MIPS DSP ASE */
169 MIPS_DI_FTYPE_DI_SI_SI,
170 MIPS_DI_FTYPE_DI_V2HI_V2HI,
171 MIPS_DI_FTYPE_DI_V4QI_V4QI,
173 MIPS_SI_FTYPE_PTR_SI,
177 MIPS_SI_FTYPE_V2HI_V2HI,
179 MIPS_SI_FTYPE_V4QI_V4QI,
182 MIPS_V2HI_FTYPE_SI_SI,
183 MIPS_V2HI_FTYPE_V2HI,
184 MIPS_V2HI_FTYPE_V2HI_SI,
185 MIPS_V2HI_FTYPE_V2HI_V2HI,
186 MIPS_V2HI_FTYPE_V4QI,
187 MIPS_V2HI_FTYPE_V4QI_V2HI,
189 MIPS_V4QI_FTYPE_V2HI_V2HI,
190 MIPS_V4QI_FTYPE_V4QI_SI,
191 MIPS_V4QI_FTYPE_V4QI_V4QI,
192 MIPS_VOID_FTYPE_SI_SI,
193 MIPS_VOID_FTYPE_V2HI_V2HI,
194 MIPS_VOID_FTYPE_V4QI_V4QI,
196 /* For MIPS DSP REV 2 ASE. */
197 MIPS_V4QI_FTYPE_V4QI,
198 MIPS_SI_FTYPE_SI_SI_SI,
199 MIPS_DI_FTYPE_DI_USI_USI,
201 MIPS_DI_FTYPE_USI_USI,
202 MIPS_V2HI_FTYPE_SI_SI_SI,
208 /* Specifies how a builtin function should be converted into rtl. */
209 enum mips_builtin_type
211 /* The builtin corresponds directly to an .md pattern. The return
212 value is mapped to operand 0 and the arguments are mapped to
213 operands 1 and above. */
216 /* The builtin corresponds directly to an .md pattern. There is no return
217 value and the arguments are mapped to operands 0 and above. */
218 MIPS_BUILTIN_DIRECT_NO_TARGET,
220 /* The builtin corresponds to a comparison instruction followed by
221 a mips_cond_move_tf_ps pattern. The first two arguments are the
222 values to compare and the second two arguments are the vector
223 operands for the movt.ps or movf.ps instruction (in assembly order). */
227 /* The builtin corresponds to a V2SF comparison instruction. Operand 0
228 of this instruction is the result of the comparison, which has mode
229 CCV2 or CCV4. The function arguments are mapped to operands 1 and
230 above. The function's return value is an SImode boolean that is
231 true under the following conditions:
233 MIPS_BUILTIN_CMP_ANY: one of the registers is true
234 MIPS_BUILTIN_CMP_ALL: all of the registers are true
235 MIPS_BUILTIN_CMP_LOWER: the first register is true
236 MIPS_BUILTIN_CMP_UPPER: the second register is true. */
237 MIPS_BUILTIN_CMP_ANY,
238 MIPS_BUILTIN_CMP_ALL,
239 MIPS_BUILTIN_CMP_UPPER,
240 MIPS_BUILTIN_CMP_LOWER,
242 /* As above, but the instruction only sets a single $fcc register. */
243 MIPS_BUILTIN_CMP_SINGLE,
245 /* For generating bposge32 branch instructions in MIPS32 DSP ASE. */
246 MIPS_BUILTIN_BPOSGE32
249 /* Invokes MACRO (COND) for each c.cond.fmt condition. */
250 #define MIPS_FP_CONDITIONS(MACRO) \
268 /* Enumerates the codes above as MIPS_FP_COND_<X>. */
269 #define DECLARE_MIPS_COND(X) MIPS_FP_COND_ ## X
270 enum mips_fp_condition {
271 MIPS_FP_CONDITIONS (DECLARE_MIPS_COND)
274 /* Index X provides the string representation of MIPS_FP_COND_<X>. */
275 #define STRINGIFY(X) #X
276 static const char *const mips_fp_conditions[] = {
277 MIPS_FP_CONDITIONS (STRINGIFY)
280 /* A function to save or store a register. The first argument is the
281 register and the second is the stack slot. */
282 typedef void (*mips_save_restore_fn) (rtx, rtx);
284 struct mips16_constant;
285 struct mips_arg_info;
286 struct mips_address_info;
287 struct mips_integer_op;
290 static bool mips_valid_base_register_p (rtx, enum machine_mode, int);
291 static bool mips_classify_address (struct mips_address_info *, rtx,
292 enum machine_mode, int);
293 static bool mips_cannot_force_const_mem (rtx);
294 static bool mips_use_blocks_for_constant_p (enum machine_mode, rtx);
295 static int mips_symbol_insns (enum mips_symbol_type, enum machine_mode);
296 static bool mips16_unextended_reference_p (enum machine_mode mode, rtx, rtx);
297 static rtx mips_force_temporary (rtx, rtx);
298 static rtx mips_unspec_offset_high (rtx, rtx, rtx, enum mips_symbol_type);
299 static rtx mips_add_offset (rtx, rtx, HOST_WIDE_INT);
300 static unsigned int mips_build_shift (struct mips_integer_op *, HOST_WIDE_INT);
301 static unsigned int mips_build_lower (struct mips_integer_op *,
302 unsigned HOST_WIDE_INT);
303 static unsigned int mips_build_integer (struct mips_integer_op *,
304 unsigned HOST_WIDE_INT);
305 static void mips_legitimize_const_move (enum machine_mode, rtx, rtx);
306 static int m16_check_op (rtx, int, int, int);
307 static bool mips_rtx_costs (rtx, int, int, int *);
308 static int mips_address_cost (rtx);
309 static void mips_emit_compare (enum rtx_code *, rtx *, rtx *, bool);
310 static void mips_load_call_address (rtx, rtx, int);
311 static bool mips_function_ok_for_sibcall (tree, tree);
312 static void mips_block_move_straight (rtx, rtx, HOST_WIDE_INT);
313 static void mips_adjust_block_mem (rtx, HOST_WIDE_INT, rtx *, rtx *);
314 static void mips_block_move_loop (rtx, rtx, HOST_WIDE_INT);
315 static void mips_arg_info (const CUMULATIVE_ARGS *, enum machine_mode,
316 tree, int, struct mips_arg_info *);
317 static bool mips_get_unaligned_mem (rtx *, unsigned int, int, rtx *, rtx *);
318 static void mips_set_architecture (const struct mips_cpu_info *);
319 static void mips_set_tune (const struct mips_cpu_info *);
320 static bool mips_handle_option (size_t, const char *, int);
321 static struct machine_function *mips_init_machine_status (void);
322 static void print_operand_reloc (FILE *, rtx, enum mips_symbol_context,
324 static void mips_file_start (void);
325 static int mips_small_data_pattern_1 (rtx *, void *);
326 static int mips_rewrite_small_data_1 (rtx *, void *);
327 static bool mips_function_has_gp_insn (void);
328 static unsigned int mips_global_pointer (void);
329 static bool mips_save_reg_p (unsigned int);
330 static void mips_save_restore_reg (enum machine_mode, int, HOST_WIDE_INT,
331 mips_save_restore_fn);
332 static void mips_for_each_saved_reg (HOST_WIDE_INT, mips_save_restore_fn);
333 static void mips_output_cplocal (void);
334 static void mips_emit_loadgp (void);
335 static void mips_output_function_prologue (FILE *, HOST_WIDE_INT);
336 static void mips_set_frame_expr (rtx);
337 static rtx mips_frame_set (rtx, rtx);
338 static void mips_save_reg (rtx, rtx);
339 static void mips_output_function_epilogue (FILE *, HOST_WIDE_INT);
340 static void mips_restore_reg (rtx, rtx);
341 static void mips_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
342 HOST_WIDE_INT, tree);
343 static int symbolic_expression_p (rtx);
344 static section *mips_select_rtx_section (enum machine_mode, rtx,
345 unsigned HOST_WIDE_INT);
346 static section *mips_function_rodata_section (tree);
347 static bool mips_in_small_data_p (tree);
348 static bool mips_use_anchors_for_symbol_p (rtx);
349 static int mips_fpr_return_fields (tree, tree *);
350 static bool mips_return_in_msb (tree);
351 static rtx mips_return_fpr_pair (enum machine_mode mode,
352 enum machine_mode mode1, HOST_WIDE_INT,
353 enum machine_mode mode2, HOST_WIDE_INT);
354 static rtx mips16_gp_pseudo_reg (void);
355 static void mips16_fp_args (FILE *, int, int);
356 static void build_mips16_function_stub (FILE *);
357 static rtx dump_constants_1 (enum machine_mode, rtx, rtx);
358 static void dump_constants (struct mips16_constant *, rtx);
359 static int mips16_insn_length (rtx);
360 static int mips16_rewrite_pool_refs (rtx *, void *);
361 static void mips16_lay_out_constants (void);
362 static void mips_sim_reset (struct mips_sim *);
363 static void mips_sim_init (struct mips_sim *, state_t);
364 static void mips_sim_next_cycle (struct mips_sim *);
365 static void mips_sim_wait_reg (struct mips_sim *, rtx, rtx);
366 static int mips_sim_wait_regs_2 (rtx *, void *);
367 static void mips_sim_wait_regs_1 (rtx *, void *);
368 static void mips_sim_wait_regs (struct mips_sim *, rtx);
369 static void mips_sim_wait_units (struct mips_sim *, rtx);
370 static void mips_sim_wait_insn (struct mips_sim *, rtx);
371 static void mips_sim_record_set (rtx, const_rtx, void *);
372 static void mips_sim_issue_insn (struct mips_sim *, rtx);
373 static void mips_sim_issue_nop (struct mips_sim *);
374 static void mips_sim_finish_insn (struct mips_sim *, rtx);
375 static void vr4130_avoid_branch_rt_conflict (rtx);
376 static void vr4130_align_insns (void);
377 static void mips_avoid_hazard (rtx, rtx, int *, rtx *, rtx);
378 static void mips_avoid_hazards (void);
379 static void mips_reorg (void);
380 static bool mips_strict_matching_cpu_name_p (const char *, const char *);
381 static bool mips_matching_cpu_name_p (const char *, const char *);
382 static const struct mips_cpu_info *mips_parse_cpu (const char *);
383 static const struct mips_cpu_info *mips_cpu_info_from_isa (int);
384 static bool mips_return_in_memory (tree, tree);
385 static bool mips_strict_argument_naming (CUMULATIVE_ARGS *);
386 static void mips_macc_chains_record (rtx);
387 static void mips_macc_chains_reorder (rtx *, int);
388 static void vr4130_true_reg_dependence_p_1 (rtx, const_rtx, void *);
389 static bool vr4130_true_reg_dependence_p (rtx);
390 static bool vr4130_swap_insns_p (rtx, rtx);
391 static void vr4130_reorder (rtx *, int);
392 static void mips_promote_ready (rtx *, int, int);
393 static int mips_sched_reorder (FILE *, int, rtx *, int *, int);
394 static int mips_variable_issue (FILE *, int, rtx, int);
395 static int mips_adjust_cost (rtx, rtx, rtx, int);
396 static int mips_issue_rate (void);
397 static int mips_multipass_dfa_lookahead (void);
398 static void mips_init_libfuncs (void);
399 static void mips_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
401 static tree mips_build_builtin_va_list (void);
402 static tree mips_gimplify_va_arg_expr (tree, tree, tree *, tree *);
403 static bool mips_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode mode,
405 static bool mips_callee_copies (CUMULATIVE_ARGS *, enum machine_mode mode,
407 static int mips_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode mode,
409 static bool mips_valid_pointer_mode (enum machine_mode);
410 static bool mips_vector_mode_supported_p (enum machine_mode);
411 static rtx mips_prepare_builtin_arg (enum insn_code, unsigned int, tree, unsigned int);
412 static rtx mips_prepare_builtin_target (enum insn_code, unsigned int, rtx);
413 static rtx mips_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
414 static void mips_init_builtins (void);
415 static rtx mips_expand_builtin_direct (enum insn_code, rtx, tree, bool);
416 static rtx mips_expand_builtin_movtf (enum mips_builtin_type,
417 enum insn_code, enum mips_fp_condition,
419 static rtx mips_expand_builtin_compare (enum mips_builtin_type,
420 enum insn_code, enum mips_fp_condition,
422 static rtx mips_expand_builtin_bposge (enum mips_builtin_type, rtx);
423 static void mips_encode_section_info (tree, rtx, int);
424 static void mips_extra_live_on_entry (bitmap);
425 static int mips_comp_type_attributes (tree, tree);
426 static int mips_mode_rep_extended (enum machine_mode, enum machine_mode);
427 static bool mips_offset_within_alignment_p (rtx, HOST_WIDE_INT);
428 static void mips_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
430 /* Structure to be filled in by compute_frame_size with register
431 save masks, and offsets for the current function. */
433 struct mips_frame_info GTY(())
435 HOST_WIDE_INT total_size; /* # bytes that the entire frame takes up */
436 HOST_WIDE_INT var_size; /* # bytes that variables take up */
437 HOST_WIDE_INT args_size; /* # bytes that outgoing arguments take up */
438 HOST_WIDE_INT cprestore_size; /* # bytes that the .cprestore slot takes up */
439 HOST_WIDE_INT gp_reg_size; /* # bytes needed to store gp regs */
440 HOST_WIDE_INT fp_reg_size; /* # bytes needed to store fp regs */
441 unsigned int mask; /* mask of saved gp registers */
442 unsigned int fmask; /* mask of saved fp registers */
443 HOST_WIDE_INT gp_save_offset; /* offset from vfp to store gp registers */
444 HOST_WIDE_INT fp_save_offset; /* offset from vfp to store fp registers */
445 HOST_WIDE_INT gp_sp_offset; /* offset from new sp to store gp registers */
446 HOST_WIDE_INT fp_sp_offset; /* offset from new sp to store fp registers */
447 bool initialized; /* true if frame size already calculated */
448 int num_gp; /* number of gp registers saved */
449 int num_fp; /* number of fp registers saved */
452 struct machine_function GTY(()) {
453 /* Pseudo-reg holding the value of $28 in a mips16 function which
454 refers to GP relative global variables. */
455 rtx mips16_gp_pseudo_rtx;
457 /* The number of extra stack bytes taken up by register varargs.
458 This area is allocated by the callee at the very top of the frame. */
461 /* Current frame information, calculated by compute_frame_size. */
462 struct mips_frame_info frame;
464 /* The register to use as the global pointer within this function. */
465 unsigned int global_pointer;
467 /* True if mips_adjust_insn_length should ignore an instruction's
469 bool ignore_hazard_length_p;
471 /* True if the whole function is suitable for .set noreorder and
473 bool all_noreorder_p;
475 /* True if the function is known to have an instruction that needs $gp. */
478 /* True if we have emitted an instruction to initialize
479 mips16_gp_pseudo_rtx. */
480 bool initialized_mips16_gp_pseudo_p;
483 /* Information about a single argument. */
486 /* True if the argument is passed in a floating-point register, or
487 would have been if we hadn't run out of registers. */
490 /* The number of words passed in registers, rounded up. */
491 unsigned int reg_words;
493 /* For EABI, the offset of the first register from GP_ARG_FIRST or
494 FP_ARG_FIRST. For other ABIs, the offset of the first register from
495 the start of the ABI's argument structure (see the CUMULATIVE_ARGS
496 comment for details).
498 The value is MAX_ARGS_IN_REGISTERS if the argument is passed entirely
500 unsigned int reg_offset;
502 /* The number of words that must be passed on the stack, rounded up. */
503 unsigned int stack_words;
505 /* The offset from the start of the stack overflow area of the argument's
506 first stack word. Only meaningful when STACK_WORDS is nonzero. */
507 unsigned int stack_offset;
511 /* Information about an address described by mips_address_type.
517 REG is the base register and OFFSET is the constant offset.
520 REG is the register that contains the high part of the address,
521 OFFSET is the symbolic address being referenced and SYMBOL_TYPE
522 is the type of OFFSET's symbol.
525 SYMBOL_TYPE is the type of symbol being referenced. */
527 struct mips_address_info
529 enum mips_address_type type;
532 enum mips_symbol_type symbol_type;
536 /* One stage in a constant building sequence. These sequences have
540 A = A CODE[1] VALUE[1]
541 A = A CODE[2] VALUE[2]
544 where A is an accumulator, each CODE[i] is a binary rtl operation
545 and each VALUE[i] is a constant integer. */
546 struct mips_integer_op {
548 unsigned HOST_WIDE_INT value;
552 /* The largest number of operations needed to load an integer constant.
553 The worst accepted case for 64-bit constants is LUI,ORI,SLL,ORI,SLL,ORI.
554 When the lowest bit is clear, we can try, but reject a sequence with
555 an extra SLL at the end. */
556 #define MIPS_MAX_INTEGER_OPS 7
558 /* Information about a MIPS16e SAVE or RESTORE instruction. */
559 struct mips16e_save_restore_info {
560 /* The number of argument registers saved by a SAVE instruction.
561 0 for RESTORE instructions. */
564 /* Bit X is set if the instruction saves or restores GPR X. */
567 /* The total number of bytes to allocate. */
571 /* Global variables for machine-dependent things. */
573 /* Threshold for data being put into the small data/bss area, instead
574 of the normal data area. */
575 int mips_section_threshold = -1;
577 /* Count the number of .file directives, so that .loc is up to date. */
578 int num_source_filenames = 0;
580 /* Count the number of sdb related labels are generated (to find block
581 start and end boundaries). */
582 int sdb_label_count = 0;
584 /* Next label # for each statement for Silicon Graphics IRIS systems. */
587 /* Name of the file containing the current function. */
588 const char *current_function_file = "";
590 /* Number of nested .set noreorder, noat, nomacro, and volatile requests. */
596 /* The next branch instruction is a branch likely, not branch normal. */
597 int mips_branch_likely;
599 /* The operands passed to the last cmpMM expander. */
602 /* The target cpu for code generation. */
603 enum processor_type mips_arch;
604 const struct mips_cpu_info *mips_arch_info;
606 /* The target cpu for optimization and scheduling. */
607 enum processor_type mips_tune;
608 const struct mips_cpu_info *mips_tune_info;
610 /* Which instruction set architecture to use. */
613 /* Which ABI to use. */
614 int mips_abi = MIPS_ABI_DEFAULT;
616 /* Cost information to use. */
617 const struct mips_rtx_cost_data *mips_cost;
619 /* The -mtext-loads setting. */
620 enum mips_code_readable_setting mips_code_readable = CODE_READABLE_YES;
622 /* The architecture selected by -mipsN. */
623 static const struct mips_cpu_info *mips_isa_info;
625 /* If TRUE, we split addresses into their high and low parts in the RTL. */
626 int mips_split_addresses;
628 /* Mode used for saving/restoring general purpose registers. */
629 static enum machine_mode gpr_mode;
631 /* Array giving truth value on whether or not a given hard register
632 can support a given mode. */
633 char mips_hard_regno_mode_ok[(int)MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
635 /* List of all MIPS punctuation characters used by print_operand. */
636 char mips_print_operand_punct[256];
638 /* Map GCC register number to debugger register number. */
639 int mips_dbx_regno[FIRST_PSEUDO_REGISTER];
640 int mips_dwarf_regno[FIRST_PSEUDO_REGISTER];
642 /* A copy of the original flag_delayed_branch: see override_options. */
643 static int mips_flag_delayed_branch;
645 static GTY (()) int mips_output_filename_first_time = 1;
647 /* mips_split_p[X] is true if symbols of type X can be split by
648 mips_split_symbol(). */
649 bool mips_split_p[NUM_SYMBOL_TYPES];
651 /* mips_lo_relocs[X] is the relocation to use when a symbol of type X
652 appears in a LO_SUM. It can be null if such LO_SUMs aren't valid or
653 if they are matched by a special .md file pattern. */
654 static const char *mips_lo_relocs[NUM_SYMBOL_TYPES];
656 /* Likewise for HIGHs. */
657 static const char *mips_hi_relocs[NUM_SYMBOL_TYPES];
659 /* Map hard register number to register class */
660 const enum reg_class mips_regno_to_class[] =
662 LEA_REGS, LEA_REGS, M16_NA_REGS, V1_REG,
663 M16_REGS, M16_REGS, M16_REGS, M16_REGS,
664 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
665 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
666 M16_NA_REGS, M16_NA_REGS, LEA_REGS, LEA_REGS,
667 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
668 T_REG, PIC_FN_ADDR_REG, LEA_REGS, LEA_REGS,
669 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
670 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
671 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
672 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
673 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
674 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
675 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
676 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
677 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
678 MD0_REG, MD1_REG, NO_REGS, ST_REGS,
679 ST_REGS, ST_REGS, ST_REGS, ST_REGS,
680 ST_REGS, ST_REGS, ST_REGS, NO_REGS,
681 NO_REGS, ALL_REGS, ALL_REGS, NO_REGS,
682 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
683 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
684 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
685 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
686 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
687 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
688 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
689 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
690 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
691 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
692 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
693 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
694 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
695 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
696 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
697 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
698 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
699 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
700 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
701 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
702 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
703 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
704 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
705 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
706 DSP_ACC_REGS, DSP_ACC_REGS, DSP_ACC_REGS, DSP_ACC_REGS,
707 DSP_ACC_REGS, DSP_ACC_REGS, ALL_REGS, ALL_REGS,
708 ALL_REGS, ALL_REGS, ALL_REGS, ALL_REGS
711 /* Table of machine dependent attributes. */
712 const struct attribute_spec mips_attribute_table[] =
714 { "long_call", 0, 0, false, true, true, NULL },
715 { "far", 0, 0, false, true, true, NULL },
716 { "near", 0, 0, false, true, true, NULL },
717 { NULL, 0, 0, false, false, false, NULL }
720 /* A table describing all the processors gcc knows about. Names are
721 matched in the order listed. The first mention of an ISA level is
722 taken as the canonical name for that ISA.
724 To ease comparison, please keep this table in the same order as
725 gas's mips_cpu_info_table[]. Please also make sure that
726 MIPS_ISA_LEVEL_SPEC handles all -march options correctly. */
727 const struct mips_cpu_info mips_cpu_info_table[] = {
728 /* Entries for generic ISAs */
729 { "mips1", PROCESSOR_R3000, 1 },
730 { "mips2", PROCESSOR_R6000, 2 },
731 { "mips3", PROCESSOR_R4000, 3 },
732 { "mips4", PROCESSOR_R8000, 4 },
733 { "mips32", PROCESSOR_4KC, 32 },
734 { "mips32r2", PROCESSOR_M4K, 33 },
735 { "mips64", PROCESSOR_5KC, 64 },
738 { "r3000", PROCESSOR_R3000, 1 },
739 { "r2000", PROCESSOR_R3000, 1 }, /* = r3000 */
740 { "r3900", PROCESSOR_R3900, 1 },
743 { "r6000", PROCESSOR_R6000, 2 },
746 { "r4000", PROCESSOR_R4000, 3 },
747 { "vr4100", PROCESSOR_R4100, 3 },
748 { "vr4111", PROCESSOR_R4111, 3 },
749 { "vr4120", PROCESSOR_R4120, 3 },
750 { "vr4130", PROCESSOR_R4130, 3 },
751 { "vr4300", PROCESSOR_R4300, 3 },
752 { "r4400", PROCESSOR_R4000, 3 }, /* = r4000 */
753 { "r4600", PROCESSOR_R4600, 3 },
754 { "orion", PROCESSOR_R4600, 3 }, /* = r4600 */
755 { "r4650", PROCESSOR_R4650, 3 },
758 { "r8000", PROCESSOR_R8000, 4 },
759 { "vr5000", PROCESSOR_R5000, 4 },
760 { "vr5400", PROCESSOR_R5400, 4 },
761 { "vr5500", PROCESSOR_R5500, 4 },
762 { "rm7000", PROCESSOR_R7000, 4 },
763 { "rm9000", PROCESSOR_R9000, 4 },
766 { "4kc", PROCESSOR_4KC, 32 },
767 { "4km", PROCESSOR_4KC, 32 }, /* = 4kc */
768 { "4kp", PROCESSOR_4KP, 32 },
769 { "4ksc", PROCESSOR_4KC, 32 },
771 /* MIPS32 Release 2 */
772 { "m4k", PROCESSOR_M4K, 33 },
773 { "4kec", PROCESSOR_4KC, 33 },
774 { "4kem", PROCESSOR_4KC, 33 },
775 { "4kep", PROCESSOR_4KP, 33 },
776 { "4ksd", PROCESSOR_4KC, 33 },
778 { "24kc", PROCESSOR_24KC, 33 },
779 { "24kf2_1", PROCESSOR_24KF2_1, 33 },
780 { "24kf", PROCESSOR_24KF2_1, 33 },
781 { "24kf1_1", PROCESSOR_24KF1_1, 33 },
782 { "24kfx", PROCESSOR_24KF1_1, 33 },
783 { "24kx", PROCESSOR_24KF1_1, 33 },
785 { "24kec", PROCESSOR_24KC, 33 }, /* 24K with DSP */
786 { "24kef2_1", PROCESSOR_24KF2_1, 33 },
787 { "24kef", PROCESSOR_24KF2_1, 33 },
788 { "24kef1_1", PROCESSOR_24KF1_1, 33 },
789 { "24kefx", PROCESSOR_24KF1_1, 33 },
790 { "24kex", PROCESSOR_24KF1_1, 33 },
792 { "34kc", PROCESSOR_24KC, 33 }, /* 34K with MT/DSP */
793 { "34kf2_1", PROCESSOR_24KF2_1, 33 },
794 { "34kf", PROCESSOR_24KF2_1, 33 },
795 { "34kf1_1", PROCESSOR_24KF1_1, 33 },
796 { "34kfx", PROCESSOR_24KF1_1, 33 },
797 { "34kx", PROCESSOR_24KF1_1, 33 },
799 { "74kc", PROCESSOR_74KC, 33 }, /* 74K with DSPr2 */
800 { "74kf2_1", PROCESSOR_74KF2_1, 33 },
801 { "74kf", PROCESSOR_74KF2_1, 33 },
802 { "74kf1_1", PROCESSOR_74KF1_1, 33 },
803 { "74kfx", PROCESSOR_74KF1_1, 33 },
804 { "74kx", PROCESSOR_74KF1_1, 33 },
805 { "74kf3_2", PROCESSOR_74KF3_2, 33 },
808 { "5kc", PROCESSOR_5KC, 64 },
809 { "5kf", PROCESSOR_5KF, 64 },
810 { "20kc", PROCESSOR_20KC, 64 },
811 { "sb1", PROCESSOR_SB1, 64 },
812 { "sb1a", PROCESSOR_SB1A, 64 },
813 { "sr71000", PROCESSOR_SR71000, 64 },
819 /* Default costs. If these are used for a processor we should look
820 up the actual costs. */
821 #define DEFAULT_COSTS COSTS_N_INSNS (6), /* fp_add */ \
822 COSTS_N_INSNS (7), /* fp_mult_sf */ \
823 COSTS_N_INSNS (8), /* fp_mult_df */ \
824 COSTS_N_INSNS (23), /* fp_div_sf */ \
825 COSTS_N_INSNS (36), /* fp_div_df */ \
826 COSTS_N_INSNS (10), /* int_mult_si */ \
827 COSTS_N_INSNS (10), /* int_mult_di */ \
828 COSTS_N_INSNS (69), /* int_div_si */ \
829 COSTS_N_INSNS (69), /* int_div_di */ \
830 2, /* branch_cost */ \
831 4 /* memory_latency */
833 /* Need to replace these with the costs of calling the appropriate
835 #define SOFT_FP_COSTS COSTS_N_INSNS (256), /* fp_add */ \
836 COSTS_N_INSNS (256), /* fp_mult_sf */ \
837 COSTS_N_INSNS (256), /* fp_mult_df */ \
838 COSTS_N_INSNS (256), /* fp_div_sf */ \
839 COSTS_N_INSNS (256) /* fp_div_df */
841 static struct mips_rtx_cost_data const mips_rtx_cost_optimize_size =
843 COSTS_N_INSNS (1), /* fp_add */
844 COSTS_N_INSNS (1), /* fp_mult_sf */
845 COSTS_N_INSNS (1), /* fp_mult_df */
846 COSTS_N_INSNS (1), /* fp_div_sf */
847 COSTS_N_INSNS (1), /* fp_div_df */
848 COSTS_N_INSNS (1), /* int_mult_si */
849 COSTS_N_INSNS (1), /* int_mult_di */
850 COSTS_N_INSNS (1), /* int_div_si */
851 COSTS_N_INSNS (1), /* int_div_di */
853 4 /* memory_latency */
856 static struct mips_rtx_cost_data const mips_rtx_cost_data[PROCESSOR_MAX] =
859 COSTS_N_INSNS (2), /* fp_add */
860 COSTS_N_INSNS (4), /* fp_mult_sf */
861 COSTS_N_INSNS (5), /* fp_mult_df */
862 COSTS_N_INSNS (12), /* fp_div_sf */
863 COSTS_N_INSNS (19), /* fp_div_df */
864 COSTS_N_INSNS (12), /* int_mult_si */
865 COSTS_N_INSNS (12), /* int_mult_di */
866 COSTS_N_INSNS (35), /* int_div_si */
867 COSTS_N_INSNS (35), /* int_div_di */
869 4 /* memory_latency */
874 COSTS_N_INSNS (6), /* int_mult_si */
875 COSTS_N_INSNS (6), /* int_mult_di */
876 COSTS_N_INSNS (36), /* int_div_si */
877 COSTS_N_INSNS (36), /* int_div_di */
879 4 /* memory_latency */
883 COSTS_N_INSNS (36), /* int_mult_si */
884 COSTS_N_INSNS (36), /* int_mult_di */
885 COSTS_N_INSNS (37), /* int_div_si */
886 COSTS_N_INSNS (37), /* int_div_di */
888 4 /* memory_latency */
892 COSTS_N_INSNS (4), /* int_mult_si */
893 COSTS_N_INSNS (11), /* int_mult_di */
894 COSTS_N_INSNS (36), /* int_div_si */
895 COSTS_N_INSNS (68), /* int_div_di */
897 4 /* memory_latency */
900 COSTS_N_INSNS (4), /* fp_add */
901 COSTS_N_INSNS (4), /* fp_mult_sf */
902 COSTS_N_INSNS (5), /* fp_mult_df */
903 COSTS_N_INSNS (17), /* fp_div_sf */
904 COSTS_N_INSNS (32), /* fp_div_df */
905 COSTS_N_INSNS (4), /* int_mult_si */
906 COSTS_N_INSNS (11), /* int_mult_di */
907 COSTS_N_INSNS (36), /* int_div_si */
908 COSTS_N_INSNS (68), /* int_div_di */
910 4 /* memory_latency */
913 COSTS_N_INSNS (4), /* fp_add */
914 COSTS_N_INSNS (4), /* fp_mult_sf */
915 COSTS_N_INSNS (5), /* fp_mult_df */
916 COSTS_N_INSNS (17), /* fp_div_sf */
917 COSTS_N_INSNS (32), /* fp_div_df */
918 COSTS_N_INSNS (4), /* int_mult_si */
919 COSTS_N_INSNS (7), /* int_mult_di */
920 COSTS_N_INSNS (42), /* int_div_si */
921 COSTS_N_INSNS (72), /* int_div_di */
923 4 /* memory_latency */
927 COSTS_N_INSNS (5), /* int_mult_si */
928 COSTS_N_INSNS (5), /* int_mult_di */
929 COSTS_N_INSNS (41), /* int_div_si */
930 COSTS_N_INSNS (41), /* int_div_di */
932 4 /* memory_latency */
935 COSTS_N_INSNS (8), /* fp_add */
936 COSTS_N_INSNS (8), /* fp_mult_sf */
937 COSTS_N_INSNS (10), /* fp_mult_df */
938 COSTS_N_INSNS (34), /* fp_div_sf */
939 COSTS_N_INSNS (64), /* fp_div_df */
940 COSTS_N_INSNS (5), /* int_mult_si */
941 COSTS_N_INSNS (5), /* int_mult_di */
942 COSTS_N_INSNS (41), /* int_div_si */
943 COSTS_N_INSNS (41), /* int_div_di */
945 4 /* memory_latency */
948 COSTS_N_INSNS (4), /* fp_add */
949 COSTS_N_INSNS (4), /* fp_mult_sf */
950 COSTS_N_INSNS (5), /* fp_mult_df */
951 COSTS_N_INSNS (17), /* fp_div_sf */
952 COSTS_N_INSNS (32), /* fp_div_df */
953 COSTS_N_INSNS (5), /* int_mult_si */
954 COSTS_N_INSNS (5), /* int_mult_di */
955 COSTS_N_INSNS (41), /* int_div_si */
956 COSTS_N_INSNS (41), /* int_div_di */
958 4 /* memory_latency */
962 COSTS_N_INSNS (5), /* int_mult_si */
963 COSTS_N_INSNS (5), /* int_mult_di */
964 COSTS_N_INSNS (41), /* int_div_si */
965 COSTS_N_INSNS (41), /* int_div_di */
967 4 /* memory_latency */
970 COSTS_N_INSNS (8), /* fp_add */
971 COSTS_N_INSNS (8), /* fp_mult_sf */
972 COSTS_N_INSNS (10), /* fp_mult_df */
973 COSTS_N_INSNS (34), /* fp_div_sf */
974 COSTS_N_INSNS (64), /* fp_div_df */
975 COSTS_N_INSNS (5), /* int_mult_si */
976 COSTS_N_INSNS (5), /* int_mult_di */
977 COSTS_N_INSNS (41), /* int_div_si */
978 COSTS_N_INSNS (41), /* int_div_di */
980 4 /* memory_latency */
983 COSTS_N_INSNS (4), /* fp_add */
984 COSTS_N_INSNS (4), /* fp_mult_sf */
985 COSTS_N_INSNS (5), /* fp_mult_df */
986 COSTS_N_INSNS (17), /* fp_div_sf */
987 COSTS_N_INSNS (32), /* fp_div_df */
988 COSTS_N_INSNS (5), /* int_mult_si */
989 COSTS_N_INSNS (5), /* int_mult_di */
990 COSTS_N_INSNS (41), /* int_div_si */
991 COSTS_N_INSNS (41), /* int_div_di */
993 4 /* memory_latency */
996 COSTS_N_INSNS (6), /* fp_add */
997 COSTS_N_INSNS (6), /* fp_mult_sf */
998 COSTS_N_INSNS (7), /* fp_mult_df */
999 COSTS_N_INSNS (25), /* fp_div_sf */
1000 COSTS_N_INSNS (48), /* fp_div_df */
1001 COSTS_N_INSNS (5), /* int_mult_si */
1002 COSTS_N_INSNS (5), /* int_mult_di */
1003 COSTS_N_INSNS (41), /* int_div_si */
1004 COSTS_N_INSNS (41), /* int_div_di */
1005 1, /* branch_cost */
1006 4 /* memory_latency */
1012 COSTS_N_INSNS (2), /* fp_add */
1013 COSTS_N_INSNS (4), /* fp_mult_sf */
1014 COSTS_N_INSNS (5), /* fp_mult_df */
1015 COSTS_N_INSNS (12), /* fp_div_sf */
1016 COSTS_N_INSNS (19), /* fp_div_df */
1017 COSTS_N_INSNS (2), /* int_mult_si */
1018 COSTS_N_INSNS (2), /* int_mult_di */
1019 COSTS_N_INSNS (35), /* int_div_si */
1020 COSTS_N_INSNS (35), /* int_div_di */
1021 1, /* branch_cost */
1022 4 /* memory_latency */
1025 COSTS_N_INSNS (3), /* fp_add */
1026 COSTS_N_INSNS (5), /* fp_mult_sf */
1027 COSTS_N_INSNS (6), /* fp_mult_df */
1028 COSTS_N_INSNS (15), /* fp_div_sf */
1029 COSTS_N_INSNS (16), /* fp_div_df */
1030 COSTS_N_INSNS (17), /* int_mult_si */
1031 COSTS_N_INSNS (17), /* int_mult_di */
1032 COSTS_N_INSNS (38), /* int_div_si */
1033 COSTS_N_INSNS (38), /* int_div_di */
1034 2, /* branch_cost */
1035 6 /* memory_latency */
1038 COSTS_N_INSNS (6), /* fp_add */
1039 COSTS_N_INSNS (7), /* fp_mult_sf */
1040 COSTS_N_INSNS (8), /* fp_mult_df */
1041 COSTS_N_INSNS (23), /* fp_div_sf */
1042 COSTS_N_INSNS (36), /* fp_div_df */
1043 COSTS_N_INSNS (10), /* int_mult_si */
1044 COSTS_N_INSNS (10), /* int_mult_di */
1045 COSTS_N_INSNS (69), /* int_div_si */
1046 COSTS_N_INSNS (69), /* int_div_di */
1047 2, /* branch_cost */
1048 6 /* memory_latency */
1060 /* The only costs that appear to be updated here are
1061 integer multiplication. */
1063 COSTS_N_INSNS (4), /* int_mult_si */
1064 COSTS_N_INSNS (6), /* int_mult_di */
1065 COSTS_N_INSNS (69), /* int_div_si */
1066 COSTS_N_INSNS (69), /* int_div_di */
1067 1, /* branch_cost */
1068 4 /* memory_latency */
1080 COSTS_N_INSNS (6), /* fp_add */
1081 COSTS_N_INSNS (4), /* fp_mult_sf */
1082 COSTS_N_INSNS (5), /* fp_mult_df */
1083 COSTS_N_INSNS (23), /* fp_div_sf */
1084 COSTS_N_INSNS (36), /* fp_div_df */
1085 COSTS_N_INSNS (5), /* int_mult_si */
1086 COSTS_N_INSNS (5), /* int_mult_di */
1087 COSTS_N_INSNS (36), /* int_div_si */
1088 COSTS_N_INSNS (36), /* int_div_di */
1089 1, /* branch_cost */
1090 4 /* memory_latency */
1093 COSTS_N_INSNS (6), /* fp_add */
1094 COSTS_N_INSNS (5), /* fp_mult_sf */
1095 COSTS_N_INSNS (6), /* fp_mult_df */
1096 COSTS_N_INSNS (30), /* fp_div_sf */
1097 COSTS_N_INSNS (59), /* fp_div_df */
1098 COSTS_N_INSNS (3), /* int_mult_si */
1099 COSTS_N_INSNS (4), /* int_mult_di */
1100 COSTS_N_INSNS (42), /* int_div_si */
1101 COSTS_N_INSNS (74), /* int_div_di */
1102 1, /* branch_cost */
1103 4 /* memory_latency */
1106 COSTS_N_INSNS (6), /* fp_add */
1107 COSTS_N_INSNS (5), /* fp_mult_sf */
1108 COSTS_N_INSNS (6), /* fp_mult_df */
1109 COSTS_N_INSNS (30), /* fp_div_sf */
1110 COSTS_N_INSNS (59), /* fp_div_df */
1111 COSTS_N_INSNS (5), /* int_mult_si */
1112 COSTS_N_INSNS (9), /* int_mult_di */
1113 COSTS_N_INSNS (42), /* int_div_si */
1114 COSTS_N_INSNS (74), /* int_div_di */
1115 1, /* branch_cost */
1116 4 /* memory_latency */
1119 /* The only costs that are changed here are
1120 integer multiplication. */
1121 COSTS_N_INSNS (6), /* fp_add */
1122 COSTS_N_INSNS (7), /* fp_mult_sf */
1123 COSTS_N_INSNS (8), /* fp_mult_df */
1124 COSTS_N_INSNS (23), /* fp_div_sf */
1125 COSTS_N_INSNS (36), /* fp_div_df */
1126 COSTS_N_INSNS (5), /* int_mult_si */
1127 COSTS_N_INSNS (9), /* int_mult_di */
1128 COSTS_N_INSNS (69), /* int_div_si */
1129 COSTS_N_INSNS (69), /* int_div_di */
1130 1, /* branch_cost */
1131 4 /* memory_latency */
1137 /* The only costs that are changed here are
1138 integer multiplication. */
1139 COSTS_N_INSNS (6), /* fp_add */
1140 COSTS_N_INSNS (7), /* fp_mult_sf */
1141 COSTS_N_INSNS (8), /* fp_mult_df */
1142 COSTS_N_INSNS (23), /* fp_div_sf */
1143 COSTS_N_INSNS (36), /* fp_div_df */
1144 COSTS_N_INSNS (3), /* int_mult_si */
1145 COSTS_N_INSNS (8), /* int_mult_di */
1146 COSTS_N_INSNS (69), /* int_div_si */
1147 COSTS_N_INSNS (69), /* int_div_di */
1148 1, /* branch_cost */
1149 4 /* memory_latency */
1152 /* These costs are the same as the SB-1A below. */
1153 COSTS_N_INSNS (4), /* fp_add */
1154 COSTS_N_INSNS (4), /* fp_mult_sf */
1155 COSTS_N_INSNS (4), /* fp_mult_df */
1156 COSTS_N_INSNS (24), /* fp_div_sf */
1157 COSTS_N_INSNS (32), /* fp_div_df */
1158 COSTS_N_INSNS (3), /* int_mult_si */
1159 COSTS_N_INSNS (4), /* int_mult_di */
1160 COSTS_N_INSNS (36), /* int_div_si */
1161 COSTS_N_INSNS (68), /* int_div_di */
1162 1, /* branch_cost */
1163 4 /* memory_latency */
1166 /* These costs are the same as the SB-1 above. */
1167 COSTS_N_INSNS (4), /* fp_add */
1168 COSTS_N_INSNS (4), /* fp_mult_sf */
1169 COSTS_N_INSNS (4), /* fp_mult_df */
1170 COSTS_N_INSNS (24), /* fp_div_sf */
1171 COSTS_N_INSNS (32), /* fp_div_df */
1172 COSTS_N_INSNS (3), /* int_mult_si */
1173 COSTS_N_INSNS (4), /* int_mult_di */
1174 COSTS_N_INSNS (36), /* int_div_si */
1175 COSTS_N_INSNS (68), /* int_div_di */
1176 1, /* branch_cost */
1177 4 /* memory_latency */
1184 /* If a MIPS16e SAVE or RESTORE instruction saves or restores register
1185 mips16e_s2_s8_regs[X], it must also save the registers in indexes
1186 X + 1 onwards. Likewise mips16e_a0_a3_regs. */
1187 static const unsigned char mips16e_s2_s8_regs[] = {
1188 30, 23, 22, 21, 20, 19, 18
1190 static const unsigned char mips16e_a0_a3_regs[] = {
1194 /* A list of the registers that can be saved by the MIPS16e SAVE instruction,
1195 ordered from the uppermost in memory to the lowest in memory. */
1196 static const unsigned char mips16e_save_restore_regs[] = {
1197 31, 30, 23, 22, 21, 20, 19, 18, 17, 16, 7, 6, 5, 4
1200 /* Nonzero if -march should decide the default value of
1201 MASK_SOFT_FLOAT_ABI. */
1202 #ifndef MIPS_MARCH_CONTROLS_SOFT_FLOAT
1203 #define MIPS_MARCH_CONTROLS_SOFT_FLOAT 0
1206 /* Initialize the GCC target structure. */
1207 #undef TARGET_ASM_ALIGNED_HI_OP
1208 #define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
1209 #undef TARGET_ASM_ALIGNED_SI_OP
1210 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
1211 #undef TARGET_ASM_ALIGNED_DI_OP
1212 #define TARGET_ASM_ALIGNED_DI_OP "\t.dword\t"
1214 #undef TARGET_ASM_FUNCTION_PROLOGUE
1215 #define TARGET_ASM_FUNCTION_PROLOGUE mips_output_function_prologue
1216 #undef TARGET_ASM_FUNCTION_EPILOGUE
1217 #define TARGET_ASM_FUNCTION_EPILOGUE mips_output_function_epilogue
1218 #undef TARGET_ASM_SELECT_RTX_SECTION
1219 #define TARGET_ASM_SELECT_RTX_SECTION mips_select_rtx_section
1220 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
1221 #define TARGET_ASM_FUNCTION_RODATA_SECTION mips_function_rodata_section
1223 #undef TARGET_SCHED_REORDER
1224 #define TARGET_SCHED_REORDER mips_sched_reorder
1225 #undef TARGET_SCHED_VARIABLE_ISSUE
1226 #define TARGET_SCHED_VARIABLE_ISSUE mips_variable_issue
1227 #undef TARGET_SCHED_ADJUST_COST
1228 #define TARGET_SCHED_ADJUST_COST mips_adjust_cost
1229 #undef TARGET_SCHED_ISSUE_RATE
1230 #define TARGET_SCHED_ISSUE_RATE mips_issue_rate
1231 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1232 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD \
1233 mips_multipass_dfa_lookahead
1235 #undef TARGET_DEFAULT_TARGET_FLAGS
1236 #define TARGET_DEFAULT_TARGET_FLAGS \
1238 | TARGET_CPU_DEFAULT \
1239 | TARGET_ENDIAN_DEFAULT \
1240 | TARGET_FP_EXCEPTIONS_DEFAULT \
1241 | MASK_CHECK_ZERO_DIV \
1243 #undef TARGET_HANDLE_OPTION
1244 #define TARGET_HANDLE_OPTION mips_handle_option
1246 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
1247 #define TARGET_FUNCTION_OK_FOR_SIBCALL mips_function_ok_for_sibcall
1249 #undef TARGET_VALID_POINTER_MODE
1250 #define TARGET_VALID_POINTER_MODE mips_valid_pointer_mode
1251 #undef TARGET_RTX_COSTS
1252 #define TARGET_RTX_COSTS mips_rtx_costs
1253 #undef TARGET_ADDRESS_COST
1254 #define TARGET_ADDRESS_COST mips_address_cost
1256 #undef TARGET_IN_SMALL_DATA_P
1257 #define TARGET_IN_SMALL_DATA_P mips_in_small_data_p
1259 #undef TARGET_MACHINE_DEPENDENT_REORG
1260 #define TARGET_MACHINE_DEPENDENT_REORG mips_reorg
1262 #undef TARGET_ASM_FILE_START
1263 #define TARGET_ASM_FILE_START mips_file_start
1264 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
1265 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
1267 #undef TARGET_INIT_LIBFUNCS
1268 #define TARGET_INIT_LIBFUNCS mips_init_libfuncs
1270 #undef TARGET_BUILD_BUILTIN_VA_LIST
1271 #define TARGET_BUILD_BUILTIN_VA_LIST mips_build_builtin_va_list
1272 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
1273 #define TARGET_GIMPLIFY_VA_ARG_EXPR mips_gimplify_va_arg_expr
1275 #undef TARGET_PROMOTE_FUNCTION_ARGS
1276 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
1277 #undef TARGET_PROMOTE_FUNCTION_RETURN
1278 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
1279 #undef TARGET_PROMOTE_PROTOTYPES
1280 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
1282 #undef TARGET_RETURN_IN_MEMORY
1283 #define TARGET_RETURN_IN_MEMORY mips_return_in_memory
1284 #undef TARGET_RETURN_IN_MSB
1285 #define TARGET_RETURN_IN_MSB mips_return_in_msb
1287 #undef TARGET_ASM_OUTPUT_MI_THUNK
1288 #define TARGET_ASM_OUTPUT_MI_THUNK mips_output_mi_thunk
1289 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
1290 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
1292 #undef TARGET_SETUP_INCOMING_VARARGS
1293 #define TARGET_SETUP_INCOMING_VARARGS mips_setup_incoming_varargs
1294 #undef TARGET_STRICT_ARGUMENT_NAMING
1295 #define TARGET_STRICT_ARGUMENT_NAMING mips_strict_argument_naming
1296 #undef TARGET_MUST_PASS_IN_STACK
1297 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
1298 #undef TARGET_PASS_BY_REFERENCE
1299 #define TARGET_PASS_BY_REFERENCE mips_pass_by_reference
1300 #undef TARGET_CALLEE_COPIES
1301 #define TARGET_CALLEE_COPIES mips_callee_copies
1302 #undef TARGET_ARG_PARTIAL_BYTES
1303 #define TARGET_ARG_PARTIAL_BYTES mips_arg_partial_bytes
1305 #undef TARGET_MODE_REP_EXTENDED
1306 #define TARGET_MODE_REP_EXTENDED mips_mode_rep_extended
1308 #undef TARGET_VECTOR_MODE_SUPPORTED_P
1309 #define TARGET_VECTOR_MODE_SUPPORTED_P mips_vector_mode_supported_p
1311 #undef TARGET_INIT_BUILTINS
1312 #define TARGET_INIT_BUILTINS mips_init_builtins
1313 #undef TARGET_EXPAND_BUILTIN
1314 #define TARGET_EXPAND_BUILTIN mips_expand_builtin
1316 #undef TARGET_HAVE_TLS
1317 #define TARGET_HAVE_TLS HAVE_AS_TLS
1319 #undef TARGET_CANNOT_FORCE_CONST_MEM
1320 #define TARGET_CANNOT_FORCE_CONST_MEM mips_cannot_force_const_mem
1322 #undef TARGET_ENCODE_SECTION_INFO
1323 #define TARGET_ENCODE_SECTION_INFO mips_encode_section_info
1325 #undef TARGET_ATTRIBUTE_TABLE
1326 #define TARGET_ATTRIBUTE_TABLE mips_attribute_table
1328 #undef TARGET_EXTRA_LIVE_ON_ENTRY
1329 #define TARGET_EXTRA_LIVE_ON_ENTRY mips_extra_live_on_entry
1331 #undef TARGET_MIN_ANCHOR_OFFSET
1332 #define TARGET_MIN_ANCHOR_OFFSET -32768
1333 #undef TARGET_MAX_ANCHOR_OFFSET
1334 #define TARGET_MAX_ANCHOR_OFFSET 32767
1335 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1336 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P mips_use_blocks_for_constant_p
1337 #undef TARGET_USE_ANCHORS_FOR_SYMBOL_P
1338 #define TARGET_USE_ANCHORS_FOR_SYMBOL_P mips_use_anchors_for_symbol_p
1340 #undef TARGET_COMP_TYPE_ATTRIBUTES
1341 #define TARGET_COMP_TYPE_ATTRIBUTES mips_comp_type_attributes
1343 #ifdef HAVE_AS_DTPRELWORD
1344 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1345 #define TARGET_ASM_OUTPUT_DWARF_DTPREL mips_output_dwarf_dtprel
1348 struct gcc_target targetm = TARGET_INITIALIZER;
1351 /* Predicates to test for presence of "near" and "far"/"long_call"
1352 attributes on the given TYPE. */
1355 mips_near_type_p (tree type)
1357 return lookup_attribute ("near", TYPE_ATTRIBUTES (type)) != NULL;
1361 mips_far_type_p (tree type)
1363 return (lookup_attribute ("long_call", TYPE_ATTRIBUTES (type)) != NULL
1364 || lookup_attribute ("far", TYPE_ATTRIBUTES (type)) != NULL);
1368 /* Return 0 if the attributes for two types are incompatible, 1 if they
1369 are compatible, and 2 if they are nearly compatible (which causes a
1370 warning to be generated). */
1373 mips_comp_type_attributes (tree type1, tree type2)
1375 /* Check for mismatch of non-default calling convention. */
1376 if (TREE_CODE (type1) != FUNCTION_TYPE)
1379 /* Disallow mixed near/far attributes. */
1380 if (mips_far_type_p (type1) && mips_near_type_p (type2))
1382 if (mips_near_type_p (type1) && mips_far_type_p (type2))
1388 /* If X is a PLUS of a CONST_INT, return the two terms in *BASE_PTR
1389 and *OFFSET_PTR. Return X in *BASE_PTR and 0 in *OFFSET_PTR otherwise. */
1392 mips_split_plus (rtx x, rtx *base_ptr, HOST_WIDE_INT *offset_ptr)
1394 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
1396 *base_ptr = XEXP (x, 0);
1397 *offset_ptr = INTVAL (XEXP (x, 1));
1406 /* Return true if SYMBOL_REF X is associated with a global symbol
1407 (in the STB_GLOBAL sense). */
1410 mips_global_symbol_p (rtx x)
1414 decl = SYMBOL_REF_DECL (x);
1416 return !SYMBOL_REF_LOCAL_P (x);
1418 /* Weakref symbols are not TREE_PUBLIC, but their targets are global
1419 or weak symbols. Relocations in the object file will be against
1420 the target symbol, so it's that symbol's binding that matters here. */
1421 return DECL_P (decl) && (TREE_PUBLIC (decl) || DECL_WEAK (decl));
1424 /* Return true if SYMBOL_REF X binds locally. */
1427 mips_symbol_binds_local_p (rtx x)
1429 return (SYMBOL_REF_DECL (x)
1430 ? targetm.binds_local_p (SYMBOL_REF_DECL (x))
1431 : SYMBOL_REF_LOCAL_P (x));
1434 /* Return the method that should be used to access SYMBOL_REF or
1435 LABEL_REF X in context CONTEXT. */
1437 static enum mips_symbol_type
1438 mips_classify_symbol (rtx x, enum mips_symbol_context context)
1441 return SYMBOL_GOT_DISP;
1443 if (GET_CODE (x) == LABEL_REF)
1445 /* LABEL_REFs are used for jump tables as well as text labels.
1446 Only return SYMBOL_PC_RELATIVE if we know the label is in
1447 the text section. */
1448 if (TARGET_MIPS16_SHORT_JUMP_TABLES)
1449 return SYMBOL_PC_RELATIVE;
1450 if (TARGET_ABICALLS && !TARGET_ABSOLUTE_ABICALLS)
1451 return SYMBOL_GOT_PAGE_OFST;
1452 return SYMBOL_ABSOLUTE;
1455 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1457 if (SYMBOL_REF_TLS_MODEL (x))
1460 if (CONSTANT_POOL_ADDRESS_P (x))
1462 if (TARGET_MIPS16_TEXT_LOADS)
1463 return SYMBOL_PC_RELATIVE;
1465 if (TARGET_MIPS16_PCREL_LOADS && context == SYMBOL_CONTEXT_MEM)
1466 return SYMBOL_PC_RELATIVE;
1468 if (!TARGET_EMBEDDED_DATA
1469 && GET_MODE_SIZE (get_pool_mode (x)) <= mips_section_threshold)
1470 return SYMBOL_GP_RELATIVE;
1473 /* Do not use small-data accesses for weak symbols; they may end up
1475 if (SYMBOL_REF_SMALL_P (x)
1476 && !SYMBOL_REF_WEAK (x))
1477 return SYMBOL_GP_RELATIVE;
1479 /* Don't use GOT accesses for locally-binding symbols when -mno-shared
1482 && !(TARGET_ABSOLUTE_ABICALLS && mips_symbol_binds_local_p (x)))
1484 /* There are three cases to consider:
1486 - o32 PIC (either with or without explicit relocs)
1487 - n32/n64 PIC without explicit relocs
1488 - n32/n64 PIC with explicit relocs
1490 In the first case, both local and global accesses will use an
1491 R_MIPS_GOT16 relocation. We must correctly predict which of
1492 the two semantics (local or global) the assembler and linker
1493 will apply. The choice depends on the symbol's binding rather
1494 than its visibility.
1496 In the second case, the assembler will not use R_MIPS_GOT16
1497 relocations, but it chooses between local and global accesses
1498 in the same way as for o32 PIC.
1500 In the third case we have more freedom since both forms of
1501 access will work for any kind of symbol. However, there seems
1502 little point in doing things differently. */
1503 if (mips_global_symbol_p (x))
1504 return SYMBOL_GOT_DISP;
1506 return SYMBOL_GOT_PAGE_OFST;
1509 if (TARGET_MIPS16_PCREL_LOADS && context != SYMBOL_CONTEXT_CALL)
1510 return SYMBOL_FORCE_TO_MEM;
1511 return SYMBOL_ABSOLUTE;
1514 /* Return true if OFFSET is within the range [0, ALIGN), where ALIGN
1515 is the alignment (in bytes) of SYMBOL_REF X. */
1518 mips_offset_within_alignment_p (rtx x, HOST_WIDE_INT offset)
1520 /* If for some reason we can't get the alignment for the
1521 symbol, initializing this to one means we will only accept
1523 HOST_WIDE_INT align = 1;
1526 /* Get the alignment of the symbol we're referring to. */
1527 t = SYMBOL_REF_DECL (x);
1529 align = DECL_ALIGN_UNIT (t);
1531 return offset >= 0 && offset < align;
1534 /* Return true if X is a symbolic constant that can be used in context
1535 CONTEXT. If it is, store the type of the symbol in *SYMBOL_TYPE. */
1538 mips_symbolic_constant_p (rtx x, enum mips_symbol_context context,
1539 enum mips_symbol_type *symbol_type)
1543 split_const (x, &x, &offset);
1544 if (UNSPEC_ADDRESS_P (x))
1546 *symbol_type = UNSPEC_ADDRESS_TYPE (x);
1547 x = UNSPEC_ADDRESS (x);
1549 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1551 *symbol_type = mips_classify_symbol (x, context);
1552 if (*symbol_type == SYMBOL_TLS)
1558 if (offset == const0_rtx)
1561 /* Check whether a nonzero offset is valid for the underlying
1563 switch (*symbol_type)
1565 case SYMBOL_ABSOLUTE:
1566 case SYMBOL_FORCE_TO_MEM:
1567 case SYMBOL_32_HIGH:
1568 case SYMBOL_64_HIGH:
1571 /* If the target has 64-bit pointers and the object file only
1572 supports 32-bit symbols, the values of those symbols will be
1573 sign-extended. In this case we can't allow an arbitrary offset
1574 in case the 32-bit value X + OFFSET has a different sign from X. */
1575 if (Pmode == DImode && !ABI_HAS_64BIT_SYMBOLS)
1576 return offset_within_block_p (x, INTVAL (offset));
1578 /* In other cases the relocations can handle any offset. */
1581 case SYMBOL_PC_RELATIVE:
1582 /* Allow constant pool references to be converted to LABEL+CONSTANT.
1583 In this case, we no longer have access to the underlying constant,
1584 but the original symbol-based access was known to be valid. */
1585 if (GET_CODE (x) == LABEL_REF)
1590 case SYMBOL_GP_RELATIVE:
1591 /* Make sure that the offset refers to something within the
1592 same object block. This should guarantee that the final
1593 PC- or GP-relative offset is within the 16-bit limit. */
1594 return offset_within_block_p (x, INTVAL (offset));
1596 case SYMBOL_GOT_PAGE_OFST:
1597 case SYMBOL_GOTOFF_PAGE:
1598 /* If the symbol is global, the GOT entry will contain the symbol's
1599 address, and we will apply a 16-bit offset after loading it.
1600 If the symbol is local, the linker should provide enough local
1601 GOT entries for a 16-bit offset, but larger offsets may lead
1603 return SMALL_INT (offset);
1607 /* There is no carry between the HI and LO REL relocations, so the
1608 offset is only valid if we know it won't lead to such a carry. */
1609 return mips_offset_within_alignment_p (x, INTVAL (offset));
1611 case SYMBOL_GOT_DISP:
1612 case SYMBOL_GOTOFF_DISP:
1613 case SYMBOL_GOTOFF_CALL:
1614 case SYMBOL_GOTOFF_LOADGP:
1617 case SYMBOL_GOTTPREL:
1626 /* This function is used to implement REG_MODE_OK_FOR_BASE_P. */
1629 mips_regno_mode_ok_for_base_p (int regno, enum machine_mode mode, int strict)
1631 if (!HARD_REGISTER_NUM_P (regno))
1635 regno = reg_renumber[regno];
1638 /* These fake registers will be eliminated to either the stack or
1639 hard frame pointer, both of which are usually valid base registers.
1640 Reload deals with the cases where the eliminated form isn't valid. */
1641 if (regno == ARG_POINTER_REGNUM || regno == FRAME_POINTER_REGNUM)
1644 /* In mips16 mode, the stack pointer can only address word and doubleword
1645 values, nothing smaller. There are two problems here:
1647 (a) Instantiating virtual registers can introduce new uses of the
1648 stack pointer. If these virtual registers are valid addresses,
1649 the stack pointer should be too.
1651 (b) Most uses of the stack pointer are not made explicit until
1652 FRAME_POINTER_REGNUM and ARG_POINTER_REGNUM have been eliminated.
1653 We don't know until that stage whether we'll be eliminating to the
1654 stack pointer (which needs the restriction) or the hard frame
1655 pointer (which doesn't).
1657 All in all, it seems more consistent to only enforce this restriction
1658 during and after reload. */
1659 if (TARGET_MIPS16 && regno == STACK_POINTER_REGNUM)
1660 return !strict || GET_MODE_SIZE (mode) == 4 || GET_MODE_SIZE (mode) == 8;
1662 return TARGET_MIPS16 ? M16_REG_P (regno) : GP_REG_P (regno);
1666 /* Return true if X is a valid base register for the given mode.
1667 Allow only hard registers if STRICT. */
1670 mips_valid_base_register_p (rtx x, enum machine_mode mode, int strict)
1672 if (!strict && GET_CODE (x) == SUBREG)
1676 && mips_regno_mode_ok_for_base_p (REGNO (x), mode, strict));
1680 /* Return true if X is a valid address for machine mode MODE. If it is,
1681 fill in INFO appropriately. STRICT is true if we should only accept
1682 hard base registers. */
1685 mips_classify_address (struct mips_address_info *info, rtx x,
1686 enum machine_mode mode, int strict)
1688 switch (GET_CODE (x))
1692 info->type = ADDRESS_REG;
1694 info->offset = const0_rtx;
1695 return mips_valid_base_register_p (info->reg, mode, strict);
1698 info->type = ADDRESS_REG;
1699 info->reg = XEXP (x, 0);
1700 info->offset = XEXP (x, 1);
1701 return (mips_valid_base_register_p (info->reg, mode, strict)
1702 && const_arith_operand (info->offset, VOIDmode));
1705 info->type = ADDRESS_LO_SUM;
1706 info->reg = XEXP (x, 0);
1707 info->offset = XEXP (x, 1);
1708 return (mips_valid_base_register_p (info->reg, mode, strict)
1709 && mips_symbolic_constant_p (info->offset, SYMBOL_CONTEXT_MEM,
1711 && mips_symbol_insns (info->symbol_type, mode) > 0
1712 && mips_lo_relocs[info->symbol_type] != 0);
1715 /* Small-integer addresses don't occur very often, but they
1716 are legitimate if $0 is a valid base register. */
1717 info->type = ADDRESS_CONST_INT;
1718 return !TARGET_MIPS16 && SMALL_INT (x);
1723 info->type = ADDRESS_SYMBOLIC;
1724 return (mips_symbolic_constant_p (x, SYMBOL_CONTEXT_MEM,
1726 && mips_symbol_insns (info->symbol_type, mode) > 0
1727 && !mips_split_p[info->symbol_type]);
1734 /* Return true if X is a thread-local symbol. */
1737 mips_tls_operand_p (rtx x)
1739 return GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0;
1742 /* Return true if X can not be forced into a constant pool. */
1745 mips_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
1747 return mips_tls_operand_p (*x);
1750 /* Return true if X can not be forced into a constant pool. */
1753 mips_cannot_force_const_mem (rtx x)
1759 /* As an optimization, reject constants that mips_legitimize_move
1762 Suppose we have a multi-instruction sequence that loads constant C
1763 into register R. If R does not get allocated a hard register, and
1764 R is used in an operand that allows both registers and memory
1765 references, reload will consider forcing C into memory and using
1766 one of the instruction's memory alternatives. Returning false
1767 here will force it to use an input reload instead. */
1768 if (GET_CODE (x) == CONST_INT)
1771 split_const (x, &base, &offset);
1772 if (symbolic_operand (base, VOIDmode) && SMALL_INT (offset))
1776 if (TARGET_HAVE_TLS && for_each_rtx (&x, &mips_tls_symbol_ref_1, 0))
1782 /* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. We can't use blocks for
1783 constants when we're using a per-function constant pool. */
1786 mips_use_blocks_for_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED,
1787 rtx x ATTRIBUTE_UNUSED)
1789 return !TARGET_MIPS16_PCREL_LOADS;
1792 /* Like mips_symbol_insns, but treat extended MIPS16 instructions as a
1793 single instruction. We rely on the fact that, in the worst case,
1794 all instructions involved in a MIPS16 address calculation are usually
1798 mips_symbol_insns_1 (enum mips_symbol_type type, enum machine_mode mode)
1802 case SYMBOL_ABSOLUTE:
1803 /* When using 64-bit symbols, we need 5 preparatory instructions,
1806 lui $at,%highest(symbol)
1807 daddiu $at,$at,%higher(symbol)
1809 daddiu $at,$at,%hi(symbol)
1812 The final address is then $at + %lo(symbol). With 32-bit
1813 symbols we just need a preparatory lui for normal mode and
1814 a preparatory "li; sll" for MIPS16. */
1815 return ABI_HAS_64BIT_SYMBOLS ? 6 : TARGET_MIPS16 ? 3 : 2;
1817 case SYMBOL_GP_RELATIVE:
1818 /* Treat GP-relative accesses as taking a single instruction on
1819 MIPS16 too; the copy of $gp can often be shared. */
1822 case SYMBOL_PC_RELATIVE:
1823 /* PC-relative constants can be only be used with addiupc,
1825 if (mode == MAX_MACHINE_MODE
1826 || GET_MODE_SIZE (mode) == 4
1827 || GET_MODE_SIZE (mode) == 8)
1830 /* The constant must be loaded using addiupc first. */
1833 case SYMBOL_FORCE_TO_MEM:
1834 /* The constant must be loaded from the constant pool. */
1837 case SYMBOL_GOT_DISP:
1838 /* The constant will have to be loaded from the GOT before it
1839 is used in an address. */
1840 if (mode != MAX_MACHINE_MODE)
1845 case SYMBOL_GOT_PAGE_OFST:
1846 /* Unless -funit-at-a-time is in effect, we can't be sure whether
1847 the local/global classification is accurate. See override_options
1850 The worst cases are:
1852 (1) For local symbols when generating o32 or o64 code. The assembler
1858 ...and the final address will be $at + %lo(symbol).
1860 (2) For global symbols when -mxgot. The assembler will use:
1862 lui $at,%got_hi(symbol)
1865 ...and the final address will be $at + %got_lo(symbol). */
1868 case SYMBOL_GOTOFF_PAGE:
1869 case SYMBOL_GOTOFF_DISP:
1870 case SYMBOL_GOTOFF_CALL:
1871 case SYMBOL_GOTOFF_LOADGP:
1872 case SYMBOL_32_HIGH:
1873 case SYMBOL_64_HIGH:
1879 case SYMBOL_GOTTPREL:
1882 /* A 16-bit constant formed by a single relocation, or a 32-bit
1883 constant formed from a high 16-bit relocation and a low 16-bit
1884 relocation. Use mips_split_p to determine which. */
1885 return !mips_split_p[type] ? 1 : TARGET_MIPS16 ? 3 : 2;
1888 /* We don't treat a bare TLS symbol as a constant. */
1894 /* If MODE is MAX_MACHINE_MODE, return the number of instructions needed
1895 to load symbols of type TYPE into a register. Return 0 if the given
1896 type of symbol cannot be used as an immediate operand.
1898 Otherwise, return the number of instructions needed to load or store
1899 values of mode MODE to or from addresses of type TYPE. Return 0 if
1900 the given type of symbol is not valid in addresses.
1902 In both cases, treat extended MIPS16 instructions as two instructions. */
1905 mips_symbol_insns (enum mips_symbol_type type, enum machine_mode mode)
1907 return mips_symbol_insns_1 (type, mode) * (TARGET_MIPS16 ? 2 : 1);
1910 /* Return true if X is a legitimate $sp-based address for mode MDOE. */
1913 mips_stack_address_p (rtx x, enum machine_mode mode)
1915 struct mips_address_info addr;
1917 return (mips_classify_address (&addr, x, mode, false)
1918 && addr.type == ADDRESS_REG
1919 && addr.reg == stack_pointer_rtx);
1922 /* Return true if a value at OFFSET bytes from BASE can be accessed
1923 using an unextended mips16 instruction. MODE is the mode of the
1926 Usually the offset in an unextended instruction is a 5-bit field.
1927 The offset is unsigned and shifted left once for HIs, twice
1928 for SIs, and so on. An exception is SImode accesses off the
1929 stack pointer, which have an 8-bit immediate field. */
1932 mips16_unextended_reference_p (enum machine_mode mode, rtx base, rtx offset)
1935 && GET_CODE (offset) == CONST_INT
1936 && INTVAL (offset) >= 0
1937 && (INTVAL (offset) & (GET_MODE_SIZE (mode) - 1)) == 0)
1939 if (GET_MODE_SIZE (mode) == 4 && base == stack_pointer_rtx)
1940 return INTVAL (offset) < 256 * GET_MODE_SIZE (mode);
1941 return INTVAL (offset) < 32 * GET_MODE_SIZE (mode);
1947 /* Return the number of instructions needed to load or store a value
1948 of mode MODE at X. Return 0 if X isn't valid for MODE. Assume that
1949 multiword moves may need to be split into word moves if MIGHT_SPLIT_P,
1950 otherwise assume that a single load or store is enough.
1952 For mips16 code, count extended instructions as two instructions. */
1955 mips_address_insns (rtx x, enum machine_mode mode, bool might_split_p)
1957 struct mips_address_info addr;
1960 /* BLKmode is used for single unaligned loads and stores and should
1961 not count as a multiword mode. (GET_MODE_SIZE (BLKmode) is pretty
1962 meaningless, so we have to single it out as a special case one way
1964 if (mode != BLKmode && might_split_p)
1965 factor = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1969 if (mips_classify_address (&addr, x, mode, false))
1974 && !mips16_unextended_reference_p (mode, addr.reg, addr.offset))
1978 case ADDRESS_LO_SUM:
1979 return (TARGET_MIPS16 ? factor * 2 : factor);
1981 case ADDRESS_CONST_INT:
1984 case ADDRESS_SYMBOLIC:
1985 return factor * mips_symbol_insns (addr.symbol_type, mode);
1991 /* Likewise for constant X. */
1994 mips_const_insns (rtx x)
1996 struct mips_integer_op codes[MIPS_MAX_INTEGER_OPS];
1997 enum mips_symbol_type symbol_type;
2000 switch (GET_CODE (x))
2003 if (!mips_symbolic_constant_p (XEXP (x, 0), SYMBOL_CONTEXT_LEA,
2005 || !mips_split_p[symbol_type])
2008 /* This is simply an lui for normal mode. It is an extended
2009 "li" followed by an extended "sll" for MIPS16. */
2010 return TARGET_MIPS16 ? 4 : 1;
2014 /* Unsigned 8-bit constants can be loaded using an unextended
2015 LI instruction. Unsigned 16-bit constants can be loaded
2016 using an extended LI. Negative constants must be loaded
2017 using LI and then negated. */
2018 return (INTVAL (x) >= 0 && INTVAL (x) < 256 ? 1
2019 : SMALL_OPERAND_UNSIGNED (INTVAL (x)) ? 2
2020 : INTVAL (x) > -256 && INTVAL (x) < 0 ? 2
2021 : SMALL_OPERAND_UNSIGNED (-INTVAL (x)) ? 3
2024 return mips_build_integer (codes, INTVAL (x));
2028 return (!TARGET_MIPS16 && x == CONST0_RTX (GET_MODE (x)) ? 1 : 0);
2034 /* See if we can refer to X directly. */
2035 if (mips_symbolic_constant_p (x, SYMBOL_CONTEXT_LEA, &symbol_type))
2036 return mips_symbol_insns (symbol_type, MAX_MACHINE_MODE);
2038 /* Otherwise try splitting the constant into a base and offset.
2039 16-bit offsets can be added using an extra addiu. Larger offsets
2040 must be calculated separately and then added to the base. */
2041 split_const (x, &x, &offset);
2044 int n = mips_const_insns (x);
2047 if (SMALL_INT (offset))
2050 return n + 1 + mips_build_integer (codes, INTVAL (offset));
2057 return mips_symbol_insns (mips_classify_symbol (x, SYMBOL_CONTEXT_LEA),
2066 /* Return the number of instructions needed to implement INSN,
2067 given that it loads from or stores to MEM. Count extended
2068 mips16 instructions as two instructions. */
2071 mips_load_store_insns (rtx mem, rtx insn)
2073 enum machine_mode mode;
2077 gcc_assert (MEM_P (mem));
2078 mode = GET_MODE (mem);
2080 /* Try to prove that INSN does not need to be split. */
2081 might_split_p = true;
2082 if (GET_MODE_BITSIZE (mode) == 64)
2084 set = single_set (insn);
2085 if (set && !mips_split_64bit_move_p (SET_DEST (set), SET_SRC (set)))
2086 might_split_p = false;
2089 return mips_address_insns (XEXP (mem, 0), mode, might_split_p);
2093 /* Return the number of instructions needed for an integer division. */
2096 mips_idiv_insns (void)
2101 if (TARGET_CHECK_ZERO_DIV)
2103 if (GENERATE_DIVIDE_TRAPS)
2109 if (TARGET_FIX_R4000 || TARGET_FIX_R4400)
2114 /* This function is used to implement GO_IF_LEGITIMATE_ADDRESS. It
2115 returns a nonzero value if X is a legitimate address for a memory
2116 operand of the indicated MODE. STRICT is nonzero if this function
2117 is called during reload. */
2120 mips_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2122 struct mips_address_info addr;
2124 return mips_classify_address (&addr, x, mode, strict);
2127 /* Emit a move from SRC to DEST. Assume that the move expanders can
2128 handle all moves if !can_create_pseudo_p (). The distinction is
2129 important because, unlike emit_move_insn, the move expanders know
2130 how to force Pmode objects into the constant pool even when the
2131 constant pool address is not itself legitimate. */
2134 mips_emit_move (rtx dest, rtx src)
2136 return (can_create_pseudo_p ()
2137 ? emit_move_insn (dest, src)
2138 : emit_move_insn_1 (dest, src));
2141 /* Copy VALUE to a register and return that register. If new psuedos
2142 are allowed, copy it into a new register, otherwise use DEST. */
2145 mips_force_temporary (rtx dest, rtx value)
2147 if (can_create_pseudo_p ())
2148 return force_reg (Pmode, value);
2151 mips_emit_move (copy_rtx (dest), value);
2157 /* If MODE is MAX_MACHINE_MODE, ADDR appears as a move operand, otherwise
2158 it appears in a MEM of that mode. Return true if ADDR is a legitimate
2159 constant in that context and can be split into a high part and a LO_SUM.
2160 If so, and if LO_SUM_OUT is nonnull, emit the high part and return
2161 the LO_SUM in *LO_SUM_OUT. Leave *LO_SUM_OUT unchanged otherwise.
2163 TEMP is as for mips_force_temporary and is used to load the high
2164 part into a register. */
2167 mips_split_symbol (rtx temp, rtx addr, enum machine_mode mode, rtx *lo_sum_out)
2169 enum mips_symbol_context context;
2170 enum mips_symbol_type symbol_type;
2173 context = (mode == MAX_MACHINE_MODE
2174 ? SYMBOL_CONTEXT_LEA
2175 : SYMBOL_CONTEXT_MEM);
2176 if (!mips_symbolic_constant_p (addr, context, &symbol_type)
2177 || mips_symbol_insns (symbol_type, mode) == 0
2178 || !mips_split_p[symbol_type])
2183 if (symbol_type == SYMBOL_GP_RELATIVE)
2185 if (!can_create_pseudo_p ())
2187 emit_insn (gen_load_const_gp (copy_rtx (temp)));
2191 high = mips16_gp_pseudo_reg ();
2195 high = gen_rtx_HIGH (Pmode, copy_rtx (addr));
2196 high = mips_force_temporary (temp, high);
2198 *lo_sum_out = gen_rtx_LO_SUM (Pmode, high, addr);
2204 /* Wrap symbol or label BASE in an unspec address of type SYMBOL_TYPE
2205 and add CONST_INT OFFSET to the result. */
2208 mips_unspec_address_offset (rtx base, rtx offset,
2209 enum mips_symbol_type symbol_type)
2211 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base),
2212 UNSPEC_ADDRESS_FIRST + symbol_type);
2213 if (offset != const0_rtx)
2214 base = gen_rtx_PLUS (Pmode, base, offset);
2215 return gen_rtx_CONST (Pmode, base);
2218 /* Return an UNSPEC address with underlying address ADDRESS and symbol
2219 type SYMBOL_TYPE. */
2222 mips_unspec_address (rtx address, enum mips_symbol_type symbol_type)
2226 split_const (address, &base, &offset);
2227 return mips_unspec_address_offset (base, offset, symbol_type);
2231 /* If mips_unspec_address (ADDR, SYMBOL_TYPE) is a 32-bit value, add the
2232 high part to BASE and return the result. Just return BASE otherwise.
2233 TEMP is available as a temporary register if needed.
2235 The returned expression can be used as the first operand to a LO_SUM. */
2238 mips_unspec_offset_high (rtx temp, rtx base, rtx addr,
2239 enum mips_symbol_type symbol_type)
2241 if (mips_split_p[symbol_type])
2243 addr = gen_rtx_HIGH (Pmode, mips_unspec_address (addr, symbol_type));
2244 addr = mips_force_temporary (temp, addr);
2245 return mips_force_temporary (temp, gen_rtx_PLUS (Pmode, addr, base));
2251 /* Return a legitimate address for REG + OFFSET. TEMP is as for
2252 mips_force_temporary; it is only needed when OFFSET is not a
2256 mips_add_offset (rtx temp, rtx reg, HOST_WIDE_INT offset)
2258 if (!SMALL_OPERAND (offset))
2263 /* Load the full offset into a register so that we can use
2264 an unextended instruction for the address itself. */
2265 high = GEN_INT (offset);
2270 /* Leave OFFSET as a 16-bit offset and put the excess in HIGH. */
2271 high = GEN_INT (CONST_HIGH_PART (offset));
2272 offset = CONST_LOW_PART (offset);
2274 high = mips_force_temporary (temp, high);
2275 reg = mips_force_temporary (temp, gen_rtx_PLUS (Pmode, high, reg));
2277 return plus_constant (reg, offset);
2280 /* Emit a call to __tls_get_addr. SYM is the TLS symbol we are
2281 referencing, and TYPE is the symbol type to use (either global
2282 dynamic or local dynamic). V0 is an RTX for the return value
2283 location. The entire insn sequence is returned. */
2285 static GTY(()) rtx mips_tls_symbol;
2288 mips_call_tls_get_addr (rtx sym, enum mips_symbol_type type, rtx v0)
2290 rtx insn, loc, tga, a0;
2292 a0 = gen_rtx_REG (Pmode, GP_ARG_FIRST);
2294 if (!mips_tls_symbol)
2295 mips_tls_symbol = init_one_libfunc ("__tls_get_addr");
2297 loc = mips_unspec_address (sym, type);
2301 emit_insn (gen_rtx_SET (Pmode, a0,
2302 gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, loc)));
2303 tga = gen_rtx_MEM (Pmode, mips_tls_symbol);
2304 insn = emit_call_insn (gen_call_value (v0, tga, const0_rtx, const0_rtx));
2305 CONST_OR_PURE_CALL_P (insn) = 1;
2306 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), v0);
2307 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), a0);
2308 insn = get_insns ();
2315 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
2316 return value will be a valid address and move_operand (either a REG
2320 mips_legitimize_tls_address (rtx loc)
2322 rtx dest, insn, v0, v1, tmp1, tmp2, eqv;
2323 enum tls_model model;
2325 v0 = gen_rtx_REG (Pmode, GP_RETURN);
2326 v1 = gen_rtx_REG (Pmode, GP_RETURN + 1);
2328 model = SYMBOL_REF_TLS_MODEL (loc);
2329 /* Only TARGET_ABICALLS code can have more than one module; other
2330 code must be be static and should not use a GOT. All TLS models
2331 reduce to local exec in this situation. */
2332 if (!TARGET_ABICALLS)
2333 model = TLS_MODEL_LOCAL_EXEC;
2337 case TLS_MODEL_GLOBAL_DYNAMIC:
2338 insn = mips_call_tls_get_addr (loc, SYMBOL_TLSGD, v0);
2339 dest = gen_reg_rtx (Pmode);
2340 emit_libcall_block (insn, dest, v0, loc);
2343 case TLS_MODEL_LOCAL_DYNAMIC:
2344 insn = mips_call_tls_get_addr (loc, SYMBOL_TLSLDM, v0);
2345 tmp1 = gen_reg_rtx (Pmode);
2347 /* Attach a unique REG_EQUIV, to allow the RTL optimizers to
2348 share the LDM result with other LD model accesses. */
2349 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2351 emit_libcall_block (insn, tmp1, v0, eqv);
2353 tmp2 = mips_unspec_offset_high (NULL, tmp1, loc, SYMBOL_DTPREL);
2354 dest = gen_rtx_LO_SUM (Pmode, tmp2,
2355 mips_unspec_address (loc, SYMBOL_DTPREL));
2358 case TLS_MODEL_INITIAL_EXEC:
2359 tmp1 = gen_reg_rtx (Pmode);
2360 tmp2 = mips_unspec_address (loc, SYMBOL_GOTTPREL);
2361 if (Pmode == DImode)
2363 emit_insn (gen_tls_get_tp_di (v1));
2364 emit_insn (gen_load_gotdi (tmp1, pic_offset_table_rtx, tmp2));
2368 emit_insn (gen_tls_get_tp_si (v1));
2369 emit_insn (gen_load_gotsi (tmp1, pic_offset_table_rtx, tmp2));
2371 dest = gen_reg_rtx (Pmode);
2372 emit_insn (gen_add3_insn (dest, tmp1, v1));
2375 case TLS_MODEL_LOCAL_EXEC:
2376 if (Pmode == DImode)
2377 emit_insn (gen_tls_get_tp_di (v1));
2379 emit_insn (gen_tls_get_tp_si (v1));
2381 tmp1 = mips_unspec_offset_high (NULL, v1, loc, SYMBOL_TPREL);
2382 dest = gen_rtx_LO_SUM (Pmode, tmp1,
2383 mips_unspec_address (loc, SYMBOL_TPREL));
2393 /* This function is used to implement LEGITIMIZE_ADDRESS. If *XLOC can
2394 be legitimized in a way that the generic machinery might not expect,
2395 put the new address in *XLOC and return true. MODE is the mode of
2396 the memory being accessed. */
2399 mips_legitimize_address (rtx *xloc, enum machine_mode mode)
2401 if (mips_tls_operand_p (*xloc))
2403 *xloc = mips_legitimize_tls_address (*xloc);
2407 /* See if the address can split into a high part and a LO_SUM. */
2408 if (mips_split_symbol (NULL, *xloc, mode, xloc))
2411 if (GET_CODE (*xloc) == PLUS && GET_CODE (XEXP (*xloc, 1)) == CONST_INT)
2413 /* Handle REG + CONSTANT using mips_add_offset. */
2416 reg = XEXP (*xloc, 0);
2417 if (!mips_valid_base_register_p (reg, mode, 0))
2418 reg = copy_to_mode_reg (Pmode, reg);
2419 *xloc = mips_add_offset (0, reg, INTVAL (XEXP (*xloc, 1)));
2427 /* Subroutine of mips_build_integer (with the same interface).
2428 Assume that the final action in the sequence should be a left shift. */
2431 mips_build_shift (struct mips_integer_op *codes, HOST_WIDE_INT value)
2433 unsigned int i, shift;
2435 /* Shift VALUE right until its lowest bit is set. Shift arithmetically
2436 since signed numbers are easier to load than unsigned ones. */
2438 while ((value & 1) == 0)
2439 value /= 2, shift++;
2441 i = mips_build_integer (codes, value);
2442 codes[i].code = ASHIFT;
2443 codes[i].value = shift;
2448 /* As for mips_build_shift, but assume that the final action will be
2449 an IOR or PLUS operation. */
2452 mips_build_lower (struct mips_integer_op *codes, unsigned HOST_WIDE_INT value)
2454 unsigned HOST_WIDE_INT high;
2457 high = value & ~(unsigned HOST_WIDE_INT) 0xffff;
2458 if (!LUI_OPERAND (high) && (value & 0x18000) == 0x18000)
2460 /* The constant is too complex to load with a simple lui/ori pair
2461 so our goal is to clear as many trailing zeros as possible.
2462 In this case, we know bit 16 is set and that the low 16 bits
2463 form a negative number. If we subtract that number from VALUE,
2464 we will clear at least the lowest 17 bits, maybe more. */
2465 i = mips_build_integer (codes, CONST_HIGH_PART (value));
2466 codes[i].code = PLUS;
2467 codes[i].value = CONST_LOW_PART (value);
2471 i = mips_build_integer (codes, high);
2472 codes[i].code = IOR;
2473 codes[i].value = value & 0xffff;
2479 /* Fill CODES with a sequence of rtl operations to load VALUE.
2480 Return the number of operations needed. */
2483 mips_build_integer (struct mips_integer_op *codes,
2484 unsigned HOST_WIDE_INT value)
2486 if (SMALL_OPERAND (value)
2487 || SMALL_OPERAND_UNSIGNED (value)
2488 || LUI_OPERAND (value))
2490 /* The value can be loaded with a single instruction. */
2491 codes[0].code = UNKNOWN;
2492 codes[0].value = value;
2495 else if ((value & 1) != 0 || LUI_OPERAND (CONST_HIGH_PART (value)))
2497 /* Either the constant is a simple LUI/ORI combination or its
2498 lowest bit is set. We don't want to shift in this case. */
2499 return mips_build_lower (codes, value);
2501 else if ((value & 0xffff) == 0)
2503 /* The constant will need at least three actions. The lowest
2504 16 bits are clear, so the final action will be a shift. */
2505 return mips_build_shift (codes, value);
2509 /* The final action could be a shift, add or inclusive OR.
2510 Rather than use a complex condition to select the best
2511 approach, try both mips_build_shift and mips_build_lower
2512 and pick the one that gives the shortest sequence.
2513 Note that this case is only used once per constant. */
2514 struct mips_integer_op alt_codes[MIPS_MAX_INTEGER_OPS];
2515 unsigned int cost, alt_cost;
2517 cost = mips_build_shift (codes, value);
2518 alt_cost = mips_build_lower (alt_codes, value);
2519 if (alt_cost < cost)
2521 memcpy (codes, alt_codes, alt_cost * sizeof (codes[0]));
2529 /* Load VALUE into DEST, using TEMP as a temporary register if need be. */
2532 mips_move_integer (rtx dest, rtx temp, unsigned HOST_WIDE_INT value)
2534 struct mips_integer_op codes[MIPS_MAX_INTEGER_OPS];
2535 enum machine_mode mode;
2536 unsigned int i, cost;
2539 mode = GET_MODE (dest);
2540 cost = mips_build_integer (codes, value);
2542 /* Apply each binary operation to X. Invariant: X is a legitimate
2543 source operand for a SET pattern. */
2544 x = GEN_INT (codes[0].value);
2545 for (i = 1; i < cost; i++)
2547 if (!can_create_pseudo_p ())
2549 emit_insn (gen_rtx_SET (VOIDmode, temp, x));
2553 x = force_reg (mode, x);
2554 x = gen_rtx_fmt_ee (codes[i].code, mode, x, GEN_INT (codes[i].value));
2557 emit_insn (gen_rtx_SET (VOIDmode, dest, x));
2561 /* Subroutine of mips_legitimize_move. Move constant SRC into register
2562 DEST given that SRC satisfies immediate_operand but doesn't satisfy
2566 mips_legitimize_const_move (enum machine_mode mode, rtx dest, rtx src)
2570 /* Split moves of big integers into smaller pieces. */
2571 if (splittable_const_int_operand (src, mode))
2573 mips_move_integer (dest, dest, INTVAL (src));
2577 /* Split moves of symbolic constants into high/low pairs. */
2578 if (mips_split_symbol (dest, src, MAX_MACHINE_MODE, &src))
2580 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
2584 if (mips_tls_operand_p (src))
2586 mips_emit_move (dest, mips_legitimize_tls_address (src));
2590 /* If we have (const (plus symbol offset)), load the symbol first
2591 and then add in the offset. This is usually better than forcing
2592 the constant into memory, at least in non-mips16 code. */
2593 split_const (src, &base, &offset);
2595 && offset != const0_rtx
2596 && (can_create_pseudo_p () || SMALL_INT (offset)))
2598 base = mips_force_temporary (dest, base);
2599 mips_emit_move (dest, mips_add_offset (0, base, INTVAL (offset)));
2603 src = force_const_mem (mode, src);
2605 /* When using explicit relocs, constant pool references are sometimes
2606 not legitimate addresses. */
2607 mips_split_symbol (dest, XEXP (src, 0), mode, &XEXP (src, 0));
2608 mips_emit_move (dest, src);
2612 /* If (set DEST SRC) is not a valid instruction, emit an equivalent
2613 sequence that is valid. */
2616 mips_legitimize_move (enum machine_mode mode, rtx dest, rtx src)
2618 if (!register_operand (dest, mode) && !reg_or_0_operand (src, mode))
2620 mips_emit_move (dest, force_reg (mode, src));
2624 /* Check for individual, fully-reloaded mflo and mfhi instructions. */
2625 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
2626 && REG_P (src) && MD_REG_P (REGNO (src))
2627 && REG_P (dest) && GP_REG_P (REGNO (dest)))
2629 int other_regno = REGNO (src) == HI_REGNUM ? LO_REGNUM : HI_REGNUM;
2630 if (GET_MODE_SIZE (mode) <= 4)
2631 emit_insn (gen_mfhilo_si (gen_rtx_REG (SImode, REGNO (dest)),
2632 gen_rtx_REG (SImode, REGNO (src)),
2633 gen_rtx_REG (SImode, other_regno)));
2635 emit_insn (gen_mfhilo_di (gen_rtx_REG (DImode, REGNO (dest)),
2636 gen_rtx_REG (DImode, REGNO (src)),
2637 gen_rtx_REG (DImode, other_regno)));
2641 /* We need to deal with constants that would be legitimate
2642 immediate_operands but not legitimate move_operands. */
2643 if (CONSTANT_P (src) && !move_operand (src, mode))
2645 mips_legitimize_const_move (mode, dest, src);
2646 set_unique_reg_note (get_last_insn (), REG_EQUAL, copy_rtx (src));
2652 /* We need a lot of little routines to check constant values on the
2653 mips16. These are used to figure out how long the instruction will
2654 be. It would be much better to do this using constraints, but
2655 there aren't nearly enough letters available. */
2658 m16_check_op (rtx op, int low, int high, int mask)
2660 return (GET_CODE (op) == CONST_INT
2661 && INTVAL (op) >= low
2662 && INTVAL (op) <= high
2663 && (INTVAL (op) & mask) == 0);
2667 m16_uimm3_b (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2669 return m16_check_op (op, 0x1, 0x8, 0);
2673 m16_simm4_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2675 return m16_check_op (op, - 0x8, 0x7, 0);
2679 m16_nsimm4_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2681 return m16_check_op (op, - 0x7, 0x8, 0);
2685 m16_simm5_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2687 return m16_check_op (op, - 0x10, 0xf, 0);
2691 m16_nsimm5_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2693 return m16_check_op (op, - 0xf, 0x10, 0);
2697 m16_uimm5_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2699 return m16_check_op (op, (- 0x10) << 2, 0xf << 2, 3);
2703 m16_nuimm5_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2705 return m16_check_op (op, (- 0xf) << 2, 0x10 << 2, 3);
2709 m16_simm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2711 return m16_check_op (op, - 0x80, 0x7f, 0);
2715 m16_nsimm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2717 return m16_check_op (op, - 0x7f, 0x80, 0);
2721 m16_uimm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2723 return m16_check_op (op, 0x0, 0xff, 0);
2727 m16_nuimm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2729 return m16_check_op (op, - 0xff, 0x0, 0);
2733 m16_uimm8_m1_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2735 return m16_check_op (op, - 0x1, 0xfe, 0);
2739 m16_uimm8_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2741 return m16_check_op (op, 0x0, 0xff << 2, 3);
2745 m16_nuimm8_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2747 return m16_check_op (op, (- 0xff) << 2, 0x0, 3);
2751 m16_simm8_8 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2753 return m16_check_op (op, (- 0x80) << 3, 0x7f << 3, 7);
2757 m16_nsimm8_8 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2759 return m16_check_op (op, (- 0x7f) << 3, 0x80 << 3, 7);
2762 /* Return true if ADDR matches the pattern for the lwxs load scaled indexed
2763 address instruction. */
2766 mips_lwxs_address_p (rtx addr)
2769 && GET_CODE (addr) == PLUS
2770 && REG_P (XEXP (addr, 1)))
2772 rtx offset = XEXP (addr, 0);
2773 if (GET_CODE (offset) == MULT
2774 && REG_P (XEXP (offset, 0))
2775 && GET_CODE (XEXP (offset, 1)) == CONST_INT
2776 && INTVAL (XEXP (offset, 1)) == 4)
2783 mips_rtx_costs (rtx x, int code, int outer_code, int *total)
2785 enum machine_mode mode = GET_MODE (x);
2786 bool float_mode_p = FLOAT_MODE_P (mode);
2793 /* A number between 1 and 8 inclusive is efficient for a shift.
2794 Otherwise, we will need an extended instruction. */
2795 if ((outer_code) == ASHIFT || (outer_code) == ASHIFTRT
2796 || (outer_code) == LSHIFTRT)
2798 if (INTVAL (x) >= 1 && INTVAL (x) <= 8)
2801 *total = COSTS_N_INSNS (1);
2805 /* We can use cmpi for an xor with an unsigned 16-bit value. */
2806 if ((outer_code) == XOR
2807 && INTVAL (x) >= 0 && INTVAL (x) < 0x10000)
2813 /* We may be able to use slt or sltu for a comparison with a
2814 signed 16-bit value. (The boundary conditions aren't quite
2815 right, but this is just a heuristic anyhow.) */
2816 if (((outer_code) == LT || (outer_code) == LE
2817 || (outer_code) == GE || (outer_code) == GT
2818 || (outer_code) == LTU || (outer_code) == LEU
2819 || (outer_code) == GEU || (outer_code) == GTU)
2820 && INTVAL (x) >= -0x8000 && INTVAL (x) < 0x8000)
2826 /* Equality comparisons with 0 are cheap. */
2827 if (((outer_code) == EQ || (outer_code) == NE)
2834 /* Constants in the range 0...255 can be loaded with an unextended
2835 instruction. They are therefore as cheap as a register move.
2837 Given the choice between "li R1,0...255" and "move R1,R2"
2838 (where R2 is a known constant), it is usually better to use "li",
2839 since we do not want to unnecessarily extend the lifetime
2841 if (outer_code == SET
2843 && INTVAL (x) < 256)
2851 /* These can be used anywhere. */
2856 /* Otherwise fall through to the handling below because
2857 we'll need to construct the constant. */
2863 if (LEGITIMATE_CONSTANT_P (x))
2865 *total = COSTS_N_INSNS (1);
2870 /* The value will need to be fetched from the constant pool. */
2871 *total = CONSTANT_POOL_COST;
2877 /* If the address is legitimate, return the number of
2878 instructions it needs. */
2879 rtx addr = XEXP (x, 0);
2880 int n = mips_address_insns (addr, GET_MODE (x), true);
2883 *total = COSTS_N_INSNS (n + 1);
2886 /* Check for scaled indexed address. */
2887 if (mips_lwxs_address_p (addr))
2889 *total = COSTS_N_INSNS (2);
2892 /* Otherwise use the default handling. */
2897 *total = COSTS_N_INSNS (6);
2901 *total = COSTS_N_INSNS ((mode == DImode && !TARGET_64BIT) ? 2 : 1);
2907 if (mode == DImode && !TARGET_64BIT)
2909 *total = COSTS_N_INSNS (2);
2917 if (mode == DImode && !TARGET_64BIT)
2919 *total = COSTS_N_INSNS ((GET_CODE (XEXP (x, 1)) == CONST_INT)
2927 *total = COSTS_N_INSNS (1);
2929 *total = COSTS_N_INSNS (4);
2933 *total = COSTS_N_INSNS (1);
2940 *total = mips_cost->fp_add;
2944 else if (mode == DImode && !TARGET_64BIT)
2946 *total = COSTS_N_INSNS (4);
2952 if (mode == DImode && !TARGET_64BIT)
2954 *total = COSTS_N_INSNS (4);
2961 *total = mips_cost->fp_mult_sf;
2963 else if (mode == DFmode)
2964 *total = mips_cost->fp_mult_df;
2966 else if (mode == SImode)
2967 *total = mips_cost->int_mult_si;
2970 *total = mips_cost->int_mult_di;
2979 *total = mips_cost->fp_div_sf;
2981 *total = mips_cost->fp_div_df;
2990 *total = mips_cost->int_div_di;
2992 *total = mips_cost->int_div_si;
2997 /* A sign extend from SImode to DImode in 64-bit mode is often
2998 zero instructions, because the result can often be used
2999 directly by another instruction; we'll call it one. */
3000 if (TARGET_64BIT && mode == DImode
3001 && GET_MODE (XEXP (x, 0)) == SImode)
3002 *total = COSTS_N_INSNS (1);
3004 *total = COSTS_N_INSNS (2);
3008 if (TARGET_64BIT && mode == DImode
3009 && GET_MODE (XEXP (x, 0)) == SImode)
3010 *total = COSTS_N_INSNS (2);
3012 *total = COSTS_N_INSNS (1);
3016 case UNSIGNED_FLOAT:
3019 case FLOAT_TRUNCATE:
3021 *total = mips_cost->fp_add;
3029 /* Provide the costs of an addressing mode that contains ADDR.
3030 If ADDR is not a valid address, its cost is irrelevant. */
3033 mips_address_cost (rtx addr)
3035 return mips_address_insns (addr, SImode, false);
3038 /* Return one word of double-word value OP, taking into account the fixed
3039 endianness of certain registers. HIGH_P is true to select the high part,
3040 false to select the low part. */
3043 mips_subword (rtx op, int high_p)
3046 enum machine_mode mode;
3048 mode = GET_MODE (op);
3049 if (mode == VOIDmode)
3052 if (TARGET_BIG_ENDIAN ? !high_p : high_p)
3053 byte = UNITS_PER_WORD;
3057 if (FP_REG_RTX_P (op))
3058 return gen_rtx_REG (word_mode, high_p ? REGNO (op) + 1 : REGNO (op));
3061 return mips_rewrite_small_data (adjust_address (op, word_mode, byte));
3063 return simplify_gen_subreg (word_mode, op, mode, byte);
3067 /* Return true if a 64-bit move from SRC to DEST should be split into two. */
3070 mips_split_64bit_move_p (rtx dest, rtx src)
3075 /* FP->FP moves can be done in a single instruction. */
3076 if (FP_REG_RTX_P (src) && FP_REG_RTX_P (dest))
3079 /* Check for floating-point loads and stores. They can be done using
3080 ldc1 and sdc1 on MIPS II and above. */
3083 if (FP_REG_RTX_P (dest) && MEM_P (src))
3085 if (FP_REG_RTX_P (src) && MEM_P (dest))
3092 /* Split a 64-bit move from SRC to DEST assuming that
3093 mips_split_64bit_move_p holds.
3095 Moves into and out of FPRs cause some difficulty here. Such moves
3096 will always be DFmode, since paired FPRs are not allowed to store
3097 DImode values. The most natural representation would be two separate
3098 32-bit moves, such as:
3100 (set (reg:SI $f0) (mem:SI ...))
3101 (set (reg:SI $f1) (mem:SI ...))
3103 However, the second insn is invalid because odd-numbered FPRs are
3104 not allowed to store independent values. Use the patterns load_df_low,
3105 load_df_high and store_df_high instead. */
3108 mips_split_64bit_move (rtx dest, rtx src)
3110 if (FP_REG_RTX_P (dest))
3112 /* Loading an FPR from memory or from GPRs. */
3115 dest = gen_lowpart (DFmode, dest);
3116 emit_insn (gen_load_df_low (dest, mips_subword (src, 0)));
3117 emit_insn (gen_mthc1 (dest, mips_subword (src, 1),
3122 emit_insn (gen_load_df_low (copy_rtx (dest),
3123 mips_subword (src, 0)));
3124 emit_insn (gen_load_df_high (dest, mips_subword (src, 1),
3128 else if (FP_REG_RTX_P (src))
3130 /* Storing an FPR into memory or GPRs. */
3133 src = gen_lowpart (DFmode, src);
3134 mips_emit_move (mips_subword (dest, 0), mips_subword (src, 0));
3135 emit_insn (gen_mfhc1 (mips_subword (dest, 1), src));
3139 mips_emit_move (mips_subword (dest, 0), mips_subword (src, 0));
3140 emit_insn (gen_store_df_high (mips_subword (dest, 1), src));
3145 /* The operation can be split into two normal moves. Decide in
3146 which order to do them. */
3149 low_dest = mips_subword (dest, 0);
3150 if (REG_P (low_dest)
3151 && reg_overlap_mentioned_p (low_dest, src))
3153 mips_emit_move (mips_subword (dest, 1), mips_subword (src, 1));
3154 mips_emit_move (low_dest, mips_subword (src, 0));
3158 mips_emit_move (low_dest, mips_subword (src, 0));
3159 mips_emit_move (mips_subword (dest, 1), mips_subword (src, 1));
3164 /* Return the appropriate instructions to move SRC into DEST. Assume
3165 that SRC is operand 1 and DEST is operand 0. */
3168 mips_output_move (rtx dest, rtx src)
3170 enum rtx_code dest_code, src_code;
3171 enum mips_symbol_type symbol_type;
3174 dest_code = GET_CODE (dest);
3175 src_code = GET_CODE (src);
3176 dbl_p = (GET_MODE_SIZE (GET_MODE (dest)) == 8);
3178 if (dbl_p && mips_split_64bit_move_p (dest, src))
3181 if ((src_code == REG && GP_REG_P (REGNO (src)))
3182 || (!TARGET_MIPS16 && src == CONST0_RTX (GET_MODE (dest))))
3184 if (dest_code == REG)
3186 if (GP_REG_P (REGNO (dest)))
3187 return "move\t%0,%z1";
3189 if (MD_REG_P (REGNO (dest)))
3192 if (DSP_ACC_REG_P (REGNO (dest)))
3194 static char retval[] = "mt__\t%z1,%q0";
3195 retval[2] = reg_names[REGNO (dest)][4];
3196 retval[3] = reg_names[REGNO (dest)][5];
3200 if (FP_REG_P (REGNO (dest)))
3201 return (dbl_p ? "dmtc1\t%z1,%0" : "mtc1\t%z1,%0");
3203 if (ALL_COP_REG_P (REGNO (dest)))
3205 static char retval[] = "dmtc_\t%z1,%0";
3207 retval[4] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (dest));
3208 return (dbl_p ? retval : retval + 1);
3211 if (dest_code == MEM)
3212 return (dbl_p ? "sd\t%z1,%0" : "sw\t%z1,%0");
3214 if (dest_code == REG && GP_REG_P (REGNO (dest)))
3216 if (src_code == REG)
3218 if (DSP_ACC_REG_P (REGNO (src)))
3220 static char retval[] = "mf__\t%0,%q1";
3221 retval[2] = reg_names[REGNO (src)][4];
3222 retval[3] = reg_names[REGNO (src)][5];
3226 if (ST_REG_P (REGNO (src)) && ISA_HAS_8CC)
3227 return "lui\t%0,0x3f80\n\tmovf\t%0,%.,%1";
3229 if (FP_REG_P (REGNO (src)))
3230 return (dbl_p ? "dmfc1\t%0,%1" : "mfc1\t%0,%1");
3232 if (ALL_COP_REG_P (REGNO (src)))
3234 static char retval[] = "dmfc_\t%0,%1";
3236 retval[4] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (src));
3237 return (dbl_p ? retval : retval + 1);
3241 if (src_code == MEM)
3242 return (dbl_p ? "ld\t%0,%1" : "lw\t%0,%1");
3244 if (src_code == CONST_INT)
3246 /* Don't use the X format, because that will give out of
3247 range numbers for 64-bit hosts and 32-bit targets. */
3249 return "li\t%0,%1\t\t\t# %X1";
3251 if (INTVAL (src) >= 0 && INTVAL (src) <= 0xffff)
3254 if (INTVAL (src) < 0 && INTVAL (src) >= -0xffff)
3258 if (src_code == HIGH)
3259 return TARGET_MIPS16 ? "#" : "lui\t%0,%h1";
3261 if (CONST_GP_P (src))
3262 return "move\t%0,%1";
3264 if (mips_symbolic_constant_p (src, SYMBOL_CONTEXT_LEA, &symbol_type)
3265 && mips_lo_relocs[symbol_type] != 0)
3267 /* A signed 16-bit constant formed by applying a relocation
3268 operator to a symbolic address. */
3269 gcc_assert (!mips_split_p[symbol_type]);
3270 return "li\t%0,%R1";
3273 if (symbolic_operand (src, VOIDmode))
3275 gcc_assert (TARGET_MIPS16
3276 ? TARGET_MIPS16_TEXT_LOADS
3277 : !TARGET_EXPLICIT_RELOCS);
3278 return (dbl_p ? "dla\t%0,%1" : "la\t%0,%1");
3281 if (src_code == REG && FP_REG_P (REGNO (src)))
3283 if (dest_code == REG && FP_REG_P (REGNO (dest)))
3285 if (GET_MODE (dest) == V2SFmode)
3286 return "mov.ps\t%0,%1";
3288 return (dbl_p ? "mov.d\t%0,%1" : "mov.s\t%0,%1");
3291 if (dest_code == MEM)
3292 return (dbl_p ? "sdc1\t%1,%0" : "swc1\t%1,%0");
3294 if (dest_code == REG && FP_REG_P (REGNO (dest)))
3296 if (src_code == MEM)
3297 return (dbl_p ? "ldc1\t%0,%1" : "lwc1\t%0,%1");
3299 if (dest_code == REG && ALL_COP_REG_P (REGNO (dest)) && src_code == MEM)
3301 static char retval[] = "l_c_\t%0,%1";
3303 retval[1] = (dbl_p ? 'd' : 'w');
3304 retval[3] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (dest));
3307 if (dest_code == MEM && src_code == REG && ALL_COP_REG_P (REGNO (src)))
3309 static char retval[] = "s_c_\t%1,%0";
3311 retval[1] = (dbl_p ? 'd' : 'w');
3312 retval[3] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (src));
3318 /* Restore $gp from its save slot. Valid only when using o32 or
3322 mips_restore_gp (void)
3326 gcc_assert (TARGET_ABICALLS && TARGET_OLDABI);
3328 address = mips_add_offset (pic_offset_table_rtx,
3329 frame_pointer_needed
3330 ? hard_frame_pointer_rtx
3331 : stack_pointer_rtx,
3332 current_function_outgoing_args_size);
3333 slot = gen_rtx_MEM (Pmode, address);
3335 mips_emit_move (pic_offset_table_rtx, slot);
3336 if (!TARGET_EXPLICIT_RELOCS)
3337 emit_insn (gen_blockage ());
3340 /* Emit an instruction of the form (set TARGET (CODE OP0 OP1)). */
3343 mips_emit_binary (enum rtx_code code, rtx target, rtx op0, rtx op1)
3345 emit_insn (gen_rtx_SET (VOIDmode, target,
3346 gen_rtx_fmt_ee (code, GET_MODE (target), op0, op1)));
3349 /* Return true if CMP1 is a suitable second operand for relational
3350 operator CODE. See also the *sCC patterns in mips.md. */
3353 mips_relational_operand_ok_p (enum rtx_code code, rtx cmp1)
3359 return reg_or_0_operand (cmp1, VOIDmode);
3363 return !TARGET_MIPS16 && cmp1 == const1_rtx;
3367 return arith_operand (cmp1, VOIDmode);
3370 return sle_operand (cmp1, VOIDmode);
3373 return sleu_operand (cmp1, VOIDmode);
3380 /* Canonicalize LE or LEU comparisons into LT comparisons when
3381 possible to avoid extra instructions or inverting the
3385 mips_canonicalize_comparison (enum rtx_code *code, rtx *cmp1,
3386 enum machine_mode mode)
3388 HOST_WIDE_INT original, plus_one;
3390 if (GET_CODE (*cmp1) != CONST_INT)
3393 original = INTVAL (*cmp1);
3394 plus_one = trunc_int_for_mode ((unsigned HOST_WIDE_INT) original + 1, mode);
3399 if (original < plus_one)
3402 *cmp1 = force_reg (mode, GEN_INT (plus_one));
3411 *cmp1 = force_reg (mode, GEN_INT (plus_one));
3424 /* Compare CMP0 and CMP1 using relational operator CODE and store the
3425 result in TARGET. CMP0 and TARGET are register_operands that have
3426 the same integer mode. If INVERT_PTR is nonnull, it's OK to set
3427 TARGET to the inverse of the result and flip *INVERT_PTR instead. */
3430 mips_emit_int_relational (enum rtx_code code, bool *invert_ptr,
3431 rtx target, rtx cmp0, rtx cmp1)
3433 /* First see if there is a MIPS instruction that can do this operation
3434 with CMP1 in its current form. If not, try to canonicalize the
3435 comparison to LT. If that fails, try doing the same for the
3436 inverse operation. If that also fails, force CMP1 into a register
3438 if (mips_relational_operand_ok_p (code, cmp1))
3439 mips_emit_binary (code, target, cmp0, cmp1);
3440 else if (mips_canonicalize_comparison (&code, &cmp1, GET_MODE (target)))
3441 mips_emit_binary (code, target, cmp0, cmp1);
3444 enum rtx_code inv_code = reverse_condition (code);
3445 if (!mips_relational_operand_ok_p (inv_code, cmp1))
3447 cmp1 = force_reg (GET_MODE (cmp0), cmp1);
3448 mips_emit_int_relational (code, invert_ptr, target, cmp0, cmp1);
3450 else if (invert_ptr == 0)
3452 rtx inv_target = gen_reg_rtx (GET_MODE (target));
3453 mips_emit_binary (inv_code, inv_target, cmp0, cmp1);
3454 mips_emit_binary (XOR, target, inv_target, const1_rtx);
3458 *invert_ptr = !*invert_ptr;
3459 mips_emit_binary (inv_code, target, cmp0, cmp1);
3464 /* Return a register that is zero iff CMP0 and CMP1 are equal.
3465 The register will have the same mode as CMP0. */
3468 mips_zero_if_equal (rtx cmp0, rtx cmp1)
3470 if (cmp1 == const0_rtx)
3473 if (uns_arith_operand (cmp1, VOIDmode))
3474 return expand_binop (GET_MODE (cmp0), xor_optab,
3475 cmp0, cmp1, 0, 0, OPTAB_DIRECT);
3477 return expand_binop (GET_MODE (cmp0), sub_optab,
3478 cmp0, cmp1, 0, 0, OPTAB_DIRECT);
3481 /* Convert *CODE into a code that can be used in a floating-point
3482 scc instruction (c.<cond>.<fmt>). Return true if the values of
3483 the condition code registers will be inverted, with 0 indicating
3484 that the condition holds. */
3487 mips_reverse_fp_cond_p (enum rtx_code *code)
3494 *code = reverse_condition_maybe_unordered (*code);
3502 /* Convert a comparison into something that can be used in a branch or
3503 conditional move. cmp_operands[0] and cmp_operands[1] are the values
3504 being compared and *CODE is the code used to compare them.
3506 Update *CODE, *OP0 and *OP1 so that they describe the final comparison.
3507 If NEED_EQ_NE_P, then only EQ/NE comparisons against zero are possible,
3508 otherwise any standard branch condition can be used. The standard branch
3511 - EQ/NE between two registers.
3512 - any comparison between a register and zero. */
3515 mips_emit_compare (enum rtx_code *code, rtx *op0, rtx *op1, bool need_eq_ne_p)
3517 if (GET_MODE_CLASS (GET_MODE (cmp_operands[0])) == MODE_INT)
3519 if (!need_eq_ne_p && cmp_operands[1] == const0_rtx)
3521 *op0 = cmp_operands[0];
3522 *op1 = cmp_operands[1];
3524 else if (*code == EQ || *code == NE)
3528 *op0 = mips_zero_if_equal (cmp_operands[0], cmp_operands[1]);
3533 *op0 = cmp_operands[0];
3534 *op1 = force_reg (GET_MODE (*op0), cmp_operands[1]);
3539 /* The comparison needs a separate scc instruction. Store the
3540 result of the scc in *OP0 and compare it against zero. */
3541 bool invert = false;
3542 *op0 = gen_reg_rtx (GET_MODE (cmp_operands[0]));
3544 mips_emit_int_relational (*code, &invert, *op0,
3545 cmp_operands[0], cmp_operands[1]);
3546 *code = (invert ? EQ : NE);
3551 enum rtx_code cmp_code;
3553 /* Floating-point tests use a separate c.cond.fmt comparison to
3554 set a condition code register. The branch or conditional move
3555 will then compare that register against zero.
3557 Set CMP_CODE to the code of the comparison instruction and
3558 *CODE to the code that the branch or move should use. */
3560 *code = mips_reverse_fp_cond_p (&cmp_code) ? EQ : NE;
3562 ? gen_reg_rtx (CCmode)
3563 : gen_rtx_REG (CCmode, FPSW_REGNUM));
3565 mips_emit_binary (cmp_code, *op0, cmp_operands[0], cmp_operands[1]);
3569 /* Try comparing cmp_operands[0] and cmp_operands[1] using rtl code CODE.
3570 Store the result in TARGET and return true if successful.
3572 On 64-bit targets, TARGET may be wider than cmp_operands[0]. */
3575 mips_emit_scc (enum rtx_code code, rtx target)
3577 if (GET_MODE_CLASS (GET_MODE (cmp_operands[0])) != MODE_INT)
3580 target = gen_lowpart (GET_MODE (cmp_operands[0]), target);
3581 if (code == EQ || code == NE)
3583 rtx zie = mips_zero_if_equal (cmp_operands[0], cmp_operands[1]);
3584 mips_emit_binary (code, target, zie, const0_rtx);
3587 mips_emit_int_relational (code, 0, target,
3588 cmp_operands[0], cmp_operands[1]);
3592 /* Emit the common code for doing conditional branches.
3593 operand[0] is the label to jump to.
3594 The comparison operands are saved away by cmp{si,di,sf,df}. */
3597 gen_conditional_branch (rtx *operands, enum rtx_code code)
3599 rtx op0, op1, condition;
3601 mips_emit_compare (&code, &op0, &op1, TARGET_MIPS16);
3602 condition = gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
3603 emit_jump_insn (gen_condjump (condition, operands[0]));
3608 (set temp (COND:CCV2 CMP_OP0 CMP_OP1))
3609 (set DEST (unspec [TRUE_SRC FALSE_SRC temp] UNSPEC_MOVE_TF_PS)) */
3612 mips_expand_vcondv2sf (rtx dest, rtx true_src, rtx false_src,
3613 enum rtx_code cond, rtx cmp_op0, rtx cmp_op1)
3618 reversed_p = mips_reverse_fp_cond_p (&cond);
3619 cmp_result = gen_reg_rtx (CCV2mode);
3620 emit_insn (gen_scc_ps (cmp_result,
3621 gen_rtx_fmt_ee (cond, VOIDmode, cmp_op0, cmp_op1)));
3623 emit_insn (gen_mips_cond_move_tf_ps (dest, false_src, true_src,
3626 emit_insn (gen_mips_cond_move_tf_ps (dest, true_src, false_src,
3630 /* Emit the common code for conditional moves. OPERANDS is the array
3631 of operands passed to the conditional move define_expand. */
3634 gen_conditional_move (rtx *operands)
3639 code = GET_CODE (operands[1]);
3640 mips_emit_compare (&code, &op0, &op1, true);
3641 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3642 gen_rtx_IF_THEN_ELSE (GET_MODE (operands[0]),
3643 gen_rtx_fmt_ee (code,
3646 operands[2], operands[3])));
3649 /* Emit a conditional trap. OPERANDS is the array of operands passed to
3650 the conditional_trap expander. */
3653 mips_gen_conditional_trap (rtx *operands)
3656 enum rtx_code cmp_code = GET_CODE (operands[0]);
3657 enum machine_mode mode = GET_MODE (cmp_operands[0]);
3659 /* MIPS conditional trap machine instructions don't have GT or LE
3660 flavors, so we must invert the comparison and convert to LT and
3661 GE, respectively. */
3664 case GT: cmp_code = LT; break;
3665 case LE: cmp_code = GE; break;
3666 case GTU: cmp_code = LTU; break;
3667 case LEU: cmp_code = GEU; break;
3670 if (cmp_code == GET_CODE (operands[0]))
3672 op0 = cmp_operands[0];
3673 op1 = cmp_operands[1];
3677 op0 = cmp_operands[1];
3678 op1 = cmp_operands[0];
3680 op0 = force_reg (mode, op0);
3681 if (!arith_operand (op1, mode))
3682 op1 = force_reg (mode, op1);
3684 emit_insn (gen_rtx_TRAP_IF (VOIDmode,
3685 gen_rtx_fmt_ee (cmp_code, mode, op0, op1),
3689 /* Return true if calls to X can use R_MIPS_CALL* relocations. */
3692 mips_ok_for_lazy_binding_p (rtx x)
3694 return (TARGET_USE_GOT
3695 && GET_CODE (x) == SYMBOL_REF
3696 && !mips_symbol_binds_local_p (x));
3699 /* Load function address ADDR into register DEST. SIBCALL_P is true
3700 if the address is needed for a sibling call. */
3703 mips_load_call_address (rtx dest, rtx addr, int sibcall_p)
3705 /* If we're generating PIC, and this call is to a global function,
3706 try to allow its address to be resolved lazily. This isn't
3707 possible if TARGET_CALL_SAVED_GP since the value of $gp on entry
3708 to the stub would be our caller's gp, not ours. */
3709 if (TARGET_EXPLICIT_RELOCS
3710 && !(sibcall_p && TARGET_CALL_SAVED_GP)
3711 && mips_ok_for_lazy_binding_p (addr))
3713 rtx high, lo_sum_symbol;
3715 high = mips_unspec_offset_high (dest, pic_offset_table_rtx,
3716 addr, SYMBOL_GOTOFF_CALL);
3717 lo_sum_symbol = mips_unspec_address (addr, SYMBOL_GOTOFF_CALL);
3718 if (Pmode == SImode)
3719 emit_insn (gen_load_callsi (dest, high, lo_sum_symbol));
3721 emit_insn (gen_load_calldi (dest, high, lo_sum_symbol));
3724 mips_emit_move (dest, addr);
3728 /* Expand a call or call_value instruction. RESULT is where the
3729 result will go (null for calls), ADDR is the address of the
3730 function, ARGS_SIZE is the size of the arguments and AUX is
3731 the value passed to us by mips_function_arg. SIBCALL_P is true
3732 if we are expanding a sibling call, false if we're expanding
3736 mips_expand_call (rtx result, rtx addr, rtx args_size, rtx aux, int sibcall_p)
3738 rtx orig_addr, pattern, insn;
3741 if (!call_insn_operand (addr, VOIDmode))
3743 addr = gen_reg_rtx (Pmode);
3744 mips_load_call_address (addr, orig_addr, sibcall_p);
3748 && TARGET_HARD_FLOAT_ABI
3749 && build_mips16_call_stub (result, addr, args_size,
3750 aux == 0 ? 0 : (int) GET_MODE (aux)))
3754 pattern = (sibcall_p
3755 ? gen_sibcall_internal (addr, args_size)
3756 : gen_call_internal (addr, args_size));
3757 else if (GET_CODE (result) == PARALLEL && XVECLEN (result, 0) == 2)
3761 reg1 = XEXP (XVECEXP (result, 0, 0), 0);
3762 reg2 = XEXP (XVECEXP (result, 0, 1), 0);
3765 ? gen_sibcall_value_multiple_internal (reg1, addr, args_size, reg2)
3766 : gen_call_value_multiple_internal (reg1, addr, args_size, reg2));
3769 pattern = (sibcall_p
3770 ? gen_sibcall_value_internal (result, addr, args_size)
3771 : gen_call_value_internal (result, addr, args_size));
3773 insn = emit_call_insn (pattern);
3775 /* Lazy-binding stubs require $gp to be valid on entry. */
3776 if (mips_ok_for_lazy_binding_p (orig_addr))
3777 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
3781 /* We can handle any sibcall when TARGET_SIBCALLS is true. */
3784 mips_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
3785 tree exp ATTRIBUTE_UNUSED)
3787 return TARGET_SIBCALLS;
3790 /* Emit code to move general operand SRC into condition-code
3791 register DEST. SCRATCH is a scratch TFmode float register.
3798 where FP1 and FP2 are single-precision float registers
3799 taken from SCRATCH. */
3802 mips_emit_fcc_reload (rtx dest, rtx src, rtx scratch)
3806 /* Change the source to SFmode. */
3808 src = adjust_address (src, SFmode, 0);
3809 else if (REG_P (src) || GET_CODE (src) == SUBREG)
3810 src = gen_rtx_REG (SFmode, true_regnum (src));
3812 fp1 = gen_rtx_REG (SFmode, REGNO (scratch));
3813 fp2 = gen_rtx_REG (SFmode, REGNO (scratch) + MAX_FPRS_PER_FMT);
3815 mips_emit_move (copy_rtx (fp1), src);
3816 mips_emit_move (copy_rtx (fp2), CONST0_RTX (SFmode));
3817 emit_insn (gen_slt_sf (dest, fp2, fp1));
3820 /* Emit code to change the current function's return address to
3821 ADDRESS. SCRATCH is available as a scratch register, if needed.
3822 ADDRESS and SCRATCH are both word-mode GPRs. */
3825 mips_set_return_address (rtx address, rtx scratch)
3829 compute_frame_size (get_frame_size ());
3830 gcc_assert ((cfun->machine->frame.mask >> 31) & 1);
3831 slot_address = mips_add_offset (scratch, stack_pointer_rtx,
3832 cfun->machine->frame.gp_sp_offset);
3834 mips_emit_move (gen_rtx_MEM (GET_MODE (address), slot_address), address);
3837 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
3838 Assume that the areas do not overlap. */
3841 mips_block_move_straight (rtx dest, rtx src, HOST_WIDE_INT length)
3843 HOST_WIDE_INT offset, delta;
3844 unsigned HOST_WIDE_INT bits;
3846 enum machine_mode mode;
3849 /* Work out how many bits to move at a time. If both operands have
3850 half-word alignment, it is usually better to move in half words.
3851 For instance, lh/lh/sh/sh is usually better than lwl/lwr/swl/swr
3852 and lw/lw/sw/sw is usually better than ldl/ldr/sdl/sdr.
3853 Otherwise move word-sized chunks. */
3854 if (MEM_ALIGN (src) == BITS_PER_WORD / 2
3855 && MEM_ALIGN (dest) == BITS_PER_WORD / 2)
3856 bits = BITS_PER_WORD / 2;
3858 bits = BITS_PER_WORD;
3860 mode = mode_for_size (bits, MODE_INT, 0);
3861 delta = bits / BITS_PER_UNIT;
3863 /* Allocate a buffer for the temporary registers. */
3864 regs = alloca (sizeof (rtx) * length / delta);
3866 /* Load as many BITS-sized chunks as possible. Use a normal load if
3867 the source has enough alignment, otherwise use left/right pairs. */
3868 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
3870 regs[i] = gen_reg_rtx (mode);
3871 if (MEM_ALIGN (src) >= bits)
3872 mips_emit_move (regs[i], adjust_address (src, mode, offset));
3875 rtx part = adjust_address (src, BLKmode, offset);
3876 if (!mips_expand_unaligned_load (regs[i], part, bits, 0))
3881 /* Copy the chunks to the destination. */
3882 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
3883 if (MEM_ALIGN (dest) >= bits)
3884 mips_emit_move (adjust_address (dest, mode, offset), regs[i]);
3887 rtx part = adjust_address (dest, BLKmode, offset);
3888 if (!mips_expand_unaligned_store (part, regs[i], bits, 0))
3892 /* Mop up any left-over bytes. */
3893 if (offset < length)
3895 src = adjust_address (src, BLKmode, offset);
3896 dest = adjust_address (dest, BLKmode, offset);
3897 move_by_pieces (dest, src, length - offset,
3898 MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
3902 #define MAX_MOVE_REGS 4
3903 #define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD)
3906 /* Helper function for doing a loop-based block operation on memory
3907 reference MEM. Each iteration of the loop will operate on LENGTH
3910 Create a new base register for use within the loop and point it to
3911 the start of MEM. Create a new memory reference that uses this
3912 register. Store them in *LOOP_REG and *LOOP_MEM respectively. */
3915 mips_adjust_block_mem (rtx mem, HOST_WIDE_INT length,
3916 rtx *loop_reg, rtx *loop_mem)
3918 *loop_reg = copy_addr_to_reg (XEXP (mem, 0));
3920 /* Although the new mem does not refer to a known location,
3921 it does keep up to LENGTH bytes of alignment. */
3922 *loop_mem = change_address (mem, BLKmode, *loop_reg);
3923 set_mem_align (*loop_mem, MIN (MEM_ALIGN (mem), length * BITS_PER_UNIT));
3927 /* Move LENGTH bytes from SRC to DEST using a loop that moves MAX_MOVE_BYTES
3928 per iteration. LENGTH must be at least MAX_MOVE_BYTES. Assume that the
3929 memory regions do not overlap. */
3932 mips_block_move_loop (rtx dest, rtx src, HOST_WIDE_INT length)
3934 rtx label, src_reg, dest_reg, final_src;
3935 HOST_WIDE_INT leftover;
3937 leftover = length % MAX_MOVE_BYTES;
3940 /* Create registers and memory references for use within the loop. */
3941 mips_adjust_block_mem (src, MAX_MOVE_BYTES, &src_reg, &src);
3942 mips_adjust_block_mem (dest, MAX_MOVE_BYTES, &dest_reg, &dest);
3944 /* Calculate the value that SRC_REG should have after the last iteration
3946 final_src = expand_simple_binop (Pmode, PLUS, src_reg, GEN_INT (length),
3949 /* Emit the start of the loop. */
3950 label = gen_label_rtx ();
3953 /* Emit the loop body. */
3954 mips_block_move_straight (dest, src, MAX_MOVE_BYTES);
3956 /* Move on to the next block. */
3957 mips_emit_move (src_reg, plus_constant (src_reg, MAX_MOVE_BYTES));
3958 mips_emit_move (dest_reg, plus_constant (dest_reg, MAX_MOVE_BYTES));
3960 /* Emit the loop condition. */
3961 if (Pmode == DImode)
3962 emit_insn (gen_cmpdi (src_reg, final_src));
3964 emit_insn (gen_cmpsi (src_reg, final_src));
3965 emit_jump_insn (gen_bne (label));
3967 /* Mop up any left-over bytes. */
3969 mips_block_move_straight (dest, src, leftover);
3973 /* Expand a loop of synci insns for the address range [BEGIN, END). */
3976 mips_expand_synci_loop (rtx begin, rtx end)
3978 rtx inc, label, cmp, cmp_result;
3980 /* Load INC with the cache line size (rdhwr INC,$1). */
3981 inc = gen_reg_rtx (SImode);
3982 emit_insn (gen_rdhwr (inc, const1_rtx));
3984 /* Loop back to here. */
3985 label = gen_label_rtx ();
3988 emit_insn (gen_synci (begin));
3990 cmp = gen_reg_rtx (Pmode);
3991 mips_emit_binary (GTU, cmp, begin, end);
3993 mips_emit_binary (PLUS, begin, begin, inc);
3995 cmp_result = gen_rtx_EQ (VOIDmode, cmp, const0_rtx);
3996 emit_jump_insn (gen_condjump (cmp_result, label));
3999 /* Expand a movmemsi instruction. */
4002 mips_expand_block_move (rtx dest, rtx src, rtx length)
4004 if (GET_CODE (length) == CONST_INT)
4006 if (INTVAL (length) <= 2 * MAX_MOVE_BYTES)
4008 mips_block_move_straight (dest, src, INTVAL (length));
4013 mips_block_move_loop (dest, src, INTVAL (length));
4020 /* Argument support functions. */
4022 /* Initialize CUMULATIVE_ARGS for a function. */
4025 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4026 rtx libname ATTRIBUTE_UNUSED)
4028 static CUMULATIVE_ARGS zero_cum;
4029 tree param, next_param;
4032 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4034 /* Determine if this function has variable arguments. This is
4035 indicated by the last argument being 'void_type_mode' if there
4036 are no variable arguments. The standard MIPS calling sequence
4037 passes all arguments in the general purpose registers in this case. */
4039 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
4040 param != 0; param = next_param)
4042 next_param = TREE_CHAIN (param);
4043 if (next_param == 0 && TREE_VALUE (param) != void_type_node)
4044 cum->gp_reg_found = 1;
4049 /* Fill INFO with information about a single argument. CUM is the
4050 cumulative state for earlier arguments. MODE is the mode of this
4051 argument and TYPE is its type (if known). NAMED is true if this
4052 is a named (fixed) argument rather than a variable one. */
4055 mips_arg_info (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
4056 tree type, int named, struct mips_arg_info *info)
4058 bool doubleword_aligned_p;
4059 unsigned int num_bytes, num_words, max_regs;
4061 /* Work out the size of the argument. */
4062 num_bytes = type ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
4063 num_words = (num_bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4065 /* Decide whether it should go in a floating-point register, assuming
4066 one is free. Later code checks for availability.
4068 The checks against UNITS_PER_FPVALUE handle the soft-float and
4069 single-float cases. */
4073 /* The EABI conventions have traditionally been defined in terms
4074 of TYPE_MODE, regardless of the actual type. */
4075 info->fpr_p = ((GET_MODE_CLASS (mode) == MODE_FLOAT
4076 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
4077 && GET_MODE_SIZE (mode) <= UNITS_PER_FPVALUE);
4082 /* Only leading floating-point scalars are passed in
4083 floating-point registers. We also handle vector floats the same
4084 say, which is OK because they are not covered by the standard ABI. */
4085 info->fpr_p = (!cum->gp_reg_found
4086 && cum->arg_number < 2
4087 && (type == 0 || SCALAR_FLOAT_TYPE_P (type)
4088 || VECTOR_FLOAT_TYPE_P (type))
4089 && (GET_MODE_CLASS (mode) == MODE_FLOAT
4090 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
4091 && GET_MODE_SIZE (mode) <= UNITS_PER_FPVALUE);
4096 /* Scalar and complex floating-point types are passed in
4097 floating-point registers. */
4098 info->fpr_p = (named
4099 && (type == 0 || FLOAT_TYPE_P (type))
4100 && (GET_MODE_CLASS (mode) == MODE_FLOAT
4101 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
4102 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
4103 && GET_MODE_UNIT_SIZE (mode) <= UNITS_PER_FPVALUE);
4105 /* ??? According to the ABI documentation, the real and imaginary
4106 parts of complex floats should be passed in individual registers.
4107 The real and imaginary parts of stack arguments are supposed
4108 to be contiguous and there should be an extra word of padding
4111 This has two problems. First, it makes it impossible to use a
4112 single "void *" va_list type, since register and stack arguments
4113 are passed differently. (At the time of writing, MIPSpro cannot
4114 handle complex float varargs correctly.) Second, it's unclear
4115 what should happen when there is only one register free.
4117 For now, we assume that named complex floats should go into FPRs
4118 if there are two FPRs free, otherwise they should be passed in the
4119 same way as a struct containing two floats. */
4121 && GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
4122 && GET_MODE_UNIT_SIZE (mode) < UNITS_PER_FPVALUE)
4124 if (cum->num_gprs >= MAX_ARGS_IN_REGISTERS - 1)
4125 info->fpr_p = false;
4135 /* See whether the argument has doubleword alignment. */
4136 doubleword_aligned_p = FUNCTION_ARG_BOUNDARY (mode, type) > BITS_PER_WORD;
4138 /* Set REG_OFFSET to the register count we're interested in.
4139 The EABI allocates the floating-point registers separately,
4140 but the other ABIs allocate them like integer registers. */
4141 info->reg_offset = (mips_abi == ABI_EABI && info->fpr_p
4145 /* Advance to an even register if the argument is doubleword-aligned. */
4146 if (doubleword_aligned_p)
4147 info->reg_offset += info->reg_offset & 1;
4149 /* Work out the offset of a stack argument. */
4150 info->stack_offset = cum->stack_words;
4151 if (doubleword_aligned_p)
4152 info->stack_offset += info->stack_offset & 1;
4154 max_regs = MAX_ARGS_IN_REGISTERS - info->reg_offset;
4156 /* Partition the argument between registers and stack. */
4157 info->reg_words = MIN (num_words, max_regs);
4158 info->stack_words = num_words - info->reg_words;
4162 /* INFO describes an argument that is passed in a single-register value.
4163 Return the register it uses, assuming that FPRs are available if
4167 mips_arg_regno (const struct mips_arg_info *info, bool hard_float_p)
4169 if (!info->fpr_p || !hard_float_p)
4170 return GP_ARG_FIRST + info->reg_offset;
4171 else if (mips_abi == ABI_32 && TARGET_DOUBLE_FLOAT && info->reg_offset > 0)
4172 /* In o32, the second argument is always passed in $f14
4173 for TARGET_DOUBLE_FLOAT, regardless of whether the
4174 first argument was a word or doubleword. */
4175 return FP_ARG_FIRST + 2;
4177 return FP_ARG_FIRST + info->reg_offset;
4180 /* Implement FUNCTION_ARG_ADVANCE. */
4183 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4184 tree type, int named)
4186 struct mips_arg_info info;
4188 mips_arg_info (cum, mode, type, named, &info);
4191 cum->gp_reg_found = true;
4193 /* See the comment above the cumulative args structure in mips.h
4194 for an explanation of what this code does. It assumes the O32
4195 ABI, which passes at most 2 arguments in float registers. */
4196 if (cum->arg_number < 2 && info.fpr_p)
4197 cum->fp_code += (mode == SFmode ? 1 : 2) << (cum->arg_number * 2);
4199 if (mips_abi != ABI_EABI || !info.fpr_p)
4200 cum->num_gprs = info.reg_offset + info.reg_words;
4201 else if (info.reg_words > 0)
4202 cum->num_fprs += MAX_FPRS_PER_FMT;
4204 if (info.stack_words > 0)
4205 cum->stack_words = info.stack_offset + info.stack_words;
4210 /* Implement FUNCTION_ARG. */
4213 function_arg (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
4214 tree type, int named)
4216 struct mips_arg_info info;
4218 /* We will be called with a mode of VOIDmode after the last argument
4219 has been seen. Whatever we return will be passed to the call
4220 insn. If we need a mips16 fp_code, return a REG with the code
4221 stored as the mode. */
4222 if (mode == VOIDmode)
4224 if (TARGET_MIPS16 && cum->fp_code != 0)
4225 return gen_rtx_REG ((enum machine_mode) cum->fp_code, 0);
4231 mips_arg_info (cum, mode, type, named, &info);
4233 /* Return straight away if the whole argument is passed on the stack. */
4234 if (info.reg_offset == MAX_ARGS_IN_REGISTERS)
4238 && TREE_CODE (type) == RECORD_TYPE
4240 && TYPE_SIZE_UNIT (type)
4241 && host_integerp (TYPE_SIZE_UNIT (type), 1)
4244 /* The Irix 6 n32/n64 ABIs say that if any 64-bit chunk of the
4245 structure contains a double in its entirety, then that 64-bit
4246 chunk is passed in a floating point register. */
4249 /* First check to see if there is any such field. */
4250 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4251 if (TREE_CODE (field) == FIELD_DECL
4252 && TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
4253 && TYPE_PRECISION (TREE_TYPE (field)) == BITS_PER_WORD
4254 && host_integerp (bit_position (field), 0)
4255 && int_bit_position (field) % BITS_PER_WORD == 0)
4260 /* Now handle the special case by returning a PARALLEL
4261 indicating where each 64-bit chunk goes. INFO.REG_WORDS
4262 chunks are passed in registers. */
4264 HOST_WIDE_INT bitpos;
4267 /* assign_parms checks the mode of ENTRY_PARM, so we must
4268 use the actual mode here. */
4269 ret = gen_rtx_PARALLEL (mode, rtvec_alloc (info.reg_words));
4272 field = TYPE_FIELDS (type);
4273 for (i = 0; i < info.reg_words; i++)
4277 for (; field; field = TREE_CHAIN (field))
4278 if (TREE_CODE (field) == FIELD_DECL
4279 && int_bit_position (field) >= bitpos)
4283 && int_bit_position (field) == bitpos
4284 && TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
4285 && !TARGET_SOFT_FLOAT
4286 && TYPE_PRECISION (TREE_TYPE (field)) == BITS_PER_WORD)
4287 reg = gen_rtx_REG (DFmode, FP_ARG_FIRST + info.reg_offset + i);
4289 reg = gen_rtx_REG (DImode, GP_ARG_FIRST + info.reg_offset + i);
4292 = gen_rtx_EXPR_LIST (VOIDmode, reg,
4293 GEN_INT (bitpos / BITS_PER_UNIT));
4295 bitpos += BITS_PER_WORD;
4301 /* Handle the n32/n64 conventions for passing complex floating-point
4302 arguments in FPR pairs. The real part goes in the lower register
4303 and the imaginary part goes in the upper register. */
4306 && GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
4309 enum machine_mode inner;
4312 inner = GET_MODE_INNER (mode);
4313 reg = FP_ARG_FIRST + info.reg_offset;
4314 if (info.reg_words * UNITS_PER_WORD == GET_MODE_SIZE (inner))
4316 /* Real part in registers, imaginary part on stack. */
4317 gcc_assert (info.stack_words == info.reg_words);
4318 return gen_rtx_REG (inner, reg);
4322 gcc_assert (info.stack_words == 0);
4323 real = gen_rtx_EXPR_LIST (VOIDmode,
4324 gen_rtx_REG (inner, reg),
4326 imag = gen_rtx_EXPR_LIST (VOIDmode,
4328 reg + info.reg_words / 2),
4329 GEN_INT (GET_MODE_SIZE (inner)));
4330 return gen_rtx_PARALLEL (mode, gen_rtvec (2, real, imag));
4334 return gen_rtx_REG (mode, mips_arg_regno (&info, TARGET_HARD_FLOAT));
4338 /* Implement TARGET_ARG_PARTIAL_BYTES. */
4341 mips_arg_partial_bytes (CUMULATIVE_ARGS *cum,
4342 enum machine_mode mode, tree type, bool named)
4344 struct mips_arg_info info;
4346 mips_arg_info (cum, mode, type, named, &info);
4347 return info.stack_words > 0 ? info.reg_words * UNITS_PER_WORD : 0;
4351 /* Implement FUNCTION_ARG_BOUNDARY. Every parameter gets at least
4352 PARM_BOUNDARY bits of alignment, but will be given anything up
4353 to STACK_BOUNDARY bits if the type requires it. */
4356 function_arg_boundary (enum machine_mode mode, tree type)
4358 unsigned int alignment;
4360 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
4361 if (alignment < PARM_BOUNDARY)
4362 alignment = PARM_BOUNDARY;
4363 if (alignment > STACK_BOUNDARY)
4364 alignment = STACK_BOUNDARY;
4368 /* Return true if FUNCTION_ARG_PADDING (MODE, TYPE) should return
4369 upward rather than downward. In other words, return true if the
4370 first byte of the stack slot has useful data, false if the last
4374 mips_pad_arg_upward (enum machine_mode mode, tree type)
4376 /* On little-endian targets, the first byte of every stack argument
4377 is passed in the first byte of the stack slot. */
4378 if (!BYTES_BIG_ENDIAN)
4381 /* Otherwise, integral types are padded downward: the last byte of a
4382 stack argument is passed in the last byte of the stack slot. */
4384 ? INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
4385 : GET_MODE_CLASS (mode) == MODE_INT)
4388 /* Big-endian o64 pads floating-point arguments downward. */
4389 if (mips_abi == ABI_O64)
4390 if (type != 0 ? FLOAT_TYPE_P (type) : GET_MODE_CLASS (mode) == MODE_FLOAT)
4393 /* Other types are padded upward for o32, o64, n32 and n64. */
4394 if (mips_abi != ABI_EABI)
4397 /* Arguments smaller than a stack slot are padded downward. */
4398 if (mode != BLKmode)
4399 return (GET_MODE_BITSIZE (mode) >= PARM_BOUNDARY);
4401 return (int_size_in_bytes (type) >= (PARM_BOUNDARY / BITS_PER_UNIT));
4405 /* Likewise BLOCK_REG_PADDING (MODE, TYPE, ...). Return !BYTES_BIG_ENDIAN
4406 if the least significant byte of the register has useful data. Return
4407 the opposite if the most significant byte does. */
4410 mips_pad_reg_upward (enum machine_mode mode, tree type)
4412 /* No shifting is required for floating-point arguments. */
4413 if (type != 0 ? FLOAT_TYPE_P (type) : GET_MODE_CLASS (mode) == MODE_FLOAT)
4414 return !BYTES_BIG_ENDIAN;
4416 /* Otherwise, apply the same padding to register arguments as we do
4417 to stack arguments. */
4418 return mips_pad_arg_upward (mode, type);
4422 mips_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4423 tree type, int *pretend_size ATTRIBUTE_UNUSED,
4426 CUMULATIVE_ARGS local_cum;
4427 int gp_saved, fp_saved;
4429 /* The caller has advanced CUM up to, but not beyond, the last named
4430 argument. Advance a local copy of CUM past the last "real" named
4431 argument, to find out how many registers are left over. */
4434 FUNCTION_ARG_ADVANCE (local_cum, mode, type, 1);
4436 /* Found out how many registers we need to save. */
4437 gp_saved = MAX_ARGS_IN_REGISTERS - local_cum.num_gprs;
4438 fp_saved = (EABI_FLOAT_VARARGS_P
4439 ? MAX_ARGS_IN_REGISTERS - local_cum.num_fprs
4448 ptr = plus_constant (virtual_incoming_args_rtx,
4449 REG_PARM_STACK_SPACE (cfun->decl)
4450 - gp_saved * UNITS_PER_WORD);
4451 mem = gen_rtx_MEM (BLKmode, ptr);
4452 set_mem_alias_set (mem, get_varargs_alias_set ());
4454 move_block_from_reg (local_cum.num_gprs + GP_ARG_FIRST,
4459 /* We can't use move_block_from_reg, because it will use
4461 enum machine_mode mode;
4464 /* Set OFF to the offset from virtual_incoming_args_rtx of
4465 the first float register. The FP save area lies below
4466 the integer one, and is aligned to UNITS_PER_FPVALUE bytes. */
4467 off = -gp_saved * UNITS_PER_WORD;
4468 off &= ~(UNITS_PER_FPVALUE - 1);
4469 off -= fp_saved * UNITS_PER_FPREG;
4471 mode = TARGET_SINGLE_FLOAT ? SFmode : DFmode;
4473 for (i = local_cum.num_fprs; i < MAX_ARGS_IN_REGISTERS;
4474 i += MAX_FPRS_PER_FMT)
4478 ptr = plus_constant (virtual_incoming_args_rtx, off);
4479 mem = gen_rtx_MEM (mode, ptr);
4480 set_mem_alias_set (mem, get_varargs_alias_set ());
4481 mips_emit_move (mem, gen_rtx_REG (mode, FP_ARG_FIRST + i));
4482 off += UNITS_PER_HWFPVALUE;
4486 if (REG_PARM_STACK_SPACE (cfun->decl) == 0)
4487 cfun->machine->varargs_size = (gp_saved * UNITS_PER_WORD
4488 + fp_saved * UNITS_PER_FPREG);
4491 /* Create the va_list data type.
4492 We keep 3 pointers, and two offsets.
4493 Two pointers are to the overflow area, which starts at the CFA.
4494 One of these is constant, for addressing into the GPR save area below it.
4495 The other is advanced up the stack through the overflow region.
4496 The third pointer is to the GPR save area. Since the FPR save area
4497 is just below it, we can address FPR slots off this pointer.
4498 We also keep two one-byte offsets, which are to be subtracted from the
4499 constant pointers to yield addresses in the GPR and FPR save areas.
4500 These are downcounted as float or non-float arguments are used,
4501 and when they get to zero, the argument must be obtained from the
4503 If !EABI_FLOAT_VARARGS_P, then no FPR save area exists, and a single
4504 pointer is enough. It's started at the GPR save area, and is
4506 Note that the GPR save area is not constant size, due to optimization
4507 in the prologue. Hence, we can't use a design with two pointers
4508 and two offsets, although we could have designed this with two pointers
4509 and three offsets. */
4512 mips_build_builtin_va_list (void)
4514 if (EABI_FLOAT_VARARGS_P)
4516 tree f_ovfl, f_gtop, f_ftop, f_goff, f_foff, f_res, record;
4519 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4521 f_ovfl = build_decl (FIELD_DECL, get_identifier ("__overflow_argptr"),
4523 f_gtop = build_decl (FIELD_DECL, get_identifier ("__gpr_top"),
4525 f_ftop = build_decl (FIELD_DECL, get_identifier ("__fpr_top"),
4527 f_goff = build_decl (FIELD_DECL, get_identifier ("__gpr_offset"),
4528 unsigned_char_type_node);
4529 f_foff = build_decl (FIELD_DECL, get_identifier ("__fpr_offset"),
4530 unsigned_char_type_node);
4531 /* Explicitly pad to the size of a pointer, so that -Wpadded won't
4532 warn on every user file. */
4533 index = build_int_cst (NULL_TREE, GET_MODE_SIZE (ptr_mode) - 2 - 1);
4534 array = build_array_type (unsigned_char_type_node,
4535 build_index_type (index));
4536 f_res = build_decl (FIELD_DECL, get_identifier ("__reserved"), array);
4538 DECL_FIELD_CONTEXT (f_ovfl) = record;
4539 DECL_FIELD_CONTEXT (f_gtop) = record;
4540 DECL_FIELD_CONTEXT (f_ftop) = record;
4541 DECL_FIELD_CONTEXT (f_goff) = record;
4542 DECL_FIELD_CONTEXT (f_foff) = record;
4543 DECL_FIELD_CONTEXT (f_res) = record;
4545 TYPE_FIELDS (record) = f_ovfl;
4546 TREE_CHAIN (f_ovfl) = f_gtop;
4547 TREE_CHAIN (f_gtop) = f_ftop;
4548 TREE_CHAIN (f_ftop) = f_goff;
4549 TREE_CHAIN (f_goff) = f_foff;
4550 TREE_CHAIN (f_foff) = f_res;
4552 layout_type (record);
4555 else if (TARGET_IRIX && TARGET_IRIX6)
4556 /* On IRIX 6, this type is 'char *'. */
4557 return build_pointer_type (char_type_node);
4559 /* Otherwise, we use 'void *'. */
4560 return ptr_type_node;
4563 /* Implement va_start. */
4566 mips_va_start (tree valist, rtx nextarg)
4568 if (EABI_FLOAT_VARARGS_P)
4570 const CUMULATIVE_ARGS *cum;
4571 tree f_ovfl, f_gtop, f_ftop, f_goff, f_foff;
4572 tree ovfl, gtop, ftop, goff, foff;
4574 int gpr_save_area_size;
4575 int fpr_save_area_size;
4578 cum = ¤t_function_args_info;
4580 = (MAX_ARGS_IN_REGISTERS - cum->num_gprs) * UNITS_PER_WORD;
4582 = (MAX_ARGS_IN_REGISTERS - cum->num_fprs) * UNITS_PER_FPREG;
4584 f_ovfl = TYPE_FIELDS (va_list_type_node);
4585 f_gtop = TREE_CHAIN (f_ovfl);
4586 f_ftop = TREE_CHAIN (f_gtop);
4587 f_goff = TREE_CHAIN (f_ftop);
4588 f_foff = TREE_CHAIN (f_goff);
4590 ovfl = build3 (COMPONENT_REF, TREE_TYPE (f_ovfl), valist, f_ovfl,
4592 gtop = build3 (COMPONENT_REF, TREE_TYPE (f_gtop), valist, f_gtop,
4594 ftop = build3 (COMPONENT_REF, TREE_TYPE (f_ftop), valist, f_ftop,
4596 goff = build3 (COMPONENT_REF, TREE_TYPE (f_goff), valist, f_goff,
4598 foff = build3 (COMPONENT_REF, TREE_TYPE (f_foff), valist, f_foff,
4601 /* Emit code to initialize OVFL, which points to the next varargs
4602 stack argument. CUM->STACK_WORDS gives the number of stack
4603 words used by named arguments. */
4604 t = make_tree (TREE_TYPE (ovfl), virtual_incoming_args_rtx);
4605 if (cum->stack_words > 0)
4606 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovfl), t,
4607 size_int (cum->stack_words * UNITS_PER_WORD));
4608 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovfl), ovfl, t);
4609 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4611 /* Emit code to initialize GTOP, the top of the GPR save area. */
4612 t = make_tree (TREE_TYPE (gtop), virtual_incoming_args_rtx);
4613 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gtop), gtop, t);
4614 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4616 /* Emit code to initialize FTOP, the top of the FPR save area.
4617 This address is gpr_save_area_bytes below GTOP, rounded
4618 down to the next fp-aligned boundary. */
4619 t = make_tree (TREE_TYPE (ftop), virtual_incoming_args_rtx);
4620 fpr_offset = gpr_save_area_size + UNITS_PER_FPVALUE - 1;
4621 fpr_offset &= ~(UNITS_PER_FPVALUE - 1);
4623 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ftop), t,
4624 size_int (-fpr_offset));
4625 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ftop), ftop, t);
4626 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4628 /* Emit code to initialize GOFF, the offset from GTOP of the
4629 next GPR argument. */
4630 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (goff), goff,
4631 build_int_cst (NULL_TREE, gpr_save_area_size));
4632 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4634 /* Likewise emit code to initialize FOFF, the offset from FTOP
4635 of the next FPR argument. */
4636 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (foff), foff,
4637 build_int_cst (NULL_TREE, fpr_save_area_size));
4638 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4642 nextarg = plus_constant (nextarg, -cfun->machine->varargs_size);
4643 std_expand_builtin_va_start (valist, nextarg);
4647 /* Implement va_arg. */
4650 mips_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4652 HOST_WIDE_INT size, rsize;
4656 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, 0);
4659 type = build_pointer_type (type);
4661 size = int_size_in_bytes (type);
4662 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
4664 if (mips_abi != ABI_EABI || !EABI_FLOAT_VARARGS_P)
4665 addr = std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
4668 /* Not a simple merged stack. */
4670 tree f_ovfl, f_gtop, f_ftop, f_goff, f_foff;
4671 tree ovfl, top, off, align;
4672 HOST_WIDE_INT osize;
4675 f_ovfl = TYPE_FIELDS (va_list_type_node);
4676 f_gtop = TREE_CHAIN (f_ovfl);
4677 f_ftop = TREE_CHAIN (f_gtop);
4678 f_goff = TREE_CHAIN (f_ftop);
4679 f_foff = TREE_CHAIN (f_goff);
4681 /* We maintain separate pointers and offsets for floating-point
4682 and integer arguments, but we need similar code in both cases.
4685 TOP be the top of the register save area;
4686 OFF be the offset from TOP of the next register;
4687 ADDR_RTX be the address of the argument;
4688 RSIZE be the number of bytes used to store the argument
4689 when it's in the register save area;
4690 OSIZE be the number of bytes used to store it when it's
4691 in the stack overflow area; and
4692 PADDING be (BYTES_BIG_ENDIAN ? OSIZE - RSIZE : 0)
4694 The code we want is:
4696 1: off &= -rsize; // round down
4699 4: addr_rtx = top - off;
4704 9: ovfl += ((intptr_t) ovfl + osize - 1) & -osize;
4705 10: addr_rtx = ovfl + PADDING;
4709 [1] and [9] can sometimes be optimized away. */
4711 ovfl = build3 (COMPONENT_REF, TREE_TYPE (f_ovfl), valist, f_ovfl,
4714 if (GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT
4715 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_FPVALUE)
4717 top = build3 (COMPONENT_REF, TREE_TYPE (f_ftop), valist, f_ftop,
4719 off = build3 (COMPONENT_REF, TREE_TYPE (f_foff), valist, f_foff,
4722 /* When floating-point registers are saved to the stack,
4723 each one will take up UNITS_PER_HWFPVALUE bytes, regardless
4724 of the float's precision. */
4725 rsize = UNITS_PER_HWFPVALUE;
4727 /* Overflow arguments are padded to UNITS_PER_WORD bytes
4728 (= PARM_BOUNDARY bits). This can be different from RSIZE
4731 (1) On 32-bit targets when TYPE is a structure such as:
4733 struct s { float f; };
4735 Such structures are passed in paired FPRs, so RSIZE
4736 will be 8 bytes. However, the structure only takes
4737 up 4 bytes of memory, so OSIZE will only be 4.
4739 (2) In combinations such as -mgp64 -msingle-float
4740 -fshort-double. Doubles passed in registers
4741 will then take up 4 (UNITS_PER_HWFPVALUE) bytes,
4742 but those passed on the stack take up
4743 UNITS_PER_WORD bytes. */
4744 osize = MAX (GET_MODE_SIZE (TYPE_MODE (type)), UNITS_PER_WORD);
4748 top = build3 (COMPONENT_REF, TREE_TYPE (f_gtop), valist, f_gtop,
4750 off = build3 (COMPONENT_REF, TREE_TYPE (f_goff), valist, f_goff,
4752 if (rsize > UNITS_PER_WORD)
4754 /* [1] Emit code for: off &= -rsize. */
4755 t = build2 (BIT_AND_EXPR, TREE_TYPE (off), off,
4756 build_int_cst (NULL_TREE, -rsize));
4757 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (off), off, t);
4758 gimplify_and_add (t, pre_p);
4763 /* [2] Emit code to branch if off == 0. */
4764 t = build2 (NE_EXPR, boolean_type_node, off,
4765 build_int_cst (TREE_TYPE (off), 0));
4766 addr = build3 (COND_EXPR, ptr_type_node, t, NULL_TREE, NULL_TREE);
4768 /* [5] Emit code for: off -= rsize. We do this as a form of
4769 post-increment not available to C. Also widen for the
4770 coming pointer arithmetic. */
4771 t = fold_convert (TREE_TYPE (off), build_int_cst (NULL_TREE, rsize));
4772 t = build2 (POSTDECREMENT_EXPR, TREE_TYPE (off), off, t);
4773 t = fold_convert (sizetype, t);
4774 t = fold_build1 (NEGATE_EXPR, sizetype, t);
4776 /* [4] Emit code for: addr_rtx = top - off. On big endian machines,
4777 the argument has RSIZE - SIZE bytes of leading padding. */
4778 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (top), top, t);
4779 if (BYTES_BIG_ENDIAN && rsize > size)
4781 u = size_int (rsize - size);
4782 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, u);
4784 COND_EXPR_THEN (addr) = t;
4786 if (osize > UNITS_PER_WORD)
4788 /* [9] Emit: ovfl += ((intptr_t) ovfl + osize - 1) & -osize. */
4789 u = size_int (osize - 1);
4790 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovfl), ovfl, u);
4791 t = fold_convert (sizetype, t);
4792 u = size_int (-osize);
4793 t = build2 (BIT_AND_EXPR, sizetype, t, u);
4794 t = fold_convert (TREE_TYPE (ovfl), t);
4795 align = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovfl), ovfl, t);
4800 /* [10, 11]. Emit code to store ovfl in addr_rtx, then
4801 post-increment ovfl by osize. On big-endian machines,
4802 the argument has OSIZE - SIZE bytes of leading padding. */
4803 u = fold_convert (TREE_TYPE (ovfl),
4804 build_int_cst (NULL_TREE, osize));
4805 t = build2 (POSTINCREMENT_EXPR, TREE_TYPE (ovfl), ovfl, u);
4806 if (BYTES_BIG_ENDIAN && osize > size)
4808 u = size_int (osize - size);
4809 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, u);
4812 /* String [9] and [10,11] together. */
4814 t = build2 (COMPOUND_EXPR, TREE_TYPE (t), align, t);
4815 COND_EXPR_ELSE (addr) = t;
4817 addr = fold_convert (build_pointer_type (type), addr);
4818 addr = build_va_arg_indirect_ref (addr);
4822 addr = build_va_arg_indirect_ref (addr);
4827 /* Return true if it is possible to use left/right accesses for a
4828 bitfield of WIDTH bits starting BITPOS bits into *OP. When
4829 returning true, update *OP, *LEFT and *RIGHT as follows:
4831 *OP is a BLKmode reference to the whole field.
4833 *LEFT is a QImode reference to the first byte if big endian or
4834 the last byte if little endian. This address can be used in the
4835 left-side instructions (lwl, swl, ldl, sdl).
4837 *RIGHT is a QImode reference to the opposite end of the field and
4838 can be used in the patterning right-side instruction. */
4841 mips_get_unaligned_mem (rtx *op, unsigned int width, int bitpos,
4842 rtx *left, rtx *right)
4846 /* Check that the operand really is a MEM. Not all the extv and
4847 extzv predicates are checked. */
4851 /* Check that the size is valid. */
4852 if (width != 32 && (!TARGET_64BIT || width != 64))
4855 /* We can only access byte-aligned values. Since we are always passed
4856 a reference to the first byte of the field, it is not necessary to
4857 do anything with BITPOS after this check. */
4858 if (bitpos % BITS_PER_UNIT != 0)
4861 /* Reject aligned bitfields: we want to use a normal load or store
4862 instead of a left/right pair. */
4863 if (MEM_ALIGN (*op) >= width)
4866 /* Adjust *OP to refer to the whole field. This also has the effect
4867 of legitimizing *OP's address for BLKmode, possibly simplifying it. */
4868 *op = adjust_address (*op, BLKmode, 0);
4869 set_mem_size (*op, GEN_INT (width / BITS_PER_UNIT));
4871 /* Get references to both ends of the field. We deliberately don't
4872 use the original QImode *OP for FIRST since the new BLKmode one
4873 might have a simpler address. */
4874 first = adjust_address (*op, QImode, 0);
4875 last = adjust_address (*op, QImode, width / BITS_PER_UNIT - 1);
4877 /* Allocate to LEFT and RIGHT according to endianness. LEFT should
4878 be the upper word and RIGHT the lower word. */
4879 if (TARGET_BIG_ENDIAN)
4880 *left = first, *right = last;
4882 *left = last, *right = first;
4888 /* Try to emit the equivalent of (set DEST (zero_extract SRC WIDTH BITPOS)).
4889 Return true on success. We only handle cases where zero_extract is
4890 equivalent to sign_extract. */
4893 mips_expand_unaligned_load (rtx dest, rtx src, unsigned int width, int bitpos)
4895 rtx left, right, temp;
4897 /* If TARGET_64BIT, the destination of a 32-bit load will be a
4898 paradoxical word_mode subreg. This is the only case in which
4899 we allow the destination to be larger than the source. */
4900 if (GET_CODE (dest) == SUBREG
4901 && GET_MODE (dest) == DImode
4902 && SUBREG_BYTE (dest) == 0
4903 && GET_MODE (SUBREG_REG (dest)) == SImode)
4904 dest = SUBREG_REG (dest);
4906 /* After the above adjustment, the destination must be the same
4907 width as the source. */
4908 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4911 if (!mips_get_unaligned_mem (&src, width, bitpos, &left, &right))
4914 temp = gen_reg_rtx (GET_MODE (dest));
4915 if (GET_MODE (dest) == DImode)
4917 emit_insn (gen_mov_ldl (temp, src, left));
4918 emit_insn (gen_mov_ldr (dest, copy_rtx (src), right, temp));
4922 emit_insn (gen_mov_lwl (temp, src, left));
4923 emit_insn (gen_mov_lwr (dest, copy_rtx (src), right, temp));
4929 /* Try to expand (set (zero_extract DEST WIDTH BITPOS) SRC). Return
4933 mips_expand_unaligned_store (rtx dest, rtx src, unsigned int width, int bitpos)
4936 enum machine_mode mode;
4938 if (!mips_get_unaligned_mem (&dest, width, bitpos, &left, &right))
4941 mode = mode_for_size (width, MODE_INT, 0);
4942 src = gen_lowpart (mode, src);
4946 emit_insn (gen_mov_sdl (dest, src, left));
4947 emit_insn (gen_mov_sdr (copy_rtx (dest), copy_rtx (src), right));
4951 emit_insn (gen_mov_swl (dest, src, left));
4952 emit_insn (gen_mov_swr (copy_rtx (dest), copy_rtx (src), right));
4957 /* Return true if X is a MEM with the same size as MODE. */
4960 mips_mem_fits_mode_p (enum machine_mode mode, rtx x)
4967 size = MEM_SIZE (x);
4968 return size && INTVAL (size) == GET_MODE_SIZE (mode);
4971 /* Return true if (zero_extract OP SIZE POSITION) can be used as the
4972 source of an "ext" instruction or the destination of an "ins"
4973 instruction. OP must be a register operand and the following
4974 conditions must hold:
4976 0 <= POSITION < GET_MODE_BITSIZE (GET_MODE (op))
4977 0 < SIZE <= GET_MODE_BITSIZE (GET_MODE (op))
4978 0 < POSITION + SIZE <= GET_MODE_BITSIZE (GET_MODE (op))
4980 Also reject lengths equal to a word as they are better handled
4981 by the move patterns. */
4984 mips_use_ins_ext_p (rtx op, rtx size, rtx position)
4986 HOST_WIDE_INT len, pos;
4988 if (!ISA_HAS_EXT_INS
4989 || !register_operand (op, VOIDmode)
4990 || GET_MODE_BITSIZE (GET_MODE (op)) > BITS_PER_WORD)
4993 len = INTVAL (size);
4994 pos = INTVAL (position);
4996 if (len <= 0 || len >= GET_MODE_BITSIZE (GET_MODE (op))
4997 || pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (op)))
5003 /* Set up globals to generate code for the ISA or processor
5004 described by INFO. */
5007 mips_set_architecture (const struct mips_cpu_info *info)
5011 mips_arch_info = info;
5012 mips_arch = info->cpu;
5013 mips_isa = info->isa;
5018 /* Likewise for tuning. */
5021 mips_set_tune (const struct mips_cpu_info *info)
5025 mips_tune_info = info;
5026 mips_tune = info->cpu;
5030 /* Implement TARGET_HANDLE_OPTION. */
5033 mips_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
5038 if (strcmp (arg, "32") == 0)
5040 else if (strcmp (arg, "o64") == 0)
5042 else if (strcmp (arg, "n32") == 0)
5044 else if (strcmp (arg, "64") == 0)
5046 else if (strcmp (arg, "eabi") == 0)
5047 mips_abi = ABI_EABI;
5054 return mips_parse_cpu (arg) != 0;
5057 mips_isa_info = mips_parse_cpu (ACONCAT (("mips", arg, NULL)));
5058 return mips_isa_info != 0;
5060 case OPT_mno_flush_func:
5061 mips_cache_flush_func = NULL;
5064 case OPT_mcode_readable_:
5065 if (strcmp (arg, "yes") == 0)
5066 mips_code_readable = CODE_READABLE_YES;
5067 else if (strcmp (arg, "pcrel") == 0)
5068 mips_code_readable = CODE_READABLE_PCREL;
5069 else if (strcmp (arg, "no") == 0)
5070 mips_code_readable = CODE_READABLE_NO;
5080 /* Set up the threshold for data to go into the small data area, instead
5081 of the normal data area, and detect any conflicts in the switches. */
5084 override_options (void)
5086 int i, start, regno;
5087 enum machine_mode mode;
5089 #ifdef SUBTARGET_OVERRIDE_OPTIONS
5090 SUBTARGET_OVERRIDE_OPTIONS;
5093 mips_section_threshold = g_switch_set ? g_switch_value : MIPS_DEFAULT_GVALUE;
5095 /* The following code determines the architecture and register size.
5096 Similar code was added to GAS 2.14 (see tc-mips.c:md_after_parse_args()).
5097 The GAS and GCC code should be kept in sync as much as possible. */
5099 if (mips_arch_string != 0)
5100 mips_set_architecture (mips_parse_cpu (mips_arch_string));
5102 if (mips_isa_info != 0)
5104 if (mips_arch_info == 0)
5105 mips_set_architecture (mips_isa_info);
5106 else if (mips_arch_info->isa != mips_isa_info->isa)
5107 error ("-%s conflicts with the other architecture options, "
5108 "which specify a %s processor",
5109 mips_isa_info->name,
5110 mips_cpu_info_from_isa (mips_arch_info->isa)->name);
5113 if (mips_arch_info == 0)
5115 #ifdef MIPS_CPU_STRING_DEFAULT
5116 mips_set_architecture (mips_parse_cpu (MIPS_CPU_STRING_DEFAULT));
5118 mips_set_architecture (mips_cpu_info_from_isa (MIPS_ISA_DEFAULT));
5122 if (ABI_NEEDS_64BIT_REGS && !ISA_HAS_64BIT_REGS)
5123 error ("-march=%s is not compatible with the selected ABI",
5124 mips_arch_info->name);
5126 /* Optimize for mips_arch, unless -mtune selects a different processor. */
5127 if (mips_tune_string != 0)
5128 mips_set_tune (mips_parse_cpu (mips_tune_string));
5130 if (mips_tune_info == 0)
5131 mips_set_tune (mips_arch_info);
5133 /* Set cost structure for the processor. */
5135 mips_cost = &mips_rtx_cost_optimize_size;
5137 mips_cost = &mips_rtx_cost_data[mips_tune];
5139 /* If the user hasn't specified a branch cost, use the processor's
5141 if (mips_branch_cost == 0)
5142 mips_branch_cost = mips_cost->branch_cost;
5144 if ((target_flags_explicit & MASK_64BIT) != 0)
5146 /* The user specified the size of the integer registers. Make sure
5147 it agrees with the ABI and ISA. */
5148 if (TARGET_64BIT && !ISA_HAS_64BIT_REGS)
5149 error ("-mgp64 used with a 32-bit processor");
5150 else if (!TARGET_64BIT && ABI_NEEDS_64BIT_REGS)
5151 error ("-mgp32 used with a 64-bit ABI");
5152 else if (TARGET_64BIT && ABI_NEEDS_32BIT_REGS)
5153 error ("-mgp64 used with a 32-bit ABI");
5157 /* Infer the integer register size from the ABI and processor.
5158 Restrict ourselves to 32-bit registers if that's all the
5159 processor has, or if the ABI cannot handle 64-bit registers. */
5160 if (ABI_NEEDS_32BIT_REGS || !ISA_HAS_64BIT_REGS)
5161 target_flags &= ~MASK_64BIT;
5163 target_flags |= MASK_64BIT;
5166 if ((target_flags_explicit & MASK_FLOAT64) != 0)
5168 /* Really, -mfp32 and -mfp64 are ornamental options. There's
5169 only one right answer here. */
5170 if (TARGET_64BIT && TARGET_DOUBLE_FLOAT && !TARGET_FLOAT64)
5171 error ("unsupported combination: %s", "-mgp64 -mfp32 -mdouble-float");
5172 else if (!TARGET_64BIT && TARGET_FLOAT64
5173 && !(ISA_HAS_MXHC1 && mips_abi == ABI_32))
5174 error ("-mgp32 and -mfp64 can only be combined if the target"
5175 " supports the mfhc1 and mthc1 instructions");
5176 else if (TARGET_SINGLE_FLOAT && TARGET_FLOAT64)
5177 error ("unsupported combination: %s", "-mfp64 -msingle-float");
5181 /* -msingle-float selects 32-bit float registers. Otherwise the
5182 float registers should be the same size as the integer ones. */
5183 if (TARGET_64BIT && TARGET_DOUBLE_FLOAT)
5184 target_flags |= MASK_FLOAT64;
5186 target_flags &= ~MASK_FLOAT64;
5189 /* End of code shared with GAS. */
5191 if ((target_flags_explicit & MASK_LONG64) == 0)
5193 if ((mips_abi == ABI_EABI && TARGET_64BIT) || mips_abi == ABI_64)
5194 target_flags |= MASK_LONG64;
5196 target_flags &= ~MASK_LONG64;
5199 if (MIPS_MARCH_CONTROLS_SOFT_FLOAT
5200 && (target_flags_explicit & MASK_SOFT_FLOAT_ABI) == 0)
5202 /* For some configurations, it is useful to have -march control
5203 the default setting of MASK_SOFT_FLOAT_ABI. */
5204 switch ((int) mips_arch)
5206 case PROCESSOR_R4100:
5207 case PROCESSOR_R4111:
5208 case PROCESSOR_R4120:
5209 case PROCESSOR_R4130:
5210 target_flags |= MASK_SOFT_FLOAT_ABI;
5214 target_flags &= ~MASK_SOFT_FLOAT_ABI;
5220 flag_pcc_struct_return = 0;
5222 if ((target_flags_explicit & MASK_BRANCHLIKELY) == 0)
5224 /* If neither -mbranch-likely nor -mno-branch-likely was given
5225 on the command line, set MASK_BRANCHLIKELY based on the target
5228 By default, we enable use of Branch Likely instructions on
5229 all architectures which support them with the following
5230 exceptions: when creating MIPS32 or MIPS64 code, and when
5231 tuning for architectures where their use tends to hurt
5234 The MIPS32 and MIPS64 architecture specifications say "Software
5235 is strongly encouraged to avoid use of Branch Likely
5236 instructions, as they will be removed from a future revision
5237 of the [MIPS32 and MIPS64] architecture." Therefore, we do not
5238 issue those instructions unless instructed to do so by
5240 if (ISA_HAS_BRANCHLIKELY
5241 && !(ISA_MIPS32 || ISA_MIPS32R2 || ISA_MIPS64)
5242 && !(TUNE_MIPS5500 || TUNE_SB1))
5243 target_flags |= MASK_BRANCHLIKELY;
5245 target_flags &= ~MASK_BRANCHLIKELY;
5247 if (TARGET_BRANCHLIKELY && !ISA_HAS_BRANCHLIKELY)
5248 warning (0, "generation of Branch Likely instructions enabled, but not supported by architecture");
5250 /* The effect of -mabicalls isn't defined for the EABI. */
5251 if (mips_abi == ABI_EABI && TARGET_ABICALLS)
5253 error ("unsupported combination: %s", "-mabicalls -mabi=eabi");
5254 target_flags &= ~MASK_ABICALLS;
5257 if (TARGET_ABICALLS)
5259 /* We need to set flag_pic for executables as well as DSOs
5260 because we may reference symbols that are not defined in
5261 the final executable. (MIPS does not use things like
5262 copy relocs, for example.)
5264 Also, there is a body of code that uses __PIC__ to distinguish
5265 between -mabicalls and -mno-abicalls code. */
5267 if (mips_section_threshold > 0)
5268 warning (0, "%<-G%> is incompatible with %<-mabicalls%>");
5271 if (TARGET_VXWORKS_RTP && mips_section_threshold > 0)
5272 warning (0, "-G and -mrtp are incompatible");
5274 /* mips_split_addresses is a half-way house between explicit
5275 relocations and the traditional assembler macros. It can
5276 split absolute 32-bit symbolic constants into a high/lo_sum
5277 pair but uses macros for other sorts of access.
5279 Like explicit relocation support for REL targets, it relies
5280 on GNU extensions in the assembler and the linker.
5282 Although this code should work for -O0, it has traditionally
5283 been treated as an optimization. */
5284 if (!TARGET_MIPS16 && TARGET_SPLIT_ADDRESSES
5285 && optimize && !flag_pic
5286 && !ABI_HAS_64BIT_SYMBOLS)
5287 mips_split_addresses = 1;
5289 mips_split_addresses = 0;
5291 /* -mvr4130-align is a "speed over size" optimization: it usually produces
5292 faster code, but at the expense of more nops. Enable it at -O3 and
5294 if (optimize > 2 && (target_flags_explicit & MASK_VR4130_ALIGN) == 0)
5295 target_flags |= MASK_VR4130_ALIGN;
5299 /* Don't run the scheduler before reload, since it tends to
5300 increase register pressure. */
5301 flag_schedule_insns = 0;
5303 /* Don't do hot/cold partitioning. The constant layout code expects
5304 the whole function to be in a single section. */
5305 flag_reorder_blocks_and_partition = 0;
5307 /* Silently disable -mexplicit-relocs since it doesn't apply
5308 to mips16 code. Even so, it would overly pedantic to warn
5309 about "-mips16 -mexplicit-relocs", especially given that
5310 we use a %gprel() operator. */
5311 target_flags &= ~MASK_EXPLICIT_RELOCS;
5314 /* When using explicit relocs, we call dbr_schedule from within
5316 if (TARGET_EXPLICIT_RELOCS)
5318 mips_flag_delayed_branch = flag_delayed_branch;
5319 flag_delayed_branch = 0;
5322 #ifdef MIPS_TFMODE_FORMAT
5323 REAL_MODE_FORMAT (TFmode) = &MIPS_TFMODE_FORMAT;
5326 /* Make sure that the user didn't turn off paired single support when
5327 MIPS-3D support is requested. */
5328 if (TARGET_MIPS3D && (target_flags_explicit & MASK_PAIRED_SINGLE_FLOAT)
5329 && !TARGET_PAIRED_SINGLE_FLOAT)
5330 error ("-mips3d requires -mpaired-single");
5332 /* If TARGET_MIPS3D, enable MASK_PAIRED_SINGLE_FLOAT. */
5334 target_flags |= MASK_PAIRED_SINGLE_FLOAT;
5336 /* Make sure that when TARGET_PAIRED_SINGLE_FLOAT is true, TARGET_FLOAT64
5337 and TARGET_HARD_FLOAT are both true. */
5338 if (TARGET_PAIRED_SINGLE_FLOAT && !(TARGET_FLOAT64 && TARGET_HARD_FLOAT))
5339 error ("-mips3d/-mpaired-single must be used with -mfp64 -mhard-float");
5341 /* Make sure that the ISA supports TARGET_PAIRED_SINGLE_FLOAT when it is
5343 if (TARGET_PAIRED_SINGLE_FLOAT && !ISA_MIPS64)
5344 error ("-mips3d/-mpaired-single must be used with -mips64");
5346 /* If TARGET_DSPR2, enable MASK_DSP. */
5348 target_flags |= MASK_DSP;
5350 if (TARGET_MIPS16 && TARGET_DSP)
5351 error ("-mips16 and -mdsp cannot be used together");
5353 mips_print_operand_punct['?'] = 1;
5354 mips_print_operand_punct['#'] = 1;
5355 mips_print_operand_punct['/'] = 1;
5356 mips_print_operand_punct['&'] = 1;
5357 mips_print_operand_punct['!'] = 1;
5358 mips_print_operand_punct['*'] = 1;
5359 mips_print_operand_punct['@'] = 1;
5360 mips_print_operand_punct['.'] = 1;
5361 mips_print_operand_punct['('] = 1;
5362 mips_print_operand_punct[')'] = 1;
5363 mips_print_operand_punct['['] = 1;
5364 mips_print_operand_punct[']'] = 1;
5365 mips_print_operand_punct['<'] = 1;
5366 mips_print_operand_punct['>'] = 1;
5367 mips_print_operand_punct['{'] = 1;
5368 mips_print_operand_punct['}'] = 1;
5369 mips_print_operand_punct['^'] = 1;
5370 mips_print_operand_punct['$'] = 1;
5371 mips_print_operand_punct['+'] = 1;
5372 mips_print_operand_punct['~'] = 1;
5374 /* Set up array to map GCC register number to debug register number.
5375 Ignore the special purpose register numbers. */
5377 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5379 mips_dbx_regno[i] = INVALID_REGNUM;
5380 if (GP_REG_P (i) || FP_REG_P (i) || ALL_COP_REG_P (i))
5381 mips_dwarf_regno[i] = i;
5383 mips_dwarf_regno[i] = INVALID_REGNUM;
5386 start = GP_DBX_FIRST - GP_REG_FIRST;
5387 for (i = GP_REG_FIRST; i <= GP_REG_LAST; i++)
5388 mips_dbx_regno[i] = i + start;
5390 start = FP_DBX_FIRST - FP_REG_FIRST;
5391 for (i = FP_REG_FIRST; i <= FP_REG_LAST; i++)
5392 mips_dbx_regno[i] = i + start;
5394 /* HI and LO debug registers use big-endian ordering. */
5395 mips_dbx_regno[HI_REGNUM] = MD_DBX_FIRST + 0;
5396 mips_dbx_regno[LO_REGNUM] = MD_DBX_FIRST + 1;
5397 mips_dwarf_regno[HI_REGNUM] = MD_REG_FIRST + 0;
5398 mips_dwarf_regno[LO_REGNUM] = MD_REG_FIRST + 1;
5399 for (i = DSP_ACC_REG_FIRST; i <= DSP_ACC_REG_LAST; i += 2)
5401 mips_dwarf_regno[i + TARGET_LITTLE_ENDIAN] = i;
5402 mips_dwarf_regno[i + TARGET_BIG_ENDIAN] = i + 1;
5405 /* Set up array giving whether a given register can hold a given mode. */
5407 for (mode = VOIDmode;
5408 mode != MAX_MACHINE_MODE;
5409 mode = (enum machine_mode) ((int)mode + 1))
5411 register int size = GET_MODE_SIZE (mode);
5412 register enum mode_class class = GET_MODE_CLASS (mode);
5414 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5418 if (mode == CCV2mode)
5421 && (regno - ST_REG_FIRST) % 2 == 0);
5423 else if (mode == CCV4mode)
5426 && (regno - ST_REG_FIRST) % 4 == 0);
5428 else if (mode == CCmode)
5431 temp = (regno == FPSW_REGNUM);
5433 temp = (ST_REG_P (regno) || GP_REG_P (regno)
5434 || FP_REG_P (regno));
5437 else if (GP_REG_P (regno))
5438 temp = ((regno & 1) == 0 || size <= UNITS_PER_WORD);
5440 else if (FP_REG_P (regno))
5441 temp = ((((regno % MAX_FPRS_PER_FMT) == 0)
5442 || (MIN_FPRS_PER_FMT == 1
5443 && size <= UNITS_PER_FPREG))
5444 && (((class == MODE_FLOAT || class == MODE_COMPLEX_FLOAT
5445 || class == MODE_VECTOR_FLOAT)
5446 && size <= UNITS_PER_FPVALUE)
5447 /* Allow integer modes that fit into a single
5448 register. We need to put integers into FPRs
5449 when using instructions like cvt and trunc.
5450 We can't allow sizes smaller than a word,
5451 the FPU has no appropriate load/store
5452 instructions for those. */
5453 || (class == MODE_INT
5454 && size >= MIN_UNITS_PER_WORD
5455 && size <= UNITS_PER_FPREG)
5456 /* Allow TFmode for CCmode reloads. */
5457 || (ISA_HAS_8CC && mode == TFmode)));
5459 else if (ACC_REG_P (regno))
5460 temp = (INTEGRAL_MODE_P (mode)
5461 && size <= UNITS_PER_WORD * 2
5462 && (size <= UNITS_PER_WORD
5463 || regno == MD_REG_FIRST
5464 || (DSP_ACC_REG_P (regno)
5465 && ((regno - DSP_ACC_REG_FIRST) & 1) == 0)));
5467 else if (ALL_COP_REG_P (regno))
5468 temp = (class == MODE_INT && size <= UNITS_PER_WORD);
5472 mips_hard_regno_mode_ok[(int)mode][regno] = temp;
5476 /* Save GPR registers in word_mode sized hunks. word_mode hasn't been
5477 initialized yet, so we can't use that here. */
5478 gpr_mode = TARGET_64BIT ? DImode : SImode;
5480 /* Provide default values for align_* for 64-bit targets. */
5481 if (TARGET_64BIT && !TARGET_MIPS16)
5483 if (align_loops == 0)
5485 if (align_jumps == 0)
5487 if (align_functions == 0)
5488 align_functions = 8;
5491 /* Function to allocate machine-dependent function status. */
5492 init_machine_status = &mips_init_machine_status;
5494 if (ABI_HAS_64BIT_SYMBOLS)
5496 if (TARGET_EXPLICIT_RELOCS)
5498 mips_split_p[SYMBOL_64_HIGH] = true;
5499 mips_hi_relocs[SYMBOL_64_HIGH] = "%highest(";
5500 mips_lo_relocs[SYMBOL_64_HIGH] = "%higher(";
5502 mips_split_p[SYMBOL_64_MID] = true;
5503 mips_hi_relocs[SYMBOL_64_MID] = "%higher(";
5504 mips_lo_relocs[SYMBOL_64_MID] = "%hi(";
5506 mips_split_p[SYMBOL_64_LOW] = true;
5507 mips_hi_relocs[SYMBOL_64_LOW] = "%hi(";
5508 mips_lo_relocs[SYMBOL_64_LOW] = "%lo(";
5510 mips_split_p[SYMBOL_ABSOLUTE] = true;
5511 mips_lo_relocs[SYMBOL_ABSOLUTE] = "%lo(";
5516 if (TARGET_EXPLICIT_RELOCS || mips_split_addresses || TARGET_MIPS16)
5518 mips_split_p[SYMBOL_ABSOLUTE] = true;
5519 mips_hi_relocs[SYMBOL_ABSOLUTE] = "%hi(";
5520 mips_lo_relocs[SYMBOL_ABSOLUTE] = "%lo(";
5522 mips_lo_relocs[SYMBOL_32_HIGH] = "%hi(";
5528 /* The high part is provided by a pseudo copy of $gp. */
5529 mips_split_p[SYMBOL_GP_RELATIVE] = true;
5530 mips_lo_relocs[SYMBOL_GP_RELATIVE] = "%gprel(";
5533 if (TARGET_EXPLICIT_RELOCS)
5535 /* Small data constants are kept whole until after reload,
5536 then lowered by mips_rewrite_small_data. */
5537 mips_lo_relocs[SYMBOL_GP_RELATIVE] = "%gp_rel(";
5539 mips_split_p[SYMBOL_GOT_PAGE_OFST] = true;
5542 mips_lo_relocs[SYMBOL_GOTOFF_PAGE] = "%got_page(";
5543 mips_lo_relocs[SYMBOL_GOT_PAGE_OFST] = "%got_ofst(";
5547 mips_lo_relocs[SYMBOL_GOTOFF_PAGE] = "%got(";
5548 mips_lo_relocs[SYMBOL_GOT_PAGE_OFST] = "%lo(";
5553 /* The HIGH and LO_SUM are matched by special .md patterns. */
5554 mips_split_p[SYMBOL_GOT_DISP] = true;
5556 mips_split_p[SYMBOL_GOTOFF_DISP] = true;
5557 mips_hi_relocs[SYMBOL_GOTOFF_DISP] = "%got_hi(";
5558 mips_lo_relocs[SYMBOL_GOTOFF_DISP] = "%got_lo(";
5560 mips_split_p[SYMBOL_GOTOFF_CALL] = true;
5561 mips_hi_relocs[SYMBOL_GOTOFF_CALL] = "%call_hi(";
5562 mips_lo_relocs[SYMBOL_GOTOFF_CALL] = "%call_lo(";
5567 mips_lo_relocs[SYMBOL_GOTOFF_DISP] = "%got_disp(";
5569 mips_lo_relocs[SYMBOL_GOTOFF_DISP] = "%got(";
5570 mips_lo_relocs[SYMBOL_GOTOFF_CALL] = "%call16(";
5576 mips_split_p[SYMBOL_GOTOFF_LOADGP] = true;
5577 mips_hi_relocs[SYMBOL_GOTOFF_LOADGP] = "%hi(%neg(%gp_rel(";
5578 mips_lo_relocs[SYMBOL_GOTOFF_LOADGP] = "%lo(%neg(%gp_rel(";
5581 /* Thread-local relocation operators. */
5582 mips_lo_relocs[SYMBOL_TLSGD] = "%tlsgd(";
5583 mips_lo_relocs[SYMBOL_TLSLDM] = "%tlsldm(";
5584 mips_split_p[SYMBOL_DTPREL] = 1;
5585 mips_hi_relocs[SYMBOL_DTPREL] = "%dtprel_hi(";
5586 mips_lo_relocs[SYMBOL_DTPREL] = "%dtprel_lo(";
5587 mips_lo_relocs[SYMBOL_GOTTPREL] = "%gottprel(";
5588 mips_split_p[SYMBOL_TPREL] = 1;
5589 mips_hi_relocs[SYMBOL_TPREL] = "%tprel_hi(";
5590 mips_lo_relocs[SYMBOL_TPREL] = "%tprel_lo(";
5592 mips_lo_relocs[SYMBOL_HALF] = "%half(";
5594 /* We don't have a thread pointer access instruction on MIPS16, or
5595 appropriate TLS relocations. */
5597 targetm.have_tls = false;
5599 /* Default to working around R4000 errata only if the processor
5600 was selected explicitly. */
5601 if ((target_flags_explicit & MASK_FIX_R4000) == 0
5602 && mips_matching_cpu_name_p (mips_arch_info->name, "r4000"))
5603 target_flags |= MASK_FIX_R4000;
5605 /* Default to working around R4400 errata only if the processor
5606 was selected explicitly. */
5607 if ((target_flags_explicit & MASK_FIX_R4400) == 0
5608 && mips_matching_cpu_name_p (mips_arch_info->name, "r4400"))
5609 target_flags |= MASK_FIX_R4400;
5612 /* Swap the register information for registers I and I + 1, which
5613 currently have the wrong endianness. Note that the registers'
5614 fixedness and call-clobberedness might have been set on the
5618 mips_swap_registers (unsigned int i)
5623 #define SWAP_INT(X, Y) (tmpi = (X), (X) = (Y), (Y) = tmpi)
5624 #define SWAP_STRING(X, Y) (tmps = (X), (X) = (Y), (Y) = tmps)
5626 SWAP_INT (fixed_regs[i], fixed_regs[i + 1]);
5627 SWAP_INT (call_used_regs[i], call_used_regs[i + 1]);
5628 SWAP_INT (call_really_used_regs[i], call_really_used_regs[i + 1]);
5629 SWAP_STRING (reg_names[i], reg_names[i + 1]);
5635 /* Implement CONDITIONAL_REGISTER_USAGE. */
5638 mips_conditional_register_usage (void)
5644 for (regno = DSP_ACC_REG_FIRST; regno <= DSP_ACC_REG_LAST; regno++)
5645 fixed_regs[regno] = call_used_regs[regno] = 1;
5647 if (!TARGET_HARD_FLOAT)
5651 for (regno = FP_REG_FIRST; regno <= FP_REG_LAST; regno++)
5652 fixed_regs[regno] = call_used_regs[regno] = 1;
5653 for (regno = ST_REG_FIRST; regno <= ST_REG_LAST; regno++)
5654 fixed_regs[regno] = call_used_regs[regno] = 1;
5656 else if (! ISA_HAS_8CC)
5660 /* We only have a single condition code register. We
5661 implement this by hiding all the condition code registers,
5662 and generating RTL that refers directly to ST_REG_FIRST. */
5663 for (regno = ST_REG_FIRST; regno <= ST_REG_LAST; regno++)
5664 fixed_regs[regno] = call_used_regs[regno] = 1;
5666 /* In mips16 mode, we permit the $t temporary registers to be used
5667 for reload. We prohibit the unused $s registers, since they
5668 are caller saved, and saving them via a mips16 register would
5669 probably waste more time than just reloading the value. */
5672 fixed_regs[18] = call_used_regs[18] = 1;
5673 fixed_regs[19] = call_used_regs[19] = 1;
5674 fixed_regs[20] = call_used_regs[20] = 1;
5675 fixed_regs[21] = call_used_regs[21] = 1;
5676 fixed_regs[22] = call_used_regs[22] = 1;
5677 fixed_regs[23] = call_used_regs[23] = 1;
5678 fixed_regs[26] = call_used_regs[26] = 1;
5679 fixed_regs[27] = call_used_regs[27] = 1;
5680 fixed_regs[30] = call_used_regs[30] = 1;
5682 /* fp20-23 are now caller saved. */
5683 if (mips_abi == ABI_64)
5686 for (regno = FP_REG_FIRST + 20; regno < FP_REG_FIRST + 24; regno++)
5687 call_really_used_regs[regno] = call_used_regs[regno] = 1;
5689 /* Odd registers from fp21 to fp31 are now caller saved. */
5690 if (mips_abi == ABI_N32)
5693 for (regno = FP_REG_FIRST + 21; regno <= FP_REG_FIRST + 31; regno+=2)
5694 call_really_used_regs[regno] = call_used_regs[regno] = 1;
5696 /* Make sure that double-register accumulator values are correctly
5697 ordered for the current endianness. */
5698 if (TARGET_LITTLE_ENDIAN)
5701 mips_swap_registers (MD_REG_FIRST);
5702 for (regno = DSP_ACC_REG_FIRST; regno <= DSP_ACC_REG_LAST; regno += 2)
5703 mips_swap_registers (regno);
5707 /* Allocate a chunk of memory for per-function machine-dependent data. */
5708 static struct machine_function *
5709 mips_init_machine_status (void)
5711 return ((struct machine_function *)
5712 ggc_alloc_cleared (sizeof (struct machine_function)));
5715 /* On the mips16, we want to allocate $24 (T_REG) before other
5716 registers for instructions for which it is possible. This helps
5717 avoid shuffling registers around in order to set up for an xor,
5718 encouraging the compiler to use a cmp instead. */
5721 mips_order_regs_for_local_alloc (void)
5725 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5726 reg_alloc_order[i] = i;
5730 /* It really doesn't matter where we put register 0, since it is
5731 a fixed register anyhow. */
5732 reg_alloc_order[0] = 24;
5733 reg_alloc_order[24] = 0;
5738 /* The MIPS debug format wants all automatic variables and arguments
5739 to be in terms of the virtual frame pointer (stack pointer before
5740 any adjustment in the function), while the MIPS 3.0 linker wants
5741 the frame pointer to be the stack pointer after the initial
5742 adjustment. So, we do the adjustment here. The arg pointer (which
5743 is eliminated) points to the virtual frame pointer, while the frame
5744 pointer (which may be eliminated) points to the stack pointer after
5745 the initial adjustments. */
5748 mips_debugger_offset (rtx addr, HOST_WIDE_INT offset)
5750 rtx offset2 = const0_rtx;
5751 rtx reg = eliminate_constant_term (addr, &offset2);
5754 offset = INTVAL (offset2);
5756 if (reg == stack_pointer_rtx || reg == frame_pointer_rtx
5757 || reg == hard_frame_pointer_rtx)
5759 HOST_WIDE_INT frame_size = (!cfun->machine->frame.initialized)
5760 ? compute_frame_size (get_frame_size ())
5761 : cfun->machine->frame.total_size;
5763 /* MIPS16 frame is smaller */
5764 if (frame_pointer_needed && TARGET_MIPS16)
5765 frame_size -= cfun->machine->frame.args_size;
5767 offset = offset - frame_size;
5770 /* sdbout_parms does not want this to crash for unrecognized cases. */
5772 else if (reg != arg_pointer_rtx)
5773 fatal_insn ("mips_debugger_offset called with non stack/frame/arg pointer",
5780 /* If OP is an UNSPEC address, return the address to which it refers,
5781 otherwise return OP itself. */
5784 mips_strip_unspec_address (rtx op)
5788 split_const (op, &base, &offset);
5789 if (UNSPEC_ADDRESS_P (base))
5790 op = plus_constant (UNSPEC_ADDRESS (base), INTVAL (offset));
5794 /* Implement the PRINT_OPERAND macro. The MIPS-specific operand codes are:
5796 'X' OP is CONST_INT, prints 32 bits in hexadecimal format = "0x%08x",
5797 'x' OP is CONST_INT, prints 16 bits in hexadecimal format = "0x%04x",
5798 'h' OP is HIGH, prints %hi(X),
5799 'd' output integer constant in decimal,
5800 'z' if the operand is 0, use $0 instead of normal operand.
5801 'D' print second part of double-word register or memory operand.
5802 'L' print low-order register of double-word register operand.
5803 'M' print high-order register of double-word register operand.
5804 'C' print part of opcode for a branch condition.
5805 'F' print part of opcode for a floating-point branch condition.
5806 'N' print part of opcode for a branch condition, inverted.
5807 'W' print part of opcode for a floating-point branch condition, inverted.
5808 'T' print 'f' for (eq:CC ...), 't' for (ne:CC ...),
5809 'z' for (eq:?I ...), 'n' for (ne:?I ...).
5810 't' like 'T', but with the EQ/NE cases reversed
5811 'Y' for a CONST_INT X, print mips_fp_conditions[X]
5812 'Z' print the operand and a comma for ISA_HAS_8CC, otherwise print nothing
5813 'R' print the reloc associated with LO_SUM
5814 'q' print DSP accumulator registers
5816 The punctuation characters are:
5818 '(' Turn on .set noreorder
5819 ')' Turn on .set reorder
5820 '[' Turn on .set noat
5822 '<' Turn on .set nomacro
5823 '>' Turn on .set macro
5824 '{' Turn on .set volatile (not GAS)
5825 '}' Turn on .set novolatile (not GAS)
5826 '&' Turn on .set noreorder if filling delay slots
5827 '*' Turn on both .set noreorder and .set nomacro if filling delay slots
5828 '!' Turn on .set nomacro if filling delay slots
5829 '#' Print nop if in a .set noreorder section.
5830 '/' Like '#', but does nothing within a delayed branch sequence
5831 '?' Print 'l' if we are to use a branch likely instead of normal branch.
5832 '@' Print the name of the assembler temporary register (at or $1).
5833 '.' Print the name of the register with a hard-wired zero (zero or $0).
5834 '^' Print the name of the pic call-through register (t9 or $25).
5835 '$' Print the name of the stack pointer register (sp or $29).
5836 '+' Print the name of the gp register (usually gp or $28).
5837 '~' Output a branch alignment to LABEL_ALIGN(NULL). */
5840 print_operand (FILE *file, rtx op, int letter)
5842 register enum rtx_code code;
5844 if (PRINT_OPERAND_PUNCT_VALID_P (letter))
5849 if (mips_branch_likely)
5854 fputs (reg_names [GP_REG_FIRST + 1], file);
5858 fputs (reg_names [PIC_FUNCTION_ADDR_REGNUM], file);
5862 fputs (reg_names [GP_REG_FIRST + 0], file);
5866 fputs (reg_names[STACK_POINTER_REGNUM], file);
5870 fputs (reg_names[PIC_OFFSET_TABLE_REGNUM], file);
5874 if (final_sequence != 0 && set_noreorder++ == 0)
5875 fputs (".set\tnoreorder\n\t", file);
5879 if (final_sequence != 0)
5881 if (set_noreorder++ == 0)
5882 fputs (".set\tnoreorder\n\t", file);
5884 if (set_nomacro++ == 0)
5885 fputs (".set\tnomacro\n\t", file);
5890 if (final_sequence != 0 && set_nomacro++ == 0)
5891 fputs ("\n\t.set\tnomacro", file);
5895 if (set_noreorder != 0)
5896 fputs ("\n\tnop", file);
5900 /* Print an extra newline so that the delayed insn is separated
5901 from the following ones. This looks neater and is consistent
5902 with non-nop delayed sequences. */
5903 if (set_noreorder != 0 && final_sequence == 0)
5904 fputs ("\n\tnop\n", file);
5908 if (set_noreorder++ == 0)
5909 fputs (".set\tnoreorder\n\t", file);
5913 if (set_noreorder == 0)
5914 error ("internal error: %%) found without a %%( in assembler pattern");
5916 else if (--set_noreorder == 0)
5917 fputs ("\n\t.set\treorder", file);
5922 if (set_noat++ == 0)
5923 fputs (".set\tnoat\n\t", file);
5928 error ("internal error: %%] found without a %%[ in assembler pattern");
5929 else if (--set_noat == 0)
5930 fputs ("\n\t.set\tat", file);
5935 if (set_nomacro++ == 0)
5936 fputs (".set\tnomacro\n\t", file);
5940 if (set_nomacro == 0)
5941 error ("internal error: %%> found without a %%< in assembler pattern");
5942 else if (--set_nomacro == 0)
5943 fputs ("\n\t.set\tmacro", file);
5948 if (set_volatile++ == 0)
5949 fputs ("#.set\tvolatile\n\t", file);
5953 if (set_volatile == 0)
5954 error ("internal error: %%} found without a %%{ in assembler pattern");
5955 else if (--set_volatile == 0)
5956 fputs ("\n\t#.set\tnovolatile", file);
5962 if (align_labels_log > 0)
5963 ASM_OUTPUT_ALIGN (file, align_labels_log);
5968 error ("PRINT_OPERAND: unknown punctuation '%c'", letter);
5977 error ("PRINT_OPERAND null pointer");
5981 code = GET_CODE (op);
5986 case EQ: fputs ("eq", file); break;
5987 case NE: fputs ("ne", file); break;
5988 case GT: fputs ("gt", file); break;
5989 case GE: fputs ("ge", file); break;
5990 case LT: fputs ("lt", file); break;
5991 case LE: fputs ("le", file); break;
5992 case GTU: fputs ("gtu", file); break;
5993 case GEU: fputs ("geu", file); break;
5994 case LTU: fputs ("ltu", file); break;
5995 case LEU: fputs ("leu", file); break;
5997 fatal_insn ("PRINT_OPERAND, invalid insn for %%C", op);
6000 else if (letter == 'N')
6003 case EQ: fputs ("ne", file); break;
6004 case NE: fputs ("eq", file); break;
6005 case GT: fputs ("le", file); break;
6006 case GE: fputs ("lt", file); break;
6007 case LT: fputs ("ge", file); break;
6008 case LE: fputs ("gt", file); break;
6009 case GTU: fputs ("leu", file); break;
6010 case GEU: fputs ("ltu", file); break;
6011 case LTU: fputs ("geu", file); break;
6012 case LEU: fputs ("gtu", file); break;
6014 fatal_insn ("PRINT_OPERAND, invalid insn for %%N", op);
6017 else if (letter == 'F')
6020 case EQ: fputs ("c1f", file); break;
6021 case NE: fputs ("c1t", file); break;
6023 fatal_insn ("PRINT_OPERAND, invalid insn for %%F", op);
6026 else if (letter == 'W')
6029 case EQ: fputs ("c1t", file); break;
6030 case NE: fputs ("c1f", file); break;
6032 fatal_insn ("PRINT_OPERAND, invalid insn for %%W", op);
6035 else if (letter == 'h')
6037 if (GET_CODE (op) == HIGH)
6040 print_operand_reloc (file, op, SYMBOL_CONTEXT_LEA, mips_hi_relocs);
6043 else if (letter == 'R')
6044 print_operand_reloc (file, op, SYMBOL_CONTEXT_LEA, mips_lo_relocs);
6046 else if (letter == 'Y')
6048 if (GET_CODE (op) == CONST_INT
6049 && ((unsigned HOST_WIDE_INT) INTVAL (op)
6050 < ARRAY_SIZE (mips_fp_conditions)))
6051 fputs (mips_fp_conditions[INTVAL (op)], file);
6053 output_operand_lossage ("invalid %%Y value");
6056 else if (letter == 'Z')
6060 print_operand (file, op, 0);
6065 else if (letter == 'q')
6070 fatal_insn ("PRINT_OPERAND, invalid insn for %%q", op);
6072 regnum = REGNO (op);
6073 if (MD_REG_P (regnum))
6074 fprintf (file, "$ac0");
6075 else if (DSP_ACC_REG_P (regnum))
6076 fprintf (file, "$ac%c", reg_names[regnum][3]);
6078 fatal_insn ("PRINT_OPERAND, invalid insn for %%q", op);
6081 else if (code == REG || code == SUBREG)
6083 register int regnum;
6086 regnum = REGNO (op);
6088 regnum = true_regnum (op);
6090 if ((letter == 'M' && ! WORDS_BIG_ENDIAN)
6091 || (letter == 'L' && WORDS_BIG_ENDIAN)
6095 fprintf (file, "%s", reg_names[regnum]);
6098 else if (code == MEM)
6101 output_address (plus_constant (XEXP (op, 0), 4));
6103 output_address (XEXP (op, 0));
6106 else if (letter == 'x' && GET_CODE (op) == CONST_INT)
6107 fprintf (file, HOST_WIDE_INT_PRINT_HEX, 0xffff & INTVAL(op));
6109 else if (letter == 'X' && GET_CODE(op) == CONST_INT)
6110 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (op));
6112 else if (letter == 'd' && GET_CODE(op) == CONST_INT)
6113 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (INTVAL(op)));
6115 else if (letter == 'z' && op == CONST0_RTX (GET_MODE (op)))
6116 fputs (reg_names[GP_REG_FIRST], file);
6118 else if (letter == 'd' || letter == 'x' || letter == 'X')
6119 output_operand_lossage ("invalid use of %%d, %%x, or %%X");
6121 else if (letter == 'T' || letter == 't')
6123 int truth = (code == NE) == (letter == 'T');
6124 fputc ("zfnt"[truth * 2 + (GET_MODE (op) == CCmode)], file);
6127 else if (CONST_GP_P (op))
6128 fputs (reg_names[GLOBAL_POINTER_REGNUM], file);
6131 output_addr_const (file, mips_strip_unspec_address (op));
6135 /* Print symbolic operand OP, which is part of a HIGH or LO_SUM
6136 in context CONTEXT. RELOCS is the array of relocations to use. */
6139 print_operand_reloc (FILE *file, rtx op, enum mips_symbol_context context,
6140 const char **relocs)
6142 enum mips_symbol_type symbol_type;
6145 if (!mips_symbolic_constant_p (op, context, &symbol_type)
6146 || relocs[symbol_type] == 0)
6147 fatal_insn ("PRINT_OPERAND, invalid operand for relocation", op);
6149 fputs (relocs[symbol_type], file);
6150 output_addr_const (file, mips_strip_unspec_address (op));
6151 for (p = relocs[symbol_type]; *p != 0; p++)
6156 /* Output address operand X to FILE. */
6159 print_operand_address (FILE *file, rtx x)
6161 struct mips_address_info addr;
6163 if (mips_classify_address (&addr, x, word_mode, true))
6167 print_operand (file, addr.offset, 0);
6168 fprintf (file, "(%s)", reg_names[REGNO (addr.reg)]);
6171 case ADDRESS_LO_SUM:
6172 print_operand_reloc (file, addr.offset, SYMBOL_CONTEXT_MEM,
6174 fprintf (file, "(%s)", reg_names[REGNO (addr.reg)]);
6177 case ADDRESS_CONST_INT:
6178 output_addr_const (file, x);
6179 fprintf (file, "(%s)", reg_names[0]);
6182 case ADDRESS_SYMBOLIC:
6183 output_addr_const (file, mips_strip_unspec_address (x));
6189 /* When using assembler macros, keep track of all of small-data externs
6190 so that mips_file_end can emit the appropriate declarations for them.
6192 In most cases it would be safe (though pointless) to emit .externs
6193 for other symbols too. One exception is when an object is within
6194 the -G limit but declared by the user to be in a section other
6195 than .sbss or .sdata. */
6198 mips_output_external (FILE *file, tree decl, const char *name)
6200 default_elf_asm_output_external (file, decl, name);
6202 /* We output the name if and only if TREE_SYMBOL_REFERENCED is
6203 set in order to avoid putting out names that are never really
6205 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
6207 if (!TARGET_EXPLICIT_RELOCS && mips_in_small_data_p (decl))
6209 fputs ("\t.extern\t", file);
6210 assemble_name (file, name);
6211 fprintf (file, ", " HOST_WIDE_INT_PRINT_DEC "\n",
6212 int_size_in_bytes (TREE_TYPE (decl)));
6214 else if (TARGET_IRIX
6215 && mips_abi == ABI_32
6216 && TREE_CODE (decl) == FUNCTION_DECL)
6218 /* In IRIX 5 or IRIX 6 for the O32 ABI, we must output a
6219 `.global name .text' directive for every used but
6220 undefined function. If we don't, the linker may perform
6221 an optimization (skipping over the insns that set $gp)
6222 when it is unsafe. */
6223 fputs ("\t.globl ", file);
6224 assemble_name (file, name);
6225 fputs (" .text\n", file);
6230 /* Emit a new filename to a stream. If we are smuggling stabs, try to
6231 put out a MIPS ECOFF file and a stab. */
6234 mips_output_filename (FILE *stream, const char *name)
6237 /* If we are emitting DWARF-2, let dwarf2out handle the ".file"
6239 if (write_symbols == DWARF2_DEBUG)
6241 else if (mips_output_filename_first_time)
6243 mips_output_filename_first_time = 0;
6244 num_source_filenames += 1;
6245 current_function_file = name;
6246 fprintf (stream, "\t.file\t%d ", num_source_filenames);
6247 output_quoted_string (stream, name);
6248 putc ('\n', stream);
6251 /* If we are emitting stabs, let dbxout.c handle this (except for
6252 the mips_output_filename_first_time case). */
6253 else if (write_symbols == DBX_DEBUG)
6256 else if (name != current_function_file
6257 && strcmp (name, current_function_file) != 0)
6259 num_source_filenames += 1;
6260 current_function_file = name;
6261 fprintf (stream, "\t.file\t%d ", num_source_filenames);
6262 output_quoted_string (stream, name);
6263 putc ('\n', stream);
6267 /* Output an ASCII string, in a space-saving way. PREFIX is the string
6268 that should be written before the opening quote, such as "\t.ascii\t"
6269 for real string data or "\t# " for a comment. */
6272 mips_output_ascii (FILE *stream, const char *string_param, size_t len,
6277 register const unsigned char *string =
6278 (const unsigned char *)string_param;
6280 fprintf (stream, "%s\"", prefix);
6281 for (i = 0; i < len; i++)
6283 register int c = string[i];
6287 if (c == '\\' || c == '\"')
6289 putc ('\\', stream);
6297 fprintf (stream, "\\%03o", c);
6301 if (cur_pos > 72 && i+1 < len)
6304 fprintf (stream, "\"\n%s\"", prefix);
6307 fprintf (stream, "\"\n");
6310 /* Implement TARGET_ASM_FILE_START. */
6313 mips_file_start (void)
6315 default_file_start ();
6319 /* Generate a special section to describe the ABI switches used to
6320 produce the resultant binary. This used to be done by the assembler
6321 setting bits in the ELF header's flags field, but we have run out of
6322 bits. GDB needs this information in order to be able to correctly
6323 debug these binaries. See the function mips_gdbarch_init() in
6324 gdb/mips-tdep.c. This is unnecessary for the IRIX 5/6 ABIs and
6325 causes unnecessary IRIX 6 ld warnings. */
6326 const char * abi_string = NULL;
6330 case ABI_32: abi_string = "abi32"; break;
6331 case ABI_N32: abi_string = "abiN32"; break;
6332 case ABI_64: abi_string = "abi64"; break;
6333 case ABI_O64: abi_string = "abiO64"; break;
6334 case ABI_EABI: abi_string = TARGET_64BIT ? "eabi64" : "eabi32"; break;
6338 /* Note - we use fprintf directly rather than calling switch_to_section
6339 because in this way we can avoid creating an allocated section. We
6340 do not want this section to take up any space in the running
6342 fprintf (asm_out_file, "\t.section .mdebug.%s\n", abi_string);
6344 /* There is no ELF header flag to distinguish long32 forms of the
6345 EABI from long64 forms. Emit a special section to help tools
6346 such as GDB. Do the same for o64, which is sometimes used with
6348 if (mips_abi == ABI_EABI || mips_abi == ABI_O64)
6349 fprintf (asm_out_file, "\t.section .gcc_compiled_long%d\n",
6350 TARGET_LONG64 ? 64 : 32);
6352 /* Restore the default section. */
6353 fprintf (asm_out_file, "\t.previous\n");
6355 #ifdef HAVE_AS_GNU_ATTRIBUTE
6356 fprintf (asm_out_file, "\t.gnu_attribute 4, %d\n",
6357 TARGET_HARD_FLOAT_ABI ? (TARGET_DOUBLE_FLOAT ? 1 : 2) : 3);
6361 /* Generate the pseudo ops that System V.4 wants. */
6362 if (TARGET_ABICALLS)
6363 fprintf (asm_out_file, "\t.abicalls\n");
6366 fprintf (asm_out_file, "\t.set\tmips16\n");
6368 if (flag_verbose_asm)
6369 fprintf (asm_out_file, "\n%s -G value = %d, Arch = %s, ISA = %d\n",
6371 mips_section_threshold, mips_arch_info->name, mips_isa);
6374 #ifdef BSS_SECTION_ASM_OP
6375 /* Implement ASM_OUTPUT_ALIGNED_BSS. This differs from the default only
6376 in the use of sbss. */
6379 mips_output_aligned_bss (FILE *stream, tree decl, const char *name,
6380 unsigned HOST_WIDE_INT size, int align)
6382 extern tree last_assemble_variable_decl;
6384 if (mips_in_small_data_p (decl))
6385 switch_to_section (get_named_section (NULL, ".sbss", 0));
6387 switch_to_section (bss_section);
6388 ASM_OUTPUT_ALIGN (stream, floor_log2 (align / BITS_PER_UNIT));
6389 last_assemble_variable_decl = decl;
6390 ASM_DECLARE_OBJECT_NAME (stream, name, decl);
6391 ASM_OUTPUT_SKIP (stream, size != 0 ? size : 1);
6395 /* Implement ASM_OUTPUT_ALIGNED_DECL_COMMON. This is usually the same as the
6396 elfos.h version, but we also need to handle -muninit-const-in-rodata. */
6399 mips_output_aligned_decl_common (FILE *stream, tree decl, const char *name,
6400 unsigned HOST_WIDE_INT size,
6403 /* If the target wants uninitialized const declarations in
6404 .rdata then don't put them in .comm. */
6405 if (TARGET_EMBEDDED_DATA && TARGET_UNINIT_CONST_IN_RODATA
6406 && TREE_CODE (decl) == VAR_DECL && TREE_READONLY (decl)
6407 && (DECL_INITIAL (decl) == 0 || DECL_INITIAL (decl) == error_mark_node))
6409 if (TREE_PUBLIC (decl) && DECL_NAME (decl))
6410 targetm.asm_out.globalize_label (stream, name);
6412 switch_to_section (readonly_data_section);
6413 ASM_OUTPUT_ALIGN (stream, floor_log2 (align / BITS_PER_UNIT));
6414 mips_declare_object (stream, name, "",
6415 ":\n\t.space\t" HOST_WIDE_INT_PRINT_UNSIGNED "\n",
6419 mips_declare_common_object (stream, name, "\n\t.comm\t",
6423 /* Declare a common object of SIZE bytes using asm directive INIT_STRING.
6424 NAME is the name of the object and ALIGN is the required alignment
6425 in bytes. TAKES_ALIGNMENT_P is true if the directive takes a third
6426 alignment argument. */
6429 mips_declare_common_object (FILE *stream, const char *name,
6430 const char *init_string,
6431 unsigned HOST_WIDE_INT size,
6432 unsigned int align, bool takes_alignment_p)
6434 if (!takes_alignment_p)
6436 size += (align / BITS_PER_UNIT) - 1;
6437 size -= size % (align / BITS_PER_UNIT);
6438 mips_declare_object (stream, name, init_string,
6439 "," HOST_WIDE_INT_PRINT_UNSIGNED "\n", size);
6442 mips_declare_object (stream, name, init_string,
6443 "," HOST_WIDE_INT_PRINT_UNSIGNED ",%u\n",
6444 size, align / BITS_PER_UNIT);
6447 /* Emit either a label, .comm, or .lcomm directive. When using assembler
6448 macros, mark the symbol as written so that mips_file_end won't emit an
6449 .extern for it. STREAM is the output file, NAME is the name of the
6450 symbol, INIT_STRING is the string that should be written before the
6451 symbol and FINAL_STRING is the string that should be written after it.
6452 FINAL_STRING is a printf() format that consumes the remaining arguments. */
6455 mips_declare_object (FILE *stream, const char *name, const char *init_string,
6456 const char *final_string, ...)
6460 fputs (init_string, stream);
6461 assemble_name (stream, name);
6462 va_start (ap, final_string);
6463 vfprintf (stream, final_string, ap);
6466 if (!TARGET_EXPLICIT_RELOCS)
6468 tree name_tree = get_identifier (name);
6469 TREE_ASM_WRITTEN (name_tree) = 1;
6473 #ifdef ASM_OUTPUT_SIZE_DIRECTIVE
6474 extern int size_directive_output;
6476 /* Implement ASM_DECLARE_OBJECT_NAME. This is like most of the standard ELF
6477 definitions except that it uses mips_declare_object() to emit the label. */
6480 mips_declare_object_name (FILE *stream, const char *name,
6481 tree decl ATTRIBUTE_UNUSED)
6483 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
6484 ASM_OUTPUT_TYPE_DIRECTIVE (stream, name, "object");
6487 size_directive_output = 0;
6488 if (!flag_inhibit_size_directive && DECL_SIZE (decl))
6492 size_directive_output = 1;
6493 size = int_size_in_bytes (TREE_TYPE (decl));
6494 ASM_OUTPUT_SIZE_DIRECTIVE (stream, name, size);
6497 mips_declare_object (stream, name, "", ":\n");
6500 /* Implement ASM_FINISH_DECLARE_OBJECT. This is generic ELF stuff. */
6503 mips_finish_declare_object (FILE *stream, tree decl, int top_level, int at_end)
6507 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6508 if (!flag_inhibit_size_directive
6509 && DECL_SIZE (decl) != 0
6510 && !at_end && top_level
6511 && DECL_INITIAL (decl) == error_mark_node
6512 && !size_directive_output)
6516 size_directive_output = 1;
6517 size = int_size_in_bytes (TREE_TYPE (decl));
6518 ASM_OUTPUT_SIZE_DIRECTIVE (stream, name, size);
6523 /* Return true if X in context CONTEXT is a small data address that can
6524 be rewritten as a LO_SUM. */
6527 mips_rewrite_small_data_p (rtx x, enum mips_symbol_context context)
6529 enum mips_symbol_type symbol_type;
6531 return (TARGET_EXPLICIT_RELOCS
6532 && mips_symbolic_constant_p (x, context, &symbol_type)
6533 && symbol_type == SYMBOL_GP_RELATIVE);
6537 /* A for_each_rtx callback for mips_small_data_pattern_p. DATA is the
6538 containing MEM, or null if none. */
6541 mips_small_data_pattern_1 (rtx *loc, void *data)
6543 enum mips_symbol_context context;
6545 if (GET_CODE (*loc) == LO_SUM)
6550 if (for_each_rtx (&XEXP (*loc, 0), mips_small_data_pattern_1, *loc))
6555 context = data ? SYMBOL_CONTEXT_MEM : SYMBOL_CONTEXT_LEA;
6556 return mips_rewrite_small_data_p (*loc, context);
6559 /* Return true if OP refers to small data symbols directly, not through
6563 mips_small_data_pattern_p (rtx op)
6565 return for_each_rtx (&op, mips_small_data_pattern_1, 0);
6568 /* A for_each_rtx callback, used by mips_rewrite_small_data.
6569 DATA is the containing MEM, or null if none. */
6572 mips_rewrite_small_data_1 (rtx *loc, void *data)
6574 enum mips_symbol_context context;
6578 for_each_rtx (&XEXP (*loc, 0), mips_rewrite_small_data_1, *loc);
6582 context = data ? SYMBOL_CONTEXT_MEM : SYMBOL_CONTEXT_LEA;
6583 if (mips_rewrite_small_data_p (*loc, context))
6584 *loc = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, *loc);
6586 if (GET_CODE (*loc) == LO_SUM)
6592 /* If possible, rewrite OP so that it refers to small data using
6593 explicit relocations. */
6596 mips_rewrite_small_data (rtx op)
6598 op = copy_insn (op);
6599 for_each_rtx (&op, mips_rewrite_small_data_1, 0);
6603 /* Return true if the current function has an insn that implicitly
6607 mips_function_has_gp_insn (void)
6609 /* Don't bother rechecking if we found one last time. */
6610 if (!cfun->machine->has_gp_insn_p)
6614 push_topmost_sequence ();
6615 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6617 && GET_CODE (PATTERN (insn)) != USE
6618 && GET_CODE (PATTERN (insn)) != CLOBBER
6619 && (get_attr_got (insn) != GOT_UNSET
6620 || small_data_pattern (PATTERN (insn), VOIDmode)))
6622 pop_topmost_sequence ();
6624 cfun->machine->has_gp_insn_p = (insn != 0);
6626 return cfun->machine->has_gp_insn_p;
6630 /* Return the register that should be used as the global pointer
6631 within this function. Return 0 if the function doesn't need
6632 a global pointer. */
6635 mips_global_pointer (void)
6639 /* $gp is always available unless we're using a GOT. */
6640 if (!TARGET_USE_GOT)
6641 return GLOBAL_POINTER_REGNUM;
6643 /* We must always provide $gp when it is used implicitly. */
6644 if (!TARGET_EXPLICIT_RELOCS)
6645 return GLOBAL_POINTER_REGNUM;
6647 /* FUNCTION_PROFILER includes a jal macro, so we need to give it
6649 if (current_function_profile)
6650 return GLOBAL_POINTER_REGNUM;
6652 /* If the function has a nonlocal goto, $gp must hold the correct
6653 global pointer for the target function. */
6654 if (current_function_has_nonlocal_goto)
6655 return GLOBAL_POINTER_REGNUM;
6657 /* If the gp is never referenced, there's no need to initialize it.
6658 Note that reload can sometimes introduce constant pool references
6659 into a function that otherwise didn't need them. For example,
6660 suppose we have an instruction like:
6662 (set (reg:DF R1) (float:DF (reg:SI R2)))
6664 If R2 turns out to be constant such as 1, the instruction may have a
6665 REG_EQUAL note saying that R1 == 1.0. Reload then has the option of
6666 using this constant if R2 doesn't get allocated to a register.
6668 In cases like these, reload will have added the constant to the pool
6669 but no instruction will yet refer to it. */
6670 if (!df_regs_ever_live_p (GLOBAL_POINTER_REGNUM)
6671 && !current_function_uses_const_pool
6672 && !mips_function_has_gp_insn ())
6675 /* We need a global pointer, but perhaps we can use a call-clobbered
6676 register instead of $gp. */
6677 if (TARGET_CALL_SAVED_GP && current_function_is_leaf)
6678 for (regno = GP_REG_FIRST; regno <= GP_REG_LAST; regno++)
6679 if (!df_regs_ever_live_p (regno)
6680 && call_used_regs[regno]
6681 && !fixed_regs[regno]
6682 && regno != PIC_FUNCTION_ADDR_REGNUM)
6685 return GLOBAL_POINTER_REGNUM;
6689 /* Return true if the function return value MODE will get returned in a
6690 floating-point register. */
6693 mips_return_mode_in_fpr_p (enum machine_mode mode)
6695 return ((GET_MODE_CLASS (mode) == MODE_FLOAT
6696 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
6697 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6698 && GET_MODE_UNIT_SIZE (mode) <= UNITS_PER_HWFPVALUE);
6701 /* Return a two-character string representing a function floating-point
6702 return mode, used to name MIPS16 function stubs. */
6705 mips16_call_stub_mode_suffix (enum machine_mode mode)
6709 else if (mode == DFmode)
6711 else if (mode == SCmode)
6713 else if (mode == DCmode)
6715 else if (mode == V2SFmode)
6721 /* Return true if the current function returns its value in a floating-point
6722 register in MIPS16 mode. */
6725 mips16_cfun_returns_in_fpr_p (void)
6727 tree return_type = DECL_RESULT (current_function_decl);
6728 return (TARGET_MIPS16
6729 && TARGET_HARD_FLOAT_ABI
6730 && !aggregate_value_p (return_type, current_function_decl)
6731 && mips_return_mode_in_fpr_p (DECL_MODE (return_type)));
6735 /* Return true if the current function must save REGNO. */
6738 mips_save_reg_p (unsigned int regno)
6740 /* We only need to save $gp if TARGET_CALL_SAVED_GP and only then
6741 if we have not chosen a call-clobbered substitute. */
6742 if (regno == GLOBAL_POINTER_REGNUM)
6743 return TARGET_CALL_SAVED_GP && cfun->machine->global_pointer == regno;
6745 /* Check call-saved registers. */
6746 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
6749 /* Save both registers in an FPR pair if either one is used. This is
6750 needed for the case when MIN_FPRS_PER_FMT == 1, which allows the odd
6751 register to be used without the even register. */
6752 if (FP_REG_P (regno)
6753 && MAX_FPRS_PER_FMT == 2
6754 && df_regs_ever_live_p (regno + 1)
6755 && !call_used_regs[regno + 1])
6758 /* We need to save the old frame pointer before setting up a new one. */
6759 if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
6762 /* We need to save the incoming return address if it is ever clobbered
6763 within the function. */
6764 if (regno == GP_REG_FIRST + 31 && df_regs_ever_live_p (regno))
6769 /* $18 is a special case in mips16 code. It may be used to call
6770 a function which returns a floating point value, but it is
6771 marked in call_used_regs. */
6772 if (regno == GP_REG_FIRST + 18 && df_regs_ever_live_p (regno))
6775 /* $31 is also a special case. It will be used to copy a return
6776 value into the floating point registers if the return value is
6778 if (regno == GP_REG_FIRST + 31
6779 && mips16_cfun_returns_in_fpr_p ())
6786 /* Return the index of the lowest X in the range [0, SIZE) for which
6787 bit REGS[X] is set in MASK. Return SIZE if there is no such X. */
6790 mips16e_find_first_register (unsigned int mask, const unsigned char *regs,
6795 for (i = 0; i < size; i++)
6796 if (BITSET_P (mask, regs[i]))
6802 /* *MASK_PTR is a mask of general purpose registers and *GP_REG_SIZE_PTR
6803 is the number of bytes that they occupy. If *MASK_PTR contains REGS[X]
6804 for some X in [0, SIZE), adjust *MASK_PTR and *GP_REG_SIZE_PTR so that
6805 the same is true for all indexes (X, SIZE). */
6808 mips16e_mask_registers (unsigned int *mask_ptr, const unsigned char *regs,
6809 unsigned int size, HOST_WIDE_INT *gp_reg_size_ptr)
6813 i = mips16e_find_first_register (*mask_ptr, regs, size);
6814 for (i++; i < size; i++)
6815 if (!BITSET_P (*mask_ptr, regs[i]))
6817 *gp_reg_size_ptr += GET_MODE_SIZE (gpr_mode);
6818 *mask_ptr |= 1 << regs[i];
6822 /* Return the bytes needed to compute the frame pointer from the current
6823 stack pointer. SIZE is the size (in bytes) of the local variables.
6825 MIPS stack frames look like:
6827 Before call After call
6828 high +-----------------------+ +-----------------------+
6830 | caller's temps. | | caller's temps. |
6832 +-----------------------+ +-----------------------+
6834 | arguments on stack. | | arguments on stack. |
6836 +-----------------------+ +-----------------------+
6837 | 4 words to save | | 4 words to save |
6838 | arguments passed | | arguments passed |
6839 | in registers, even | | in registers, even |
6840 | if not passed. | | if not passed. |
6841 SP->+-----------------------+ VFP->+-----------------------+
6842 (VFP = SP+fp_sp_offset) | |\
6843 | fp register save | | fp_reg_size
6845 SP+gp_sp_offset->+-----------------------+
6847 | | gp register save | | gp_reg_size
6848 gp_reg_rounded | | |/
6849 | +-----------------------+
6850 \| alignment padding |
6851 +-----------------------+
6853 | local variables | | var_size
6855 +-----------------------+
6857 | alloca allocations |
6859 +-----------------------+
6861 cprestore_size | | GP save for V.4 abi |
6863 +-----------------------+
6865 | arguments on stack | |
6867 +-----------------------+ |
6868 | 4 words to save | | args_size
6869 | arguments passed | |
6870 | in registers, even | |
6871 | if not passed. | |
6872 low | (TARGET_OLDABI only) |/
6873 memory SP->+-----------------------+
6878 compute_frame_size (HOST_WIDE_INT size)
6881 HOST_WIDE_INT total_size; /* # bytes that the entire frame takes up */
6882 HOST_WIDE_INT var_size; /* # bytes that variables take up */
6883 HOST_WIDE_INT args_size; /* # bytes that outgoing arguments take up */
6884 HOST_WIDE_INT cprestore_size; /* # bytes that the cprestore slot takes up */
6885 HOST_WIDE_INT gp_reg_rounded; /* # bytes needed to store gp after rounding */
6886 HOST_WIDE_INT gp_reg_size; /* # bytes needed to store gp regs */
6887 HOST_WIDE_INT fp_reg_size; /* # bytes needed to store fp regs */
6888 unsigned int mask; /* mask of saved gp registers */
6889 unsigned int fmask; /* mask of saved fp registers */
6891 cfun->machine->global_pointer = mips_global_pointer ();
6897 var_size = MIPS_STACK_ALIGN (size);
6898 args_size = current_function_outgoing_args_size;
6899 cprestore_size = MIPS_STACK_ALIGN (STARTING_FRAME_OFFSET) - args_size;
6901 /* The space set aside by STARTING_FRAME_OFFSET isn't needed in leaf
6902 functions. If the function has local variables, we're committed
6903 to allocating it anyway. Otherwise reclaim it here. */
6904 if (var_size == 0 && current_function_is_leaf)
6905 cprestore_size = args_size = 0;
6907 /* The MIPS 3.0 linker does not like functions that dynamically
6908 allocate the stack and have 0 for STACK_DYNAMIC_OFFSET, since it
6909 looks like we are trying to create a second frame pointer to the
6910 function, so allocate some stack space to make it happy. */
6912 if (args_size == 0 && current_function_calls_alloca)
6913 args_size = 4 * UNITS_PER_WORD;
6915 total_size = var_size + args_size + cprestore_size;
6917 /* Calculate space needed for gp registers. */
6918 for (regno = GP_REG_FIRST; regno <= GP_REG_LAST; regno++)
6919 if (mips_save_reg_p (regno))
6921 gp_reg_size += GET_MODE_SIZE (gpr_mode);
6922 mask |= 1 << (regno - GP_REG_FIRST);
6925 /* We need to restore these for the handler. */
6926 if (current_function_calls_eh_return)
6931 regno = EH_RETURN_DATA_REGNO (i);
6932 if (regno == INVALID_REGNUM)
6934 gp_reg_size += GET_MODE_SIZE (gpr_mode);
6935 mask |= 1 << (regno - GP_REG_FIRST);
6939 /* The MIPS16e SAVE and RESTORE instructions have two ranges of registers:
6940 $a3-$a0 and $s2-$s8. If we save one register in the range, we must
6941 save all later registers too. */
6942 if (GENERATE_MIPS16E_SAVE_RESTORE)
6944 mips16e_mask_registers (&mask, mips16e_s2_s8_regs,
6945 ARRAY_SIZE (mips16e_s2_s8_regs), &gp_reg_size);
6946 mips16e_mask_registers (&mask, mips16e_a0_a3_regs,
6947 ARRAY_SIZE (mips16e_a0_a3_regs), &gp_reg_size);
6950 /* This loop must iterate over the same space as its companion in
6951 mips_for_each_saved_reg. */
6952 for (regno = (FP_REG_LAST - MAX_FPRS_PER_FMT + 1);
6953 regno >= FP_REG_FIRST;
6954 regno -= MAX_FPRS_PER_FMT)
6956 if (mips_save_reg_p (regno))
6958 fp_reg_size += MAX_FPRS_PER_FMT * UNITS_PER_FPREG;
6959 fmask |= ((1 << MAX_FPRS_PER_FMT) - 1) << (regno - FP_REG_FIRST);
6963 gp_reg_rounded = MIPS_STACK_ALIGN (gp_reg_size);
6964 total_size += gp_reg_rounded + MIPS_STACK_ALIGN (fp_reg_size);
6966 /* Add in the space required for saving incoming register arguments. */
6967 total_size += current_function_pretend_args_size;
6968 total_size += MIPS_STACK_ALIGN (cfun->machine->varargs_size);
6970 /* Save other computed information. */
6971 cfun->machine->frame.total_size = total_size;
6972 cfun->machine->frame.var_size = var_size;
6973 cfun->machine->frame.args_size = args_size;
6974 cfun->machine->frame.cprestore_size = cprestore_size;
6975 cfun->machine->frame.gp_reg_size = gp_reg_size;
6976 cfun->machine->frame.fp_reg_size = fp_reg_size;
6977 cfun->machine->frame.mask = mask;
6978 cfun->machine->frame.fmask = fmask;
6979 cfun->machine->frame.initialized = reload_completed;
6980 cfun->machine->frame.num_gp = gp_reg_size / UNITS_PER_WORD;
6981 cfun->machine->frame.num_fp = (fp_reg_size
6982 / (MAX_FPRS_PER_FMT * UNITS_PER_FPREG));
6986 HOST_WIDE_INT offset;
6988 if (GENERATE_MIPS16E_SAVE_RESTORE)
6989 /* MIPS16e SAVE and RESTORE instructions require the GP save area
6990 to be aligned at the high end with any padding at the low end.
6991 It is only safe to use this calculation for o32, where we never
6992 have pretend arguments, and where any varargs will be saved in
6993 the caller-allocated area rather than at the top of the frame. */
6994 offset = (total_size - GET_MODE_SIZE (gpr_mode));
6996 offset = (args_size + cprestore_size + var_size
6997 + gp_reg_size - GET_MODE_SIZE (gpr_mode));
6998 cfun->machine->frame.gp_sp_offset = offset;
6999 cfun->machine->frame.gp_save_offset = offset - total_size;
7003 cfun->machine->frame.gp_sp_offset = 0;
7004 cfun->machine->frame.gp_save_offset = 0;
7009 HOST_WIDE_INT offset;
7011 offset = (args_size + cprestore_size + var_size
7012 + gp_reg_rounded + fp_reg_size
7013 - MAX_FPRS_PER_FMT * UNITS_PER_FPREG);
7014 cfun->machine->frame.fp_sp_offset = offset;
7015 cfun->machine->frame.fp_save_offset = offset - total_size;
7019 cfun->machine->frame.fp_sp_offset = 0;
7020 cfun->machine->frame.fp_save_offset = 0;
7023 /* Ok, we're done. */
7027 /* Implement INITIAL_ELIMINATION_OFFSET. FROM is either the frame
7028 pointer or argument pointer. TO is either the stack pointer or
7029 hard frame pointer. */
7032 mips_initial_elimination_offset (int from, int to)
7034 HOST_WIDE_INT offset;
7036 compute_frame_size (get_frame_size ());
7038 /* Set OFFSET to the offset from the stack pointer. */
7041 case FRAME_POINTER_REGNUM:
7045 case ARG_POINTER_REGNUM:
7046 offset = (cfun->machine->frame.total_size
7047 - current_function_pretend_args_size);
7054 if (TARGET_MIPS16 && to == HARD_FRAME_POINTER_REGNUM)
7055 offset -= cfun->machine->frame.args_size;
7060 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
7061 back to a previous frame. */
7063 mips_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
7068 return get_hard_reg_initial_val (Pmode, GP_REG_FIRST + 31);
7071 /* Use FN to save or restore register REGNO. MODE is the register's
7072 mode and OFFSET is the offset of its save slot from the current
7076 mips_save_restore_reg (enum machine_mode mode, int regno,
7077 HOST_WIDE_INT offset, mips_save_restore_fn fn)
7081 mem = gen_frame_mem (mode, plus_constant (stack_pointer_rtx, offset));
7083 fn (gen_rtx_REG (mode, regno), mem);
7087 /* Call FN for each register that is saved by the current function.
7088 SP_OFFSET is the offset of the current stack pointer from the start
7092 mips_for_each_saved_reg (HOST_WIDE_INT sp_offset, mips_save_restore_fn fn)
7094 enum machine_mode fpr_mode;
7095 HOST_WIDE_INT offset;
7098 /* Save registers starting from high to low. The debuggers prefer at least
7099 the return register be stored at func+4, and also it allows us not to
7100 need a nop in the epilogue if at least one register is reloaded in
7101 addition to return address. */
7102 offset = cfun->machine->frame.gp_sp_offset - sp_offset;
7103 for (regno = GP_REG_LAST; regno >= GP_REG_FIRST; regno--)
7104 if (BITSET_P (cfun->machine->frame.mask, regno - GP_REG_FIRST))
7106 mips_save_restore_reg (gpr_mode, regno, offset, fn);
7107 offset -= GET_MODE_SIZE (gpr_mode);
7110 /* This loop must iterate over the same space as its companion in
7111 compute_frame_size. */
7112 offset = cfun->machine->frame.fp_sp_offset - sp_offset;
7113 fpr_mode = (TARGET_SINGLE_FLOAT ? SFmode : DFmode);
7114 for (regno = (FP_REG_LAST - MAX_FPRS_PER_FMT + 1);
7115 regno >= FP_REG_FIRST;
7116 regno -= MAX_FPRS_PER_FMT)
7117 if (BITSET_P (cfun->machine->frame.fmask, regno - FP_REG_FIRST))
7119 mips_save_restore_reg (fpr_mode, regno, offset, fn);
7120 offset -= GET_MODE_SIZE (fpr_mode);
7124 /* If we're generating n32 or n64 abicalls, and the current function
7125 does not use $28 as its global pointer, emit a cplocal directive.
7126 Use pic_offset_table_rtx as the argument to the directive. */
7129 mips_output_cplocal (void)
7131 if (!TARGET_EXPLICIT_RELOCS
7132 && cfun->machine->global_pointer > 0
7133 && cfun->machine->global_pointer != GLOBAL_POINTER_REGNUM)
7134 output_asm_insn (".cplocal %+", 0);
7137 /* Return the style of GP load sequence that is being used for the
7138 current function. */
7140 enum mips_loadgp_style
7141 mips_current_loadgp_style (void)
7143 if (!TARGET_USE_GOT || cfun->machine->global_pointer == 0)
7149 if (TARGET_ABSOLUTE_ABICALLS)
7150 return LOADGP_ABSOLUTE;
7152 return TARGET_NEWABI ? LOADGP_NEWABI : LOADGP_OLDABI;
7155 /* The __gnu_local_gp symbol. */
7157 static GTY(()) rtx mips_gnu_local_gp;
7159 /* If we're generating n32 or n64 abicalls, emit instructions
7160 to set up the global pointer. */
7163 mips_emit_loadgp (void)
7165 rtx addr, offset, incoming_address, base, index;
7167 switch (mips_current_loadgp_style ())
7169 case LOADGP_ABSOLUTE:
7170 if (mips_gnu_local_gp == NULL)
7172 mips_gnu_local_gp = gen_rtx_SYMBOL_REF (Pmode, "__gnu_local_gp");
7173 SYMBOL_REF_FLAGS (mips_gnu_local_gp) |= SYMBOL_FLAG_LOCAL;
7175 emit_insn (gen_loadgp_absolute (mips_gnu_local_gp));
7179 addr = XEXP (DECL_RTL (current_function_decl), 0);
7180 offset = mips_unspec_address (addr, SYMBOL_GOTOFF_LOADGP);
7181 incoming_address = gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM);
7182 emit_insn (gen_loadgp_newabi (offset, incoming_address));
7183 if (!TARGET_EXPLICIT_RELOCS)
7184 emit_insn (gen_loadgp_blockage ());
7188 base = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (VXWORKS_GOTT_BASE));
7189 index = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (VXWORKS_GOTT_INDEX));
7190 emit_insn (gen_loadgp_rtp (base, index));
7191 if (!TARGET_EXPLICIT_RELOCS)
7192 emit_insn (gen_loadgp_blockage ());
7200 /* Set up the stack and frame (if desired) for the function. */
7203 mips_output_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
7206 HOST_WIDE_INT tsize = cfun->machine->frame.total_size;
7208 #ifdef SDB_DEBUGGING_INFO
7209 if (debug_info_level != DINFO_LEVEL_TERSE && write_symbols == SDB_DEBUG)
7210 SDB_OUTPUT_SOURCE_LINE (file, DECL_SOURCE_LINE (current_function_decl));
7213 /* In mips16 mode, we may need to generate a 32 bit to handle
7214 floating point arguments. The linker will arrange for any 32-bit
7215 functions to call this stub, which will then jump to the 16-bit
7218 && TARGET_HARD_FLOAT_ABI
7219 && current_function_args_info.fp_code != 0)
7220 build_mips16_function_stub (file);
7222 if (!FUNCTION_NAME_ALREADY_DECLARED)
7224 /* Get the function name the same way that toplev.c does before calling
7225 assemble_start_function. This is needed so that the name used here
7226 exactly matches the name used in ASM_DECLARE_FUNCTION_NAME. */
7227 fnname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
7229 if (!flag_inhibit_size_directive)
7231 fputs ("\t.ent\t", file);
7232 assemble_name (file, fnname);
7236 assemble_name (file, fnname);
7237 fputs (":\n", file);
7240 /* Stop mips_file_end from treating this function as external. */
7241 if (TARGET_IRIX && mips_abi == ABI_32)
7242 TREE_ASM_WRITTEN (DECL_NAME (cfun->decl)) = 1;
7244 if (!flag_inhibit_size_directive)
7246 /* .frame FRAMEREG, FRAMESIZE, RETREG */
7248 "\t.frame\t%s," HOST_WIDE_INT_PRINT_DEC ",%s\t\t"
7249 "# vars= " HOST_WIDE_INT_PRINT_DEC ", regs= %d/%d"
7250 ", args= " HOST_WIDE_INT_PRINT_DEC
7251 ", gp= " HOST_WIDE_INT_PRINT_DEC "\n",
7252 (reg_names[(frame_pointer_needed)
7253 ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM]),
7254 ((frame_pointer_needed && TARGET_MIPS16)
7255 ? tsize - cfun->machine->frame.args_size
7257 reg_names[GP_REG_FIRST + 31],
7258 cfun->machine->frame.var_size,
7259 cfun->machine->frame.num_gp,
7260 cfun->machine->frame.num_fp,
7261 cfun->machine->frame.args_size,
7262 cfun->machine->frame.cprestore_size);
7264 /* .mask MASK, GPOFFSET; .fmask FPOFFSET */
7265 fprintf (file, "\t.mask\t0x%08x," HOST_WIDE_INT_PRINT_DEC "\n",
7266 cfun->machine->frame.mask,
7267 cfun->machine->frame.gp_save_offset);
7268 fprintf (file, "\t.fmask\t0x%08x," HOST_WIDE_INT_PRINT_DEC "\n",
7269 cfun->machine->frame.fmask,
7270 cfun->machine->frame.fp_save_offset);
7273 OLD_SP == *FRAMEREG + FRAMESIZE => can find old_sp from nominated FP reg.
7274 HIGHEST_GP_SAVED == *FRAMEREG + FRAMESIZE + GPOFFSET => can find saved regs. */
7277 if (mips_current_loadgp_style () == LOADGP_OLDABI)
7279 /* Handle the initialization of $gp for SVR4 PIC. */
7280 if (!cfun->machine->all_noreorder_p)
7281 output_asm_insn ("%(.cpload\t%^%)", 0);
7283 output_asm_insn ("%(.cpload\t%^\n\t%<", 0);
7285 else if (cfun->machine->all_noreorder_p)
7286 output_asm_insn ("%(%<", 0);
7288 /* Tell the assembler which register we're using as the global
7289 pointer. This is needed for thunks, since they can use either
7290 explicit relocs or assembler macros. */
7291 mips_output_cplocal ();
7294 /* Make the last instruction frame related and note that it performs
7295 the operation described by FRAME_PATTERN. */
7298 mips_set_frame_expr (rtx frame_pattern)
7302 insn = get_last_insn ();
7303 RTX_FRAME_RELATED_P (insn) = 1;
7304 REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7310 /* Return a frame-related rtx that stores REG at MEM.
7311 REG must be a single register. */
7314 mips_frame_set (rtx mem, rtx reg)
7318 /* If we're saving the return address register and the dwarf return
7319 address column differs from the hard register number, adjust the
7320 note reg to refer to the former. */
7321 if (REGNO (reg) == GP_REG_FIRST + 31
7322 && DWARF_FRAME_RETURN_COLUMN != GP_REG_FIRST + 31)
7323 reg = gen_rtx_REG (GET_MODE (reg), DWARF_FRAME_RETURN_COLUMN);
7325 set = gen_rtx_SET (VOIDmode, mem, reg);
7326 RTX_FRAME_RELATED_P (set) = 1;
7332 /* Save register REG to MEM. Make the instruction frame-related. */
7335 mips_save_reg (rtx reg, rtx mem)
7337 if (GET_MODE (reg) == DFmode && !TARGET_FLOAT64)
7341 if (mips_split_64bit_move_p (mem, reg))
7342 mips_split_64bit_move (mem, reg);
7344 mips_emit_move (mem, reg);
7346 x1 = mips_frame_set (mips_subword (mem, 0), mips_subword (reg, 0));
7347 x2 = mips_frame_set (mips_subword (mem, 1), mips_subword (reg, 1));
7348 mips_set_frame_expr (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, x1, x2)));
7353 && REGNO (reg) != GP_REG_FIRST + 31
7354 && !M16_REG_P (REGNO (reg)))
7356 /* Save a non-mips16 register by moving it through a temporary.
7357 We don't need to do this for $31 since there's a special
7358 instruction for it. */
7359 mips_emit_move (MIPS_PROLOGUE_TEMP (GET_MODE (reg)), reg);
7360 mips_emit_move (mem, MIPS_PROLOGUE_TEMP (GET_MODE (reg)));
7363 mips_emit_move (mem, reg);
7365 mips_set_frame_expr (mips_frame_set (mem, reg));
7369 /* Return a move between register REGNO and memory location SP + OFFSET.
7370 Make the move a load if RESTORE_P, otherwise make it a frame-related
7374 mips16e_save_restore_reg (bool restore_p, HOST_WIDE_INT offset,
7379 mem = gen_frame_mem (SImode, plus_constant (stack_pointer_rtx, offset));
7380 reg = gen_rtx_REG (SImode, regno);
7382 ? gen_rtx_SET (VOIDmode, reg, mem)
7383 : mips_frame_set (mem, reg));
7386 /* Return RTL for a MIPS16e SAVE or RESTORE instruction; RESTORE_P says which.
7387 The instruction must:
7389 - Allocate or deallocate SIZE bytes in total; SIZE is known
7392 - Save or restore as many registers in *MASK_PTR as possible.
7393 The instruction saves the first registers at the top of the
7394 allocated area, with the other registers below it.
7396 - Save NARGS argument registers above the allocated area.
7398 (NARGS is always zero if RESTORE_P.)
7400 The SAVE and RESTORE instructions cannot save and restore all general
7401 registers, so there may be some registers left over for the caller to
7402 handle. Destructively modify *MASK_PTR so that it contains the registers
7403 that still need to be saved or restored. The caller can save these
7404 registers in the memory immediately below *OFFSET_PTR, which is a
7405 byte offset from the bottom of the allocated stack area. */
7408 mips16e_build_save_restore (bool restore_p, unsigned int *mask_ptr,
7409 HOST_WIDE_INT *offset_ptr, unsigned int nargs,
7413 HOST_WIDE_INT offset, top_offset;
7414 unsigned int i, regno;
7417 gcc_assert (cfun->machine->frame.fp_reg_size == 0);
7419 /* Calculate the number of elements in the PARALLEL. We need one element
7420 for the stack adjustment, one for each argument register save, and one
7421 for each additional register move. */
7423 for (i = 0; i < ARRAY_SIZE (mips16e_save_restore_regs); i++)
7424 if (BITSET_P (*mask_ptr, mips16e_save_restore_regs[i]))
7427 /* Create the final PARALLEL. */
7428 pattern = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (n));
7431 /* Add the stack pointer adjustment. */
7432 set = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7433 plus_constant (stack_pointer_rtx,
7434 restore_p ? size : -size));
7435 RTX_FRAME_RELATED_P (set) = 1;
7436 XVECEXP (pattern, 0, n++) = set;
7438 /* Stack offsets in the PARALLEL are relative to the old stack pointer. */
7439 top_offset = restore_p ? size : 0;
7441 /* Save the arguments. */
7442 for (i = 0; i < nargs; i++)
7444 offset = top_offset + i * GET_MODE_SIZE (gpr_mode);
7445 set = mips16e_save_restore_reg (restore_p, offset, GP_ARG_FIRST + i);
7446 XVECEXP (pattern, 0, n++) = set;
7449 /* Then fill in the other register moves. */
7450 offset = top_offset;
7451 for (i = 0; i < ARRAY_SIZE (mips16e_save_restore_regs); i++)
7453 regno = mips16e_save_restore_regs[i];
7454 if (BITSET_P (*mask_ptr, regno))
7456 offset -= UNITS_PER_WORD;
7457 set = mips16e_save_restore_reg (restore_p, offset, regno);
7458 XVECEXP (pattern, 0, n++) = set;
7459 *mask_ptr &= ~(1 << regno);
7463 /* Tell the caller what offset it should use for the remaining registers. */
7464 *offset_ptr = size + (offset - top_offset) + size;
7466 gcc_assert (n == XVECLEN (pattern, 0));
7471 /* PATTERN is a PARALLEL whose first element adds ADJUST to the stack
7472 pointer. Return true if PATTERN matches the kind of instruction
7473 generated by mips16e_build_save_restore. If INFO is nonnull,
7474 initialize it when returning true. */
7477 mips16e_save_restore_pattern_p (rtx pattern, HOST_WIDE_INT adjust,
7478 struct mips16e_save_restore_info *info)
7480 unsigned int i, nargs, mask;
7481 HOST_WIDE_INT top_offset, save_offset, offset, extra;
7482 rtx set, reg, mem, base;
7485 if (!GENERATE_MIPS16E_SAVE_RESTORE)
7488 /* Stack offsets in the PARALLEL are relative to the old stack pointer. */
7489 top_offset = adjust > 0 ? adjust : 0;
7491 /* Interpret all other members of the PARALLEL. */
7492 save_offset = top_offset - GET_MODE_SIZE (gpr_mode);
7496 for (n = 1; n < XVECLEN (pattern, 0); n++)
7498 /* Check that we have a SET. */
7499 set = XVECEXP (pattern, 0, n);
7500 if (GET_CODE (set) != SET)
7503 /* Check that the SET is a load (if restoring) or a store
7505 mem = adjust > 0 ? SET_SRC (set) : SET_DEST (set);
7509 /* Check that the address is the sum of the stack pointer and a
7510 possibly-zero constant offset. */
7511 mips_split_plus (XEXP (mem, 0), &base, &offset);
7512 if (base != stack_pointer_rtx)
7515 /* Check that SET's other operand is a register. */
7516 reg = adjust > 0 ? SET_DEST (set) : SET_SRC (set);
7520 /* Check for argument saves. */
7521 if (offset == top_offset + nargs * GET_MODE_SIZE (gpr_mode)
7522 && REGNO (reg) == GP_ARG_FIRST + nargs)
7524 else if (offset == save_offset)
7526 while (mips16e_save_restore_regs[i++] != REGNO (reg))
7527 if (i == ARRAY_SIZE (mips16e_save_restore_regs))
7530 mask |= 1 << REGNO (reg);
7531 save_offset -= GET_MODE_SIZE (gpr_mode);
7537 /* Check that the restrictions on register ranges are met. */
7539 mips16e_mask_registers (&mask, mips16e_s2_s8_regs,
7540 ARRAY_SIZE (mips16e_s2_s8_regs), &extra);
7541 mips16e_mask_registers (&mask, mips16e_a0_a3_regs,
7542 ARRAY_SIZE (mips16e_a0_a3_regs), &extra);
7546 /* Make sure that the topmost argument register is not saved twice.
7547 The checks above ensure that the same is then true for the other
7548 argument registers. */
7549 if (nargs > 0 && BITSET_P (mask, GP_ARG_FIRST + nargs - 1))
7552 /* Pass back information, if requested. */
7555 info->nargs = nargs;
7557 info->size = (adjust > 0 ? adjust : -adjust);
7563 /* Add a MIPS16e SAVE or RESTORE register-range argument to string S
7564 for the register range [MIN_REG, MAX_REG]. Return a pointer to
7565 the null terminator. */
7568 mips16e_add_register_range (char *s, unsigned int min_reg,
7569 unsigned int max_reg)
7571 if (min_reg != max_reg)
7572 s += sprintf (s, ",%s-%s", reg_names[min_reg], reg_names[max_reg]);
7574 s += sprintf (s, ",%s", reg_names[min_reg]);
7578 /* Return the assembly instruction for a MIPS16e SAVE or RESTORE instruction.
7579 PATTERN and ADJUST are as for mips16e_save_restore_pattern_p. */
7582 mips16e_output_save_restore (rtx pattern, HOST_WIDE_INT adjust)
7584 static char buffer[300];
7586 struct mips16e_save_restore_info info;
7587 unsigned int i, end;
7590 /* Parse the pattern. */
7591 if (!mips16e_save_restore_pattern_p (pattern, adjust, &info))
7594 /* Add the mnemonic. */
7595 s = strcpy (buffer, adjust > 0 ? "restore\t" : "save\t");
7598 /* Save the arguments. */
7600 s += sprintf (s, "%s-%s,", reg_names[GP_ARG_FIRST],
7601 reg_names[GP_ARG_FIRST + info.nargs - 1]);
7602 else if (info.nargs == 1)
7603 s += sprintf (s, "%s,", reg_names[GP_ARG_FIRST]);
7605 /* Emit the amount of stack space to allocate or deallocate. */
7606 s += sprintf (s, "%d", (int) info.size);
7608 /* Save or restore $16. */
7609 if (BITSET_P (info.mask, 16))
7610 s += sprintf (s, ",%s", reg_names[GP_REG_FIRST + 16]);
7612 /* Save or restore $17. */
7613 if (BITSET_P (info.mask, 17))
7614 s += sprintf (s, ",%s", reg_names[GP_REG_FIRST + 17]);
7616 /* Save or restore registers in the range $s2...$s8, which
7617 mips16e_s2_s8_regs lists in decreasing order. Note that this
7618 is a software register range; the hardware registers are not
7619 numbered consecutively. */
7620 end = ARRAY_SIZE (mips16e_s2_s8_regs);
7621 i = mips16e_find_first_register (info.mask, mips16e_s2_s8_regs, end);
7623 s = mips16e_add_register_range (s, mips16e_s2_s8_regs[end - 1],
7624 mips16e_s2_s8_regs[i]);
7626 /* Save or restore registers in the range $a0...$a3. */
7627 end = ARRAY_SIZE (mips16e_a0_a3_regs);
7628 i = mips16e_find_first_register (info.mask, mips16e_a0_a3_regs, end);
7630 s = mips16e_add_register_range (s, mips16e_a0_a3_regs[i],
7631 mips16e_a0_a3_regs[end - 1]);
7633 /* Save or restore $31. */
7634 if (BITSET_P (info.mask, 31))
7635 s += sprintf (s, ",%s", reg_names[GP_REG_FIRST + 31]);
7640 /* Return a simplified form of X using the register values in REG_VALUES.
7641 REG_VALUES[R] is the last value assigned to hard register R, or null
7642 if R has not been modified.
7644 This function is rather limited, but is good enough for our purposes. */
7647 mips16e_collect_propagate_value (rtx x, rtx *reg_values)
7651 x = avoid_constant_pool_reference (x);
7655 x0 = mips16e_collect_propagate_value (XEXP (x, 0), reg_values);
7656 return simplify_gen_unary (GET_CODE (x), GET_MODE (x),
7657 x0, GET_MODE (XEXP (x, 0)));
7660 if (ARITHMETIC_P (x))
7662 x0 = mips16e_collect_propagate_value (XEXP (x, 0), reg_values);
7663 x1 = mips16e_collect_propagate_value (XEXP (x, 1), reg_values);
7664 return simplify_gen_binary (GET_CODE (x), GET_MODE (x), x0, x1);
7668 && reg_values[REGNO (x)]
7669 && !rtx_unstable_p (reg_values[REGNO (x)]))
7670 return reg_values[REGNO (x)];
7675 /* Return true if (set DEST SRC) stores an argument register into its
7676 caller-allocated save slot, storing the number of that argument
7677 register in *REGNO_PTR if so. REG_VALUES is as for
7678 mips16e_collect_propagate_value. */
7681 mips16e_collect_argument_save_p (rtx dest, rtx src, rtx *reg_values,
7682 unsigned int *regno_ptr)
7684 unsigned int argno, regno;
7685 HOST_WIDE_INT offset, required_offset;
7688 /* Check that this is a word-mode store. */
7689 if (!MEM_P (dest) || !REG_P (src) || GET_MODE (dest) != word_mode)
7692 /* Check that the register being saved is an unmodified argument
7694 regno = REGNO (src);
7695 if (regno < GP_ARG_FIRST || regno > GP_ARG_LAST || reg_values[regno])
7697 argno = regno - GP_ARG_FIRST;
7699 /* Check whether the address is an appropriate stack pointer or
7700 frame pointer access. The frame pointer is offset from the
7701 stack pointer by the size of the outgoing arguments. */
7702 addr = mips16e_collect_propagate_value (XEXP (dest, 0), reg_values);
7703 mips_split_plus (addr, &base, &offset);
7704 required_offset = cfun->machine->frame.total_size + argno * UNITS_PER_WORD;
7705 if (base == hard_frame_pointer_rtx)
7706 required_offset -= cfun->machine->frame.args_size;
7707 else if (base != stack_pointer_rtx)
7709 if (offset != required_offset)
7716 /* A subroutine of mips_expand_prologue, called only when generating
7717 MIPS16e SAVE instructions. Search the start of the function for any
7718 instructions that save argument registers into their caller-allocated
7719 save slots. Delete such instructions and return a value N such that
7720 saving [GP_ARG_FIRST, GP_ARG_FIRST + N) would make all the deleted
7721 instructions redundant. */
7724 mips16e_collect_argument_saves (void)
7726 rtx reg_values[FIRST_PSEUDO_REGISTER];
7727 rtx insn, next, set, dest, src;
7728 unsigned int nargs, regno;
7730 push_topmost_sequence ();
7732 memset (reg_values, 0, sizeof (reg_values));
7733 for (insn = get_insns (); insn; insn = next)
7735 next = NEXT_INSN (insn);
7742 set = PATTERN (insn);
7743 if (GET_CODE (set) != SET)
7746 dest = SET_DEST (set);
7747 src = SET_SRC (set);
7748 if (mips16e_collect_argument_save_p (dest, src, reg_values, ®no))
7750 if (!BITSET_P (cfun->machine->frame.mask, regno))
7753 nargs = MAX (nargs, (regno - GP_ARG_FIRST) + 1);
7756 else if (REG_P (dest) && GET_MODE (dest) == word_mode)
7757 reg_values[REGNO (dest)]
7758 = mips16e_collect_propagate_value (src, reg_values);
7762 pop_topmost_sequence ();
7767 /* Expand the prologue into a bunch of separate insns. */
7770 mips_expand_prologue (void)
7776 if (cfun->machine->global_pointer > 0)
7777 SET_REGNO (pic_offset_table_rtx, cfun->machine->global_pointer);
7779 size = compute_frame_size (get_frame_size ());
7781 /* Save the registers. Allocate up to MIPS_MAX_FIRST_STACK_STEP
7782 bytes beforehand; this is enough to cover the register save area
7783 without going out of range. */
7784 if ((cfun->machine->frame.mask | cfun->machine->frame.fmask) != 0)
7786 HOST_WIDE_INT step1;
7788 step1 = MIN (size, MIPS_MAX_FIRST_STACK_STEP);
7790 if (GENERATE_MIPS16E_SAVE_RESTORE)
7792 HOST_WIDE_INT offset;
7793 unsigned int mask, regno;
7795 /* Try to merge argument stores into the save instruction. */
7796 nargs = mips16e_collect_argument_saves ();
7798 /* Build the save instruction. */
7799 mask = cfun->machine->frame.mask;
7800 insn = mips16e_build_save_restore (false, &mask, &offset,
7802 RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;
7805 /* Check if we need to save other registers. */
7806 for (regno = GP_REG_FIRST; regno < GP_REG_LAST; regno++)
7807 if (BITSET_P (mask, regno - GP_REG_FIRST))
7809 offset -= GET_MODE_SIZE (gpr_mode);
7810 mips_save_restore_reg (gpr_mode, regno, offset, mips_save_reg);
7815 insn = gen_add3_insn (stack_pointer_rtx,
7818 RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;
7820 mips_for_each_saved_reg (size, mips_save_reg);
7824 /* Allocate the rest of the frame. */
7827 if (SMALL_OPERAND (-size))
7828 RTX_FRAME_RELATED_P (emit_insn (gen_add3_insn (stack_pointer_rtx,
7830 GEN_INT (-size)))) = 1;
7833 mips_emit_move (MIPS_PROLOGUE_TEMP (Pmode), GEN_INT (size));
7836 /* There are no instructions to add or subtract registers
7837 from the stack pointer, so use the frame pointer as a
7838 temporary. We should always be using a frame pointer
7839 in this case anyway. */
7840 gcc_assert (frame_pointer_needed);
7841 mips_emit_move (hard_frame_pointer_rtx, stack_pointer_rtx);
7842 emit_insn (gen_sub3_insn (hard_frame_pointer_rtx,
7843 hard_frame_pointer_rtx,
7844 MIPS_PROLOGUE_TEMP (Pmode)));
7845 mips_emit_move (stack_pointer_rtx, hard_frame_pointer_rtx);
7848 emit_insn (gen_sub3_insn (stack_pointer_rtx,
7850 MIPS_PROLOGUE_TEMP (Pmode)));
7852 /* Describe the combined effect of the previous instructions. */
7854 (gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7855 plus_constant (stack_pointer_rtx, -size)));
7859 /* Set up the frame pointer, if we're using one. In mips16 code,
7860 we point the frame pointer ahead of the outgoing argument area.
7861 This should allow more variables & incoming arguments to be
7862 accessed with unextended instructions. */
7863 if (frame_pointer_needed)
7865 if (TARGET_MIPS16 && cfun->machine->frame.args_size != 0)
7867 rtx offset = GEN_INT (cfun->machine->frame.args_size);
7868 if (SMALL_OPERAND (cfun->machine->frame.args_size))
7870 (emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
7875 mips_emit_move (MIPS_PROLOGUE_TEMP (Pmode), offset);
7876 mips_emit_move (hard_frame_pointer_rtx, stack_pointer_rtx);
7877 emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
7878 hard_frame_pointer_rtx,
7879 MIPS_PROLOGUE_TEMP (Pmode)));
7881 (gen_rtx_SET (VOIDmode, hard_frame_pointer_rtx,
7882 plus_constant (stack_pointer_rtx,
7883 cfun->machine->frame.args_size)));
7887 RTX_FRAME_RELATED_P (mips_emit_move (hard_frame_pointer_rtx,
7888 stack_pointer_rtx)) = 1;
7891 mips_emit_loadgp ();
7893 /* If generating o32/o64 abicalls, save $gp on the stack. */
7894 if (TARGET_ABICALLS && TARGET_OLDABI && !current_function_is_leaf)
7895 emit_insn (gen_cprestore (GEN_INT (current_function_outgoing_args_size)));
7897 /* If we are profiling, make sure no instructions are scheduled before
7898 the call to mcount. */
7900 if (current_function_profile)
7901 emit_insn (gen_blockage ());
7904 /* Do any necessary cleanup after a function to restore stack, frame,
7907 #define RA_MASK BITMASK_HIGH /* 1 << 31 */
7910 mips_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
7911 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
7913 /* Reinstate the normal $gp. */
7914 SET_REGNO (pic_offset_table_rtx, GLOBAL_POINTER_REGNUM);
7915 mips_output_cplocal ();
7917 if (cfun->machine->all_noreorder_p)
7919 /* Avoid using %>%) since it adds excess whitespace. */
7920 output_asm_insn (".set\tmacro", 0);
7921 output_asm_insn (".set\treorder", 0);
7922 set_noreorder = set_nomacro = 0;
7925 if (!FUNCTION_NAME_ALREADY_DECLARED && !flag_inhibit_size_directive)
7929 /* Get the function name the same way that toplev.c does before calling
7930 assemble_start_function. This is needed so that the name used here
7931 exactly matches the name used in ASM_DECLARE_FUNCTION_NAME. */
7932 fnname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
7933 fputs ("\t.end\t", file);
7934 assemble_name (file, fnname);
7939 /* Emit instructions to restore register REG from slot MEM. */
7942 mips_restore_reg (rtx reg, rtx mem)
7944 /* There's no mips16 instruction to load $31 directly. Load into
7945 $7 instead and adjust the return insn appropriately. */
7946 if (TARGET_MIPS16 && REGNO (reg) == GP_REG_FIRST + 31)
7947 reg = gen_rtx_REG (GET_MODE (reg), 7);
7949 if (TARGET_MIPS16 && !M16_REG_P (REGNO (reg)))
7951 /* Can't restore directly; move through a temporary. */
7952 mips_emit_move (MIPS_EPILOGUE_TEMP (GET_MODE (reg)), mem);
7953 mips_emit_move (reg, MIPS_EPILOGUE_TEMP (GET_MODE (reg)));
7956 mips_emit_move (reg, mem);
7960 /* Expand the epilogue into a bunch of separate insns. SIBCALL_P is true
7961 if this epilogue precedes a sibling call, false if it is for a normal
7962 "epilogue" pattern. */
7965 mips_expand_epilogue (int sibcall_p)
7967 HOST_WIDE_INT step1, step2;
7970 if (!sibcall_p && mips_can_use_return_insn ())
7972 emit_jump_insn (gen_return ());
7976 /* In mips16 mode, if the return value should go into a floating-point
7977 register, we need to call a helper routine to copy it over. */
7978 if (mips16_cfun_returns_in_fpr_p ())
7987 enum machine_mode return_mode;
7989 return_type = DECL_RESULT (current_function_decl);
7990 return_mode = DECL_MODE (return_type);
7992 name = ACONCAT (("__mips16_ret_",
7993 mips16_call_stub_mode_suffix (return_mode),
7995 id = get_identifier (name);
7996 func = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (id));
7997 retval = gen_rtx_REG (return_mode, GP_RETURN);
7998 call = gen_call_value_internal (retval, func, const0_rtx);
7999 insn = emit_call_insn (call);
8000 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), retval);
8003 /* Split the frame into two. STEP1 is the amount of stack we should
8004 deallocate before restoring the registers. STEP2 is the amount we
8005 should deallocate afterwards.
8007 Start off by assuming that no registers need to be restored. */
8008 step1 = cfun->machine->frame.total_size;
8011 /* Work out which register holds the frame address. Account for the
8012 frame pointer offset used by mips16 code. */
8013 if (!frame_pointer_needed)
8014 base = stack_pointer_rtx;
8017 base = hard_frame_pointer_rtx;
8019 step1 -= cfun->machine->frame.args_size;
8022 /* If we need to restore registers, deallocate as much stack as
8023 possible in the second step without going out of range. */
8024 if ((cfun->machine->frame.mask | cfun->machine->frame.fmask) != 0)
8026 step2 = MIN (step1, MIPS_MAX_FIRST_STACK_STEP);
8030 /* Set TARGET to BASE + STEP1. */
8036 /* Get an rtx for STEP1 that we can add to BASE. */
8037 adjust = GEN_INT (step1);
8038 if (!SMALL_OPERAND (step1))
8040 mips_emit_move (MIPS_EPILOGUE_TEMP (Pmode), adjust);
8041 adjust = MIPS_EPILOGUE_TEMP (Pmode);
8044 /* Normal mode code can copy the result straight into $sp. */
8046 target = stack_pointer_rtx;
8048 emit_insn (gen_add3_insn (target, base, adjust));
8051 /* Copy TARGET into the stack pointer. */
8052 if (target != stack_pointer_rtx)
8053 mips_emit_move (stack_pointer_rtx, target);
8055 /* If we're using addressing macros, $gp is implicitly used by all
8056 SYMBOL_REFs. We must emit a blockage insn before restoring $gp
8058 if (TARGET_CALL_SAVED_GP && !TARGET_EXPLICIT_RELOCS)
8059 emit_insn (gen_blockage ());
8061 if (GENERATE_MIPS16E_SAVE_RESTORE && cfun->machine->frame.mask != 0)
8063 unsigned int regno, mask;
8064 HOST_WIDE_INT offset;
8067 /* Generate the restore instruction. */
8068 mask = cfun->machine->frame.mask;
8069 restore = mips16e_build_save_restore (true, &mask, &offset, 0, step2);
8071 /* Restore any other registers manually. */
8072 for (regno = GP_REG_FIRST; regno < GP_REG_LAST; regno++)
8073 if (BITSET_P (mask, regno - GP_REG_FIRST))
8075 offset -= GET_MODE_SIZE (gpr_mode);
8076 mips_save_restore_reg (gpr_mode, regno, offset, mips_restore_reg);
8079 /* Restore the remaining registers and deallocate the final bit
8081 emit_insn (restore);
8085 /* Restore the registers. */
8086 mips_for_each_saved_reg (cfun->machine->frame.total_size - step2,
8089 /* Deallocate the final bit of the frame. */
8091 emit_insn (gen_add3_insn (stack_pointer_rtx,
8096 /* Add in the __builtin_eh_return stack adjustment. We need to
8097 use a temporary in mips16 code. */
8098 if (current_function_calls_eh_return)
8102 mips_emit_move (MIPS_EPILOGUE_TEMP (Pmode), stack_pointer_rtx);
8103 emit_insn (gen_add3_insn (MIPS_EPILOGUE_TEMP (Pmode),
8104 MIPS_EPILOGUE_TEMP (Pmode),
8105 EH_RETURN_STACKADJ_RTX));
8106 mips_emit_move (stack_pointer_rtx, MIPS_EPILOGUE_TEMP (Pmode));
8109 emit_insn (gen_add3_insn (stack_pointer_rtx,
8111 EH_RETURN_STACKADJ_RTX));
8116 /* When generating MIPS16 code, the normal mips_for_each_saved_reg
8117 path will restore the return address into $7 rather than $31. */
8119 && !GENERATE_MIPS16E_SAVE_RESTORE
8120 && (cfun->machine->frame.mask & RA_MASK) != 0)
8121 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode,
8122 GP_REG_FIRST + 7)));
8124 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode,
8125 GP_REG_FIRST + 31)));
8129 /* Return nonzero if this function is known to have a null epilogue.
8130 This allows the optimizer to omit jumps to jumps if no stack
8134 mips_can_use_return_insn (void)
8136 if (! reload_completed)
8139 if (df_regs_ever_live_p (31) || current_function_profile)
8142 /* In mips16 mode, a function that returns a floating point value
8143 needs to arrange to copy the return value into the floating point
8145 if (mips16_cfun_returns_in_fpr_p ())
8148 if (cfun->machine->frame.initialized)
8149 return cfun->machine->frame.total_size == 0;
8151 return compute_frame_size (get_frame_size ()) == 0;
8154 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. Generate rtl rather than asm text
8155 in order to avoid duplicating too much logic from elsewhere. */
8158 mips_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
8159 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8162 rtx this, temp1, temp2, insn, fnaddr;
8164 /* Pretend to be a post-reload pass while generating rtl. */
8165 reload_completed = 1;
8167 /* Mark the end of the (empty) prologue. */
8168 emit_note (NOTE_INSN_PROLOGUE_END);
8170 /* Pick a global pointer. Use a call-clobbered register if
8171 TARGET_CALL_SAVED_GP, so that we can use a sibcall. */
8174 cfun->machine->global_pointer =
8175 TARGET_CALL_SAVED_GP ? 15 : GLOBAL_POINTER_REGNUM;
8177 SET_REGNO (pic_offset_table_rtx, cfun->machine->global_pointer);
8181 /* Set up the global pointer for n32 or n64 abicalls. If
8182 LOADGP_ABSOLUTE then the thunk does not use the gp and there is
8183 no need to load it.*/
8184 if (mips_current_loadgp_style () != LOADGP_ABSOLUTE
8185 || !targetm.binds_local_p (function))
8186 mips_emit_loadgp ();
8188 /* We need two temporary registers in some cases. */
8189 temp1 = gen_rtx_REG (Pmode, 2);
8190 temp2 = gen_rtx_REG (Pmode, 3);
8192 /* Find out which register contains the "this" pointer. */
8193 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8194 this = gen_rtx_REG (Pmode, GP_ARG_FIRST + 1);
8196 this = gen_rtx_REG (Pmode, GP_ARG_FIRST);
8198 /* Add DELTA to THIS. */
8201 rtx offset = GEN_INT (delta);
8202 if (!SMALL_OPERAND (delta))
8204 mips_emit_move (temp1, offset);
8207 emit_insn (gen_add3_insn (this, this, offset));
8210 /* If needed, add *(*THIS + VCALL_OFFSET) to THIS. */
8211 if (vcall_offset != 0)
8215 /* Set TEMP1 to *THIS. */
8216 mips_emit_move (temp1, gen_rtx_MEM (Pmode, this));
8218 /* Set ADDR to a legitimate address for *THIS + VCALL_OFFSET. */
8219 addr = mips_add_offset (temp2, temp1, vcall_offset);
8221 /* Load the offset and add it to THIS. */
8222 mips_emit_move (temp1, gen_rtx_MEM (Pmode, addr));
8223 emit_insn (gen_add3_insn (this, this, temp1));
8226 /* Jump to the target function. Use a sibcall if direct jumps are
8227 allowed, otherwise load the address into a register first. */
8228 fnaddr = XEXP (DECL_RTL (function), 0);
8229 if (TARGET_MIPS16 || TARGET_USE_GOT || SYMBOL_REF_LONG_CALL_P (fnaddr))
8231 /* This is messy. gas treats "la $25,foo" as part of a call
8232 sequence and may allow a global "foo" to be lazily bound.
8233 The general move patterns therefore reject this combination.
8235 In this context, lazy binding would actually be OK
8236 for TARGET_CALL_CLOBBERED_GP, but it's still wrong for
8237 TARGET_CALL_SAVED_GP; see mips_load_call_address.
8238 We must therefore load the address via a temporary
8239 register if mips_dangerous_for_la25_p.
8241 If we jump to the temporary register rather than $25, the assembler
8242 can use the move insn to fill the jump's delay slot. */
8243 if (TARGET_USE_PIC_FN_ADDR_REG
8244 && !mips_dangerous_for_la25_p (fnaddr))
8245 temp1 = gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM);
8246 mips_load_call_address (temp1, fnaddr, true);
8248 if (TARGET_USE_PIC_FN_ADDR_REG
8249 && REGNO (temp1) != PIC_FUNCTION_ADDR_REGNUM)
8250 mips_emit_move (gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM), temp1);
8251 emit_jump_insn (gen_indirect_jump (temp1));
8255 insn = emit_call_insn (gen_sibcall_internal (fnaddr, const0_rtx));
8256 SIBLING_CALL_P (insn) = 1;
8259 /* Run just enough of rest_of_compilation. This sequence was
8260 "borrowed" from alpha.c. */
8261 insn = get_insns ();
8262 insn_locators_alloc ();
8263 split_all_insns_noflow ();
8264 mips16_lay_out_constants ();
8265 shorten_branches (insn);
8266 final_start_function (insn, file, 1);
8267 final (insn, file, 1);
8268 final_end_function ();
8270 /* Clean up the vars set above. Note that final_end_function resets
8271 the global pointer for us. */
8272 reload_completed = 0;
8275 /* Returns nonzero if X contains a SYMBOL_REF. */
8278 symbolic_expression_p (rtx x)
8280 if (GET_CODE (x) == SYMBOL_REF)
8283 if (GET_CODE (x) == CONST)
8284 return symbolic_expression_p (XEXP (x, 0));
8287 return symbolic_expression_p (XEXP (x, 0));
8289 if (ARITHMETIC_P (x))
8290 return (symbolic_expression_p (XEXP (x, 0))
8291 || symbolic_expression_p (XEXP (x, 1)));
8296 /* Choose the section to use for the constant rtx expression X that has
8300 mips_select_rtx_section (enum machine_mode mode, rtx x,
8301 unsigned HOST_WIDE_INT align)
8303 if (TARGET_EMBEDDED_DATA)
8305 /* For embedded applications, always put constants in read-only data,
8306 in order to reduce RAM usage. */
8307 return mergeable_constant_section (mode, align, 0);
8311 /* For hosted applications, always put constants in small data if
8312 possible, as this gives the best performance. */
8313 /* ??? Consider using mergeable small data sections. */
8315 if (GET_MODE_SIZE (mode) <= (unsigned) mips_section_threshold
8316 && mips_section_threshold > 0)
8317 return get_named_section (NULL, ".sdata", 0);
8318 else if (flag_pic && symbolic_expression_p (x))
8319 return get_named_section (NULL, ".data.rel.ro", 3);
8321 return mergeable_constant_section (mode, align, 0);
8325 /* Implement TARGET_ASM_FUNCTION_RODATA_SECTION.
8327 The complication here is that, with the combination TARGET_ABICALLS
8328 && !TARGET_GPWORD, jump tables will use absolute addresses, and should
8329 therefore not be included in the read-only part of a DSO. Handle such
8330 cases by selecting a normal data section instead of a read-only one.
8331 The logic apes that in default_function_rodata_section. */
8334 mips_function_rodata_section (tree decl)
8336 if (!TARGET_ABICALLS || TARGET_GPWORD)
8337 return default_function_rodata_section (decl);
8339 if (decl && DECL_SECTION_NAME (decl))
8341 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
8342 if (DECL_ONE_ONLY (decl) && strncmp (name, ".gnu.linkonce.t.", 16) == 0)
8344 char *rname = ASTRDUP (name);
8346 return get_section (rname, SECTION_LINKONCE | SECTION_WRITE, decl);
8348 else if (flag_function_sections && flag_data_sections
8349 && strncmp (name, ".text.", 6) == 0)
8351 char *rname = ASTRDUP (name);
8352 memcpy (rname + 1, "data", 4);
8353 return get_section (rname, SECTION_WRITE, decl);
8356 return data_section;
8359 /* Implement TARGET_IN_SMALL_DATA_P. This function controls whether
8360 locally-defined objects go in a small data section. It also controls
8361 the setting of the SYMBOL_REF_SMALL_P flag, which in turn helps
8362 mips_classify_symbol decide when to use %gp_rel(...)($gp) accesses. */
8365 mips_in_small_data_p (tree decl)
8369 if (TREE_CODE (decl) == STRING_CST || TREE_CODE (decl) == FUNCTION_DECL)
8372 /* We don't yet generate small-data references for -mabicalls or
8373 VxWorks RTP code. See the related -G handling in override_options. */
8374 if (TARGET_ABICALLS || TARGET_VXWORKS_RTP)
8377 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl) != 0)
8381 /* Reject anything that isn't in a known small-data section. */
8382 name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
8383 if (strcmp (name, ".sdata") != 0 && strcmp (name, ".sbss") != 0)
8386 /* If a symbol is defined externally, the assembler will use the
8387 usual -G rules when deciding how to implement macros. */
8388 if (TARGET_EXPLICIT_RELOCS || !DECL_EXTERNAL (decl))
8391 else if (TARGET_EMBEDDED_DATA)
8393 /* Don't put constants into the small data section: we want them
8394 to be in ROM rather than RAM. */
8395 if (TREE_CODE (decl) != VAR_DECL)
8398 if (TREE_READONLY (decl)
8399 && !TREE_SIDE_EFFECTS (decl)
8400 && (!DECL_INITIAL (decl) || TREE_CONSTANT (DECL_INITIAL (decl))))
8404 size = int_size_in_bytes (TREE_TYPE (decl));
8405 return (size > 0 && size <= mips_section_threshold);
8408 /* Implement TARGET_USE_ANCHORS_FOR_SYMBOL_P. We don't want to use
8409 anchors for small data: the GP register acts as an anchor in that
8410 case. We also don't want to use them for PC-relative accesses,
8411 where the PC acts as an anchor. */
8414 mips_use_anchors_for_symbol_p (rtx symbol)
8416 switch (mips_classify_symbol (symbol, SYMBOL_CONTEXT_MEM))
8418 case SYMBOL_PC_RELATIVE:
8419 case SYMBOL_GP_RELATIVE:
8427 /* See whether VALTYPE is a record whose fields should be returned in
8428 floating-point registers. If so, return the number of fields and
8429 list them in FIELDS (which should have two elements). Return 0
8432 For n32 & n64, a structure with one or two fields is returned in
8433 floating-point registers as long as every field has a floating-point
8437 mips_fpr_return_fields (tree valtype, tree *fields)
8445 if (TREE_CODE (valtype) != RECORD_TYPE)
8449 for (field = TYPE_FIELDS (valtype); field != 0; field = TREE_CHAIN (field))
8451 if (TREE_CODE (field) != FIELD_DECL)
8454 if (TREE_CODE (TREE_TYPE (field)) != REAL_TYPE)
8460 fields[i++] = field;
8466 /* Implement TARGET_RETURN_IN_MSB. For n32 & n64, we should return
8467 a value in the most significant part of $2/$3 if:
8469 - the target is big-endian;
8471 - the value has a structure or union type (we generalize this to
8472 cover aggregates from other languages too); and
8474 - the structure is not returned in floating-point registers. */
8477 mips_return_in_msb (tree valtype)
8481 return (TARGET_NEWABI
8482 && TARGET_BIG_ENDIAN
8483 && AGGREGATE_TYPE_P (valtype)
8484 && mips_fpr_return_fields (valtype, fields) == 0);
8488 /* Return a composite value in a pair of floating-point registers.
8489 MODE1 and OFFSET1 are the mode and byte offset for the first value,
8490 likewise MODE2 and OFFSET2 for the second. MODE is the mode of the
8493 For n32 & n64, $f0 always holds the first value and $f2 the second.
8494 Otherwise the values are packed together as closely as possible. */
8497 mips_return_fpr_pair (enum machine_mode mode,
8498 enum machine_mode mode1, HOST_WIDE_INT offset1,
8499 enum machine_mode mode2, HOST_WIDE_INT offset2)
8503 inc = (TARGET_NEWABI ? 2 : MAX_FPRS_PER_FMT);
8504 return gen_rtx_PARALLEL
8507 gen_rtx_EXPR_LIST (VOIDmode,
8508 gen_rtx_REG (mode1, FP_RETURN),
8510 gen_rtx_EXPR_LIST (VOIDmode,
8511 gen_rtx_REG (mode2, FP_RETURN + inc),
8512 GEN_INT (offset2))));
8517 /* Implement FUNCTION_VALUE and LIBCALL_VALUE. For normal calls,
8518 VALTYPE is the return type and MODE is VOIDmode. For libcalls,
8519 VALTYPE is null and MODE is the mode of the return value. */
8522 mips_function_value (tree valtype, tree func ATTRIBUTE_UNUSED,
8523 enum machine_mode mode)
8530 mode = TYPE_MODE (valtype);
8531 unsignedp = TYPE_UNSIGNED (valtype);
8533 /* Since we define TARGET_PROMOTE_FUNCTION_RETURN that returns
8534 true, we must promote the mode just as PROMOTE_MODE does. */
8535 mode = promote_mode (valtype, mode, &unsignedp, 1);
8537 /* Handle structures whose fields are returned in $f0/$f2. */
8538 switch (mips_fpr_return_fields (valtype, fields))
8541 return gen_rtx_REG (mode, FP_RETURN);
8544 return mips_return_fpr_pair (mode,
8545 TYPE_MODE (TREE_TYPE (fields[0])),
8546 int_byte_position (fields[0]),
8547 TYPE_MODE (TREE_TYPE (fields[1])),
8548 int_byte_position (fields[1]));
8551 /* If a value is passed in the most significant part of a register, see
8552 whether we have to round the mode up to a whole number of words. */
8553 if (mips_return_in_msb (valtype))
8555 HOST_WIDE_INT size = int_size_in_bytes (valtype);
8556 if (size % UNITS_PER_WORD != 0)
8558 size += UNITS_PER_WORD - size % UNITS_PER_WORD;
8559 mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
8563 /* For EABI, the class of return register depends entirely on MODE.
8564 For example, "struct { some_type x; }" and "union { some_type x; }"
8565 are returned in the same way as a bare "some_type" would be.
8566 Other ABIs only use FPRs for scalar, complex or vector types. */
8567 if (mips_abi != ABI_EABI && !FLOAT_TYPE_P (valtype))
8568 return gen_rtx_REG (mode, GP_RETURN);
8573 /* Handle long doubles for n32 & n64. */
8575 return mips_return_fpr_pair (mode,
8577 DImode, GET_MODE_SIZE (mode) / 2);
8579 if (mips_return_mode_in_fpr_p (mode))
8581 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8582 return mips_return_fpr_pair (mode,
8583 GET_MODE_INNER (mode), 0,
8584 GET_MODE_INNER (mode),
8585 GET_MODE_SIZE (mode) / 2);
8587 return gen_rtx_REG (mode, FP_RETURN);
8591 return gen_rtx_REG (mode, GP_RETURN);
8594 /* Return nonzero when an argument must be passed by reference. */
8597 mips_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
8598 enum machine_mode mode, tree type,
8599 bool named ATTRIBUTE_UNUSED)
8601 if (mips_abi == ABI_EABI)
8605 /* ??? How should SCmode be handled? */
8606 if (mode == DImode || mode == DFmode)
8609 size = type ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
8610 return size == -1 || size > UNITS_PER_WORD;
8614 /* If we have a variable-sized parameter, we have no choice. */
8615 return targetm.calls.must_pass_in_stack (mode, type);
8620 mips_callee_copies (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
8621 enum machine_mode mode ATTRIBUTE_UNUSED,
8622 tree type ATTRIBUTE_UNUSED, bool named)
8624 return mips_abi == ABI_EABI && named;
8627 /* Return true if registers of class CLASS cannot change from mode FROM
8631 mips_cannot_change_mode_class (enum machine_mode from,
8632 enum machine_mode to, enum reg_class class)
8634 if (MIN (GET_MODE_SIZE (from), GET_MODE_SIZE (to)) <= UNITS_PER_WORD
8635 && MAX (GET_MODE_SIZE (from), GET_MODE_SIZE (to)) > UNITS_PER_WORD)
8637 if (TARGET_BIG_ENDIAN)
8639 /* When a multi-word value is stored in paired floating-point
8640 registers, the first register always holds the low word.
8641 We therefore can't allow FPRs to change between single-word
8642 and multi-word modes. */
8643 if (MAX_FPRS_PER_FMT > 1 && reg_classes_intersect_p (FP_REGS, class))
8648 /* gcc assumes that each word of a multiword register can be accessed
8649 individually using SUBREGs. This is not true for floating-point
8650 registers if they are bigger than a word. */
8651 if (UNITS_PER_FPREG > UNITS_PER_WORD
8652 && GET_MODE_SIZE (from) > UNITS_PER_WORD
8653 && GET_MODE_SIZE (to) < UNITS_PER_FPREG
8654 && reg_classes_intersect_p (FP_REGS, class))
8657 /* Loading a 32-bit value into a 64-bit floating-point register
8658 will not sign-extend the value, despite what LOAD_EXTEND_OP says.
8659 We can't allow 64-bit float registers to change from SImode to
8664 && GET_MODE_SIZE (to) >= UNITS_PER_WORD
8665 && reg_classes_intersect_p (FP_REGS, class))
8671 /* Return true if X should not be moved directly into register $25.
8672 We need this because many versions of GAS will treat "la $25,foo" as
8673 part of a call sequence and so allow a global "foo" to be lazily bound. */
8676 mips_dangerous_for_la25_p (rtx x)
8678 return (!TARGET_EXPLICIT_RELOCS
8680 && GET_CODE (x) == SYMBOL_REF
8681 && mips_global_symbol_p (x));
8684 /* Implement PREFERRED_RELOAD_CLASS. */
8687 mips_preferred_reload_class (rtx x, enum reg_class class)
8689 if (mips_dangerous_for_la25_p (x) && reg_class_subset_p (LEA_REGS, class))
8692 if (TARGET_HARD_FLOAT
8693 && FLOAT_MODE_P (GET_MODE (x))
8694 && reg_class_subset_p (FP_REGS, class))
8697 if (reg_class_subset_p (GR_REGS, class))
8700 if (TARGET_MIPS16 && reg_class_subset_p (M16_REGS, class))
8706 /* This function returns the register class required for a secondary
8707 register when copying between one of the registers in CLASS, and X,
8708 using MODE. If IN_P is nonzero, the copy is going from X to the
8709 register, otherwise the register is the source. A return value of
8710 NO_REGS means that no secondary register is required. */
8713 mips_secondary_reload_class (enum reg_class class,
8714 enum machine_mode mode, rtx x, int in_p)
8716 enum reg_class gr_regs = TARGET_MIPS16 ? M16_REGS : GR_REGS;
8720 if (REG_P (x)|| GET_CODE (x) == SUBREG)
8721 regno = true_regnum (x);
8723 gp_reg_p = TARGET_MIPS16 ? M16_REG_P (regno) : GP_REG_P (regno);
8725 if (mips_dangerous_for_la25_p (x))
8728 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], 25))
8732 /* Copying from HI or LO to anywhere other than a general register
8733 requires a general register.
8734 This rule applies to both the original HI/LO pair and the new
8735 DSP accumulators. */
8736 if (reg_class_subset_p (class, ACC_REGS))
8738 if (TARGET_MIPS16 && in_p)
8740 /* We can't really copy to HI or LO at all in mips16 mode. */
8743 return gp_reg_p ? NO_REGS : gr_regs;
8745 if (ACC_REG_P (regno))
8747 if (TARGET_MIPS16 && ! in_p)
8749 /* We can't really copy to HI or LO at all in mips16 mode. */
8752 return class == gr_regs ? NO_REGS : gr_regs;
8755 /* We can only copy a value to a condition code register from a
8756 floating point register, and even then we require a scratch
8757 floating point register. We can only copy a value out of a
8758 condition code register into a general register. */
8759 if (class == ST_REGS)
8763 return gp_reg_p ? NO_REGS : gr_regs;
8765 if (ST_REG_P (regno))
8769 return class == gr_regs ? NO_REGS : gr_regs;
8772 if (class == FP_REGS)
8776 /* In this case we can use lwc1, swc1, ldc1 or sdc1. */
8779 else if (CONSTANT_P (x) && GET_MODE_CLASS (mode) == MODE_FLOAT)
8781 /* We can use the l.s and l.d macros to load floating-point
8782 constants. ??? For l.s, we could probably get better
8783 code by returning GR_REGS here. */
8786 else if (gp_reg_p || x == CONST0_RTX (mode))
8788 /* In this case we can use mtc1, mfc1, dmtc1 or dmfc1. */
8791 else if (FP_REG_P (regno))
8793 /* In this case we can use mov.s or mov.d. */
8798 /* Otherwise, we need to reload through an integer register. */
8803 /* In mips16 mode, going between memory and anything but M16_REGS
8804 requires an M16_REG. */
8807 if (class != M16_REGS && class != M16_NA_REGS)
8815 if (class == M16_REGS || class == M16_NA_REGS)
8824 /* Implement CLASS_MAX_NREGS.
8826 - UNITS_PER_FPREG controls the number of registers needed by FP_REGS.
8828 - ST_REGS are always hold CCmode values, and CCmode values are
8829 considered to be 4 bytes wide.
8831 All other register classes are covered by UNITS_PER_WORD. Note that
8832 this is true even for unions of integer and float registers when the
8833 latter are smaller than the former. The only supported combination
8834 in which case this occurs is -mgp64 -msingle-float, which has 64-bit
8835 words but 32-bit float registers. A word-based calculation is correct
8836 in that case since -msingle-float disallows multi-FPR values. */
8839 mips_class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,
8840 enum machine_mode mode)
8842 if (class == ST_REGS)
8843 return (GET_MODE_SIZE (mode) + 3) / 4;
8844 else if (class == FP_REGS)
8845 return (GET_MODE_SIZE (mode) + UNITS_PER_FPREG - 1) / UNITS_PER_FPREG;
8847 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
8851 mips_valid_pointer_mode (enum machine_mode mode)
8853 return (mode == SImode || (TARGET_64BIT && mode == DImode));
8856 /* Target hook for vector_mode_supported_p. */
8859 mips_vector_mode_supported_p (enum machine_mode mode)
8864 return TARGET_PAIRED_SINGLE_FLOAT;
8875 /* If we can access small data directly (using gp-relative relocation
8876 operators) return the small data pointer, otherwise return null.
8878 For each mips16 function which refers to GP relative symbols, we
8879 use a pseudo register, initialized at the start of the function, to
8880 hold the $gp value. */
8883 mips16_gp_pseudo_reg (void)
8885 if (cfun->machine->mips16_gp_pseudo_rtx == NULL_RTX)
8886 cfun->machine->mips16_gp_pseudo_rtx = gen_reg_rtx (Pmode);
8888 /* Don't initialize the pseudo register if we are being called from
8889 the tree optimizers' cost-calculation routines. */
8890 if (!cfun->machine->initialized_mips16_gp_pseudo_p
8891 && (current_ir_type () != IR_GIMPLE || currently_expanding_to_rtl))
8895 /* We want to initialize this to a value which gcc will believe
8897 insn = gen_load_const_gp (cfun->machine->mips16_gp_pseudo_rtx);
8899 push_topmost_sequence ();
8900 /* We need to emit the initialization after the FUNCTION_BEG
8901 note, so that it will be integrated. */
8902 for (scan = get_insns (); scan != NULL_RTX; scan = NEXT_INSN (scan))
8904 && NOTE_KIND (scan) == NOTE_INSN_FUNCTION_BEG)
8906 if (scan == NULL_RTX)
8907 scan = get_insns ();
8908 insn = emit_insn_after (insn, scan);
8909 pop_topmost_sequence ();
8911 cfun->machine->initialized_mips16_gp_pseudo_p = true;
8914 return cfun->machine->mips16_gp_pseudo_rtx;
8917 /* Write out code to move floating point arguments in or out of
8918 general registers. Output the instructions to FILE. FP_CODE is
8919 the code describing which arguments are present (see the comment at
8920 the definition of CUMULATIVE_ARGS in mips.h). FROM_FP_P is nonzero if
8921 we are copying from the floating point registers. */
8924 mips16_fp_args (FILE *file, int fp_code, int from_fp_p)
8929 CUMULATIVE_ARGS cum;
8931 /* This code only works for the original 32-bit ABI and the O64 ABI. */
8932 gcc_assert (TARGET_OLDABI);
8939 init_cumulative_args (&cum, NULL, NULL);
8941 for (f = (unsigned int) fp_code; f != 0; f >>= 2)
8943 enum machine_mode mode;
8944 struct mips_arg_info info;
8948 else if ((f & 3) == 2)
8953 mips_arg_info (&cum, mode, NULL, true, &info);
8954 gparg = mips_arg_regno (&info, false);
8955 fparg = mips_arg_regno (&info, true);
8958 fprintf (file, "\t%s\t%s,%s\n", s,
8959 reg_names[gparg], reg_names[fparg]);
8960 else if (TARGET_64BIT)
8961 fprintf (file, "\td%s\t%s,%s\n", s,
8962 reg_names[gparg], reg_names[fparg]);
8963 else if (ISA_HAS_MXHC1)
8964 /* -mips32r2 -mfp64 */
8965 fprintf (file, "\t%s\t%s,%s\n\t%s\t%s,%s\n",
8967 reg_names[gparg + (WORDS_BIG_ENDIAN ? 1 : 0)],
8969 from_fp_p ? "mfhc1" : "mthc1",
8970 reg_names[gparg + (WORDS_BIG_ENDIAN ? 0 : 1)],
8972 else if (TARGET_BIG_ENDIAN)
8973 fprintf (file, "\t%s\t%s,%s\n\t%s\t%s,%s\n", s,
8974 reg_names[gparg], reg_names[fparg + 1], s,
8975 reg_names[gparg + 1], reg_names[fparg]);
8977 fprintf (file, "\t%s\t%s,%s\n\t%s\t%s,%s\n", s,
8978 reg_names[gparg], reg_names[fparg], s,
8979 reg_names[gparg + 1], reg_names[fparg + 1]);
8981 function_arg_advance (&cum, mode, NULL, true);
8985 /* Build a mips16 function stub. This is used for functions which
8986 take arguments in the floating point registers. It is 32-bit code
8987 that moves the floating point args into the general registers, and
8988 then jumps to the 16-bit code. */
8991 build_mips16_function_stub (FILE *file)
8994 char *secname, *stubname;
8995 tree stubid, stubdecl;
8999 fnname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9000 secname = (char *) alloca (strlen (fnname) + 20);
9001 sprintf (secname, ".mips16.fn.%s", fnname);
9002 stubname = (char *) alloca (strlen (fnname) + 20);
9003 sprintf (stubname, "__fn_stub_%s", fnname);
9004 stubid = get_identifier (stubname);
9005 stubdecl = build_decl (FUNCTION_DECL, stubid,
9006 build_function_type (void_type_node, NULL_TREE));
9007 DECL_SECTION_NAME (stubdecl) = build_string (strlen (secname), secname);
9008 DECL_RESULT (stubdecl) = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
9010 fprintf (file, "\t# Stub function for %s (", current_function_name ());
9012 for (f = (unsigned int) current_function_args_info.fp_code; f != 0; f >>= 2)
9014 fprintf (file, "%s%s",
9015 need_comma ? ", " : "",
9016 (f & 3) == 1 ? "float" : "double");
9019 fprintf (file, ")\n");
9021 fprintf (file, "\t.set\tnomips16\n");
9022 switch_to_section (function_section (stubdecl));
9023 ASM_OUTPUT_ALIGN (file, floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT));
9025 /* ??? If FUNCTION_NAME_ALREADY_DECLARED is defined, then we are
9026 within a .ent, and we cannot emit another .ent. */
9027 if (!FUNCTION_NAME_ALREADY_DECLARED)
9029 fputs ("\t.ent\t", file);
9030 assemble_name (file, stubname);
9034 assemble_name (file, stubname);
9035 fputs (":\n", file);
9037 /* We don't want the assembler to insert any nops here. */
9038 fprintf (file, "\t.set\tnoreorder\n");
9040 mips16_fp_args (file, current_function_args_info.fp_code, 1);
9042 fprintf (asm_out_file, "\t.set\tnoat\n");
9043 fprintf (asm_out_file, "\tla\t%s,", reg_names[GP_REG_FIRST + 1]);
9044 assemble_name (file, fnname);
9045 fprintf (file, "\n");
9046 fprintf (asm_out_file, "\tjr\t%s\n", reg_names[GP_REG_FIRST + 1]);
9047 fprintf (asm_out_file, "\t.set\tat\n");
9049 /* Unfortunately, we can't fill the jump delay slot. We can't fill
9050 with one of the mfc1 instructions, because the result is not
9051 available for one instruction, so if the very first instruction
9052 in the function refers to the register, it will see the wrong
9054 fprintf (file, "\tnop\n");
9056 fprintf (file, "\t.set\treorder\n");
9058 if (!FUNCTION_NAME_ALREADY_DECLARED)
9060 fputs ("\t.end\t", file);
9061 assemble_name (file, stubname);
9065 fprintf (file, "\t.set\tmips16\n");
9067 switch_to_section (function_section (current_function_decl));
9070 /* We keep a list of functions for which we have already built stubs
9071 in build_mips16_call_stub. */
9075 struct mips16_stub *next;
9080 static struct mips16_stub *mips16_stubs;
9082 /* Emit code to return a double value from a mips16 stub. GPREG is the
9083 first GP reg to use, FPREG is the first FP reg to use. */
9086 mips16_fpret_double (int gpreg, int fpreg)
9089 fprintf (asm_out_file, "\tdmfc1\t%s,%s\n",
9090 reg_names[gpreg], reg_names[fpreg]);
9091 else if (TARGET_FLOAT64)
9093 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9094 reg_names[gpreg + WORDS_BIG_ENDIAN],
9096 fprintf (asm_out_file, "\tmfhc1\t%s,%s\n",
9097 reg_names[gpreg + !WORDS_BIG_ENDIAN],
9102 if (TARGET_BIG_ENDIAN)
9104 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9105 reg_names[gpreg + 0],
9106 reg_names[fpreg + 1]);
9107 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9108 reg_names[gpreg + 1],
9109 reg_names[fpreg + 0]);
9113 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9114 reg_names[gpreg + 0],
9115 reg_names[fpreg + 0]);
9116 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9117 reg_names[gpreg + 1],
9118 reg_names[fpreg + 1]);
9123 /* Build a call stub for a mips16 call. A stub is needed if we are
9124 passing any floating point values which should go into the floating
9125 point registers. If we are, and the call turns out to be to a
9126 32-bit function, the stub will be used to move the values into the
9127 floating point registers before calling the 32-bit function. The
9128 linker will magically adjust the function call to either the 16-bit
9129 function or the 32-bit stub, depending upon where the function call
9130 is actually defined.
9132 Similarly, we need a stub if the return value might come back in a
9133 floating point register.
9135 RETVAL is the location of the return value, or null if this is
9136 a call rather than a call_value. FN is the address of the
9137 function and ARG_SIZE is the size of the arguments. FP_CODE
9138 is the code built by function_arg. This function returns a nonzero
9139 value if it builds the call instruction itself. */
9142 build_mips16_call_stub (rtx retval, rtx fn, rtx arg_size, int fp_code)
9146 char *secname, *stubname;
9147 struct mips16_stub *l;
9148 tree stubid, stubdecl;
9152 /* We don't need to do anything if we aren't in mips16 mode, or if
9153 we were invoked with the -msoft-float option. */
9154 if (!TARGET_MIPS16 || TARGET_SOFT_FLOAT_ABI)
9157 /* Figure out whether the value might come back in a floating point
9160 fpret = mips_return_mode_in_fpr_p (GET_MODE (retval));
9162 /* We don't need to do anything if there were no floating point
9163 arguments and the value will not be returned in a floating point
9165 if (fp_code == 0 && ! fpret)
9168 /* We don't need to do anything if this is a call to a special
9169 mips16 support function. */
9170 if (GET_CODE (fn) == SYMBOL_REF
9171 && strncmp (XSTR (fn, 0), "__mips16_", 9) == 0)
9174 /* This code will only work for o32 and o64 abis. The other ABI's
9175 require more sophisticated support. */
9176 gcc_assert (TARGET_OLDABI);
9178 /* If we're calling via a function pointer, then we must always call
9179 via a stub. There are magic stubs provided in libgcc.a for each
9180 of the required cases. Each of them expects the function address
9181 to arrive in register $2. */
9183 if (GET_CODE (fn) != SYMBOL_REF)
9189 /* ??? If this code is modified to support other ABI's, we need
9190 to handle PARALLEL return values here. */
9193 sprintf (buf, "__mips16_call_stub_%s_%d",
9194 mips16_call_stub_mode_suffix (GET_MODE (retval)),
9197 sprintf (buf, "__mips16_call_stub_%d",
9200 id = get_identifier (buf);
9201 stub_fn = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (id));
9203 mips_emit_move (gen_rtx_REG (Pmode, 2), fn);
9205 if (retval == NULL_RTX)
9206 insn = gen_call_internal (stub_fn, arg_size);
9208 insn = gen_call_value_internal (retval, stub_fn, arg_size);
9209 insn = emit_call_insn (insn);
9211 /* Put the register usage information on the CALL. */
9212 CALL_INSN_FUNCTION_USAGE (insn) =
9213 gen_rtx_EXPR_LIST (VOIDmode,
9214 gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 2)),
9215 CALL_INSN_FUNCTION_USAGE (insn));
9217 /* If we are handling a floating point return value, we need to
9218 save $18 in the function prologue. Putting a note on the
9219 call will mean that df_regs_ever_live_p ($18) will be true if the
9220 call is not eliminated, and we can check that in the prologue
9223 CALL_INSN_FUNCTION_USAGE (insn) =
9224 gen_rtx_EXPR_LIST (VOIDmode,
9225 gen_rtx_USE (VOIDmode,
9226 gen_rtx_REG (word_mode, 18)),
9227 CALL_INSN_FUNCTION_USAGE (insn));
9229 /* Return 1 to tell the caller that we've generated the call
9234 /* We know the function we are going to call. If we have already
9235 built a stub, we don't need to do anything further. */
9237 fnname = XSTR (fn, 0);
9238 for (l = mips16_stubs; l != NULL; l = l->next)
9239 if (strcmp (l->name, fnname) == 0)
9244 /* Build a special purpose stub. When the linker sees a
9245 function call in mips16 code, it will check where the target
9246 is defined. If the target is a 32-bit call, the linker will
9247 search for the section defined here. It can tell which
9248 symbol this section is associated with by looking at the
9249 relocation information (the name is unreliable, since this
9250 might be a static function). If such a section is found, the
9251 linker will redirect the call to the start of the magic
9254 If the function does not return a floating point value, the
9255 special stub section is named
9258 If the function does return a floating point value, the stub
9260 .mips16.call.fp.FNNAME
9263 secname = (char *) alloca (strlen (fnname) + 40);
9264 sprintf (secname, ".mips16.call.%s%s",
9267 stubname = (char *) alloca (strlen (fnname) + 20);
9268 sprintf (stubname, "__call_stub_%s%s",
9271 stubid = get_identifier (stubname);
9272 stubdecl = build_decl (FUNCTION_DECL, stubid,
9273 build_function_type (void_type_node, NULL_TREE));
9274 DECL_SECTION_NAME (stubdecl) = build_string (strlen (secname), secname);
9275 DECL_RESULT (stubdecl) = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
9277 fprintf (asm_out_file, "\t# Stub function to call %s%s (",
9279 ? (GET_MODE (retval) == SFmode ? "float " : "double ")
9283 for (f = (unsigned int) fp_code; f != 0; f >>= 2)
9285 fprintf (asm_out_file, "%s%s",
9286 need_comma ? ", " : "",
9287 (f & 3) == 1 ? "float" : "double");
9290 fprintf (asm_out_file, ")\n");
9292 fprintf (asm_out_file, "\t.set\tnomips16\n");
9293 assemble_start_function (stubdecl, stubname);
9295 if (!FUNCTION_NAME_ALREADY_DECLARED)
9297 fputs ("\t.ent\t", asm_out_file);
9298 assemble_name (asm_out_file, stubname);
9299 fputs ("\n", asm_out_file);
9301 assemble_name (asm_out_file, stubname);
9302 fputs (":\n", asm_out_file);
9305 /* We build the stub code by hand. That's the only way we can
9306 do it, since we can't generate 32-bit code during a 16-bit
9309 /* We don't want the assembler to insert any nops here. */
9310 fprintf (asm_out_file, "\t.set\tnoreorder\n");
9312 mips16_fp_args (asm_out_file, fp_code, 0);
9316 fprintf (asm_out_file, "\t.set\tnoat\n");
9317 fprintf (asm_out_file, "\tla\t%s,%s\n", reg_names[GP_REG_FIRST + 1],
9319 fprintf (asm_out_file, "\tjr\t%s\n", reg_names[GP_REG_FIRST + 1]);
9320 fprintf (asm_out_file, "\t.set\tat\n");
9321 /* Unfortunately, we can't fill the jump delay slot. We
9322 can't fill with one of the mtc1 instructions, because the
9323 result is not available for one instruction, so if the
9324 very first instruction in the function refers to the
9325 register, it will see the wrong value. */
9326 fprintf (asm_out_file, "\tnop\n");
9330 fprintf (asm_out_file, "\tmove\t%s,%s\n",
9331 reg_names[GP_REG_FIRST + 18], reg_names[GP_REG_FIRST + 31]);
9332 fprintf (asm_out_file, "\tjal\t%s\n", fnname);
9333 /* As above, we can't fill the delay slot. */
9334 fprintf (asm_out_file, "\tnop\n");
9335 if (GET_MODE (retval) == SFmode)
9336 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9337 reg_names[GP_REG_FIRST + 2], reg_names[FP_REG_FIRST + 0]);
9338 else if (GET_MODE (retval) == SCmode)
9340 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9341 reg_names[GP_REG_FIRST + 2],
9342 reg_names[FP_REG_FIRST + 0]);
9343 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9344 reg_names[GP_REG_FIRST + 3],
9345 reg_names[FP_REG_FIRST + MAX_FPRS_PER_FMT]);
9347 else if (GET_MODE (retval) == DFmode
9348 || GET_MODE (retval) == V2SFmode)
9350 mips16_fpret_double (GP_REG_FIRST + 2, FP_REG_FIRST + 0);
9352 else if (GET_MODE (retval) == DCmode)
9354 mips16_fpret_double (GP_REG_FIRST + 2,
9356 mips16_fpret_double (GP_REG_FIRST + 4,
9357 FP_REG_FIRST + MAX_FPRS_PER_FMT);
9361 if (TARGET_BIG_ENDIAN)
9363 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9364 reg_names[GP_REG_FIRST + 2],
9365 reg_names[FP_REG_FIRST + 1]);
9366 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9367 reg_names[GP_REG_FIRST + 3],
9368 reg_names[FP_REG_FIRST + 0]);
9372 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9373 reg_names[GP_REG_FIRST + 2],
9374 reg_names[FP_REG_FIRST + 0]);
9375 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9376 reg_names[GP_REG_FIRST + 3],
9377 reg_names[FP_REG_FIRST + 1]);
9380 fprintf (asm_out_file, "\tj\t%s\n", reg_names[GP_REG_FIRST + 18]);
9381 /* As above, we can't fill the delay slot. */
9382 fprintf (asm_out_file, "\tnop\n");
9385 fprintf (asm_out_file, "\t.set\treorder\n");
9387 #ifdef ASM_DECLARE_FUNCTION_SIZE
9388 ASM_DECLARE_FUNCTION_SIZE (asm_out_file, stubname, stubdecl);
9391 if (!FUNCTION_NAME_ALREADY_DECLARED)
9393 fputs ("\t.end\t", asm_out_file);
9394 assemble_name (asm_out_file, stubname);
9395 fputs ("\n", asm_out_file);
9398 fprintf (asm_out_file, "\t.set\tmips16\n");
9400 /* Record this stub. */
9401 l = (struct mips16_stub *) xmalloc (sizeof *l);
9402 l->name = xstrdup (fnname);
9404 l->next = mips16_stubs;
9408 /* If we expect a floating point return value, but we've built a
9409 stub which does not expect one, then we're in trouble. We can't
9410 use the existing stub, because it won't handle the floating point
9411 value. We can't build a new stub, because the linker won't know
9412 which stub to use for the various calls in this object file.
9413 Fortunately, this case is illegal, since it means that a function
9414 was declared in two different ways in a single compilation. */
9415 if (fpret && ! l->fpret)
9416 error ("cannot handle inconsistent calls to %qs", fnname);
9418 /* If we are calling a stub which handles a floating point return
9419 value, we need to arrange to save $18 in the prologue. We do
9420 this by marking the function call as using the register. The
9421 prologue will later see that it is used, and emit code to save
9428 if (retval == NULL_RTX)
9429 insn = gen_call_internal (fn, arg_size);
9431 insn = gen_call_value_internal (retval, fn, arg_size);
9432 insn = emit_call_insn (insn);
9434 CALL_INSN_FUNCTION_USAGE (insn) =
9435 gen_rtx_EXPR_LIST (VOIDmode,
9436 gen_rtx_USE (VOIDmode, gen_rtx_REG (word_mode, 18)),
9437 CALL_INSN_FUNCTION_USAGE (insn));
9439 /* Return 1 to tell the caller that we've generated the call
9444 /* Return 0 to let the caller generate the call insn. */
9448 /* An entry in the mips16 constant pool. VALUE is the pool constant,
9449 MODE is its mode, and LABEL is the CODE_LABEL associated with it. */
9451 struct mips16_constant {
9452 struct mips16_constant *next;
9455 enum machine_mode mode;
9458 /* Information about an incomplete mips16 constant pool. FIRST is the
9459 first constant, HIGHEST_ADDRESS is the highest address that the first
9460 byte of the pool can have, and INSN_ADDRESS is the current instruction
9463 struct mips16_constant_pool {
9464 struct mips16_constant *first;
9465 int highest_address;
9469 /* Add constant VALUE to POOL and return its label. MODE is the
9470 value's mode (used for CONST_INTs, etc.). */
9473 add_constant (struct mips16_constant_pool *pool,
9474 rtx value, enum machine_mode mode)
9476 struct mips16_constant **p, *c;
9477 bool first_of_size_p;
9479 /* See whether the constant is already in the pool. If so, return the
9480 existing label, otherwise leave P pointing to the place where the
9481 constant should be added.
9483 Keep the pool sorted in increasing order of mode size so that we can
9484 reduce the number of alignments needed. */
9485 first_of_size_p = true;
9486 for (p = &pool->first; *p != 0; p = &(*p)->next)
9488 if (mode == (*p)->mode && rtx_equal_p (value, (*p)->value))
9490 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE ((*p)->mode))
9492 if (GET_MODE_SIZE (mode) == GET_MODE_SIZE ((*p)->mode))
9493 first_of_size_p = false;
9496 /* In the worst case, the constant needed by the earliest instruction
9497 will end up at the end of the pool. The entire pool must then be
9498 accessible from that instruction.
9500 When adding the first constant, set the pool's highest address to
9501 the address of the first out-of-range byte. Adjust this address
9502 downwards each time a new constant is added. */
9503 if (pool->first == 0)
9504 /* For pc-relative lw, addiu and daddiu instructions, the base PC value
9505 is the address of the instruction with the lowest two bits clear.
9506 The base PC value for ld has the lowest three bits clear. Assume
9507 the worst case here. */
9508 pool->highest_address = pool->insn_address - (UNITS_PER_WORD - 2) + 0x8000;
9509 pool->highest_address -= GET_MODE_SIZE (mode);
9510 if (first_of_size_p)
9511 /* Take into account the worst possible padding due to alignment. */
9512 pool->highest_address -= GET_MODE_SIZE (mode) - 1;
9514 /* Create a new entry. */
9515 c = (struct mips16_constant *) xmalloc (sizeof *c);
9518 c->label = gen_label_rtx ();
9525 /* Output constant VALUE after instruction INSN and return the last
9526 instruction emitted. MODE is the mode of the constant. */
9529 dump_constants_1 (enum machine_mode mode, rtx value, rtx insn)
9531 switch (GET_MODE_CLASS (mode))
9535 rtx size = GEN_INT (GET_MODE_SIZE (mode));
9536 return emit_insn_after (gen_consttable_int (value, size), insn);
9540 return emit_insn_after (gen_consttable_float (value), insn);
9542 case MODE_VECTOR_FLOAT:
9543 case MODE_VECTOR_INT:
9546 for (i = 0; i < CONST_VECTOR_NUNITS (value); i++)
9547 insn = dump_constants_1 (GET_MODE_INNER (mode),
9548 CONST_VECTOR_ELT (value, i), insn);
9558 /* Dump out the constants in CONSTANTS after INSN. */
9561 dump_constants (struct mips16_constant *constants, rtx insn)
9563 struct mips16_constant *c, *next;
9567 for (c = constants; c != NULL; c = next)
9569 /* If necessary, increase the alignment of PC. */
9570 if (align < GET_MODE_SIZE (c->mode))
9572 int align_log = floor_log2 (GET_MODE_SIZE (c->mode));
9573 insn = emit_insn_after (gen_align (GEN_INT (align_log)), insn);
9575 align = GET_MODE_SIZE (c->mode);
9577 insn = emit_label_after (c->label, insn);
9578 insn = dump_constants_1 (c->mode, c->value, insn);
9584 emit_barrier_after (insn);
9587 /* Return the length of instruction INSN. */
9590 mips16_insn_length (rtx insn)
9594 rtx body = PATTERN (insn);
9595 if (GET_CODE (body) == ADDR_VEC)
9596 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, 0);
9597 if (GET_CODE (body) == ADDR_DIFF_VEC)
9598 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, 1);
9600 return get_attr_length (insn);
9603 /* Rewrite *X so that constant pool references refer to the constant's
9604 label instead. DATA points to the constant pool structure. */
9607 mips16_rewrite_pool_refs (rtx *x, void *data)
9609 struct mips16_constant_pool *pool = data;
9610 rtx base, offset, label;
9614 else if (!TARGET_MIPS16_TEXT_LOADS)
9617 split_const (*x, &base, &offset);
9618 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
9620 label = add_constant (pool, get_pool_constant (base),
9621 get_pool_mode (base));
9622 base = gen_rtx_LABEL_REF (Pmode, label);
9623 *x = mips_unspec_address_offset (base, offset, SYMBOL_PC_RELATIVE);
9626 return GET_CODE (*x) == CONST ? -1 : 0;
9629 /* Build MIPS16 constant pools. */
9632 mips16_lay_out_constants (void)
9634 struct mips16_constant_pool pool;
9637 if (!TARGET_MIPS16_PCREL_LOADS)
9641 memset (&pool, 0, sizeof (pool));
9642 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9644 /* Rewrite constant pool references in INSN. */
9646 for_each_rtx (&PATTERN (insn), mips16_rewrite_pool_refs, &pool);
9648 pool.insn_address += mips16_insn_length (insn);
9650 if (pool.first != NULL)
9652 /* If there are no natural barriers between the first user of
9653 the pool and the highest acceptable address, we'll need to
9654 create a new instruction to jump around the constant pool.
9655 In the worst case, this instruction will be 4 bytes long.
9657 If it's too late to do this transformation after INSN,
9658 do it immediately before INSN. */
9659 if (barrier == 0 && pool.insn_address + 4 > pool.highest_address)
9663 label = gen_label_rtx ();
9665 jump = emit_jump_insn_before (gen_jump (label), insn);
9666 JUMP_LABEL (jump) = label;
9667 LABEL_NUSES (label) = 1;
9668 barrier = emit_barrier_after (jump);
9670 emit_label_after (label, barrier);
9671 pool.insn_address += 4;
9674 /* See whether the constant pool is now out of range of the first
9675 user. If so, output the constants after the previous barrier.
9676 Note that any instructions between BARRIER and INSN (inclusive)
9677 will use negative offsets to refer to the pool. */
9678 if (pool.insn_address > pool.highest_address)
9680 dump_constants (pool.first, barrier);
9684 else if (BARRIER_P (insn))
9688 dump_constants (pool.first, get_last_insn ());
9691 /* A temporary variable used by for_each_rtx callbacks, etc. */
9692 static rtx mips_sim_insn;
9694 /* A structure representing the state of the processor pipeline.
9695 Used by the mips_sim_* family of functions. */
9697 /* The maximum number of instructions that can be issued in a cycle.
9698 (Caches mips_issue_rate.) */
9699 unsigned int issue_rate;
9701 /* The current simulation time. */
9704 /* How many more instructions can be issued in the current cycle. */
9705 unsigned int insns_left;
9707 /* LAST_SET[X].INSN is the last instruction to set register X.
9708 LAST_SET[X].TIME is the time at which that instruction was issued.
9709 INSN is null if no instruction has yet set register X. */
9713 } last_set[FIRST_PSEUDO_REGISTER];
9715 /* The pipeline's current DFA state. */
9719 /* Reset STATE to the initial simulation state. */
9722 mips_sim_reset (struct mips_sim *state)
9725 state->insns_left = state->issue_rate;
9726 memset (&state->last_set, 0, sizeof (state->last_set));
9727 state_reset (state->dfa_state);
9730 /* Initialize STATE before its first use. DFA_STATE points to an
9731 allocated but uninitialized DFA state. */
9734 mips_sim_init (struct mips_sim *state, state_t dfa_state)
9736 state->issue_rate = mips_issue_rate ();
9737 state->dfa_state = dfa_state;
9738 mips_sim_reset (state);
9741 /* Advance STATE by one clock cycle. */
9744 mips_sim_next_cycle (struct mips_sim *state)
9747 state->insns_left = state->issue_rate;
9748 state_transition (state->dfa_state, 0);
9751 /* Advance simulation state STATE until instruction INSN can read
9755 mips_sim_wait_reg (struct mips_sim *state, rtx insn, rtx reg)
9759 for (i = 0; i < HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)); i++)
9760 if (state->last_set[REGNO (reg) + i].insn != 0)
9764 t = state->last_set[REGNO (reg) + i].time;
9765 t += insn_latency (state->last_set[REGNO (reg) + i].insn, insn);
9766 while (state->time < t)
9767 mips_sim_next_cycle (state);
9771 /* A for_each_rtx callback. If *X is a register, advance simulation state
9772 DATA until mips_sim_insn can read the register's value. */
9775 mips_sim_wait_regs_2 (rtx *x, void *data)
9778 mips_sim_wait_reg (data, mips_sim_insn, *x);
9782 /* Call mips_sim_wait_regs_2 (R, DATA) for each register R mentioned in *X. */
9785 mips_sim_wait_regs_1 (rtx *x, void *data)
9787 for_each_rtx (x, mips_sim_wait_regs_2, data);
9790 /* Advance simulation state STATE until all of INSN's register
9791 dependencies are satisfied. */
9794 mips_sim_wait_regs (struct mips_sim *state, rtx insn)
9796 mips_sim_insn = insn;
9797 note_uses (&PATTERN (insn), mips_sim_wait_regs_1, state);
9800 /* Advance simulation state STATE until the units required by
9801 instruction INSN are available. */
9804 mips_sim_wait_units (struct mips_sim *state, rtx insn)
9808 tmp_state = alloca (state_size ());
9809 while (state->insns_left == 0
9810 || (memcpy (tmp_state, state->dfa_state, state_size ()),
9811 state_transition (tmp_state, insn) >= 0))
9812 mips_sim_next_cycle (state);
9815 /* Advance simulation state STATE until INSN is ready to issue. */
9818 mips_sim_wait_insn (struct mips_sim *state, rtx insn)
9820 mips_sim_wait_regs (state, insn);
9821 mips_sim_wait_units (state, insn);
9824 /* mips_sim_insn has just set X. Update the LAST_SET array
9825 in simulation state DATA. */
9828 mips_sim_record_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
9830 struct mips_sim *state;
9835 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
9837 state->last_set[REGNO (x) + i].insn = mips_sim_insn;
9838 state->last_set[REGNO (x) + i].time = state->time;
9842 /* Issue instruction INSN in scheduler state STATE. Assume that INSN
9843 can issue immediately (i.e., that mips_sim_wait_insn has already
9847 mips_sim_issue_insn (struct mips_sim *state, rtx insn)
9849 state_transition (state->dfa_state, insn);
9850 state->insns_left--;
9852 mips_sim_insn = insn;
9853 note_stores (PATTERN (insn), mips_sim_record_set, state);
9856 /* Simulate issuing a NOP in state STATE. */
9859 mips_sim_issue_nop (struct mips_sim *state)
9861 if (state->insns_left == 0)
9862 mips_sim_next_cycle (state);
9863 state->insns_left--;
9866 /* Update simulation state STATE so that it's ready to accept the instruction
9867 after INSN. INSN should be part of the main rtl chain, not a member of a
9871 mips_sim_finish_insn (struct mips_sim *state, rtx insn)
9873 /* If INSN is a jump with an implicit delay slot, simulate a nop. */
9875 mips_sim_issue_nop (state);
9877 switch (GET_CODE (SEQ_BEGIN (insn)))
9881 /* We can't predict the processor state after a call or label. */
9882 mips_sim_reset (state);
9886 /* The delay slots of branch likely instructions are only executed
9887 when the branch is taken. Therefore, if the caller has simulated
9888 the delay slot instruction, STATE does not really reflect the state
9889 of the pipeline for the instruction after the delay slot. Also,
9890 branch likely instructions tend to incur a penalty when not taken,
9891 so there will probably be an extra delay between the branch and
9892 the instruction after the delay slot. */
9893 if (INSN_ANNULLED_BRANCH_P (SEQ_BEGIN (insn)))
9894 mips_sim_reset (state);
9902 /* The VR4130 pipeline issues aligned pairs of instructions together,
9903 but it stalls the second instruction if it depends on the first.
9904 In order to cut down the amount of logic required, this dependence
9905 check is not based on a full instruction decode. Instead, any non-SPECIAL
9906 instruction is assumed to modify the register specified by bits 20-16
9907 (which is usually the "rt" field).
9909 In beq, beql, bne and bnel instructions, the rt field is actually an
9910 input, so we can end up with a false dependence between the branch
9911 and its delay slot. If this situation occurs in instruction INSN,
9912 try to avoid it by swapping rs and rt. */
9915 vr4130_avoid_branch_rt_conflict (rtx insn)
9919 first = SEQ_BEGIN (insn);
9920 second = SEQ_END (insn);
9922 && NONJUMP_INSN_P (second)
9923 && GET_CODE (PATTERN (first)) == SET
9924 && GET_CODE (SET_DEST (PATTERN (first))) == PC
9925 && GET_CODE (SET_SRC (PATTERN (first))) == IF_THEN_ELSE)
9927 /* Check for the right kind of condition. */
9928 rtx cond = XEXP (SET_SRC (PATTERN (first)), 0);
9929 if ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
9930 && REG_P (XEXP (cond, 0))
9931 && REG_P (XEXP (cond, 1))
9932 && reg_referenced_p (XEXP (cond, 1), PATTERN (second))
9933 && !reg_referenced_p (XEXP (cond, 0), PATTERN (second)))
9935 /* SECOND mentions the rt register but not the rs register. */
9936 rtx tmp = XEXP (cond, 0);
9937 XEXP (cond, 0) = XEXP (cond, 1);
9938 XEXP (cond, 1) = tmp;
9943 /* Implement -mvr4130-align. Go through each basic block and simulate the
9944 processor pipeline. If we find that a pair of instructions could execute
9945 in parallel, and the first of those instruction is not 8-byte aligned,
9946 insert a nop to make it aligned. */
9949 vr4130_align_insns (void)
9951 struct mips_sim state;
9952 rtx insn, subinsn, last, last2, next;
9957 /* LAST is the last instruction before INSN to have a nonzero length.
9958 LAST2 is the last such instruction before LAST. */
9962 /* ALIGNED_P is true if INSN is known to be at an aligned address. */
9965 mips_sim_init (&state, alloca (state_size ()));
9966 for (insn = get_insns (); insn != 0; insn = next)
9968 unsigned int length;
9970 next = NEXT_INSN (insn);
9972 /* See the comment above vr4130_avoid_branch_rt_conflict for details.
9973 This isn't really related to the alignment pass, but we do it on
9974 the fly to avoid a separate instruction walk. */
9975 vr4130_avoid_branch_rt_conflict (insn);
9977 if (USEFUL_INSN_P (insn))
9978 FOR_EACH_SUBINSN (subinsn, insn)
9980 mips_sim_wait_insn (&state, subinsn);
9982 /* If we want this instruction to issue in parallel with the
9983 previous one, make sure that the previous instruction is
9984 aligned. There are several reasons why this isn't worthwhile
9985 when the second instruction is a call:
9987 - Calls are less likely to be performance critical,
9988 - There's a good chance that the delay slot can execute
9989 in parallel with the call.
9990 - The return address would then be unaligned.
9992 In general, if we're going to insert a nop between instructions
9993 X and Y, it's better to insert it immediately after X. That
9994 way, if the nop makes Y aligned, it will also align any labels
9996 if (state.insns_left != state.issue_rate
9997 && !CALL_P (subinsn))
9999 if (subinsn == SEQ_BEGIN (insn) && aligned_p)
10001 /* SUBINSN is the first instruction in INSN and INSN is
10002 aligned. We want to align the previous instruction
10003 instead, so insert a nop between LAST2 and LAST.
10005 Note that LAST could be either a single instruction
10006 or a branch with a delay slot. In the latter case,
10007 LAST, like INSN, is already aligned, but the delay
10008 slot must have some extra delay that stops it from
10009 issuing at the same time as the branch. We therefore
10010 insert a nop before the branch in order to align its
10012 emit_insn_after (gen_nop (), last2);
10015 else if (subinsn != SEQ_BEGIN (insn) && !aligned_p)
10017 /* SUBINSN is the delay slot of INSN, but INSN is
10018 currently unaligned. Insert a nop between
10019 LAST and INSN to align it. */
10020 emit_insn_after (gen_nop (), last);
10024 mips_sim_issue_insn (&state, subinsn);
10026 mips_sim_finish_insn (&state, insn);
10028 /* Update LAST, LAST2 and ALIGNED_P for the next instruction. */
10029 length = get_attr_length (insn);
10032 /* If the instruction is an asm statement or multi-instruction
10033 mips.md patern, the length is only an estimate. Insert an
10034 8 byte alignment after it so that the following instructions
10035 can be handled correctly. */
10036 if (NONJUMP_INSN_P (SEQ_BEGIN (insn))
10037 && (recog_memoized (insn) < 0 || length >= 8))
10039 next = emit_insn_after (gen_align (GEN_INT (3)), insn);
10040 next = NEXT_INSN (next);
10041 mips_sim_next_cycle (&state);
10044 else if (length & 4)
10045 aligned_p = !aligned_p;
10050 /* See whether INSN is an aligned label. */
10051 if (LABEL_P (insn) && label_to_alignment (insn) >= 3)
10057 /* Subroutine of mips_reorg. If there is a hazard between INSN
10058 and a previous instruction, avoid it by inserting nops after
10061 *DELAYED_REG and *HILO_DELAY describe the hazards that apply at
10062 this point. If *DELAYED_REG is non-null, INSN must wait a cycle
10063 before using the value of that register. *HILO_DELAY counts the
10064 number of instructions since the last hilo hazard (that is,
10065 the number of instructions since the last mflo or mfhi).
10067 After inserting nops for INSN, update *DELAYED_REG and *HILO_DELAY
10068 for the next instruction.
10070 LO_REG is an rtx for the LO register, used in dependence checking. */
10073 mips_avoid_hazard (rtx after, rtx insn, int *hilo_delay,
10074 rtx *delayed_reg, rtx lo_reg)
10079 if (!INSN_P (insn))
10082 pattern = PATTERN (insn);
10084 /* Do not put the whole function in .set noreorder if it contains
10085 an asm statement. We don't know whether there will be hazards
10086 between the asm statement and the gcc-generated code. */
10087 if (GET_CODE (pattern) == ASM_INPUT || asm_noperands (pattern) >= 0)
10088 cfun->machine->all_noreorder_p = false;
10090 /* Ignore zero-length instructions (barriers and the like). */
10091 ninsns = get_attr_length (insn) / 4;
10095 /* Work out how many nops are needed. Note that we only care about
10096 registers that are explicitly mentioned in the instruction's pattern.
10097 It doesn't matter that calls use the argument registers or that they
10098 clobber hi and lo. */
10099 if (*hilo_delay < 2 && reg_set_p (lo_reg, pattern))
10100 nops = 2 - *hilo_delay;
10101 else if (*delayed_reg != 0 && reg_referenced_p (*delayed_reg, pattern))
10106 /* Insert the nops between this instruction and the previous one.
10107 Each new nop takes us further from the last hilo hazard. */
10108 *hilo_delay += nops;
10110 emit_insn_after (gen_hazard_nop (), after);
10112 /* Set up the state for the next instruction. */
10113 *hilo_delay += ninsns;
10115 if (INSN_CODE (insn) >= 0)
10116 switch (get_attr_hazard (insn))
10126 set = single_set (insn);
10127 gcc_assert (set != 0);
10128 *delayed_reg = SET_DEST (set);
10134 /* Go through the instruction stream and insert nops where necessary.
10135 See if the whole function can then be put into .set noreorder &
10139 mips_avoid_hazards (void)
10141 rtx insn, last_insn, lo_reg, delayed_reg;
10144 /* Force all instructions to be split into their final form. */
10145 split_all_insns_noflow ();
10147 /* Recalculate instruction lengths without taking nops into account. */
10148 cfun->machine->ignore_hazard_length_p = true;
10149 shorten_branches (get_insns ());
10151 cfun->machine->all_noreorder_p = true;
10153 /* Profiled functions can't be all noreorder because the profiler
10154 support uses assembler macros. */
10155 if (current_function_profile)
10156 cfun->machine->all_noreorder_p = false;
10158 /* Code compiled with -mfix-vr4120 can't be all noreorder because
10159 we rely on the assembler to work around some errata. */
10160 if (TARGET_FIX_VR4120)
10161 cfun->machine->all_noreorder_p = false;
10163 /* The same is true for -mfix-vr4130 if we might generate mflo or
10164 mfhi instructions. Note that we avoid using mflo and mfhi if
10165 the VR4130 macc and dmacc instructions are available instead;
10166 see the *mfhilo_{si,di}_macc patterns. */
10167 if (TARGET_FIX_VR4130 && !ISA_HAS_MACCHI)
10168 cfun->machine->all_noreorder_p = false;
10173 lo_reg = gen_rtx_REG (SImode, LO_REGNUM);
10175 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10178 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
10179 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
10180 mips_avoid_hazard (last_insn, XVECEXP (PATTERN (insn), 0, i),
10181 &hilo_delay, &delayed_reg, lo_reg);
10183 mips_avoid_hazard (last_insn, insn, &hilo_delay,
10184 &delayed_reg, lo_reg);
10191 /* Implement TARGET_MACHINE_DEPENDENT_REORG. */
10196 mips16_lay_out_constants ();
10197 if (TARGET_EXPLICIT_RELOCS)
10199 if (mips_flag_delayed_branch)
10200 dbr_schedule (get_insns ());
10201 mips_avoid_hazards ();
10202 if (TUNE_MIPS4130 && TARGET_VR4130_ALIGN)
10203 vr4130_align_insns ();
10207 /* This function does three things:
10209 - Register the special divsi3 and modsi3 functions if -mfix-vr4120.
10210 - Register the mips16 hardware floating point stubs.
10211 - Register the gofast functions if selected using --enable-gofast. */
10213 #include "config/gofast.h"
10216 mips_init_libfuncs (void)
10218 if (TARGET_FIX_VR4120)
10220 set_optab_libfunc (sdiv_optab, SImode, "__vr4120_divsi3");
10221 set_optab_libfunc (smod_optab, SImode, "__vr4120_modsi3");
10224 if (TARGET_MIPS16 && TARGET_HARD_FLOAT_ABI)
10226 set_optab_libfunc (add_optab, SFmode, "__mips16_addsf3");
10227 set_optab_libfunc (sub_optab, SFmode, "__mips16_subsf3");
10228 set_optab_libfunc (smul_optab, SFmode, "__mips16_mulsf3");
10229 set_optab_libfunc (sdiv_optab, SFmode, "__mips16_divsf3");
10231 set_optab_libfunc (eq_optab, SFmode, "__mips16_eqsf2");
10232 set_optab_libfunc (ne_optab, SFmode, "__mips16_nesf2");
10233 set_optab_libfunc (gt_optab, SFmode, "__mips16_gtsf2");
10234 set_optab_libfunc (ge_optab, SFmode, "__mips16_gesf2");
10235 set_optab_libfunc (lt_optab, SFmode, "__mips16_ltsf2");
10236 set_optab_libfunc (le_optab, SFmode, "__mips16_lesf2");
10237 set_optab_libfunc (unord_optab, SFmode, "__mips16_unordsf2");
10239 set_conv_libfunc (sfix_optab, SImode, SFmode, "__mips16_fix_truncsfsi");
10240 set_conv_libfunc (sfloat_optab, SFmode, SImode, "__mips16_floatsisf");
10241 set_conv_libfunc (ufloat_optab, SFmode, SImode, "__mips16_floatunsisf");
10243 if (TARGET_DOUBLE_FLOAT)
10245 set_optab_libfunc (add_optab, DFmode, "__mips16_adddf3");
10246 set_optab_libfunc (sub_optab, DFmode, "__mips16_subdf3");
10247 set_optab_libfunc (smul_optab, DFmode, "__mips16_muldf3");
10248 set_optab_libfunc (sdiv_optab, DFmode, "__mips16_divdf3");
10250 set_optab_libfunc (eq_optab, DFmode, "__mips16_eqdf2");
10251 set_optab_libfunc (ne_optab, DFmode, "__mips16_nedf2");
10252 set_optab_libfunc (gt_optab, DFmode, "__mips16_gtdf2");
10253 set_optab_libfunc (ge_optab, DFmode, "__mips16_gedf2");
10254 set_optab_libfunc (lt_optab, DFmode, "__mips16_ltdf2");
10255 set_optab_libfunc (le_optab, DFmode, "__mips16_ledf2");
10256 set_optab_libfunc (unord_optab, DFmode, "__mips16_unorddf2");
10258 set_conv_libfunc (sext_optab, DFmode, SFmode, "__mips16_extendsfdf2");
10259 set_conv_libfunc (trunc_optab, SFmode, DFmode, "__mips16_truncdfsf2");
10261 set_conv_libfunc (sfix_optab, SImode, DFmode, "__mips16_fix_truncdfsi");
10262 set_conv_libfunc (sfloat_optab, DFmode, SImode, "__mips16_floatsidf");
10263 set_conv_libfunc (ufloat_optab, DFmode, SImode, "__mips16_floatunsidf");
10267 gofast_maybe_init_libfuncs ();
10270 /* Return a number assessing the cost of moving a register in class
10271 FROM to class TO. The classes are expressed using the enumeration
10272 values such as `GENERAL_REGS'. A value of 2 is the default; other
10273 values are interpreted relative to that.
10275 It is not required that the cost always equal 2 when FROM is the
10276 same as TO; on some machines it is expensive to move between
10277 registers if they are not general registers.
10279 If reload sees an insn consisting of a single `set' between two
10280 hard registers, and if `REGISTER_MOVE_COST' applied to their
10281 classes returns a value of 2, reload does not check to ensure that
10282 the constraints of the insn are met. Setting a cost of other than
10283 2 will allow reload to verify that the constraints are met. You
10284 should do this if the `movM' pattern's constraints do not allow
10287 ??? We make the cost of moving from HI/LO into general
10288 registers the same as for one of moving general registers to
10289 HI/LO for TARGET_MIPS16 in order to prevent allocating a
10290 pseudo to HI/LO. This might hurt optimizations though, it
10291 isn't clear if it is wise. And it might not work in all cases. We
10292 could solve the DImode LO reg problem by using a multiply, just
10293 like reload_{in,out}si. We could solve the SImode/HImode HI reg
10294 problem by using divide instructions. divu puts the remainder in
10295 the HI reg, so doing a divide by -1 will move the value in the HI
10296 reg for all values except -1. We could handle that case by using a
10297 signed divide, e.g. -1 / 2 (or maybe 1 / -2?). We'd have to emit
10298 a compare/branch to test the input value to see which instruction
10299 we need to use. This gets pretty messy, but it is feasible. */
10302 mips_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
10303 enum reg_class to, enum reg_class from)
10305 if (from == M16_REGS && reg_class_subset_p (to, GENERAL_REGS))
10307 else if (from == M16_NA_REGS && reg_class_subset_p (to, GENERAL_REGS))
10309 else if (reg_class_subset_p (from, GENERAL_REGS))
10311 if (to == M16_REGS)
10313 else if (to == M16_NA_REGS)
10315 else if (reg_class_subset_p (to, GENERAL_REGS))
10322 else if (to == FP_REGS)
10324 else if (reg_class_subset_p (to, ACC_REGS))
10331 else if (reg_class_subset_p (to, ALL_COP_REGS))
10336 else if (from == FP_REGS)
10338 if (reg_class_subset_p (to, GENERAL_REGS))
10340 else if (to == FP_REGS)
10342 else if (to == ST_REGS)
10345 else if (reg_class_subset_p (from, ACC_REGS))
10347 if (reg_class_subset_p (to, GENERAL_REGS))
10355 else if (from == ST_REGS && reg_class_subset_p (to, GENERAL_REGS))
10357 else if (reg_class_subset_p (from, ALL_COP_REGS))
10363 ??? What cases are these? Shouldn't we return 2 here? */
10368 /* Return the length of INSN. LENGTH is the initial length computed by
10369 attributes in the machine-description file. */
10372 mips_adjust_insn_length (rtx insn, int length)
10374 /* A unconditional jump has an unfilled delay slot if it is not part
10375 of a sequence. A conditional jump normally has a delay slot, but
10376 does not on MIPS16. */
10377 if (CALL_P (insn) || (TARGET_MIPS16 ? simplejump_p (insn) : JUMP_P (insn)))
10380 /* See how many nops might be needed to avoid hardware hazards. */
10381 if (!cfun->machine->ignore_hazard_length_p && INSN_CODE (insn) >= 0)
10382 switch (get_attr_hazard (insn))
10396 /* All MIPS16 instructions are a measly two bytes. */
10404 /* Return an asm sequence to start a noat block and load the address
10405 of a label into $1. */
10408 mips_output_load_label (void)
10410 if (TARGET_EXPLICIT_RELOCS)
10414 return "%[lw\t%@,%%got_page(%0)(%+)\n\taddiu\t%@,%@,%%got_ofst(%0)";
10417 return "%[ld\t%@,%%got_page(%0)(%+)\n\tdaddiu\t%@,%@,%%got_ofst(%0)";
10420 if (ISA_HAS_LOAD_DELAY)
10421 return "%[lw\t%@,%%got(%0)(%+)%#\n\taddiu\t%@,%@,%%lo(%0)";
10422 return "%[lw\t%@,%%got(%0)(%+)\n\taddiu\t%@,%@,%%lo(%0)";
10426 if (Pmode == DImode)
10427 return "%[dla\t%@,%0";
10429 return "%[la\t%@,%0";
10433 /* Return the assembly code for INSN, which has the operands given by
10434 OPERANDS, and which branches to OPERANDS[1] if some condition is true.
10435 BRANCH_IF_TRUE is the asm template that should be used if OPERANDS[1]
10436 is in range of a direct branch. BRANCH_IF_FALSE is an inverted
10437 version of BRANCH_IF_TRUE. */
10440 mips_output_conditional_branch (rtx insn, rtx *operands,
10441 const char *branch_if_true,
10442 const char *branch_if_false)
10444 unsigned int length;
10445 rtx taken, not_taken;
10447 length = get_attr_length (insn);
10450 /* Just a simple conditional branch. */
10451 mips_branch_likely = (final_sequence && INSN_ANNULLED_BRANCH_P (insn));
10452 return branch_if_true;
10455 /* Generate a reversed branch around a direct jump. This fallback does
10456 not use branch-likely instructions. */
10457 mips_branch_likely = false;
10458 not_taken = gen_label_rtx ();
10459 taken = operands[1];
10461 /* Generate the reversed branch to NOT_TAKEN. */
10462 operands[1] = not_taken;
10463 output_asm_insn (branch_if_false, operands);
10465 /* If INSN has a delay slot, we must provide delay slots for both the
10466 branch to NOT_TAKEN and the conditional jump. We must also ensure
10467 that INSN's delay slot is executed in the appropriate cases. */
10468 if (final_sequence)
10470 /* This first delay slot will always be executed, so use INSN's
10471 delay slot if is not annulled. */
10472 if (!INSN_ANNULLED_BRANCH_P (insn))
10474 final_scan_insn (XVECEXP (final_sequence, 0, 1),
10475 asm_out_file, optimize, 1, NULL);
10476 INSN_DELETED_P (XVECEXP (final_sequence, 0, 1)) = 1;
10479 output_asm_insn ("nop", 0);
10480 fprintf (asm_out_file, "\n");
10483 /* Output the unconditional branch to TAKEN. */
10485 output_asm_insn ("j\t%0%/", &taken);
10488 output_asm_insn (mips_output_load_label (), &taken);
10489 output_asm_insn ("jr\t%@%]%/", 0);
10492 /* Now deal with its delay slot; see above. */
10493 if (final_sequence)
10495 /* This delay slot will only be executed if the branch is taken.
10496 Use INSN's delay slot if is annulled. */
10497 if (INSN_ANNULLED_BRANCH_P (insn))
10499 final_scan_insn (XVECEXP (final_sequence, 0, 1),
10500 asm_out_file, optimize, 1, NULL);
10501 INSN_DELETED_P (XVECEXP (final_sequence, 0, 1)) = 1;
10504 output_asm_insn ("nop", 0);
10505 fprintf (asm_out_file, "\n");
10508 /* Output NOT_TAKEN. */
10509 (*targetm.asm_out.internal_label) (asm_out_file, "L",
10510 CODE_LABEL_NUMBER (not_taken));
10514 /* Return the assembly code for INSN, which branches to OPERANDS[1]
10515 if some ordered condition is true. The condition is given by
10516 OPERANDS[0] if !INVERTED_P, otherwise it is the inverse of
10517 OPERANDS[0]. OPERANDS[2] is the comparison's first operand;
10518 its second is always zero. */
10521 mips_output_order_conditional_branch (rtx insn, rtx *operands, bool inverted_p)
10523 const char *branch[2];
10525 /* Make BRANCH[1] branch to OPERANDS[1] when the condition is true.
10526 Make BRANCH[0] branch on the inverse condition. */
10527 switch (GET_CODE (operands[0]))
10529 /* These cases are equivalent to comparisons against zero. */
10531 inverted_p = !inverted_p;
10532 /* Fall through. */
10534 branch[!inverted_p] = MIPS_BRANCH ("bne", "%2,%.,%1");
10535 branch[inverted_p] = MIPS_BRANCH ("beq", "%2,%.,%1");
10538 /* These cases are always true or always false. */
10540 inverted_p = !inverted_p;
10541 /* Fall through. */
10543 branch[!inverted_p] = MIPS_BRANCH ("beq", "%.,%.,%1");
10544 branch[inverted_p] = MIPS_BRANCH ("bne", "%.,%.,%1");
10548 branch[!inverted_p] = MIPS_BRANCH ("b%C0z", "%2,%1");
10549 branch[inverted_p] = MIPS_BRANCH ("b%N0z", "%2,%1");
10552 return mips_output_conditional_branch (insn, operands, branch[1], branch[0]);
10555 /* Used to output div or ddiv instruction DIVISION, which has the operands
10556 given by OPERANDS. Add in a divide-by-zero check if needed.
10558 When working around R4000 and R4400 errata, we need to make sure that
10559 the division is not immediately followed by a shift[1][2]. We also
10560 need to stop the division from being put into a branch delay slot[3].
10561 The easiest way to avoid both problems is to add a nop after the
10562 division. When a divide-by-zero check is needed, this nop can be
10563 used to fill the branch delay slot.
10565 [1] If a double-word or a variable shift executes immediately
10566 after starting an integer division, the shift may give an
10567 incorrect result. See quotations of errata #16 and #28 from
10568 "MIPS R4000PC/SC Errata, Processor Revision 2.2 and 3.0"
10569 in mips.md for details.
10571 [2] A similar bug to [1] exists for all revisions of the
10572 R4000 and the R4400 when run in an MC configuration.
10573 From "MIPS R4000MC Errata, Processor Revision 2.2 and 3.0":
10575 "19. In this following sequence:
10577 ddiv (or ddivu or div or divu)
10578 dsll32 (or dsrl32, dsra32)
10580 if an MPT stall occurs, while the divide is slipping the cpu
10581 pipeline, then the following double shift would end up with an
10584 Workaround: The compiler needs to avoid generating any
10585 sequence with divide followed by extended double shift."
10587 This erratum is also present in "MIPS R4400MC Errata, Processor
10588 Revision 1.0" and "MIPS R4400MC Errata, Processor Revision 2.0
10589 & 3.0" as errata #10 and #4, respectively.
10591 [3] From "MIPS R4000PC/SC Errata, Processor Revision 2.2 and 3.0"
10592 (also valid for MIPS R4000MC processors):
10594 "52. R4000SC: This bug does not apply for the R4000PC.
10596 There are two flavors of this bug:
10598 1) If the instruction just after divide takes an RF exception
10599 (tlb-refill, tlb-invalid) and gets an instruction cache
10600 miss (both primary and secondary) and the line which is
10601 currently in secondary cache at this index had the first
10602 data word, where the bits 5..2 are set, then R4000 would
10603 get a wrong result for the div.
10608 ------------------- # end-of page. -tlb-refill
10613 ------------------- # end-of page. -tlb-invalid
10616 2) If the divide is in the taken branch delay slot, where the
10617 target takes RF exception and gets an I-cache miss for the
10618 exception vector or where I-cache miss occurs for the
10619 target address, under the above mentioned scenarios, the
10620 div would get wrong results.
10623 j r2 # to next page mapped or unmapped
10624 div r8,r9 # this bug would be there as long
10625 # as there is an ICache miss and
10626 nop # the "data pattern" is present
10629 beq r0, r0, NextPage # to Next page
10633 This bug is present for div, divu, ddiv, and ddivu
10636 Workaround: For item 1), OS could make sure that the next page
10637 after the divide instruction is also mapped. For item 2), the
10638 compiler could make sure that the divide instruction is not in
10639 the branch delay slot."
10641 These processors have PRId values of 0x00004220 and 0x00004300 for
10642 the R4000 and 0x00004400, 0x00004500 and 0x00004600 for the R4400. */
10645 mips_output_division (const char *division, rtx *operands)
10650 if (TARGET_FIX_R4000 || TARGET_FIX_R4400)
10652 output_asm_insn (s, operands);
10655 if (TARGET_CHECK_ZERO_DIV)
10659 output_asm_insn (s, operands);
10660 s = "bnez\t%2,1f\n\tbreak\t7\n1:";
10662 else if (GENERATE_DIVIDE_TRAPS)
10664 output_asm_insn (s, operands);
10665 s = "teq\t%2,%.,7";
10669 output_asm_insn ("%(bne\t%2,%.,1f", operands);
10670 output_asm_insn (s, operands);
10671 s = "break\t7%)\n1:";
10677 /* Return true if GIVEN is the same as CANONICAL, or if it is CANONICAL
10678 with a final "000" replaced by "k". Ignore case.
10680 Note: this function is shared between GCC and GAS. */
10683 mips_strict_matching_cpu_name_p (const char *canonical, const char *given)
10685 while (*given != 0 && TOLOWER (*given) == TOLOWER (*canonical))
10686 given++, canonical++;
10688 return ((*given == 0 && *canonical == 0)
10689 || (strcmp (canonical, "000") == 0 && strcasecmp (given, "k") == 0));
10693 /* Return true if GIVEN matches CANONICAL, where GIVEN is a user-supplied
10694 CPU name. We've traditionally allowed a lot of variation here.
10696 Note: this function is shared between GCC and GAS. */
10699 mips_matching_cpu_name_p (const char *canonical, const char *given)
10701 /* First see if the name matches exactly, or with a final "000"
10702 turned into "k". */
10703 if (mips_strict_matching_cpu_name_p (canonical, given))
10706 /* If not, try comparing based on numerical designation alone.
10707 See if GIVEN is an unadorned number, or 'r' followed by a number. */
10708 if (TOLOWER (*given) == 'r')
10710 if (!ISDIGIT (*given))
10713 /* Skip over some well-known prefixes in the canonical name,
10714 hoping to find a number there too. */
10715 if (TOLOWER (canonical[0]) == 'v' && TOLOWER (canonical[1]) == 'r')
10717 else if (TOLOWER (canonical[0]) == 'r' && TOLOWER (canonical[1]) == 'm')
10719 else if (TOLOWER (canonical[0]) == 'r')
10722 return mips_strict_matching_cpu_name_p (canonical, given);
10726 /* Return the mips_cpu_info entry for the processor or ISA given
10727 by CPU_STRING. Return null if the string isn't recognized.
10729 A similar function exists in GAS. */
10731 static const struct mips_cpu_info *
10732 mips_parse_cpu (const char *cpu_string)
10734 const struct mips_cpu_info *p;
10737 /* In the past, we allowed upper-case CPU names, but it doesn't
10738 work well with the multilib machinery. */
10739 for (s = cpu_string; *s != 0; s++)
10742 warning (0, "the cpu name must be lower case");
10746 /* 'from-abi' selects the most compatible architecture for the given
10747 ABI: MIPS I for 32-bit ABIs and MIPS III for 64-bit ABIs. For the
10748 EABIs, we have to decide whether we're using the 32-bit or 64-bit
10749 version. Look first at the -mgp options, if given, otherwise base
10750 the choice on MASK_64BIT in TARGET_DEFAULT. */
10751 if (strcasecmp (cpu_string, "from-abi") == 0)
10752 return mips_cpu_info_from_isa (ABI_NEEDS_32BIT_REGS ? 1
10753 : ABI_NEEDS_64BIT_REGS ? 3
10754 : (TARGET_64BIT ? 3 : 1));
10756 /* 'default' has traditionally been a no-op. Probably not very useful. */
10757 if (strcasecmp (cpu_string, "default") == 0)
10760 for (p = mips_cpu_info_table; p->name != 0; p++)
10761 if (mips_matching_cpu_name_p (p->name, cpu_string))
10768 /* Return the processor associated with the given ISA level, or null
10769 if the ISA isn't valid. */
10771 static const struct mips_cpu_info *
10772 mips_cpu_info_from_isa (int isa)
10774 const struct mips_cpu_info *p;
10776 for (p = mips_cpu_info_table; p->name != 0; p++)
10783 /* Implement HARD_REGNO_NREGS. The size of FP registers is controlled
10784 by UNITS_PER_FPREG. The size of FP status registers is always 4, because
10785 they only hold condition code modes, and CCmode is always considered to
10786 be 4 bytes wide. All other registers are word sized. */
10789 mips_hard_regno_nregs (int regno, enum machine_mode mode)
10791 if (ST_REG_P (regno))
10792 return ((GET_MODE_SIZE (mode) + 3) / 4);
10793 else if (! FP_REG_P (regno))
10794 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
10796 return ((GET_MODE_SIZE (mode) + UNITS_PER_FPREG - 1) / UNITS_PER_FPREG);
10799 /* Implement TARGET_RETURN_IN_MEMORY. Under the old (i.e., 32 and O64 ABIs)
10800 all BLKmode objects are returned in memory. Under the new (N32 and
10801 64-bit MIPS ABIs) small structures are returned in a register.
10802 Objects with varying size must still be returned in memory, of
10806 mips_return_in_memory (tree type, tree fndecl ATTRIBUTE_UNUSED)
10809 return (TYPE_MODE (type) == BLKmode);
10811 return ((int_size_in_bytes (type) > (2 * UNITS_PER_WORD))
10812 || (int_size_in_bytes (type) == -1));
10816 mips_strict_argument_naming (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
10818 return !TARGET_OLDABI;
10821 /* Return true if INSN is a multiply-add or multiply-subtract
10822 instruction and PREV assigns to the accumulator operand. */
10825 mips_linked_madd_p (rtx prev, rtx insn)
10829 x = single_set (insn);
10835 if (GET_CODE (x) == PLUS
10836 && GET_CODE (XEXP (x, 0)) == MULT
10837 && reg_set_p (XEXP (x, 1), prev))
10840 if (GET_CODE (x) == MINUS
10841 && GET_CODE (XEXP (x, 1)) == MULT
10842 && reg_set_p (XEXP (x, 0), prev))
10848 /* Used by TUNE_MACC_CHAINS to record the last scheduled instruction
10849 that may clobber hi or lo. */
10851 static rtx mips_macc_chains_last_hilo;
10853 /* A TUNE_MACC_CHAINS helper function. Record that instruction INSN has
10854 been scheduled, updating mips_macc_chains_last_hilo appropriately. */
10857 mips_macc_chains_record (rtx insn)
10859 if (get_attr_may_clobber_hilo (insn))
10860 mips_macc_chains_last_hilo = insn;
10863 /* A TUNE_MACC_CHAINS helper function. Search ready queue READY, which
10864 has NREADY elements, looking for a multiply-add or multiply-subtract
10865 instruction that is cumulative with mips_macc_chains_last_hilo.
10866 If there is one, promote it ahead of anything else that might
10867 clobber hi or lo. */
10870 mips_macc_chains_reorder (rtx *ready, int nready)
10874 if (mips_macc_chains_last_hilo != 0)
10875 for (i = nready - 1; i >= 0; i--)
10876 if (mips_linked_madd_p (mips_macc_chains_last_hilo, ready[i]))
10878 for (j = nready - 1; j > i; j--)
10879 if (recog_memoized (ready[j]) >= 0
10880 && get_attr_may_clobber_hilo (ready[j]))
10882 mips_promote_ready (ready, i, j);
10889 /* The last instruction to be scheduled. */
10891 static rtx vr4130_last_insn;
10893 /* A note_stores callback used by vr4130_true_reg_dependence_p. DATA
10894 points to an rtx that is initially an instruction. Nullify the rtx
10895 if the instruction uses the value of register X. */
10898 vr4130_true_reg_dependence_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
10900 rtx *insn_ptr = data;
10903 && reg_referenced_p (x, PATTERN (*insn_ptr)))
10907 /* Return true if there is true register dependence between vr4130_last_insn
10911 vr4130_true_reg_dependence_p (rtx insn)
10913 note_stores (PATTERN (vr4130_last_insn),
10914 vr4130_true_reg_dependence_p_1, &insn);
10918 /* A TUNE_MIPS4130 helper function. Given that INSN1 is at the head of
10919 the ready queue and that INSN2 is the instruction after it, return
10920 true if it is worth promoting INSN2 ahead of INSN1. Look for cases
10921 in which INSN1 and INSN2 can probably issue in parallel, but for
10922 which (INSN2, INSN1) should be less sensitive to instruction
10923 alignment than (INSN1, INSN2). See 4130.md for more details. */
10926 vr4130_swap_insns_p (rtx insn1, rtx insn2)
10930 /* Check for the following case:
10932 1) there is some other instruction X with an anti dependence on INSN1;
10933 2) X has a higher priority than INSN2; and
10934 3) X is an arithmetic instruction (and thus has no unit restrictions).
10936 If INSN1 is the last instruction blocking X, it would better to
10937 choose (INSN1, X) over (INSN2, INSN1). */
10938 FOR_EACH_DEP_LINK (dep, INSN_FORW_DEPS (insn1))
10939 if (DEP_LINK_KIND (dep) == REG_DEP_ANTI
10940 && INSN_PRIORITY (DEP_LINK_CON (dep)) > INSN_PRIORITY (insn2)
10941 && recog_memoized (DEP_LINK_CON (dep)) >= 0
10942 && get_attr_vr4130_class (DEP_LINK_CON (dep)) == VR4130_CLASS_ALU)
10945 if (vr4130_last_insn != 0
10946 && recog_memoized (insn1) >= 0
10947 && recog_memoized (insn2) >= 0)
10949 /* See whether INSN1 and INSN2 use different execution units,
10950 or if they are both ALU-type instructions. If so, they can
10951 probably execute in parallel. */
10952 enum attr_vr4130_class class1 = get_attr_vr4130_class (insn1);
10953 enum attr_vr4130_class class2 = get_attr_vr4130_class (insn2);
10954 if (class1 != class2 || class1 == VR4130_CLASS_ALU)
10956 /* If only one of the instructions has a dependence on
10957 vr4130_last_insn, prefer to schedule the other one first. */
10958 bool dep1 = vr4130_true_reg_dependence_p (insn1);
10959 bool dep2 = vr4130_true_reg_dependence_p (insn2);
10963 /* Prefer to schedule INSN2 ahead of INSN1 if vr4130_last_insn
10964 is not an ALU-type instruction and if INSN1 uses the same
10965 execution unit. (Note that if this condition holds, we already
10966 know that INSN2 uses a different execution unit.) */
10967 if (class1 != VR4130_CLASS_ALU
10968 && recog_memoized (vr4130_last_insn) >= 0
10969 && class1 == get_attr_vr4130_class (vr4130_last_insn))
10976 /* A TUNE_MIPS4130 helper function. (READY, NREADY) describes a ready
10977 queue with at least two instructions. Swap the first two if
10978 vr4130_swap_insns_p says that it could be worthwhile. */
10981 vr4130_reorder (rtx *ready, int nready)
10983 if (vr4130_swap_insns_p (ready[nready - 1], ready[nready - 2]))
10984 mips_promote_ready (ready, nready - 2, nready - 1);
10987 /* Remove the instruction at index LOWER from ready queue READY and
10988 reinsert it in front of the instruction at index HIGHER. LOWER must
10992 mips_promote_ready (rtx *ready, int lower, int higher)
10997 new_head = ready[lower];
10998 for (i = lower; i < higher; i++)
10999 ready[i] = ready[i + 1];
11000 ready[i] = new_head;
11003 /* Implement TARGET_SCHED_REORDER. */
11006 mips_sched_reorder (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
11007 rtx *ready, int *nreadyp, int cycle)
11009 if (!reload_completed && TUNE_MACC_CHAINS)
11012 mips_macc_chains_last_hilo = 0;
11014 mips_macc_chains_reorder (ready, *nreadyp);
11016 if (reload_completed && TUNE_MIPS4130 && !TARGET_VR4130_ALIGN)
11019 vr4130_last_insn = 0;
11021 vr4130_reorder (ready, *nreadyp);
11023 return mips_issue_rate ();
11026 /* Implement TARGET_SCHED_VARIABLE_ISSUE. */
11029 mips_variable_issue (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
11030 rtx insn, int more)
11032 switch (GET_CODE (PATTERN (insn)))
11036 /* Don't count USEs and CLOBBERs against the issue rate. */
11041 if (!reload_completed && TUNE_MACC_CHAINS)
11042 mips_macc_chains_record (insn);
11043 vr4130_last_insn = insn;
11049 /* Implement TARGET_SCHED_ADJUST_COST. We assume that anti and output
11050 dependencies have no cost, except on the 20Kc where output-dependence
11051 is treated like input-dependence. */
11054 mips_adjust_cost (rtx insn ATTRIBUTE_UNUSED, rtx link,
11055 rtx dep ATTRIBUTE_UNUSED, int cost)
11057 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
11060 if (REG_NOTE_KIND (link) != 0)
11065 /* Return the number of instructions that can be issued per cycle. */
11068 mips_issue_rate (void)
11072 case PROCESSOR_74KC:
11073 case PROCESSOR_74KF2_1:
11074 case PROCESSOR_74KF1_1:
11075 case PROCESSOR_74KF3_2:
11076 /* The 74k is not strictly quad-issue cpu, but can be seen as one
11077 by the scheduler. It can issue 1 ALU, 1 AGEN and 2 FPU insns,
11078 but in reality only a maximum of 3 insns can be issued as the
11079 floating point load/stores also require a slot in the AGEN pipe. */
11082 case PROCESSOR_20KC:
11083 case PROCESSOR_R4130:
11084 case PROCESSOR_R5400:
11085 case PROCESSOR_R5500:
11086 case PROCESSOR_R7000:
11087 case PROCESSOR_R9000:
11090 case PROCESSOR_SB1:
11091 case PROCESSOR_SB1A:
11092 /* This is actually 4, but we get better performance if we claim 3.
11093 This is partly because of unwanted speculative code motion with the
11094 larger number, and partly because in most common cases we can't
11095 reach the theoretical max of 4. */
11103 /* Implements TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD. This should
11104 be as wide as the scheduling freedom in the DFA. */
11107 mips_multipass_dfa_lookahead (void)
11109 /* Can schedule up to 4 of the 6 function units in any one cycle. */
11116 /* Implements a store data bypass check. We need this because the cprestore
11117 pattern is type store, but defined using an UNSPEC. This UNSPEC causes the
11118 default routine to abort. We just return false for that case. */
11119 /* ??? Should try to give a better result here than assuming false. */
11122 mips_store_data_bypass_p (rtx out_insn, rtx in_insn)
11124 if (GET_CODE (PATTERN (in_insn)) == UNSPEC_VOLATILE)
11127 return ! store_data_bypass_p (out_insn, in_insn);
11130 /* Given that we have an rtx of the form (prefetch ... WRITE LOCALITY),
11131 return the first operand of the associated "pref" or "prefx" insn. */
11134 mips_prefetch_cookie (rtx write, rtx locality)
11136 /* store_streamed / load_streamed. */
11137 if (INTVAL (locality) <= 0)
11138 return GEN_INT (INTVAL (write) + 4);
11140 /* store / load. */
11141 if (INTVAL (locality) <= 2)
11144 /* store_retained / load_retained. */
11145 return GEN_INT (INTVAL (write) + 6);
11148 /* MIPS builtin function support. */
11150 struct builtin_description
11152 /* The code of the main .md file instruction. See mips_builtin_type
11153 for more information. */
11154 enum insn_code icode;
11156 /* The floating-point comparison code to use with ICODE, if any. */
11157 enum mips_fp_condition cond;
11159 /* The name of the builtin function. */
11162 /* Specifies how the function should be expanded. */
11163 enum mips_builtin_type builtin_type;
11165 /* The function's prototype. */
11166 enum mips_function_type function_type;
11168 /* The target flags required for this function. */
11172 /* Define a MIPS_BUILTIN_DIRECT function for instruction CODE_FOR_mips_<INSN>.
11173 FUNCTION_TYPE and TARGET_FLAGS are builtin_description fields. */
11174 #define DIRECT_BUILTIN(INSN, FUNCTION_TYPE, TARGET_FLAGS) \
11175 { CODE_FOR_mips_ ## INSN, 0, "__builtin_mips_" #INSN, \
11176 MIPS_BUILTIN_DIRECT, FUNCTION_TYPE, TARGET_FLAGS }
11178 /* Define __builtin_mips_<INSN>_<COND>_{s,d}, both of which require
11180 #define CMP_SCALAR_BUILTINS(INSN, COND, TARGET_FLAGS) \
11181 { CODE_FOR_mips_ ## INSN ## _cond_s, MIPS_FP_COND_ ## COND, \
11182 "__builtin_mips_" #INSN "_" #COND "_s", \
11183 MIPS_BUILTIN_CMP_SINGLE, MIPS_INT_FTYPE_SF_SF, TARGET_FLAGS }, \
11184 { CODE_FOR_mips_ ## INSN ## _cond_d, MIPS_FP_COND_ ## COND, \
11185 "__builtin_mips_" #INSN "_" #COND "_d", \
11186 MIPS_BUILTIN_CMP_SINGLE, MIPS_INT_FTYPE_DF_DF, TARGET_FLAGS }
11188 /* Define __builtin_mips_{any,all,upper,lower}_<INSN>_<COND>_ps.
11189 The lower and upper forms require TARGET_FLAGS while the any and all
11190 forms require MASK_MIPS3D. */
11191 #define CMP_PS_BUILTINS(INSN, COND, TARGET_FLAGS) \
11192 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11193 "__builtin_mips_any_" #INSN "_" #COND "_ps", \
11194 MIPS_BUILTIN_CMP_ANY, MIPS_INT_FTYPE_V2SF_V2SF, MASK_MIPS3D }, \
11195 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11196 "__builtin_mips_all_" #INSN "_" #COND "_ps", \
11197 MIPS_BUILTIN_CMP_ALL, MIPS_INT_FTYPE_V2SF_V2SF, MASK_MIPS3D }, \
11198 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11199 "__builtin_mips_lower_" #INSN "_" #COND "_ps", \
11200 MIPS_BUILTIN_CMP_LOWER, MIPS_INT_FTYPE_V2SF_V2SF, TARGET_FLAGS }, \
11201 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11202 "__builtin_mips_upper_" #INSN "_" #COND "_ps", \
11203 MIPS_BUILTIN_CMP_UPPER, MIPS_INT_FTYPE_V2SF_V2SF, TARGET_FLAGS }
11205 /* Define __builtin_mips_{any,all}_<INSN>_<COND>_4s. The functions
11206 require MASK_MIPS3D. */
11207 #define CMP_4S_BUILTINS(INSN, COND) \
11208 { CODE_FOR_mips_ ## INSN ## _cond_4s, MIPS_FP_COND_ ## COND, \
11209 "__builtin_mips_any_" #INSN "_" #COND "_4s", \
11210 MIPS_BUILTIN_CMP_ANY, MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF, \
11212 { CODE_FOR_mips_ ## INSN ## _cond_4s, MIPS_FP_COND_ ## COND, \
11213 "__builtin_mips_all_" #INSN "_" #COND "_4s", \
11214 MIPS_BUILTIN_CMP_ALL, MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF, \
11217 /* Define __builtin_mips_mov{t,f}_<INSN>_<COND>_ps. The comparison
11218 instruction requires TARGET_FLAGS. */
11219 #define MOVTF_BUILTINS(INSN, COND, TARGET_FLAGS) \
11220 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11221 "__builtin_mips_movt_" #INSN "_" #COND "_ps", \
11222 MIPS_BUILTIN_MOVT, MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF, \
11224 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11225 "__builtin_mips_movf_" #INSN "_" #COND "_ps", \
11226 MIPS_BUILTIN_MOVF, MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF, \
11229 /* Define all the builtins related to c.cond.fmt condition COND. */
11230 #define CMP_BUILTINS(COND) \
11231 MOVTF_BUILTINS (c, COND, MASK_PAIRED_SINGLE_FLOAT), \
11232 MOVTF_BUILTINS (cabs, COND, MASK_MIPS3D), \
11233 CMP_SCALAR_BUILTINS (cabs, COND, MASK_MIPS3D), \
11234 CMP_PS_BUILTINS (c, COND, MASK_PAIRED_SINGLE_FLOAT), \
11235 CMP_PS_BUILTINS (cabs, COND, MASK_MIPS3D), \
11236 CMP_4S_BUILTINS (c, COND), \
11237 CMP_4S_BUILTINS (cabs, COND)
11239 static const struct builtin_description mips_bdesc[] =
11241 DIRECT_BUILTIN (pll_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11242 DIRECT_BUILTIN (pul_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11243 DIRECT_BUILTIN (plu_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11244 DIRECT_BUILTIN (puu_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11245 DIRECT_BUILTIN (cvt_ps_s, MIPS_V2SF_FTYPE_SF_SF, MASK_PAIRED_SINGLE_FLOAT),
11246 DIRECT_BUILTIN (cvt_s_pl, MIPS_SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11247 DIRECT_BUILTIN (cvt_s_pu, MIPS_SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11248 DIRECT_BUILTIN (abs_ps, MIPS_V2SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT),
11250 DIRECT_BUILTIN (alnv_ps, MIPS_V2SF_FTYPE_V2SF_V2SF_INT,
11251 MASK_PAIRED_SINGLE_FLOAT),
11252 DIRECT_BUILTIN (addr_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
11253 DIRECT_BUILTIN (mulr_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
11254 DIRECT_BUILTIN (cvt_pw_ps, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
11255 DIRECT_BUILTIN (cvt_ps_pw, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
11257 DIRECT_BUILTIN (recip1_s, MIPS_SF_FTYPE_SF, MASK_MIPS3D),
11258 DIRECT_BUILTIN (recip1_d, MIPS_DF_FTYPE_DF, MASK_MIPS3D),
11259 DIRECT_BUILTIN (recip1_ps, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
11260 DIRECT_BUILTIN (recip2_s, MIPS_SF_FTYPE_SF_SF, MASK_MIPS3D),
11261 DIRECT_BUILTIN (recip2_d, MIPS_DF_FTYPE_DF_DF, MASK_MIPS3D),
11262 DIRECT_BUILTIN (recip2_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
11264 DIRECT_BUILTIN (rsqrt1_s, MIPS_SF_FTYPE_SF, MASK_MIPS3D),
11265 DIRECT_BUILTIN (rsqrt1_d, MIPS_DF_FTYPE_DF, MASK_MIPS3D),
11266 DIRECT_BUILTIN (rsqrt1_ps, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
11267 DIRECT_BUILTIN (rsqrt2_s, MIPS_SF_FTYPE_SF_SF, MASK_MIPS3D),
11268 DIRECT_BUILTIN (rsqrt2_d, MIPS_DF_FTYPE_DF_DF, MASK_MIPS3D),
11269 DIRECT_BUILTIN (rsqrt2_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
11271 MIPS_FP_CONDITIONS (CMP_BUILTINS)
11274 /* Builtin functions for the SB-1 processor. */
11276 #define CODE_FOR_mips_sqrt_ps CODE_FOR_sqrtv2sf2
11278 static const struct builtin_description sb1_bdesc[] =
11280 DIRECT_BUILTIN (sqrt_ps, MIPS_V2SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT)
11283 /* Builtin functions for DSP ASE. */
11285 #define CODE_FOR_mips_addq_ph CODE_FOR_addv2hi3
11286 #define CODE_FOR_mips_addu_qb CODE_FOR_addv4qi3
11287 #define CODE_FOR_mips_subq_ph CODE_FOR_subv2hi3
11288 #define CODE_FOR_mips_subu_qb CODE_FOR_subv4qi3
11289 #define CODE_FOR_mips_mul_ph CODE_FOR_mulv2hi3
11291 /* Define a MIPS_BUILTIN_DIRECT_NO_TARGET function for instruction
11292 CODE_FOR_mips_<INSN>. FUNCTION_TYPE and TARGET_FLAGS are
11293 builtin_description fields. */
11294 #define DIRECT_NO_TARGET_BUILTIN(INSN, FUNCTION_TYPE, TARGET_FLAGS) \
11295 { CODE_FOR_mips_ ## INSN, 0, "__builtin_mips_" #INSN, \
11296 MIPS_BUILTIN_DIRECT_NO_TARGET, FUNCTION_TYPE, TARGET_FLAGS }
11298 /* Define __builtin_mips_bposge<VALUE>. <VALUE> is 32 for the MIPS32 DSP
11299 branch instruction. TARGET_FLAGS is a builtin_description field. */
11300 #define BPOSGE_BUILTIN(VALUE, TARGET_FLAGS) \
11301 { CODE_FOR_mips_bposge, 0, "__builtin_mips_bposge" #VALUE, \
11302 MIPS_BUILTIN_BPOSGE ## VALUE, MIPS_SI_FTYPE_VOID, TARGET_FLAGS }
11304 static const struct builtin_description dsp_bdesc[] =
11306 DIRECT_BUILTIN (addq_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11307 DIRECT_BUILTIN (addq_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11308 DIRECT_BUILTIN (addq_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11309 DIRECT_BUILTIN (addu_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
11310 DIRECT_BUILTIN (addu_s_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
11311 DIRECT_BUILTIN (subq_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11312 DIRECT_BUILTIN (subq_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11313 DIRECT_BUILTIN (subq_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11314 DIRECT_BUILTIN (subu_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
11315 DIRECT_BUILTIN (subu_s_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
11316 DIRECT_BUILTIN (addsc, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11317 DIRECT_BUILTIN (addwc, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11318 DIRECT_BUILTIN (modsub, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11319 DIRECT_BUILTIN (raddu_w_qb, MIPS_SI_FTYPE_V4QI, MASK_DSP),
11320 DIRECT_BUILTIN (absq_s_ph, MIPS_V2HI_FTYPE_V2HI, MASK_DSP),
11321 DIRECT_BUILTIN (absq_s_w, MIPS_SI_FTYPE_SI, MASK_DSP),
11322 DIRECT_BUILTIN (precrq_qb_ph, MIPS_V4QI_FTYPE_V2HI_V2HI, MASK_DSP),
11323 DIRECT_BUILTIN (precrq_ph_w, MIPS_V2HI_FTYPE_SI_SI, MASK_DSP),
11324 DIRECT_BUILTIN (precrq_rs_ph_w, MIPS_V2HI_FTYPE_SI_SI, MASK_DSP),
11325 DIRECT_BUILTIN (precrqu_s_qb_ph, MIPS_V4QI_FTYPE_V2HI_V2HI, MASK_DSP),
11326 DIRECT_BUILTIN (preceq_w_phl, MIPS_SI_FTYPE_V2HI, MASK_DSP),
11327 DIRECT_BUILTIN (preceq_w_phr, MIPS_SI_FTYPE_V2HI, MASK_DSP),
11328 DIRECT_BUILTIN (precequ_ph_qbl, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11329 DIRECT_BUILTIN (precequ_ph_qbr, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11330 DIRECT_BUILTIN (precequ_ph_qbla, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11331 DIRECT_BUILTIN (precequ_ph_qbra, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11332 DIRECT_BUILTIN (preceu_ph_qbl, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11333 DIRECT_BUILTIN (preceu_ph_qbr, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11334 DIRECT_BUILTIN (preceu_ph_qbla, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11335 DIRECT_BUILTIN (preceu_ph_qbra, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
11336 DIRECT_BUILTIN (shll_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSP),
11337 DIRECT_BUILTIN (shll_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
11338 DIRECT_BUILTIN (shll_s_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
11339 DIRECT_BUILTIN (shll_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11340 DIRECT_BUILTIN (shrl_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSP),
11341 DIRECT_BUILTIN (shra_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
11342 DIRECT_BUILTIN (shra_r_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
11343 DIRECT_BUILTIN (shra_r_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11344 DIRECT_BUILTIN (muleu_s_ph_qbl, MIPS_V2HI_FTYPE_V4QI_V2HI, MASK_DSP),
11345 DIRECT_BUILTIN (muleu_s_ph_qbr, MIPS_V2HI_FTYPE_V4QI_V2HI, MASK_DSP),
11346 DIRECT_BUILTIN (mulq_rs_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11347 DIRECT_BUILTIN (muleq_s_w_phl, MIPS_SI_FTYPE_V2HI_V2HI, MASK_DSP),
11348 DIRECT_BUILTIN (muleq_s_w_phr, MIPS_SI_FTYPE_V2HI_V2HI, MASK_DSP),
11349 DIRECT_BUILTIN (bitrev, MIPS_SI_FTYPE_SI, MASK_DSP),
11350 DIRECT_BUILTIN (insv, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
11351 DIRECT_BUILTIN (repl_qb, MIPS_V4QI_FTYPE_SI, MASK_DSP),
11352 DIRECT_BUILTIN (repl_ph, MIPS_V2HI_FTYPE_SI, MASK_DSP),
11353 DIRECT_NO_TARGET_BUILTIN (cmpu_eq_qb, MIPS_VOID_FTYPE_V4QI_V4QI, MASK_DSP),
11354 DIRECT_NO_TARGET_BUILTIN (cmpu_lt_qb, MIPS_VOID_FTYPE_V4QI_V4QI, MASK_DSP),
11355 DIRECT_NO_TARGET_BUILTIN (cmpu_le_qb, MIPS_VOID_FTYPE_V4QI_V4QI, MASK_DSP),
11356 DIRECT_BUILTIN (cmpgu_eq_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSP),
11357 DIRECT_BUILTIN (cmpgu_lt_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSP),
11358 DIRECT_BUILTIN (cmpgu_le_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSP),
11359 DIRECT_NO_TARGET_BUILTIN (cmp_eq_ph, MIPS_VOID_FTYPE_V2HI_V2HI, MASK_DSP),
11360 DIRECT_NO_TARGET_BUILTIN (cmp_lt_ph, MIPS_VOID_FTYPE_V2HI_V2HI, MASK_DSP),
11361 DIRECT_NO_TARGET_BUILTIN (cmp_le_ph, MIPS_VOID_FTYPE_V2HI_V2HI, MASK_DSP),
11362 DIRECT_BUILTIN (pick_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
11363 DIRECT_BUILTIN (pick_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11364 DIRECT_BUILTIN (packrl_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
11365 DIRECT_NO_TARGET_BUILTIN (wrdsp, MIPS_VOID_FTYPE_SI_SI, MASK_DSP),
11366 DIRECT_BUILTIN (rddsp, MIPS_SI_FTYPE_SI, MASK_DSP),
11367 DIRECT_BUILTIN (lbux, MIPS_SI_FTYPE_PTR_SI, MASK_DSP),
11368 DIRECT_BUILTIN (lhx, MIPS_SI_FTYPE_PTR_SI, MASK_DSP),
11369 DIRECT_BUILTIN (lwx, MIPS_SI_FTYPE_PTR_SI, MASK_DSP),
11370 BPOSGE_BUILTIN (32, MASK_DSP),
11372 /* The following are for the MIPS DSP ASE REV 2. */
11373 DIRECT_BUILTIN (absq_s_qb, MIPS_V4QI_FTYPE_V4QI, MASK_DSPR2),
11374 DIRECT_BUILTIN (addu_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11375 DIRECT_BUILTIN (addu_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11376 DIRECT_BUILTIN (adduh_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11377 DIRECT_BUILTIN (adduh_r_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11378 DIRECT_BUILTIN (append, MIPS_SI_FTYPE_SI_SI_SI, MASK_DSPR2),
11379 DIRECT_BUILTIN (balign, MIPS_SI_FTYPE_SI_SI_SI, MASK_DSPR2),
11380 DIRECT_BUILTIN (cmpgdu_eq_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11381 DIRECT_BUILTIN (cmpgdu_lt_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11382 DIRECT_BUILTIN (cmpgdu_le_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11383 DIRECT_BUILTIN (mul_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11384 DIRECT_BUILTIN (mul_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11385 DIRECT_BUILTIN (mulq_rs_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
11386 DIRECT_BUILTIN (mulq_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11387 DIRECT_BUILTIN (mulq_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
11388 DIRECT_BUILTIN (precr_qb_ph, MIPS_V4QI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11389 DIRECT_BUILTIN (precr_sra_ph_w, MIPS_V2HI_FTYPE_SI_SI_SI, MASK_DSPR2),
11390 DIRECT_BUILTIN (precr_sra_r_ph_w, MIPS_V2HI_FTYPE_SI_SI_SI, MASK_DSPR2),
11391 DIRECT_BUILTIN (prepend, MIPS_SI_FTYPE_SI_SI_SI, MASK_DSPR2),
11392 DIRECT_BUILTIN (shra_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSPR2),
11393 DIRECT_BUILTIN (shra_r_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSPR2),
11394 DIRECT_BUILTIN (shrl_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSPR2),
11395 DIRECT_BUILTIN (subu_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11396 DIRECT_BUILTIN (subu_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11397 DIRECT_BUILTIN (subuh_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11398 DIRECT_BUILTIN (subuh_r_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSPR2),
11399 DIRECT_BUILTIN (addqh_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11400 DIRECT_BUILTIN (addqh_r_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11401 DIRECT_BUILTIN (addqh_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
11402 DIRECT_BUILTIN (addqh_r_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
11403 DIRECT_BUILTIN (subqh_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11404 DIRECT_BUILTIN (subqh_r_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
11405 DIRECT_BUILTIN (subqh_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
11406 DIRECT_BUILTIN (subqh_r_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2)
11409 static const struct builtin_description dsp_32only_bdesc[] =
11411 DIRECT_BUILTIN (dpau_h_qbl, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
11412 DIRECT_BUILTIN (dpau_h_qbr, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
11413 DIRECT_BUILTIN (dpsu_h_qbl, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
11414 DIRECT_BUILTIN (dpsu_h_qbr, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
11415 DIRECT_BUILTIN (dpaq_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11416 DIRECT_BUILTIN (dpsq_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11417 DIRECT_BUILTIN (mulsaq_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11418 DIRECT_BUILTIN (dpaq_sa_l_w, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSP),
11419 DIRECT_BUILTIN (dpsq_sa_l_w, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSP),
11420 DIRECT_BUILTIN (maq_s_w_phl, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11421 DIRECT_BUILTIN (maq_s_w_phr, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11422 DIRECT_BUILTIN (maq_sa_w_phl, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11423 DIRECT_BUILTIN (maq_sa_w_phr, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
11424 DIRECT_BUILTIN (extr_w, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
11425 DIRECT_BUILTIN (extr_r_w, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
11426 DIRECT_BUILTIN (extr_rs_w, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
11427 DIRECT_BUILTIN (extr_s_h, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
11428 DIRECT_BUILTIN (extp, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
11429 DIRECT_BUILTIN (extpdp, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
11430 DIRECT_BUILTIN (shilo, MIPS_DI_FTYPE_DI_SI, MASK_DSP),
11431 DIRECT_BUILTIN (mthlip, MIPS_DI_FTYPE_DI_SI, MASK_DSP),
11433 /* The following are for the MIPS DSP ASE REV 2. */
11434 DIRECT_BUILTIN (dpa_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11435 DIRECT_BUILTIN (dps_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11436 DIRECT_BUILTIN (madd, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSPR2),
11437 DIRECT_BUILTIN (maddu, MIPS_DI_FTYPE_DI_USI_USI, MASK_DSPR2),
11438 DIRECT_BUILTIN (msub, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSPR2),
11439 DIRECT_BUILTIN (msubu, MIPS_DI_FTYPE_DI_USI_USI, MASK_DSPR2),
11440 DIRECT_BUILTIN (mulsa_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11441 DIRECT_BUILTIN (mult, MIPS_DI_FTYPE_SI_SI, MASK_DSPR2),
11442 DIRECT_BUILTIN (multu, MIPS_DI_FTYPE_USI_USI, MASK_DSPR2),
11443 DIRECT_BUILTIN (dpax_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11444 DIRECT_BUILTIN (dpsx_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11445 DIRECT_BUILTIN (dpaqx_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11446 DIRECT_BUILTIN (dpaqx_sa_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11447 DIRECT_BUILTIN (dpsqx_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
11448 DIRECT_BUILTIN (dpsqx_sa_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2)
11451 /* This helps provide a mapping from builtin function codes to bdesc
11456 /* The builtin function table that this entry describes. */
11457 const struct builtin_description *bdesc;
11459 /* The number of entries in the builtin function table. */
11462 /* The target processor that supports these builtin functions.
11463 PROCESSOR_MAX means we enable them for all processors. */
11464 enum processor_type proc;
11466 /* If the target has these flags, this builtin function table
11467 will not be supported. */
11468 int unsupported_target_flags;
11471 static const struct bdesc_map bdesc_arrays[] =
11473 { mips_bdesc, ARRAY_SIZE (mips_bdesc), PROCESSOR_MAX, 0 },
11474 { sb1_bdesc, ARRAY_SIZE (sb1_bdesc), PROCESSOR_SB1, 0 },
11475 { dsp_bdesc, ARRAY_SIZE (dsp_bdesc), PROCESSOR_MAX, 0 },
11476 { dsp_32only_bdesc, ARRAY_SIZE (dsp_32only_bdesc), PROCESSOR_MAX,
11480 /* Take the argument ARGNUM of the arglist of EXP and convert it into a form
11481 suitable for input operand OP of instruction ICODE. Return the value. */
11484 mips_prepare_builtin_arg (enum insn_code icode,
11485 unsigned int op, tree exp, unsigned int argnum)
11488 enum machine_mode mode;
11490 value = expand_normal (CALL_EXPR_ARG (exp, argnum));
11491 mode = insn_data[icode].operand[op].mode;
11492 if (!insn_data[icode].operand[op].predicate (value, mode))
11494 value = copy_to_mode_reg (mode, value);
11495 /* Check the predicate again. */
11496 if (!insn_data[icode].operand[op].predicate (value, mode))
11498 error ("invalid argument to builtin function");
11506 /* Return an rtx suitable for output operand OP of instruction ICODE.
11507 If TARGET is non-null, try to use it where possible. */
11510 mips_prepare_builtin_target (enum insn_code icode, unsigned int op, rtx target)
11512 enum machine_mode mode;
11514 mode = insn_data[icode].operand[op].mode;
11515 if (target == 0 || !insn_data[icode].operand[op].predicate (target, mode))
11516 target = gen_reg_rtx (mode);
11521 /* Expand builtin functions. This is called from TARGET_EXPAND_BUILTIN. */
11524 mips_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
11525 enum machine_mode mode ATTRIBUTE_UNUSED,
11526 int ignore ATTRIBUTE_UNUSED)
11528 enum insn_code icode;
11529 enum mips_builtin_type type;
11531 unsigned int fcode;
11532 const struct builtin_description *bdesc;
11533 const struct bdesc_map *m;
11535 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
11536 fcode = DECL_FUNCTION_CODE (fndecl);
11539 for (m = bdesc_arrays; m < &bdesc_arrays[ARRAY_SIZE (bdesc_arrays)]; m++)
11541 if (fcode < m->size)
11544 icode = bdesc[fcode].icode;
11545 type = bdesc[fcode].builtin_type;
11555 case MIPS_BUILTIN_DIRECT:
11556 return mips_expand_builtin_direct (icode, target, exp, true);
11558 case MIPS_BUILTIN_DIRECT_NO_TARGET:
11559 return mips_expand_builtin_direct (icode, target, exp, false);
11561 case MIPS_BUILTIN_MOVT:
11562 case MIPS_BUILTIN_MOVF:
11563 return mips_expand_builtin_movtf (type, icode, bdesc[fcode].cond,
11566 case MIPS_BUILTIN_CMP_ANY:
11567 case MIPS_BUILTIN_CMP_ALL:
11568 case MIPS_BUILTIN_CMP_UPPER:
11569 case MIPS_BUILTIN_CMP_LOWER:
11570 case MIPS_BUILTIN_CMP_SINGLE:
11571 return mips_expand_builtin_compare (type, icode, bdesc[fcode].cond,
11574 case MIPS_BUILTIN_BPOSGE32:
11575 return mips_expand_builtin_bposge (type, target);
11582 /* Init builtin functions. This is called from TARGET_INIT_BUILTIN. */
11585 mips_init_builtins (void)
11587 const struct builtin_description *d;
11588 const struct bdesc_map *m;
11589 tree types[(int) MIPS_MAX_FTYPE_MAX];
11590 tree V2SF_type_node;
11591 tree V2HI_type_node;
11592 tree V4QI_type_node;
11593 unsigned int offset;
11595 /* We have only builtins for -mpaired-single, -mips3d and -mdsp. */
11596 if (!TARGET_PAIRED_SINGLE_FLOAT && !TARGET_DSP)
11599 if (TARGET_PAIRED_SINGLE_FLOAT)
11601 V2SF_type_node = build_vector_type_for_mode (float_type_node, V2SFmode);
11603 types[MIPS_V2SF_FTYPE_V2SF]
11604 = build_function_type_list (V2SF_type_node, V2SF_type_node, NULL_TREE);
11606 types[MIPS_V2SF_FTYPE_V2SF_V2SF]
11607 = build_function_type_list (V2SF_type_node,
11608 V2SF_type_node, V2SF_type_node, NULL_TREE);
11610 types[MIPS_V2SF_FTYPE_V2SF_V2SF_INT]
11611 = build_function_type_list (V2SF_type_node,
11612 V2SF_type_node, V2SF_type_node,
11613 integer_type_node, NULL_TREE);
11615 types[MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF]
11616 = build_function_type_list (V2SF_type_node,
11617 V2SF_type_node, V2SF_type_node,
11618 V2SF_type_node, V2SF_type_node, NULL_TREE);
11620 types[MIPS_V2SF_FTYPE_SF_SF]
11621 = build_function_type_list (V2SF_type_node,
11622 float_type_node, float_type_node, NULL_TREE);
11624 types[MIPS_INT_FTYPE_V2SF_V2SF]
11625 = build_function_type_list (integer_type_node,
11626 V2SF_type_node, V2SF_type_node, NULL_TREE);
11628 types[MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF]
11629 = build_function_type_list (integer_type_node,
11630 V2SF_type_node, V2SF_type_node,
11631 V2SF_type_node, V2SF_type_node, NULL_TREE);
11633 types[MIPS_INT_FTYPE_SF_SF]
11634 = build_function_type_list (integer_type_node,
11635 float_type_node, float_type_node, NULL_TREE);
11637 types[MIPS_INT_FTYPE_DF_DF]
11638 = build_function_type_list (integer_type_node,
11639 double_type_node, double_type_node, NULL_TREE);
11641 types[MIPS_SF_FTYPE_V2SF]
11642 = build_function_type_list (float_type_node, V2SF_type_node, NULL_TREE);
11644 types[MIPS_SF_FTYPE_SF]
11645 = build_function_type_list (float_type_node,
11646 float_type_node, NULL_TREE);
11648 types[MIPS_SF_FTYPE_SF_SF]
11649 = build_function_type_list (float_type_node,
11650 float_type_node, float_type_node, NULL_TREE);
11652 types[MIPS_DF_FTYPE_DF]
11653 = build_function_type_list (double_type_node,
11654 double_type_node, NULL_TREE);
11656 types[MIPS_DF_FTYPE_DF_DF]
11657 = build_function_type_list (double_type_node,
11658 double_type_node, double_type_node, NULL_TREE);
11663 V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
11664 V4QI_type_node = build_vector_type_for_mode (intQI_type_node, V4QImode);
11666 types[MIPS_V2HI_FTYPE_V2HI_V2HI]
11667 = build_function_type_list (V2HI_type_node,
11668 V2HI_type_node, V2HI_type_node,
11671 types[MIPS_SI_FTYPE_SI_SI]
11672 = build_function_type_list (intSI_type_node,
11673 intSI_type_node, intSI_type_node,
11676 types[MIPS_V4QI_FTYPE_V4QI_V4QI]
11677 = build_function_type_list (V4QI_type_node,
11678 V4QI_type_node, V4QI_type_node,
11681 types[MIPS_SI_FTYPE_V4QI]
11682 = build_function_type_list (intSI_type_node,
11686 types[MIPS_V2HI_FTYPE_V2HI]
11687 = build_function_type_list (V2HI_type_node,
11691 types[MIPS_SI_FTYPE_SI]
11692 = build_function_type_list (intSI_type_node,
11696 types[MIPS_V4QI_FTYPE_V2HI_V2HI]
11697 = build_function_type_list (V4QI_type_node,
11698 V2HI_type_node, V2HI_type_node,
11701 types[MIPS_V2HI_FTYPE_SI_SI]
11702 = build_function_type_list (V2HI_type_node,
11703 intSI_type_node, intSI_type_node,
11706 types[MIPS_SI_FTYPE_V2HI]
11707 = build_function_type_list (intSI_type_node,
11711 types[MIPS_V2HI_FTYPE_V4QI]
11712 = build_function_type_list (V2HI_type_node,
11716 types[MIPS_V4QI_FTYPE_V4QI_SI]
11717 = build_function_type_list (V4QI_type_node,
11718 V4QI_type_node, intSI_type_node,
11721 types[MIPS_V2HI_FTYPE_V2HI_SI]
11722 = build_function_type_list (V2HI_type_node,
11723 V2HI_type_node, intSI_type_node,
11726 types[MIPS_V2HI_FTYPE_V4QI_V2HI]
11727 = build_function_type_list (V2HI_type_node,
11728 V4QI_type_node, V2HI_type_node,
11731 types[MIPS_SI_FTYPE_V2HI_V2HI]
11732 = build_function_type_list (intSI_type_node,
11733 V2HI_type_node, V2HI_type_node,
11736 types[MIPS_DI_FTYPE_DI_V4QI_V4QI]
11737 = build_function_type_list (intDI_type_node,
11738 intDI_type_node, V4QI_type_node, V4QI_type_node,
11741 types[MIPS_DI_FTYPE_DI_V2HI_V2HI]
11742 = build_function_type_list (intDI_type_node,
11743 intDI_type_node, V2HI_type_node, V2HI_type_node,
11746 types[MIPS_DI_FTYPE_DI_SI_SI]
11747 = build_function_type_list (intDI_type_node,
11748 intDI_type_node, intSI_type_node, intSI_type_node,
11751 types[MIPS_V4QI_FTYPE_SI]
11752 = build_function_type_list (V4QI_type_node,
11756 types[MIPS_V2HI_FTYPE_SI]
11757 = build_function_type_list (V2HI_type_node,
11761 types[MIPS_VOID_FTYPE_V4QI_V4QI]
11762 = build_function_type_list (void_type_node,
11763 V4QI_type_node, V4QI_type_node,
11766 types[MIPS_SI_FTYPE_V4QI_V4QI]
11767 = build_function_type_list (intSI_type_node,
11768 V4QI_type_node, V4QI_type_node,
11771 types[MIPS_VOID_FTYPE_V2HI_V2HI]
11772 = build_function_type_list (void_type_node,
11773 V2HI_type_node, V2HI_type_node,
11776 types[MIPS_SI_FTYPE_DI_SI]
11777 = build_function_type_list (intSI_type_node,
11778 intDI_type_node, intSI_type_node,
11781 types[MIPS_DI_FTYPE_DI_SI]
11782 = build_function_type_list (intDI_type_node,
11783 intDI_type_node, intSI_type_node,
11786 types[MIPS_VOID_FTYPE_SI_SI]
11787 = build_function_type_list (void_type_node,
11788 intSI_type_node, intSI_type_node,
11791 types[MIPS_SI_FTYPE_PTR_SI]
11792 = build_function_type_list (intSI_type_node,
11793 ptr_type_node, intSI_type_node,
11796 types[MIPS_SI_FTYPE_VOID]
11797 = build_function_type (intSI_type_node, void_list_node);
11801 types[MIPS_V4QI_FTYPE_V4QI]
11802 = build_function_type_list (V4QI_type_node,
11806 types[MIPS_SI_FTYPE_SI_SI_SI]
11807 = build_function_type_list (intSI_type_node,
11808 intSI_type_node, intSI_type_node,
11809 intSI_type_node, NULL_TREE);
11811 types[MIPS_DI_FTYPE_DI_USI_USI]
11812 = build_function_type_list (intDI_type_node,
11814 unsigned_intSI_type_node,
11815 unsigned_intSI_type_node, NULL_TREE);
11817 types[MIPS_DI_FTYPE_SI_SI]
11818 = build_function_type_list (intDI_type_node,
11819 intSI_type_node, intSI_type_node,
11822 types[MIPS_DI_FTYPE_USI_USI]
11823 = build_function_type_list (intDI_type_node,
11824 unsigned_intSI_type_node,
11825 unsigned_intSI_type_node, NULL_TREE);
11827 types[MIPS_V2HI_FTYPE_SI_SI_SI]
11828 = build_function_type_list (V2HI_type_node,
11829 intSI_type_node, intSI_type_node,
11830 intSI_type_node, NULL_TREE);
11835 /* Iterate through all of the bdesc arrays, initializing all of the
11836 builtin functions. */
11839 for (m = bdesc_arrays; m < &bdesc_arrays[ARRAY_SIZE (bdesc_arrays)]; m++)
11841 if ((m->proc == PROCESSOR_MAX || (m->proc == mips_arch))
11842 && (m->unsupported_target_flags & target_flags) == 0)
11843 for (d = m->bdesc; d < &m->bdesc[m->size]; d++)
11844 if ((d->target_flags & target_flags) == d->target_flags)
11845 add_builtin_function (d->name, types[d->function_type],
11846 d - m->bdesc + offset,
11847 BUILT_IN_MD, NULL, NULL);
11852 /* Expand a MIPS_BUILTIN_DIRECT function. ICODE is the code of the
11853 .md pattern and CALL is the function expr with arguments. TARGET,
11854 if nonnull, suggests a good place to put the result.
11855 HAS_TARGET indicates the function must return something. */
11858 mips_expand_builtin_direct (enum insn_code icode, rtx target, tree exp,
11861 rtx ops[MAX_RECOG_OPERANDS];
11867 /* We save target to ops[0]. */
11868 ops[0] = mips_prepare_builtin_target (icode, 0, target);
11872 /* We need to test if the arglist is not zero. Some instructions have extra
11873 clobber registers. */
11874 for (; i < insn_data[icode].n_operands && i <= call_expr_nargs (exp); i++, j++)
11875 ops[i] = mips_prepare_builtin_arg (icode, i, exp, j);
11880 emit_insn (GEN_FCN (icode) (ops[0], ops[1]));
11884 emit_insn (GEN_FCN (icode) (ops[0], ops[1], ops[2]));
11888 emit_insn (GEN_FCN (icode) (ops[0], ops[1], ops[2], ops[3]));
11892 gcc_unreachable ();
11897 /* Expand a __builtin_mips_movt_*_ps() or __builtin_mips_movf_*_ps()
11898 function (TYPE says which). EXP is the tree for the function
11899 function, ICODE is the instruction that should be used to compare
11900 the first two arguments, and COND is the condition it should test.
11901 TARGET, if nonnull, suggests a good place to put the result. */
11904 mips_expand_builtin_movtf (enum mips_builtin_type type,
11905 enum insn_code icode, enum mips_fp_condition cond,
11906 rtx target, tree exp)
11908 rtx cmp_result, op0, op1;
11910 cmp_result = mips_prepare_builtin_target (icode, 0, 0);
11911 op0 = mips_prepare_builtin_arg (icode, 1, exp, 0);
11912 op1 = mips_prepare_builtin_arg (icode, 2, exp, 1);
11913 emit_insn (GEN_FCN (icode) (cmp_result, op0, op1, GEN_INT (cond)));
11915 icode = CODE_FOR_mips_cond_move_tf_ps;
11916 target = mips_prepare_builtin_target (icode, 0, target);
11917 if (type == MIPS_BUILTIN_MOVT)
11919 op1 = mips_prepare_builtin_arg (icode, 2, exp, 2);
11920 op0 = mips_prepare_builtin_arg (icode, 1, exp, 3);
11924 op0 = mips_prepare_builtin_arg (icode, 1, exp, 2);
11925 op1 = mips_prepare_builtin_arg (icode, 2, exp, 3);
11927 emit_insn (gen_mips_cond_move_tf_ps (target, op0, op1, cmp_result));
11931 /* Move VALUE_IF_TRUE into TARGET if CONDITION is true; move VALUE_IF_FALSE
11932 into TARGET otherwise. Return TARGET. */
11935 mips_builtin_branch_and_move (rtx condition, rtx target,
11936 rtx value_if_true, rtx value_if_false)
11938 rtx true_label, done_label;
11940 true_label = gen_label_rtx ();
11941 done_label = gen_label_rtx ();
11943 /* First assume that CONDITION is false. */
11944 mips_emit_move (target, value_if_false);
11946 /* Branch to TRUE_LABEL if CONDITION is true and DONE_LABEL otherwise. */
11947 emit_jump_insn (gen_condjump (condition, true_label));
11948 emit_jump_insn (gen_jump (done_label));
11951 /* Fix TARGET if CONDITION is true. */
11952 emit_label (true_label);
11953 mips_emit_move (target, value_if_true);
11955 emit_label (done_label);
11959 /* Expand a comparison builtin of type BUILTIN_TYPE. ICODE is the code
11960 of the comparison instruction and COND is the condition it should test.
11961 EXP is the function call and arguments and TARGET, if nonnull,
11962 suggests a good place to put the boolean result. */
11965 mips_expand_builtin_compare (enum mips_builtin_type builtin_type,
11966 enum insn_code icode, enum mips_fp_condition cond,
11967 rtx target, tree exp)
11969 rtx offset, condition, cmp_result, ops[MAX_RECOG_OPERANDS];
11973 if (target == 0 || GET_MODE (target) != SImode)
11974 target = gen_reg_rtx (SImode);
11976 /* Prepare the operands to the comparison. */
11977 cmp_result = mips_prepare_builtin_target (icode, 0, 0);
11978 for (i = 1; i < insn_data[icode].n_operands - 1; i++, j++)
11979 ops[i] = mips_prepare_builtin_arg (icode, i, exp, j);
11981 switch (insn_data[icode].n_operands)
11984 emit_insn (GEN_FCN (icode) (cmp_result, ops[1], ops[2], GEN_INT (cond)));
11988 emit_insn (GEN_FCN (icode) (cmp_result, ops[1], ops[2],
11989 ops[3], ops[4], GEN_INT (cond)));
11993 gcc_unreachable ();
11996 /* If the comparison sets more than one register, we define the result
11997 to be 0 if all registers are false and -1 if all registers are true.
11998 The value of the complete result is indeterminate otherwise. */
11999 switch (builtin_type)
12001 case MIPS_BUILTIN_CMP_ALL:
12002 condition = gen_rtx_NE (VOIDmode, cmp_result, constm1_rtx);
12003 return mips_builtin_branch_and_move (condition, target,
12004 const0_rtx, const1_rtx);
12006 case MIPS_BUILTIN_CMP_UPPER:
12007 case MIPS_BUILTIN_CMP_LOWER:
12008 offset = GEN_INT (builtin_type == MIPS_BUILTIN_CMP_UPPER);
12009 condition = gen_single_cc (cmp_result, offset);
12010 return mips_builtin_branch_and_move (condition, target,
12011 const1_rtx, const0_rtx);
12014 condition = gen_rtx_NE (VOIDmode, cmp_result, const0_rtx);
12015 return mips_builtin_branch_and_move (condition, target,
12016 const1_rtx, const0_rtx);
12020 /* Expand a bposge builtin of type BUILTIN_TYPE. TARGET, if nonnull,
12021 suggests a good place to put the boolean result. */
12024 mips_expand_builtin_bposge (enum mips_builtin_type builtin_type, rtx target)
12026 rtx condition, cmp_result;
12029 if (target == 0 || GET_MODE (target) != SImode)
12030 target = gen_reg_rtx (SImode);
12032 cmp_result = gen_rtx_REG (CCDSPmode, CCDSP_PO_REGNUM);
12034 if (builtin_type == MIPS_BUILTIN_BPOSGE32)
12039 condition = gen_rtx_GE (VOIDmode, cmp_result, GEN_INT (cmp_value));
12040 return mips_builtin_branch_and_move (condition, target,
12041 const1_rtx, const0_rtx);
12044 /* Set SYMBOL_REF_FLAGS for the SYMBOL_REF inside RTL, which belongs to DECL.
12045 FIRST is true if this is the first time handling this decl. */
12048 mips_encode_section_info (tree decl, rtx rtl, int first)
12050 default_encode_section_info (decl, rtl, first);
12052 if (TREE_CODE (decl) == FUNCTION_DECL)
12054 rtx symbol = XEXP (rtl, 0);
12056 if ((TARGET_LONG_CALLS && !mips_near_type_p (TREE_TYPE (decl)))
12057 || mips_far_type_p (TREE_TYPE (decl)))
12058 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LONG_CALL;
12062 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. Some code models use the incoming
12063 value of PIC_FUNCTION_ADDR_REGNUM to set up the global pointer. */
12066 mips_extra_live_on_entry (bitmap regs)
12068 if (TARGET_USE_GOT && !TARGET_ABSOLUTE_ABICALLS)
12069 bitmap_set_bit (regs, PIC_FUNCTION_ADDR_REGNUM);
12072 /* SImode values are represented as sign-extended to DImode. */
12075 mips_mode_rep_extended (enum machine_mode mode, enum machine_mode mode_rep)
12077 if (TARGET_64BIT && mode == SImode && mode_rep == DImode)
12078 return SIGN_EXTEND;
12083 /* MIPS implementation of TARGET_ASM_OUTPUT_DWARF_DTPREL. */
12086 mips_output_dwarf_dtprel (FILE *file, int size, rtx x)
12091 fputs ("\t.dtprelword\t", file);
12095 fputs ("\t.dtpreldword\t", file);
12099 gcc_unreachable ();
12101 output_addr_const (file, x);
12102 fputs ("+0x8000", file);
12105 #include "gt-mips.h"