1 /* Subroutines used for MIPS code generation.
2 Copyright (C) 1989, 1990, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5 Contributed by A. Lichnewsky, lich@inria.inria.fr.
6 Changes by Michael Meissner, meissner@osf.org.
7 64-bit r4000 support by Ian Lance Taylor, ian@cygnus.com, and
8 Brendan Eich, brendan@microunity.com.
10 This file is part of GCC.
12 GCC is free software; you can redistribute it and/or modify
13 it under the terms of the GNU General Public License as published by
14 the Free Software Foundation; either version 3, or (at your option)
17 GCC is distributed in the hope that it will be useful,
18 but WITHOUT ANY WARRANTY; without even the implied warranty of
19 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 GNU General Public License for more details.
22 You should have received a copy of the GNU General Public License
23 along with GCC; see the file COPYING3. If not see
24 <http://www.gnu.org/licenses/>. */
28 #include "coretypes.h"
33 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "conditions.h"
37 #include "insn-attr.h"
53 #include "target-def.h"
54 #include "integrate.h"
55 #include "langhooks.h"
56 #include "cfglayout.h"
57 #include "sched-int.h"
58 #include "tree-gimple.h"
60 #include "diagnostic.h"
62 /* True if X is an unspec wrapper around a SYMBOL_REF or LABEL_REF. */
63 #define UNSPEC_ADDRESS_P(X) \
64 (GET_CODE (X) == UNSPEC \
65 && XINT (X, 1) >= UNSPEC_ADDRESS_FIRST \
66 && XINT (X, 1) < UNSPEC_ADDRESS_FIRST + NUM_SYMBOL_TYPES)
68 /* Extract the symbol or label from UNSPEC wrapper X. */
69 #define UNSPEC_ADDRESS(X) \
72 /* Extract the symbol type from UNSPEC wrapper X. */
73 #define UNSPEC_ADDRESS_TYPE(X) \
74 ((enum mips_symbol_type) (XINT (X, 1) - UNSPEC_ADDRESS_FIRST))
76 /* The maximum distance between the top of the stack frame and the
77 value $sp has when we save and restore registers.
79 The value for normal-mode code must be a SMALL_OPERAND and must
80 preserve the maximum stack alignment. We therefore use a value
81 of 0x7ff0 in this case.
83 MIPS16e SAVE and RESTORE instructions can adjust the stack pointer by
84 up to 0x7f8 bytes and can usually save or restore all the registers
85 that we need to save or restore. (Note that we can only use these
86 instructions for o32, for which the stack alignment is 8 bytes.)
88 We use a maximum gap of 0x100 or 0x400 for MIPS16 code when SAVE and
89 RESTORE are not available. We can then use unextended instructions
90 to save and restore registers, and to allocate and deallocate the top
92 #define MIPS_MAX_FIRST_STACK_STEP \
93 (!TARGET_MIPS16 ? 0x7ff0 \
94 : GENERATE_MIPS16E_SAVE_RESTORE ? 0x7f8 \
95 : TARGET_64BIT ? 0x100 : 0x400)
97 /* True if INSN is a mips.md pattern or asm statement. */
98 #define USEFUL_INSN_P(INSN) \
100 && GET_CODE (PATTERN (INSN)) != USE \
101 && GET_CODE (PATTERN (INSN)) != CLOBBER \
102 && GET_CODE (PATTERN (INSN)) != ADDR_VEC \
103 && GET_CODE (PATTERN (INSN)) != ADDR_DIFF_VEC)
105 /* If INSN is a delayed branch sequence, return the first instruction
106 in the sequence, otherwise return INSN itself. */
107 #define SEQ_BEGIN(INSN) \
108 (INSN_P (INSN) && GET_CODE (PATTERN (INSN)) == SEQUENCE \
109 ? XVECEXP (PATTERN (INSN), 0, 0) \
112 /* Likewise for the last instruction in a delayed branch sequence. */
113 #define SEQ_END(INSN) \
114 (INSN_P (INSN) && GET_CODE (PATTERN (INSN)) == SEQUENCE \
115 ? XVECEXP (PATTERN (INSN), 0, XVECLEN (PATTERN (INSN), 0) - 1) \
118 /* Execute the following loop body with SUBINSN set to each instruction
119 between SEQ_BEGIN (INSN) and SEQ_END (INSN) inclusive. */
120 #define FOR_EACH_SUBINSN(SUBINSN, INSN) \
121 for ((SUBINSN) = SEQ_BEGIN (INSN); \
122 (SUBINSN) != NEXT_INSN (SEQ_END (INSN)); \
123 (SUBINSN) = NEXT_INSN (SUBINSN))
125 /* True if bit BIT is set in VALUE. */
126 #define BITSET_P(VALUE, BIT) (((VALUE) & (1 << (BIT))) != 0)
128 /* Classifies an address.
131 A natural register + offset address. The register satisfies
132 mips_valid_base_register_p and the offset is a const_arith_operand.
135 A LO_SUM rtx. The first operand is a valid base register and
136 the second operand is a symbolic address.
139 A signed 16-bit constant address.
142 A constant symbolic address (equivalent to CONSTANT_SYMBOLIC). */
143 enum mips_address_type {
150 /* Classifies the prototype of a builtin function. */
151 enum mips_function_type
153 MIPS_V2SF_FTYPE_V2SF,
154 MIPS_V2SF_FTYPE_V2SF_V2SF,
155 MIPS_V2SF_FTYPE_V2SF_V2SF_INT,
156 MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF,
157 MIPS_V2SF_FTYPE_SF_SF,
158 MIPS_INT_FTYPE_V2SF_V2SF,
159 MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF,
160 MIPS_INT_FTYPE_SF_SF,
161 MIPS_INT_FTYPE_DF_DF,
168 /* For MIPS DSP ASE */
170 MIPS_DI_FTYPE_DI_SI_SI,
171 MIPS_DI_FTYPE_DI_V2HI_V2HI,
172 MIPS_DI_FTYPE_DI_V4QI_V4QI,
174 MIPS_SI_FTYPE_PTR_SI,
178 MIPS_SI_FTYPE_V2HI_V2HI,
180 MIPS_SI_FTYPE_V4QI_V4QI,
183 MIPS_V2HI_FTYPE_SI_SI,
184 MIPS_V2HI_FTYPE_V2HI,
185 MIPS_V2HI_FTYPE_V2HI_SI,
186 MIPS_V2HI_FTYPE_V2HI_V2HI,
187 MIPS_V2HI_FTYPE_V4QI,
188 MIPS_V2HI_FTYPE_V4QI_V2HI,
190 MIPS_V4QI_FTYPE_V2HI_V2HI,
191 MIPS_V4QI_FTYPE_V4QI_SI,
192 MIPS_V4QI_FTYPE_V4QI_V4QI,
193 MIPS_VOID_FTYPE_SI_SI,
194 MIPS_VOID_FTYPE_V2HI_V2HI,
195 MIPS_VOID_FTYPE_V4QI_V4QI,
197 /* For MIPS DSP REV 2 ASE. */
198 MIPS_V4QI_FTYPE_V4QI,
199 MIPS_SI_FTYPE_SI_SI_SI,
200 MIPS_DI_FTYPE_DI_USI_USI,
202 MIPS_DI_FTYPE_USI_USI,
203 MIPS_V2HI_FTYPE_SI_SI_SI,
209 /* Specifies how a builtin function should be converted into rtl. */
210 enum mips_builtin_type
212 /* The builtin corresponds directly to an .md pattern. The return
213 value is mapped to operand 0 and the arguments are mapped to
214 operands 1 and above. */
217 /* The builtin corresponds directly to an .md pattern. There is no return
218 value and the arguments are mapped to operands 0 and above. */
219 MIPS_BUILTIN_DIRECT_NO_TARGET,
221 /* The builtin corresponds to a comparison instruction followed by
222 a mips_cond_move_tf_ps pattern. The first two arguments are the
223 values to compare and the second two arguments are the vector
224 operands for the movt.ps or movf.ps instruction (in assembly order). */
228 /* The builtin corresponds to a V2SF comparison instruction. Operand 0
229 of this instruction is the result of the comparison, which has mode
230 CCV2 or CCV4. The function arguments are mapped to operands 1 and
231 above. The function's return value is an SImode boolean that is
232 true under the following conditions:
234 MIPS_BUILTIN_CMP_ANY: one of the registers is true
235 MIPS_BUILTIN_CMP_ALL: all of the registers are true
236 MIPS_BUILTIN_CMP_LOWER: the first register is true
237 MIPS_BUILTIN_CMP_UPPER: the second register is true. */
238 MIPS_BUILTIN_CMP_ANY,
239 MIPS_BUILTIN_CMP_ALL,
240 MIPS_BUILTIN_CMP_UPPER,
241 MIPS_BUILTIN_CMP_LOWER,
243 /* As above, but the instruction only sets a single $fcc register. */
244 MIPS_BUILTIN_CMP_SINGLE,
246 /* For generating bposge32 branch instructions in MIPS32 DSP ASE. */
247 MIPS_BUILTIN_BPOSGE32
250 /* Invokes MACRO (COND) for each c.cond.fmt condition. */
251 #define MIPS_FP_CONDITIONS(MACRO) \
269 /* Enumerates the codes above as MIPS_FP_COND_<X>. */
270 #define DECLARE_MIPS_COND(X) MIPS_FP_COND_ ## X
271 enum mips_fp_condition {
272 MIPS_FP_CONDITIONS (DECLARE_MIPS_COND)
275 /* Index X provides the string representation of MIPS_FP_COND_<X>. */
276 #define STRINGIFY(X) #X
277 static const char *const mips_fp_conditions[] = {
278 MIPS_FP_CONDITIONS (STRINGIFY)
281 /* A function to save or store a register. The first argument is the
282 register and the second is the stack slot. */
283 typedef void (*mips_save_restore_fn) (rtx, rtx);
285 struct mips16_constant;
286 struct mips_arg_info;
287 struct mips_address_info;
288 struct mips_integer_op;
291 static bool mips_valid_base_register_p (rtx, enum machine_mode, int);
292 static bool mips_classify_address (struct mips_address_info *, rtx,
293 enum machine_mode, int);
294 static bool mips_cannot_force_const_mem (rtx);
295 static bool mips_use_blocks_for_constant_p (enum machine_mode, const_rtx);
296 static int mips_symbol_insns (enum mips_symbol_type, enum machine_mode);
297 static bool mips16_unextended_reference_p (enum machine_mode mode, rtx, rtx);
298 static rtx mips_force_temporary (rtx, rtx);
299 static rtx mips_unspec_offset_high (rtx, rtx, rtx, enum mips_symbol_type);
300 static rtx mips_add_offset (rtx, rtx, HOST_WIDE_INT);
301 static unsigned int mips_build_shift (struct mips_integer_op *, HOST_WIDE_INT);
302 static unsigned int mips_build_lower (struct mips_integer_op *,
303 unsigned HOST_WIDE_INT);
304 static unsigned int mips_build_integer (struct mips_integer_op *,
305 unsigned HOST_WIDE_INT);
306 static void mips_legitimize_const_move (enum machine_mode, rtx, rtx);
307 static int m16_check_op (rtx, int, int, int);
308 static bool mips_rtx_costs (rtx, int, int, int *);
309 static int mips_address_cost (rtx);
310 static void mips_emit_compare (enum rtx_code *, rtx *, rtx *, bool);
311 static void mips_load_call_address (rtx, rtx, int);
312 static bool mips_function_ok_for_sibcall (tree, tree);
313 static void mips_block_move_straight (rtx, rtx, HOST_WIDE_INT);
314 static void mips_adjust_block_mem (rtx, HOST_WIDE_INT, rtx *, rtx *);
315 static void mips_block_move_loop (rtx, rtx, HOST_WIDE_INT);
316 static void mips_arg_info (const CUMULATIVE_ARGS *, enum machine_mode,
317 tree, int, struct mips_arg_info *);
318 static bool mips_get_unaligned_mem (rtx *, unsigned int, int, rtx *, rtx *);
319 static void mips_set_architecture (const struct mips_cpu_info *);
320 static void mips_set_tune (const struct mips_cpu_info *);
321 static bool mips_handle_option (size_t, const char *, int);
322 static struct machine_function *mips_init_machine_status (void);
323 static void print_operand_reloc (FILE *, rtx, enum mips_symbol_context,
325 static void mips_file_start (void);
326 static int mips_small_data_pattern_1 (rtx *, void *);
327 static int mips_rewrite_small_data_1 (rtx *, void *);
328 static bool mips_function_has_gp_insn (void);
329 static unsigned int mips_global_pointer (void);
330 static bool mips_save_reg_p (unsigned int);
331 static void mips_save_restore_reg (enum machine_mode, int, HOST_WIDE_INT,
332 mips_save_restore_fn);
333 static void mips_for_each_saved_reg (HOST_WIDE_INT, mips_save_restore_fn);
334 static void mips_output_cplocal (void);
335 static void mips_emit_loadgp (void);
336 static void mips_output_function_prologue (FILE *, HOST_WIDE_INT);
337 static void mips_set_frame_expr (rtx);
338 static rtx mips_frame_set (rtx, rtx);
339 static void mips_save_reg (rtx, rtx);
340 static void mips_output_function_epilogue (FILE *, HOST_WIDE_INT);
341 static void mips_restore_reg (rtx, rtx);
342 static void mips_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
343 HOST_WIDE_INT, tree);
344 static section *mips_select_rtx_section (enum machine_mode, rtx,
345 unsigned HOST_WIDE_INT);
346 static section *mips_function_rodata_section (tree);
347 static bool mips_in_small_data_p (const_tree);
348 static bool mips_use_anchors_for_symbol_p (const_rtx);
349 static int mips_fpr_return_fields (const_tree, tree *);
350 static bool mips_return_in_msb (const_tree);
351 static rtx mips_return_fpr_pair (enum machine_mode mode,
352 enum machine_mode mode1, HOST_WIDE_INT,
353 enum machine_mode mode2, HOST_WIDE_INT);
354 static rtx mips16_gp_pseudo_reg (void);
355 static void mips16_fp_args (FILE *, int, int);
356 static void build_mips16_function_stub (FILE *);
357 static rtx dump_constants_1 (enum machine_mode, rtx, rtx);
358 static void dump_constants (struct mips16_constant *, rtx);
359 static int mips16_insn_length (rtx);
360 static int mips16_rewrite_pool_refs (rtx *, void *);
361 static void mips16_lay_out_constants (void);
362 static void mips_sim_reset (struct mips_sim *);
363 static void mips_sim_init (struct mips_sim *, state_t);
364 static void mips_sim_next_cycle (struct mips_sim *);
365 static void mips_sim_wait_reg (struct mips_sim *, rtx, rtx);
366 static int mips_sim_wait_regs_2 (rtx *, void *);
367 static void mips_sim_wait_regs_1 (rtx *, void *);
368 static void mips_sim_wait_regs (struct mips_sim *, rtx);
369 static void mips_sim_wait_units (struct mips_sim *, rtx);
370 static void mips_sim_wait_insn (struct mips_sim *, rtx);
371 static void mips_sim_record_set (rtx, const_rtx, void *);
372 static void mips_sim_issue_insn (struct mips_sim *, rtx);
373 static void mips_sim_issue_nop (struct mips_sim *);
374 static void mips_sim_finish_insn (struct mips_sim *, rtx);
375 static void vr4130_avoid_branch_rt_conflict (rtx);
376 static void vr4130_align_insns (void);
377 static void mips_avoid_hazard (rtx, rtx, int *, rtx *, rtx);
378 static void mips_avoid_hazards (void);
379 static void mips_reorg (void);
380 static bool mips_strict_matching_cpu_name_p (const char *, const char *);
381 static bool mips_matching_cpu_name_p (const char *, const char *);
382 static const struct mips_cpu_info *mips_parse_cpu (const char *);
383 static const struct mips_cpu_info *mips_cpu_info_from_isa (int);
384 static bool mips_return_in_memory (const_tree, const_tree);
385 static bool mips_strict_argument_naming (CUMULATIVE_ARGS *);
386 static void mips_macc_chains_record (rtx);
387 static void mips_macc_chains_reorder (rtx *, int);
388 static void vr4130_true_reg_dependence_p_1 (rtx, const_rtx, void *);
389 static bool vr4130_true_reg_dependence_p (rtx);
390 static bool vr4130_swap_insns_p (rtx, rtx);
391 static void vr4130_reorder (rtx *, int);
392 static void mips_promote_ready (rtx *, int, int);
393 static void mips_sched_init (FILE *, int, int);
394 static int mips_sched_reorder (FILE *, int, rtx *, int *, int);
395 static int mips_variable_issue (FILE *, int, rtx, int);
396 static int mips_adjust_cost (rtx, rtx, rtx, int);
397 static int mips_issue_rate (void);
398 static int mips_multipass_dfa_lookahead (void);
399 static void mips_init_libfuncs (void);
400 static void mips_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
402 static tree mips_build_builtin_va_list (void);
403 static tree mips_gimplify_va_arg_expr (tree, tree, tree *, tree *);
404 static bool mips_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode mode,
406 static bool mips_callee_copies (CUMULATIVE_ARGS *, enum machine_mode mode,
408 static int mips_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode mode,
410 static bool mips_valid_pointer_mode (enum machine_mode);
411 static bool mips_scalar_mode_supported_p (enum machine_mode);
412 static bool mips_vector_mode_supported_p (enum machine_mode);
413 static rtx mips_prepare_builtin_arg (enum insn_code, unsigned int, tree, unsigned int);
414 static rtx mips_prepare_builtin_target (enum insn_code, unsigned int, rtx);
415 static rtx mips_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
416 static void mips_init_builtins (void);
417 static rtx mips_expand_builtin_direct (enum insn_code, rtx, tree, bool);
418 static rtx mips_expand_builtin_movtf (enum mips_builtin_type,
419 enum insn_code, enum mips_fp_condition,
421 static rtx mips_expand_builtin_compare (enum mips_builtin_type,
422 enum insn_code, enum mips_fp_condition,
424 static rtx mips_expand_builtin_bposge (enum mips_builtin_type, rtx);
425 static void mips_encode_section_info (tree, rtx, int);
426 static void mips_extra_live_on_entry (bitmap);
427 static int mips_comp_type_attributes (const_tree, const_tree);
428 static void mips_set_mips16_mode (int);
429 static void mips_insert_attributes (tree, tree *);
430 static tree mips_merge_decl_attributes (tree, tree);
431 static void mips_set_current_function (tree);
432 static int mips_mode_rep_extended (enum machine_mode, enum machine_mode);
433 static bool mips_offset_within_alignment_p (rtx, HOST_WIDE_INT);
434 static void mips_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
436 /* Structure to be filled in by compute_frame_size with register
437 save masks, and offsets for the current function. */
439 struct mips_frame_info GTY(())
441 HOST_WIDE_INT total_size; /* # bytes that the entire frame takes up */
442 HOST_WIDE_INT var_size; /* # bytes that variables take up */
443 HOST_WIDE_INT args_size; /* # bytes that outgoing arguments take up */
444 HOST_WIDE_INT cprestore_size; /* # bytes that the .cprestore slot takes up */
445 HOST_WIDE_INT gp_reg_size; /* # bytes needed to store gp regs */
446 HOST_WIDE_INT fp_reg_size; /* # bytes needed to store fp regs */
447 unsigned int mask; /* mask of saved gp registers */
448 unsigned int fmask; /* mask of saved fp registers */
449 HOST_WIDE_INT gp_save_offset; /* offset from vfp to store gp registers */
450 HOST_WIDE_INT fp_save_offset; /* offset from vfp to store fp registers */
451 HOST_WIDE_INT gp_sp_offset; /* offset from new sp to store gp registers */
452 HOST_WIDE_INT fp_sp_offset; /* offset from new sp to store fp registers */
453 bool initialized; /* true if frame size already calculated */
454 int num_gp; /* number of gp registers saved */
455 int num_fp; /* number of fp registers saved */
458 struct machine_function GTY(()) {
459 /* Pseudo-reg holding the value of $28 in a mips16 function which
460 refers to GP relative global variables. */
461 rtx mips16_gp_pseudo_rtx;
463 /* The number of extra stack bytes taken up by register varargs.
464 This area is allocated by the callee at the very top of the frame. */
467 /* Current frame information, calculated by compute_frame_size. */
468 struct mips_frame_info frame;
470 /* The register to use as the global pointer within this function. */
471 unsigned int global_pointer;
473 /* True if mips_adjust_insn_length should ignore an instruction's
475 bool ignore_hazard_length_p;
477 /* True if the whole function is suitable for .set noreorder and
479 bool all_noreorder_p;
481 /* True if the function is known to have an instruction that needs $gp. */
484 /* True if we have emitted an instruction to initialize
485 mips16_gp_pseudo_rtx. */
486 bool initialized_mips16_gp_pseudo_p;
489 /* Information about a single argument. */
492 /* True if the argument is passed in a floating-point register, or
493 would have been if we hadn't run out of registers. */
496 /* The number of words passed in registers, rounded up. */
497 unsigned int reg_words;
499 /* For EABI, the offset of the first register from GP_ARG_FIRST or
500 FP_ARG_FIRST. For other ABIs, the offset of the first register from
501 the start of the ABI's argument structure (see the CUMULATIVE_ARGS
502 comment for details).
504 The value is MAX_ARGS_IN_REGISTERS if the argument is passed entirely
506 unsigned int reg_offset;
508 /* The number of words that must be passed on the stack, rounded up. */
509 unsigned int stack_words;
511 /* The offset from the start of the stack overflow area of the argument's
512 first stack word. Only meaningful when STACK_WORDS is nonzero. */
513 unsigned int stack_offset;
517 /* Information about an address described by mips_address_type.
523 REG is the base register and OFFSET is the constant offset.
526 REG is the register that contains the high part of the address,
527 OFFSET is the symbolic address being referenced and SYMBOL_TYPE
528 is the type of OFFSET's symbol.
531 SYMBOL_TYPE is the type of symbol being referenced. */
533 struct mips_address_info
535 enum mips_address_type type;
538 enum mips_symbol_type symbol_type;
542 /* One stage in a constant building sequence. These sequences have
546 A = A CODE[1] VALUE[1]
547 A = A CODE[2] VALUE[2]
550 where A is an accumulator, each CODE[i] is a binary rtl operation
551 and each VALUE[i] is a constant integer. */
552 struct mips_integer_op {
554 unsigned HOST_WIDE_INT value;
558 /* The largest number of operations needed to load an integer constant.
559 The worst accepted case for 64-bit constants is LUI,ORI,SLL,ORI,SLL,ORI.
560 When the lowest bit is clear, we can try, but reject a sequence with
561 an extra SLL at the end. */
562 #define MIPS_MAX_INTEGER_OPS 7
564 /* Information about a MIPS16e SAVE or RESTORE instruction. */
565 struct mips16e_save_restore_info {
566 /* The number of argument registers saved by a SAVE instruction.
567 0 for RESTORE instructions. */
570 /* Bit X is set if the instruction saves or restores GPR X. */
573 /* The total number of bytes to allocate. */
577 /* Global variables for machine-dependent things. */
579 /* Threshold for data being put into the small data/bss area, instead
580 of the normal data area. */
581 int mips_section_threshold = -1;
583 /* Count the number of .file directives, so that .loc is up to date. */
584 int num_source_filenames = 0;
586 /* Count the number of sdb related labels are generated (to find block
587 start and end boundaries). */
588 int sdb_label_count = 0;
590 /* Next label # for each statement for Silicon Graphics IRIS systems. */
593 /* Name of the file containing the current function. */
594 const char *current_function_file = "";
596 /* Number of nested .set noreorder, noat, nomacro, and volatile requests. */
602 /* The next branch instruction is a branch likely, not branch normal. */
603 int mips_branch_likely;
605 /* The operands passed to the last cmpMM expander. */
608 /* The target cpu for code generation. */
609 enum processor_type mips_arch;
610 const struct mips_cpu_info *mips_arch_info;
612 /* The target cpu for optimization and scheduling. */
613 enum processor_type mips_tune;
614 const struct mips_cpu_info *mips_tune_info;
616 /* Which instruction set architecture to use. */
619 /* Which ABI to use. */
620 int mips_abi = MIPS_ABI_DEFAULT;
622 /* Cost information to use. */
623 const struct mips_rtx_cost_data *mips_cost;
625 /* Remember the ambient target flags, excluding mips16. */
626 static int mips_base_target_flags;
627 /* The mips16 command-line target flags only. */
628 static bool mips_base_mips16;
629 /* Similar copies of option settings. */
630 static int mips_base_schedule_insns; /* flag_schedule_insns */
631 static int mips_base_reorder_blocks_and_partition; /* flag_reorder... */
632 static int mips_base_move_loop_invariants; /* flag_move_loop_invariants */
633 static int mips_base_align_loops; /* align_loops */
634 static int mips_base_align_jumps; /* align_jumps */
635 static int mips_base_align_functions; /* align_functions */
636 static GTY(()) int mips16_flipper;
638 /* The -mtext-loads setting. */
639 enum mips_code_readable_setting mips_code_readable = CODE_READABLE_YES;
641 /* The architecture selected by -mipsN. */
642 static const struct mips_cpu_info *mips_isa_info;
644 /* If TRUE, we split addresses into their high and low parts in the RTL. */
645 int mips_split_addresses;
647 /* Mode used for saving/restoring general purpose registers. */
648 static enum machine_mode gpr_mode;
650 /* Array giving truth value on whether or not a given hard register
651 can support a given mode. */
652 char mips_hard_regno_mode_ok[(int)MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
654 /* List of all MIPS punctuation characters used by print_operand. */
655 char mips_print_operand_punct[256];
657 /* Map GCC register number to debugger register number. */
658 int mips_dbx_regno[FIRST_PSEUDO_REGISTER];
659 int mips_dwarf_regno[FIRST_PSEUDO_REGISTER];
661 /* A copy of the original flag_delayed_branch: see override_options. */
662 static int mips_flag_delayed_branch;
664 static GTY (()) int mips_output_filename_first_time = 1;
666 /* mips_split_p[X] is true if symbols of type X can be split by
667 mips_split_symbol(). */
668 bool mips_split_p[NUM_SYMBOL_TYPES];
670 /* mips_lo_relocs[X] is the relocation to use when a symbol of type X
671 appears in a LO_SUM. It can be null if such LO_SUMs aren't valid or
672 if they are matched by a special .md file pattern. */
673 static const char *mips_lo_relocs[NUM_SYMBOL_TYPES];
675 /* Likewise for HIGHs. */
676 static const char *mips_hi_relocs[NUM_SYMBOL_TYPES];
678 /* Map hard register number to register class */
679 const enum reg_class mips_regno_to_class[] =
681 LEA_REGS, LEA_REGS, M16_NA_REGS, V1_REG,
682 M16_REGS, M16_REGS, M16_REGS, M16_REGS,
683 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
684 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
685 M16_NA_REGS, M16_NA_REGS, LEA_REGS, LEA_REGS,
686 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
687 T_REG, PIC_FN_ADDR_REG, LEA_REGS, LEA_REGS,
688 LEA_REGS, LEA_REGS, LEA_REGS, LEA_REGS,
689 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
690 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
691 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
692 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
693 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
694 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
695 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
696 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
697 MD0_REG, MD1_REG, NO_REGS, ST_REGS,
698 ST_REGS, ST_REGS, ST_REGS, ST_REGS,
699 ST_REGS, ST_REGS, ST_REGS, NO_REGS,
700 NO_REGS, ALL_REGS, ALL_REGS, NO_REGS,
701 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
702 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
703 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
704 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
705 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
706 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
707 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
708 COP0_REGS, COP0_REGS, COP0_REGS, COP0_REGS,
709 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
710 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
711 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
712 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
713 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
714 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
715 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
716 COP2_REGS, COP2_REGS, COP2_REGS, COP2_REGS,
717 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
718 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
719 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
720 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
721 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
722 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
723 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
724 COP3_REGS, COP3_REGS, COP3_REGS, COP3_REGS,
725 DSP_ACC_REGS, DSP_ACC_REGS, DSP_ACC_REGS, DSP_ACC_REGS,
726 DSP_ACC_REGS, DSP_ACC_REGS, ALL_REGS, ALL_REGS,
727 ALL_REGS, ALL_REGS, ALL_REGS, ALL_REGS
730 /* Table of machine dependent attributes. */
731 const struct attribute_spec mips_attribute_table[] =
733 { "long_call", 0, 0, false, true, true, NULL },
734 { "far", 0, 0, false, true, true, NULL },
735 { "near", 0, 0, false, true, true, NULL },
736 /* Switch MIPS16 ASE on and off per-function. We would really like
737 to make these type attributes, but GCC doesn't provide the hooks
738 we need to support the right conversion rules. As declaration
739 attributes, they affect code generation but don't carry other
741 { "mips16", 0, 0, true, false, false, NULL },
742 { "nomips16", 0, 0, true, false, false, NULL },
743 { NULL, 0, 0, false, false, false, NULL }
746 /* A table describing all the processors gcc knows about. Names are
747 matched in the order listed. The first mention of an ISA level is
748 taken as the canonical name for that ISA.
750 To ease comparison, please keep this table in the same order
751 as gas's mips_cpu_info_table[]. Please also make sure that
752 MIPS_ISA_LEVEL_SPEC and MIPS_ARCH_FLOAT_SPEC handle all -march
753 options correctly. */
754 const struct mips_cpu_info mips_cpu_info_table[] = {
755 /* Entries for generic ISAs */
756 { "mips1", PROCESSOR_R3000, 1, 0 },
757 { "mips2", PROCESSOR_R6000, 2, 0 },
758 { "mips3", PROCESSOR_R4000, 3, 0 },
759 { "mips4", PROCESSOR_R8000, 4, 0 },
760 /* Prefer not to use branch-likely instructions for generic MIPS32rX
761 and MIPS64rX code. The instructions were officially deprecated
762 in revisions 2 and earlier, but revision 3 is likely to downgrade
763 that to a recommendation to avoid the instructions in code that
764 isn't tuned to a specific processor. */
765 { "mips32", PROCESSOR_4KC, 32, PTF_AVOID_BRANCHLIKELY },
766 { "mips32r2", PROCESSOR_M4K, 33, PTF_AVOID_BRANCHLIKELY },
767 { "mips64", PROCESSOR_5KC, 64, PTF_AVOID_BRANCHLIKELY },
770 { "r3000", PROCESSOR_R3000, 1, 0 },
771 { "r2000", PROCESSOR_R3000, 1, 0 }, /* = r3000 */
772 { "r3900", PROCESSOR_R3900, 1, 0 },
775 { "r6000", PROCESSOR_R6000, 2, 0 },
778 { "r4000", PROCESSOR_R4000, 3, 0 },
779 { "vr4100", PROCESSOR_R4100, 3, 0 },
780 { "vr4111", PROCESSOR_R4111, 3, 0 },
781 { "vr4120", PROCESSOR_R4120, 3, 0 },
782 { "vr4130", PROCESSOR_R4130, 3, 0 },
783 { "vr4300", PROCESSOR_R4300, 3, 0 },
784 { "r4400", PROCESSOR_R4000, 3, 0 }, /* = r4000 */
785 { "r4600", PROCESSOR_R4600, 3, 0 },
786 { "orion", PROCESSOR_R4600, 3, 0 }, /* = r4600 */
787 { "r4650", PROCESSOR_R4650, 3, 0 },
790 { "r8000", PROCESSOR_R8000, 4, 0 },
791 { "vr5000", PROCESSOR_R5000, 4, 0 },
792 { "vr5400", PROCESSOR_R5400, 4, 0 },
793 { "vr5500", PROCESSOR_R5500, 4, PTF_AVOID_BRANCHLIKELY },
794 { "rm7000", PROCESSOR_R7000, 4, 0 },
795 { "rm9000", PROCESSOR_R9000, 4, 0 },
798 { "4kc", PROCESSOR_4KC, 32, 0 },
799 { "4km", PROCESSOR_4KC, 32, 0 }, /* = 4kc */
800 { "4kp", PROCESSOR_4KP, 32, 0 },
801 { "4ksc", PROCESSOR_4KC, 32, 0 },
803 /* MIPS32 Release 2 */
804 { "m4k", PROCESSOR_M4K, 33, 0 },
805 { "4kec", PROCESSOR_4KC, 33, 0 },
806 { "4kem", PROCESSOR_4KC, 33, 0 },
807 { "4kep", PROCESSOR_4KP, 33, 0 },
808 { "4ksd", PROCESSOR_4KC, 33, 0 },
810 { "24kc", PROCESSOR_24KC, 33, 0 },
811 { "24kf2_1", PROCESSOR_24KF2_1, 33, 0 },
812 { "24kf", PROCESSOR_24KF2_1, 33, 0 },
813 { "24kf1_1", PROCESSOR_24KF1_1, 33, 0 },
814 { "24kfx", PROCESSOR_24KF1_1, 33, 0 },
815 { "24kx", PROCESSOR_24KF1_1, 33, 0 },
817 { "24kec", PROCESSOR_24KC, 33, 0 }, /* 24K with DSP */
818 { "24kef2_1", PROCESSOR_24KF2_1, 33, 0 },
819 { "24kef", PROCESSOR_24KF2_1, 33, 0 },
820 { "24kef1_1", PROCESSOR_24KF1_1, 33, 0 },
821 { "24kefx", PROCESSOR_24KF1_1, 33, 0 },
822 { "24kex", PROCESSOR_24KF1_1, 33, 0 },
824 { "34kc", PROCESSOR_24KC, 33, 0 }, /* 34K with MT/DSP */
825 { "34kf2_1", PROCESSOR_24KF2_1, 33, 0 },
826 { "34kf", PROCESSOR_24KF2_1, 33, 0 },
827 { "34kf1_1", PROCESSOR_24KF1_1, 33, 0 },
828 { "34kfx", PROCESSOR_24KF1_1, 33, 0 },
829 { "34kx", PROCESSOR_24KF1_1, 33, 0 },
831 { "74kc", PROCESSOR_74KC, 33, 0 }, /* 74K with DSPr2 */
832 { "74kf2_1", PROCESSOR_74KF2_1, 33, 0 },
833 { "74kf", PROCESSOR_74KF2_1, 33, 0 },
834 { "74kf1_1", PROCESSOR_74KF1_1, 33, 0 },
835 { "74kfx", PROCESSOR_74KF1_1, 33, 0 },
836 { "74kx", PROCESSOR_74KF1_1, 33, 0 },
837 { "74kf3_2", PROCESSOR_74KF3_2, 33, 0 },
840 { "5kc", PROCESSOR_5KC, 64, 0 },
841 { "5kf", PROCESSOR_5KF, 64, 0 },
842 { "20kc", PROCESSOR_20KC, 64, PTF_AVOID_BRANCHLIKELY },
843 { "sb1", PROCESSOR_SB1, 64, PTF_AVOID_BRANCHLIKELY },
844 { "sb1a", PROCESSOR_SB1A, 64, PTF_AVOID_BRANCHLIKELY },
845 { "sr71000", PROCESSOR_SR71000, 64, PTF_AVOID_BRANCHLIKELY },
848 /* Default costs. If these are used for a processor we should look
849 up the actual costs. */
850 #define DEFAULT_COSTS COSTS_N_INSNS (6), /* fp_add */ \
851 COSTS_N_INSNS (7), /* fp_mult_sf */ \
852 COSTS_N_INSNS (8), /* fp_mult_df */ \
853 COSTS_N_INSNS (23), /* fp_div_sf */ \
854 COSTS_N_INSNS (36), /* fp_div_df */ \
855 COSTS_N_INSNS (10), /* int_mult_si */ \
856 COSTS_N_INSNS (10), /* int_mult_di */ \
857 COSTS_N_INSNS (69), /* int_div_si */ \
858 COSTS_N_INSNS (69), /* int_div_di */ \
859 2, /* branch_cost */ \
860 4 /* memory_latency */
862 /* Need to replace these with the costs of calling the appropriate
864 #define SOFT_FP_COSTS COSTS_N_INSNS (256), /* fp_add */ \
865 COSTS_N_INSNS (256), /* fp_mult_sf */ \
866 COSTS_N_INSNS (256), /* fp_mult_df */ \
867 COSTS_N_INSNS (256), /* fp_div_sf */ \
868 COSTS_N_INSNS (256) /* fp_div_df */
870 static struct mips_rtx_cost_data const mips_rtx_cost_optimize_size =
872 COSTS_N_INSNS (1), /* fp_add */
873 COSTS_N_INSNS (1), /* fp_mult_sf */
874 COSTS_N_INSNS (1), /* fp_mult_df */
875 COSTS_N_INSNS (1), /* fp_div_sf */
876 COSTS_N_INSNS (1), /* fp_div_df */
877 COSTS_N_INSNS (1), /* int_mult_si */
878 COSTS_N_INSNS (1), /* int_mult_di */
879 COSTS_N_INSNS (1), /* int_div_si */
880 COSTS_N_INSNS (1), /* int_div_di */
882 4 /* memory_latency */
885 static struct mips_rtx_cost_data const mips_rtx_cost_data[PROCESSOR_MAX] =
888 COSTS_N_INSNS (2), /* fp_add */
889 COSTS_N_INSNS (4), /* fp_mult_sf */
890 COSTS_N_INSNS (5), /* fp_mult_df */
891 COSTS_N_INSNS (12), /* fp_div_sf */
892 COSTS_N_INSNS (19), /* fp_div_df */
893 COSTS_N_INSNS (12), /* int_mult_si */
894 COSTS_N_INSNS (12), /* int_mult_di */
895 COSTS_N_INSNS (35), /* int_div_si */
896 COSTS_N_INSNS (35), /* int_div_di */
898 4 /* memory_latency */
903 COSTS_N_INSNS (6), /* int_mult_si */
904 COSTS_N_INSNS (6), /* int_mult_di */
905 COSTS_N_INSNS (36), /* int_div_si */
906 COSTS_N_INSNS (36), /* int_div_di */
908 4 /* memory_latency */
912 COSTS_N_INSNS (36), /* int_mult_si */
913 COSTS_N_INSNS (36), /* int_mult_di */
914 COSTS_N_INSNS (37), /* int_div_si */
915 COSTS_N_INSNS (37), /* int_div_di */
917 4 /* memory_latency */
921 COSTS_N_INSNS (4), /* int_mult_si */
922 COSTS_N_INSNS (11), /* int_mult_di */
923 COSTS_N_INSNS (36), /* int_div_si */
924 COSTS_N_INSNS (68), /* int_div_di */
926 4 /* memory_latency */
929 COSTS_N_INSNS (4), /* fp_add */
930 COSTS_N_INSNS (4), /* fp_mult_sf */
931 COSTS_N_INSNS (5), /* fp_mult_df */
932 COSTS_N_INSNS (17), /* fp_div_sf */
933 COSTS_N_INSNS (32), /* fp_div_df */
934 COSTS_N_INSNS (4), /* int_mult_si */
935 COSTS_N_INSNS (11), /* int_mult_di */
936 COSTS_N_INSNS (36), /* int_div_si */
937 COSTS_N_INSNS (68), /* int_div_di */
939 4 /* memory_latency */
942 COSTS_N_INSNS (4), /* fp_add */
943 COSTS_N_INSNS (4), /* fp_mult_sf */
944 COSTS_N_INSNS (5), /* fp_mult_df */
945 COSTS_N_INSNS (17), /* fp_div_sf */
946 COSTS_N_INSNS (32), /* fp_div_df */
947 COSTS_N_INSNS (4), /* int_mult_si */
948 COSTS_N_INSNS (7), /* int_mult_di */
949 COSTS_N_INSNS (42), /* int_div_si */
950 COSTS_N_INSNS (72), /* int_div_di */
952 4 /* memory_latency */
956 COSTS_N_INSNS (5), /* int_mult_si */
957 COSTS_N_INSNS (5), /* int_mult_di */
958 COSTS_N_INSNS (41), /* int_div_si */
959 COSTS_N_INSNS (41), /* int_div_di */
961 4 /* memory_latency */
964 COSTS_N_INSNS (8), /* fp_add */
965 COSTS_N_INSNS (8), /* fp_mult_sf */
966 COSTS_N_INSNS (10), /* fp_mult_df */
967 COSTS_N_INSNS (34), /* fp_div_sf */
968 COSTS_N_INSNS (64), /* fp_div_df */
969 COSTS_N_INSNS (5), /* int_mult_si */
970 COSTS_N_INSNS (5), /* int_mult_di */
971 COSTS_N_INSNS (41), /* int_div_si */
972 COSTS_N_INSNS (41), /* int_div_di */
974 4 /* memory_latency */
977 COSTS_N_INSNS (4), /* fp_add */
978 COSTS_N_INSNS (4), /* fp_mult_sf */
979 COSTS_N_INSNS (5), /* fp_mult_df */
980 COSTS_N_INSNS (17), /* fp_div_sf */
981 COSTS_N_INSNS (32), /* fp_div_df */
982 COSTS_N_INSNS (5), /* int_mult_si */
983 COSTS_N_INSNS (5), /* int_mult_di */
984 COSTS_N_INSNS (41), /* int_div_si */
985 COSTS_N_INSNS (41), /* int_div_di */
987 4 /* memory_latency */
991 COSTS_N_INSNS (5), /* int_mult_si */
992 COSTS_N_INSNS (5), /* int_mult_di */
993 COSTS_N_INSNS (41), /* int_div_si */
994 COSTS_N_INSNS (41), /* int_div_di */
996 4 /* memory_latency */
999 COSTS_N_INSNS (8), /* fp_add */
1000 COSTS_N_INSNS (8), /* fp_mult_sf */
1001 COSTS_N_INSNS (10), /* fp_mult_df */
1002 COSTS_N_INSNS (34), /* fp_div_sf */
1003 COSTS_N_INSNS (64), /* fp_div_df */
1004 COSTS_N_INSNS (5), /* int_mult_si */
1005 COSTS_N_INSNS (5), /* int_mult_di */
1006 COSTS_N_INSNS (41), /* int_div_si */
1007 COSTS_N_INSNS (41), /* int_div_di */
1008 1, /* branch_cost */
1009 4 /* memory_latency */
1012 COSTS_N_INSNS (4), /* fp_add */
1013 COSTS_N_INSNS (4), /* fp_mult_sf */
1014 COSTS_N_INSNS (5), /* fp_mult_df */
1015 COSTS_N_INSNS (17), /* fp_div_sf */
1016 COSTS_N_INSNS (32), /* fp_div_df */
1017 COSTS_N_INSNS (5), /* int_mult_si */
1018 COSTS_N_INSNS (5), /* int_mult_di */
1019 COSTS_N_INSNS (41), /* int_div_si */
1020 COSTS_N_INSNS (41), /* int_div_di */
1021 1, /* branch_cost */
1022 4 /* memory_latency */
1025 COSTS_N_INSNS (6), /* fp_add */
1026 COSTS_N_INSNS (6), /* fp_mult_sf */
1027 COSTS_N_INSNS (7), /* fp_mult_df */
1028 COSTS_N_INSNS (25), /* fp_div_sf */
1029 COSTS_N_INSNS (48), /* fp_div_df */
1030 COSTS_N_INSNS (5), /* int_mult_si */
1031 COSTS_N_INSNS (5), /* int_mult_di */
1032 COSTS_N_INSNS (41), /* int_div_si */
1033 COSTS_N_INSNS (41), /* int_div_di */
1034 1, /* branch_cost */
1035 4 /* memory_latency */
1041 COSTS_N_INSNS (2), /* fp_add */
1042 COSTS_N_INSNS (4), /* fp_mult_sf */
1043 COSTS_N_INSNS (5), /* fp_mult_df */
1044 COSTS_N_INSNS (12), /* fp_div_sf */
1045 COSTS_N_INSNS (19), /* fp_div_df */
1046 COSTS_N_INSNS (2), /* int_mult_si */
1047 COSTS_N_INSNS (2), /* int_mult_di */
1048 COSTS_N_INSNS (35), /* int_div_si */
1049 COSTS_N_INSNS (35), /* int_div_di */
1050 1, /* branch_cost */
1051 4 /* memory_latency */
1054 COSTS_N_INSNS (3), /* fp_add */
1055 COSTS_N_INSNS (5), /* fp_mult_sf */
1056 COSTS_N_INSNS (6), /* fp_mult_df */
1057 COSTS_N_INSNS (15), /* fp_div_sf */
1058 COSTS_N_INSNS (16), /* fp_div_df */
1059 COSTS_N_INSNS (17), /* int_mult_si */
1060 COSTS_N_INSNS (17), /* int_mult_di */
1061 COSTS_N_INSNS (38), /* int_div_si */
1062 COSTS_N_INSNS (38), /* int_div_di */
1063 2, /* branch_cost */
1064 6 /* memory_latency */
1067 COSTS_N_INSNS (6), /* fp_add */
1068 COSTS_N_INSNS (7), /* fp_mult_sf */
1069 COSTS_N_INSNS (8), /* fp_mult_df */
1070 COSTS_N_INSNS (23), /* fp_div_sf */
1071 COSTS_N_INSNS (36), /* fp_div_df */
1072 COSTS_N_INSNS (10), /* int_mult_si */
1073 COSTS_N_INSNS (10), /* int_mult_di */
1074 COSTS_N_INSNS (69), /* int_div_si */
1075 COSTS_N_INSNS (69), /* int_div_di */
1076 2, /* branch_cost */
1077 6 /* memory_latency */
1089 /* The only costs that appear to be updated here are
1090 integer multiplication. */
1092 COSTS_N_INSNS (4), /* int_mult_si */
1093 COSTS_N_INSNS (6), /* int_mult_di */
1094 COSTS_N_INSNS (69), /* int_div_si */
1095 COSTS_N_INSNS (69), /* int_div_di */
1096 1, /* branch_cost */
1097 4 /* memory_latency */
1109 COSTS_N_INSNS (6), /* fp_add */
1110 COSTS_N_INSNS (4), /* fp_mult_sf */
1111 COSTS_N_INSNS (5), /* fp_mult_df */
1112 COSTS_N_INSNS (23), /* fp_div_sf */
1113 COSTS_N_INSNS (36), /* fp_div_df */
1114 COSTS_N_INSNS (5), /* int_mult_si */
1115 COSTS_N_INSNS (5), /* int_mult_di */
1116 COSTS_N_INSNS (36), /* int_div_si */
1117 COSTS_N_INSNS (36), /* int_div_di */
1118 1, /* branch_cost */
1119 4 /* memory_latency */
1122 COSTS_N_INSNS (6), /* fp_add */
1123 COSTS_N_INSNS (5), /* fp_mult_sf */
1124 COSTS_N_INSNS (6), /* fp_mult_df */
1125 COSTS_N_INSNS (30), /* fp_div_sf */
1126 COSTS_N_INSNS (59), /* fp_div_df */
1127 COSTS_N_INSNS (3), /* int_mult_si */
1128 COSTS_N_INSNS (4), /* int_mult_di */
1129 COSTS_N_INSNS (42), /* int_div_si */
1130 COSTS_N_INSNS (74), /* int_div_di */
1131 1, /* branch_cost */
1132 4 /* memory_latency */
1135 COSTS_N_INSNS (6), /* fp_add */
1136 COSTS_N_INSNS (5), /* fp_mult_sf */
1137 COSTS_N_INSNS (6), /* fp_mult_df */
1138 COSTS_N_INSNS (30), /* fp_div_sf */
1139 COSTS_N_INSNS (59), /* fp_div_df */
1140 COSTS_N_INSNS (5), /* int_mult_si */
1141 COSTS_N_INSNS (9), /* int_mult_di */
1142 COSTS_N_INSNS (42), /* int_div_si */
1143 COSTS_N_INSNS (74), /* int_div_di */
1144 1, /* branch_cost */
1145 4 /* memory_latency */
1148 /* The only costs that are changed here are
1149 integer multiplication. */
1150 COSTS_N_INSNS (6), /* fp_add */
1151 COSTS_N_INSNS (7), /* fp_mult_sf */
1152 COSTS_N_INSNS (8), /* fp_mult_df */
1153 COSTS_N_INSNS (23), /* fp_div_sf */
1154 COSTS_N_INSNS (36), /* fp_div_df */
1155 COSTS_N_INSNS (5), /* int_mult_si */
1156 COSTS_N_INSNS (9), /* int_mult_di */
1157 COSTS_N_INSNS (69), /* int_div_si */
1158 COSTS_N_INSNS (69), /* int_div_di */
1159 1, /* branch_cost */
1160 4 /* memory_latency */
1166 /* The only costs that are changed here are
1167 integer multiplication. */
1168 COSTS_N_INSNS (6), /* fp_add */
1169 COSTS_N_INSNS (7), /* fp_mult_sf */
1170 COSTS_N_INSNS (8), /* fp_mult_df */
1171 COSTS_N_INSNS (23), /* fp_div_sf */
1172 COSTS_N_INSNS (36), /* fp_div_df */
1173 COSTS_N_INSNS (3), /* int_mult_si */
1174 COSTS_N_INSNS (8), /* int_mult_di */
1175 COSTS_N_INSNS (69), /* int_div_si */
1176 COSTS_N_INSNS (69), /* int_div_di */
1177 1, /* branch_cost */
1178 4 /* memory_latency */
1181 /* These costs are the same as the SB-1A below. */
1182 COSTS_N_INSNS (4), /* fp_add */
1183 COSTS_N_INSNS (4), /* fp_mult_sf */
1184 COSTS_N_INSNS (4), /* fp_mult_df */
1185 COSTS_N_INSNS (24), /* fp_div_sf */
1186 COSTS_N_INSNS (32), /* fp_div_df */
1187 COSTS_N_INSNS (3), /* int_mult_si */
1188 COSTS_N_INSNS (4), /* int_mult_di */
1189 COSTS_N_INSNS (36), /* int_div_si */
1190 COSTS_N_INSNS (68), /* int_div_di */
1191 1, /* branch_cost */
1192 4 /* memory_latency */
1195 /* These costs are the same as the SB-1 above. */
1196 COSTS_N_INSNS (4), /* fp_add */
1197 COSTS_N_INSNS (4), /* fp_mult_sf */
1198 COSTS_N_INSNS (4), /* fp_mult_df */
1199 COSTS_N_INSNS (24), /* fp_div_sf */
1200 COSTS_N_INSNS (32), /* fp_div_df */
1201 COSTS_N_INSNS (3), /* int_mult_si */
1202 COSTS_N_INSNS (4), /* int_mult_di */
1203 COSTS_N_INSNS (36), /* int_div_si */
1204 COSTS_N_INSNS (68), /* int_div_di */
1205 1, /* branch_cost */
1206 4 /* memory_latency */
1213 /* If a MIPS16e SAVE or RESTORE instruction saves or restores register
1214 mips16e_s2_s8_regs[X], it must also save the registers in indexes
1215 X + 1 onwards. Likewise mips16e_a0_a3_regs. */
1216 static const unsigned char mips16e_s2_s8_regs[] = {
1217 30, 23, 22, 21, 20, 19, 18
1219 static const unsigned char mips16e_a0_a3_regs[] = {
1223 /* A list of the registers that can be saved by the MIPS16e SAVE instruction,
1224 ordered from the uppermost in memory to the lowest in memory. */
1225 static const unsigned char mips16e_save_restore_regs[] = {
1226 31, 30, 23, 22, 21, 20, 19, 18, 17, 16, 7, 6, 5, 4
1229 /* Initialize the GCC target structure. */
1230 #undef TARGET_ASM_ALIGNED_HI_OP
1231 #define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
1232 #undef TARGET_ASM_ALIGNED_SI_OP
1233 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
1234 #undef TARGET_ASM_ALIGNED_DI_OP
1235 #define TARGET_ASM_ALIGNED_DI_OP "\t.dword\t"
1237 #undef TARGET_ASM_FUNCTION_PROLOGUE
1238 #define TARGET_ASM_FUNCTION_PROLOGUE mips_output_function_prologue
1239 #undef TARGET_ASM_FUNCTION_EPILOGUE
1240 #define TARGET_ASM_FUNCTION_EPILOGUE mips_output_function_epilogue
1241 #undef TARGET_ASM_SELECT_RTX_SECTION
1242 #define TARGET_ASM_SELECT_RTX_SECTION mips_select_rtx_section
1243 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
1244 #define TARGET_ASM_FUNCTION_RODATA_SECTION mips_function_rodata_section
1246 #undef TARGET_SCHED_INIT
1247 #define TARGET_SCHED_INIT mips_sched_init
1248 #undef TARGET_SCHED_REORDER
1249 #define TARGET_SCHED_REORDER mips_sched_reorder
1250 #undef TARGET_SCHED_REORDER2
1251 #define TARGET_SCHED_REORDER2 mips_sched_reorder
1252 #undef TARGET_SCHED_VARIABLE_ISSUE
1253 #define TARGET_SCHED_VARIABLE_ISSUE mips_variable_issue
1254 #undef TARGET_SCHED_ADJUST_COST
1255 #define TARGET_SCHED_ADJUST_COST mips_adjust_cost
1256 #undef TARGET_SCHED_ISSUE_RATE
1257 #define TARGET_SCHED_ISSUE_RATE mips_issue_rate
1258 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1259 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD \
1260 mips_multipass_dfa_lookahead
1262 #undef TARGET_DEFAULT_TARGET_FLAGS
1263 #define TARGET_DEFAULT_TARGET_FLAGS \
1265 | TARGET_CPU_DEFAULT \
1266 | TARGET_ENDIAN_DEFAULT \
1267 | TARGET_FP_EXCEPTIONS_DEFAULT \
1268 | MASK_CHECK_ZERO_DIV \
1270 #undef TARGET_HANDLE_OPTION
1271 #define TARGET_HANDLE_OPTION mips_handle_option
1273 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
1274 #define TARGET_FUNCTION_OK_FOR_SIBCALL mips_function_ok_for_sibcall
1276 #undef TARGET_INSERT_ATTRIBUTES
1277 #define TARGET_INSERT_ATTRIBUTES mips_insert_attributes
1278 #undef TARGET_MERGE_DECL_ATTRIBUTES
1279 #define TARGET_MERGE_DECL_ATTRIBUTES mips_merge_decl_attributes
1280 #undef TARGET_SET_CURRENT_FUNCTION
1281 #define TARGET_SET_CURRENT_FUNCTION mips_set_current_function
1283 #undef TARGET_VALID_POINTER_MODE
1284 #define TARGET_VALID_POINTER_MODE mips_valid_pointer_mode
1285 #undef TARGET_RTX_COSTS
1286 #define TARGET_RTX_COSTS mips_rtx_costs
1287 #undef TARGET_ADDRESS_COST
1288 #define TARGET_ADDRESS_COST mips_address_cost
1290 #undef TARGET_IN_SMALL_DATA_P
1291 #define TARGET_IN_SMALL_DATA_P mips_in_small_data_p
1293 #undef TARGET_MACHINE_DEPENDENT_REORG
1294 #define TARGET_MACHINE_DEPENDENT_REORG mips_reorg
1296 #undef TARGET_ASM_FILE_START
1297 #define TARGET_ASM_FILE_START mips_file_start
1298 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
1299 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
1301 #undef TARGET_INIT_LIBFUNCS
1302 #define TARGET_INIT_LIBFUNCS mips_init_libfuncs
1304 #undef TARGET_BUILD_BUILTIN_VA_LIST
1305 #define TARGET_BUILD_BUILTIN_VA_LIST mips_build_builtin_va_list
1306 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
1307 #define TARGET_GIMPLIFY_VA_ARG_EXPR mips_gimplify_va_arg_expr
1309 #undef TARGET_PROMOTE_FUNCTION_ARGS
1310 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
1311 #undef TARGET_PROMOTE_FUNCTION_RETURN
1312 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
1313 #undef TARGET_PROMOTE_PROTOTYPES
1314 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
1316 #undef TARGET_RETURN_IN_MEMORY
1317 #define TARGET_RETURN_IN_MEMORY mips_return_in_memory
1318 #undef TARGET_RETURN_IN_MSB
1319 #define TARGET_RETURN_IN_MSB mips_return_in_msb
1321 #undef TARGET_ASM_OUTPUT_MI_THUNK
1322 #define TARGET_ASM_OUTPUT_MI_THUNK mips_output_mi_thunk
1323 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
1324 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
1326 #undef TARGET_SETUP_INCOMING_VARARGS
1327 #define TARGET_SETUP_INCOMING_VARARGS mips_setup_incoming_varargs
1328 #undef TARGET_STRICT_ARGUMENT_NAMING
1329 #define TARGET_STRICT_ARGUMENT_NAMING mips_strict_argument_naming
1330 #undef TARGET_MUST_PASS_IN_STACK
1331 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
1332 #undef TARGET_PASS_BY_REFERENCE
1333 #define TARGET_PASS_BY_REFERENCE mips_pass_by_reference
1334 #undef TARGET_CALLEE_COPIES
1335 #define TARGET_CALLEE_COPIES mips_callee_copies
1336 #undef TARGET_ARG_PARTIAL_BYTES
1337 #define TARGET_ARG_PARTIAL_BYTES mips_arg_partial_bytes
1339 #undef TARGET_MODE_REP_EXTENDED
1340 #define TARGET_MODE_REP_EXTENDED mips_mode_rep_extended
1342 #undef TARGET_VECTOR_MODE_SUPPORTED_P
1343 #define TARGET_VECTOR_MODE_SUPPORTED_P mips_vector_mode_supported_p
1345 #undef TARGET_SCALAR_MODE_SUPPORTED_P
1346 #define TARGET_SCALAR_MODE_SUPPORTED_P mips_scalar_mode_supported_p
1348 #undef TARGET_INIT_BUILTINS
1349 #define TARGET_INIT_BUILTINS mips_init_builtins
1350 #undef TARGET_EXPAND_BUILTIN
1351 #define TARGET_EXPAND_BUILTIN mips_expand_builtin
1353 #undef TARGET_HAVE_TLS
1354 #define TARGET_HAVE_TLS HAVE_AS_TLS
1356 #undef TARGET_CANNOT_FORCE_CONST_MEM
1357 #define TARGET_CANNOT_FORCE_CONST_MEM mips_cannot_force_const_mem
1359 #undef TARGET_ENCODE_SECTION_INFO
1360 #define TARGET_ENCODE_SECTION_INFO mips_encode_section_info
1362 #undef TARGET_ATTRIBUTE_TABLE
1363 #define TARGET_ATTRIBUTE_TABLE mips_attribute_table
1364 /* All our function attributes are related to how out-of-line copies should
1365 be compiled or called. They don't in themselves prevent inlining. */
1366 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
1367 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P hook_bool_const_tree_true
1369 #undef TARGET_EXTRA_LIVE_ON_ENTRY
1370 #define TARGET_EXTRA_LIVE_ON_ENTRY mips_extra_live_on_entry
1372 #undef TARGET_MIN_ANCHOR_OFFSET
1373 #define TARGET_MIN_ANCHOR_OFFSET -32768
1374 #undef TARGET_MAX_ANCHOR_OFFSET
1375 #define TARGET_MAX_ANCHOR_OFFSET 32767
1376 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1377 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P mips_use_blocks_for_constant_p
1378 #undef TARGET_USE_ANCHORS_FOR_SYMBOL_P
1379 #define TARGET_USE_ANCHORS_FOR_SYMBOL_P mips_use_anchors_for_symbol_p
1381 #undef TARGET_COMP_TYPE_ATTRIBUTES
1382 #define TARGET_COMP_TYPE_ATTRIBUTES mips_comp_type_attributes
1384 #ifdef HAVE_AS_DTPRELWORD
1385 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1386 #define TARGET_ASM_OUTPUT_DWARF_DTPREL mips_output_dwarf_dtprel
1389 struct gcc_target targetm = TARGET_INITIALIZER;
1392 /* Predicates to test for presence of "near" and "far"/"long_call"
1393 attributes on the given TYPE. */
1396 mips_near_type_p (const_tree type)
1398 return lookup_attribute ("near", TYPE_ATTRIBUTES (type)) != NULL;
1402 mips_far_type_p (const_tree type)
1404 return (lookup_attribute ("long_call", TYPE_ATTRIBUTES (type)) != NULL
1405 || lookup_attribute ("far", TYPE_ATTRIBUTES (type)) != NULL);
1408 /* Similar predicates for "mips16"/"nomips16" attributes. */
1411 mips_mips16_decl_p (const_tree decl)
1413 return lookup_attribute ("mips16", DECL_ATTRIBUTES (decl)) != NULL;
1417 mips_nomips16_decl_p (const_tree decl)
1419 return lookup_attribute ("nomips16", DECL_ATTRIBUTES (decl)) != NULL;
1422 /* Return 0 if the attributes for two types are incompatible, 1 if they
1423 are compatible, and 2 if they are nearly compatible (which causes a
1424 warning to be generated). */
1427 mips_comp_type_attributes (const_tree type1, const_tree type2)
1429 /* Check for mismatch of non-default calling convention. */
1430 if (TREE_CODE (type1) != FUNCTION_TYPE)
1433 /* Disallow mixed near/far attributes. */
1434 if (mips_far_type_p (type1) && mips_near_type_p (type2))
1436 if (mips_near_type_p (type1) && mips_far_type_p (type2))
1442 /* If X is a PLUS of a CONST_INT, return the two terms in *BASE_PTR
1443 and *OFFSET_PTR. Return X in *BASE_PTR and 0 in *OFFSET_PTR otherwise. */
1446 mips_split_plus (rtx x, rtx *base_ptr, HOST_WIDE_INT *offset_ptr)
1448 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
1450 *base_ptr = XEXP (x, 0);
1451 *offset_ptr = INTVAL (XEXP (x, 1));
1460 /* Return true if SYMBOL_REF X is associated with a global symbol
1461 (in the STB_GLOBAL sense). */
1464 mips_global_symbol_p (const_rtx x)
1466 const_tree const decl = SYMBOL_REF_DECL (x);
1469 return !SYMBOL_REF_LOCAL_P (x);
1471 /* Weakref symbols are not TREE_PUBLIC, but their targets are global
1472 or weak symbols. Relocations in the object file will be against
1473 the target symbol, so it's that symbol's binding that matters here. */
1474 return DECL_P (decl) && (TREE_PUBLIC (decl) || DECL_WEAK (decl));
1477 /* Return true if SYMBOL_REF X binds locally. */
1480 mips_symbol_binds_local_p (const_rtx x)
1482 return (SYMBOL_REF_DECL (x)
1483 ? targetm.binds_local_p (SYMBOL_REF_DECL (x))
1484 : SYMBOL_REF_LOCAL_P (x));
1487 /* Return true if rtx constants of mode MODE should be put into a small
1491 mips_rtx_constant_in_small_data_p (enum machine_mode mode)
1493 return (!TARGET_EMBEDDED_DATA
1494 && TARGET_LOCAL_SDATA
1495 && GET_MODE_SIZE (mode) <= mips_section_threshold);
1498 /* Return the method that should be used to access SYMBOL_REF or
1499 LABEL_REF X in context CONTEXT. */
1501 static enum mips_symbol_type
1502 mips_classify_symbol (const_rtx x, enum mips_symbol_context context)
1505 return SYMBOL_GOT_DISP;
1507 if (GET_CODE (x) == LABEL_REF)
1509 /* LABEL_REFs are used for jump tables as well as text labels.
1510 Only return SYMBOL_PC_RELATIVE if we know the label is in
1511 the text section. */
1512 if (TARGET_MIPS16_SHORT_JUMP_TABLES)
1513 return SYMBOL_PC_RELATIVE;
1514 if (TARGET_ABICALLS && !TARGET_ABSOLUTE_ABICALLS)
1515 return SYMBOL_GOT_PAGE_OFST;
1516 return SYMBOL_ABSOLUTE;
1519 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1521 if (SYMBOL_REF_TLS_MODEL (x))
1524 if (CONSTANT_POOL_ADDRESS_P (x))
1526 if (TARGET_MIPS16_TEXT_LOADS)
1527 return SYMBOL_PC_RELATIVE;
1529 if (TARGET_MIPS16_PCREL_LOADS && context == SYMBOL_CONTEXT_MEM)
1530 return SYMBOL_PC_RELATIVE;
1532 if (mips_rtx_constant_in_small_data_p (get_pool_mode (x)))
1533 return SYMBOL_GP_RELATIVE;
1536 /* Do not use small-data accesses for weak symbols; they may end up
1539 && SYMBOL_REF_SMALL_P (x)
1540 && !SYMBOL_REF_WEAK (x))
1541 return SYMBOL_GP_RELATIVE;
1543 /* Don't use GOT accesses for locally-binding symbols when -mno-shared
1546 && !(TARGET_ABSOLUTE_ABICALLS && mips_symbol_binds_local_p (x)))
1548 /* There are three cases to consider:
1550 - o32 PIC (either with or without explicit relocs)
1551 - n32/n64 PIC without explicit relocs
1552 - n32/n64 PIC with explicit relocs
1554 In the first case, both local and global accesses will use an
1555 R_MIPS_GOT16 relocation. We must correctly predict which of
1556 the two semantics (local or global) the assembler and linker
1557 will apply. The choice depends on the symbol's binding rather
1558 than its visibility.
1560 In the second case, the assembler will not use R_MIPS_GOT16
1561 relocations, but it chooses between local and global accesses
1562 in the same way as for o32 PIC.
1564 In the third case we have more freedom since both forms of
1565 access will work for any kind of symbol. However, there seems
1566 little point in doing things differently. */
1567 if (mips_global_symbol_p (x))
1568 return SYMBOL_GOT_DISP;
1570 return SYMBOL_GOT_PAGE_OFST;
1573 if (TARGET_MIPS16_PCREL_LOADS && context != SYMBOL_CONTEXT_CALL)
1574 return SYMBOL_FORCE_TO_MEM;
1575 return SYMBOL_ABSOLUTE;
1578 /* Classify symbolic expression X, given that it appears in context
1581 static enum mips_symbol_type
1582 mips_classify_symbolic_expression (rtx x, enum mips_symbol_context context)
1586 split_const (x, &x, &offset);
1587 if (UNSPEC_ADDRESS_P (x))
1588 return UNSPEC_ADDRESS_TYPE (x);
1590 return mips_classify_symbol (x, context);
1593 /* Return true if OFFSET is within the range [0, ALIGN), where ALIGN
1594 is the alignment (in bytes) of SYMBOL_REF X. */
1597 mips_offset_within_alignment_p (rtx x, HOST_WIDE_INT offset)
1599 /* If for some reason we can't get the alignment for the
1600 symbol, initializing this to one means we will only accept
1602 HOST_WIDE_INT align = 1;
1605 /* Get the alignment of the symbol we're referring to. */
1606 t = SYMBOL_REF_DECL (x);
1608 align = DECL_ALIGN_UNIT (t);
1610 return offset >= 0 && offset < align;
1613 /* Return true if X is a symbolic constant that can be used in context
1614 CONTEXT. If it is, store the type of the symbol in *SYMBOL_TYPE. */
1617 mips_symbolic_constant_p (rtx x, enum mips_symbol_context context,
1618 enum mips_symbol_type *symbol_type)
1622 split_const (x, &x, &offset);
1623 if (UNSPEC_ADDRESS_P (x))
1625 *symbol_type = UNSPEC_ADDRESS_TYPE (x);
1626 x = UNSPEC_ADDRESS (x);
1628 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1630 *symbol_type = mips_classify_symbol (x, context);
1631 if (*symbol_type == SYMBOL_TLS)
1637 if (offset == const0_rtx)
1640 /* Check whether a nonzero offset is valid for the underlying
1642 switch (*symbol_type)
1644 case SYMBOL_ABSOLUTE:
1645 case SYMBOL_FORCE_TO_MEM:
1646 case SYMBOL_32_HIGH:
1647 case SYMBOL_64_HIGH:
1650 /* If the target has 64-bit pointers and the object file only
1651 supports 32-bit symbols, the values of those symbols will be
1652 sign-extended. In this case we can't allow an arbitrary offset
1653 in case the 32-bit value X + OFFSET has a different sign from X. */
1654 if (Pmode == DImode && !ABI_HAS_64BIT_SYMBOLS)
1655 return offset_within_block_p (x, INTVAL (offset));
1657 /* In other cases the relocations can handle any offset. */
1660 case SYMBOL_PC_RELATIVE:
1661 /* Allow constant pool references to be converted to LABEL+CONSTANT.
1662 In this case, we no longer have access to the underlying constant,
1663 but the original symbol-based access was known to be valid. */
1664 if (GET_CODE (x) == LABEL_REF)
1669 case SYMBOL_GP_RELATIVE:
1670 /* Make sure that the offset refers to something within the
1671 same object block. This should guarantee that the final
1672 PC- or GP-relative offset is within the 16-bit limit. */
1673 return offset_within_block_p (x, INTVAL (offset));
1675 case SYMBOL_GOT_PAGE_OFST:
1676 case SYMBOL_GOTOFF_PAGE:
1677 /* If the symbol is global, the GOT entry will contain the symbol's
1678 address, and we will apply a 16-bit offset after loading it.
1679 If the symbol is local, the linker should provide enough local
1680 GOT entries for a 16-bit offset, but larger offsets may lead
1682 return SMALL_INT (offset);
1686 /* There is no carry between the HI and LO REL relocations, so the
1687 offset is only valid if we know it won't lead to such a carry. */
1688 return mips_offset_within_alignment_p (x, INTVAL (offset));
1690 case SYMBOL_GOT_DISP:
1691 case SYMBOL_GOTOFF_DISP:
1692 case SYMBOL_GOTOFF_CALL:
1693 case SYMBOL_GOTOFF_LOADGP:
1696 case SYMBOL_GOTTPREL:
1705 /* This function is used to implement REG_MODE_OK_FOR_BASE_P. */
1708 mips_regno_mode_ok_for_base_p (int regno, enum machine_mode mode, int strict)
1710 if (!HARD_REGISTER_NUM_P (regno))
1714 regno = reg_renumber[regno];
1717 /* These fake registers will be eliminated to either the stack or
1718 hard frame pointer, both of which are usually valid base registers.
1719 Reload deals with the cases where the eliminated form isn't valid. */
1720 if (regno == ARG_POINTER_REGNUM || regno == FRAME_POINTER_REGNUM)
1723 /* In mips16 mode, the stack pointer can only address word and doubleword
1724 values, nothing smaller. There are two problems here:
1726 (a) Instantiating virtual registers can introduce new uses of the
1727 stack pointer. If these virtual registers are valid addresses,
1728 the stack pointer should be too.
1730 (b) Most uses of the stack pointer are not made explicit until
1731 FRAME_POINTER_REGNUM and ARG_POINTER_REGNUM have been eliminated.
1732 We don't know until that stage whether we'll be eliminating to the
1733 stack pointer (which needs the restriction) or the hard frame
1734 pointer (which doesn't).
1736 All in all, it seems more consistent to only enforce this restriction
1737 during and after reload. */
1738 if (TARGET_MIPS16 && regno == STACK_POINTER_REGNUM)
1739 return !strict || GET_MODE_SIZE (mode) == 4 || GET_MODE_SIZE (mode) == 8;
1741 return TARGET_MIPS16 ? M16_REG_P (regno) : GP_REG_P (regno);
1745 /* Return true if X is a valid base register for the given mode.
1746 Allow only hard registers if STRICT. */
1749 mips_valid_base_register_p (rtx x, enum machine_mode mode, int strict)
1751 if (!strict && GET_CODE (x) == SUBREG)
1755 && mips_regno_mode_ok_for_base_p (REGNO (x), mode, strict));
1759 /* Return true if X is a valid address for machine mode MODE. If it is,
1760 fill in INFO appropriately. STRICT is true if we should only accept
1761 hard base registers. */
1764 mips_classify_address (struct mips_address_info *info, rtx x,
1765 enum machine_mode mode, int strict)
1767 switch (GET_CODE (x))
1771 info->type = ADDRESS_REG;
1773 info->offset = const0_rtx;
1774 return mips_valid_base_register_p (info->reg, mode, strict);
1777 info->type = ADDRESS_REG;
1778 info->reg = XEXP (x, 0);
1779 info->offset = XEXP (x, 1);
1780 return (mips_valid_base_register_p (info->reg, mode, strict)
1781 && const_arith_operand (info->offset, VOIDmode));
1784 info->type = ADDRESS_LO_SUM;
1785 info->reg = XEXP (x, 0);
1786 info->offset = XEXP (x, 1);
1787 /* We have to trust the creator of the LO_SUM to do something vaguely
1788 sane. Target-independent code that creates a LO_SUM should also
1789 create and verify the matching HIGH. Target-independent code that
1790 adds an offset to a LO_SUM must prove that the offset will not
1791 induce a carry. Failure to do either of these things would be
1792 a bug, and we are not required to check for it here. The MIPS
1793 backend itself should only create LO_SUMs for valid symbolic
1794 constants, with the high part being either a HIGH or a copy
1797 = mips_classify_symbolic_expression (info->offset, SYMBOL_CONTEXT_MEM);
1798 return (mips_valid_base_register_p (info->reg, mode, strict)
1799 && mips_symbol_insns (info->symbol_type, mode) > 0
1800 && mips_lo_relocs[info->symbol_type] != 0);
1803 /* Small-integer addresses don't occur very often, but they
1804 are legitimate if $0 is a valid base register. */
1805 info->type = ADDRESS_CONST_INT;
1806 return !TARGET_MIPS16 && SMALL_INT (x);
1811 info->type = ADDRESS_SYMBOLIC;
1812 return (mips_symbolic_constant_p (x, SYMBOL_CONTEXT_MEM,
1814 && mips_symbol_insns (info->symbol_type, mode) > 0
1815 && !mips_split_p[info->symbol_type]);
1822 /* Return true if X is a thread-local symbol. */
1825 mips_tls_operand_p (rtx x)
1827 return GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0;
1830 /* Return true if X can not be forced into a constant pool. */
1833 mips_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
1835 return mips_tls_operand_p (*x);
1838 /* Return true if X can not be forced into a constant pool. */
1841 mips_cannot_force_const_mem (rtx x)
1847 /* As an optimization, reject constants that mips_legitimize_move
1850 Suppose we have a multi-instruction sequence that loads constant C
1851 into register R. If R does not get allocated a hard register, and
1852 R is used in an operand that allows both registers and memory
1853 references, reload will consider forcing C into memory and using
1854 one of the instruction's memory alternatives. Returning false
1855 here will force it to use an input reload instead. */
1856 if (GET_CODE (x) == CONST_INT)
1859 split_const (x, &base, &offset);
1860 if (symbolic_operand (base, VOIDmode) && SMALL_INT (offset))
1864 if (TARGET_HAVE_TLS && for_each_rtx (&x, &mips_tls_symbol_ref_1, 0))
1870 /* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. We can't use blocks for
1871 constants when we're using a per-function constant pool. */
1874 mips_use_blocks_for_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED,
1875 const_rtx x ATTRIBUTE_UNUSED)
1877 return !TARGET_MIPS16_PCREL_LOADS;
1880 /* Like mips_symbol_insns, but treat extended MIPS16 instructions as a
1881 single instruction. We rely on the fact that, in the worst case,
1882 all instructions involved in a MIPS16 address calculation are usually
1886 mips_symbol_insns_1 (enum mips_symbol_type type, enum machine_mode mode)
1890 case SYMBOL_ABSOLUTE:
1891 /* When using 64-bit symbols, we need 5 preparatory instructions,
1894 lui $at,%highest(symbol)
1895 daddiu $at,$at,%higher(symbol)
1897 daddiu $at,$at,%hi(symbol)
1900 The final address is then $at + %lo(symbol). With 32-bit
1901 symbols we just need a preparatory lui for normal mode and
1902 a preparatory "li; sll" for MIPS16. */
1903 return ABI_HAS_64BIT_SYMBOLS ? 6 : TARGET_MIPS16 ? 3 : 2;
1905 case SYMBOL_GP_RELATIVE:
1906 /* Treat GP-relative accesses as taking a single instruction on
1907 MIPS16 too; the copy of $gp can often be shared. */
1910 case SYMBOL_PC_RELATIVE:
1911 /* PC-relative constants can be only be used with addiupc,
1913 if (mode == MAX_MACHINE_MODE
1914 || GET_MODE_SIZE (mode) == 4
1915 || GET_MODE_SIZE (mode) == 8)
1918 /* The constant must be loaded using addiupc first. */
1921 case SYMBOL_FORCE_TO_MEM:
1922 /* LEAs will be converted into constant-pool references by
1924 if (mode == MAX_MACHINE_MODE)
1927 /* The constant must be loaded from the constant pool. */
1930 case SYMBOL_GOT_DISP:
1931 /* The constant will have to be loaded from the GOT before it
1932 is used in an address. */
1933 if (mode != MAX_MACHINE_MODE)
1938 case SYMBOL_GOT_PAGE_OFST:
1939 /* Unless -funit-at-a-time is in effect, we can't be sure whether
1940 the local/global classification is accurate. See override_options
1943 The worst cases are:
1945 (1) For local symbols when generating o32 or o64 code. The assembler
1951 ...and the final address will be $at + %lo(symbol).
1953 (2) For global symbols when -mxgot. The assembler will use:
1955 lui $at,%got_hi(symbol)
1958 ...and the final address will be $at + %got_lo(symbol). */
1961 case SYMBOL_GOTOFF_PAGE:
1962 case SYMBOL_GOTOFF_DISP:
1963 case SYMBOL_GOTOFF_CALL:
1964 case SYMBOL_GOTOFF_LOADGP:
1965 case SYMBOL_32_HIGH:
1966 case SYMBOL_64_HIGH:
1972 case SYMBOL_GOTTPREL:
1975 /* A 16-bit constant formed by a single relocation, or a 32-bit
1976 constant formed from a high 16-bit relocation and a low 16-bit
1977 relocation. Use mips_split_p to determine which. */
1978 return !mips_split_p[type] ? 1 : TARGET_MIPS16 ? 3 : 2;
1981 /* We don't treat a bare TLS symbol as a constant. */
1987 /* If MODE is MAX_MACHINE_MODE, return the number of instructions needed
1988 to load symbols of type TYPE into a register. Return 0 if the given
1989 type of symbol cannot be used as an immediate operand.
1991 Otherwise, return the number of instructions needed to load or store
1992 values of mode MODE to or from addresses of type TYPE. Return 0 if
1993 the given type of symbol is not valid in addresses.
1995 In both cases, treat extended MIPS16 instructions as two instructions. */
1998 mips_symbol_insns (enum mips_symbol_type type, enum machine_mode mode)
2000 return mips_symbol_insns_1 (type, mode) * (TARGET_MIPS16 ? 2 : 1);
2003 /* Return true if X is a legitimate $sp-based address for mode MDOE. */
2006 mips_stack_address_p (rtx x, enum machine_mode mode)
2008 struct mips_address_info addr;
2010 return (mips_classify_address (&addr, x, mode, false)
2011 && addr.type == ADDRESS_REG
2012 && addr.reg == stack_pointer_rtx);
2015 /* Return true if a value at OFFSET bytes from BASE can be accessed
2016 using an unextended mips16 instruction. MODE is the mode of the
2019 Usually the offset in an unextended instruction is a 5-bit field.
2020 The offset is unsigned and shifted left once for HIs, twice
2021 for SIs, and so on. An exception is SImode accesses off the
2022 stack pointer, which have an 8-bit immediate field. */
2025 mips16_unextended_reference_p (enum machine_mode mode, rtx base, rtx offset)
2028 && GET_CODE (offset) == CONST_INT
2029 && INTVAL (offset) >= 0
2030 && (INTVAL (offset) & (GET_MODE_SIZE (mode) - 1)) == 0)
2032 if (GET_MODE_SIZE (mode) == 4 && base == stack_pointer_rtx)
2033 return INTVAL (offset) < 256 * GET_MODE_SIZE (mode);
2034 return INTVAL (offset) < 32 * GET_MODE_SIZE (mode);
2040 /* Return the number of instructions needed to load or store a value
2041 of mode MODE at X. Return 0 if X isn't valid for MODE. Assume that
2042 multiword moves may need to be split into word moves if MIGHT_SPLIT_P,
2043 otherwise assume that a single load or store is enough.
2045 For mips16 code, count extended instructions as two instructions. */
2048 mips_address_insns (rtx x, enum machine_mode mode, bool might_split_p)
2050 struct mips_address_info addr;
2053 /* BLKmode is used for single unaligned loads and stores and should
2054 not count as a multiword mode. (GET_MODE_SIZE (BLKmode) is pretty
2055 meaningless, so we have to single it out as a special case one way
2057 if (mode != BLKmode && might_split_p)
2058 factor = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2062 if (mips_classify_address (&addr, x, mode, false))
2067 && !mips16_unextended_reference_p (mode, addr.reg, addr.offset))
2071 case ADDRESS_LO_SUM:
2072 return (TARGET_MIPS16 ? factor * 2 : factor);
2074 case ADDRESS_CONST_INT:
2077 case ADDRESS_SYMBOLIC:
2078 return factor * mips_symbol_insns (addr.symbol_type, mode);
2084 /* Likewise for constant X. */
2087 mips_const_insns (rtx x)
2089 struct mips_integer_op codes[MIPS_MAX_INTEGER_OPS];
2090 enum mips_symbol_type symbol_type;
2093 switch (GET_CODE (x))
2096 if (!mips_symbolic_constant_p (XEXP (x, 0), SYMBOL_CONTEXT_LEA,
2098 || !mips_split_p[symbol_type])
2101 /* This is simply an lui for normal mode. It is an extended
2102 "li" followed by an extended "sll" for MIPS16. */
2103 return TARGET_MIPS16 ? 4 : 1;
2107 /* Unsigned 8-bit constants can be loaded using an unextended
2108 LI instruction. Unsigned 16-bit constants can be loaded
2109 using an extended LI. Negative constants must be loaded
2110 using LI and then negated. */
2111 return (INTVAL (x) >= 0 && INTVAL (x) < 256 ? 1
2112 : SMALL_OPERAND_UNSIGNED (INTVAL (x)) ? 2
2113 : INTVAL (x) > -256 && INTVAL (x) < 0 ? 2
2114 : SMALL_OPERAND_UNSIGNED (-INTVAL (x)) ? 3
2117 return mips_build_integer (codes, INTVAL (x));
2121 return (!TARGET_MIPS16 && x == CONST0_RTX (GET_MODE (x)) ? 1 : 0);
2127 /* See if we can refer to X directly. */
2128 if (mips_symbolic_constant_p (x, SYMBOL_CONTEXT_LEA, &symbol_type))
2129 return mips_symbol_insns (symbol_type, MAX_MACHINE_MODE);
2131 /* Otherwise try splitting the constant into a base and offset.
2132 16-bit offsets can be added using an extra addiu. Larger offsets
2133 must be calculated separately and then added to the base. */
2134 split_const (x, &x, &offset);
2137 int n = mips_const_insns (x);
2140 if (SMALL_INT (offset))
2143 return n + 1 + mips_build_integer (codes, INTVAL (offset));
2150 return mips_symbol_insns (mips_classify_symbol (x, SYMBOL_CONTEXT_LEA),
2159 /* Return the number of instructions needed to implement INSN,
2160 given that it loads from or stores to MEM. Count extended
2161 mips16 instructions as two instructions. */
2164 mips_load_store_insns (rtx mem, rtx insn)
2166 enum machine_mode mode;
2170 gcc_assert (MEM_P (mem));
2171 mode = GET_MODE (mem);
2173 /* Try to prove that INSN does not need to be split. */
2174 might_split_p = true;
2175 if (GET_MODE_BITSIZE (mode) == 64)
2177 set = single_set (insn);
2178 if (set && !mips_split_64bit_move_p (SET_DEST (set), SET_SRC (set)))
2179 might_split_p = false;
2182 return mips_address_insns (XEXP (mem, 0), mode, might_split_p);
2186 /* Return the number of instructions needed for an integer division. */
2189 mips_idiv_insns (void)
2194 if (TARGET_CHECK_ZERO_DIV)
2196 if (GENERATE_DIVIDE_TRAPS)
2202 if (TARGET_FIX_R4000 || TARGET_FIX_R4400)
2207 /* This function is used to implement GO_IF_LEGITIMATE_ADDRESS. It
2208 returns a nonzero value if X is a legitimate address for a memory
2209 operand of the indicated MODE. STRICT is nonzero if this function
2210 is called during reload. */
2213 mips_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2215 struct mips_address_info addr;
2217 return mips_classify_address (&addr, x, mode, strict);
2220 /* Emit a move from SRC to DEST. Assume that the move expanders can
2221 handle all moves if !can_create_pseudo_p (). The distinction is
2222 important because, unlike emit_move_insn, the move expanders know
2223 how to force Pmode objects into the constant pool even when the
2224 constant pool address is not itself legitimate. */
2227 mips_emit_move (rtx dest, rtx src)
2229 return (can_create_pseudo_p ()
2230 ? emit_move_insn (dest, src)
2231 : emit_move_insn_1 (dest, src));
2234 /* Copy VALUE to a register and return that register. If new psuedos
2235 are allowed, copy it into a new register, otherwise use DEST. */
2238 mips_force_temporary (rtx dest, rtx value)
2240 if (can_create_pseudo_p ())
2241 return force_reg (Pmode, value);
2244 mips_emit_move (copy_rtx (dest), value);
2250 /* If MODE is MAX_MACHINE_MODE, ADDR appears as a move operand, otherwise
2251 it appears in a MEM of that mode. Return true if ADDR is a legitimate
2252 constant in that context and can be split into a high part and a LO_SUM.
2253 If so, and if LO_SUM_OUT is nonnull, emit the high part and return
2254 the LO_SUM in *LO_SUM_OUT. Leave *LO_SUM_OUT unchanged otherwise.
2256 TEMP is as for mips_force_temporary and is used to load the high
2257 part into a register. */
2260 mips_split_symbol (rtx temp, rtx addr, enum machine_mode mode, rtx *lo_sum_out)
2262 enum mips_symbol_context context;
2263 enum mips_symbol_type symbol_type;
2266 context = (mode == MAX_MACHINE_MODE
2267 ? SYMBOL_CONTEXT_LEA
2268 : SYMBOL_CONTEXT_MEM);
2269 if (!mips_symbolic_constant_p (addr, context, &symbol_type)
2270 || mips_symbol_insns (symbol_type, mode) == 0
2271 || !mips_split_p[symbol_type])
2276 if (symbol_type == SYMBOL_GP_RELATIVE)
2278 if (!can_create_pseudo_p ())
2280 emit_insn (gen_load_const_gp (copy_rtx (temp)));
2284 high = mips16_gp_pseudo_reg ();
2288 high = gen_rtx_HIGH (Pmode, copy_rtx (addr));
2289 high = mips_force_temporary (temp, high);
2291 *lo_sum_out = gen_rtx_LO_SUM (Pmode, high, addr);
2297 /* Wrap symbol or label BASE in an unspec address of type SYMBOL_TYPE
2298 and add CONST_INT OFFSET to the result. */
2301 mips_unspec_address_offset (rtx base, rtx offset,
2302 enum mips_symbol_type symbol_type)
2304 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base),
2305 UNSPEC_ADDRESS_FIRST + symbol_type);
2306 if (offset != const0_rtx)
2307 base = gen_rtx_PLUS (Pmode, base, offset);
2308 return gen_rtx_CONST (Pmode, base);
2311 /* Return an UNSPEC address with underlying address ADDRESS and symbol
2312 type SYMBOL_TYPE. */
2315 mips_unspec_address (rtx address, enum mips_symbol_type symbol_type)
2319 split_const (address, &base, &offset);
2320 return mips_unspec_address_offset (base, offset, symbol_type);
2324 /* If mips_unspec_address (ADDR, SYMBOL_TYPE) is a 32-bit value, add the
2325 high part to BASE and return the result. Just return BASE otherwise.
2326 TEMP is available as a temporary register if needed.
2328 The returned expression can be used as the first operand to a LO_SUM. */
2331 mips_unspec_offset_high (rtx temp, rtx base, rtx addr,
2332 enum mips_symbol_type symbol_type)
2334 if (mips_split_p[symbol_type])
2336 addr = gen_rtx_HIGH (Pmode, mips_unspec_address (addr, symbol_type));
2337 addr = mips_force_temporary (temp, addr);
2338 return mips_force_temporary (temp, gen_rtx_PLUS (Pmode, addr, base));
2344 /* Return a legitimate address for REG + OFFSET. TEMP is as for
2345 mips_force_temporary; it is only needed when OFFSET is not a
2349 mips_add_offset (rtx temp, rtx reg, HOST_WIDE_INT offset)
2351 if (!SMALL_OPERAND (offset))
2356 /* Load the full offset into a register so that we can use
2357 an unextended instruction for the address itself. */
2358 high = GEN_INT (offset);
2363 /* Leave OFFSET as a 16-bit offset and put the excess in HIGH. */
2364 high = GEN_INT (CONST_HIGH_PART (offset));
2365 offset = CONST_LOW_PART (offset);
2367 high = mips_force_temporary (temp, high);
2368 reg = mips_force_temporary (temp, gen_rtx_PLUS (Pmode, high, reg));
2370 return plus_constant (reg, offset);
2373 /* Emit a call to __tls_get_addr. SYM is the TLS symbol we are
2374 referencing, and TYPE is the symbol type to use (either global
2375 dynamic or local dynamic). V0 is an RTX for the return value
2376 location. The entire insn sequence is returned. */
2378 static GTY(()) rtx mips_tls_symbol;
2381 mips_call_tls_get_addr (rtx sym, enum mips_symbol_type type, rtx v0)
2383 rtx insn, loc, tga, a0;
2385 a0 = gen_rtx_REG (Pmode, GP_ARG_FIRST);
2387 if (!mips_tls_symbol)
2388 mips_tls_symbol = init_one_libfunc ("__tls_get_addr");
2390 loc = mips_unspec_address (sym, type);
2394 emit_insn (gen_rtx_SET (Pmode, a0,
2395 gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, loc)));
2396 tga = gen_rtx_MEM (Pmode, mips_tls_symbol);
2397 insn = emit_call_insn (gen_call_value (v0, tga, const0_rtx, const0_rtx));
2398 CONST_OR_PURE_CALL_P (insn) = 1;
2399 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), v0);
2400 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), a0);
2401 insn = get_insns ();
2408 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
2409 return value will be a valid address and move_operand (either a REG
2413 mips_legitimize_tls_address (rtx loc)
2415 rtx dest, insn, v0, v1, tmp1, tmp2, eqv;
2416 enum tls_model model;
2420 sorry ("MIPS16 TLS");
2421 return gen_reg_rtx (Pmode);
2424 v0 = gen_rtx_REG (Pmode, GP_RETURN);
2425 v1 = gen_rtx_REG (Pmode, GP_RETURN + 1);
2427 model = SYMBOL_REF_TLS_MODEL (loc);
2428 /* Only TARGET_ABICALLS code can have more than one module; other
2429 code must be be static and should not use a GOT. All TLS models
2430 reduce to local exec in this situation. */
2431 if (!TARGET_ABICALLS)
2432 model = TLS_MODEL_LOCAL_EXEC;
2436 case TLS_MODEL_GLOBAL_DYNAMIC:
2437 insn = mips_call_tls_get_addr (loc, SYMBOL_TLSGD, v0);
2438 dest = gen_reg_rtx (Pmode);
2439 emit_libcall_block (insn, dest, v0, loc);
2442 case TLS_MODEL_LOCAL_DYNAMIC:
2443 insn = mips_call_tls_get_addr (loc, SYMBOL_TLSLDM, v0);
2444 tmp1 = gen_reg_rtx (Pmode);
2446 /* Attach a unique REG_EQUIV, to allow the RTL optimizers to
2447 share the LDM result with other LD model accesses. */
2448 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2450 emit_libcall_block (insn, tmp1, v0, eqv);
2452 tmp2 = mips_unspec_offset_high (NULL, tmp1, loc, SYMBOL_DTPREL);
2453 dest = gen_rtx_LO_SUM (Pmode, tmp2,
2454 mips_unspec_address (loc, SYMBOL_DTPREL));
2457 case TLS_MODEL_INITIAL_EXEC:
2458 tmp1 = gen_reg_rtx (Pmode);
2459 tmp2 = mips_unspec_address (loc, SYMBOL_GOTTPREL);
2460 if (Pmode == DImode)
2462 emit_insn (gen_tls_get_tp_di (v1));
2463 emit_insn (gen_load_gotdi (tmp1, pic_offset_table_rtx, tmp2));
2467 emit_insn (gen_tls_get_tp_si (v1));
2468 emit_insn (gen_load_gotsi (tmp1, pic_offset_table_rtx, tmp2));
2470 dest = gen_reg_rtx (Pmode);
2471 emit_insn (gen_add3_insn (dest, tmp1, v1));
2474 case TLS_MODEL_LOCAL_EXEC:
2475 if (Pmode == DImode)
2476 emit_insn (gen_tls_get_tp_di (v1));
2478 emit_insn (gen_tls_get_tp_si (v1));
2480 tmp1 = mips_unspec_offset_high (NULL, v1, loc, SYMBOL_TPREL);
2481 dest = gen_rtx_LO_SUM (Pmode, tmp1,
2482 mips_unspec_address (loc, SYMBOL_TPREL));
2492 /* This function is used to implement LEGITIMIZE_ADDRESS. If *XLOC can
2493 be legitimized in a way that the generic machinery might not expect,
2494 put the new address in *XLOC and return true. MODE is the mode of
2495 the memory being accessed. */
2498 mips_legitimize_address (rtx *xloc, enum machine_mode mode)
2500 if (mips_tls_operand_p (*xloc))
2502 *xloc = mips_legitimize_tls_address (*xloc);
2506 /* See if the address can split into a high part and a LO_SUM. */
2507 if (mips_split_symbol (NULL, *xloc, mode, xloc))
2510 if (GET_CODE (*xloc) == PLUS && GET_CODE (XEXP (*xloc, 1)) == CONST_INT)
2512 /* Handle REG + CONSTANT using mips_add_offset. */
2515 reg = XEXP (*xloc, 0);
2516 if (!mips_valid_base_register_p (reg, mode, 0))
2517 reg = copy_to_mode_reg (Pmode, reg);
2518 *xloc = mips_add_offset (0, reg, INTVAL (XEXP (*xloc, 1)));
2526 /* Subroutine of mips_build_integer (with the same interface).
2527 Assume that the final action in the sequence should be a left shift. */
2530 mips_build_shift (struct mips_integer_op *codes, HOST_WIDE_INT value)
2532 unsigned int i, shift;
2534 /* Shift VALUE right until its lowest bit is set. Shift arithmetically
2535 since signed numbers are easier to load than unsigned ones. */
2537 while ((value & 1) == 0)
2538 value /= 2, shift++;
2540 i = mips_build_integer (codes, value);
2541 codes[i].code = ASHIFT;
2542 codes[i].value = shift;
2547 /* As for mips_build_shift, but assume that the final action will be
2548 an IOR or PLUS operation. */
2551 mips_build_lower (struct mips_integer_op *codes, unsigned HOST_WIDE_INT value)
2553 unsigned HOST_WIDE_INT high;
2556 high = value & ~(unsigned HOST_WIDE_INT) 0xffff;
2557 if (!LUI_OPERAND (high) && (value & 0x18000) == 0x18000)
2559 /* The constant is too complex to load with a simple lui/ori pair
2560 so our goal is to clear as many trailing zeros as possible.
2561 In this case, we know bit 16 is set and that the low 16 bits
2562 form a negative number. If we subtract that number from VALUE,
2563 we will clear at least the lowest 17 bits, maybe more. */
2564 i = mips_build_integer (codes, CONST_HIGH_PART (value));
2565 codes[i].code = PLUS;
2566 codes[i].value = CONST_LOW_PART (value);
2570 i = mips_build_integer (codes, high);
2571 codes[i].code = IOR;
2572 codes[i].value = value & 0xffff;
2578 /* Fill CODES with a sequence of rtl operations to load VALUE.
2579 Return the number of operations needed. */
2582 mips_build_integer (struct mips_integer_op *codes,
2583 unsigned HOST_WIDE_INT value)
2585 if (SMALL_OPERAND (value)
2586 || SMALL_OPERAND_UNSIGNED (value)
2587 || LUI_OPERAND (value))
2589 /* The value can be loaded with a single instruction. */
2590 codes[0].code = UNKNOWN;
2591 codes[0].value = value;
2594 else if ((value & 1) != 0 || LUI_OPERAND (CONST_HIGH_PART (value)))
2596 /* Either the constant is a simple LUI/ORI combination or its
2597 lowest bit is set. We don't want to shift in this case. */
2598 return mips_build_lower (codes, value);
2600 else if ((value & 0xffff) == 0)
2602 /* The constant will need at least three actions. The lowest
2603 16 bits are clear, so the final action will be a shift. */
2604 return mips_build_shift (codes, value);
2608 /* The final action could be a shift, add or inclusive OR.
2609 Rather than use a complex condition to select the best
2610 approach, try both mips_build_shift and mips_build_lower
2611 and pick the one that gives the shortest sequence.
2612 Note that this case is only used once per constant. */
2613 struct mips_integer_op alt_codes[MIPS_MAX_INTEGER_OPS];
2614 unsigned int cost, alt_cost;
2616 cost = mips_build_shift (codes, value);
2617 alt_cost = mips_build_lower (alt_codes, value);
2618 if (alt_cost < cost)
2620 memcpy (codes, alt_codes, alt_cost * sizeof (codes[0]));
2628 /* Load VALUE into DEST, using TEMP as a temporary register if need be. */
2631 mips_move_integer (rtx dest, rtx temp, unsigned HOST_WIDE_INT value)
2633 struct mips_integer_op codes[MIPS_MAX_INTEGER_OPS];
2634 enum machine_mode mode;
2635 unsigned int i, cost;
2638 mode = GET_MODE (dest);
2639 cost = mips_build_integer (codes, value);
2641 /* Apply each binary operation to X. Invariant: X is a legitimate
2642 source operand for a SET pattern. */
2643 x = GEN_INT (codes[0].value);
2644 for (i = 1; i < cost; i++)
2646 if (!can_create_pseudo_p ())
2648 emit_insn (gen_rtx_SET (VOIDmode, temp, x));
2652 x = force_reg (mode, x);
2653 x = gen_rtx_fmt_ee (codes[i].code, mode, x, GEN_INT (codes[i].value));
2656 emit_insn (gen_rtx_SET (VOIDmode, dest, x));
2660 /* Subroutine of mips_legitimize_move. Move constant SRC into register
2661 DEST given that SRC satisfies immediate_operand but doesn't satisfy
2665 mips_legitimize_const_move (enum machine_mode mode, rtx dest, rtx src)
2669 /* Split moves of big integers into smaller pieces. */
2670 if (splittable_const_int_operand (src, mode))
2672 mips_move_integer (dest, dest, INTVAL (src));
2676 /* Split moves of symbolic constants into high/low pairs. */
2677 if (mips_split_symbol (dest, src, MAX_MACHINE_MODE, &src))
2679 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
2683 if (mips_tls_operand_p (src))
2685 mips_emit_move (dest, mips_legitimize_tls_address (src));
2689 /* If we have (const (plus symbol offset)), and that expression cannot
2690 be forced into memory, load the symbol first and add in the offset.
2691 In non-MIPS16 mode, prefer to do this even if the constant _can_ be
2692 forced into memory, as it usually produces better code. */
2693 split_const (src, &base, &offset);
2694 if (offset != const0_rtx
2695 && (targetm.cannot_force_const_mem (src)
2696 || (!TARGET_MIPS16 && can_create_pseudo_p ())))
2698 base = mips_force_temporary (dest, base);
2699 mips_emit_move (dest, mips_add_offset (0, base, INTVAL (offset)));
2703 src = force_const_mem (mode, src);
2705 /* When using explicit relocs, constant pool references are sometimes
2706 not legitimate addresses. */
2707 mips_split_symbol (dest, XEXP (src, 0), mode, &XEXP (src, 0));
2708 mips_emit_move (dest, src);
2712 /* If (set DEST SRC) is not a valid instruction, emit an equivalent
2713 sequence that is valid. */
2716 mips_legitimize_move (enum machine_mode mode, rtx dest, rtx src)
2718 if (!register_operand (dest, mode) && !reg_or_0_operand (src, mode))
2720 mips_emit_move (dest, force_reg (mode, src));
2724 /* Check for individual, fully-reloaded mflo and mfhi instructions. */
2725 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
2726 && REG_P (src) && MD_REG_P (REGNO (src))
2727 && REG_P (dest) && GP_REG_P (REGNO (dest)))
2729 int other_regno = REGNO (src) == HI_REGNUM ? LO_REGNUM : HI_REGNUM;
2730 if (GET_MODE_SIZE (mode) <= 4)
2731 emit_insn (gen_mfhilo_si (gen_rtx_REG (SImode, REGNO (dest)),
2732 gen_rtx_REG (SImode, REGNO (src)),
2733 gen_rtx_REG (SImode, other_regno)));
2735 emit_insn (gen_mfhilo_di (gen_rtx_REG (DImode, REGNO (dest)),
2736 gen_rtx_REG (DImode, REGNO (src)),
2737 gen_rtx_REG (DImode, other_regno)));
2741 /* We need to deal with constants that would be legitimate
2742 immediate_operands but not legitimate move_operands. */
2743 if (CONSTANT_P (src) && !move_operand (src, mode))
2745 mips_legitimize_const_move (mode, dest, src);
2746 set_unique_reg_note (get_last_insn (), REG_EQUAL, copy_rtx (src));
2752 /* We need a lot of little routines to check constant values on the
2753 mips16. These are used to figure out how long the instruction will
2754 be. It would be much better to do this using constraints, but
2755 there aren't nearly enough letters available. */
2758 m16_check_op (rtx op, int low, int high, int mask)
2760 return (GET_CODE (op) == CONST_INT
2761 && INTVAL (op) >= low
2762 && INTVAL (op) <= high
2763 && (INTVAL (op) & mask) == 0);
2767 m16_uimm3_b (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2769 return m16_check_op (op, 0x1, 0x8, 0);
2773 m16_simm4_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2775 return m16_check_op (op, - 0x8, 0x7, 0);
2779 m16_nsimm4_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2781 return m16_check_op (op, - 0x7, 0x8, 0);
2785 m16_simm5_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2787 return m16_check_op (op, - 0x10, 0xf, 0);
2791 m16_nsimm5_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2793 return m16_check_op (op, - 0xf, 0x10, 0);
2797 m16_uimm5_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2799 return m16_check_op (op, (- 0x10) << 2, 0xf << 2, 3);
2803 m16_nuimm5_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2805 return m16_check_op (op, (- 0xf) << 2, 0x10 << 2, 3);
2809 m16_simm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2811 return m16_check_op (op, - 0x80, 0x7f, 0);
2815 m16_nsimm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2817 return m16_check_op (op, - 0x7f, 0x80, 0);
2821 m16_uimm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2823 return m16_check_op (op, 0x0, 0xff, 0);
2827 m16_nuimm8_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2829 return m16_check_op (op, - 0xff, 0x0, 0);
2833 m16_uimm8_m1_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2835 return m16_check_op (op, - 0x1, 0xfe, 0);
2839 m16_uimm8_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2841 return m16_check_op (op, 0x0, 0xff << 2, 3);
2845 m16_nuimm8_4 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2847 return m16_check_op (op, (- 0xff) << 2, 0x0, 3);
2851 m16_simm8_8 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2853 return m16_check_op (op, (- 0x80) << 3, 0x7f << 3, 7);
2857 m16_nsimm8_8 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2859 return m16_check_op (op, (- 0x7f) << 3, 0x80 << 3, 7);
2862 /* Return true if ADDR matches the pattern for the lwxs load scaled indexed
2863 address instruction. */
2866 mips_lwxs_address_p (rtx addr)
2869 && GET_CODE (addr) == PLUS
2870 && REG_P (XEXP (addr, 1)))
2872 rtx offset = XEXP (addr, 0);
2873 if (GET_CODE (offset) == MULT
2874 && REG_P (XEXP (offset, 0))
2875 && GET_CODE (XEXP (offset, 1)) == CONST_INT
2876 && INTVAL (XEXP (offset, 1)) == 4)
2882 /* The cost of loading values from the constant pool. It should be
2883 larger than the cost of any constant we want to synthesize inline. */
2885 #define CONSTANT_POOL_COST COSTS_N_INSNS (TARGET_MIPS16 ? 4 : 8)
2887 /* Return the cost of X when used as an operand to the MIPS16 instruction
2888 that implements CODE. Return -1 if there is no such instruction, or if
2889 X is not a valid immediate operand for it. */
2892 mips16_constant_cost (int code, HOST_WIDE_INT x)
2899 /* Shifts by between 1 and 8 bits (inclusive) are unextended,
2900 other shifts are extended. The shift patterns truncate the shift
2901 count to the right size, so there are no out-of-range values. */
2902 if (IN_RANGE (x, 1, 8))
2904 return COSTS_N_INSNS (1);
2907 if (IN_RANGE (x, -128, 127))
2909 if (SMALL_OPERAND (x))
2910 return COSTS_N_INSNS (1);
2914 /* Like LE, but reject the always-true case. */
2918 /* We add 1 to the immediate and use SLT. */
2921 /* We can use CMPI for an xor with an unsigned 16-bit X. */
2924 if (IN_RANGE (x, 0, 255))
2926 if (SMALL_OPERAND_UNSIGNED (x))
2927 return COSTS_N_INSNS (1);
2932 /* Equality comparisons with 0 are cheap. */
2942 /* Return true if there is a non-MIPS16 instruction that implements CODE
2943 and if that instruction accepts X as an immediate operand. */
2946 mips_immediate_operand_p (int code, HOST_WIDE_INT x)
2953 /* All shift counts are truncated to a valid constant. */
2958 /* Likewise rotates, if the target supports rotates at all. */
2964 /* These instructions take 16-bit unsigned immediates. */
2965 return SMALL_OPERAND_UNSIGNED (x);
2970 /* These instructions take 16-bit signed immediates. */
2971 return SMALL_OPERAND (x);
2977 /* The "immediate" forms of these instructions are really
2978 implemented as comparisons with register 0. */
2983 /* Likewise, meaning that the only valid immediate operand is 1. */
2987 /* We add 1 to the immediate and use SLT. */
2988 return SMALL_OPERAND (x + 1);
2991 /* Likewise SLTU, but reject the always-true case. */
2992 return SMALL_OPERAND (x + 1) && x + 1 != 0;
2996 /* The bit position and size are immediate operands. */
2997 return ISA_HAS_EXT_INS;
3000 /* By default assume that $0 can be used for 0. */
3005 /* Return the cost of binary operation X, given that the instruction
3006 sequence for a word-sized or smaller operation has cost SINGLE_COST
3007 and that the sequence of a double-word operation has cost DOUBLE_COST. */
3010 mips_binary_cost (rtx x, int single_cost, int double_cost)
3014 if (GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD * 2)
3019 + rtx_cost (XEXP (x, 0), 0)
3020 + rtx_cost (XEXP (x, 1), GET_CODE (x)));
3023 /* Return the cost of floating-point multiplications of mode MODE. */
3026 mips_fp_mult_cost (enum machine_mode mode)
3028 return mode == DFmode ? mips_cost->fp_mult_df : mips_cost->fp_mult_sf;
3031 /* Return the cost of floating-point divisions of mode MODE. */
3034 mips_fp_div_cost (enum machine_mode mode)
3036 return mode == DFmode ? mips_cost->fp_div_df : mips_cost->fp_div_sf;
3039 /* Return the cost of sign-extending OP to mode MODE, not including the
3040 cost of OP itself. */
3043 mips_sign_extend_cost (enum machine_mode mode, rtx op)
3046 /* Extended loads are as cheap as unextended ones. */
3049 if (TARGET_64BIT && mode == DImode && GET_MODE (op) == SImode)
3050 /* A sign extension from SImode to DImode in 64-bit mode is free. */
3053 if (ISA_HAS_SEB_SEH || GENERATE_MIPS16E)
3054 /* We can use SEB or SEH. */
3055 return COSTS_N_INSNS (1);
3057 /* We need to use a shift left and a shift right. */
3058 return COSTS_N_INSNS (TARGET_MIPS16 ? 4 : 2);
3061 /* Return the cost of zero-extending OP to mode MODE, not including the
3062 cost of OP itself. */
3065 mips_zero_extend_cost (enum machine_mode mode, rtx op)
3068 /* Extended loads are as cheap as unextended ones. */
3071 if (TARGET_64BIT && mode == DImode && GET_MODE (op) == SImode)
3072 /* We need a shift left by 32 bits and a shift right by 32 bits. */
3073 return COSTS_N_INSNS (TARGET_MIPS16 ? 4 : 2);
3075 if (GENERATE_MIPS16E)
3076 /* We can use ZEB or ZEH. */
3077 return COSTS_N_INSNS (1);
3080 /* We need to load 0xff or 0xffff into a register and use AND. */
3081 return COSTS_N_INSNS (GET_MODE (op) == QImode ? 2 : 3);
3083 /* We can use ANDI. */
3084 return COSTS_N_INSNS (1);
3087 /* Implement TARGET_RTX_COSTS. */
3090 mips_rtx_costs (rtx x, int code, int outer_code, int *total)
3092 enum machine_mode mode = GET_MODE (x);
3093 bool float_mode_p = FLOAT_MODE_P (mode);
3097 /* The cost of a COMPARE is hard to define for MIPS. COMPAREs don't
3098 appear in the instruction stream, and the cost of a comparison is
3099 really the cost of the branch or scc condition. At the time of
3100 writing, gcc only uses an explicit outer COMPARE code when optabs
3101 is testing whether a constant is expensive enough to force into a
3102 register. We want optabs to pass such constants through the MIPS
3103 expanders instead, so make all constants very cheap here. */
3104 if (outer_code == COMPARE)
3106 gcc_assert (CONSTANT_P (x));
3114 /* Treat *clear_upper32-style ANDs as having zero cost in the
3115 second operand. The cost is entirely in the first operand.
3117 ??? This is needed because we would otherwise try to CSE
3118 the constant operand. Although that's the right thing for
3119 instructions that continue to be a register operation throughout
3120 compilation, it is disastrous for instructions that could
3121 later be converted into a memory operation. */
3123 && outer_code == AND
3124 && UINTVAL (x) == 0xffffffff)
3132 cost = mips16_constant_cost (outer_code, INTVAL (x));
3141 /* When not optimizing for size, we care more about the cost
3142 of hot code, and hot code is often in a loop. If a constant
3143 operand needs to be forced into a register, we will often be
3144 able to hoist the constant load out of the loop, so the load
3145 should not contribute to the cost. */
3147 || mips_immediate_operand_p (outer_code, INTVAL (x)))
3159 if (force_to_mem_operand (x, VOIDmode))
3161 *total = COSTS_N_INSNS (1);
3164 cost = mips_const_insns (x);
3167 /* If the constant is likely to be stored in a GPR, SETs of
3168 single-insn constants are as cheap as register sets; we
3169 never want to CSE them.
3171 Don't reduce the cost of storing a floating-point zero in
3172 FPRs. If we have a zero in an FPR for other reasons, we
3173 can get better cfg-cleanup and delayed-branch results by
3174 using it consistently, rather than using $0 sometimes and
3175 an FPR at other times. Also, moves between floating-point
3176 registers are sometimes cheaper than (D)MTC1 $0. */
3178 && outer_code == SET
3179 && !(float_mode_p && TARGET_HARD_FLOAT))
3181 /* When non-MIPS16 code loads a constant N>1 times, we rarely
3182 want to CSE the constant itself. It is usually better to
3183 have N copies of the last operation in the sequence and one
3184 shared copy of the other operations. (Note that this is
3185 not true for MIPS16 code, where the final operation in the
3186 sequence is often an extended instruction.)
3188 Also, if we have a CONST_INT, we don't know whether it is
3189 for a word or doubleword operation, so we cannot rely on
3190 the result of mips_build_integer. */
3191 else if (!TARGET_MIPS16
3192 && (outer_code == SET || mode == VOIDmode))
3194 *total = COSTS_N_INSNS (cost);
3197 /* The value will need to be fetched from the constant pool. */
3198 *total = CONSTANT_POOL_COST;
3202 /* If the address is legitimate, return the number of
3203 instructions it needs. */
3205 cost = mips_address_insns (addr, mode, true);
3208 *total = COSTS_N_INSNS (cost + 1);
3211 /* Check for a scaled indexed address. */
3212 if (mips_lwxs_address_p (addr))
3214 *total = COSTS_N_INSNS (2);
3217 /* Otherwise use the default handling. */
3221 *total = COSTS_N_INSNS (6);
3225 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) > UNITS_PER_WORD ? 2 : 1);
3229 /* Check for a *clear_upper32 pattern and treat it like a zero
3230 extension. See the pattern's comment for details. */
3233 && CONST_INT_P (XEXP (x, 1))
3234 && UINTVAL (XEXP (x, 1)) == 0xffffffff)
3236 *total = (mips_zero_extend_cost (mode, XEXP (x, 0))
3237 + rtx_cost (XEXP (x, 0), 0));
3244 /* Double-word operations use two single-word operations. */
3245 *total = mips_binary_cost (x, COSTS_N_INSNS (1), COSTS_N_INSNS (2));
3253 if (CONSTANT_P (XEXP (x, 1)))
3254 *total = mips_binary_cost (x, COSTS_N_INSNS (1), COSTS_N_INSNS (4));
3256 *total = mips_binary_cost (x, COSTS_N_INSNS (1), COSTS_N_INSNS (12));
3261 *total = mips_cost->fp_add;
3263 *total = COSTS_N_INSNS (4);
3267 /* Low-part immediates need an extended MIPS16 instruction. */
3268 *total = (COSTS_N_INSNS (TARGET_MIPS16 ? 2 : 1)
3269 + rtx_cost (XEXP (x, 0), 0));
3284 /* Branch comparisons have VOIDmode, so use the first operand's
3286 mode = GET_MODE (XEXP (x, 0));
3287 if (FLOAT_MODE_P (mode))
3289 *total = mips_cost->fp_add;
3292 *total = mips_binary_cost (x, COSTS_N_INSNS (1), COSTS_N_INSNS (4));
3297 && ISA_HAS_NMADD_NMSUB
3298 && TARGET_FUSED_MADD
3299 && !HONOR_NANS (mode)
3300 && !HONOR_SIGNED_ZEROS (mode))
3302 /* See if we can use NMADD or NMSUB. See mips.md for the
3303 associated patterns. */
3304 rtx op0 = XEXP (x, 0);
3305 rtx op1 = XEXP (x, 1);
3306 if (GET_CODE (op0) == MULT && GET_CODE (XEXP (op0, 0)) == NEG)
3308 *total = (mips_fp_mult_cost (mode)
3309 + rtx_cost (XEXP (XEXP (op0, 0), 0), 0)
3310 + rtx_cost (XEXP (op0, 1), 0)
3311 + rtx_cost (op1, 0));
3314 if (GET_CODE (op1) == MULT)
3316 *total = (mips_fp_mult_cost (mode)
3318 + rtx_cost (XEXP (op1, 0), 0)
3319 + rtx_cost (XEXP (op1, 1), 0));
3329 && TARGET_FUSED_MADD
3330 && GET_CODE (XEXP (x, 0)) == MULT)
3333 *total = mips_cost->fp_add;
3337 /* Double-word operations require three single-word operations and
3338 an SLTU. The MIPS16 version then needs to move the result of
3339 the SLTU from $24 to a MIPS16 register. */
3340 *total = mips_binary_cost (x, COSTS_N_INSNS (1),
3341 COSTS_N_INSNS (TARGET_MIPS16 ? 5 : 4));
3346 && ISA_HAS_NMADD_NMSUB
3347 && TARGET_FUSED_MADD
3348 && !HONOR_NANS (mode)
3349 && HONOR_SIGNED_ZEROS (mode))
3351 /* See if we can use NMADD or NMSUB. See mips.md for the
3352 associated patterns. */
3353 rtx op = XEXP (x, 0);
3354 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3355 && GET_CODE (XEXP (op, 0)) == MULT)
3357 *total = (mips_fp_mult_cost (mode)
3358 + rtx_cost (XEXP (XEXP (op, 0), 0), 0)
3359 + rtx_cost (XEXP (XEXP (op, 0), 1), 0)
3360 + rtx_cost (XEXP (op, 1), 0));
3366 *total = mips_cost->fp_add;
3368 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) > UNITS_PER_WORD ? 4 : 1);
3373 *total = mips_fp_mult_cost (mode);
3374 else if (mode == DImode && !TARGET_64BIT)
3375 /* Synthesized from 2 mulsi3s, 1 mulsidi3 and two additions,
3376 where the mulsidi3 always includes an MFHI and an MFLO. */
3377 *total = (optimize_size
3378 ? COSTS_N_INSNS (ISA_HAS_MUL3 ? 7 : 9)
3379 : mips_cost->int_mult_si * 3 + 6);
3380 else if (optimize_size)
3381 *total = (ISA_HAS_MUL3 ? 1 : 2);
3382 else if (mode == DImode)
3383 *total = mips_cost->int_mult_di;
3385 *total = mips_cost->int_mult_si;
3389 /* Check for a reciprocal. */
3390 if (float_mode_p && XEXP (x, 0) == CONST1_RTX (mode))
3393 && flag_unsafe_math_optimizations
3394 && (outer_code == SQRT || GET_CODE (XEXP (x, 1)) == SQRT))
3396 /* An rsqrt<mode>a or rsqrt<mode>b pattern. Count the
3397 division as being free. */
3398 *total = rtx_cost (XEXP (x, 1), 0);
3403 *total = mips_fp_div_cost (mode) + rtx_cost (XEXP (x, 1), 0);
3413 *total = mips_fp_div_cost (mode);
3422 /* It is our responsibility to make division by a power of 2
3423 as cheap as 2 register additions if we want the division
3424 expanders to be used for such operations; see the setting
3425 of sdiv_pow2_cheap in optabs.c. Using (D)DIV for MIPS16
3426 should always produce shorter code than using
3427 expand_sdiv2_pow2. */
3429 && CONST_INT_P (XEXP (x, 1))
3430 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
3432 *total = COSTS_N_INSNS (2) + rtx_cost (XEXP (x, 0), 0);
3435 *total = COSTS_N_INSNS (mips_idiv_insns ());
3437 else if (mode == DImode)
3438 *total = mips_cost->int_div_di;
3440 *total = mips_cost->int_div_si;
3444 *total = mips_sign_extend_cost (mode, XEXP (x, 0));
3448 *total = mips_zero_extend_cost (mode, XEXP (x, 0));
3452 case UNSIGNED_FLOAT:
3455 case FLOAT_TRUNCATE:
3456 *total = mips_cost->fp_add;
3464 /* Provide the costs of an addressing mode that contains ADDR.
3465 If ADDR is not a valid address, its cost is irrelevant. */
3468 mips_address_cost (rtx addr)
3470 return mips_address_insns (addr, SImode, false);
3473 /* Return one word of double-word value OP, taking into account the fixed
3474 endianness of certain registers. HIGH_P is true to select the high part,
3475 false to select the low part. */
3478 mips_subword (rtx op, int high_p)
3480 unsigned int byte, offset;
3481 enum machine_mode mode;
3483 mode = GET_MODE (op);
3484 if (mode == VOIDmode)
3487 if (TARGET_BIG_ENDIAN ? !high_p : high_p)
3488 byte = UNITS_PER_WORD;
3492 if (FP_REG_RTX_P (op))
3494 /* Paired FPRs are always ordered little-endian. */
3495 offset = (UNITS_PER_WORD < UNITS_PER_HWFPVALUE ? high_p : byte != 0);
3496 return gen_rtx_REG (word_mode, REGNO (op) + offset);
3500 return mips_rewrite_small_data (adjust_address (op, word_mode, byte));
3502 return simplify_gen_subreg (word_mode, op, mode, byte);
3506 /* Return true if a 64-bit move from SRC to DEST should be split into two. */
3509 mips_split_64bit_move_p (rtx dest, rtx src)
3514 /* FP->FP moves can be done in a single instruction. */
3515 if (FP_REG_RTX_P (src) && FP_REG_RTX_P (dest))
3518 /* Check for floating-point loads and stores. They can be done using
3519 ldc1 and sdc1 on MIPS II and above. */
3522 if (FP_REG_RTX_P (dest) && MEM_P (src))
3524 if (FP_REG_RTX_P (src) && MEM_P (dest))
3531 /* Split a doubleword move from SRC to DEST. On 32-bit targets,
3532 this function handles 64-bit moves for which mips_split_64bit_move_p
3533 holds. For 64-bit targets, this function handles 128-bit moves. */
3536 mips_split_doubleword_move (rtx dest, rtx src)
3538 if (FP_REG_RTX_P (dest) || FP_REG_RTX_P (src))
3540 if (!TARGET_64BIT && GET_MODE (dest) == DImode)
3541 emit_insn (gen_move_doubleword_fprdi (dest, src));
3542 else if (!TARGET_64BIT && GET_MODE (dest) == DFmode)
3543 emit_insn (gen_move_doubleword_fprdf (dest, src));
3544 else if (TARGET_64BIT && GET_MODE (dest) == TFmode)
3545 emit_insn (gen_move_doubleword_fprtf (dest, src));
3551 /* The operation can be split into two normal moves. Decide in
3552 which order to do them. */
3555 low_dest = mips_subword (dest, 0);
3556 if (REG_P (low_dest)
3557 && reg_overlap_mentioned_p (low_dest, src))
3559 mips_emit_move (mips_subword (dest, 1), mips_subword (src, 1));
3560 mips_emit_move (low_dest, mips_subword (src, 0));
3564 mips_emit_move (low_dest, mips_subword (src, 0));
3565 mips_emit_move (mips_subword (dest, 1), mips_subword (src, 1));
3570 /* Return the appropriate instructions to move SRC into DEST. Assume
3571 that SRC is operand 1 and DEST is operand 0. */
3574 mips_output_move (rtx dest, rtx src)
3576 enum rtx_code dest_code, src_code;
3577 enum mips_symbol_type symbol_type;
3580 dest_code = GET_CODE (dest);
3581 src_code = GET_CODE (src);
3582 dbl_p = (GET_MODE_SIZE (GET_MODE (dest)) == 8);
3584 if (dbl_p && mips_split_64bit_move_p (dest, src))
3587 if ((src_code == REG && GP_REG_P (REGNO (src)))
3588 || (!TARGET_MIPS16 && src == CONST0_RTX (GET_MODE (dest))))
3590 if (dest_code == REG)
3592 if (GP_REG_P (REGNO (dest)))
3593 return "move\t%0,%z1";
3595 if (MD_REG_P (REGNO (dest)))
3598 if (DSP_ACC_REG_P (REGNO (dest)))
3600 static char retval[] = "mt__\t%z1,%q0";
3601 retval[2] = reg_names[REGNO (dest)][4];
3602 retval[3] = reg_names[REGNO (dest)][5];
3606 if (FP_REG_P (REGNO (dest)))
3607 return (dbl_p ? "dmtc1\t%z1,%0" : "mtc1\t%z1,%0");
3609 if (ALL_COP_REG_P (REGNO (dest)))
3611 static char retval[] = "dmtc_\t%z1,%0";
3613 retval[4] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (dest));
3614 return (dbl_p ? retval : retval + 1);
3617 if (dest_code == MEM)
3618 return (dbl_p ? "sd\t%z1,%0" : "sw\t%z1,%0");
3620 if (dest_code == REG && GP_REG_P (REGNO (dest)))
3622 if (src_code == REG)
3624 if (DSP_ACC_REG_P (REGNO (src)))
3626 static char retval[] = "mf__\t%0,%q1";
3627 retval[2] = reg_names[REGNO (src)][4];
3628 retval[3] = reg_names[REGNO (src)][5];
3632 if (ST_REG_P (REGNO (src)) && ISA_HAS_8CC)
3633 return "lui\t%0,0x3f80\n\tmovf\t%0,%.,%1";
3635 if (FP_REG_P (REGNO (src)))
3636 return (dbl_p ? "dmfc1\t%0,%1" : "mfc1\t%0,%1");
3638 if (ALL_COP_REG_P (REGNO (src)))
3640 static char retval[] = "dmfc_\t%0,%1";
3642 retval[4] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (src));
3643 return (dbl_p ? retval : retval + 1);
3647 if (src_code == MEM)
3648 return (dbl_p ? "ld\t%0,%1" : "lw\t%0,%1");
3650 if (src_code == CONST_INT)
3652 /* Don't use the X format, because that will give out of
3653 range numbers for 64-bit hosts and 32-bit targets. */
3655 return "li\t%0,%1\t\t\t# %X1";
3657 if (INTVAL (src) >= 0 && INTVAL (src) <= 0xffff)
3660 if (INTVAL (src) < 0 && INTVAL (src) >= -0xffff)
3664 if (src_code == HIGH)
3665 return TARGET_MIPS16 ? "#" : "lui\t%0,%h1";
3667 if (CONST_GP_P (src))
3668 return "move\t%0,%1";
3670 if (mips_symbolic_constant_p (src, SYMBOL_CONTEXT_LEA, &symbol_type)
3671 && mips_lo_relocs[symbol_type] != 0)
3673 /* A signed 16-bit constant formed by applying a relocation
3674 operator to a symbolic address. */
3675 gcc_assert (!mips_split_p[symbol_type]);
3676 return "li\t%0,%R1";
3679 if (symbolic_operand (src, VOIDmode))
3681 gcc_assert (TARGET_MIPS16
3682 ? TARGET_MIPS16_TEXT_LOADS
3683 : !TARGET_EXPLICIT_RELOCS);
3684 return (dbl_p ? "dla\t%0,%1" : "la\t%0,%1");
3687 if (src_code == REG && FP_REG_P (REGNO (src)))
3689 if (dest_code == REG && FP_REG_P (REGNO (dest)))
3691 if (GET_MODE (dest) == V2SFmode)
3692 return "mov.ps\t%0,%1";
3694 return (dbl_p ? "mov.d\t%0,%1" : "mov.s\t%0,%1");
3697 if (dest_code == MEM)
3698 return (dbl_p ? "sdc1\t%1,%0" : "swc1\t%1,%0");
3700 if (dest_code == REG && FP_REG_P (REGNO (dest)))
3702 if (src_code == MEM)
3703 return (dbl_p ? "ldc1\t%0,%1" : "lwc1\t%0,%1");
3705 if (dest_code == REG && ALL_COP_REG_P (REGNO (dest)) && src_code == MEM)
3707 static char retval[] = "l_c_\t%0,%1";
3709 retval[1] = (dbl_p ? 'd' : 'w');
3710 retval[3] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (dest));
3713 if (dest_code == MEM && src_code == REG && ALL_COP_REG_P (REGNO (src)))
3715 static char retval[] = "s_c_\t%1,%0";
3717 retval[1] = (dbl_p ? 'd' : 'w');
3718 retval[3] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (src));
3724 /* Restore $gp from its save slot. Valid only when using o32 or
3728 mips_restore_gp (void)
3732 gcc_assert (TARGET_ABICALLS && TARGET_OLDABI);
3734 address = mips_add_offset (pic_offset_table_rtx,
3735 frame_pointer_needed
3736 ? hard_frame_pointer_rtx
3737 : stack_pointer_rtx,
3738 current_function_outgoing_args_size);
3739 slot = gen_rtx_MEM (Pmode, address);
3741 mips_emit_move (pic_offset_table_rtx, slot);
3742 if (!TARGET_EXPLICIT_RELOCS)
3743 emit_insn (gen_blockage ());
3746 /* Emit an instruction of the form (set TARGET (CODE OP0 OP1)). */
3749 mips_emit_binary (enum rtx_code code, rtx target, rtx op0, rtx op1)
3751 emit_insn (gen_rtx_SET (VOIDmode, target,
3752 gen_rtx_fmt_ee (code, GET_MODE (target), op0, op1)));
3755 /* Return true if CMP1 is a suitable second operand for relational
3756 operator CODE. See also the *sCC patterns in mips.md. */
3759 mips_relational_operand_ok_p (enum rtx_code code, rtx cmp1)
3765 return reg_or_0_operand (cmp1, VOIDmode);
3769 return !TARGET_MIPS16 && cmp1 == const1_rtx;
3773 return arith_operand (cmp1, VOIDmode);
3776 return sle_operand (cmp1, VOIDmode);
3779 return sleu_operand (cmp1, VOIDmode);
3786 /* Canonicalize LE or LEU comparisons into LT comparisons when
3787 possible to avoid extra instructions or inverting the
3791 mips_canonicalize_comparison (enum rtx_code *code, rtx *cmp1,
3792 enum machine_mode mode)
3794 HOST_WIDE_INT original, plus_one;
3796 if (GET_CODE (*cmp1) != CONST_INT)
3799 original = INTVAL (*cmp1);
3800 plus_one = trunc_int_for_mode ((unsigned HOST_WIDE_INT) original + 1, mode);
3805 if (original < plus_one)
3808 *cmp1 = force_reg (mode, GEN_INT (plus_one));
3817 *cmp1 = force_reg (mode, GEN_INT (plus_one));
3830 /* Compare CMP0 and CMP1 using relational operator CODE and store the
3831 result in TARGET. CMP0 and TARGET are register_operands that have
3832 the same integer mode. If INVERT_PTR is nonnull, it's OK to set
3833 TARGET to the inverse of the result and flip *INVERT_PTR instead. */
3836 mips_emit_int_relational (enum rtx_code code, bool *invert_ptr,
3837 rtx target, rtx cmp0, rtx cmp1)
3839 /* First see if there is a MIPS instruction that can do this operation
3840 with CMP1 in its current form. If not, try to canonicalize the
3841 comparison to LT. If that fails, try doing the same for the
3842 inverse operation. If that also fails, force CMP1 into a register
3844 if (mips_relational_operand_ok_p (code, cmp1))
3845 mips_emit_binary (code, target, cmp0, cmp1);
3846 else if (mips_canonicalize_comparison (&code, &cmp1, GET_MODE (target)))
3847 mips_emit_binary (code, target, cmp0, cmp1);
3850 enum rtx_code inv_code = reverse_condition (code);
3851 if (!mips_relational_operand_ok_p (inv_code, cmp1))
3853 cmp1 = force_reg (GET_MODE (cmp0), cmp1);
3854 mips_emit_int_relational (code, invert_ptr, target, cmp0, cmp1);
3856 else if (invert_ptr == 0)
3858 rtx inv_target = gen_reg_rtx (GET_MODE (target));
3859 mips_emit_binary (inv_code, inv_target, cmp0, cmp1);
3860 mips_emit_binary (XOR, target, inv_target, const1_rtx);
3864 *invert_ptr = !*invert_ptr;
3865 mips_emit_binary (inv_code, target, cmp0, cmp1);
3870 /* Return a register that is zero iff CMP0 and CMP1 are equal.
3871 The register will have the same mode as CMP0. */
3874 mips_zero_if_equal (rtx cmp0, rtx cmp1)
3876 if (cmp1 == const0_rtx)
3879 if (uns_arith_operand (cmp1, VOIDmode))
3880 return expand_binop (GET_MODE (cmp0), xor_optab,
3881 cmp0, cmp1, 0, 0, OPTAB_DIRECT);
3883 return expand_binop (GET_MODE (cmp0), sub_optab,
3884 cmp0, cmp1, 0, 0, OPTAB_DIRECT);
3887 /* Convert *CODE into a code that can be used in a floating-point
3888 scc instruction (c.<cond>.<fmt>). Return true if the values of
3889 the condition code registers will be inverted, with 0 indicating
3890 that the condition holds. */
3893 mips_reverse_fp_cond_p (enum rtx_code *code)
3900 *code = reverse_condition_maybe_unordered (*code);
3908 /* Convert a comparison into something that can be used in a branch or
3909 conditional move. cmp_operands[0] and cmp_operands[1] are the values
3910 being compared and *CODE is the code used to compare them.
3912 Update *CODE, *OP0 and *OP1 so that they describe the final comparison.
3913 If NEED_EQ_NE_P, then only EQ/NE comparisons against zero are possible,
3914 otherwise any standard branch condition can be used. The standard branch
3917 - EQ/NE between two registers.
3918 - any comparison between a register and zero. */
3921 mips_emit_compare (enum rtx_code *code, rtx *op0, rtx *op1, bool need_eq_ne_p)
3923 if (GET_MODE_CLASS (GET_MODE (cmp_operands[0])) == MODE_INT)
3925 if (!need_eq_ne_p && cmp_operands[1] == const0_rtx)
3927 *op0 = cmp_operands[0];
3928 *op1 = cmp_operands[1];
3930 else if (*code == EQ || *code == NE)
3934 *op0 = mips_zero_if_equal (cmp_operands[0], cmp_operands[1]);
3939 *op0 = cmp_operands[0];
3940 *op1 = force_reg (GET_MODE (*op0), cmp_operands[1]);
3945 /* The comparison needs a separate scc instruction. Store the
3946 result of the scc in *OP0 and compare it against zero. */
3947 bool invert = false;
3948 *op0 = gen_reg_rtx (GET_MODE (cmp_operands[0]));
3950 mips_emit_int_relational (*code, &invert, *op0,
3951 cmp_operands[0], cmp_operands[1]);
3952 *code = (invert ? EQ : NE);
3955 else if (ALL_FIXED_POINT_MODE_P (GET_MODE (cmp_operands[0])))
3957 *op0 = gen_rtx_REG (CCDSPmode, CCDSP_CC_REGNUM);
3958 mips_emit_binary (*code, *op0, cmp_operands[0], cmp_operands[1]);
3964 enum rtx_code cmp_code;
3966 /* Floating-point tests use a separate c.cond.fmt comparison to
3967 set a condition code register. The branch or conditional move
3968 will then compare that register against zero.
3970 Set CMP_CODE to the code of the comparison instruction and
3971 *CODE to the code that the branch or move should use. */
3973 *code = mips_reverse_fp_cond_p (&cmp_code) ? EQ : NE;
3975 ? gen_reg_rtx (CCmode)
3976 : gen_rtx_REG (CCmode, FPSW_REGNUM));
3978 mips_emit_binary (cmp_code, *op0, cmp_operands[0], cmp_operands[1]);
3982 /* Try comparing cmp_operands[0] and cmp_operands[1] using rtl code CODE.
3983 Store the result in TARGET and return true if successful.
3985 On 64-bit targets, TARGET may be wider than cmp_operands[0]. */
3988 mips_emit_scc (enum rtx_code code, rtx target)
3990 if (GET_MODE_CLASS (GET_MODE (cmp_operands[0])) != MODE_INT)
3993 target = gen_lowpart (GET_MODE (cmp_operands[0]), target);
3994 if (code == EQ || code == NE)
3996 rtx zie = mips_zero_if_equal (cmp_operands[0], cmp_operands[1]);
3997 mips_emit_binary (code, target, zie, const0_rtx);
4000 mips_emit_int_relational (code, 0, target,
4001 cmp_operands[0], cmp_operands[1]);
4005 /* Emit the common code for doing conditional branches.
4006 operand[0] is the label to jump to.
4007 The comparison operands are saved away by cmp{si,di,sf,df}. */
4010 gen_conditional_branch (rtx *operands, enum rtx_code code)
4012 rtx op0, op1, condition;
4014 mips_emit_compare (&code, &op0, &op1, TARGET_MIPS16);
4015 condition = gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4016 emit_jump_insn (gen_condjump (condition, operands[0]));
4021 (set temp (COND:CCV2 CMP_OP0 CMP_OP1))
4022 (set DEST (unspec [TRUE_SRC FALSE_SRC temp] UNSPEC_MOVE_TF_PS)) */
4025 mips_expand_vcondv2sf (rtx dest, rtx true_src, rtx false_src,
4026 enum rtx_code cond, rtx cmp_op0, rtx cmp_op1)
4031 reversed_p = mips_reverse_fp_cond_p (&cond);
4032 cmp_result = gen_reg_rtx (CCV2mode);
4033 emit_insn (gen_scc_ps (cmp_result,
4034 gen_rtx_fmt_ee (cond, VOIDmode, cmp_op0, cmp_op1)));
4036 emit_insn (gen_mips_cond_move_tf_ps (dest, false_src, true_src,
4039 emit_insn (gen_mips_cond_move_tf_ps (dest, true_src, false_src,
4043 /* Emit the common code for conditional moves. OPERANDS is the array
4044 of operands passed to the conditional move define_expand. */
4047 gen_conditional_move (rtx *operands)
4052 code = GET_CODE (operands[1]);
4053 mips_emit_compare (&code, &op0, &op1, true);
4054 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4055 gen_rtx_IF_THEN_ELSE (GET_MODE (operands[0]),
4056 gen_rtx_fmt_ee (code,
4059 operands[2], operands[3])));
4062 /* Emit a conditional trap. OPERANDS is the array of operands passed to
4063 the conditional_trap expander. */
4066 mips_gen_conditional_trap (rtx *operands)
4069 enum rtx_code cmp_code = GET_CODE (operands[0]);
4070 enum machine_mode mode = GET_MODE (cmp_operands[0]);
4072 /* MIPS conditional trap machine instructions don't have GT or LE
4073 flavors, so we must invert the comparison and convert to LT and
4074 GE, respectively. */
4077 case GT: cmp_code = LT; break;
4078 case LE: cmp_code = GE; break;
4079 case GTU: cmp_code = LTU; break;
4080 case LEU: cmp_code = GEU; break;
4083 if (cmp_code == GET_CODE (operands[0]))
4085 op0 = cmp_operands[0];
4086 op1 = cmp_operands[1];
4090 op0 = cmp_operands[1];
4091 op1 = cmp_operands[0];
4093 op0 = force_reg (mode, op0);
4094 if (!arith_operand (op1, mode))
4095 op1 = force_reg (mode, op1);
4097 emit_insn (gen_rtx_TRAP_IF (VOIDmode,
4098 gen_rtx_fmt_ee (cmp_code, mode, op0, op1),
4102 /* Return true if function DECL is a MIPS16 function. Return the ambient
4103 setting if DECL is null. */
4106 mips_use_mips16_mode_p (tree decl)
4110 /* Nested functions must use the same frame pointer as their
4111 parent and must therefore use the same ISA mode. */
4112 tree parent = decl_function_context (decl);
4115 if (mips_mips16_decl_p (decl))
4117 if (mips_nomips16_decl_p (decl))
4120 return mips_base_mips16;
4123 /* Return true if calls to X can use R_MIPS_CALL* relocations. */
4126 mips_ok_for_lazy_binding_p (rtx x)
4128 return (TARGET_USE_GOT
4129 && GET_CODE (x) == SYMBOL_REF
4130 && !mips_symbol_binds_local_p (x));
4133 /* Load function address ADDR into register DEST. SIBCALL_P is true
4134 if the address is needed for a sibling call. */
4137 mips_load_call_address (rtx dest, rtx addr, int sibcall_p)
4139 /* If we're generating PIC, and this call is to a global function,
4140 try to allow its address to be resolved lazily. This isn't
4141 possible if TARGET_CALL_SAVED_GP since the value of $gp on entry
4142 to the stub would be our caller's gp, not ours. */
4143 if (TARGET_EXPLICIT_RELOCS
4144 && !(sibcall_p && TARGET_CALL_SAVED_GP)
4145 && mips_ok_for_lazy_binding_p (addr))
4147 rtx high, lo_sum_symbol;
4149 high = mips_unspec_offset_high (dest, pic_offset_table_rtx,
4150 addr, SYMBOL_GOTOFF_CALL);
4151 lo_sum_symbol = mips_unspec_address (addr, SYMBOL_GOTOFF_CALL);
4152 if (Pmode == SImode)
4153 emit_insn (gen_load_callsi (dest, high, lo_sum_symbol));
4155 emit_insn (gen_load_calldi (dest, high, lo_sum_symbol));
4158 mips_emit_move (dest, addr);
4162 /* Expand a call or call_value instruction. RESULT is where the
4163 result will go (null for calls), ADDR is the address of the
4164 function, ARGS_SIZE is the size of the arguments and AUX is
4165 the value passed to us by mips_function_arg. SIBCALL_P is true
4166 if we are expanding a sibling call, false if we're expanding
4170 mips_expand_call (rtx result, rtx addr, rtx args_size, rtx aux, int sibcall_p)
4172 rtx orig_addr, pattern, insn;
4175 if (!call_insn_operand (addr, VOIDmode))
4177 addr = gen_reg_rtx (Pmode);
4178 mips_load_call_address (addr, orig_addr, sibcall_p);
4182 && TARGET_HARD_FLOAT_ABI
4183 && build_mips16_call_stub (result, addr, args_size,
4184 aux == 0 ? 0 : (int) GET_MODE (aux)))
4188 pattern = (sibcall_p
4189 ? gen_sibcall_internal (addr, args_size)
4190 : gen_call_internal (addr, args_size));
4191 else if (GET_CODE (result) == PARALLEL && XVECLEN (result, 0) == 2)
4195 reg1 = XEXP (XVECEXP (result, 0, 0), 0);
4196 reg2 = XEXP (XVECEXP (result, 0, 1), 0);
4199 ? gen_sibcall_value_multiple_internal (reg1, addr, args_size, reg2)
4200 : gen_call_value_multiple_internal (reg1, addr, args_size, reg2));
4203 pattern = (sibcall_p
4204 ? gen_sibcall_value_internal (result, addr, args_size)
4205 : gen_call_value_internal (result, addr, args_size));
4207 insn = emit_call_insn (pattern);
4209 /* Lazy-binding stubs require $gp to be valid on entry. */
4210 if (mips_ok_for_lazy_binding_p (orig_addr))
4211 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
4215 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
4218 mips_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
4220 if (!TARGET_SIBCALLS)
4223 /* We can't do a sibcall if the called function is a MIPS16 function
4224 because there is no direct "jx" instruction equivalent to "jalx" to
4225 switch the ISA mode. */
4226 if (mips_use_mips16_mode_p (decl))
4229 /* ...and when -minterlink-mips16 is in effect, assume that external
4230 functions could be MIPS16 ones unless an attribute explicitly
4231 tells us otherwise. We only care about cases where the sibling
4232 and normal calls would both be direct. */
4233 if (TARGET_INTERLINK_MIPS16
4235 && DECL_EXTERNAL (decl)
4236 && !mips_nomips16_decl_p (decl)
4237 && const_call_insn_operand (XEXP (DECL_RTL (decl), 0), VOIDmode))
4244 /* Emit code to move general operand SRC into condition-code
4245 register DEST. SCRATCH is a scratch TFmode float register.
4252 where FP1 and FP2 are single-precision float registers
4253 taken from SCRATCH. */
4256 mips_emit_fcc_reload (rtx dest, rtx src, rtx scratch)
4260 /* Change the source to SFmode. */
4262 src = adjust_address (src, SFmode, 0);
4263 else if (REG_P (src) || GET_CODE (src) == SUBREG)
4264 src = gen_rtx_REG (SFmode, true_regnum (src));
4266 fp1 = gen_rtx_REG (SFmode, REGNO (scratch));
4267 fp2 = gen_rtx_REG (SFmode, REGNO (scratch) + MAX_FPRS_PER_FMT);
4269 mips_emit_move (copy_rtx (fp1), src);
4270 mips_emit_move (copy_rtx (fp2), CONST0_RTX (SFmode));
4271 emit_insn (gen_slt_sf (dest, fp2, fp1));
4274 /* Emit code to change the current function's return address to
4275 ADDRESS. SCRATCH is available as a scratch register, if needed.
4276 ADDRESS and SCRATCH are both word-mode GPRs. */
4279 mips_set_return_address (rtx address, rtx scratch)
4283 compute_frame_size (get_frame_size ());
4284 gcc_assert ((cfun->machine->frame.mask >> 31) & 1);
4285 slot_address = mips_add_offset (scratch, stack_pointer_rtx,
4286 cfun->machine->frame.gp_sp_offset);
4288 mips_emit_move (gen_rtx_MEM (GET_MODE (address), slot_address), address);
4291 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
4292 Assume that the areas do not overlap. */
4295 mips_block_move_straight (rtx dest, rtx src, HOST_WIDE_INT length)
4297 HOST_WIDE_INT offset, delta;
4298 unsigned HOST_WIDE_INT bits;
4300 enum machine_mode mode;
4303 /* Work out how many bits to move at a time. If both operands have
4304 half-word alignment, it is usually better to move in half words.
4305 For instance, lh/lh/sh/sh is usually better than lwl/lwr/swl/swr
4306 and lw/lw/sw/sw is usually better than ldl/ldr/sdl/sdr.
4307 Otherwise move word-sized chunks. */
4308 if (MEM_ALIGN (src) == BITS_PER_WORD / 2
4309 && MEM_ALIGN (dest) == BITS_PER_WORD / 2)
4310 bits = BITS_PER_WORD / 2;
4312 bits = BITS_PER_WORD;
4314 mode = mode_for_size (bits, MODE_INT, 0);
4315 delta = bits / BITS_PER_UNIT;
4317 /* Allocate a buffer for the temporary registers. */
4318 regs = alloca (sizeof (rtx) * length / delta);
4320 /* Load as many BITS-sized chunks as possible. Use a normal load if
4321 the source has enough alignment, otherwise use left/right pairs. */
4322 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
4324 regs[i] = gen_reg_rtx (mode);
4325 if (MEM_ALIGN (src) >= bits)
4326 mips_emit_move (regs[i], adjust_address (src, mode, offset));
4329 rtx part = adjust_address (src, BLKmode, offset);
4330 if (!mips_expand_unaligned_load (regs[i], part, bits, 0))
4335 /* Copy the chunks to the destination. */
4336 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
4337 if (MEM_ALIGN (dest) >= bits)
4338 mips_emit_move (adjust_address (dest, mode, offset), regs[i]);
4341 rtx part = adjust_address (dest, BLKmode, offset);
4342 if (!mips_expand_unaligned_store (part, regs[i], bits, 0))
4346 /* Mop up any left-over bytes. */
4347 if (offset < length)
4349 src = adjust_address (src, BLKmode, offset);
4350 dest = adjust_address (dest, BLKmode, offset);
4351 move_by_pieces (dest, src, length - offset,
4352 MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
4356 #define MAX_MOVE_REGS 4
4357 #define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD)
4360 /* Helper function for doing a loop-based block operation on memory
4361 reference MEM. Each iteration of the loop will operate on LENGTH
4364 Create a new base register for use within the loop and point it to
4365 the start of MEM. Create a new memory reference that uses this
4366 register. Store them in *LOOP_REG and *LOOP_MEM respectively. */
4369 mips_adjust_block_mem (rtx mem, HOST_WIDE_INT length,
4370 rtx *loop_reg, rtx *loop_mem)
4372 *loop_reg = copy_addr_to_reg (XEXP (mem, 0));
4374 /* Although the new mem does not refer to a known location,
4375 it does keep up to LENGTH bytes of alignment. */
4376 *loop_mem = change_address (mem, BLKmode, *loop_reg);
4377 set_mem_align (*loop_mem, MIN (MEM_ALIGN (mem), length * BITS_PER_UNIT));
4381 /* Move LENGTH bytes from SRC to DEST using a loop that moves MAX_MOVE_BYTES
4382 per iteration. LENGTH must be at least MAX_MOVE_BYTES. Assume that the
4383 memory regions do not overlap. */
4386 mips_block_move_loop (rtx dest, rtx src, HOST_WIDE_INT length)
4388 rtx label, src_reg, dest_reg, final_src;
4389 HOST_WIDE_INT leftover;
4391 leftover = length % MAX_MOVE_BYTES;
4394 /* Create registers and memory references for use within the loop. */
4395 mips_adjust_block_mem (src, MAX_MOVE_BYTES, &src_reg, &src);
4396 mips_adjust_block_mem (dest, MAX_MOVE_BYTES, &dest_reg, &dest);
4398 /* Calculate the value that SRC_REG should have after the last iteration
4400 final_src = expand_simple_binop (Pmode, PLUS, src_reg, GEN_INT (length),
4403 /* Emit the start of the loop. */
4404 label = gen_label_rtx ();
4407 /* Emit the loop body. */
4408 mips_block_move_straight (dest, src, MAX_MOVE_BYTES);
4410 /* Move on to the next block. */
4411 mips_emit_move (src_reg, plus_constant (src_reg, MAX_MOVE_BYTES));
4412 mips_emit_move (dest_reg, plus_constant (dest_reg, MAX_MOVE_BYTES));
4414 /* Emit the loop condition. */
4415 if (Pmode == DImode)
4416 emit_insn (gen_cmpdi (src_reg, final_src));
4418 emit_insn (gen_cmpsi (src_reg, final_src));
4419 emit_jump_insn (gen_bne (label));
4421 /* Mop up any left-over bytes. */
4423 mips_block_move_straight (dest, src, leftover);
4427 /* Expand a loop of synci insns for the address range [BEGIN, END). */
4430 mips_expand_synci_loop (rtx begin, rtx end)
4432 rtx inc, label, cmp, cmp_result;
4434 /* Load INC with the cache line size (rdhwr INC,$1). */
4435 inc = gen_reg_rtx (SImode);
4436 emit_insn (gen_rdhwr (inc, const1_rtx));
4438 /* Loop back to here. */
4439 label = gen_label_rtx ();
4442 emit_insn (gen_synci (begin));
4444 cmp = gen_reg_rtx (Pmode);
4445 mips_emit_binary (GTU, cmp, begin, end);
4447 mips_emit_binary (PLUS, begin, begin, inc);
4449 cmp_result = gen_rtx_EQ (VOIDmode, cmp, const0_rtx);
4450 emit_jump_insn (gen_condjump (cmp_result, label));
4453 /* Expand a movmemsi instruction. */
4456 mips_expand_block_move (rtx dest, rtx src, rtx length)
4458 if (GET_CODE (length) == CONST_INT)
4460 if (INTVAL (length) <= 2 * MAX_MOVE_BYTES)
4462 mips_block_move_straight (dest, src, INTVAL (length));
4467 mips_block_move_loop (dest, src, INTVAL (length));
4474 /* Argument support functions. */
4476 /* Initialize CUMULATIVE_ARGS for a function. */
4479 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4480 rtx libname ATTRIBUTE_UNUSED)
4482 static CUMULATIVE_ARGS zero_cum;
4483 tree param, next_param;
4486 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4488 /* Determine if this function has variable arguments. This is
4489 indicated by the last argument being 'void_type_mode' if there
4490 are no variable arguments. The standard MIPS calling sequence
4491 passes all arguments in the general purpose registers in this case. */
4493 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
4494 param != 0; param = next_param)
4496 next_param = TREE_CHAIN (param);
4497 if (next_param == 0 && TREE_VALUE (param) != void_type_node)
4498 cum->gp_reg_found = 1;
4503 /* Fill INFO with information about a single argument. CUM is the
4504 cumulative state for earlier arguments. MODE is the mode of this
4505 argument and TYPE is its type (if known). NAMED is true if this
4506 is a named (fixed) argument rather than a variable one. */
4509 mips_arg_info (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
4510 tree type, int named, struct mips_arg_info *info)
4512 bool doubleword_aligned_p;
4513 unsigned int num_bytes, num_words, max_regs;
4515 /* Work out the size of the argument. */
4516 num_bytes = type ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
4517 num_words = (num_bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4519 /* Decide whether it should go in a floating-point register, assuming
4520 one is free. Later code checks for availability.
4522 The checks against UNITS_PER_FPVALUE handle the soft-float and
4523 single-float cases. */
4527 /* The EABI conventions have traditionally been defined in terms
4528 of TYPE_MODE, regardless of the actual type. */
4529 info->fpr_p = ((GET_MODE_CLASS (mode) == MODE_FLOAT
4530 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
4531 && GET_MODE_SIZE (mode) <= UNITS_PER_FPVALUE);
4536 /* Only leading floating-point scalars are passed in
4537 floating-point registers. We also handle vector floats the same
4538 say, which is OK because they are not covered by the standard ABI. */
4539 info->fpr_p = (!cum->gp_reg_found
4540 && cum->arg_number < 2
4541 && (type == 0 || SCALAR_FLOAT_TYPE_P (type)
4542 || VECTOR_FLOAT_TYPE_P (type))
4543 && (GET_MODE_CLASS (mode) == MODE_FLOAT
4544 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
4545 && GET_MODE_SIZE (mode) <= UNITS_PER_FPVALUE);
4550 /* Scalar and complex floating-point types are passed in
4551 floating-point registers. */
4552 info->fpr_p = (named
4553 && (type == 0 || FLOAT_TYPE_P (type))
4554 && (GET_MODE_CLASS (mode) == MODE_FLOAT
4555 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
4556 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
4557 && GET_MODE_UNIT_SIZE (mode) <= UNITS_PER_FPVALUE);
4559 /* ??? According to the ABI documentation, the real and imaginary
4560 parts of complex floats should be passed in individual registers.
4561 The real and imaginary parts of stack arguments are supposed
4562 to be contiguous and there should be an extra word of padding
4565 This has two problems. First, it makes it impossible to use a
4566 single "void *" va_list type, since register and stack arguments
4567 are passed differently. (At the time of writing, MIPSpro cannot
4568 handle complex float varargs correctly.) Second, it's unclear
4569 what should happen when there is only one register free.
4571 For now, we assume that named complex floats should go into FPRs
4572 if there are two FPRs free, otherwise they should be passed in the
4573 same way as a struct containing two floats. */
4575 && GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
4576 && GET_MODE_UNIT_SIZE (mode) < UNITS_PER_FPVALUE)
4578 if (cum->num_gprs >= MAX_ARGS_IN_REGISTERS - 1)
4579 info->fpr_p = false;
4589 /* See whether the argument has doubleword alignment. */
4590 doubleword_aligned_p = FUNCTION_ARG_BOUNDARY (mode, type) > BITS_PER_WORD;
4592 /* Set REG_OFFSET to the register count we're interested in.
4593 The EABI allocates the floating-point registers separately,
4594 but the other ABIs allocate them like integer registers. */
4595 info->reg_offset = (mips_abi == ABI_EABI && info->fpr_p
4599 /* Advance to an even register if the argument is doubleword-aligned. */
4600 if (doubleword_aligned_p)
4601 info->reg_offset += info->reg_offset & 1;
4603 /* Work out the offset of a stack argument. */
4604 info->stack_offset = cum->stack_words;
4605 if (doubleword_aligned_p)
4606 info->stack_offset += info->stack_offset & 1;
4608 max_regs = MAX_ARGS_IN_REGISTERS - info->reg_offset;
4610 /* Partition the argument between registers and stack. */
4611 info->reg_words = MIN (num_words, max_regs);
4612 info->stack_words = num_words - info->reg_words;
4616 /* INFO describes an argument that is passed in a single-register value.
4617 Return the register it uses, assuming that FPRs are available if
4621 mips_arg_regno (const struct mips_arg_info *info, bool hard_float_p)
4623 if (!info->fpr_p || !hard_float_p)
4624 return GP_ARG_FIRST + info->reg_offset;
4625 else if (mips_abi == ABI_32 && TARGET_DOUBLE_FLOAT && info->reg_offset > 0)
4626 /* In o32, the second argument is always passed in $f14
4627 for TARGET_DOUBLE_FLOAT, regardless of whether the
4628 first argument was a word or doubleword. */
4629 return FP_ARG_FIRST + 2;
4631 return FP_ARG_FIRST + info->reg_offset;
4634 /* Implement FUNCTION_ARG_ADVANCE. */
4637 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4638 tree type, int named)
4640 struct mips_arg_info info;
4642 mips_arg_info (cum, mode, type, named, &info);
4645 cum->gp_reg_found = true;
4647 /* See the comment above the cumulative args structure in mips.h
4648 for an explanation of what this code does. It assumes the O32
4649 ABI, which passes at most 2 arguments in float registers. */
4650 if (cum->arg_number < 2 && info.fpr_p)
4651 cum->fp_code += (mode == SFmode ? 1 : 2) << (cum->arg_number * 2);
4653 if (mips_abi != ABI_EABI || !info.fpr_p)
4654 cum->num_gprs = info.reg_offset + info.reg_words;
4655 else if (info.reg_words > 0)
4656 cum->num_fprs += MAX_FPRS_PER_FMT;
4658 if (info.stack_words > 0)
4659 cum->stack_words = info.stack_offset + info.stack_words;
4664 /* Implement FUNCTION_ARG. */
4667 function_arg (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
4668 tree type, int named)
4670 struct mips_arg_info info;
4672 /* We will be called with a mode of VOIDmode after the last argument
4673 has been seen. Whatever we return will be passed to the call
4674 insn. If we need a mips16 fp_code, return a REG with the code
4675 stored as the mode. */
4676 if (mode == VOIDmode)
4678 if (TARGET_MIPS16 && cum->fp_code != 0)
4679 return gen_rtx_REG ((enum machine_mode) cum->fp_code, 0);
4685 mips_arg_info (cum, mode, type, named, &info);
4687 /* Return straight away if the whole argument is passed on the stack. */
4688 if (info.reg_offset == MAX_ARGS_IN_REGISTERS)
4692 && TREE_CODE (type) == RECORD_TYPE
4694 && TYPE_SIZE_UNIT (type)
4695 && host_integerp (TYPE_SIZE_UNIT (type), 1)
4698 /* The Irix 6 n32/n64 ABIs say that if any 64-bit chunk of the
4699 structure contains a double in its entirety, then that 64-bit
4700 chunk is passed in a floating point register. */
4703 /* First check to see if there is any such field. */
4704 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4705 if (TREE_CODE (field) == FIELD_DECL
4706 && TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
4707 && TYPE_PRECISION (TREE_TYPE (field)) == BITS_PER_WORD
4708 && host_integerp (bit_position (field), 0)
4709 && int_bit_position (field) % BITS_PER_WORD == 0)
4714 /* Now handle the special case by returning a PARALLEL
4715 indicating where each 64-bit chunk goes. INFO.REG_WORDS
4716 chunks are passed in registers. */
4718 HOST_WIDE_INT bitpos;
4721 /* assign_parms checks the mode of ENTRY_PARM, so we must
4722 use the actual mode here. */
4723 ret = gen_rtx_PARALLEL (mode, rtvec_alloc (info.reg_words));
4726 field = TYPE_FIELDS (type);
4727 for (i = 0; i < info.reg_words; i++)
4731 for (; field; field = TREE_CHAIN (field))
4732 if (TREE_CODE (field) == FIELD_DECL
4733 && int_bit_position (field) >= bitpos)
4737 && int_bit_position (field) == bitpos
4738 && TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
4739 && !TARGET_SOFT_FLOAT
4740 && TYPE_PRECISION (TREE_TYPE (field)) == BITS_PER_WORD)
4741 reg = gen_rtx_REG (DFmode, FP_ARG_FIRST + info.reg_offset + i);
4743 reg = gen_rtx_REG (DImode, GP_ARG_FIRST + info.reg_offset + i);
4746 = gen_rtx_EXPR_LIST (VOIDmode, reg,
4747 GEN_INT (bitpos / BITS_PER_UNIT));
4749 bitpos += BITS_PER_WORD;
4755 /* Handle the n32/n64 conventions for passing complex floating-point
4756 arguments in FPR pairs. The real part goes in the lower register
4757 and the imaginary part goes in the upper register. */
4760 && GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
4763 enum machine_mode inner;
4766 inner = GET_MODE_INNER (mode);
4767 reg = FP_ARG_FIRST + info.reg_offset;
4768 if (info.reg_words * UNITS_PER_WORD == GET_MODE_SIZE (inner))
4770 /* Real part in registers, imaginary part on stack. */
4771 gcc_assert (info.stack_words == info.reg_words);
4772 return gen_rtx_REG (inner, reg);
4776 gcc_assert (info.stack_words == 0);
4777 real = gen_rtx_EXPR_LIST (VOIDmode,
4778 gen_rtx_REG (inner, reg),
4780 imag = gen_rtx_EXPR_LIST (VOIDmode,
4782 reg + info.reg_words / 2),
4783 GEN_INT (GET_MODE_SIZE (inner)));
4784 return gen_rtx_PARALLEL (mode, gen_rtvec (2, real, imag));
4788 return gen_rtx_REG (mode, mips_arg_regno (&info, TARGET_HARD_FLOAT));
4792 /* Implement TARGET_ARG_PARTIAL_BYTES. */
4795 mips_arg_partial_bytes (CUMULATIVE_ARGS *cum,
4796 enum machine_mode mode, tree type, bool named)
4798 struct mips_arg_info info;
4800 mips_arg_info (cum, mode, type, named, &info);
4801 return info.stack_words > 0 ? info.reg_words * UNITS_PER_WORD : 0;
4805 /* Implement FUNCTION_ARG_BOUNDARY. Every parameter gets at least
4806 PARM_BOUNDARY bits of alignment, but will be given anything up
4807 to STACK_BOUNDARY bits if the type requires it. */
4810 function_arg_boundary (enum machine_mode mode, tree type)
4812 unsigned int alignment;
4814 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
4815 if (alignment < PARM_BOUNDARY)
4816 alignment = PARM_BOUNDARY;
4817 if (alignment > STACK_BOUNDARY)
4818 alignment = STACK_BOUNDARY;
4822 /* Return true if FUNCTION_ARG_PADDING (MODE, TYPE) should return
4823 upward rather than downward. In other words, return true if the
4824 first byte of the stack slot has useful data, false if the last
4828 mips_pad_arg_upward (enum machine_mode mode, const_tree type)
4830 /* On little-endian targets, the first byte of every stack argument
4831 is passed in the first byte of the stack slot. */
4832 if (!BYTES_BIG_ENDIAN)
4835 /* Otherwise, integral types are padded downward: the last byte of a
4836 stack argument is passed in the last byte of the stack slot. */
4838 ? (INTEGRAL_TYPE_P (type)
4839 || POINTER_TYPE_P (type)
4840 || FIXED_POINT_TYPE_P (type))
4841 : (GET_MODE_CLASS (mode) == MODE_INT
4842 || ALL_SCALAR_FIXED_POINT_MODE_P (mode)))
4845 /* Big-endian o64 pads floating-point arguments downward. */
4846 if (mips_abi == ABI_O64)
4847 if (type != 0 ? FLOAT_TYPE_P (type) : GET_MODE_CLASS (mode) == MODE_FLOAT)
4850 /* Other types are padded upward for o32, o64, n32 and n64. */
4851 if (mips_abi != ABI_EABI)
4854 /* Arguments smaller than a stack slot are padded downward. */
4855 if (mode != BLKmode)
4856 return (GET_MODE_BITSIZE (mode) >= PARM_BOUNDARY);
4858 return (int_size_in_bytes (type) >= (PARM_BOUNDARY / BITS_PER_UNIT));
4862 /* Likewise BLOCK_REG_PADDING (MODE, TYPE, ...). Return !BYTES_BIG_ENDIAN
4863 if the least significant byte of the register has useful data. Return
4864 the opposite if the most significant byte does. */
4867 mips_pad_reg_upward (enum machine_mode mode, tree type)
4869 /* No shifting is required for floating-point arguments. */
4870 if (type != 0 ? FLOAT_TYPE_P (type) : GET_MODE_CLASS (mode) == MODE_FLOAT)
4871 return !BYTES_BIG_ENDIAN;
4873 /* Otherwise, apply the same padding to register arguments as we do
4874 to stack arguments. */
4875 return mips_pad_arg_upward (mode, type);
4879 mips_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4880 tree type, int *pretend_size ATTRIBUTE_UNUSED,
4883 CUMULATIVE_ARGS local_cum;
4884 int gp_saved, fp_saved;
4886 /* The caller has advanced CUM up to, but not beyond, the last named
4887 argument. Advance a local copy of CUM past the last "real" named
4888 argument, to find out how many registers are left over. */
4891 FUNCTION_ARG_ADVANCE (local_cum, mode, type, 1);
4893 /* Found out how many registers we need to save. */
4894 gp_saved = MAX_ARGS_IN_REGISTERS - local_cum.num_gprs;
4895 fp_saved = (EABI_FLOAT_VARARGS_P
4896 ? MAX_ARGS_IN_REGISTERS - local_cum.num_fprs
4905 ptr = plus_constant (virtual_incoming_args_rtx,
4906 REG_PARM_STACK_SPACE (cfun->decl)
4907 - gp_saved * UNITS_PER_WORD);
4908 mem = gen_rtx_MEM (BLKmode, ptr);
4909 set_mem_alias_set (mem, get_varargs_alias_set ());
4911 move_block_from_reg (local_cum.num_gprs + GP_ARG_FIRST,
4916 /* We can't use move_block_from_reg, because it will use
4918 enum machine_mode mode;
4921 /* Set OFF to the offset from virtual_incoming_args_rtx of
4922 the first float register. The FP save area lies below
4923 the integer one, and is aligned to UNITS_PER_FPVALUE bytes. */
4924 off = -gp_saved * UNITS_PER_WORD;
4925 off &= ~(UNITS_PER_FPVALUE - 1);
4926 off -= fp_saved * UNITS_PER_FPREG;
4928 mode = TARGET_SINGLE_FLOAT ? SFmode : DFmode;
4930 for (i = local_cum.num_fprs; i < MAX_ARGS_IN_REGISTERS;
4931 i += MAX_FPRS_PER_FMT)
4935 ptr = plus_constant (virtual_incoming_args_rtx, off);
4936 mem = gen_rtx_MEM (mode, ptr);
4937 set_mem_alias_set (mem, get_varargs_alias_set ());
4938 mips_emit_move (mem, gen_rtx_REG (mode, FP_ARG_FIRST + i));
4939 off += UNITS_PER_HWFPVALUE;
4943 if (REG_PARM_STACK_SPACE (cfun->decl) == 0)
4944 cfun->machine->varargs_size = (gp_saved * UNITS_PER_WORD
4945 + fp_saved * UNITS_PER_FPREG);
4948 /* Create the va_list data type.
4949 We keep 3 pointers, and two offsets.
4950 Two pointers are to the overflow area, which starts at the CFA.
4951 One of these is constant, for addressing into the GPR save area below it.
4952 The other is advanced up the stack through the overflow region.
4953 The third pointer is to the GPR save area. Since the FPR save area
4954 is just below it, we can address FPR slots off this pointer.
4955 We also keep two one-byte offsets, which are to be subtracted from the
4956 constant pointers to yield addresses in the GPR and FPR save areas.
4957 These are downcounted as float or non-float arguments are used,
4958 and when they get to zero, the argument must be obtained from the
4960 If !EABI_FLOAT_VARARGS_P, then no FPR save area exists, and a single
4961 pointer is enough. It's started at the GPR save area, and is
4963 Note that the GPR save area is not constant size, due to optimization
4964 in the prologue. Hence, we can't use a design with two pointers
4965 and two offsets, although we could have designed this with two pointers
4966 and three offsets. */
4969 mips_build_builtin_va_list (void)
4971 if (EABI_FLOAT_VARARGS_P)
4973 tree f_ovfl, f_gtop, f_ftop, f_goff, f_foff, f_res, record;
4976 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4978 f_ovfl = build_decl (FIELD_DECL, get_identifier ("__overflow_argptr"),
4980 f_gtop = build_decl (FIELD_DECL, get_identifier ("__gpr_top"),
4982 f_ftop = build_decl (FIELD_DECL, get_identifier ("__fpr_top"),
4984 f_goff = build_decl (FIELD_DECL, get_identifier ("__gpr_offset"),
4985 unsigned_char_type_node);
4986 f_foff = build_decl (FIELD_DECL, get_identifier ("__fpr_offset"),
4987 unsigned_char_type_node);
4988 /* Explicitly pad to the size of a pointer, so that -Wpadded won't
4989 warn on every user file. */
4990 index = build_int_cst (NULL_TREE, GET_MODE_SIZE (ptr_mode) - 2 - 1);
4991 array = build_array_type (unsigned_char_type_node,
4992 build_index_type (index));
4993 f_res = build_decl (FIELD_DECL, get_identifier ("__reserved"), array);
4995 DECL_FIELD_CONTEXT (f_ovfl) = record;
4996 DECL_FIELD_CONTEXT (f_gtop) = record;
4997 DECL_FIELD_CONTEXT (f_ftop) = record;
4998 DECL_FIELD_CONTEXT (f_goff) = record;
4999 DECL_FIELD_CONTEXT (f_foff) = record;
5000 DECL_FIELD_CONTEXT (f_res) = record;
5002 TYPE_FIELDS (record) = f_ovfl;
5003 TREE_CHAIN (f_ovfl) = f_gtop;
5004 TREE_CHAIN (f_gtop) = f_ftop;
5005 TREE_CHAIN (f_ftop) = f_goff;
5006 TREE_CHAIN (f_goff) = f_foff;
5007 TREE_CHAIN (f_foff) = f_res;
5009 layout_type (record);
5012 else if (TARGET_IRIX && TARGET_IRIX6)
5013 /* On IRIX 6, this type is 'char *'. */
5014 return build_pointer_type (char_type_node);
5016 /* Otherwise, we use 'void *'. */
5017 return ptr_type_node;
5020 /* Implement va_start. */
5023 mips_va_start (tree valist, rtx nextarg)
5025 if (EABI_FLOAT_VARARGS_P)
5027 const CUMULATIVE_ARGS *cum;
5028 tree f_ovfl, f_gtop, f_ftop, f_goff, f_foff;
5029 tree ovfl, gtop, ftop, goff, foff;
5031 int gpr_save_area_size;
5032 int fpr_save_area_size;
5035 cum = ¤t_function_args_info;
5037 = (MAX_ARGS_IN_REGISTERS - cum->num_gprs) * UNITS_PER_WORD;
5039 = (MAX_ARGS_IN_REGISTERS - cum->num_fprs) * UNITS_PER_FPREG;
5041 f_ovfl = TYPE_FIELDS (va_list_type_node);
5042 f_gtop = TREE_CHAIN (f_ovfl);
5043 f_ftop = TREE_CHAIN (f_gtop);
5044 f_goff = TREE_CHAIN (f_ftop);
5045 f_foff = TREE_CHAIN (f_goff);
5047 ovfl = build3 (COMPONENT_REF, TREE_TYPE (f_ovfl), valist, f_ovfl,
5049 gtop = build3 (COMPONENT_REF, TREE_TYPE (f_gtop), valist, f_gtop,
5051 ftop = build3 (COMPONENT_REF, TREE_TYPE (f_ftop), valist, f_ftop,
5053 goff = build3 (COMPONENT_REF, TREE_TYPE (f_goff), valist, f_goff,
5055 foff = build3 (COMPONENT_REF, TREE_TYPE (f_foff), valist, f_foff,
5058 /* Emit code to initialize OVFL, which points to the next varargs
5059 stack argument. CUM->STACK_WORDS gives the number of stack
5060 words used by named arguments. */
5061 t = make_tree (TREE_TYPE (ovfl), virtual_incoming_args_rtx);
5062 if (cum->stack_words > 0)
5063 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovfl), t,
5064 size_int (cum->stack_words * UNITS_PER_WORD));
5065 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovfl), ovfl, t);
5066 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5068 /* Emit code to initialize GTOP, the top of the GPR save area. */
5069 t = make_tree (TREE_TYPE (gtop), virtual_incoming_args_rtx);
5070 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gtop), gtop, t);
5071 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5073 /* Emit code to initialize FTOP, the top of the FPR save area.
5074 This address is gpr_save_area_bytes below GTOP, rounded
5075 down to the next fp-aligned boundary. */
5076 t = make_tree (TREE_TYPE (ftop), virtual_incoming_args_rtx);
5077 fpr_offset = gpr_save_area_size + UNITS_PER_FPVALUE - 1;
5078 fpr_offset &= ~(UNITS_PER_FPVALUE - 1);
5080 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ftop), t,
5081 size_int (-fpr_offset));
5082 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ftop), ftop, t);
5083 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5085 /* Emit code to initialize GOFF, the offset from GTOP of the
5086 next GPR argument. */
5087 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (goff), goff,
5088 build_int_cst (NULL_TREE, gpr_save_area_size));
5089 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5091 /* Likewise emit code to initialize FOFF, the offset from FTOP
5092 of the next FPR argument. */
5093 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (foff), foff,
5094 build_int_cst (NULL_TREE, fpr_save_area_size));
5095 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5099 nextarg = plus_constant (nextarg, -cfun->machine->varargs_size);
5100 std_expand_builtin_va_start (valist, nextarg);
5104 /* Implement va_arg. */
5107 mips_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
5109 HOST_WIDE_INT size, rsize;
5113 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, 0);
5116 type = build_pointer_type (type);
5118 size = int_size_in_bytes (type);
5119 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
5121 if (mips_abi != ABI_EABI || !EABI_FLOAT_VARARGS_P)
5122 addr = std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
5125 /* Not a simple merged stack. */
5127 tree f_ovfl, f_gtop, f_ftop, f_goff, f_foff;
5128 tree ovfl, top, off, align;
5129 HOST_WIDE_INT osize;
5132 f_ovfl = TYPE_FIELDS (va_list_type_node);
5133 f_gtop = TREE_CHAIN (f_ovfl);
5134 f_ftop = TREE_CHAIN (f_gtop);
5135 f_goff = TREE_CHAIN (f_ftop);
5136 f_foff = TREE_CHAIN (f_goff);
5138 /* We maintain separate pointers and offsets for floating-point
5139 and integer arguments, but we need similar code in both cases.
5142 TOP be the top of the register save area;
5143 OFF be the offset from TOP of the next register;
5144 ADDR_RTX be the address of the argument;
5145 RSIZE be the number of bytes used to store the argument
5146 when it's in the register save area;
5147 OSIZE be the number of bytes used to store it when it's
5148 in the stack overflow area; and
5149 PADDING be (BYTES_BIG_ENDIAN ? OSIZE - RSIZE : 0)
5151 The code we want is:
5153 1: off &= -rsize; // round down
5156 4: addr_rtx = top - off;
5161 9: ovfl += ((intptr_t) ovfl + osize - 1) & -osize;
5162 10: addr_rtx = ovfl + PADDING;
5166 [1] and [9] can sometimes be optimized away. */
5168 ovfl = build3 (COMPONENT_REF, TREE_TYPE (f_ovfl), valist, f_ovfl,
5171 if (GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT
5172 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_FPVALUE)
5174 top = build3 (COMPONENT_REF, TREE_TYPE (f_ftop), valist, f_ftop,
5176 off = build3 (COMPONENT_REF, TREE_TYPE (f_foff), valist, f_foff,
5179 /* When floating-point registers are saved to the stack,
5180 each one will take up UNITS_PER_HWFPVALUE bytes, regardless
5181 of the float's precision. */
5182 rsize = UNITS_PER_HWFPVALUE;
5184 /* Overflow arguments are padded to UNITS_PER_WORD bytes
5185 (= PARM_BOUNDARY bits). This can be different from RSIZE
5188 (1) On 32-bit targets when TYPE is a structure such as:
5190 struct s { float f; };
5192 Such structures are passed in paired FPRs, so RSIZE
5193 will be 8 bytes. However, the structure only takes
5194 up 4 bytes of memory, so OSIZE will only be 4.
5196 (2) In combinations such as -mgp64 -msingle-float
5197 -fshort-double. Doubles passed in registers
5198 will then take up 4 (UNITS_PER_HWFPVALUE) bytes,
5199 but those passed on the stack take up
5200 UNITS_PER_WORD bytes. */
5201 osize = MAX (GET_MODE_SIZE (TYPE_MODE (type)), UNITS_PER_WORD);
5205 top = build3 (COMPONENT_REF, TREE_TYPE (f_gtop), valist, f_gtop,
5207 off = build3 (COMPONENT_REF, TREE_TYPE (f_goff), valist, f_goff,
5209 if (rsize > UNITS_PER_WORD)
5211 /* [1] Emit code for: off &= -rsize. */
5212 t = build2 (BIT_AND_EXPR, TREE_TYPE (off), off,
5213 build_int_cst (NULL_TREE, -rsize));
5214 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (off), off, t);
5215 gimplify_and_add (t, pre_p);
5220 /* [2] Emit code to branch if off == 0. */
5221 t = build2 (NE_EXPR, boolean_type_node, off,
5222 build_int_cst (TREE_TYPE (off), 0));
5223 addr = build3 (COND_EXPR, ptr_type_node, t, NULL_TREE, NULL_TREE);
5225 /* [5] Emit code for: off -= rsize. We do this as a form of
5226 post-increment not available to C. Also widen for the
5227 coming pointer arithmetic. */
5228 t = fold_convert (TREE_TYPE (off), build_int_cst (NULL_TREE, rsize));
5229 t = build2 (POSTDECREMENT_EXPR, TREE_TYPE (off), off, t);
5230 t = fold_convert (sizetype, t);
5231 t = fold_build1 (NEGATE_EXPR, sizetype, t);
5233 /* [4] Emit code for: addr_rtx = top - off. On big endian machines,
5234 the argument has RSIZE - SIZE bytes of leading padding. */
5235 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (top), top, t);
5236 if (BYTES_BIG_ENDIAN && rsize > size)
5238 u = size_int (rsize - size);
5239 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, u);
5241 COND_EXPR_THEN (addr) = t;
5243 if (osize > UNITS_PER_WORD)
5245 /* [9] Emit: ovfl += ((intptr_t) ovfl + osize - 1) & -osize. */
5246 u = size_int (osize - 1);
5247 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovfl), ovfl, u);
5248 t = fold_convert (sizetype, t);
5249 u = size_int (-osize);
5250 t = build2 (BIT_AND_EXPR, sizetype, t, u);
5251 t = fold_convert (TREE_TYPE (ovfl), t);
5252 align = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovfl), ovfl, t);
5257 /* [10, 11]. Emit code to store ovfl in addr_rtx, then
5258 post-increment ovfl by osize. On big-endian machines,
5259 the argument has OSIZE - SIZE bytes of leading padding. */
5260 u = fold_convert (TREE_TYPE (ovfl),
5261 build_int_cst (NULL_TREE, osize));
5262 t = build2 (POSTINCREMENT_EXPR, TREE_TYPE (ovfl), ovfl, u);
5263 if (BYTES_BIG_ENDIAN && osize > size)
5265 u = size_int (osize - size);
5266 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, u);
5269 /* String [9] and [10,11] together. */
5271 t = build2 (COMPOUND_EXPR, TREE_TYPE (t), align, t);
5272 COND_EXPR_ELSE (addr) = t;
5274 addr = fold_convert (build_pointer_type (type), addr);
5275 addr = build_va_arg_indirect_ref (addr);
5279 addr = build_va_arg_indirect_ref (addr);
5284 /* Return true if it is possible to use left/right accesses for a
5285 bitfield of WIDTH bits starting BITPOS bits into *OP. When
5286 returning true, update *OP, *LEFT and *RIGHT as follows:
5288 *OP is a BLKmode reference to the whole field.
5290 *LEFT is a QImode reference to the first byte if big endian or
5291 the last byte if little endian. This address can be used in the
5292 left-side instructions (lwl, swl, ldl, sdl).
5294 *RIGHT is a QImode reference to the opposite end of the field and
5295 can be used in the patterning right-side instruction. */
5298 mips_get_unaligned_mem (rtx *op, unsigned int width, int bitpos,
5299 rtx *left, rtx *right)
5303 /* Check that the operand really is a MEM. Not all the extv and
5304 extzv predicates are checked. */
5308 /* Check that the size is valid. */
5309 if (width != 32 && (!TARGET_64BIT || width != 64))
5312 /* We can only access byte-aligned values. Since we are always passed
5313 a reference to the first byte of the field, it is not necessary to
5314 do anything with BITPOS after this check. */
5315 if (bitpos % BITS_PER_UNIT != 0)
5318 /* Reject aligned bitfields: we want to use a normal load or store
5319 instead of a left/right pair. */
5320 if (MEM_ALIGN (*op) >= width)
5323 /* Adjust *OP to refer to the whole field. This also has the effect
5324 of legitimizing *OP's address for BLKmode, possibly simplifying it. */
5325 *op = adjust_address (*op, BLKmode, 0);
5326 set_mem_size (*op, GEN_INT (width / BITS_PER_UNIT));
5328 /* Get references to both ends of the field. We deliberately don't
5329 use the original QImode *OP for FIRST since the new BLKmode one
5330 might have a simpler address. */
5331 first = adjust_address (*op, QImode, 0);
5332 last = adjust_address (*op, QImode, width / BITS_PER_UNIT - 1);
5334 /* Allocate to LEFT and RIGHT according to endianness. LEFT should
5335 be the upper word and RIGHT the lower word. */
5336 if (TARGET_BIG_ENDIAN)
5337 *left = first, *right = last;
5339 *left = last, *right = first;
5345 /* Try to emit the equivalent of (set DEST (zero_extract SRC WIDTH BITPOS)).
5346 Return true on success. We only handle cases where zero_extract is
5347 equivalent to sign_extract. */
5350 mips_expand_unaligned_load (rtx dest, rtx src, unsigned int width, int bitpos)
5352 rtx left, right, temp;
5354 /* If TARGET_64BIT, the destination of a 32-bit load will be a
5355 paradoxical word_mode subreg. This is the only case in which
5356 we allow the destination to be larger than the source. */
5357 if (GET_CODE (dest) == SUBREG
5358 && GET_MODE (dest) == DImode
5359 && SUBREG_BYTE (dest) == 0
5360 && GET_MODE (SUBREG_REG (dest)) == SImode)
5361 dest = SUBREG_REG (dest);
5363 /* After the above adjustment, the destination must be the same
5364 width as the source. */
5365 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
5368 if (!mips_get_unaligned_mem (&src, width, bitpos, &left, &right))
5371 temp = gen_reg_rtx (GET_MODE (dest));
5372 if (GET_MODE (dest) == DImode)
5374 emit_insn (gen_mov_ldl (temp, src, left));
5375 emit_insn (gen_mov_ldr (dest, copy_rtx (src), right, temp));
5379 emit_insn (gen_mov_lwl (temp, src, left));
5380 emit_insn (gen_mov_lwr (dest, copy_rtx (src), right, temp));
5386 /* Try to expand (set (zero_extract DEST WIDTH BITPOS) SRC). Return
5390 mips_expand_unaligned_store (rtx dest, rtx src, unsigned int width, int bitpos)
5393 enum machine_mode mode;
5395 if (!mips_get_unaligned_mem (&dest, width, bitpos, &left, &right))
5398 mode = mode_for_size (width, MODE_INT, 0);
5399 src = gen_lowpart (mode, src);
5403 emit_insn (gen_mov_sdl (dest, src, left));
5404 emit_insn (gen_mov_sdr (copy_rtx (dest), copy_rtx (src), right));
5408 emit_insn (gen_mov_swl (dest, src, left));
5409 emit_insn (gen_mov_swr (copy_rtx (dest), copy_rtx (src), right));
5414 /* Return true if X is a MEM with the same size as MODE. */
5417 mips_mem_fits_mode_p (enum machine_mode mode, rtx x)
5424 size = MEM_SIZE (x);
5425 return size && INTVAL (size) == GET_MODE_SIZE (mode);
5428 /* Return true if (zero_extract OP SIZE POSITION) can be used as the
5429 source of an "ext" instruction or the destination of an "ins"
5430 instruction. OP must be a register operand and the following
5431 conditions must hold:
5433 0 <= POSITION < GET_MODE_BITSIZE (GET_MODE (op))
5434 0 < SIZE <= GET_MODE_BITSIZE (GET_MODE (op))
5435 0 < POSITION + SIZE <= GET_MODE_BITSIZE (GET_MODE (op))
5437 Also reject lengths equal to a word as they are better handled
5438 by the move patterns. */
5441 mips_use_ins_ext_p (rtx op, rtx size, rtx position)
5443 HOST_WIDE_INT len, pos;
5445 if (!ISA_HAS_EXT_INS
5446 || !register_operand (op, VOIDmode)
5447 || GET_MODE_BITSIZE (GET_MODE (op)) > BITS_PER_WORD)
5450 len = INTVAL (size);
5451 pos = INTVAL (position);
5453 if (len <= 0 || len >= GET_MODE_BITSIZE (GET_MODE (op))
5454 || pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (op)))
5460 /* Set up globals to generate code for the ISA or processor
5461 described by INFO. */
5464 mips_set_architecture (const struct mips_cpu_info *info)
5468 mips_arch_info = info;
5469 mips_arch = info->cpu;
5470 mips_isa = info->isa;
5475 /* Likewise for tuning. */
5478 mips_set_tune (const struct mips_cpu_info *info)
5482 mips_tune_info = info;
5483 mips_tune = info->cpu;
5487 /* Initialize mips_split_addresses from the associated command-line
5490 mips_split_addresses is a half-way house between explicit
5491 relocations and the traditional assembler macros. It can
5492 split absolute 32-bit symbolic constants into a high/lo_sum
5493 pair but uses macros for other sorts of access.
5495 Like explicit relocation support for REL targets, it relies
5496 on GNU extensions in the assembler and the linker.
5498 Although this code should work for -O0, it has traditionally
5499 been treated as an optimization. */
5502 mips_init_split_addresses (void)
5504 if (!TARGET_MIPS16 && TARGET_SPLIT_ADDRESSES
5505 && optimize && !flag_pic
5506 && !ABI_HAS_64BIT_SYMBOLS)
5507 mips_split_addresses = 1;
5509 mips_split_addresses = 0;
5512 /* (Re-)Initialize information about relocs. */
5515 mips_init_relocs (void)
5517 memset (mips_split_p, '\0', sizeof (mips_split_p));
5518 memset (mips_hi_relocs, '\0', sizeof (mips_hi_relocs));
5519 memset (mips_lo_relocs, '\0', sizeof (mips_lo_relocs));
5521 if (ABI_HAS_64BIT_SYMBOLS)
5523 if (TARGET_EXPLICIT_RELOCS)
5525 mips_split_p[SYMBOL_64_HIGH] = true;
5526 mips_hi_relocs[SYMBOL_64_HIGH] = "%highest(";
5527 mips_lo_relocs[SYMBOL_64_HIGH] = "%higher(";
5529 mips_split_p[SYMBOL_64_MID] = true;
5530 mips_hi_relocs[SYMBOL_64_MID] = "%higher(";
5531 mips_lo_relocs[SYMBOL_64_MID] = "%hi(";
5533 mips_split_p[SYMBOL_64_LOW] = true;
5534 mips_hi_relocs[SYMBOL_64_LOW] = "%hi(";
5535 mips_lo_relocs[SYMBOL_64_LOW] = "%lo(";
5537 mips_split_p[SYMBOL_ABSOLUTE] = true;
5538 mips_lo_relocs[SYMBOL_ABSOLUTE] = "%lo(";
5543 if (TARGET_EXPLICIT_RELOCS || mips_split_addresses || TARGET_MIPS16)
5545 mips_split_p[SYMBOL_ABSOLUTE] = true;
5546 mips_hi_relocs[SYMBOL_ABSOLUTE] = "%hi(";
5547 mips_lo_relocs[SYMBOL_ABSOLUTE] = "%lo(";
5549 mips_lo_relocs[SYMBOL_32_HIGH] = "%hi(";
5555 /* The high part is provided by a pseudo copy of $gp. */
5556 mips_split_p[SYMBOL_GP_RELATIVE] = true;
5557 mips_lo_relocs[SYMBOL_GP_RELATIVE] = "%gprel(";
5560 if (TARGET_EXPLICIT_RELOCS)
5562 /* Small data constants are kept whole until after reload,
5563 then lowered by mips_rewrite_small_data. */
5564 mips_lo_relocs[SYMBOL_GP_RELATIVE] = "%gp_rel(";
5566 mips_split_p[SYMBOL_GOT_PAGE_OFST] = true;
5569 mips_lo_relocs[SYMBOL_GOTOFF_PAGE] = "%got_page(";
5570 mips_lo_relocs[SYMBOL_GOT_PAGE_OFST] = "%got_ofst(";
5574 mips_lo_relocs[SYMBOL_GOTOFF_PAGE] = "%got(";
5575 mips_lo_relocs[SYMBOL_GOT_PAGE_OFST] = "%lo(";
5580 /* The HIGH and LO_SUM are matched by special .md patterns. */
5581 mips_split_p[SYMBOL_GOT_DISP] = true;
5583 mips_split_p[SYMBOL_GOTOFF_DISP] = true;
5584 mips_hi_relocs[SYMBOL_GOTOFF_DISP] = "%got_hi(";
5585 mips_lo_relocs[SYMBOL_GOTOFF_DISP] = "%got_lo(";
5587 mips_split_p[SYMBOL_GOTOFF_CALL] = true;
5588 mips_hi_relocs[SYMBOL_GOTOFF_CALL] = "%call_hi(";
5589 mips_lo_relocs[SYMBOL_GOTOFF_CALL] = "%call_lo(";
5594 mips_lo_relocs[SYMBOL_GOTOFF_DISP] = "%got_disp(";
5596 mips_lo_relocs[SYMBOL_GOTOFF_DISP] = "%got(";
5597 mips_lo_relocs[SYMBOL_GOTOFF_CALL] = "%call16(";
5603 mips_split_p[SYMBOL_GOTOFF_LOADGP] = true;
5604 mips_hi_relocs[SYMBOL_GOTOFF_LOADGP] = "%hi(%neg(%gp_rel(";
5605 mips_lo_relocs[SYMBOL_GOTOFF_LOADGP] = "%lo(%neg(%gp_rel(";
5608 /* Thread-local relocation operators. */
5609 mips_lo_relocs[SYMBOL_TLSGD] = "%tlsgd(";
5610 mips_lo_relocs[SYMBOL_TLSLDM] = "%tlsldm(";
5611 mips_split_p[SYMBOL_DTPREL] = 1;
5612 mips_hi_relocs[SYMBOL_DTPREL] = "%dtprel_hi(";
5613 mips_lo_relocs[SYMBOL_DTPREL] = "%dtprel_lo(";
5614 mips_lo_relocs[SYMBOL_GOTTPREL] = "%gottprel(";
5615 mips_split_p[SYMBOL_TPREL] = 1;
5616 mips_hi_relocs[SYMBOL_TPREL] = "%tprel_hi(";
5617 mips_lo_relocs[SYMBOL_TPREL] = "%tprel_lo(";
5619 mips_lo_relocs[SYMBOL_HALF] = "%half(";
5622 static GTY(()) int was_mips16_p = -1;
5624 /* Set up the target-dependent global state so that it matches the
5625 current function's ISA mode. */
5628 mips_set_mips16_mode (int mips16_p)
5630 if (mips16_p == was_mips16_p)
5633 /* Restore base settings of various flags. */
5634 target_flags = mips_base_target_flags;
5635 align_loops = mips_base_align_loops;
5636 align_jumps = mips_base_align_jumps;
5637 align_functions = mips_base_align_functions;
5638 flag_schedule_insns = mips_base_schedule_insns;
5639 flag_reorder_blocks_and_partition = mips_base_reorder_blocks_and_partition;
5640 flag_move_loop_invariants = mips_base_move_loop_invariants;
5641 flag_delayed_branch = mips_flag_delayed_branch;
5645 /* Select mips16 instruction set. */
5646 target_flags |= MASK_MIPS16;
5648 /* Don't run the scheduler before reload, since it tends to
5649 increase register pressure. */
5650 flag_schedule_insns = 0;
5652 /* Don't do hot/cold partitioning. The constant layout code expects
5653 the whole function to be in a single section. */
5654 flag_reorder_blocks_and_partition = 0;
5656 /* Don't move loop invariants, because it tends to increase
5657 register pressure. It also introduces an extra move in cases
5658 where the constant is the first operand in a two-operand binary
5659 instruction, or when it forms a register argument to a functon
5661 flag_move_loop_invariants = 0;
5663 /* Silently disable -mexplicit-relocs since it doesn't apply
5664 to mips16 code. Even so, it would overly pedantic to warn
5665 about "-mips16 -mexplicit-relocs", especially given that
5666 we use a %gprel() operator. */
5667 target_flags &= ~MASK_EXPLICIT_RELOCS;
5669 /* Experiments suggest we get the best overall results from using
5670 the range of an unextended lw or sw. Code that makes heavy use
5671 of byte or short accesses can do better with ranges of 0...31
5672 and 0...63 respectively, but most code is sensitive to the range
5673 of lw and sw instead. */
5674 targetm.min_anchor_offset = 0;
5675 targetm.max_anchor_offset = 127;
5677 if (flag_pic || TARGET_ABICALLS)
5678 sorry ("MIPS16 PIC");
5682 /* Reset to select base non-mips16 ISA. */
5683 target_flags &= ~MASK_MIPS16;
5685 /* When using explicit relocs, we call dbr_schedule from within
5687 if (TARGET_EXPLICIT_RELOCS)
5688 flag_delayed_branch = 0;
5690 /* Provide default values for align_* for 64-bit targets. */
5693 if (align_loops == 0)
5695 if (align_jumps == 0)
5697 if (align_functions == 0)
5698 align_functions = 8;
5701 targetm.min_anchor_offset = TARGET_MIN_ANCHOR_OFFSET;
5702 targetm.max_anchor_offset = TARGET_MAX_ANCHOR_OFFSET;
5705 /* (Re)initialize mips target internals for new ISA. */
5706 mips_init_split_addresses ();
5707 mips_init_relocs ();
5709 if (was_mips16_p >= 0)
5710 /* Reinitialize target-dependent state. */
5713 was_mips16_p = TARGET_MIPS16;
5716 /* Use a hash table to keep track of implicit mips16/nomips16 attributes
5717 for -mflip_mips16. It maps decl names onto a boolean mode setting. */
5719 struct mflip_mips16_entry GTY (()) {
5723 static GTY ((param_is (struct mflip_mips16_entry))) htab_t mflip_mips16_htab;
5725 /* Hash table callbacks for mflip_mips16_htab. */
5728 mflip_mips16_htab_hash (const void *entry)
5730 return htab_hash_string (((const struct mflip_mips16_entry *) entry)->name);
5734 mflip_mips16_htab_eq (const void *entry, const void *name)
5736 return strcmp (((const struct mflip_mips16_entry *) entry)->name,
5737 (const char *) name) == 0;
5740 /* DECL is a function that needs a default "mips16" or "nomips16" attribute
5741 for -mflip-mips16. Return true if it should use "mips16" and false if
5742 it should use "nomips16". */
5745 mflip_mips16_use_mips16_p (tree decl)
5747 struct mflip_mips16_entry *entry;
5752 /* Use the opposite of the command-line setting for anonymous decls. */
5753 if (!DECL_NAME (decl))
5754 return !mips_base_mips16;
5756 if (!mflip_mips16_htab)
5757 mflip_mips16_htab = htab_create_ggc (37, mflip_mips16_htab_hash,
5758 mflip_mips16_htab_eq, NULL);
5760 name = IDENTIFIER_POINTER (DECL_NAME (decl));
5761 hash = htab_hash_string (name);
5762 slot = htab_find_slot_with_hash (mflip_mips16_htab, name, hash, INSERT);
5763 entry = (struct mflip_mips16_entry *) *slot;
5766 mips16_flipper = !mips16_flipper;
5767 entry = GGC_NEW (struct mflip_mips16_entry);
5769 entry->mips16_p = mips16_flipper ? !mips_base_mips16 : mips_base_mips16;
5772 return entry->mips16_p;
5775 /* Implement TARGET_INSERT_ATTRIBUTES. */
5778 mips_insert_attributes (tree decl, tree *attributes)
5781 bool mips16_p, nomips16_p;
5783 /* Check for "mips16" and "nomips16" attributes. */
5784 mips16_p = lookup_attribute ("mips16", *attributes) != NULL;
5785 nomips16_p = lookup_attribute ("nomips16", *attributes) != NULL;
5786 if (TREE_CODE (decl) != FUNCTION_DECL)
5789 error ("%qs attribute only applies to functions", "mips16");
5791 error ("%qs attribute only applies to functions", "nomips16");
5795 mips16_p |= mips_mips16_decl_p (decl);
5796 nomips16_p |= mips_nomips16_decl_p (decl);
5797 if (mips16_p || nomips16_p)
5799 /* DECL cannot be simultaneously mips16 and nomips16. */
5800 if (mips16_p && nomips16_p)
5801 error ("%qs cannot have both %<mips16%> and "
5802 "%<nomips16%> attributes",
5803 IDENTIFIER_POINTER (DECL_NAME (decl)));
5805 else if (TARGET_FLIP_MIPS16 && !DECL_ARTIFICIAL (decl))
5807 /* Implement -mflip-mips16. If DECL has neither a "nomips16" nor a
5808 "mips16" attribute, arbitrarily pick one. We must pick the same
5809 setting for duplicate declarations of a function. */
5810 name = mflip_mips16_use_mips16_p (decl) ? "mips16" : "nomips16";
5811 *attributes = tree_cons (get_identifier (name), NULL, *attributes);
5816 /* Implement TARGET_MERGE_DECL_ATTRIBUTES. */
5819 mips_merge_decl_attributes (tree olddecl, tree newdecl)
5821 /* The decls' "mips16" and "nomips16" attributes must match exactly. */
5822 if (mips_mips16_decl_p (olddecl) != mips_mips16_decl_p (newdecl))
5823 error ("%qs redeclared with conflicting %qs attributes",
5824 IDENTIFIER_POINTER (DECL_NAME (newdecl)), "mips16");
5825 if (mips_nomips16_decl_p (olddecl) != mips_nomips16_decl_p (newdecl))
5826 error ("%qs redeclared with conflicting %qs attributes",
5827 IDENTIFIER_POINTER (DECL_NAME (newdecl)), "nomips16");
5829 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5830 DECL_ATTRIBUTES (newdecl));
5833 /* Implement TARGET_SET_CURRENT_FUNCTION. Decide whether the current
5834 function should use the MIPS16 ISA and switch modes accordingly. */
5837 mips_set_current_function (tree fndecl)
5839 mips_set_mips16_mode (mips_use_mips16_mode_p (fndecl));
5842 /* Implement TARGET_HANDLE_OPTION. */
5845 mips_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
5850 if (strcmp (arg, "32") == 0)
5852 else if (strcmp (arg, "o64") == 0)
5854 else if (strcmp (arg, "n32") == 0)
5856 else if (strcmp (arg, "64") == 0)
5858 else if (strcmp (arg, "eabi") == 0)
5859 mips_abi = ABI_EABI;
5866 return mips_parse_cpu (arg) != 0;
5869 mips_isa_info = mips_parse_cpu (ACONCAT (("mips", arg, NULL)));
5870 return mips_isa_info != 0;
5872 case OPT_mno_flush_func:
5873 mips_cache_flush_func = NULL;
5876 case OPT_mcode_readable_:
5877 if (strcmp (arg, "yes") == 0)
5878 mips_code_readable = CODE_READABLE_YES;
5879 else if (strcmp (arg, "pcrel") == 0)
5880 mips_code_readable = CODE_READABLE_PCREL;
5881 else if (strcmp (arg, "no") == 0)
5882 mips_code_readable = CODE_READABLE_NO;
5892 /* Set up the threshold for data to go into the small data area, instead
5893 of the normal data area, and detect any conflicts in the switches. */
5896 override_options (void)
5898 int i, start, regno;
5899 enum machine_mode mode;
5901 #ifdef SUBTARGET_OVERRIDE_OPTIONS
5902 SUBTARGET_OVERRIDE_OPTIONS;
5905 mips_section_threshold = g_switch_set ? g_switch_value : MIPS_DEFAULT_GVALUE;
5907 /* The following code determines the architecture and register size.
5908 Similar code was added to GAS 2.14 (see tc-mips.c:md_after_parse_args()).
5909 The GAS and GCC code should be kept in sync as much as possible. */
5911 if (mips_arch_string != 0)
5912 mips_set_architecture (mips_parse_cpu (mips_arch_string));
5914 if (mips_isa_info != 0)
5916 if (mips_arch_info == 0)
5917 mips_set_architecture (mips_isa_info);
5918 else if (mips_arch_info->isa != mips_isa_info->isa)
5919 error ("-%s conflicts with the other architecture options, "
5920 "which specify a %s processor",
5921 mips_isa_info->name,
5922 mips_cpu_info_from_isa (mips_arch_info->isa)->name);
5925 if (mips_arch_info == 0)
5927 #ifdef MIPS_CPU_STRING_DEFAULT
5928 mips_set_architecture (mips_parse_cpu (MIPS_CPU_STRING_DEFAULT));
5930 mips_set_architecture (mips_cpu_info_from_isa (MIPS_ISA_DEFAULT));
5934 if (ABI_NEEDS_64BIT_REGS && !ISA_HAS_64BIT_REGS)
5935 error ("-march=%s is not compatible with the selected ABI",
5936 mips_arch_info->name);
5938 /* Optimize for mips_arch, unless -mtune selects a different processor. */
5939 if (mips_tune_string != 0)
5940 mips_set_tune (mips_parse_cpu (mips_tune_string));
5942 if (mips_tune_info == 0)
5943 mips_set_tune (mips_arch_info);
5945 /* Set cost structure for the processor. */
5947 mips_cost = &mips_rtx_cost_optimize_size;
5949 mips_cost = &mips_rtx_cost_data[mips_tune];
5951 /* If the user hasn't specified a branch cost, use the processor's
5953 if (mips_branch_cost == 0)
5954 mips_branch_cost = mips_cost->branch_cost;
5956 if ((target_flags_explicit & MASK_64BIT) != 0)
5958 /* The user specified the size of the integer registers. Make sure
5959 it agrees with the ABI and ISA. */
5960 if (TARGET_64BIT && !ISA_HAS_64BIT_REGS)
5961 error ("-mgp64 used with a 32-bit processor");
5962 else if (!TARGET_64BIT && ABI_NEEDS_64BIT_REGS)
5963 error ("-mgp32 used with a 64-bit ABI");
5964 else if (TARGET_64BIT && ABI_NEEDS_32BIT_REGS)
5965 error ("-mgp64 used with a 32-bit ABI");
5969 /* Infer the integer register size from the ABI and processor.
5970 Restrict ourselves to 32-bit registers if that's all the
5971 processor has, or if the ABI cannot handle 64-bit registers. */
5972 if (ABI_NEEDS_32BIT_REGS || !ISA_HAS_64BIT_REGS)
5973 target_flags &= ~MASK_64BIT;
5975 target_flags |= MASK_64BIT;
5978 if ((target_flags_explicit & MASK_FLOAT64) != 0)
5980 /* Really, -mfp32 and -mfp64 are ornamental options. There's
5981 only one right answer here. */
5982 if (TARGET_64BIT && TARGET_DOUBLE_FLOAT && !TARGET_FLOAT64)
5983 error ("unsupported combination: %s", "-mgp64 -mfp32 -mdouble-float");
5984 else if (!TARGET_64BIT && TARGET_FLOAT64
5985 && !(ISA_HAS_MXHC1 && mips_abi == ABI_32))
5986 error ("-mgp32 and -mfp64 can only be combined if the target"
5987 " supports the mfhc1 and mthc1 instructions");
5988 else if (TARGET_SINGLE_FLOAT && TARGET_FLOAT64)
5989 error ("unsupported combination: %s", "-mfp64 -msingle-float");
5993 /* -msingle-float selects 32-bit float registers. Otherwise the
5994 float registers should be the same size as the integer ones. */
5995 if (TARGET_64BIT && TARGET_DOUBLE_FLOAT)
5996 target_flags |= MASK_FLOAT64;
5998 target_flags &= ~MASK_FLOAT64;
6001 /* End of code shared with GAS. */
6003 if ((target_flags_explicit & MASK_LONG64) == 0)
6005 if ((mips_abi == ABI_EABI && TARGET_64BIT) || mips_abi == ABI_64)
6006 target_flags |= MASK_LONG64;
6008 target_flags &= ~MASK_LONG64;
6012 flag_pcc_struct_return = 0;
6014 if ((target_flags_explicit & MASK_BRANCHLIKELY) == 0)
6016 /* If neither -mbranch-likely nor -mno-branch-likely was given
6017 on the command line, set MASK_BRANCHLIKELY based on the target
6018 architecture and tuning flags. Annulled delay slots are a
6019 size win, so we only consider the processor-specific tuning
6020 for !optimize_size. */
6021 if (ISA_HAS_BRANCHLIKELY
6023 || (mips_tune_info->tune_flags & PTF_AVOID_BRANCHLIKELY) == 0))
6024 target_flags |= MASK_BRANCHLIKELY;
6026 target_flags &= ~MASK_BRANCHLIKELY;
6028 else if (TARGET_BRANCHLIKELY && !ISA_HAS_BRANCHLIKELY)
6029 warning (0, "the %qs architecture does not support branch-likely"
6030 " instructions", mips_arch_info->name);
6032 /* The effect of -mabicalls isn't defined for the EABI. */
6033 if (mips_abi == ABI_EABI && TARGET_ABICALLS)
6035 error ("unsupported combination: %s", "-mabicalls -mabi=eabi");
6036 target_flags &= ~MASK_ABICALLS;
6039 /* MIPS16 cannot generate PIC yet. */
6040 if (TARGET_MIPS16 && (flag_pic || TARGET_ABICALLS))
6042 sorry ("MIPS16 PIC");
6043 target_flags &= ~MASK_ABICALLS;
6044 flag_pic = flag_pie = flag_shlib = 0;
6047 if (TARGET_ABICALLS)
6048 /* We need to set flag_pic for executables as well as DSOs
6049 because we may reference symbols that are not defined in
6050 the final executable. (MIPS does not use things like
6051 copy relocs, for example.)
6053 Also, there is a body of code that uses __PIC__ to distinguish
6054 between -mabicalls and -mno-abicalls code. */
6057 /* -mvr4130-align is a "speed over size" optimization: it usually produces
6058 faster code, but at the expense of more nops. Enable it at -O3 and
6060 if (optimize > 2 && (target_flags_explicit & MASK_VR4130_ALIGN) == 0)
6061 target_flags |= MASK_VR4130_ALIGN;
6063 /* Prefer a call to memcpy over inline code when optimizing for size,
6064 though see MOVE_RATIO in mips.h. */
6065 if (optimize_size && (target_flags_explicit & MASK_MEMCPY) == 0)
6066 target_flags |= MASK_MEMCPY;
6068 /* If we have a nonzero small-data limit, check that the -mgpopt
6069 setting is consistent with the other target flags. */
6070 if (mips_section_threshold > 0)
6074 if (!TARGET_MIPS16 && !TARGET_EXPLICIT_RELOCS)
6075 error ("%<-mno-gpopt%> needs %<-mexplicit-relocs%>");
6077 TARGET_LOCAL_SDATA = false;
6078 TARGET_EXTERN_SDATA = false;
6082 if (TARGET_VXWORKS_RTP)
6083 warning (0, "cannot use small-data accesses for %qs", "-mrtp");
6085 if (TARGET_ABICALLS)
6086 warning (0, "cannot use small-data accesses for %qs",
6091 #ifdef MIPS_TFMODE_FORMAT
6092 REAL_MODE_FORMAT (TFmode) = &MIPS_TFMODE_FORMAT;
6095 /* Make sure that the user didn't turn off paired single support when
6096 MIPS-3D support is requested. */
6097 if (TARGET_MIPS3D && (target_flags_explicit & MASK_PAIRED_SINGLE_FLOAT)
6098 && !TARGET_PAIRED_SINGLE_FLOAT)
6099 error ("-mips3d requires -mpaired-single");
6101 /* If TARGET_MIPS3D, enable MASK_PAIRED_SINGLE_FLOAT. */
6103 target_flags |= MASK_PAIRED_SINGLE_FLOAT;
6105 /* Make sure that when TARGET_PAIRED_SINGLE_FLOAT is true, TARGET_FLOAT64
6106 and TARGET_HARD_FLOAT_ABI are both true. */
6107 if (TARGET_PAIRED_SINGLE_FLOAT && !(TARGET_FLOAT64 && TARGET_HARD_FLOAT_ABI))
6108 error ("-mips3d/-mpaired-single must be used with -mfp64 -mhard-float");
6110 /* Make sure that the ISA supports TARGET_PAIRED_SINGLE_FLOAT when it is
6112 if (TARGET_PAIRED_SINGLE_FLOAT && !ISA_MIPS64)
6113 error ("-mips3d/-mpaired-single must be used with -mips64");
6115 /* If TARGET_DSPR2, enable MASK_DSP. */
6117 target_flags |= MASK_DSP;
6119 mips_print_operand_punct['?'] = 1;
6120 mips_print_operand_punct['#'] = 1;
6121 mips_print_operand_punct['/'] = 1;
6122 mips_print_operand_punct['&'] = 1;
6123 mips_print_operand_punct['!'] = 1;
6124 mips_print_operand_punct['*'] = 1;
6125 mips_print_operand_punct['@'] = 1;
6126 mips_print_operand_punct['.'] = 1;
6127 mips_print_operand_punct['('] = 1;
6128 mips_print_operand_punct[')'] = 1;
6129 mips_print_operand_punct['['] = 1;
6130 mips_print_operand_punct[']'] = 1;
6131 mips_print_operand_punct['<'] = 1;
6132 mips_print_operand_punct['>'] = 1;
6133 mips_print_operand_punct['{'] = 1;
6134 mips_print_operand_punct['}'] = 1;
6135 mips_print_operand_punct['^'] = 1;
6136 mips_print_operand_punct['$'] = 1;
6137 mips_print_operand_punct['+'] = 1;
6138 mips_print_operand_punct['~'] = 1;
6139 mips_print_operand_punct['|'] = 1;
6140 mips_print_operand_punct['-'] = 1;
6142 /* Set up array to map GCC register number to debug register number.
6143 Ignore the special purpose register numbers. */
6145 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6147 mips_dbx_regno[i] = INVALID_REGNUM;
6148 if (GP_REG_P (i) || FP_REG_P (i) || ALL_COP_REG_P (i))
6149 mips_dwarf_regno[i] = i;
6151 mips_dwarf_regno[i] = INVALID_REGNUM;
6154 start = GP_DBX_FIRST - GP_REG_FIRST;
6155 for (i = GP_REG_FIRST; i <= GP_REG_LAST; i++)
6156 mips_dbx_regno[i] = i + start;
6158 start = FP_DBX_FIRST - FP_REG_FIRST;
6159 for (i = FP_REG_FIRST; i <= FP_REG_LAST; i++)
6160 mips_dbx_regno[i] = i + start;
6162 /* HI and LO debug registers use big-endian ordering. */
6163 mips_dbx_regno[HI_REGNUM] = MD_DBX_FIRST + 0;
6164 mips_dbx_regno[LO_REGNUM] = MD_DBX_FIRST + 1;
6165 mips_dwarf_regno[HI_REGNUM] = MD_REG_FIRST + 0;
6166 mips_dwarf_regno[LO_REGNUM] = MD_REG_FIRST + 1;
6167 for (i = DSP_ACC_REG_FIRST; i <= DSP_ACC_REG_LAST; i += 2)
6169 mips_dwarf_regno[i + TARGET_LITTLE_ENDIAN] = i;
6170 mips_dwarf_regno[i + TARGET_BIG_ENDIAN] = i + 1;
6173 /* Set up array giving whether a given register can hold a given mode. */
6175 for (mode = VOIDmode;
6176 mode != MAX_MACHINE_MODE;
6177 mode = (enum machine_mode) ((int)mode + 1))
6179 register int size = GET_MODE_SIZE (mode);
6180 register enum mode_class class = GET_MODE_CLASS (mode);
6182 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6186 if (mode == CCV2mode)
6189 && (regno - ST_REG_FIRST) % 2 == 0);
6191 else if (mode == CCV4mode)
6194 && (regno - ST_REG_FIRST) % 4 == 0);
6196 else if (mode == CCmode)
6199 temp = (regno == FPSW_REGNUM);
6201 temp = (ST_REG_P (regno) || GP_REG_P (regno)
6202 || FP_REG_P (regno));
6205 else if (GP_REG_P (regno))
6206 temp = ((regno & 1) == 0 || size <= UNITS_PER_WORD);
6208 else if (FP_REG_P (regno))
6209 temp = ((((regno % MAX_FPRS_PER_FMT) == 0)
6210 || (MIN_FPRS_PER_FMT == 1
6211 && size <= UNITS_PER_FPREG))
6212 && (((class == MODE_FLOAT || class == MODE_COMPLEX_FLOAT
6213 || class == MODE_VECTOR_FLOAT)
6214 && size <= UNITS_PER_FPVALUE)
6215 /* Allow integer modes that fit into a single
6216 register. We need to put integers into FPRs
6217 when using instructions like cvt and trunc.
6218 We can't allow sizes smaller than a word,
6219 the FPU has no appropriate load/store
6220 instructions for those. */
6221 || (class == MODE_INT
6222 && size >= MIN_UNITS_PER_WORD
6223 && size <= UNITS_PER_FPREG)
6224 /* Allow TFmode for CCmode reloads. */
6225 || (ISA_HAS_8CC && mode == TFmode)));
6227 else if (ACC_REG_P (regno))
6228 temp = ((INTEGRAL_MODE_P (mode) || ALL_FIXED_POINT_MODE_P (mode))
6229 && size <= UNITS_PER_WORD * 2
6230 && (size <= UNITS_PER_WORD
6231 || regno == MD_REG_FIRST
6232 || (DSP_ACC_REG_P (regno)
6233 && ((regno - DSP_ACC_REG_FIRST) & 1) == 0)));
6235 else if (ALL_COP_REG_P (regno))
6236 temp = (class == MODE_INT && size <= UNITS_PER_WORD);
6240 mips_hard_regno_mode_ok[(int)mode][regno] = temp;
6244 /* Save GPR registers in word_mode sized hunks. word_mode hasn't been
6245 initialized yet, so we can't use that here. */
6246 gpr_mode = TARGET_64BIT ? DImode : SImode;
6248 /* Function to allocate machine-dependent function status. */
6249 init_machine_status = &mips_init_machine_status;
6251 /* Default to working around R4000 errata only if the processor
6252 was selected explicitly. */
6253 if ((target_flags_explicit & MASK_FIX_R4000) == 0
6254 && mips_matching_cpu_name_p (mips_arch_info->name, "r4000"))
6255 target_flags |= MASK_FIX_R4000;
6257 /* Default to working around R4400 errata only if the processor
6258 was selected explicitly. */
6259 if ((target_flags_explicit & MASK_FIX_R4400) == 0
6260 && mips_matching_cpu_name_p (mips_arch_info->name, "r4400"))
6261 target_flags |= MASK_FIX_R4400;
6263 /* Save base state of options. */
6264 mips_base_mips16 = TARGET_MIPS16;
6265 mips_base_target_flags = target_flags;
6266 mips_base_schedule_insns = flag_schedule_insns;
6267 mips_base_reorder_blocks_and_partition = flag_reorder_blocks_and_partition;
6268 mips_base_move_loop_invariants = flag_move_loop_invariants;
6269 mips_base_align_loops = align_loops;
6270 mips_base_align_jumps = align_jumps;
6271 mips_base_align_functions = align_functions;
6272 mips_flag_delayed_branch = flag_delayed_branch;
6274 /* Now select the mips16 or 32-bit instruction set, as requested. */
6275 mips_set_mips16_mode (mips_base_mips16);
6278 /* Swap the register information for registers I and I + 1, which
6279 currently have the wrong endianness. Note that the registers'
6280 fixedness and call-clobberedness might have been set on the
6284 mips_swap_registers (unsigned int i)
6289 #define SWAP_INT(X, Y) (tmpi = (X), (X) = (Y), (Y) = tmpi)
6290 #define SWAP_STRING(X, Y) (tmps = (X), (X) = (Y), (Y) = tmps)
6292 SWAP_INT (fixed_regs[i], fixed_regs[i + 1]);
6293 SWAP_INT (call_used_regs[i], call_used_regs[i + 1]);
6294 SWAP_INT (call_really_used_regs[i], call_really_used_regs[i + 1]);
6295 SWAP_STRING (reg_names[i], reg_names[i + 1]);
6301 /* Implement CONDITIONAL_REGISTER_USAGE. */
6304 mips_conditional_register_usage (void)
6310 for (regno = DSP_ACC_REG_FIRST; regno <= DSP_ACC_REG_LAST; regno++)
6311 fixed_regs[regno] = call_used_regs[regno] = 1;
6313 if (!TARGET_HARD_FLOAT)
6317 for (regno = FP_REG_FIRST; regno <= FP_REG_LAST; regno++)
6318 fixed_regs[regno] = call_used_regs[regno] = 1;
6319 for (regno = ST_REG_FIRST; regno <= ST_REG_LAST; regno++)
6320 fixed_regs[regno] = call_used_regs[regno] = 1;
6322 else if (! ISA_HAS_8CC)
6326 /* We only have a single condition code register. We
6327 implement this by hiding all the condition code registers,
6328 and generating RTL that refers directly to ST_REG_FIRST. */
6329 for (regno = ST_REG_FIRST; regno <= ST_REG_LAST; regno++)
6330 fixed_regs[regno] = call_used_regs[regno] = 1;
6332 /* In mips16 mode, we permit the $t temporary registers to be used
6333 for reload. We prohibit the unused $s registers, since they
6334 are caller saved, and saving them via a mips16 register would
6335 probably waste more time than just reloading the value. */
6338 fixed_regs[18] = call_used_regs[18] = 1;
6339 fixed_regs[19] = call_used_regs[19] = 1;
6340 fixed_regs[20] = call_used_regs[20] = 1;
6341 fixed_regs[21] = call_used_regs[21] = 1;
6342 fixed_regs[22] = call_used_regs[22] = 1;
6343 fixed_regs[23] = call_used_regs[23] = 1;
6344 fixed_regs[26] = call_used_regs[26] = 1;
6345 fixed_regs[27] = call_used_regs[27] = 1;
6346 fixed_regs[30] = call_used_regs[30] = 1;
6348 /* fp20-23 are now caller saved. */
6349 if (mips_abi == ABI_64)
6352 for (regno = FP_REG_FIRST + 20; regno < FP_REG_FIRST + 24; regno++)
6353 call_really_used_regs[regno] = call_used_regs[regno] = 1;
6355 /* Odd registers from fp21 to fp31 are now caller saved. */
6356 if (mips_abi == ABI_N32)
6359 for (regno = FP_REG_FIRST + 21; regno <= FP_REG_FIRST + 31; regno+=2)
6360 call_really_used_regs[regno] = call_used_regs[regno] = 1;
6362 /* Make sure that double-register accumulator values are correctly
6363 ordered for the current endianness. */
6364 if (TARGET_LITTLE_ENDIAN)
6367 mips_swap_registers (MD_REG_FIRST);
6368 for (regno = DSP_ACC_REG_FIRST; regno <= DSP_ACC_REG_LAST; regno += 2)
6369 mips_swap_registers (regno);
6373 /* Allocate a chunk of memory for per-function machine-dependent data. */
6374 static struct machine_function *
6375 mips_init_machine_status (void)
6377 return ((struct machine_function *)
6378 ggc_alloc_cleared (sizeof (struct machine_function)));
6381 /* On the mips16, we want to allocate $24 (T_REG) before other
6382 registers for instructions for which it is possible. This helps
6383 avoid shuffling registers around in order to set up for an xor,
6384 encouraging the compiler to use a cmp instead. */
6387 mips_order_regs_for_local_alloc (void)
6391 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6392 reg_alloc_order[i] = i;
6396 /* It really doesn't matter where we put register 0, since it is
6397 a fixed register anyhow. */
6398 reg_alloc_order[0] = 24;
6399 reg_alloc_order[24] = 0;
6404 /* The MIPS debug format wants all automatic variables and arguments
6405 to be in terms of the virtual frame pointer (stack pointer before
6406 any adjustment in the function), while the MIPS 3.0 linker wants
6407 the frame pointer to be the stack pointer after the initial
6408 adjustment. So, we do the adjustment here. The arg pointer (which
6409 is eliminated) points to the virtual frame pointer, while the frame
6410 pointer (which may be eliminated) points to the stack pointer after
6411 the initial adjustments. */
6414 mips_debugger_offset (rtx addr, HOST_WIDE_INT offset)
6416 rtx offset2 = const0_rtx;
6417 rtx reg = eliminate_constant_term (addr, &offset2);
6420 offset = INTVAL (offset2);
6422 if (reg == stack_pointer_rtx || reg == frame_pointer_rtx
6423 || reg == hard_frame_pointer_rtx)
6425 HOST_WIDE_INT frame_size = (!cfun->machine->frame.initialized)
6426 ? compute_frame_size (get_frame_size ())
6427 : cfun->machine->frame.total_size;
6429 /* MIPS16 frame is smaller */
6430 if (frame_pointer_needed && TARGET_MIPS16)
6431 frame_size -= cfun->machine->frame.args_size;
6433 offset = offset - frame_size;
6436 /* sdbout_parms does not want this to crash for unrecognized cases. */
6438 else if (reg != arg_pointer_rtx)
6439 fatal_insn ("mips_debugger_offset called with non stack/frame/arg pointer",
6446 /* If OP is an UNSPEC address, return the address to which it refers,
6447 otherwise return OP itself. */
6450 mips_strip_unspec_address (rtx op)
6454 split_const (op, &base, &offset);
6455 if (UNSPEC_ADDRESS_P (base))
6456 op = plus_constant (UNSPEC_ADDRESS (base), INTVAL (offset));
6460 /* Implement the PRINT_OPERAND macro. The MIPS-specific operand codes are:
6462 'X' OP is CONST_INT, prints 32 bits in hexadecimal format = "0x%08x",
6463 'x' OP is CONST_INT, prints 16 bits in hexadecimal format = "0x%04x",
6464 'h' OP is HIGH, prints %hi(X),
6465 'd' output integer constant in decimal,
6466 'z' if the operand is 0, use $0 instead of normal operand.
6467 'D' print second part of double-word register or memory operand.
6468 'L' print low-order register of double-word register operand.
6469 'M' print high-order register of double-word register operand.
6470 'C' print part of opcode for a branch condition.
6471 'F' print part of opcode for a floating-point branch condition.
6472 'N' print part of opcode for a branch condition, inverted.
6473 'W' print part of opcode for a floating-point branch condition, inverted.
6474 'T' print 'f' for (eq:CC ...), 't' for (ne:CC ...),
6475 'z' for (eq:?I ...), 'n' for (ne:?I ...).
6476 't' like 'T', but with the EQ/NE cases reversed
6477 'Y' for a CONST_INT X, print mips_fp_conditions[X]
6478 'Z' print the operand and a comma for ISA_HAS_8CC, otherwise print nothing
6479 'R' print the reloc associated with LO_SUM
6480 'q' print DSP accumulator registers
6482 The punctuation characters are:
6484 '(' Turn on .set noreorder
6485 ')' Turn on .set reorder
6486 '[' Turn on .set noat
6488 '<' Turn on .set nomacro
6489 '>' Turn on .set macro
6490 '{' Turn on .set volatile (not GAS)
6491 '}' Turn on .set novolatile (not GAS)
6492 '&' Turn on .set noreorder if filling delay slots
6493 '*' Turn on both .set noreorder and .set nomacro if filling delay slots
6494 '!' Turn on .set nomacro if filling delay slots
6495 '#' Print nop if in a .set noreorder section.
6496 '/' Like '#', but does nothing within a delayed branch sequence
6497 '?' Print 'l' if we are to use a branch likely instead of normal branch.
6498 '@' Print the name of the assembler temporary register (at or $1).
6499 '.' Print the name of the register with a hard-wired zero (zero or $0).
6500 '^' Print the name of the pic call-through register (t9 or $25).
6501 '$' Print the name of the stack pointer register (sp or $29).
6502 '+' Print the name of the gp register (usually gp or $28).
6503 '~' Output a branch alignment to LABEL_ALIGN(NULL).
6504 '|' Print .set push; .set mips2 if !ISA_HAS_LL_SC.
6505 '-' Print .set pop under the same conditions for '|'. */
6508 print_operand (FILE *file, rtx op, int letter)
6510 register enum rtx_code code;
6512 if (PRINT_OPERAND_PUNCT_VALID_P (letter))
6517 if (mips_branch_likely)
6522 fputs (reg_names [GP_REG_FIRST + 1], file);
6526 fputs (reg_names [PIC_FUNCTION_ADDR_REGNUM], file);
6530 fputs (reg_names [GP_REG_FIRST + 0], file);
6534 fputs (reg_names[STACK_POINTER_REGNUM], file);
6538 fputs (reg_names[PIC_OFFSET_TABLE_REGNUM], file);
6542 if (final_sequence != 0 && set_noreorder++ == 0)
6543 fputs (".set\tnoreorder\n\t", file);
6547 if (final_sequence != 0)
6549 if (set_noreorder++ == 0)
6550 fputs (".set\tnoreorder\n\t", file);
6552 if (set_nomacro++ == 0)
6553 fputs (".set\tnomacro\n\t", file);
6558 if (final_sequence != 0 && set_nomacro++ == 0)
6559 fputs ("\n\t.set\tnomacro", file);
6563 if (set_noreorder != 0)
6564 fputs ("\n\tnop", file);
6568 /* Print an extra newline so that the delayed insn is separated
6569 from the following ones. This looks neater and is consistent
6570 with non-nop delayed sequences. */
6571 if (set_noreorder != 0 && final_sequence == 0)
6572 fputs ("\n\tnop\n", file);
6576 if (set_noreorder++ == 0)
6577 fputs (".set\tnoreorder\n\t", file);
6581 if (set_noreorder == 0)
6582 error ("internal error: %%) found without a %%( in assembler pattern");
6584 else if (--set_noreorder == 0)
6585 fputs ("\n\t.set\treorder", file);
6590 if (set_noat++ == 0)
6591 fputs (".set\tnoat\n\t", file);
6596 error ("internal error: %%] found without a %%[ in assembler pattern");
6597 else if (--set_noat == 0)
6598 fputs ("\n\t.set\tat", file);
6603 if (set_nomacro++ == 0)
6604 fputs (".set\tnomacro\n\t", file);
6608 if (set_nomacro == 0)
6609 error ("internal error: %%> found without a %%< in assembler pattern");
6610 else if (--set_nomacro == 0)
6611 fputs ("\n\t.set\tmacro", file);
6616 if (set_volatile++ == 0)
6617 fputs ("#.set\tvolatile\n\t", file);
6621 if (set_volatile == 0)
6622 error ("internal error: %%} found without a %%{ in assembler pattern");
6623 else if (--set_volatile == 0)
6624 fputs ("\n\t#.set\tnovolatile", file);
6630 if (align_labels_log > 0)
6631 ASM_OUTPUT_ALIGN (file, align_labels_log);
6637 fputs (".set\tpush\n\t.set\tmips2\n\t", file);
6642 fputs ("\n\t.set\tpop", file);
6646 error ("PRINT_OPERAND: unknown punctuation '%c'", letter);
6655 error ("PRINT_OPERAND null pointer");
6659 code = GET_CODE (op);
6664 case EQ: fputs ("eq", file); break;
6665 case NE: fputs ("ne", file); break;
6666 case GT: fputs ("gt", file); break;
6667 case GE: fputs ("ge", file); break;
6668 case LT: fputs ("lt", file); break;
6669 case LE: fputs ("le", file); break;
6670 case GTU: fputs ("gtu", file); break;
6671 case GEU: fputs ("geu", file); break;
6672 case LTU: fputs ("ltu", file); break;
6673 case LEU: fputs ("leu", file); break;
6675 fatal_insn ("PRINT_OPERAND, invalid insn for %%C", op);
6678 else if (letter == 'N')
6681 case EQ: fputs ("ne", file); break;
6682 case NE: fputs ("eq", file); break;
6683 case GT: fputs ("le", file); break;
6684 case GE: fputs ("lt", file); break;
6685 case LT: fputs ("ge", file); break;
6686 case LE: fputs ("gt", file); break;
6687 case GTU: fputs ("leu", file); break;
6688 case GEU: fputs ("ltu", file); break;
6689 case LTU: fputs ("geu", file); break;
6690 case LEU: fputs ("gtu", file); break;
6692 fatal_insn ("PRINT_OPERAND, invalid insn for %%N", op);
6695 else if (letter == 'F')
6698 case EQ: fputs ("c1f", file); break;
6699 case NE: fputs ("c1t", file); break;
6701 fatal_insn ("PRINT_OPERAND, invalid insn for %%F", op);
6704 else if (letter == 'W')
6707 case EQ: fputs ("c1t", file); break;
6708 case NE: fputs ("c1f", file); break;
6710 fatal_insn ("PRINT_OPERAND, invalid insn for %%W", op);
6713 else if (letter == 'h')
6715 if (GET_CODE (op) == HIGH)
6718 print_operand_reloc (file, op, SYMBOL_CONTEXT_LEA, mips_hi_relocs);
6721 else if (letter == 'R')
6722 print_operand_reloc (file, op, SYMBOL_CONTEXT_LEA, mips_lo_relocs);
6724 else if (letter == 'Y')
6726 if (GET_CODE (op) == CONST_INT
6727 && ((unsigned HOST_WIDE_INT) INTVAL (op)
6728 < ARRAY_SIZE (mips_fp_conditions)))
6729 fputs (mips_fp_conditions[INTVAL (op)], file);
6731 output_operand_lossage ("invalid %%Y value");
6734 else if (letter == 'Z')
6738 print_operand (file, op, 0);
6743 else if (letter == 'q')
6748 fatal_insn ("PRINT_OPERAND, invalid insn for %%q", op);
6750 regnum = REGNO (op);
6751 if (MD_REG_P (regnum))
6752 fprintf (file, "$ac0");
6753 else if (DSP_ACC_REG_P (regnum))
6754 fprintf (file, "$ac%c", reg_names[regnum][3]);
6756 fatal_insn ("PRINT_OPERAND, invalid insn for %%q", op);
6759 else if (code == REG || code == SUBREG)
6761 register int regnum;
6764 regnum = REGNO (op);
6766 regnum = true_regnum (op);
6768 if ((letter == 'M' && ! WORDS_BIG_ENDIAN)
6769 || (letter == 'L' && WORDS_BIG_ENDIAN)
6773 fprintf (file, "%s", reg_names[regnum]);
6776 else if (code == MEM)
6779 output_address (plus_constant (XEXP (op, 0), 4));
6781 output_address (XEXP (op, 0));
6784 else if (letter == 'x' && GET_CODE (op) == CONST_INT)
6785 fprintf (file, HOST_WIDE_INT_PRINT_HEX, 0xffff & INTVAL(op));
6787 else if (letter == 'X' && GET_CODE(op) == CONST_INT)
6788 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (op));
6790 else if (letter == 'd' && GET_CODE(op) == CONST_INT)
6791 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (INTVAL(op)));
6793 else if (letter == 'z' && op == CONST0_RTX (GET_MODE (op)))
6794 fputs (reg_names[GP_REG_FIRST], file);
6796 else if (letter == 'd' || letter == 'x' || letter == 'X')
6797 output_operand_lossage ("invalid use of %%d, %%x, or %%X");
6799 else if (letter == 'T' || letter == 't')
6801 int truth = (code == NE) == (letter == 'T');
6802 fputc ("zfnt"[truth * 2 + (GET_MODE (op) == CCmode)], file);
6805 else if (CONST_GP_P (op))
6806 fputs (reg_names[GLOBAL_POINTER_REGNUM], file);
6809 output_addr_const (file, mips_strip_unspec_address (op));
6813 /* Print symbolic operand OP, which is part of a HIGH or LO_SUM
6814 in context CONTEXT. RELOCS is the array of relocations to use. */
6817 print_operand_reloc (FILE *file, rtx op, enum mips_symbol_context context,
6818 const char **relocs)
6820 enum mips_symbol_type symbol_type;
6823 symbol_type = mips_classify_symbolic_expression (op, context);
6824 if (relocs[symbol_type] == 0)
6825 fatal_insn ("PRINT_OPERAND, invalid operand for relocation", op);
6827 fputs (relocs[symbol_type], file);
6828 output_addr_const (file, mips_strip_unspec_address (op));
6829 for (p = relocs[symbol_type]; *p != 0; p++)
6834 /* Output address operand X to FILE. */
6837 print_operand_address (FILE *file, rtx x)
6839 struct mips_address_info addr;
6841 if (mips_classify_address (&addr, x, word_mode, true))
6845 print_operand (file, addr.offset, 0);
6846 fprintf (file, "(%s)", reg_names[REGNO (addr.reg)]);
6849 case ADDRESS_LO_SUM:
6850 print_operand_reloc (file, addr.offset, SYMBOL_CONTEXT_MEM,
6852 fprintf (file, "(%s)", reg_names[REGNO (addr.reg)]);
6855 case ADDRESS_CONST_INT:
6856 output_addr_const (file, x);
6857 fprintf (file, "(%s)", reg_names[0]);
6860 case ADDRESS_SYMBOLIC:
6861 output_addr_const (file, mips_strip_unspec_address (x));
6867 /* When using assembler macros, keep track of all of small-data externs
6868 so that mips_file_end can emit the appropriate declarations for them.
6870 In most cases it would be safe (though pointless) to emit .externs
6871 for other symbols too. One exception is when an object is within
6872 the -G limit but declared by the user to be in a section other
6873 than .sbss or .sdata. */
6876 mips_output_external (FILE *file, tree decl, const char *name)
6878 default_elf_asm_output_external (file, decl, name);
6880 /* We output the name if and only if TREE_SYMBOL_REFERENCED is
6881 set in order to avoid putting out names that are never really
6883 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
6885 if (!TARGET_EXPLICIT_RELOCS && mips_in_small_data_p (decl))
6887 fputs ("\t.extern\t", file);
6888 assemble_name (file, name);
6889 fprintf (file, ", " HOST_WIDE_INT_PRINT_DEC "\n",
6890 int_size_in_bytes (TREE_TYPE (decl)));
6892 else if (TARGET_IRIX
6893 && mips_abi == ABI_32
6894 && TREE_CODE (decl) == FUNCTION_DECL)
6896 /* In IRIX 5 or IRIX 6 for the O32 ABI, we must output a
6897 `.global name .text' directive for every used but
6898 undefined function. If we don't, the linker may perform
6899 an optimization (skipping over the insns that set $gp)
6900 when it is unsafe. */
6901 fputs ("\t.globl ", file);
6902 assemble_name (file, name);
6903 fputs (" .text\n", file);
6908 /* Emit a new filename to a stream. If we are smuggling stabs, try to
6909 put out a MIPS ECOFF file and a stab. */
6912 mips_output_filename (FILE *stream, const char *name)
6915 /* If we are emitting DWARF-2, let dwarf2out handle the ".file"
6917 if (write_symbols == DWARF2_DEBUG)
6919 else if (mips_output_filename_first_time)
6921 mips_output_filename_first_time = 0;
6922 num_source_filenames += 1;
6923 current_function_file = name;
6924 fprintf (stream, "\t.file\t%d ", num_source_filenames);
6925 output_quoted_string (stream, name);
6926 putc ('\n', stream);
6929 /* If we are emitting stabs, let dbxout.c handle this (except for
6930 the mips_output_filename_first_time case). */
6931 else if (write_symbols == DBX_DEBUG)
6934 else if (name != current_function_file
6935 && strcmp (name, current_function_file) != 0)
6937 num_source_filenames += 1;
6938 current_function_file = name;
6939 fprintf (stream, "\t.file\t%d ", num_source_filenames);
6940 output_quoted_string (stream, name);
6941 putc ('\n', stream);
6945 /* Output an ASCII string, in a space-saving way. PREFIX is the string
6946 that should be written before the opening quote, such as "\t.ascii\t"
6947 for real string data or "\t# " for a comment. */
6950 mips_output_ascii (FILE *stream, const char *string_param, size_t len,
6955 register const unsigned char *string =
6956 (const unsigned char *)string_param;
6958 fprintf (stream, "%s\"", prefix);
6959 for (i = 0; i < len; i++)
6961 register int c = string[i];
6965 if (c == '\\' || c == '\"')
6967 putc ('\\', stream);
6975 fprintf (stream, "\\%03o", c);
6979 if (cur_pos > 72 && i+1 < len)
6982 fprintf (stream, "\"\n%s\"", prefix);
6985 fprintf (stream, "\"\n");
6988 /* Implement TARGET_ASM_FILE_START. */
6991 mips_file_start (void)
6993 default_file_start ();
6997 /* Generate a special section to describe the ABI switches used to
6998 produce the resultant binary. This used to be done by the assembler
6999 setting bits in the ELF header's flags field, but we have run out of
7000 bits. GDB needs this information in order to be able to correctly
7001 debug these binaries. See the function mips_gdbarch_init() in
7002 gdb/mips-tdep.c. This is unnecessary for the IRIX 5/6 ABIs and
7003 causes unnecessary IRIX 6 ld warnings. */
7004 const char * abi_string = NULL;
7008 case ABI_32: abi_string = "abi32"; break;
7009 case ABI_N32: abi_string = "abiN32"; break;
7010 case ABI_64: abi_string = "abi64"; break;
7011 case ABI_O64: abi_string = "abiO64"; break;
7012 case ABI_EABI: abi_string = TARGET_64BIT ? "eabi64" : "eabi32"; break;
7016 /* Note - we use fprintf directly rather than calling switch_to_section
7017 because in this way we can avoid creating an allocated section. We
7018 do not want this section to take up any space in the running
7020 fprintf (asm_out_file, "\t.section .mdebug.%s\n\t.previous\n",
7023 /* There is no ELF header flag to distinguish long32 forms of the
7024 EABI from long64 forms. Emit a special section to help tools
7025 such as GDB. Do the same for o64, which is sometimes used with
7027 if (mips_abi == ABI_EABI || mips_abi == ABI_O64)
7028 fprintf (asm_out_file, "\t.section .gcc_compiled_long%d\n"
7029 "\t.previous\n", TARGET_LONG64 ? 64 : 32);
7031 #ifdef HAVE_AS_GNU_ATTRIBUTE
7032 fprintf (asm_out_file, "\t.gnu_attribute 4, %d\n",
7033 TARGET_HARD_FLOAT_ABI ? (TARGET_DOUBLE_FLOAT ? 1 : 2) : 3);
7037 /* Generate the pseudo ops that System V.4 wants. */
7038 if (TARGET_ABICALLS)
7039 fprintf (asm_out_file, "\t.abicalls\n");
7041 if (flag_verbose_asm)
7042 fprintf (asm_out_file, "\n%s -G value = %d, Arch = %s, ISA = %d\n",
7044 mips_section_threshold, mips_arch_info->name, mips_isa);
7047 #ifdef BSS_SECTION_ASM_OP
7048 /* Implement ASM_OUTPUT_ALIGNED_BSS. This differs from the default only
7049 in the use of sbss. */
7052 mips_output_aligned_bss (FILE *stream, tree decl, const char *name,
7053 unsigned HOST_WIDE_INT size, int align)
7055 extern tree last_assemble_variable_decl;
7057 if (mips_in_small_data_p (decl))
7058 switch_to_section (get_named_section (NULL, ".sbss", 0));
7060 switch_to_section (bss_section);
7061 ASM_OUTPUT_ALIGN (stream, floor_log2 (align / BITS_PER_UNIT));
7062 last_assemble_variable_decl = decl;
7063 ASM_DECLARE_OBJECT_NAME (stream, name, decl);
7064 ASM_OUTPUT_SKIP (stream, size != 0 ? size : 1);
7068 /* Implement ASM_OUTPUT_ALIGNED_DECL_COMMON. This is usually the same as the
7069 elfos.h version, but we also need to handle -muninit-const-in-rodata. */
7072 mips_output_aligned_decl_common (FILE *stream, tree decl, const char *name,
7073 unsigned HOST_WIDE_INT size,
7076 /* If the target wants uninitialized const declarations in
7077 .rdata then don't put them in .comm. */
7078 if (TARGET_EMBEDDED_DATA && TARGET_UNINIT_CONST_IN_RODATA
7079 && TREE_CODE (decl) == VAR_DECL && TREE_READONLY (decl)
7080 && (DECL_INITIAL (decl) == 0 || DECL_INITIAL (decl) == error_mark_node))
7082 if (TREE_PUBLIC (decl) && DECL_NAME (decl))
7083 targetm.asm_out.globalize_label (stream, name);
7085 switch_to_section (readonly_data_section);
7086 ASM_OUTPUT_ALIGN (stream, floor_log2 (align / BITS_PER_UNIT));
7087 mips_declare_object (stream, name, "",
7088 ":\n\t.space\t" HOST_WIDE_INT_PRINT_UNSIGNED "\n",
7092 mips_declare_common_object (stream, name, "\n\t.comm\t",
7096 /* Declare a common object of SIZE bytes using asm directive INIT_STRING.
7097 NAME is the name of the object and ALIGN is the required alignment
7098 in bytes. TAKES_ALIGNMENT_P is true if the directive takes a third
7099 alignment argument. */
7102 mips_declare_common_object (FILE *stream, const char *name,
7103 const char *init_string,
7104 unsigned HOST_WIDE_INT size,
7105 unsigned int align, bool takes_alignment_p)
7107 if (!takes_alignment_p)
7109 size += (align / BITS_PER_UNIT) - 1;
7110 size -= size % (align / BITS_PER_UNIT);
7111 mips_declare_object (stream, name, init_string,
7112 "," HOST_WIDE_INT_PRINT_UNSIGNED "\n", size);
7115 mips_declare_object (stream, name, init_string,
7116 "," HOST_WIDE_INT_PRINT_UNSIGNED ",%u\n",
7117 size, align / BITS_PER_UNIT);
7120 /* Emit either a label, .comm, or .lcomm directive. When using assembler
7121 macros, mark the symbol as written so that mips_file_end won't emit an
7122 .extern for it. STREAM is the output file, NAME is the name of the
7123 symbol, INIT_STRING is the string that should be written before the
7124 symbol and FINAL_STRING is the string that should be written after it.
7125 FINAL_STRING is a printf() format that consumes the remaining arguments. */
7128 mips_declare_object (FILE *stream, const char *name, const char *init_string,
7129 const char *final_string, ...)
7133 fputs (init_string, stream);
7134 assemble_name (stream, name);
7135 va_start (ap, final_string);
7136 vfprintf (stream, final_string, ap);
7139 if (!TARGET_EXPLICIT_RELOCS)
7141 tree name_tree = get_identifier (name);
7142 TREE_ASM_WRITTEN (name_tree) = 1;
7146 #ifdef ASM_OUTPUT_SIZE_DIRECTIVE
7147 extern int size_directive_output;
7149 /* Implement ASM_DECLARE_OBJECT_NAME. This is like most of the standard ELF
7150 definitions except that it uses mips_declare_object() to emit the label. */
7153 mips_declare_object_name (FILE *stream, const char *name,
7154 tree decl ATTRIBUTE_UNUSED)
7156 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
7157 ASM_OUTPUT_TYPE_DIRECTIVE (stream, name, "object");
7160 size_directive_output = 0;
7161 if (!flag_inhibit_size_directive && DECL_SIZE (decl))
7165 size_directive_output = 1;
7166 size = int_size_in_bytes (TREE_TYPE (decl));
7167 ASM_OUTPUT_SIZE_DIRECTIVE (stream, name, size);
7170 mips_declare_object (stream, name, "", ":\n");
7173 /* Implement ASM_FINISH_DECLARE_OBJECT. This is generic ELF stuff. */
7176 mips_finish_declare_object (FILE *stream, tree decl, int top_level, int at_end)
7180 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
7181 if (!flag_inhibit_size_directive
7182 && DECL_SIZE (decl) != 0
7183 && !at_end && top_level
7184 && DECL_INITIAL (decl) == error_mark_node
7185 && !size_directive_output)
7189 size_directive_output = 1;
7190 size = int_size_in_bytes (TREE_TYPE (decl));
7191 ASM_OUTPUT_SIZE_DIRECTIVE (stream, name, size);
7196 /* Return true if X in context CONTEXT is a small data address that can
7197 be rewritten as a LO_SUM. */
7200 mips_rewrite_small_data_p (rtx x, enum mips_symbol_context context)
7202 enum mips_symbol_type symbol_type;
7204 return (TARGET_EXPLICIT_RELOCS
7205 && mips_symbolic_constant_p (x, context, &symbol_type)
7206 && symbol_type == SYMBOL_GP_RELATIVE);
7210 /* A for_each_rtx callback for mips_small_data_pattern_p. DATA is the
7211 containing MEM, or null if none. */
7214 mips_small_data_pattern_1 (rtx *loc, void *data)
7216 enum mips_symbol_context context;
7218 if (GET_CODE (*loc) == LO_SUM)
7223 if (for_each_rtx (&XEXP (*loc, 0), mips_small_data_pattern_1, *loc))
7228 context = data ? SYMBOL_CONTEXT_MEM : SYMBOL_CONTEXT_LEA;
7229 return mips_rewrite_small_data_p (*loc, context);
7232 /* Return true if OP refers to small data symbols directly, not through
7236 mips_small_data_pattern_p (rtx op)
7238 return for_each_rtx (&op, mips_small_data_pattern_1, 0);
7241 /* A for_each_rtx callback, used by mips_rewrite_small_data.
7242 DATA is the containing MEM, or null if none. */
7245 mips_rewrite_small_data_1 (rtx *loc, void *data)
7247 enum mips_symbol_context context;
7251 for_each_rtx (&XEXP (*loc, 0), mips_rewrite_small_data_1, *loc);
7255 context = data ? SYMBOL_CONTEXT_MEM : SYMBOL_CONTEXT_LEA;
7256 if (mips_rewrite_small_data_p (*loc, context))
7257 *loc = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, *loc);
7259 if (GET_CODE (*loc) == LO_SUM)
7265 /* If possible, rewrite OP so that it refers to small data using
7266 explicit relocations. */
7269 mips_rewrite_small_data (rtx op)
7271 op = copy_insn (op);
7272 for_each_rtx (&op, mips_rewrite_small_data_1, 0);
7276 /* Return true if the current function has an insn that implicitly
7280 mips_function_has_gp_insn (void)
7282 /* Don't bother rechecking if we found one last time. */
7283 if (!cfun->machine->has_gp_insn_p)
7287 push_topmost_sequence ();
7288 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7290 && GET_CODE (PATTERN (insn)) != USE
7291 && GET_CODE (PATTERN (insn)) != CLOBBER
7292 && (get_attr_got (insn) != GOT_UNSET
7293 || small_data_pattern (PATTERN (insn), VOIDmode)))
7295 pop_topmost_sequence ();
7297 cfun->machine->has_gp_insn_p = (insn != 0);
7299 return cfun->machine->has_gp_insn_p;
7303 /* Return the register that should be used as the global pointer
7304 within this function. Return 0 if the function doesn't need
7305 a global pointer. */
7308 mips_global_pointer (void)
7312 /* $gp is always available unless we're using a GOT. */
7313 if (!TARGET_USE_GOT)
7314 return GLOBAL_POINTER_REGNUM;
7316 /* We must always provide $gp when it is used implicitly. */
7317 if (!TARGET_EXPLICIT_RELOCS)
7318 return GLOBAL_POINTER_REGNUM;
7320 /* FUNCTION_PROFILER includes a jal macro, so we need to give it
7322 if (current_function_profile)
7323 return GLOBAL_POINTER_REGNUM;
7325 /* If the function has a nonlocal goto, $gp must hold the correct
7326 global pointer for the target function. */
7327 if (current_function_has_nonlocal_goto)
7328 return GLOBAL_POINTER_REGNUM;
7330 /* If the gp is never referenced, there's no need to initialize it.
7331 Note that reload can sometimes introduce constant pool references
7332 into a function that otherwise didn't need them. For example,
7333 suppose we have an instruction like:
7335 (set (reg:DF R1) (float:DF (reg:SI R2)))
7337 If R2 turns out to be constant such as 1, the instruction may have a
7338 REG_EQUAL note saying that R1 == 1.0. Reload then has the option of
7339 using this constant if R2 doesn't get allocated to a register.
7341 In cases like these, reload will have added the constant to the pool
7342 but no instruction will yet refer to it. */
7343 if (!df_regs_ever_live_p (GLOBAL_POINTER_REGNUM)
7344 && !current_function_uses_const_pool
7345 && !mips_function_has_gp_insn ())
7348 /* We need a global pointer, but perhaps we can use a call-clobbered
7349 register instead of $gp. */
7350 if (TARGET_CALL_SAVED_GP && current_function_is_leaf)
7351 for (regno = GP_REG_FIRST; regno <= GP_REG_LAST; regno++)
7352 if (!df_regs_ever_live_p (regno)
7353 && call_really_used_regs[regno]
7354 && !fixed_regs[regno]
7355 && regno != PIC_FUNCTION_ADDR_REGNUM)
7358 return GLOBAL_POINTER_REGNUM;
7362 /* Return true if the function return value MODE will get returned in a
7363 floating-point register. */
7366 mips_return_mode_in_fpr_p (enum machine_mode mode)
7368 return ((GET_MODE_CLASS (mode) == MODE_FLOAT
7369 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7370 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7371 && GET_MODE_UNIT_SIZE (mode) <= UNITS_PER_HWFPVALUE);
7374 /* Return a two-character string representing a function floating-point
7375 return mode, used to name MIPS16 function stubs. */
7378 mips16_call_stub_mode_suffix (enum machine_mode mode)
7382 else if (mode == DFmode)
7384 else if (mode == SCmode)
7386 else if (mode == DCmode)
7388 else if (mode == V2SFmode)
7394 /* Return true if the current function returns its value in a floating-point
7395 register in MIPS16 mode. */
7398 mips16_cfun_returns_in_fpr_p (void)
7400 tree return_type = DECL_RESULT (current_function_decl);
7401 return (TARGET_MIPS16
7402 && TARGET_HARD_FLOAT_ABI
7403 && !aggregate_value_p (return_type, current_function_decl)
7404 && mips_return_mode_in_fpr_p (DECL_MODE (return_type)));
7408 /* Return true if the current function must save REGNO. */
7411 mips_save_reg_p (unsigned int regno)
7413 /* We only need to save $gp if TARGET_CALL_SAVED_GP and only then
7414 if we have not chosen a call-clobbered substitute. */
7415 if (regno == GLOBAL_POINTER_REGNUM)
7416 return TARGET_CALL_SAVED_GP && cfun->machine->global_pointer == regno;
7418 /* Check call-saved registers. */
7419 if ((current_function_saves_all_registers || df_regs_ever_live_p (regno))
7420 && !call_really_used_regs[regno])
7423 /* Save both registers in an FPR pair if either one is used. This is
7424 needed for the case when MIN_FPRS_PER_FMT == 1, which allows the odd
7425 register to be used without the even register. */
7426 if (FP_REG_P (regno)
7427 && MAX_FPRS_PER_FMT == 2
7428 && df_regs_ever_live_p (regno + 1)
7429 && !call_really_used_regs[regno + 1])
7432 /* We need to save the old frame pointer before setting up a new one. */
7433 if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
7436 /* Check for registers that must be saved for FUNCTION_PROFILER. */
7437 if (current_function_profile && MIPS_SAVE_REG_FOR_PROFILING_P (regno))
7440 /* We need to save the incoming return address if it is ever clobbered
7441 within the function, if __builtin_eh_return is being used to set a
7442 different return address, or if a stub is being used to return a
7444 if (regno == GP_REG_FIRST + 31
7445 && (df_regs_ever_live_p (regno)
7446 || current_function_calls_eh_return
7447 || mips16_cfun_returns_in_fpr_p ()))
7453 /* Return the index of the lowest X in the range [0, SIZE) for which
7454 bit REGS[X] is set in MASK. Return SIZE if there is no such X. */
7457 mips16e_find_first_register (unsigned int mask, const unsigned char *regs,
7462 for (i = 0; i < size; i++)
7463 if (BITSET_P (mask, regs[i]))
7469 /* *MASK_PTR is a mask of general purpose registers and *GP_REG_SIZE_PTR
7470 is the number of bytes that they occupy. If *MASK_PTR contains REGS[X]
7471 for some X in [0, SIZE), adjust *MASK_PTR and *GP_REG_SIZE_PTR so that
7472 the same is true for all indexes (X, SIZE). */
7475 mips16e_mask_registers (unsigned int *mask_ptr, const unsigned char *regs,
7476 unsigned int size, HOST_WIDE_INT *gp_reg_size_ptr)
7480 i = mips16e_find_first_register (*mask_ptr, regs, size);
7481 for (i++; i < size; i++)
7482 if (!BITSET_P (*mask_ptr, regs[i]))
7484 *gp_reg_size_ptr += GET_MODE_SIZE (gpr_mode);
7485 *mask_ptr |= 1 << regs[i];
7489 /* Return the bytes needed to compute the frame pointer from the current
7490 stack pointer. SIZE is the size (in bytes) of the local variables.
7492 MIPS stack frames look like:
7494 Before call After call
7495 high +-----------------------+ +-----------------------+
7497 | caller's temps. | | caller's temps. |
7499 +-----------------------+ +-----------------------+
7501 | arguments on stack. | | arguments on stack. |
7503 +-----------------------+ +-----------------------+
7504 | 4 words to save | | 4 words to save |
7505 | arguments passed | | arguments passed |
7506 | in registers, even | | in registers, even |
7507 | if not passed. | | if not passed. |
7508 SP->+-----------------------+ VFP->+-----------------------+
7509 (VFP = SP+fp_sp_offset) | |\
7510 | fp register save | | fp_reg_size
7512 SP+gp_sp_offset->+-----------------------+
7514 | | gp register save | | gp_reg_size
7515 gp_reg_rounded | | |/
7516 | +-----------------------+
7517 \| alignment padding |
7518 +-----------------------+
7520 | local variables | | var_size
7522 +-----------------------+
7524 | alloca allocations |
7526 +-----------------------+
7528 cprestore_size | | GP save for V.4 abi |
7530 +-----------------------+
7532 | arguments on stack | |
7534 +-----------------------+ |
7535 | 4 words to save | | args_size
7536 | arguments passed | |
7537 | in registers, even | |
7538 | if not passed. | |
7539 low | (TARGET_OLDABI only) |/
7540 memory SP->+-----------------------+
7545 compute_frame_size (HOST_WIDE_INT size)
7548 HOST_WIDE_INT total_size; /* # bytes that the entire frame takes up */
7549 HOST_WIDE_INT var_size; /* # bytes that variables take up */
7550 HOST_WIDE_INT args_size; /* # bytes that outgoing arguments take up */
7551 HOST_WIDE_INT cprestore_size; /* # bytes that the cprestore slot takes up */
7552 HOST_WIDE_INT gp_reg_rounded; /* # bytes needed to store gp after rounding */
7553 HOST_WIDE_INT gp_reg_size; /* # bytes needed to store gp regs */
7554 HOST_WIDE_INT fp_reg_size; /* # bytes needed to store fp regs */
7555 unsigned int mask; /* mask of saved gp registers */
7556 unsigned int fmask; /* mask of saved fp registers */
7558 cfun->machine->global_pointer = mips_global_pointer ();
7564 var_size = MIPS_STACK_ALIGN (size);
7565 args_size = current_function_outgoing_args_size;
7566 cprestore_size = MIPS_STACK_ALIGN (STARTING_FRAME_OFFSET) - args_size;
7568 /* The space set aside by STARTING_FRAME_OFFSET isn't needed in leaf
7569 functions. If the function has local variables, we're committed
7570 to allocating it anyway. Otherwise reclaim it here. */
7571 if (var_size == 0 && current_function_is_leaf)
7572 cprestore_size = args_size = 0;
7574 /* The MIPS 3.0 linker does not like functions that dynamically
7575 allocate the stack and have 0 for STACK_DYNAMIC_OFFSET, since it
7576 looks like we are trying to create a second frame pointer to the
7577 function, so allocate some stack space to make it happy. */
7579 if (args_size == 0 && current_function_calls_alloca)
7580 args_size = 4 * UNITS_PER_WORD;
7582 total_size = var_size + args_size + cprestore_size;
7584 /* Calculate space needed for gp registers. */
7585 for (regno = GP_REG_FIRST; regno <= GP_REG_LAST; regno++)
7586 if (mips_save_reg_p (regno))
7588 gp_reg_size += GET_MODE_SIZE (gpr_mode);
7589 mask |= 1 << (regno - GP_REG_FIRST);
7592 /* We need to restore these for the handler. */
7593 if (current_function_calls_eh_return)
7598 regno = EH_RETURN_DATA_REGNO (i);
7599 if (regno == INVALID_REGNUM)
7601 gp_reg_size += GET_MODE_SIZE (gpr_mode);
7602 mask |= 1 << (regno - GP_REG_FIRST);
7606 /* The MIPS16e SAVE and RESTORE instructions have two ranges of registers:
7607 $a3-$a0 and $s2-$s8. If we save one register in the range, we must
7608 save all later registers too. */
7609 if (GENERATE_MIPS16E_SAVE_RESTORE)
7611 mips16e_mask_registers (&mask, mips16e_s2_s8_regs,
7612 ARRAY_SIZE (mips16e_s2_s8_regs), &gp_reg_size);
7613 mips16e_mask_registers (&mask, mips16e_a0_a3_regs,
7614 ARRAY_SIZE (mips16e_a0_a3_regs), &gp_reg_size);
7617 /* This loop must iterate over the same space as its companion in
7618 mips_for_each_saved_reg. */
7619 if (TARGET_HARD_FLOAT)
7620 for (regno = (FP_REG_LAST - MAX_FPRS_PER_FMT + 1);
7621 regno >= FP_REG_FIRST;
7622 regno -= MAX_FPRS_PER_FMT)
7623 if (mips_save_reg_p (regno))
7625 fp_reg_size += MAX_FPRS_PER_FMT * UNITS_PER_FPREG;
7626 fmask |= ((1 << MAX_FPRS_PER_FMT) - 1) << (regno - FP_REG_FIRST);
7629 gp_reg_rounded = MIPS_STACK_ALIGN (gp_reg_size);
7630 total_size += gp_reg_rounded + MIPS_STACK_ALIGN (fp_reg_size);
7632 /* Add in the space required for saving incoming register arguments. */
7633 total_size += current_function_pretend_args_size;
7634 total_size += MIPS_STACK_ALIGN (cfun->machine->varargs_size);
7636 /* Save other computed information. */
7637 cfun->machine->frame.total_size = total_size;
7638 cfun->machine->frame.var_size = var_size;
7639 cfun->machine->frame.args_size = args_size;
7640 cfun->machine->frame.cprestore_size = cprestore_size;
7641 cfun->machine->frame.gp_reg_size = gp_reg_size;
7642 cfun->machine->frame.fp_reg_size = fp_reg_size;
7643 cfun->machine->frame.mask = mask;
7644 cfun->machine->frame.fmask = fmask;
7645 cfun->machine->frame.initialized = reload_completed;
7646 cfun->machine->frame.num_gp = gp_reg_size / UNITS_PER_WORD;
7647 cfun->machine->frame.num_fp = (fp_reg_size
7648 / (MAX_FPRS_PER_FMT * UNITS_PER_FPREG));
7652 HOST_WIDE_INT offset;
7654 if (GENERATE_MIPS16E_SAVE_RESTORE)
7655 /* MIPS16e SAVE and RESTORE instructions require the GP save area
7656 to be aligned at the high end with any padding at the low end.
7657 It is only safe to use this calculation for o32, where we never
7658 have pretend arguments, and where any varargs will be saved in
7659 the caller-allocated area rather than at the top of the frame. */
7660 offset = (total_size - GET_MODE_SIZE (gpr_mode));
7662 offset = (args_size + cprestore_size + var_size
7663 + gp_reg_size - GET_MODE_SIZE (gpr_mode));
7664 cfun->machine->frame.gp_sp_offset = offset;
7665 cfun->machine->frame.gp_save_offset = offset - total_size;
7669 cfun->machine->frame.gp_sp_offset = 0;
7670 cfun->machine->frame.gp_save_offset = 0;
7675 HOST_WIDE_INT offset;
7677 offset = (args_size + cprestore_size + var_size
7678 + gp_reg_rounded + fp_reg_size
7679 - MAX_FPRS_PER_FMT * UNITS_PER_FPREG);
7680 cfun->machine->frame.fp_sp_offset = offset;
7681 cfun->machine->frame.fp_save_offset = offset - total_size;
7685 cfun->machine->frame.fp_sp_offset = 0;
7686 cfun->machine->frame.fp_save_offset = 0;
7689 /* Ok, we're done. */
7693 /* Implement INITIAL_ELIMINATION_OFFSET. FROM is either the frame
7694 pointer or argument pointer. TO is either the stack pointer or
7695 hard frame pointer. */
7698 mips_initial_elimination_offset (int from, int to)
7700 HOST_WIDE_INT offset;
7702 compute_frame_size (get_frame_size ());
7704 /* Set OFFSET to the offset from the stack pointer. */
7707 case FRAME_POINTER_REGNUM:
7711 case ARG_POINTER_REGNUM:
7712 offset = (cfun->machine->frame.total_size
7713 - current_function_pretend_args_size);
7720 if (TARGET_MIPS16 && to == HARD_FRAME_POINTER_REGNUM)
7721 offset -= cfun->machine->frame.args_size;
7726 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
7727 back to a previous frame. */
7729 mips_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
7734 return get_hard_reg_initial_val (Pmode, GP_REG_FIRST + 31);
7737 /* Use FN to save or restore register REGNO. MODE is the register's
7738 mode and OFFSET is the offset of its save slot from the current
7742 mips_save_restore_reg (enum machine_mode mode, int regno,
7743 HOST_WIDE_INT offset, mips_save_restore_fn fn)
7747 mem = gen_frame_mem (mode, plus_constant (stack_pointer_rtx, offset));
7749 fn (gen_rtx_REG (mode, regno), mem);
7753 /* Call FN for each register that is saved by the current function.
7754 SP_OFFSET is the offset of the current stack pointer from the start
7758 mips_for_each_saved_reg (HOST_WIDE_INT sp_offset, mips_save_restore_fn fn)
7760 enum machine_mode fpr_mode;
7761 HOST_WIDE_INT offset;
7764 /* Save registers starting from high to low. The debuggers prefer at least
7765 the return register be stored at func+4, and also it allows us not to
7766 need a nop in the epilogue if at least one register is reloaded in
7767 addition to return address. */
7768 offset = cfun->machine->frame.gp_sp_offset - sp_offset;
7769 for (regno = GP_REG_LAST; regno >= GP_REG_FIRST; regno--)
7770 if (BITSET_P (cfun->machine->frame.mask, regno - GP_REG_FIRST))
7772 mips_save_restore_reg (gpr_mode, regno, offset, fn);
7773 offset -= GET_MODE_SIZE (gpr_mode);
7776 /* This loop must iterate over the same space as its companion in
7777 compute_frame_size. */
7778 offset = cfun->machine->frame.fp_sp_offset - sp_offset;
7779 fpr_mode = (TARGET_SINGLE_FLOAT ? SFmode : DFmode);
7780 for (regno = (FP_REG_LAST - MAX_FPRS_PER_FMT + 1);
7781 regno >= FP_REG_FIRST;
7782 regno -= MAX_FPRS_PER_FMT)
7783 if (BITSET_P (cfun->machine->frame.fmask, regno - FP_REG_FIRST))
7785 mips_save_restore_reg (fpr_mode, regno, offset, fn);
7786 offset -= GET_MODE_SIZE (fpr_mode);
7790 /* If we're generating n32 or n64 abicalls, and the current function
7791 does not use $28 as its global pointer, emit a cplocal directive.
7792 Use pic_offset_table_rtx as the argument to the directive. */
7795 mips_output_cplocal (void)
7797 if (!TARGET_EXPLICIT_RELOCS
7798 && cfun->machine->global_pointer > 0
7799 && cfun->machine->global_pointer != GLOBAL_POINTER_REGNUM)
7800 output_asm_insn (".cplocal %+", 0);
7803 /* Return the style of GP load sequence that is being used for the
7804 current function. */
7806 enum mips_loadgp_style
7807 mips_current_loadgp_style (void)
7809 if (!TARGET_USE_GOT || cfun->machine->global_pointer == 0)
7815 if (TARGET_ABSOLUTE_ABICALLS)
7816 return LOADGP_ABSOLUTE;
7818 return TARGET_NEWABI ? LOADGP_NEWABI : LOADGP_OLDABI;
7821 /* The __gnu_local_gp symbol. */
7823 static GTY(()) rtx mips_gnu_local_gp;
7825 /* If we're generating n32 or n64 abicalls, emit instructions
7826 to set up the global pointer. */
7829 mips_emit_loadgp (void)
7831 rtx addr, offset, incoming_address, base, index;
7833 switch (mips_current_loadgp_style ())
7835 case LOADGP_ABSOLUTE:
7836 if (mips_gnu_local_gp == NULL)
7838 mips_gnu_local_gp = gen_rtx_SYMBOL_REF (Pmode, "__gnu_local_gp");
7839 SYMBOL_REF_FLAGS (mips_gnu_local_gp) |= SYMBOL_FLAG_LOCAL;
7841 emit_insn (gen_loadgp_absolute (mips_gnu_local_gp));
7845 addr = XEXP (DECL_RTL (current_function_decl), 0);
7846 offset = mips_unspec_address (addr, SYMBOL_GOTOFF_LOADGP);
7847 incoming_address = gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM);
7848 emit_insn (gen_loadgp_newabi (offset, incoming_address));
7849 if (!TARGET_EXPLICIT_RELOCS)
7850 emit_insn (gen_loadgp_blockage ());
7854 base = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (VXWORKS_GOTT_BASE));
7855 index = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (VXWORKS_GOTT_INDEX));
7856 emit_insn (gen_loadgp_rtp (base, index));
7857 if (!TARGET_EXPLICIT_RELOCS)
7858 emit_insn (gen_loadgp_blockage ());
7866 /* Set up the stack and frame (if desired) for the function. */
7869 mips_output_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
7872 HOST_WIDE_INT tsize = cfun->machine->frame.total_size;
7874 #ifdef SDB_DEBUGGING_INFO
7875 if (debug_info_level != DINFO_LEVEL_TERSE && write_symbols == SDB_DEBUG)
7876 SDB_OUTPUT_SOURCE_LINE (file, DECL_SOURCE_LINE (current_function_decl));
7879 /* In mips16 mode, we may need to generate a 32 bit to handle
7880 floating point arguments. The linker will arrange for any 32-bit
7881 functions to call this stub, which will then jump to the 16-bit
7884 && TARGET_HARD_FLOAT_ABI
7885 && current_function_args_info.fp_code != 0)
7886 build_mips16_function_stub (file);
7888 /* Select the mips16 mode for this function. */
7890 fprintf (file, "\t.set\tmips16\n");
7892 fprintf (file, "\t.set\tnomips16\n");
7894 if (!FUNCTION_NAME_ALREADY_DECLARED)
7896 /* Get the function name the same way that toplev.c does before calling
7897 assemble_start_function. This is needed so that the name used here
7898 exactly matches the name used in ASM_DECLARE_FUNCTION_NAME. */
7899 fnname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
7901 if (!flag_inhibit_size_directive)
7903 fputs ("\t.ent\t", file);
7904 assemble_name (file, fnname);
7908 assemble_name (file, fnname);
7909 fputs (":\n", file);
7912 /* Stop mips_file_end from treating this function as external. */
7913 if (TARGET_IRIX && mips_abi == ABI_32)
7914 TREE_ASM_WRITTEN (DECL_NAME (cfun->decl)) = 1;
7916 if (!flag_inhibit_size_directive)
7918 /* .frame FRAMEREG, FRAMESIZE, RETREG */
7920 "\t.frame\t%s," HOST_WIDE_INT_PRINT_DEC ",%s\t\t"
7921 "# vars= " HOST_WIDE_INT_PRINT_DEC ", regs= %d/%d"
7922 ", args= " HOST_WIDE_INT_PRINT_DEC
7923 ", gp= " HOST_WIDE_INT_PRINT_DEC "\n",
7924 (reg_names[(frame_pointer_needed)
7925 ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM]),
7926 ((frame_pointer_needed && TARGET_MIPS16)
7927 ? tsize - cfun->machine->frame.args_size
7929 reg_names[GP_REG_FIRST + 31],
7930 cfun->machine->frame.var_size,
7931 cfun->machine->frame.num_gp,
7932 cfun->machine->frame.num_fp,
7933 cfun->machine->frame.args_size,
7934 cfun->machine->frame.cprestore_size);
7936 /* .mask MASK, GPOFFSET; .fmask FPOFFSET */
7937 fprintf (file, "\t.mask\t0x%08x," HOST_WIDE_INT_PRINT_DEC "\n",
7938 cfun->machine->frame.mask,
7939 cfun->machine->frame.gp_save_offset);
7940 fprintf (file, "\t.fmask\t0x%08x," HOST_WIDE_INT_PRINT_DEC "\n",
7941 cfun->machine->frame.fmask,
7942 cfun->machine->frame.fp_save_offset);
7945 OLD_SP == *FRAMEREG + FRAMESIZE => can find old_sp from nominated FP reg.
7946 HIGHEST_GP_SAVED == *FRAMEREG + FRAMESIZE + GPOFFSET => can find saved regs. */
7949 if (mips_current_loadgp_style () == LOADGP_OLDABI)
7951 /* Handle the initialization of $gp for SVR4 PIC. */
7952 if (!cfun->machine->all_noreorder_p)
7953 output_asm_insn ("%(.cpload\t%^%)", 0);
7955 output_asm_insn ("%(.cpload\t%^\n\t%<", 0);
7957 else if (cfun->machine->all_noreorder_p)
7958 output_asm_insn ("%(%<", 0);
7960 /* Tell the assembler which register we're using as the global
7961 pointer. This is needed for thunks, since they can use either
7962 explicit relocs or assembler macros. */
7963 mips_output_cplocal ();
7966 /* Make the last instruction frame related and note that it performs
7967 the operation described by FRAME_PATTERN. */
7970 mips_set_frame_expr (rtx frame_pattern)
7974 insn = get_last_insn ();
7975 RTX_FRAME_RELATED_P (insn) = 1;
7976 REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7982 /* Return a frame-related rtx that stores REG at MEM.
7983 REG must be a single register. */
7986 mips_frame_set (rtx mem, rtx reg)
7990 /* If we're saving the return address register and the dwarf return
7991 address column differs from the hard register number, adjust the
7992 note reg to refer to the former. */
7993 if (REGNO (reg) == GP_REG_FIRST + 31
7994 && DWARF_FRAME_RETURN_COLUMN != GP_REG_FIRST + 31)
7995 reg = gen_rtx_REG (GET_MODE (reg), DWARF_FRAME_RETURN_COLUMN);
7997 set = gen_rtx_SET (VOIDmode, mem, reg);
7998 RTX_FRAME_RELATED_P (set) = 1;
8004 /* Save register REG to MEM. Make the instruction frame-related. */
8007 mips_save_reg (rtx reg, rtx mem)
8009 if (GET_MODE (reg) == DFmode && !TARGET_FLOAT64)
8013 if (mips_split_64bit_move_p (mem, reg))
8014 mips_split_doubleword_move (mem, reg);
8016 mips_emit_move (mem, reg);
8018 x1 = mips_frame_set (mips_subword (mem, 0), mips_subword (reg, 0));
8019 x2 = mips_frame_set (mips_subword (mem, 1), mips_subword (reg, 1));
8020 mips_set_frame_expr (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, x1, x2)));
8025 && REGNO (reg) != GP_REG_FIRST + 31
8026 && !M16_REG_P (REGNO (reg)))
8028 /* Save a non-mips16 register by moving it through a temporary.
8029 We don't need to do this for $31 since there's a special
8030 instruction for it. */
8031 mips_emit_move (MIPS_PROLOGUE_TEMP (GET_MODE (reg)), reg);
8032 mips_emit_move (mem, MIPS_PROLOGUE_TEMP (GET_MODE (reg)));
8035 mips_emit_move (mem, reg);
8037 mips_set_frame_expr (mips_frame_set (mem, reg));
8041 /* Return a move between register REGNO and memory location SP + OFFSET.
8042 Make the move a load if RESTORE_P, otherwise make it a frame-related
8046 mips16e_save_restore_reg (bool restore_p, HOST_WIDE_INT offset,
8051 mem = gen_frame_mem (SImode, plus_constant (stack_pointer_rtx, offset));
8052 reg = gen_rtx_REG (SImode, regno);
8054 ? gen_rtx_SET (VOIDmode, reg, mem)
8055 : mips_frame_set (mem, reg));
8058 /* Return RTL for a MIPS16e SAVE or RESTORE instruction; RESTORE_P says which.
8059 The instruction must:
8061 - Allocate or deallocate SIZE bytes in total; SIZE is known
8064 - Save or restore as many registers in *MASK_PTR as possible.
8065 The instruction saves the first registers at the top of the
8066 allocated area, with the other registers below it.
8068 - Save NARGS argument registers above the allocated area.
8070 (NARGS is always zero if RESTORE_P.)
8072 The SAVE and RESTORE instructions cannot save and restore all general
8073 registers, so there may be some registers left over for the caller to
8074 handle. Destructively modify *MASK_PTR so that it contains the registers
8075 that still need to be saved or restored. The caller can save these
8076 registers in the memory immediately below *OFFSET_PTR, which is a
8077 byte offset from the bottom of the allocated stack area. */
8080 mips16e_build_save_restore (bool restore_p, unsigned int *mask_ptr,
8081 HOST_WIDE_INT *offset_ptr, unsigned int nargs,
8085 HOST_WIDE_INT offset, top_offset;
8086 unsigned int i, regno;
8089 gcc_assert (cfun->machine->frame.fp_reg_size == 0);
8091 /* Calculate the number of elements in the PARALLEL. We need one element
8092 for the stack adjustment, one for each argument register save, and one
8093 for each additional register move. */
8095 for (i = 0; i < ARRAY_SIZE (mips16e_save_restore_regs); i++)
8096 if (BITSET_P (*mask_ptr, mips16e_save_restore_regs[i]))
8099 /* Create the final PARALLEL. */
8100 pattern = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (n));
8103 /* Add the stack pointer adjustment. */
8104 set = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8105 plus_constant (stack_pointer_rtx,
8106 restore_p ? size : -size));
8107 RTX_FRAME_RELATED_P (set) = 1;
8108 XVECEXP (pattern, 0, n++) = set;
8110 /* Stack offsets in the PARALLEL are relative to the old stack pointer. */
8111 top_offset = restore_p ? size : 0;
8113 /* Save the arguments. */
8114 for (i = 0; i < nargs; i++)
8116 offset = top_offset + i * GET_MODE_SIZE (gpr_mode);
8117 set = mips16e_save_restore_reg (restore_p, offset, GP_ARG_FIRST + i);
8118 XVECEXP (pattern, 0, n++) = set;
8121 /* Then fill in the other register moves. */
8122 offset = top_offset;
8123 for (i = 0; i < ARRAY_SIZE (mips16e_save_restore_regs); i++)
8125 regno = mips16e_save_restore_regs[i];
8126 if (BITSET_P (*mask_ptr, regno))
8128 offset -= UNITS_PER_WORD;
8129 set = mips16e_save_restore_reg (restore_p, offset, regno);
8130 XVECEXP (pattern, 0, n++) = set;
8131 *mask_ptr &= ~(1 << regno);
8135 /* Tell the caller what offset it should use for the remaining registers. */
8136 *offset_ptr = size + (offset - top_offset) + size;
8138 gcc_assert (n == XVECLEN (pattern, 0));
8143 /* PATTERN is a PARALLEL whose first element adds ADJUST to the stack
8144 pointer. Return true if PATTERN matches the kind of instruction
8145 generated by mips16e_build_save_restore. If INFO is nonnull,
8146 initialize it when returning true. */
8149 mips16e_save_restore_pattern_p (rtx pattern, HOST_WIDE_INT adjust,
8150 struct mips16e_save_restore_info *info)
8152 unsigned int i, nargs, mask;
8153 HOST_WIDE_INT top_offset, save_offset, offset, extra;
8154 rtx set, reg, mem, base;
8157 if (!GENERATE_MIPS16E_SAVE_RESTORE)
8160 /* Stack offsets in the PARALLEL are relative to the old stack pointer. */
8161 top_offset = adjust > 0 ? adjust : 0;
8163 /* Interpret all other members of the PARALLEL. */
8164 save_offset = top_offset - GET_MODE_SIZE (gpr_mode);
8168 for (n = 1; n < XVECLEN (pattern, 0); n++)
8170 /* Check that we have a SET. */
8171 set = XVECEXP (pattern, 0, n);
8172 if (GET_CODE (set) != SET)
8175 /* Check that the SET is a load (if restoring) or a store
8177 mem = adjust > 0 ? SET_SRC (set) : SET_DEST (set);
8181 /* Check that the address is the sum of the stack pointer and a
8182 possibly-zero constant offset. */
8183 mips_split_plus (XEXP (mem, 0), &base, &offset);
8184 if (base != stack_pointer_rtx)
8187 /* Check that SET's other operand is a register. */
8188 reg = adjust > 0 ? SET_DEST (set) : SET_SRC (set);
8192 /* Check for argument saves. */
8193 if (offset == top_offset + nargs * GET_MODE_SIZE (gpr_mode)
8194 && REGNO (reg) == GP_ARG_FIRST + nargs)
8196 else if (offset == save_offset)
8198 while (mips16e_save_restore_regs[i++] != REGNO (reg))
8199 if (i == ARRAY_SIZE (mips16e_save_restore_regs))
8202 mask |= 1 << REGNO (reg);
8203 save_offset -= GET_MODE_SIZE (gpr_mode);
8209 /* Check that the restrictions on register ranges are met. */
8211 mips16e_mask_registers (&mask, mips16e_s2_s8_regs,
8212 ARRAY_SIZE (mips16e_s2_s8_regs), &extra);
8213 mips16e_mask_registers (&mask, mips16e_a0_a3_regs,
8214 ARRAY_SIZE (mips16e_a0_a3_regs), &extra);
8218 /* Make sure that the topmost argument register is not saved twice.
8219 The checks above ensure that the same is then true for the other
8220 argument registers. */
8221 if (nargs > 0 && BITSET_P (mask, GP_ARG_FIRST + nargs - 1))
8224 /* Pass back information, if requested. */
8227 info->nargs = nargs;
8229 info->size = (adjust > 0 ? adjust : -adjust);
8235 /* Add a MIPS16e SAVE or RESTORE register-range argument to string S
8236 for the register range [MIN_REG, MAX_REG]. Return a pointer to
8237 the null terminator. */
8240 mips16e_add_register_range (char *s, unsigned int min_reg,
8241 unsigned int max_reg)
8243 if (min_reg != max_reg)
8244 s += sprintf (s, ",%s-%s", reg_names[min_reg], reg_names[max_reg]);
8246 s += sprintf (s, ",%s", reg_names[min_reg]);
8250 /* Return the assembly instruction for a MIPS16e SAVE or RESTORE instruction.
8251 PATTERN and ADJUST are as for mips16e_save_restore_pattern_p. */
8254 mips16e_output_save_restore (rtx pattern, HOST_WIDE_INT adjust)
8256 static char buffer[300];
8258 struct mips16e_save_restore_info info;
8259 unsigned int i, end;
8262 /* Parse the pattern. */
8263 if (!mips16e_save_restore_pattern_p (pattern, adjust, &info))
8266 /* Add the mnemonic. */
8267 s = strcpy (buffer, adjust > 0 ? "restore\t" : "save\t");
8270 /* Save the arguments. */
8272 s += sprintf (s, "%s-%s,", reg_names[GP_ARG_FIRST],
8273 reg_names[GP_ARG_FIRST + info.nargs - 1]);
8274 else if (info.nargs == 1)
8275 s += sprintf (s, "%s,", reg_names[GP_ARG_FIRST]);
8277 /* Emit the amount of stack space to allocate or deallocate. */
8278 s += sprintf (s, "%d", (int) info.size);
8280 /* Save or restore $16. */
8281 if (BITSET_P (info.mask, 16))
8282 s += sprintf (s, ",%s", reg_names[GP_REG_FIRST + 16]);
8284 /* Save or restore $17. */
8285 if (BITSET_P (info.mask, 17))
8286 s += sprintf (s, ",%s", reg_names[GP_REG_FIRST + 17]);
8288 /* Save or restore registers in the range $s2...$s8, which
8289 mips16e_s2_s8_regs lists in decreasing order. Note that this
8290 is a software register range; the hardware registers are not
8291 numbered consecutively. */
8292 end = ARRAY_SIZE (mips16e_s2_s8_regs);
8293 i = mips16e_find_first_register (info.mask, mips16e_s2_s8_regs, end);
8295 s = mips16e_add_register_range (s, mips16e_s2_s8_regs[end - 1],
8296 mips16e_s2_s8_regs[i]);
8298 /* Save or restore registers in the range $a0...$a3. */
8299 end = ARRAY_SIZE (mips16e_a0_a3_regs);
8300 i = mips16e_find_first_register (info.mask, mips16e_a0_a3_regs, end);
8302 s = mips16e_add_register_range (s, mips16e_a0_a3_regs[i],
8303 mips16e_a0_a3_regs[end - 1]);
8305 /* Save or restore $31. */
8306 if (BITSET_P (info.mask, 31))
8307 s += sprintf (s, ",%s", reg_names[GP_REG_FIRST + 31]);
8312 /* Return a simplified form of X using the register values in REG_VALUES.
8313 REG_VALUES[R] is the last value assigned to hard register R, or null
8314 if R has not been modified.
8316 This function is rather limited, but is good enough for our purposes. */
8319 mips16e_collect_propagate_value (rtx x, rtx *reg_values)
8323 x = avoid_constant_pool_reference (x);
8327 x0 = mips16e_collect_propagate_value (XEXP (x, 0), reg_values);
8328 return simplify_gen_unary (GET_CODE (x), GET_MODE (x),
8329 x0, GET_MODE (XEXP (x, 0)));
8332 if (ARITHMETIC_P (x))
8334 x0 = mips16e_collect_propagate_value (XEXP (x, 0), reg_values);
8335 x1 = mips16e_collect_propagate_value (XEXP (x, 1), reg_values);
8336 return simplify_gen_binary (GET_CODE (x), GET_MODE (x), x0, x1);
8340 && reg_values[REGNO (x)]
8341 && !rtx_unstable_p (reg_values[REGNO (x)]))
8342 return reg_values[REGNO (x)];
8347 /* Return true if (set DEST SRC) stores an argument register into its
8348 caller-allocated save slot, storing the number of that argument
8349 register in *REGNO_PTR if so. REG_VALUES is as for
8350 mips16e_collect_propagate_value. */
8353 mips16e_collect_argument_save_p (rtx dest, rtx src, rtx *reg_values,
8354 unsigned int *regno_ptr)
8356 unsigned int argno, regno;
8357 HOST_WIDE_INT offset, required_offset;
8360 /* Check that this is a word-mode store. */
8361 if (!MEM_P (dest) || !REG_P (src) || GET_MODE (dest) != word_mode)
8364 /* Check that the register being saved is an unmodified argument
8366 regno = REGNO (src);
8367 if (regno < GP_ARG_FIRST || regno > GP_ARG_LAST || reg_values[regno])
8369 argno = regno - GP_ARG_FIRST;
8371 /* Check whether the address is an appropriate stack pointer or
8372 frame pointer access. The frame pointer is offset from the
8373 stack pointer by the size of the outgoing arguments. */
8374 addr = mips16e_collect_propagate_value (XEXP (dest, 0), reg_values);
8375 mips_split_plus (addr, &base, &offset);
8376 required_offset = cfun->machine->frame.total_size + argno * UNITS_PER_WORD;
8377 if (base == hard_frame_pointer_rtx)
8378 required_offset -= cfun->machine->frame.args_size;
8379 else if (base != stack_pointer_rtx)
8381 if (offset != required_offset)
8388 /* A subroutine of mips_expand_prologue, called only when generating
8389 MIPS16e SAVE instructions. Search the start of the function for any
8390 instructions that save argument registers into their caller-allocated
8391 save slots. Delete such instructions and return a value N such that
8392 saving [GP_ARG_FIRST, GP_ARG_FIRST + N) would make all the deleted
8393 instructions redundant. */
8396 mips16e_collect_argument_saves (void)
8398 rtx reg_values[FIRST_PSEUDO_REGISTER];
8399 rtx insn, next, set, dest, src;
8400 unsigned int nargs, regno;
8402 push_topmost_sequence ();
8404 memset (reg_values, 0, sizeof (reg_values));
8405 for (insn = get_insns (); insn; insn = next)
8407 next = NEXT_INSN (insn);
8414 set = PATTERN (insn);
8415 if (GET_CODE (set) != SET)
8418 dest = SET_DEST (set);
8419 src = SET_SRC (set);
8420 if (mips16e_collect_argument_save_p (dest, src, reg_values, ®no))
8422 if (!BITSET_P (cfun->machine->frame.mask, regno))
8425 nargs = MAX (nargs, (regno - GP_ARG_FIRST) + 1);
8428 else if (REG_P (dest) && GET_MODE (dest) == word_mode)
8429 reg_values[REGNO (dest)]
8430 = mips16e_collect_propagate_value (src, reg_values);
8434 pop_topmost_sequence ();
8439 /* Expand the prologue into a bunch of separate insns. */
8442 mips_expand_prologue (void)
8448 if (cfun->machine->global_pointer > 0)
8449 SET_REGNO (pic_offset_table_rtx, cfun->machine->global_pointer);
8451 size = compute_frame_size (get_frame_size ());
8453 /* Save the registers. Allocate up to MIPS_MAX_FIRST_STACK_STEP
8454 bytes beforehand; this is enough to cover the register save area
8455 without going out of range. */
8456 if ((cfun->machine->frame.mask | cfun->machine->frame.fmask) != 0)
8458 HOST_WIDE_INT step1;
8460 step1 = MIN (size, MIPS_MAX_FIRST_STACK_STEP);
8462 if (GENERATE_MIPS16E_SAVE_RESTORE)
8464 HOST_WIDE_INT offset;
8465 unsigned int mask, regno;
8467 /* Try to merge argument stores into the save instruction. */
8468 nargs = mips16e_collect_argument_saves ();
8470 /* Build the save instruction. */
8471 mask = cfun->machine->frame.mask;
8472 insn = mips16e_build_save_restore (false, &mask, &offset,
8474 RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;
8477 /* Check if we need to save other registers. */
8478 for (regno = GP_REG_FIRST; regno < GP_REG_LAST; regno++)
8479 if (BITSET_P (mask, regno - GP_REG_FIRST))
8481 offset -= GET_MODE_SIZE (gpr_mode);
8482 mips_save_restore_reg (gpr_mode, regno, offset, mips_save_reg);
8487 insn = gen_add3_insn (stack_pointer_rtx,
8490 RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;
8492 mips_for_each_saved_reg (size, mips_save_reg);
8496 /* Allocate the rest of the frame. */
8499 if (SMALL_OPERAND (-size))
8500 RTX_FRAME_RELATED_P (emit_insn (gen_add3_insn (stack_pointer_rtx,
8502 GEN_INT (-size)))) = 1;
8505 mips_emit_move (MIPS_PROLOGUE_TEMP (Pmode), GEN_INT (size));
8508 /* There are no instructions to add or subtract registers
8509 from the stack pointer, so use the frame pointer as a
8510 temporary. We should always be using a frame pointer
8511 in this case anyway. */
8512 gcc_assert (frame_pointer_needed);
8513 mips_emit_move (hard_frame_pointer_rtx, stack_pointer_rtx);
8514 emit_insn (gen_sub3_insn (hard_frame_pointer_rtx,
8515 hard_frame_pointer_rtx,
8516 MIPS_PROLOGUE_TEMP (Pmode)));
8517 mips_emit_move (stack_pointer_rtx, hard_frame_pointer_rtx);
8520 emit_insn (gen_sub3_insn (stack_pointer_rtx,
8522 MIPS_PROLOGUE_TEMP (Pmode)));
8524 /* Describe the combined effect of the previous instructions. */
8526 (gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8527 plus_constant (stack_pointer_rtx, -size)));
8531 /* Set up the frame pointer, if we're using one. In mips16 code,
8532 we point the frame pointer ahead of the outgoing argument area.
8533 This should allow more variables & incoming arguments to be
8534 accessed with unextended instructions. */
8535 if (frame_pointer_needed)
8537 if (TARGET_MIPS16 && cfun->machine->frame.args_size != 0)
8539 rtx offset = GEN_INT (cfun->machine->frame.args_size);
8540 if (SMALL_OPERAND (cfun->machine->frame.args_size))
8542 (emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
8547 mips_emit_move (MIPS_PROLOGUE_TEMP (Pmode), offset);
8548 mips_emit_move (hard_frame_pointer_rtx, stack_pointer_rtx);
8549 emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
8550 hard_frame_pointer_rtx,
8551 MIPS_PROLOGUE_TEMP (Pmode)));
8553 (gen_rtx_SET (VOIDmode, hard_frame_pointer_rtx,
8554 plus_constant (stack_pointer_rtx,
8555 cfun->machine->frame.args_size)));
8559 RTX_FRAME_RELATED_P (mips_emit_move (hard_frame_pointer_rtx,
8560 stack_pointer_rtx)) = 1;
8563 mips_emit_loadgp ();
8565 /* If generating o32/o64 abicalls, save $gp on the stack. */
8566 if (TARGET_ABICALLS && TARGET_OLDABI && !current_function_is_leaf)
8567 emit_insn (gen_cprestore (GEN_INT (current_function_outgoing_args_size)));
8569 /* If we are profiling, make sure no instructions are scheduled before
8570 the call to mcount. */
8572 if (current_function_profile)
8573 emit_insn (gen_blockage ());
8576 /* Do any necessary cleanup after a function to restore stack, frame,
8579 #define RA_MASK BITMASK_HIGH /* 1 << 31 */
8582 mips_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
8583 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
8585 /* Reinstate the normal $gp. */
8586 SET_REGNO (pic_offset_table_rtx, GLOBAL_POINTER_REGNUM);
8587 mips_output_cplocal ();
8589 if (cfun->machine->all_noreorder_p)
8591 /* Avoid using %>%) since it adds excess whitespace. */
8592 output_asm_insn (".set\tmacro", 0);
8593 output_asm_insn (".set\treorder", 0);
8594 set_noreorder = set_nomacro = 0;
8597 if (!FUNCTION_NAME_ALREADY_DECLARED && !flag_inhibit_size_directive)
8601 /* Get the function name the same way that toplev.c does before calling
8602 assemble_start_function. This is needed so that the name used here
8603 exactly matches the name used in ASM_DECLARE_FUNCTION_NAME. */
8604 fnname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
8605 fputs ("\t.end\t", file);
8606 assemble_name (file, fnname);
8611 /* Emit instructions to restore register REG from slot MEM. */
8614 mips_restore_reg (rtx reg, rtx mem)
8616 /* There's no mips16 instruction to load $31 directly. Load into
8617 $7 instead and adjust the return insn appropriately. */
8618 if (TARGET_MIPS16 && REGNO (reg) == GP_REG_FIRST + 31)
8619 reg = gen_rtx_REG (GET_MODE (reg), 7);
8621 if (TARGET_MIPS16 && !M16_REG_P (REGNO (reg)))
8623 /* Can't restore directly; move through a temporary. */
8624 mips_emit_move (MIPS_EPILOGUE_TEMP (GET_MODE (reg)), mem);
8625 mips_emit_move (reg, MIPS_EPILOGUE_TEMP (GET_MODE (reg)));
8628 mips_emit_move (reg, mem);
8632 /* Expand the epilogue into a bunch of separate insns. SIBCALL_P is true
8633 if this epilogue precedes a sibling call, false if it is for a normal
8634 "epilogue" pattern. */
8637 mips_expand_epilogue (int sibcall_p)
8639 HOST_WIDE_INT step1, step2;
8642 if (!sibcall_p && mips_can_use_return_insn ())
8644 emit_jump_insn (gen_return ());
8648 /* In mips16 mode, if the return value should go into a floating-point
8649 register, we need to call a helper routine to copy it over. */
8650 if (mips16_cfun_returns_in_fpr_p ())
8659 enum machine_mode return_mode;
8661 return_type = DECL_RESULT (current_function_decl);
8662 return_mode = DECL_MODE (return_type);
8664 name = ACONCAT (("__mips16_ret_",
8665 mips16_call_stub_mode_suffix (return_mode),
8667 id = get_identifier (name);
8668 func = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (id));
8669 retval = gen_rtx_REG (return_mode, GP_RETURN);
8670 call = gen_call_value_internal (retval, func, const0_rtx);
8671 insn = emit_call_insn (call);
8672 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), retval);
8675 /* Split the frame into two. STEP1 is the amount of stack we should
8676 deallocate before restoring the registers. STEP2 is the amount we
8677 should deallocate afterwards.
8679 Start off by assuming that no registers need to be restored. */
8680 step1 = cfun->machine->frame.total_size;
8683 /* Work out which register holds the frame address. Account for the
8684 frame pointer offset used by mips16 code. */
8685 if (!frame_pointer_needed)
8686 base = stack_pointer_rtx;
8689 base = hard_frame_pointer_rtx;
8691 step1 -= cfun->machine->frame.args_size;
8694 /* If we need to restore registers, deallocate as much stack as
8695 possible in the second step without going out of range. */
8696 if ((cfun->machine->frame.mask | cfun->machine->frame.fmask) != 0)
8698 step2 = MIN (step1, MIPS_MAX_FIRST_STACK_STEP);
8702 /* Set TARGET to BASE + STEP1. */
8708 /* Get an rtx for STEP1 that we can add to BASE. */
8709 adjust = GEN_INT (step1);
8710 if (!SMALL_OPERAND (step1))
8712 mips_emit_move (MIPS_EPILOGUE_TEMP (Pmode), adjust);
8713 adjust = MIPS_EPILOGUE_TEMP (Pmode);
8716 /* Normal mode code can copy the result straight into $sp. */
8718 target = stack_pointer_rtx;
8720 emit_insn (gen_add3_insn (target, base, adjust));
8723 /* Copy TARGET into the stack pointer. */
8724 if (target != stack_pointer_rtx)
8725 mips_emit_move (stack_pointer_rtx, target);
8727 /* If we're using addressing macros, $gp is implicitly used by all
8728 SYMBOL_REFs. We must emit a blockage insn before restoring $gp
8730 if (TARGET_CALL_SAVED_GP && !TARGET_EXPLICIT_RELOCS)
8731 emit_insn (gen_blockage ());
8733 if (GENERATE_MIPS16E_SAVE_RESTORE && cfun->machine->frame.mask != 0)
8735 unsigned int regno, mask;
8736 HOST_WIDE_INT offset;
8739 /* Generate the restore instruction. */
8740 mask = cfun->machine->frame.mask;
8741 restore = mips16e_build_save_restore (true, &mask, &offset, 0, step2);
8743 /* Restore any other registers manually. */
8744 for (regno = GP_REG_FIRST; regno < GP_REG_LAST; regno++)
8745 if (BITSET_P (mask, regno - GP_REG_FIRST))
8747 offset -= GET_MODE_SIZE (gpr_mode);
8748 mips_save_restore_reg (gpr_mode, regno, offset, mips_restore_reg);
8751 /* Restore the remaining registers and deallocate the final bit
8753 emit_insn (restore);
8757 /* Restore the registers. */
8758 mips_for_each_saved_reg (cfun->machine->frame.total_size - step2,
8761 /* Deallocate the final bit of the frame. */
8763 emit_insn (gen_add3_insn (stack_pointer_rtx,
8768 /* Add in the __builtin_eh_return stack adjustment. We need to
8769 use a temporary in mips16 code. */
8770 if (current_function_calls_eh_return)
8774 mips_emit_move (MIPS_EPILOGUE_TEMP (Pmode), stack_pointer_rtx);
8775 emit_insn (gen_add3_insn (MIPS_EPILOGUE_TEMP (Pmode),
8776 MIPS_EPILOGUE_TEMP (Pmode),
8777 EH_RETURN_STACKADJ_RTX));
8778 mips_emit_move (stack_pointer_rtx, MIPS_EPILOGUE_TEMP (Pmode));
8781 emit_insn (gen_add3_insn (stack_pointer_rtx,
8783 EH_RETURN_STACKADJ_RTX));
8788 /* When generating MIPS16 code, the normal mips_for_each_saved_reg
8789 path will restore the return address into $7 rather than $31. */
8791 && !GENERATE_MIPS16E_SAVE_RESTORE
8792 && (cfun->machine->frame.mask & RA_MASK) != 0)
8793 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode,
8794 GP_REG_FIRST + 7)));
8796 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode,
8797 GP_REG_FIRST + 31)));
8801 /* Return nonzero if this function is known to have a null epilogue.
8802 This allows the optimizer to omit jumps to jumps if no stack
8806 mips_can_use_return_insn (void)
8808 if (! reload_completed)
8811 if (df_regs_ever_live_p (31) || current_function_profile)
8814 /* In mips16 mode, a function that returns a floating point value
8815 needs to arrange to copy the return value into the floating point
8817 if (mips16_cfun_returns_in_fpr_p ())
8820 if (cfun->machine->frame.initialized)
8821 return cfun->machine->frame.total_size == 0;
8823 return compute_frame_size (get_frame_size ()) == 0;
8826 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. Generate rtl rather than asm text
8827 in order to avoid duplicating too much logic from elsewhere. */
8830 mips_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
8831 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8834 rtx this, temp1, temp2, insn, fnaddr;
8837 /* Pretend to be a post-reload pass while generating rtl. */
8838 reload_completed = 1;
8840 /* Mark the end of the (empty) prologue. */
8841 emit_note (NOTE_INSN_PROLOGUE_END);
8843 /* Determine if we can use a sibcall to call FUNCTION directly. */
8844 fnaddr = XEXP (DECL_RTL (function), 0);
8845 use_sibcall_p = (mips_function_ok_for_sibcall (function, NULL)
8846 && const_call_insn_operand (fnaddr, Pmode));
8848 /* Determine if we need to load FNADDR from the GOT. */
8850 switch (mips_classify_symbol (fnaddr, SYMBOL_CONTEXT_LEA))
8852 case SYMBOL_GOT_PAGE_OFST:
8853 case SYMBOL_GOT_DISP:
8854 /* Pick a global pointer. Use a call-clobbered register if
8855 TARGET_CALL_SAVED_GP. */
8856 cfun->machine->global_pointer =
8857 TARGET_CALL_SAVED_GP ? 15 : GLOBAL_POINTER_REGNUM;
8858 SET_REGNO (pic_offset_table_rtx, cfun->machine->global_pointer);
8860 /* Set up the global pointer for n32 or n64 abicalls. */
8861 mips_emit_loadgp ();
8868 /* We need two temporary registers in some cases. */
8869 temp1 = gen_rtx_REG (Pmode, 2);
8870 temp2 = gen_rtx_REG (Pmode, 3);
8872 /* Find out which register contains the "this" pointer. */
8873 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8874 this = gen_rtx_REG (Pmode, GP_ARG_FIRST + 1);
8876 this = gen_rtx_REG (Pmode, GP_ARG_FIRST);
8878 /* Add DELTA to THIS. */
8881 rtx offset = GEN_INT (delta);
8882 if (!SMALL_OPERAND (delta))
8884 mips_emit_move (temp1, offset);
8887 emit_insn (gen_add3_insn (this, this, offset));
8890 /* If needed, add *(*THIS + VCALL_OFFSET) to THIS. */
8891 if (vcall_offset != 0)
8895 /* Set TEMP1 to *THIS. */
8896 mips_emit_move (temp1, gen_rtx_MEM (Pmode, this));
8898 /* Set ADDR to a legitimate address for *THIS + VCALL_OFFSET. */
8899 addr = mips_add_offset (temp2, temp1, vcall_offset);
8901 /* Load the offset and add it to THIS. */
8902 mips_emit_move (temp1, gen_rtx_MEM (Pmode, addr));
8903 emit_insn (gen_add3_insn (this, this, temp1));
8906 /* Jump to the target function. Use a sibcall if direct jumps are
8907 allowed, otherwise load the address into a register first. */
8910 insn = emit_call_insn (gen_sibcall_internal (fnaddr, const0_rtx));
8911 SIBLING_CALL_P (insn) = 1;
8915 /* This is messy. gas treats "la $25,foo" as part of a call
8916 sequence and may allow a global "foo" to be lazily bound.
8917 The general move patterns therefore reject this combination.
8919 In this context, lazy binding would actually be OK
8920 for TARGET_CALL_CLOBBERED_GP, but it's still wrong for
8921 TARGET_CALL_SAVED_GP; see mips_load_call_address.
8922 We must therefore load the address via a temporary
8923 register if mips_dangerous_for_la25_p.
8925 If we jump to the temporary register rather than $25, the assembler
8926 can use the move insn to fill the jump's delay slot. */
8927 if (TARGET_USE_PIC_FN_ADDR_REG
8928 && !mips_dangerous_for_la25_p (fnaddr))
8929 temp1 = gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM);
8930 mips_load_call_address (temp1, fnaddr, true);
8932 if (TARGET_USE_PIC_FN_ADDR_REG
8933 && REGNO (temp1) != PIC_FUNCTION_ADDR_REGNUM)
8934 mips_emit_move (gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM), temp1);
8935 emit_jump_insn (gen_indirect_jump (temp1));
8938 /* Run just enough of rest_of_compilation. This sequence was
8939 "borrowed" from alpha.c. */
8940 insn = get_insns ();
8941 insn_locators_alloc ();
8942 split_all_insns_noflow ();
8943 mips16_lay_out_constants ();
8944 shorten_branches (insn);
8945 final_start_function (insn, file, 1);
8946 final (insn, file, 1);
8947 final_end_function ();
8949 /* Clean up the vars set above. Note that final_end_function resets
8950 the global pointer for us. */
8951 reload_completed = 0;
8954 /* Implement TARGET_SELECT_RTX_SECTION. */
8957 mips_select_rtx_section (enum machine_mode mode, rtx x,
8958 unsigned HOST_WIDE_INT align)
8960 /* ??? Consider using mergeable small data sections. */
8961 if (mips_rtx_constant_in_small_data_p (mode))
8962 return get_named_section (NULL, ".sdata", 0);
8964 return default_elf_select_rtx_section (mode, x, align);
8967 /* Implement TARGET_ASM_FUNCTION_RODATA_SECTION.
8969 The complication here is that, with the combination TARGET_ABICALLS
8970 && !TARGET_GPWORD, jump tables will use absolute addresses, and should
8971 therefore not be included in the read-only part of a DSO. Handle such
8972 cases by selecting a normal data section instead of a read-only one.
8973 The logic apes that in default_function_rodata_section. */
8976 mips_function_rodata_section (tree decl)
8978 if (!TARGET_ABICALLS || TARGET_GPWORD)
8979 return default_function_rodata_section (decl);
8981 if (decl && DECL_SECTION_NAME (decl))
8983 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
8984 if (DECL_ONE_ONLY (decl) && strncmp (name, ".gnu.linkonce.t.", 16) == 0)
8986 char *rname = ASTRDUP (name);
8988 return get_section (rname, SECTION_LINKONCE | SECTION_WRITE, decl);
8990 else if (flag_function_sections && flag_data_sections
8991 && strncmp (name, ".text.", 6) == 0)
8993 char *rname = ASTRDUP (name);
8994 memcpy (rname + 1, "data", 4);
8995 return get_section (rname, SECTION_WRITE, decl);
8998 return data_section;
9001 /* Implement TARGET_IN_SMALL_DATA_P. This function controls whether
9002 locally-defined objects go in a small data section. It also controls
9003 the setting of the SYMBOL_REF_SMALL_P flag, which in turn helps
9004 mips_classify_symbol decide when to use %gp_rel(...)($gp) accesses. */
9007 mips_in_small_data_p (const_tree decl)
9011 if (TREE_CODE (decl) == STRING_CST || TREE_CODE (decl) == FUNCTION_DECL)
9014 /* We don't yet generate small-data references for -mabicalls or
9015 VxWorks RTP code. See the related -G handling in override_options. */
9016 if (TARGET_ABICALLS || TARGET_VXWORKS_RTP)
9019 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl) != 0)
9023 /* Reject anything that isn't in a known small-data section. */
9024 name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
9025 if (strcmp (name, ".sdata") != 0 && strcmp (name, ".sbss") != 0)
9028 /* If a symbol is defined externally, the assembler will use the
9029 usual -G rules when deciding how to implement macros. */
9030 if (mips_lo_relocs[SYMBOL_GP_RELATIVE] || !DECL_EXTERNAL (decl))
9033 else if (TARGET_EMBEDDED_DATA)
9035 /* Don't put constants into the small data section: we want them
9036 to be in ROM rather than RAM. */
9037 if (TREE_CODE (decl) != VAR_DECL)
9040 if (TREE_READONLY (decl)
9041 && !TREE_SIDE_EFFECTS (decl)
9042 && (!DECL_INITIAL (decl) || TREE_CONSTANT (DECL_INITIAL (decl))))
9046 /* Enforce -mlocal-sdata. */
9047 if (!TARGET_LOCAL_SDATA && !TREE_PUBLIC (decl))
9050 /* Enforce -mextern-sdata. */
9051 if (!TARGET_EXTERN_SDATA && DECL_P (decl))
9053 if (DECL_EXTERNAL (decl))
9055 if (DECL_COMMON (decl) && DECL_INITIAL (decl) == NULL)
9059 size = int_size_in_bytes (TREE_TYPE (decl));
9060 return (size > 0 && size <= mips_section_threshold);
9063 /* Implement TARGET_USE_ANCHORS_FOR_SYMBOL_P. We don't want to use
9064 anchors for small data: the GP register acts as an anchor in that
9065 case. We also don't want to use them for PC-relative accesses,
9066 where the PC acts as an anchor. */
9069 mips_use_anchors_for_symbol_p (const_rtx symbol)
9071 switch (mips_classify_symbol (symbol, SYMBOL_CONTEXT_MEM))
9073 case SYMBOL_PC_RELATIVE:
9074 case SYMBOL_GP_RELATIVE:
9078 return default_use_anchors_for_symbol_p (symbol);
9082 /* See whether VALTYPE is a record whose fields should be returned in
9083 floating-point registers. If so, return the number of fields and
9084 list them in FIELDS (which should have two elements). Return 0
9087 For n32 & n64, a structure with one or two fields is returned in
9088 floating-point registers as long as every field has a floating-point
9092 mips_fpr_return_fields (const_tree valtype, tree *fields)
9100 if (TREE_CODE (valtype) != RECORD_TYPE)
9104 for (field = TYPE_FIELDS (valtype); field != 0; field = TREE_CHAIN (field))
9106 if (TREE_CODE (field) != FIELD_DECL)
9109 if (TREE_CODE (TREE_TYPE (field)) != REAL_TYPE)
9115 fields[i++] = field;
9121 /* Implement TARGET_RETURN_IN_MSB. For n32 & n64, we should return
9122 a value in the most significant part of $2/$3 if:
9124 - the target is big-endian;
9126 - the value has a structure or union type (we generalize this to
9127 cover aggregates from other languages too); and
9129 - the structure is not returned in floating-point registers. */
9132 mips_return_in_msb (const_tree valtype)
9136 return (TARGET_NEWABI
9137 && TARGET_BIG_ENDIAN
9138 && AGGREGATE_TYPE_P (valtype)
9139 && mips_fpr_return_fields (valtype, fields) == 0);
9143 /* Return a composite value in a pair of floating-point registers.
9144 MODE1 and OFFSET1 are the mode and byte offset for the first value,
9145 likewise MODE2 and OFFSET2 for the second. MODE is the mode of the
9148 For n32 & n64, $f0 always holds the first value and $f2 the second.
9149 Otherwise the values are packed together as closely as possible. */
9152 mips_return_fpr_pair (enum machine_mode mode,
9153 enum machine_mode mode1, HOST_WIDE_INT offset1,
9154 enum machine_mode mode2, HOST_WIDE_INT offset2)
9158 inc = (TARGET_NEWABI ? 2 : MAX_FPRS_PER_FMT);
9159 return gen_rtx_PARALLEL
9162 gen_rtx_EXPR_LIST (VOIDmode,
9163 gen_rtx_REG (mode1, FP_RETURN),
9165 gen_rtx_EXPR_LIST (VOIDmode,
9166 gen_rtx_REG (mode2, FP_RETURN + inc),
9167 GEN_INT (offset2))));
9172 /* Implement FUNCTION_VALUE and LIBCALL_VALUE. For normal calls,
9173 VALTYPE is the return type and MODE is VOIDmode. For libcalls,
9174 VALTYPE is null and MODE is the mode of the return value. */
9177 mips_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED,
9178 enum machine_mode mode)
9185 mode = TYPE_MODE (valtype);
9186 unsignedp = TYPE_UNSIGNED (valtype);
9188 /* Since we define TARGET_PROMOTE_FUNCTION_RETURN that returns
9189 true, we must promote the mode just as PROMOTE_MODE does. */
9190 mode = promote_mode (valtype, mode, &unsignedp, 1);
9192 /* Handle structures whose fields are returned in $f0/$f2. */
9193 switch (mips_fpr_return_fields (valtype, fields))
9196 return gen_rtx_REG (mode, FP_RETURN);
9199 return mips_return_fpr_pair (mode,
9200 TYPE_MODE (TREE_TYPE (fields[0])),
9201 int_byte_position (fields[0]),
9202 TYPE_MODE (TREE_TYPE (fields[1])),
9203 int_byte_position (fields[1]));
9206 /* If a value is passed in the most significant part of a register, see
9207 whether we have to round the mode up to a whole number of words. */
9208 if (mips_return_in_msb (valtype))
9210 HOST_WIDE_INT size = int_size_in_bytes (valtype);
9211 if (size % UNITS_PER_WORD != 0)
9213 size += UNITS_PER_WORD - size % UNITS_PER_WORD;
9214 mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
9218 /* For EABI, the class of return register depends entirely on MODE.
9219 For example, "struct { some_type x; }" and "union { some_type x; }"
9220 are returned in the same way as a bare "some_type" would be.
9221 Other ABIs only use FPRs for scalar, complex or vector types. */
9222 if (mips_abi != ABI_EABI && !FLOAT_TYPE_P (valtype))
9223 return gen_rtx_REG (mode, GP_RETURN);
9228 /* Handle long doubles for n32 & n64. */
9230 return mips_return_fpr_pair (mode,
9232 DImode, GET_MODE_SIZE (mode) / 2);
9234 if (mips_return_mode_in_fpr_p (mode))
9236 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
9237 return mips_return_fpr_pair (mode,
9238 GET_MODE_INNER (mode), 0,
9239 GET_MODE_INNER (mode),
9240 GET_MODE_SIZE (mode) / 2);
9242 return gen_rtx_REG (mode, FP_RETURN);
9246 return gen_rtx_REG (mode, GP_RETURN);
9249 /* Return nonzero when an argument must be passed by reference. */
9252 mips_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
9253 enum machine_mode mode, const_tree type,
9254 bool named ATTRIBUTE_UNUSED)
9256 if (mips_abi == ABI_EABI)
9260 /* ??? How should SCmode be handled? */
9261 if (mode == DImode || mode == DFmode
9262 || mode == DQmode || mode == UDQmode
9263 || mode == DAmode || mode == UDAmode)
9266 size = type ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
9267 return size == -1 || size > UNITS_PER_WORD;
9271 /* If we have a variable-sized parameter, we have no choice. */
9272 return targetm.calls.must_pass_in_stack (mode, type);
9277 mips_callee_copies (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
9278 enum machine_mode mode ATTRIBUTE_UNUSED,
9279 const_tree type ATTRIBUTE_UNUSED, bool named)
9281 return mips_abi == ABI_EABI && named;
9284 /* Return true if registers of class CLASS cannot change from mode FROM
9288 mips_cannot_change_mode_class (enum machine_mode from ATTRIBUTE_UNUSED,
9289 enum machine_mode to ATTRIBUTE_UNUSED,
9290 enum reg_class class)
9292 /* There are several problems with changing the modes of values
9293 in floating-point registers:
9295 - When a multi-word value is stored in paired floating-point
9296 registers, the first register always holds the low word.
9297 We therefore can't allow FPRs to change between single-word
9298 and multi-word modes on big-endian targets.
9300 - GCC assumes that each word of a multiword register can be accessed
9301 individually using SUBREGs. This is not true for floating-point
9302 registers if they are bigger than a word.
9304 - Loading a 32-bit value into a 64-bit floating-point register
9305 will not sign-extend the value, despite what LOAD_EXTEND_OP says.
9306 We can't allow FPRs to change from SImode to to a wider mode on
9309 - If the FPU has already interpreted a value in one format, we must
9310 not ask it to treat the value as having a different format.
9312 We therefore only allow changes between 4-byte and smaller integer
9313 values, all of which have the "W" format as far as the FPU is
9315 return (reg_classes_intersect_p (FP_REGS, class)
9316 && (GET_MODE_CLASS (from) != MODE_INT
9317 || GET_MODE_CLASS (to) != MODE_INT
9318 || GET_MODE_SIZE (from) > 4
9319 || GET_MODE_SIZE (to) > 4));
9322 /* Return true if X should not be moved directly into register $25.
9323 We need this because many versions of GAS will treat "la $25,foo" as
9324 part of a call sequence and so allow a global "foo" to be lazily bound. */
9327 mips_dangerous_for_la25_p (rtx x)
9329 return (!TARGET_EXPLICIT_RELOCS
9331 && GET_CODE (x) == SYMBOL_REF
9332 && mips_global_symbol_p (x));
9335 /* Return true if moves in mode MODE can use the FPU's mov.fmt instruction. */
9338 mips_mode_ok_for_mov_fmt_p (enum machine_mode mode)
9343 return TARGET_HARD_FLOAT;
9346 return TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT;
9349 return TARGET_HARD_FLOAT && TARGET_PAIRED_SINGLE_FLOAT;
9356 /* Implement PREFERRED_RELOAD_CLASS. */
9359 mips_preferred_reload_class (rtx x, enum reg_class class)
9361 if (mips_dangerous_for_la25_p (x) && reg_class_subset_p (LEA_REGS, class))
9364 if (reg_class_subset_p (FP_REGS, class)
9365 && mips_mode_ok_for_mov_fmt_p (GET_MODE (x)))
9368 if (reg_class_subset_p (GR_REGS, class))
9371 if (TARGET_MIPS16 && reg_class_subset_p (M16_REGS, class))
9377 /* This function returns the register class required for a secondary
9378 register when copying between one of the registers in CLASS, and X,
9379 using MODE. If IN_P is nonzero, the copy is going from X to the
9380 register, otherwise the register is the source. A return value of
9381 NO_REGS means that no secondary register is required. */
9384 mips_secondary_reload_class (enum reg_class class,
9385 enum machine_mode mode, rtx x, int in_p)
9389 /* If X is a constant that cannot be loaded into $25, it must be loaded
9390 into some other GPR. No other register class allows a direct move. */
9391 if (mips_dangerous_for_la25_p (x))
9392 return reg_class_subset_p (class, LEA_REGS) ? NO_REGS : LEA_REGS;
9394 regno = true_regnum (x);
9397 /* In MIPS16 mode, every move must involve a member of M16_REGS. */
9398 if (!reg_class_subset_p (class, M16_REGS) && !M16_REG_P (regno))
9401 /* We can't really copy to HI or LO at all in MIPS16 mode. */
9402 if (in_p ? reg_classes_intersect_p (class, ACC_REGS) : ACC_REG_P (regno))
9408 /* Copying from accumulator registers to anywhere other than a general
9409 register requires a temporary general register. */
9410 if (reg_class_subset_p (class, ACC_REGS))
9411 return GP_REG_P (regno) ? NO_REGS : GR_REGS;
9412 if (ACC_REG_P (regno))
9413 return reg_class_subset_p (class, GR_REGS) ? NO_REGS : GR_REGS;
9415 /* We can only copy a value to a condition code register from a
9416 floating point register, and even then we require a scratch
9417 floating point register. We can only copy a value out of a
9418 condition code register into a general register. */
9419 if (reg_class_subset_p (class, ST_REGS))
9423 return GP_REG_P (regno) ? NO_REGS : GR_REGS;
9425 if (ST_REG_P (regno))
9429 return reg_class_subset_p (class, GR_REGS) ? NO_REGS : GR_REGS;
9432 if (reg_class_subset_p (class, FP_REGS))
9435 && (GET_MODE_SIZE (mode) == 4 || GET_MODE_SIZE (mode) == 8))
9436 /* In this case we can use lwc1, swc1, ldc1 or sdc1. We'll use
9437 pairs of lwc1s and swc1s if ldc1 and sdc1 are not supported. */
9440 if (GP_REG_P (regno) || x == CONST0_RTX (mode))
9441 /* In this case we can use mtc1, mfc1, dmtc1 or dmfc1. */
9444 if (CONSTANT_P (x) && !targetm.cannot_force_const_mem (x))
9445 /* We can force the constant to memory and use lwc1
9446 and ldc1. As above, we will use pairs of lwc1s if
9447 ldc1 is not supported. */
9450 if (FP_REG_P (regno) && mips_mode_ok_for_mov_fmt_p (mode))
9451 /* In this case we can use mov.fmt. */
9454 /* Otherwise, we need to reload through an integer register. */
9457 if (FP_REG_P (regno))
9458 return reg_class_subset_p (class, GR_REGS) ? NO_REGS : GR_REGS;
9463 /* Implement CLASS_MAX_NREGS.
9465 - UNITS_PER_FPREG controls the number of registers needed by FP_REGS.
9467 - ST_REGS are always hold CCmode values, and CCmode values are
9468 considered to be 4 bytes wide.
9470 All other register classes are covered by UNITS_PER_WORD. Note that
9471 this is true even for unions of integer and float registers when the
9472 latter are smaller than the former. The only supported combination
9473 in which case this occurs is -mgp64 -msingle-float, which has 64-bit
9474 words but 32-bit float registers. A word-based calculation is correct
9475 in that case since -msingle-float disallows multi-FPR values. */
9478 mips_class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,
9479 enum machine_mode mode)
9481 if (class == ST_REGS)
9482 return (GET_MODE_SIZE (mode) + 3) / 4;
9483 else if (class == FP_REGS)
9484 return (GET_MODE_SIZE (mode) + UNITS_PER_FPREG - 1) / UNITS_PER_FPREG;
9486 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
9490 mips_valid_pointer_mode (enum machine_mode mode)
9492 return (mode == SImode || (TARGET_64BIT && mode == DImode));
9495 /* Target hook for vector_mode_supported_p. */
9498 mips_vector_mode_supported_p (enum machine_mode mode)
9503 return TARGET_PAIRED_SINGLE_FLOAT;
9520 /* Implement TARGET_SCALAR_MODE_SUPPORTED_P. */
9523 mips_scalar_mode_supported_p (enum machine_mode mode)
9525 if (ALL_FIXED_POINT_MODE_P (mode)
9526 && GET_MODE_PRECISION (mode) <= 2 * BITS_PER_WORD)
9529 return default_scalar_mode_supported_p (mode);
9532 /* If we can access small data directly (using gp-relative relocation
9533 operators) return the small data pointer, otherwise return null.
9535 For each mips16 function which refers to GP relative symbols, we
9536 use a pseudo register, initialized at the start of the function, to
9537 hold the $gp value. */
9540 mips16_gp_pseudo_reg (void)
9542 if (cfun->machine->mips16_gp_pseudo_rtx == NULL_RTX)
9543 cfun->machine->mips16_gp_pseudo_rtx = gen_reg_rtx (Pmode);
9545 /* Don't initialize the pseudo register if we are being called from
9546 the tree optimizers' cost-calculation routines. */
9547 if (!cfun->machine->initialized_mips16_gp_pseudo_p
9548 && (current_ir_type () != IR_GIMPLE || currently_expanding_to_rtl))
9552 /* We want to initialize this to a value which gcc will believe
9554 insn = gen_load_const_gp (cfun->machine->mips16_gp_pseudo_rtx);
9556 push_topmost_sequence ();
9557 /* We need to emit the initialization after the FUNCTION_BEG
9558 note, so that it will be integrated. */
9559 for (scan = get_insns (); scan != NULL_RTX; scan = NEXT_INSN (scan))
9561 && NOTE_KIND (scan) == NOTE_INSN_FUNCTION_BEG)
9563 if (scan == NULL_RTX)
9564 scan = get_insns ();
9565 insn = emit_insn_after (insn, scan);
9566 pop_topmost_sequence ();
9568 cfun->machine->initialized_mips16_gp_pseudo_p = true;
9571 return cfun->machine->mips16_gp_pseudo_rtx;
9574 /* Write out code to move floating point arguments in or out of
9575 general registers. Output the instructions to FILE. FP_CODE is
9576 the code describing which arguments are present (see the comment at
9577 the definition of CUMULATIVE_ARGS in mips.h). FROM_FP_P is nonzero if
9578 we are copying from the floating point registers. */
9581 mips16_fp_args (FILE *file, int fp_code, int from_fp_p)
9586 CUMULATIVE_ARGS cum;
9588 /* This code only works for the original 32-bit ABI and the O64 ABI. */
9589 gcc_assert (TARGET_OLDABI);
9596 init_cumulative_args (&cum, NULL, NULL);
9598 for (f = (unsigned int) fp_code; f != 0; f >>= 2)
9600 enum machine_mode mode;
9601 struct mips_arg_info info;
9605 else if ((f & 3) == 2)
9610 mips_arg_info (&cum, mode, NULL, true, &info);
9611 gparg = mips_arg_regno (&info, false);
9612 fparg = mips_arg_regno (&info, true);
9615 fprintf (file, "\t%s\t%s,%s\n", s,
9616 reg_names[gparg], reg_names[fparg]);
9617 else if (TARGET_64BIT)
9618 fprintf (file, "\td%s\t%s,%s\n", s,
9619 reg_names[gparg], reg_names[fparg]);
9620 else if (ISA_HAS_MXHC1)
9621 /* -mips32r2 -mfp64 */
9622 fprintf (file, "\t%s\t%s,%s\n\t%s\t%s,%s\n",
9624 reg_names[gparg + (WORDS_BIG_ENDIAN ? 1 : 0)],
9626 from_fp_p ? "mfhc1" : "mthc1",
9627 reg_names[gparg + (WORDS_BIG_ENDIAN ? 0 : 1)],
9629 else if (TARGET_BIG_ENDIAN)
9630 fprintf (file, "\t%s\t%s,%s\n\t%s\t%s,%s\n", s,
9631 reg_names[gparg], reg_names[fparg + 1], s,
9632 reg_names[gparg + 1], reg_names[fparg]);
9634 fprintf (file, "\t%s\t%s,%s\n\t%s\t%s,%s\n", s,
9635 reg_names[gparg], reg_names[fparg], s,
9636 reg_names[gparg + 1], reg_names[fparg + 1]);
9638 function_arg_advance (&cum, mode, NULL, true);
9642 /* Build a mips16 function stub. This is used for functions which
9643 take arguments in the floating point registers. It is 32-bit code
9644 that moves the floating point args into the general registers, and
9645 then jumps to the 16-bit code. */
9648 build_mips16_function_stub (FILE *file)
9651 char *secname, *stubname;
9652 tree stubid, stubdecl;
9656 fnname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9657 fnname = targetm.strip_name_encoding (fnname);
9658 secname = (char *) alloca (strlen (fnname) + 20);
9659 sprintf (secname, ".mips16.fn.%s", fnname);
9660 stubname = (char *) alloca (strlen (fnname) + 20);
9661 sprintf (stubname, "__fn_stub_%s", fnname);
9662 stubid = get_identifier (stubname);
9663 stubdecl = build_decl (FUNCTION_DECL, stubid,
9664 build_function_type (void_type_node, NULL_TREE));
9665 DECL_SECTION_NAME (stubdecl) = build_string (strlen (secname), secname);
9666 DECL_RESULT (stubdecl) = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
9668 fprintf (file, "\t# Stub function for %s (", current_function_name ());
9670 for (f = (unsigned int) current_function_args_info.fp_code; f != 0; f >>= 2)
9672 fprintf (file, "%s%s",
9673 need_comma ? ", " : "",
9674 (f & 3) == 1 ? "float" : "double");
9677 fprintf (file, ")\n");
9679 fprintf (file, "\t.set\tnomips16\n");
9680 switch_to_section (function_section (stubdecl));
9681 ASM_OUTPUT_ALIGN (file, floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT));
9683 /* ??? If FUNCTION_NAME_ALREADY_DECLARED is defined, then we are
9684 within a .ent, and we cannot emit another .ent. */
9685 if (!FUNCTION_NAME_ALREADY_DECLARED)
9687 fputs ("\t.ent\t", file);
9688 assemble_name (file, stubname);
9692 assemble_name (file, stubname);
9693 fputs (":\n", file);
9695 /* We don't want the assembler to insert any nops here. */
9696 fprintf (file, "\t.set\tnoreorder\n");
9698 mips16_fp_args (file, current_function_args_info.fp_code, 1);
9700 fprintf (asm_out_file, "\t.set\tnoat\n");
9701 fprintf (asm_out_file, "\tla\t%s,", reg_names[GP_REG_FIRST + 1]);
9702 assemble_name (file, fnname);
9703 fprintf (file, "\n");
9704 fprintf (asm_out_file, "\tjr\t%s\n", reg_names[GP_REG_FIRST + 1]);
9705 fprintf (asm_out_file, "\t.set\tat\n");
9707 /* Unfortunately, we can't fill the jump delay slot. We can't fill
9708 with one of the mfc1 instructions, because the result is not
9709 available for one instruction, so if the very first instruction
9710 in the function refers to the register, it will see the wrong
9712 fprintf (file, "\tnop\n");
9714 fprintf (file, "\t.set\treorder\n");
9716 if (!FUNCTION_NAME_ALREADY_DECLARED)
9718 fputs ("\t.end\t", file);
9719 assemble_name (file, stubname);
9723 switch_to_section (function_section (current_function_decl));
9726 /* We keep a list of functions for which we have already built stubs
9727 in build_mips16_call_stub. */
9731 struct mips16_stub *next;
9736 static struct mips16_stub *mips16_stubs;
9738 /* Emit code to return a double value from a mips16 stub. GPREG is the
9739 first GP reg to use, FPREG is the first FP reg to use. */
9742 mips16_fpret_double (int gpreg, int fpreg)
9745 fprintf (asm_out_file, "\tdmfc1\t%s,%s\n",
9746 reg_names[gpreg], reg_names[fpreg]);
9747 else if (TARGET_FLOAT64)
9749 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9750 reg_names[gpreg + WORDS_BIG_ENDIAN],
9752 fprintf (asm_out_file, "\tmfhc1\t%s,%s\n",
9753 reg_names[gpreg + !WORDS_BIG_ENDIAN],
9758 if (TARGET_BIG_ENDIAN)
9760 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9761 reg_names[gpreg + 0],
9762 reg_names[fpreg + 1]);
9763 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9764 reg_names[gpreg + 1],
9765 reg_names[fpreg + 0]);
9769 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9770 reg_names[gpreg + 0],
9771 reg_names[fpreg + 0]);
9772 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9773 reg_names[gpreg + 1],
9774 reg_names[fpreg + 1]);
9779 /* Build a call stub for a mips16 call. A stub is needed if we are
9780 passing any floating point values which should go into the floating
9781 point registers. If we are, and the call turns out to be to a
9782 32-bit function, the stub will be used to move the values into the
9783 floating point registers before calling the 32-bit function. The
9784 linker will magically adjust the function call to either the 16-bit
9785 function or the 32-bit stub, depending upon where the function call
9786 is actually defined.
9788 Similarly, we need a stub if the return value might come back in a
9789 floating point register.
9791 RETVAL is the location of the return value, or null if this is
9792 a call rather than a call_value. FN is the address of the
9793 function and ARG_SIZE is the size of the arguments. FP_CODE
9794 is the code built by function_arg. This function returns a nonzero
9795 value if it builds the call instruction itself. */
9798 build_mips16_call_stub (rtx retval, rtx fn, rtx arg_size, int fp_code)
9802 char *secname, *stubname;
9803 struct mips16_stub *l;
9804 tree stubid, stubdecl;
9809 /* We don't need to do anything if we aren't in mips16 mode, or if
9810 we were invoked with the -msoft-float option. */
9811 if (!TARGET_MIPS16 || TARGET_SOFT_FLOAT_ABI)
9814 /* Figure out whether the value might come back in a floating point
9817 fpret = mips_return_mode_in_fpr_p (GET_MODE (retval));
9819 /* We don't need to do anything if there were no floating point
9820 arguments and the value will not be returned in a floating point
9822 if (fp_code == 0 && ! fpret)
9825 /* We don't need to do anything if this is a call to a special
9826 mips16 support function. */
9827 if (GET_CODE (fn) == SYMBOL_REF
9828 && strncmp (XSTR (fn, 0), "__mips16_", 9) == 0)
9831 /* This code will only work for o32 and o64 abis. The other ABI's
9832 require more sophisticated support. */
9833 gcc_assert (TARGET_OLDABI);
9835 /* If we're calling via a function pointer, then we must always call
9836 via a stub. There are magic stubs provided in libgcc.a for each
9837 of the required cases. Each of them expects the function address
9838 to arrive in register $2. */
9840 if (GET_CODE (fn) != SYMBOL_REF)
9846 /* ??? If this code is modified to support other ABI's, we need
9847 to handle PARALLEL return values here. */
9850 sprintf (buf, "__mips16_call_stub_%s_%d",
9851 mips16_call_stub_mode_suffix (GET_MODE (retval)),
9854 sprintf (buf, "__mips16_call_stub_%d",
9857 id = get_identifier (buf);
9858 stub_fn = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (id));
9860 mips_emit_move (gen_rtx_REG (Pmode, 2), fn);
9862 if (retval == NULL_RTX)
9863 insn = gen_call_internal (stub_fn, arg_size);
9865 insn = gen_call_value_internal (retval, stub_fn, arg_size);
9866 insn = emit_call_insn (insn);
9868 /* Put the register usage information on the CALL. */
9869 CALL_INSN_FUNCTION_USAGE (insn) =
9870 gen_rtx_EXPR_LIST (VOIDmode,
9871 gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 2)),
9872 CALL_INSN_FUNCTION_USAGE (insn));
9874 /* If we are handling a floating point return value, we need to
9875 save $18 in the function prologue. Putting a note on the
9876 call will mean that df_regs_ever_live_p ($18) will be true if the
9877 call is not eliminated, and we can check that in the prologue
9880 CALL_INSN_FUNCTION_USAGE (insn) =
9881 gen_rtx_EXPR_LIST (VOIDmode,
9882 gen_rtx_USE (VOIDmode,
9883 gen_rtx_REG (word_mode, 18)),
9884 CALL_INSN_FUNCTION_USAGE (insn));
9886 /* Return 1 to tell the caller that we've generated the call
9891 /* We know the function we are going to call. If we have already
9892 built a stub, we don't need to do anything further. */
9894 fnname = targetm.strip_name_encoding (XSTR (fn, 0));
9895 for (l = mips16_stubs; l != NULL; l = l->next)
9896 if (strcmp (l->name, fnname) == 0)
9901 /* Build a special purpose stub. When the linker sees a
9902 function call in mips16 code, it will check where the target
9903 is defined. If the target is a 32-bit call, the linker will
9904 search for the section defined here. It can tell which
9905 symbol this section is associated with by looking at the
9906 relocation information (the name is unreliable, since this
9907 might be a static function). If such a section is found, the
9908 linker will redirect the call to the start of the magic
9911 If the function does not return a floating point value, the
9912 special stub section is named
9915 If the function does return a floating point value, the stub
9917 .mips16.call.fp.FNNAME
9920 secname = (char *) alloca (strlen (fnname) + 40);
9921 sprintf (secname, ".mips16.call.%s%s",
9924 stubname = (char *) alloca (strlen (fnname) + 20);
9925 sprintf (stubname, "__call_stub_%s%s",
9928 stubid = get_identifier (stubname);
9929 stubdecl = build_decl (FUNCTION_DECL, stubid,
9930 build_function_type (void_type_node, NULL_TREE));
9931 DECL_SECTION_NAME (stubdecl) = build_string (strlen (secname), secname);
9932 DECL_RESULT (stubdecl) = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
9934 fprintf (asm_out_file, "\t# Stub function to call %s%s (",
9936 ? (GET_MODE (retval) == SFmode ? "float " : "double ")
9940 for (f = (unsigned int) fp_code; f != 0; f >>= 2)
9942 fprintf (asm_out_file, "%s%s",
9943 need_comma ? ", " : "",
9944 (f & 3) == 1 ? "float" : "double");
9947 fprintf (asm_out_file, ")\n");
9949 fprintf (asm_out_file, "\t.set\tnomips16\n");
9950 assemble_start_function (stubdecl, stubname);
9952 if (!FUNCTION_NAME_ALREADY_DECLARED)
9954 fputs ("\t.ent\t", asm_out_file);
9955 assemble_name (asm_out_file, stubname);
9956 fputs ("\n", asm_out_file);
9958 assemble_name (asm_out_file, stubname);
9959 fputs (":\n", asm_out_file);
9962 /* We build the stub code by hand. That's the only way we can
9963 do it, since we can't generate 32-bit code during a 16-bit
9966 /* We don't want the assembler to insert any nops here. */
9967 fprintf (asm_out_file, "\t.set\tnoreorder\n");
9969 mips16_fp_args (asm_out_file, fp_code, 0);
9973 fprintf (asm_out_file, "\t.set\tnoat\n");
9974 fprintf (asm_out_file, "\tla\t%s,%s\n", reg_names[GP_REG_FIRST + 1],
9976 fprintf (asm_out_file, "\tjr\t%s\n", reg_names[GP_REG_FIRST + 1]);
9977 fprintf (asm_out_file, "\t.set\tat\n");
9978 /* Unfortunately, we can't fill the jump delay slot. We
9979 can't fill with one of the mtc1 instructions, because the
9980 result is not available for one instruction, so if the
9981 very first instruction in the function refers to the
9982 register, it will see the wrong value. */
9983 fprintf (asm_out_file, "\tnop\n");
9987 fprintf (asm_out_file, "\tmove\t%s,%s\n",
9988 reg_names[GP_REG_FIRST + 18], reg_names[GP_REG_FIRST + 31]);
9989 fprintf (asm_out_file, "\tjal\t%s\n", fnname);
9990 /* As above, we can't fill the delay slot. */
9991 fprintf (asm_out_file, "\tnop\n");
9992 switch (GET_MODE (retval))
9995 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
9996 reg_names[GP_REG_FIRST + 3],
9997 reg_names[FP_REG_FIRST + MAX_FPRS_PER_FMT]);
10000 fprintf (asm_out_file, "\tmfc1\t%s,%s\n",
10001 reg_names[GP_REG_FIRST + 2],
10002 reg_names[FP_REG_FIRST + 0]);
10003 if (GET_MODE (retval) == SCmode && TARGET_64BIT)
10005 /* On 64-bit targets, complex floats are returned in
10006 a single GPR, such that "sd" on a suitably-aligned
10007 target would store the value correctly. */
10008 fprintf (asm_out_file, "\tdsll\t%s,%s,32\n",
10009 reg_names[GP_REG_FIRST + 2 + TARGET_LITTLE_ENDIAN],
10010 reg_names[GP_REG_FIRST + 2 + TARGET_LITTLE_ENDIAN]);
10011 fprintf (asm_out_file, "\tor\t%s,%s,%s\n",
10012 reg_names[GP_REG_FIRST + 2],
10013 reg_names[GP_REG_FIRST + 2],
10014 reg_names[GP_REG_FIRST + 3]);
10019 mips16_fpret_double (GP_REG_FIRST + 2 + (8 / UNITS_PER_WORD),
10020 FP_REG_FIRST + MAX_FPRS_PER_FMT);
10024 mips16_fpret_double (GP_REG_FIRST + 2, FP_REG_FIRST + 0);
10028 gcc_unreachable ();
10030 fprintf (asm_out_file, "\tj\t%s\n", reg_names[GP_REG_FIRST + 18]);
10031 /* As above, we can't fill the delay slot. */
10032 fprintf (asm_out_file, "\tnop\n");
10035 fprintf (asm_out_file, "\t.set\treorder\n");
10037 #ifdef ASM_DECLARE_FUNCTION_SIZE
10038 ASM_DECLARE_FUNCTION_SIZE (asm_out_file, stubname, stubdecl);
10041 if (!FUNCTION_NAME_ALREADY_DECLARED)
10043 fputs ("\t.end\t", asm_out_file);
10044 assemble_name (asm_out_file, stubname);
10045 fputs ("\n", asm_out_file);
10048 /* Record this stub. */
10049 l = (struct mips16_stub *) xmalloc (sizeof *l);
10050 l->name = xstrdup (fnname);
10052 l->next = mips16_stubs;
10056 /* If we expect a floating point return value, but we've built a
10057 stub which does not expect one, then we're in trouble. We can't
10058 use the existing stub, because it won't handle the floating point
10059 value. We can't build a new stub, because the linker won't know
10060 which stub to use for the various calls in this object file.
10061 Fortunately, this case is illegal, since it means that a function
10062 was declared in two different ways in a single compilation. */
10063 if (fpret && ! l->fpret)
10064 error ("cannot handle inconsistent calls to %qs", fnname);
10066 if (retval == NULL_RTX)
10067 insn = gen_call_internal_direct (fn, arg_size);
10069 insn = gen_call_value_internal_direct (retval, fn, arg_size);
10070 insn = emit_call_insn (insn);
10072 /* If we are calling a stub which handles a floating point return
10073 value, we need to arrange to save $18 in the prologue. We do
10074 this by marking the function call as using the register. The
10075 prologue will later see that it is used, and emit code to save
10078 CALL_INSN_FUNCTION_USAGE (insn) =
10079 gen_rtx_EXPR_LIST (VOIDmode,
10080 gen_rtx_USE (VOIDmode, gen_rtx_REG (word_mode, 18)),
10081 CALL_INSN_FUNCTION_USAGE (insn));
10083 /* Return 1 to tell the caller that we've generated the call
10088 /* An entry in the mips16 constant pool. VALUE is the pool constant,
10089 MODE is its mode, and LABEL is the CODE_LABEL associated with it. */
10091 struct mips16_constant {
10092 struct mips16_constant *next;
10095 enum machine_mode mode;
10098 /* Information about an incomplete mips16 constant pool. FIRST is the
10099 first constant, HIGHEST_ADDRESS is the highest address that the first
10100 byte of the pool can have, and INSN_ADDRESS is the current instruction
10103 struct mips16_constant_pool {
10104 struct mips16_constant *first;
10105 int highest_address;
10109 /* Add constant VALUE to POOL and return its label. MODE is the
10110 value's mode (used for CONST_INTs, etc.). */
10113 add_constant (struct mips16_constant_pool *pool,
10114 rtx value, enum machine_mode mode)
10116 struct mips16_constant **p, *c;
10117 bool first_of_size_p;
10119 /* See whether the constant is already in the pool. If so, return the
10120 existing label, otherwise leave P pointing to the place where the
10121 constant should be added.
10123 Keep the pool sorted in increasing order of mode size so that we can
10124 reduce the number of alignments needed. */
10125 first_of_size_p = true;
10126 for (p = &pool->first; *p != 0; p = &(*p)->next)
10128 if (mode == (*p)->mode && rtx_equal_p (value, (*p)->value))
10129 return (*p)->label;
10130 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE ((*p)->mode))
10132 if (GET_MODE_SIZE (mode) == GET_MODE_SIZE ((*p)->mode))
10133 first_of_size_p = false;
10136 /* In the worst case, the constant needed by the earliest instruction
10137 will end up at the end of the pool. The entire pool must then be
10138 accessible from that instruction.
10140 When adding the first constant, set the pool's highest address to
10141 the address of the first out-of-range byte. Adjust this address
10142 downwards each time a new constant is added. */
10143 if (pool->first == 0)
10144 /* For pc-relative lw, addiu and daddiu instructions, the base PC value
10145 is the address of the instruction with the lowest two bits clear.
10146 The base PC value for ld has the lowest three bits clear. Assume
10147 the worst case here. */
10148 pool->highest_address = pool->insn_address - (UNITS_PER_WORD - 2) + 0x8000;
10149 pool->highest_address -= GET_MODE_SIZE (mode);
10150 if (first_of_size_p)
10151 /* Take into account the worst possible padding due to alignment. */
10152 pool->highest_address -= GET_MODE_SIZE (mode) - 1;
10154 /* Create a new entry. */
10155 c = (struct mips16_constant *) xmalloc (sizeof *c);
10158 c->label = gen_label_rtx ();
10165 /* Output constant VALUE after instruction INSN and return the last
10166 instruction emitted. MODE is the mode of the constant. */
10169 dump_constants_1 (enum machine_mode mode, rtx value, rtx insn)
10171 if (SCALAR_INT_MODE_P (mode)
10172 || ALL_SCALAR_FRACT_MODE_P (mode)
10173 || ALL_SCALAR_ACCUM_MODE_P (mode))
10175 rtx size = GEN_INT (GET_MODE_SIZE (mode));
10176 return emit_insn_after (gen_consttable_int (value, size), insn);
10179 if (SCALAR_FLOAT_MODE_P (mode))
10180 return emit_insn_after (gen_consttable_float (value), insn);
10182 if (VECTOR_MODE_P (mode))
10186 for (i = 0; i < CONST_VECTOR_NUNITS (value); i++)
10187 insn = dump_constants_1 (GET_MODE_INNER (mode),
10188 CONST_VECTOR_ELT (value, i), insn);
10192 gcc_unreachable ();
10196 /* Dump out the constants in CONSTANTS after INSN. */
10199 dump_constants (struct mips16_constant *constants, rtx insn)
10201 struct mips16_constant *c, *next;
10205 for (c = constants; c != NULL; c = next)
10207 /* If necessary, increase the alignment of PC. */
10208 if (align < GET_MODE_SIZE (c->mode))
10210 int align_log = floor_log2 (GET_MODE_SIZE (c->mode));
10211 insn = emit_insn_after (gen_align (GEN_INT (align_log)), insn);
10213 align = GET_MODE_SIZE (c->mode);
10215 insn = emit_label_after (c->label, insn);
10216 insn = dump_constants_1 (c->mode, c->value, insn);
10222 emit_barrier_after (insn);
10225 /* Return the length of instruction INSN. */
10228 mips16_insn_length (rtx insn)
10232 rtx body = PATTERN (insn);
10233 if (GET_CODE (body) == ADDR_VEC)
10234 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, 0);
10235 if (GET_CODE (body) == ADDR_DIFF_VEC)
10236 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, 1);
10238 return get_attr_length (insn);
10241 /* If *X is a symbolic constant that refers to the constant pool, add
10242 the constant to POOL and rewrite *X to use the constant's label. */
10245 mips16_rewrite_pool_constant (struct mips16_constant_pool *pool, rtx *x)
10247 rtx base, offset, label;
10249 split_const (*x, &base, &offset);
10250 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
10252 label = add_constant (pool, get_pool_constant (base),
10253 get_pool_mode (base));
10254 base = gen_rtx_LABEL_REF (Pmode, label);
10255 *x = mips_unspec_address_offset (base, offset, SYMBOL_PC_RELATIVE);
10259 /* This structure is used to communicate with mips16_rewrite_pool_refs.
10260 INSN is the instruction we're rewriting and POOL points to the current
10262 struct mips16_rewrite_pool_refs_info {
10264 struct mips16_constant_pool *pool;
10267 /* Rewrite *X so that constant pool references refer to the constant's
10268 label instead. DATA points to a mips16_rewrite_pool_refs_info
10272 mips16_rewrite_pool_refs (rtx *x, void *data)
10274 struct mips16_rewrite_pool_refs_info *info = data;
10276 if (force_to_mem_operand (*x, Pmode))
10278 rtx mem = force_const_mem (GET_MODE (*x), *x);
10279 validate_change (info->insn, x, mem, false);
10284 mips16_rewrite_pool_constant (info->pool, &XEXP (*x, 0));
10288 if (TARGET_MIPS16_TEXT_LOADS)
10289 mips16_rewrite_pool_constant (info->pool, x);
10291 return GET_CODE (*x) == CONST ? -1 : 0;
10294 /* Build MIPS16 constant pools. */
10297 mips16_lay_out_constants (void)
10299 struct mips16_constant_pool pool;
10300 struct mips16_rewrite_pool_refs_info info;
10303 if (!TARGET_MIPS16_PCREL_LOADS)
10307 memset (&pool, 0, sizeof (pool));
10308 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10310 /* Rewrite constant pool references in INSN. */
10315 for_each_rtx (&PATTERN (insn), mips16_rewrite_pool_refs, &info);
10318 pool.insn_address += mips16_insn_length (insn);
10320 if (pool.first != NULL)
10322 /* If there are no natural barriers between the first user of
10323 the pool and the highest acceptable address, we'll need to
10324 create a new instruction to jump around the constant pool.
10325 In the worst case, this instruction will be 4 bytes long.
10327 If it's too late to do this transformation after INSN,
10328 do it immediately before INSN. */
10329 if (barrier == 0 && pool.insn_address + 4 > pool.highest_address)
10333 label = gen_label_rtx ();
10335 jump = emit_jump_insn_before (gen_jump (label), insn);
10336 JUMP_LABEL (jump) = label;
10337 LABEL_NUSES (label) = 1;
10338 barrier = emit_barrier_after (jump);
10340 emit_label_after (label, barrier);
10341 pool.insn_address += 4;
10344 /* See whether the constant pool is now out of range of the first
10345 user. If so, output the constants after the previous barrier.
10346 Note that any instructions between BARRIER and INSN (inclusive)
10347 will use negative offsets to refer to the pool. */
10348 if (pool.insn_address > pool.highest_address)
10350 dump_constants (pool.first, barrier);
10354 else if (BARRIER_P (insn))
10358 dump_constants (pool.first, get_last_insn ());
10361 /* A temporary variable used by for_each_rtx callbacks, etc. */
10362 static rtx mips_sim_insn;
10364 /* A structure representing the state of the processor pipeline.
10365 Used by the mips_sim_* family of functions. */
10367 /* The maximum number of instructions that can be issued in a cycle.
10368 (Caches mips_issue_rate.) */
10369 unsigned int issue_rate;
10371 /* The current simulation time. */
10374 /* How many more instructions can be issued in the current cycle. */
10375 unsigned int insns_left;
10377 /* LAST_SET[X].INSN is the last instruction to set register X.
10378 LAST_SET[X].TIME is the time at which that instruction was issued.
10379 INSN is null if no instruction has yet set register X. */
10383 } last_set[FIRST_PSEUDO_REGISTER];
10385 /* The pipeline's current DFA state. */
10389 /* Reset STATE to the initial simulation state. */
10392 mips_sim_reset (struct mips_sim *state)
10395 state->insns_left = state->issue_rate;
10396 memset (&state->last_set, 0, sizeof (state->last_set));
10397 state_reset (state->dfa_state);
10400 /* Initialize STATE before its first use. DFA_STATE points to an
10401 allocated but uninitialized DFA state. */
10404 mips_sim_init (struct mips_sim *state, state_t dfa_state)
10406 state->issue_rate = mips_issue_rate ();
10407 state->dfa_state = dfa_state;
10408 mips_sim_reset (state);
10411 /* Advance STATE by one clock cycle. */
10414 mips_sim_next_cycle (struct mips_sim *state)
10417 state->insns_left = state->issue_rate;
10418 state_transition (state->dfa_state, 0);
10421 /* Advance simulation state STATE until instruction INSN can read
10425 mips_sim_wait_reg (struct mips_sim *state, rtx insn, rtx reg)
10429 for (i = 0; i < HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)); i++)
10430 if (state->last_set[REGNO (reg) + i].insn != 0)
10434 t = state->last_set[REGNO (reg) + i].time;
10435 t += insn_latency (state->last_set[REGNO (reg) + i].insn, insn);
10436 while (state->time < t)
10437 mips_sim_next_cycle (state);
10441 /* A for_each_rtx callback. If *X is a register, advance simulation state
10442 DATA until mips_sim_insn can read the register's value. */
10445 mips_sim_wait_regs_2 (rtx *x, void *data)
10448 mips_sim_wait_reg (data, mips_sim_insn, *x);
10452 /* Call mips_sim_wait_regs_2 (R, DATA) for each register R mentioned in *X. */
10455 mips_sim_wait_regs_1 (rtx *x, void *data)
10457 for_each_rtx (x, mips_sim_wait_regs_2, data);
10460 /* Advance simulation state STATE until all of INSN's register
10461 dependencies are satisfied. */
10464 mips_sim_wait_regs (struct mips_sim *state, rtx insn)
10466 mips_sim_insn = insn;
10467 note_uses (&PATTERN (insn), mips_sim_wait_regs_1, state);
10470 /* Advance simulation state STATE until the units required by
10471 instruction INSN are available. */
10474 mips_sim_wait_units (struct mips_sim *state, rtx insn)
10478 tmp_state = alloca (state_size ());
10479 while (state->insns_left == 0
10480 || (memcpy (tmp_state, state->dfa_state, state_size ()),
10481 state_transition (tmp_state, insn) >= 0))
10482 mips_sim_next_cycle (state);
10485 /* Advance simulation state STATE until INSN is ready to issue. */
10488 mips_sim_wait_insn (struct mips_sim *state, rtx insn)
10490 mips_sim_wait_regs (state, insn);
10491 mips_sim_wait_units (state, insn);
10494 /* mips_sim_insn has just set X. Update the LAST_SET array
10495 in simulation state DATA. */
10498 mips_sim_record_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
10500 struct mips_sim *state;
10505 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
10507 state->last_set[REGNO (x) + i].insn = mips_sim_insn;
10508 state->last_set[REGNO (x) + i].time = state->time;
10512 /* Issue instruction INSN in scheduler state STATE. Assume that INSN
10513 can issue immediately (i.e., that mips_sim_wait_insn has already
10517 mips_sim_issue_insn (struct mips_sim *state, rtx insn)
10519 state_transition (state->dfa_state, insn);
10520 state->insns_left--;
10522 mips_sim_insn = insn;
10523 note_stores (PATTERN (insn), mips_sim_record_set, state);
10526 /* Simulate issuing a NOP in state STATE. */
10529 mips_sim_issue_nop (struct mips_sim *state)
10531 if (state->insns_left == 0)
10532 mips_sim_next_cycle (state);
10533 state->insns_left--;
10536 /* Update simulation state STATE so that it's ready to accept the instruction
10537 after INSN. INSN should be part of the main rtl chain, not a member of a
10541 mips_sim_finish_insn (struct mips_sim *state, rtx insn)
10543 /* If INSN is a jump with an implicit delay slot, simulate a nop. */
10545 mips_sim_issue_nop (state);
10547 switch (GET_CODE (SEQ_BEGIN (insn)))
10551 /* We can't predict the processor state after a call or label. */
10552 mips_sim_reset (state);
10556 /* The delay slots of branch likely instructions are only executed
10557 when the branch is taken. Therefore, if the caller has simulated
10558 the delay slot instruction, STATE does not really reflect the state
10559 of the pipeline for the instruction after the delay slot. Also,
10560 branch likely instructions tend to incur a penalty when not taken,
10561 so there will probably be an extra delay between the branch and
10562 the instruction after the delay slot. */
10563 if (INSN_ANNULLED_BRANCH_P (SEQ_BEGIN (insn)))
10564 mips_sim_reset (state);
10572 /* The VR4130 pipeline issues aligned pairs of instructions together,
10573 but it stalls the second instruction if it depends on the first.
10574 In order to cut down the amount of logic required, this dependence
10575 check is not based on a full instruction decode. Instead, any non-SPECIAL
10576 instruction is assumed to modify the register specified by bits 20-16
10577 (which is usually the "rt" field).
10579 In beq, beql, bne and bnel instructions, the rt field is actually an
10580 input, so we can end up with a false dependence between the branch
10581 and its delay slot. If this situation occurs in instruction INSN,
10582 try to avoid it by swapping rs and rt. */
10585 vr4130_avoid_branch_rt_conflict (rtx insn)
10589 first = SEQ_BEGIN (insn);
10590 second = SEQ_END (insn);
10592 && NONJUMP_INSN_P (second)
10593 && GET_CODE (PATTERN (first)) == SET
10594 && GET_CODE (SET_DEST (PATTERN (first))) == PC
10595 && GET_CODE (SET_SRC (PATTERN (first))) == IF_THEN_ELSE)
10597 /* Check for the right kind of condition. */
10598 rtx cond = XEXP (SET_SRC (PATTERN (first)), 0);
10599 if ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
10600 && REG_P (XEXP (cond, 0))
10601 && REG_P (XEXP (cond, 1))
10602 && reg_referenced_p (XEXP (cond, 1), PATTERN (second))
10603 && !reg_referenced_p (XEXP (cond, 0), PATTERN (second)))
10605 /* SECOND mentions the rt register but not the rs register. */
10606 rtx tmp = XEXP (cond, 0);
10607 XEXP (cond, 0) = XEXP (cond, 1);
10608 XEXP (cond, 1) = tmp;
10613 /* Implement -mvr4130-align. Go through each basic block and simulate the
10614 processor pipeline. If we find that a pair of instructions could execute
10615 in parallel, and the first of those instruction is not 8-byte aligned,
10616 insert a nop to make it aligned. */
10619 vr4130_align_insns (void)
10621 struct mips_sim state;
10622 rtx insn, subinsn, last, last2, next;
10627 /* LAST is the last instruction before INSN to have a nonzero length.
10628 LAST2 is the last such instruction before LAST. */
10632 /* ALIGNED_P is true if INSN is known to be at an aligned address. */
10635 mips_sim_init (&state, alloca (state_size ()));
10636 for (insn = get_insns (); insn != 0; insn = next)
10638 unsigned int length;
10640 next = NEXT_INSN (insn);
10642 /* See the comment above vr4130_avoid_branch_rt_conflict for details.
10643 This isn't really related to the alignment pass, but we do it on
10644 the fly to avoid a separate instruction walk. */
10645 vr4130_avoid_branch_rt_conflict (insn);
10647 if (USEFUL_INSN_P (insn))
10648 FOR_EACH_SUBINSN (subinsn, insn)
10650 mips_sim_wait_insn (&state, subinsn);
10652 /* If we want this instruction to issue in parallel with the
10653 previous one, make sure that the previous instruction is
10654 aligned. There are several reasons why this isn't worthwhile
10655 when the second instruction is a call:
10657 - Calls are less likely to be performance critical,
10658 - There's a good chance that the delay slot can execute
10659 in parallel with the call.
10660 - The return address would then be unaligned.
10662 In general, if we're going to insert a nop between instructions
10663 X and Y, it's better to insert it immediately after X. That
10664 way, if the nop makes Y aligned, it will also align any labels
10665 between X and Y. */
10666 if (state.insns_left != state.issue_rate
10667 && !CALL_P (subinsn))
10669 if (subinsn == SEQ_BEGIN (insn) && aligned_p)
10671 /* SUBINSN is the first instruction in INSN and INSN is
10672 aligned. We want to align the previous instruction
10673 instead, so insert a nop between LAST2 and LAST.
10675 Note that LAST could be either a single instruction
10676 or a branch with a delay slot. In the latter case,
10677 LAST, like INSN, is already aligned, but the delay
10678 slot must have some extra delay that stops it from
10679 issuing at the same time as the branch. We therefore
10680 insert a nop before the branch in order to align its
10682 emit_insn_after (gen_nop (), last2);
10685 else if (subinsn != SEQ_BEGIN (insn) && !aligned_p)
10687 /* SUBINSN is the delay slot of INSN, but INSN is
10688 currently unaligned. Insert a nop between
10689 LAST and INSN to align it. */
10690 emit_insn_after (gen_nop (), last);
10694 mips_sim_issue_insn (&state, subinsn);
10696 mips_sim_finish_insn (&state, insn);
10698 /* Update LAST, LAST2 and ALIGNED_P for the next instruction. */
10699 length = get_attr_length (insn);
10702 /* If the instruction is an asm statement or multi-instruction
10703 mips.md patern, the length is only an estimate. Insert an
10704 8 byte alignment after it so that the following instructions
10705 can be handled correctly. */
10706 if (NONJUMP_INSN_P (SEQ_BEGIN (insn))
10707 && (recog_memoized (insn) < 0 || length >= 8))
10709 next = emit_insn_after (gen_align (GEN_INT (3)), insn);
10710 next = NEXT_INSN (next);
10711 mips_sim_next_cycle (&state);
10714 else if (length & 4)
10715 aligned_p = !aligned_p;
10720 /* See whether INSN is an aligned label. */
10721 if (LABEL_P (insn) && label_to_alignment (insn) >= 3)
10727 /* Subroutine of mips_reorg. If there is a hazard between INSN
10728 and a previous instruction, avoid it by inserting nops after
10731 *DELAYED_REG and *HILO_DELAY describe the hazards that apply at
10732 this point. If *DELAYED_REG is non-null, INSN must wait a cycle
10733 before using the value of that register. *HILO_DELAY counts the
10734 number of instructions since the last hilo hazard (that is,
10735 the number of instructions since the last mflo or mfhi).
10737 After inserting nops for INSN, update *DELAYED_REG and *HILO_DELAY
10738 for the next instruction.
10740 LO_REG is an rtx for the LO register, used in dependence checking. */
10743 mips_avoid_hazard (rtx after, rtx insn, int *hilo_delay,
10744 rtx *delayed_reg, rtx lo_reg)
10749 if (!INSN_P (insn))
10752 pattern = PATTERN (insn);
10754 /* Do not put the whole function in .set noreorder if it contains
10755 an asm statement. We don't know whether there will be hazards
10756 between the asm statement and the gcc-generated code. */
10757 if (GET_CODE (pattern) == ASM_INPUT || asm_noperands (pattern) >= 0)
10758 cfun->machine->all_noreorder_p = false;
10760 /* Ignore zero-length instructions (barriers and the like). */
10761 ninsns = get_attr_length (insn) / 4;
10765 /* Work out how many nops are needed. Note that we only care about
10766 registers that are explicitly mentioned in the instruction's pattern.
10767 It doesn't matter that calls use the argument registers or that they
10768 clobber hi and lo. */
10769 if (*hilo_delay < 2 && reg_set_p (lo_reg, pattern))
10770 nops = 2 - *hilo_delay;
10771 else if (*delayed_reg != 0 && reg_referenced_p (*delayed_reg, pattern))
10776 /* Insert the nops between this instruction and the previous one.
10777 Each new nop takes us further from the last hilo hazard. */
10778 *hilo_delay += nops;
10780 emit_insn_after (gen_hazard_nop (), after);
10782 /* Set up the state for the next instruction. */
10783 *hilo_delay += ninsns;
10785 if (INSN_CODE (insn) >= 0)
10786 switch (get_attr_hazard (insn))
10796 set = single_set (insn);
10797 gcc_assert (set != 0);
10798 *delayed_reg = SET_DEST (set);
10804 /* Go through the instruction stream and insert nops where necessary.
10805 See if the whole function can then be put into .set noreorder &
10809 mips_avoid_hazards (void)
10811 rtx insn, last_insn, lo_reg, delayed_reg;
10814 /* Force all instructions to be split into their final form. */
10815 split_all_insns_noflow ();
10817 /* Recalculate instruction lengths without taking nops into account. */
10818 cfun->machine->ignore_hazard_length_p = true;
10819 shorten_branches (get_insns ());
10821 cfun->machine->all_noreorder_p = true;
10823 /* Profiled functions can't be all noreorder because the profiler
10824 support uses assembler macros. */
10825 if (current_function_profile)
10826 cfun->machine->all_noreorder_p = false;
10828 /* Code compiled with -mfix-vr4120 can't be all noreorder because
10829 we rely on the assembler to work around some errata. */
10830 if (TARGET_FIX_VR4120)
10831 cfun->machine->all_noreorder_p = false;
10833 /* The same is true for -mfix-vr4130 if we might generate mflo or
10834 mfhi instructions. Note that we avoid using mflo and mfhi if
10835 the VR4130 macc and dmacc instructions are available instead;
10836 see the *mfhilo_{si,di}_macc patterns. */
10837 if (TARGET_FIX_VR4130 && !ISA_HAS_MACCHI)
10838 cfun->machine->all_noreorder_p = false;
10843 lo_reg = gen_rtx_REG (SImode, LO_REGNUM);
10845 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10848 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
10849 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
10850 mips_avoid_hazard (last_insn, XVECEXP (PATTERN (insn), 0, i),
10851 &hilo_delay, &delayed_reg, lo_reg);
10853 mips_avoid_hazard (last_insn, insn, &hilo_delay,
10854 &delayed_reg, lo_reg);
10861 /* Implement TARGET_MACHINE_DEPENDENT_REORG. */
10866 mips16_lay_out_constants ();
10867 if (TARGET_EXPLICIT_RELOCS)
10869 if (mips_flag_delayed_branch)
10870 dbr_schedule (get_insns ());
10871 mips_avoid_hazards ();
10872 if (TUNE_MIPS4130 && TARGET_VR4130_ALIGN)
10873 vr4130_align_insns ();
10877 /* This function does three things:
10879 - Register the special divsi3 and modsi3 functions if -mfix-vr4120.
10880 - Register the mips16 hardware floating point stubs.
10881 - Register the gofast functions if selected using --enable-gofast. */
10883 #include "config/gofast.h"
10886 mips_init_libfuncs (void)
10888 if (TARGET_FIX_VR4120)
10890 set_optab_libfunc (sdiv_optab, SImode, "__vr4120_divsi3");
10891 set_optab_libfunc (smod_optab, SImode, "__vr4120_modsi3");
10894 if (TARGET_MIPS16 && TARGET_HARD_FLOAT_ABI)
10896 set_optab_libfunc (add_optab, SFmode, "__mips16_addsf3");
10897 set_optab_libfunc (sub_optab, SFmode, "__mips16_subsf3");
10898 set_optab_libfunc (smul_optab, SFmode, "__mips16_mulsf3");
10899 set_optab_libfunc (sdiv_optab, SFmode, "__mips16_divsf3");
10901 set_optab_libfunc (eq_optab, SFmode, "__mips16_eqsf2");
10902 set_optab_libfunc (ne_optab, SFmode, "__mips16_nesf2");
10903 set_optab_libfunc (gt_optab, SFmode, "__mips16_gtsf2");
10904 set_optab_libfunc (ge_optab, SFmode, "__mips16_gesf2");
10905 set_optab_libfunc (lt_optab, SFmode, "__mips16_ltsf2");
10906 set_optab_libfunc (le_optab, SFmode, "__mips16_lesf2");
10907 set_optab_libfunc (unord_optab, SFmode, "__mips16_unordsf2");
10909 set_conv_libfunc (sfix_optab, SImode, SFmode, "__mips16_fix_truncsfsi");
10910 set_conv_libfunc (sfloat_optab, SFmode, SImode, "__mips16_floatsisf");
10911 set_conv_libfunc (ufloat_optab, SFmode, SImode, "__mips16_floatunsisf");
10913 if (TARGET_DOUBLE_FLOAT)
10915 set_optab_libfunc (add_optab, DFmode, "__mips16_adddf3");
10916 set_optab_libfunc (sub_optab, DFmode, "__mips16_subdf3");
10917 set_optab_libfunc (smul_optab, DFmode, "__mips16_muldf3");
10918 set_optab_libfunc (sdiv_optab, DFmode, "__mips16_divdf3");
10920 set_optab_libfunc (eq_optab, DFmode, "__mips16_eqdf2");
10921 set_optab_libfunc (ne_optab, DFmode, "__mips16_nedf2");
10922 set_optab_libfunc (gt_optab, DFmode, "__mips16_gtdf2");
10923 set_optab_libfunc (ge_optab, DFmode, "__mips16_gedf2");
10924 set_optab_libfunc (lt_optab, DFmode, "__mips16_ltdf2");
10925 set_optab_libfunc (le_optab, DFmode, "__mips16_ledf2");
10926 set_optab_libfunc (unord_optab, DFmode, "__mips16_unorddf2");
10928 set_conv_libfunc (sext_optab, DFmode, SFmode, "__mips16_extendsfdf2");
10929 set_conv_libfunc (trunc_optab, SFmode, DFmode, "__mips16_truncdfsf2");
10931 set_conv_libfunc (sfix_optab, SImode, DFmode, "__mips16_fix_truncdfsi");
10932 set_conv_libfunc (sfloat_optab, DFmode, SImode, "__mips16_floatsidf");
10933 set_conv_libfunc (ufloat_optab, DFmode, SImode, "__mips16_floatunsidf");
10937 gofast_maybe_init_libfuncs ();
10940 /* Return a number assessing the cost of moving a register in class
10941 FROM to class TO. The classes are expressed using the enumeration
10942 values such as `GENERAL_REGS'. A value of 2 is the default; other
10943 values are interpreted relative to that.
10945 It is not required that the cost always equal 2 when FROM is the
10946 same as TO; on some machines it is expensive to move between
10947 registers if they are not general registers.
10949 If reload sees an insn consisting of a single `set' between two
10950 hard registers, and if `REGISTER_MOVE_COST' applied to their
10951 classes returns a value of 2, reload does not check to ensure that
10952 the constraints of the insn are met. Setting a cost of other than
10953 2 will allow reload to verify that the constraints are met. You
10954 should do this if the `movM' pattern's constraints do not allow
10957 ??? We make the cost of moving from HI/LO into general
10958 registers the same as for one of moving general registers to
10959 HI/LO for TARGET_MIPS16 in order to prevent allocating a
10960 pseudo to HI/LO. This might hurt optimizations though, it
10961 isn't clear if it is wise. And it might not work in all cases. We
10962 could solve the DImode LO reg problem by using a multiply, just
10963 like reload_{in,out}si. We could solve the SImode/HImode HI reg
10964 problem by using divide instructions. divu puts the remainder in
10965 the HI reg, so doing a divide by -1 will move the value in the HI
10966 reg for all values except -1. We could handle that case by using a
10967 signed divide, e.g. -1 / 2 (or maybe 1 / -2?). We'd have to emit
10968 a compare/branch to test the input value to see which instruction
10969 we need to use. This gets pretty messy, but it is feasible. */
10972 mips_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
10973 enum reg_class to, enum reg_class from)
10975 if (from == M16_REGS && reg_class_subset_p (to, GENERAL_REGS))
10977 else if (from == M16_NA_REGS && reg_class_subset_p (to, GENERAL_REGS))
10979 else if (reg_class_subset_p (from, GENERAL_REGS))
10981 if (to == M16_REGS)
10983 else if (to == M16_NA_REGS)
10985 else if (reg_class_subset_p (to, GENERAL_REGS))
10992 else if (to == FP_REGS)
10994 else if (reg_class_subset_p (to, ACC_REGS))
11001 else if (reg_class_subset_p (to, ALL_COP_REGS))
11006 else if (from == FP_REGS)
11008 if (reg_class_subset_p (to, GENERAL_REGS))
11010 else if (to == FP_REGS)
11012 else if (to == ST_REGS)
11015 else if (reg_class_subset_p (from, ACC_REGS))
11017 if (reg_class_subset_p (to, GENERAL_REGS))
11025 else if (from == ST_REGS && reg_class_subset_p (to, GENERAL_REGS))
11027 else if (reg_class_subset_p (from, ALL_COP_REGS))
11033 ??? What cases are these? Shouldn't we return 2 here? */
11038 /* Return the length of INSN. LENGTH is the initial length computed by
11039 attributes in the machine-description file. */
11042 mips_adjust_insn_length (rtx insn, int length)
11044 /* A unconditional jump has an unfilled delay slot if it is not part
11045 of a sequence. A conditional jump normally has a delay slot, but
11046 does not on MIPS16. */
11047 if (CALL_P (insn) || (TARGET_MIPS16 ? simplejump_p (insn) : JUMP_P (insn)))
11050 /* See how many nops might be needed to avoid hardware hazards. */
11051 if (!cfun->machine->ignore_hazard_length_p && INSN_CODE (insn) >= 0)
11052 switch (get_attr_hazard (insn))
11066 /* All MIPS16 instructions are a measly two bytes. */
11074 /* Return an asm sequence to start a noat block and load the address
11075 of a label into $1. */
11078 mips_output_load_label (void)
11080 if (TARGET_EXPLICIT_RELOCS)
11084 return "%[lw\t%@,%%got_page(%0)(%+)\n\taddiu\t%@,%@,%%got_ofst(%0)";
11087 return "%[ld\t%@,%%got_page(%0)(%+)\n\tdaddiu\t%@,%@,%%got_ofst(%0)";
11090 if (ISA_HAS_LOAD_DELAY)
11091 return "%[lw\t%@,%%got(%0)(%+)%#\n\taddiu\t%@,%@,%%lo(%0)";
11092 return "%[lw\t%@,%%got(%0)(%+)\n\taddiu\t%@,%@,%%lo(%0)";
11096 if (Pmode == DImode)
11097 return "%[dla\t%@,%0";
11099 return "%[la\t%@,%0";
11103 /* Return the assembly code for INSN, which has the operands given by
11104 OPERANDS, and which branches to OPERANDS[1] if some condition is true.
11105 BRANCH_IF_TRUE is the asm template that should be used if OPERANDS[1]
11106 is in range of a direct branch. BRANCH_IF_FALSE is an inverted
11107 version of BRANCH_IF_TRUE. */
11110 mips_output_conditional_branch (rtx insn, rtx *operands,
11111 const char *branch_if_true,
11112 const char *branch_if_false)
11114 unsigned int length;
11115 rtx taken, not_taken;
11117 length = get_attr_length (insn);
11120 /* Just a simple conditional branch. */
11121 mips_branch_likely = (final_sequence && INSN_ANNULLED_BRANCH_P (insn));
11122 return branch_if_true;
11125 /* Generate a reversed branch around a direct jump. This fallback does
11126 not use branch-likely instructions. */
11127 mips_branch_likely = false;
11128 not_taken = gen_label_rtx ();
11129 taken = operands[1];
11131 /* Generate the reversed branch to NOT_TAKEN. */
11132 operands[1] = not_taken;
11133 output_asm_insn (branch_if_false, operands);
11135 /* If INSN has a delay slot, we must provide delay slots for both the
11136 branch to NOT_TAKEN and the conditional jump. We must also ensure
11137 that INSN's delay slot is executed in the appropriate cases. */
11138 if (final_sequence)
11140 /* This first delay slot will always be executed, so use INSN's
11141 delay slot if is not annulled. */
11142 if (!INSN_ANNULLED_BRANCH_P (insn))
11144 final_scan_insn (XVECEXP (final_sequence, 0, 1),
11145 asm_out_file, optimize, 1, NULL);
11146 INSN_DELETED_P (XVECEXP (final_sequence, 0, 1)) = 1;
11149 output_asm_insn ("nop", 0);
11150 fprintf (asm_out_file, "\n");
11153 /* Output the unconditional branch to TAKEN. */
11155 output_asm_insn ("j\t%0%/", &taken);
11158 output_asm_insn (mips_output_load_label (), &taken);
11159 output_asm_insn ("jr\t%@%]%/", 0);
11162 /* Now deal with its delay slot; see above. */
11163 if (final_sequence)
11165 /* This delay slot will only be executed if the branch is taken.
11166 Use INSN's delay slot if is annulled. */
11167 if (INSN_ANNULLED_BRANCH_P (insn))
11169 final_scan_insn (XVECEXP (final_sequence, 0, 1),
11170 asm_out_file, optimize, 1, NULL);
11171 INSN_DELETED_P (XVECEXP (final_sequence, 0, 1)) = 1;
11174 output_asm_insn ("nop", 0);
11175 fprintf (asm_out_file, "\n");
11178 /* Output NOT_TAKEN. */
11179 (*targetm.asm_out.internal_label) (asm_out_file, "L",
11180 CODE_LABEL_NUMBER (not_taken));
11184 /* Return the assembly code for INSN, which branches to OPERANDS[1]
11185 if some ordered condition is true. The condition is given by
11186 OPERANDS[0] if !INVERTED_P, otherwise it is the inverse of
11187 OPERANDS[0]. OPERANDS[2] is the comparison's first operand;
11188 its second is always zero. */
11191 mips_output_order_conditional_branch (rtx insn, rtx *operands, bool inverted_p)
11193 const char *branch[2];
11195 /* Make BRANCH[1] branch to OPERANDS[1] when the condition is true.
11196 Make BRANCH[0] branch on the inverse condition. */
11197 switch (GET_CODE (operands[0]))
11199 /* These cases are equivalent to comparisons against zero. */
11201 inverted_p = !inverted_p;
11202 /* Fall through. */
11204 branch[!inverted_p] = MIPS_BRANCH ("bne", "%2,%.,%1");
11205 branch[inverted_p] = MIPS_BRANCH ("beq", "%2,%.,%1");
11208 /* These cases are always true or always false. */
11210 inverted_p = !inverted_p;
11211 /* Fall through. */
11213 branch[!inverted_p] = MIPS_BRANCH ("beq", "%.,%.,%1");
11214 branch[inverted_p] = MIPS_BRANCH ("bne", "%.,%.,%1");
11218 branch[!inverted_p] = MIPS_BRANCH ("b%C0z", "%2,%1");
11219 branch[inverted_p] = MIPS_BRANCH ("b%N0z", "%2,%1");
11222 return mips_output_conditional_branch (insn, operands, branch[1], branch[0]);
11225 /* Used to output div or ddiv instruction DIVISION, which has the operands
11226 given by OPERANDS. Add in a divide-by-zero check if needed.
11228 When working around R4000 and R4400 errata, we need to make sure that
11229 the division is not immediately followed by a shift[1][2]. We also
11230 need to stop the division from being put into a branch delay slot[3].
11231 The easiest way to avoid both problems is to add a nop after the
11232 division. When a divide-by-zero check is needed, this nop can be
11233 used to fill the branch delay slot.
11235 [1] If a double-word or a variable shift executes immediately
11236 after starting an integer division, the shift may give an
11237 incorrect result. See quotations of errata #16 and #28 from
11238 "MIPS R4000PC/SC Errata, Processor Revision 2.2 and 3.0"
11239 in mips.md for details.
11241 [2] A similar bug to [1] exists for all revisions of the
11242 R4000 and the R4400 when run in an MC configuration.
11243 From "MIPS R4000MC Errata, Processor Revision 2.2 and 3.0":
11245 "19. In this following sequence:
11247 ddiv (or ddivu or div or divu)
11248 dsll32 (or dsrl32, dsra32)
11250 if an MPT stall occurs, while the divide is slipping the cpu
11251 pipeline, then the following double shift would end up with an
11254 Workaround: The compiler needs to avoid generating any
11255 sequence with divide followed by extended double shift."
11257 This erratum is also present in "MIPS R4400MC Errata, Processor
11258 Revision 1.0" and "MIPS R4400MC Errata, Processor Revision 2.0
11259 & 3.0" as errata #10 and #4, respectively.
11261 [3] From "MIPS R4000PC/SC Errata, Processor Revision 2.2 and 3.0"
11262 (also valid for MIPS R4000MC processors):
11264 "52. R4000SC: This bug does not apply for the R4000PC.
11266 There are two flavors of this bug:
11268 1) If the instruction just after divide takes an RF exception
11269 (tlb-refill, tlb-invalid) and gets an instruction cache
11270 miss (both primary and secondary) and the line which is
11271 currently in secondary cache at this index had the first
11272 data word, where the bits 5..2 are set, then R4000 would
11273 get a wrong result for the div.
11278 ------------------- # end-of page. -tlb-refill
11283 ------------------- # end-of page. -tlb-invalid
11286 2) If the divide is in the taken branch delay slot, where the
11287 target takes RF exception and gets an I-cache miss for the
11288 exception vector or where I-cache miss occurs for the
11289 target address, under the above mentioned scenarios, the
11290 div would get wrong results.
11293 j r2 # to next page mapped or unmapped
11294 div r8,r9 # this bug would be there as long
11295 # as there is an ICache miss and
11296 nop # the "data pattern" is present
11299 beq r0, r0, NextPage # to Next page
11303 This bug is present for div, divu, ddiv, and ddivu
11306 Workaround: For item 1), OS could make sure that the next page
11307 after the divide instruction is also mapped. For item 2), the
11308 compiler could make sure that the divide instruction is not in
11309 the branch delay slot."
11311 These processors have PRId values of 0x00004220 and 0x00004300 for
11312 the R4000 and 0x00004400, 0x00004500 and 0x00004600 for the R4400. */
11315 mips_output_division (const char *division, rtx *operands)
11320 if (TARGET_FIX_R4000 || TARGET_FIX_R4400)
11322 output_asm_insn (s, operands);
11325 if (TARGET_CHECK_ZERO_DIV)
11329 output_asm_insn (s, operands);
11330 s = "bnez\t%2,1f\n\tbreak\t7\n1:";
11332 else if (GENERATE_DIVIDE_TRAPS)
11334 output_asm_insn (s, operands);
11335 s = "teq\t%2,%.,7";
11339 output_asm_insn ("%(bne\t%2,%.,1f", operands);
11340 output_asm_insn (s, operands);
11341 s = "break\t7%)\n1:";
11347 /* Return true if GIVEN is the same as CANONICAL, or if it is CANONICAL
11348 with a final "000" replaced by "k". Ignore case.
11350 Note: this function is shared between GCC and GAS. */
11353 mips_strict_matching_cpu_name_p (const char *canonical, const char *given)
11355 while (*given != 0 && TOLOWER (*given) == TOLOWER (*canonical))
11356 given++, canonical++;
11358 return ((*given == 0 && *canonical == 0)
11359 || (strcmp (canonical, "000") == 0 && strcasecmp (given, "k") == 0));
11363 /* Return true if GIVEN matches CANONICAL, where GIVEN is a user-supplied
11364 CPU name. We've traditionally allowed a lot of variation here.
11366 Note: this function is shared between GCC and GAS. */
11369 mips_matching_cpu_name_p (const char *canonical, const char *given)
11371 /* First see if the name matches exactly, or with a final "000"
11372 turned into "k". */
11373 if (mips_strict_matching_cpu_name_p (canonical, given))
11376 /* If not, try comparing based on numerical designation alone.
11377 See if GIVEN is an unadorned number, or 'r' followed by a number. */
11378 if (TOLOWER (*given) == 'r')
11380 if (!ISDIGIT (*given))
11383 /* Skip over some well-known prefixes in the canonical name,
11384 hoping to find a number there too. */
11385 if (TOLOWER (canonical[0]) == 'v' && TOLOWER (canonical[1]) == 'r')
11387 else if (TOLOWER (canonical[0]) == 'r' && TOLOWER (canonical[1]) == 'm')
11389 else if (TOLOWER (canonical[0]) == 'r')
11392 return mips_strict_matching_cpu_name_p (canonical, given);
11396 /* Return the mips_cpu_info entry for the processor or ISA given
11397 by CPU_STRING. Return null if the string isn't recognized.
11399 A similar function exists in GAS. */
11401 static const struct mips_cpu_info *
11402 mips_parse_cpu (const char *cpu_string)
11407 /* In the past, we allowed upper-case CPU names, but it doesn't
11408 work well with the multilib machinery. */
11409 for (s = cpu_string; *s != 0; s++)
11412 warning (0, "the cpu name must be lower case");
11416 /* 'from-abi' selects the most compatible architecture for the given
11417 ABI: MIPS I for 32-bit ABIs and MIPS III for 64-bit ABIs. For the
11418 EABIs, we have to decide whether we're using the 32-bit or 64-bit
11419 version. Look first at the -mgp options, if given, otherwise base
11420 the choice on MASK_64BIT in TARGET_DEFAULT. */
11421 if (strcasecmp (cpu_string, "from-abi") == 0)
11422 return mips_cpu_info_from_isa (ABI_NEEDS_32BIT_REGS ? 1
11423 : ABI_NEEDS_64BIT_REGS ? 3
11424 : (TARGET_64BIT ? 3 : 1));
11426 /* 'default' has traditionally been a no-op. Probably not very useful. */
11427 if (strcasecmp (cpu_string, "default") == 0)
11430 for (i = 0; i < ARRAY_SIZE (mips_cpu_info_table); i++)
11431 if (mips_matching_cpu_name_p (mips_cpu_info_table[i].name, cpu_string))
11432 return mips_cpu_info_table + i;
11438 /* Return the processor associated with the given ISA level, or null
11439 if the ISA isn't valid. */
11441 static const struct mips_cpu_info *
11442 mips_cpu_info_from_isa (int isa)
11446 for (i = 0; i < ARRAY_SIZE (mips_cpu_info_table); i++)
11447 if (mips_cpu_info_table[i].isa == isa)
11448 return mips_cpu_info_table + i;
11453 /* Implement HARD_REGNO_NREGS. The size of FP registers is controlled
11454 by UNITS_PER_FPREG. The size of FP status registers is always 4, because
11455 they only hold condition code modes, and CCmode is always considered to
11456 be 4 bytes wide. All other registers are word sized. */
11459 mips_hard_regno_nregs (int regno, enum machine_mode mode)
11461 if (ST_REG_P (regno))
11462 return ((GET_MODE_SIZE (mode) + 3) / 4);
11463 else if (! FP_REG_P (regno))
11464 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
11466 return ((GET_MODE_SIZE (mode) + UNITS_PER_FPREG - 1) / UNITS_PER_FPREG);
11469 /* Implement TARGET_RETURN_IN_MEMORY. Under the old (i.e., 32 and O64 ABIs)
11470 all BLKmode objects are returned in memory. Under the new (N32 and
11471 64-bit MIPS ABIs) small structures are returned in a register.
11472 Objects with varying size must still be returned in memory, of
11476 mips_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
11479 return (TYPE_MODE (type) == BLKmode);
11481 return ((int_size_in_bytes (type) > (2 * UNITS_PER_WORD))
11482 || (int_size_in_bytes (type) == -1));
11486 mips_strict_argument_naming (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
11488 return !TARGET_OLDABI;
11491 /* Return true if INSN is a multiply-add or multiply-subtract
11492 instruction and PREV assigns to the accumulator operand. */
11495 mips_linked_madd_p (rtx prev, rtx insn)
11499 x = single_set (insn);
11505 if (GET_CODE (x) == PLUS
11506 && GET_CODE (XEXP (x, 0)) == MULT
11507 && reg_set_p (XEXP (x, 1), prev))
11510 if (GET_CODE (x) == MINUS
11511 && GET_CODE (XEXP (x, 1)) == MULT
11512 && reg_set_p (XEXP (x, 0), prev))
11518 /* Used by TUNE_MACC_CHAINS to record the last scheduled instruction
11519 that may clobber hi or lo. */
11521 static rtx mips_macc_chains_last_hilo;
11523 /* A TUNE_MACC_CHAINS helper function. Record that instruction INSN has
11524 been scheduled, updating mips_macc_chains_last_hilo appropriately. */
11527 mips_macc_chains_record (rtx insn)
11529 if (get_attr_may_clobber_hilo (insn))
11530 mips_macc_chains_last_hilo = insn;
11533 /* A TUNE_MACC_CHAINS helper function. Search ready queue READY, which
11534 has NREADY elements, looking for a multiply-add or multiply-subtract
11535 instruction that is cumulative with mips_macc_chains_last_hilo.
11536 If there is one, promote it ahead of anything else that might
11537 clobber hi or lo. */
11540 mips_macc_chains_reorder (rtx *ready, int nready)
11544 if (mips_macc_chains_last_hilo != 0)
11545 for (i = nready - 1; i >= 0; i--)
11546 if (mips_linked_madd_p (mips_macc_chains_last_hilo, ready[i]))
11548 for (j = nready - 1; j > i; j--)
11549 if (recog_memoized (ready[j]) >= 0
11550 && get_attr_may_clobber_hilo (ready[j]))
11552 mips_promote_ready (ready, i, j);
11559 /* The last instruction to be scheduled. */
11561 static rtx vr4130_last_insn;
11563 /* A note_stores callback used by vr4130_true_reg_dependence_p. DATA
11564 points to an rtx that is initially an instruction. Nullify the rtx
11565 if the instruction uses the value of register X. */
11568 vr4130_true_reg_dependence_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
11570 rtx *insn_ptr = data;
11573 && reg_referenced_p (x, PATTERN (*insn_ptr)))
11577 /* Return true if there is true register dependence between vr4130_last_insn
11581 vr4130_true_reg_dependence_p (rtx insn)
11583 note_stores (PATTERN (vr4130_last_insn),
11584 vr4130_true_reg_dependence_p_1, &insn);
11588 /* A TUNE_MIPS4130 helper function. Given that INSN1 is at the head of
11589 the ready queue and that INSN2 is the instruction after it, return
11590 true if it is worth promoting INSN2 ahead of INSN1. Look for cases
11591 in which INSN1 and INSN2 can probably issue in parallel, but for
11592 which (INSN2, INSN1) should be less sensitive to instruction
11593 alignment than (INSN1, INSN2). See 4130.md for more details. */
11596 vr4130_swap_insns_p (rtx insn1, rtx insn2)
11598 sd_iterator_def sd_it;
11601 /* Check for the following case:
11603 1) there is some other instruction X with an anti dependence on INSN1;
11604 2) X has a higher priority than INSN2; and
11605 3) X is an arithmetic instruction (and thus has no unit restrictions).
11607 If INSN1 is the last instruction blocking X, it would better to
11608 choose (INSN1, X) over (INSN2, INSN1). */
11609 FOR_EACH_DEP (insn1, SD_LIST_FORW, sd_it, dep)
11610 if (DEP_TYPE (dep) == REG_DEP_ANTI
11611 && INSN_PRIORITY (DEP_CON (dep)) > INSN_PRIORITY (insn2)
11612 && recog_memoized (DEP_CON (dep)) >= 0
11613 && get_attr_vr4130_class (DEP_CON (dep)) == VR4130_CLASS_ALU)
11616 if (vr4130_last_insn != 0
11617 && recog_memoized (insn1) >= 0
11618 && recog_memoized (insn2) >= 0)
11620 /* See whether INSN1 and INSN2 use different execution units,
11621 or if they are both ALU-type instructions. If so, they can
11622 probably execute in parallel. */
11623 enum attr_vr4130_class class1 = get_attr_vr4130_class (insn1);
11624 enum attr_vr4130_class class2 = get_attr_vr4130_class (insn2);
11625 if (class1 != class2 || class1 == VR4130_CLASS_ALU)
11627 /* If only one of the instructions has a dependence on
11628 vr4130_last_insn, prefer to schedule the other one first. */
11629 bool dep1 = vr4130_true_reg_dependence_p (insn1);
11630 bool dep2 = vr4130_true_reg_dependence_p (insn2);
11634 /* Prefer to schedule INSN2 ahead of INSN1 if vr4130_last_insn
11635 is not an ALU-type instruction and if INSN1 uses the same
11636 execution unit. (Note that if this condition holds, we already
11637 know that INSN2 uses a different execution unit.) */
11638 if (class1 != VR4130_CLASS_ALU
11639 && recog_memoized (vr4130_last_insn) >= 0
11640 && class1 == get_attr_vr4130_class (vr4130_last_insn))
11647 /* A TUNE_MIPS4130 helper function. (READY, NREADY) describes a ready
11648 queue with at least two instructions. Swap the first two if
11649 vr4130_swap_insns_p says that it could be worthwhile. */
11652 vr4130_reorder (rtx *ready, int nready)
11654 if (vr4130_swap_insns_p (ready[nready - 1], ready[nready - 2]))
11655 mips_promote_ready (ready, nready - 2, nready - 1);
11658 /* Remove the instruction at index LOWER from ready queue READY and
11659 reinsert it in front of the instruction at index HIGHER. LOWER must
11663 mips_promote_ready (rtx *ready, int lower, int higher)
11668 new_head = ready[lower];
11669 for (i = lower; i < higher; i++)
11670 ready[i] = ready[i + 1];
11671 ready[i] = new_head;
11674 /* If the priority of the instruction at POS2 in the ready queue READY
11675 is within LIMIT units of that of the instruction at POS1, swap the
11676 instructions if POS2 is not already less than POS1. */
11679 mips_maybe_swap_ready (rtx *ready, int pos1, int pos2, int limit)
11682 && INSN_PRIORITY (ready[pos1]) + limit >= INSN_PRIORITY (ready[pos2]))
11685 temp = ready[pos1];
11686 ready[pos1] = ready[pos2];
11687 ready[pos2] = temp;
11691 /* Record whether last 74k AGEN instruction was a load or store. */
11693 static enum attr_type mips_last_74k_agen_insn = TYPE_UNKNOWN;
11695 /* Initialize mips_last_74k_agen_insn from INSN. A null argument
11696 resets to TYPE_UNKNOWN state. */
11699 mips_74k_agen_init (rtx insn)
11701 if (!insn || !NONJUMP_INSN_P (insn))
11702 mips_last_74k_agen_insn = TYPE_UNKNOWN;
11703 else if (USEFUL_INSN_P (insn))
11705 enum attr_type type = get_attr_type (insn);
11706 if (type == TYPE_LOAD || type == TYPE_STORE)
11707 mips_last_74k_agen_insn = type;
11711 /* A TUNE_74K helper function. The 74K AGEN pipeline likes multiple
11712 loads to be grouped together, and multiple stores to be grouped
11713 together. Swap things around in the ready queue to make this happen. */
11716 mips_74k_agen_reorder (rtx *ready, int nready)
11719 int store_pos, load_pos;
11724 for (i = nready - 1; i >= 0; i--)
11726 rtx insn = ready[i];
11727 if (USEFUL_INSN_P (insn))
11728 switch (get_attr_type (insn))
11731 if (store_pos == -1)
11736 if (load_pos == -1)
11745 if (load_pos == -1 || store_pos == -1)
11748 switch (mips_last_74k_agen_insn)
11751 /* Prefer to schedule loads since they have a higher latency. */
11753 /* Swap loads to the front of the queue. */
11754 mips_maybe_swap_ready (ready, load_pos, store_pos, 4);
11757 /* Swap stores to the front of the queue. */
11758 mips_maybe_swap_ready (ready, store_pos, load_pos, 4);
11765 /* Implement TARGET_SCHED_INIT. */
11768 mips_sched_init (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
11769 int max_ready ATTRIBUTE_UNUSED)
11771 mips_macc_chains_last_hilo = 0;
11772 vr4130_last_insn = 0;
11773 mips_74k_agen_init (NULL_RTX);
11776 /* Implement TARGET_SCHED_REORDER and TARG_SCHED_REORDER2. */
11779 mips_sched_reorder (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
11780 rtx *ready, int *nreadyp, int cycle ATTRIBUTE_UNUSED)
11782 if (!reload_completed
11783 && TUNE_MACC_CHAINS
11785 mips_macc_chains_reorder (ready, *nreadyp);
11786 if (reload_completed
11788 && !TARGET_VR4130_ALIGN
11790 vr4130_reorder (ready, *nreadyp);
11792 mips_74k_agen_reorder (ready, *nreadyp);
11793 return mips_issue_rate ();
11796 /* Implement TARGET_SCHED_VARIABLE_ISSUE. */
11799 mips_variable_issue (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
11800 rtx insn, int more)
11803 mips_74k_agen_init (insn);
11804 switch (GET_CODE (PATTERN (insn)))
11808 /* Don't count USEs and CLOBBERs against the issue rate. */
11813 if (!reload_completed && TUNE_MACC_CHAINS)
11814 mips_macc_chains_record (insn);
11815 vr4130_last_insn = insn;
11821 /* Implement TARGET_SCHED_ADJUST_COST. We assume that anti and output
11822 dependencies have no cost, except on the 20Kc where output-dependence
11823 is treated like input-dependence. */
11826 mips_adjust_cost (rtx insn ATTRIBUTE_UNUSED, rtx link,
11827 rtx dep ATTRIBUTE_UNUSED, int cost)
11829 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
11832 if (REG_NOTE_KIND (link) != 0)
11837 /* Return the number of instructions that can be issued per cycle. */
11840 mips_issue_rate (void)
11844 case PROCESSOR_74KC:
11845 case PROCESSOR_74KF2_1:
11846 case PROCESSOR_74KF1_1:
11847 case PROCESSOR_74KF3_2:
11848 /* The 74k is not strictly quad-issue cpu, but can be seen as one
11849 by the scheduler. It can issue 1 ALU, 1 AGEN and 2 FPU insns,
11850 but in reality only a maximum of 3 insns can be issued as the
11851 floating point load/stores also require a slot in the AGEN pipe. */
11854 case PROCESSOR_20KC:
11855 case PROCESSOR_R4130:
11856 case PROCESSOR_R5400:
11857 case PROCESSOR_R5500:
11858 case PROCESSOR_R7000:
11859 case PROCESSOR_R9000:
11862 case PROCESSOR_SB1:
11863 case PROCESSOR_SB1A:
11864 /* This is actually 4, but we get better performance if we claim 3.
11865 This is partly because of unwanted speculative code motion with the
11866 larger number, and partly because in most common cases we can't
11867 reach the theoretical max of 4. */
11875 /* Implements TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD. This should
11876 be as wide as the scheduling freedom in the DFA. */
11879 mips_multipass_dfa_lookahead (void)
11881 /* Can schedule up to 4 of the 6 function units in any one cycle. */
11888 /* Implements a store data bypass check. We need this because the cprestore
11889 pattern is type store, but defined using an UNSPEC. This UNSPEC causes the
11890 default routine to abort. We just return false for that case. */
11891 /* ??? Should try to give a better result here than assuming false. */
11894 mips_store_data_bypass_p (rtx out_insn, rtx in_insn)
11896 if (GET_CODE (PATTERN (in_insn)) == UNSPEC_VOLATILE)
11899 return ! store_data_bypass_p (out_insn, in_insn);
11902 /* Given that we have an rtx of the form (prefetch ... WRITE LOCALITY),
11903 return the first operand of the associated "pref" or "prefx" insn. */
11906 mips_prefetch_cookie (rtx write, rtx locality)
11908 /* store_streamed / load_streamed. */
11909 if (INTVAL (locality) <= 0)
11910 return GEN_INT (INTVAL (write) + 4);
11912 /* store / load. */
11913 if (INTVAL (locality) <= 2)
11916 /* store_retained / load_retained. */
11917 return GEN_INT (INTVAL (write) + 6);
11920 /* MIPS builtin function support. */
11922 struct builtin_description
11924 /* The code of the main .md file instruction. See mips_builtin_type
11925 for more information. */
11926 enum insn_code icode;
11928 /* The floating-point comparison code to use with ICODE, if any. */
11929 enum mips_fp_condition cond;
11931 /* The name of the builtin function. */
11934 /* Specifies how the function should be expanded. */
11935 enum mips_builtin_type builtin_type;
11937 /* The function's prototype. */
11938 enum mips_function_type function_type;
11940 /* The target flags required for this function. */
11944 /* Define a MIPS_BUILTIN_DIRECT function for instruction CODE_FOR_mips_<INSN>.
11945 FUNCTION_TYPE and TARGET_FLAGS are builtin_description fields. */
11946 #define DIRECT_BUILTIN(INSN, FUNCTION_TYPE, TARGET_FLAGS) \
11947 { CODE_FOR_mips_ ## INSN, 0, "__builtin_mips_" #INSN, \
11948 MIPS_BUILTIN_DIRECT, FUNCTION_TYPE, TARGET_FLAGS }
11950 /* Define __builtin_mips_<INSN>_<COND>_{s,d}, both of which require
11952 #define CMP_SCALAR_BUILTINS(INSN, COND, TARGET_FLAGS) \
11953 { CODE_FOR_mips_ ## INSN ## _cond_s, MIPS_FP_COND_ ## COND, \
11954 "__builtin_mips_" #INSN "_" #COND "_s", \
11955 MIPS_BUILTIN_CMP_SINGLE, MIPS_INT_FTYPE_SF_SF, TARGET_FLAGS }, \
11956 { CODE_FOR_mips_ ## INSN ## _cond_d, MIPS_FP_COND_ ## COND, \
11957 "__builtin_mips_" #INSN "_" #COND "_d", \
11958 MIPS_BUILTIN_CMP_SINGLE, MIPS_INT_FTYPE_DF_DF, TARGET_FLAGS }
11960 /* Define __builtin_mips_{any,all,upper,lower}_<INSN>_<COND>_ps.
11961 The lower and upper forms require TARGET_FLAGS while the any and all
11962 forms require MASK_MIPS3D. */
11963 #define CMP_PS_BUILTINS(INSN, COND, TARGET_FLAGS) \
11964 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11965 "__builtin_mips_any_" #INSN "_" #COND "_ps", \
11966 MIPS_BUILTIN_CMP_ANY, MIPS_INT_FTYPE_V2SF_V2SF, MASK_MIPS3D }, \
11967 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11968 "__builtin_mips_all_" #INSN "_" #COND "_ps", \
11969 MIPS_BUILTIN_CMP_ALL, MIPS_INT_FTYPE_V2SF_V2SF, MASK_MIPS3D }, \
11970 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11971 "__builtin_mips_lower_" #INSN "_" #COND "_ps", \
11972 MIPS_BUILTIN_CMP_LOWER, MIPS_INT_FTYPE_V2SF_V2SF, TARGET_FLAGS }, \
11973 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11974 "__builtin_mips_upper_" #INSN "_" #COND "_ps", \
11975 MIPS_BUILTIN_CMP_UPPER, MIPS_INT_FTYPE_V2SF_V2SF, TARGET_FLAGS }
11977 /* Define __builtin_mips_{any,all}_<INSN>_<COND>_4s. The functions
11978 require MASK_MIPS3D. */
11979 #define CMP_4S_BUILTINS(INSN, COND) \
11980 { CODE_FOR_mips_ ## INSN ## _cond_4s, MIPS_FP_COND_ ## COND, \
11981 "__builtin_mips_any_" #INSN "_" #COND "_4s", \
11982 MIPS_BUILTIN_CMP_ANY, MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF, \
11984 { CODE_FOR_mips_ ## INSN ## _cond_4s, MIPS_FP_COND_ ## COND, \
11985 "__builtin_mips_all_" #INSN "_" #COND "_4s", \
11986 MIPS_BUILTIN_CMP_ALL, MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF, \
11989 /* Define __builtin_mips_mov{t,f}_<INSN>_<COND>_ps. The comparison
11990 instruction requires TARGET_FLAGS. */
11991 #define MOVTF_BUILTINS(INSN, COND, TARGET_FLAGS) \
11992 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11993 "__builtin_mips_movt_" #INSN "_" #COND "_ps", \
11994 MIPS_BUILTIN_MOVT, MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF, \
11996 { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND, \
11997 "__builtin_mips_movf_" #INSN "_" #COND "_ps", \
11998 MIPS_BUILTIN_MOVF, MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF, \
12001 /* Define all the builtins related to c.cond.fmt condition COND. */
12002 #define CMP_BUILTINS(COND) \
12003 MOVTF_BUILTINS (c, COND, MASK_PAIRED_SINGLE_FLOAT), \
12004 MOVTF_BUILTINS (cabs, COND, MASK_MIPS3D), \
12005 CMP_SCALAR_BUILTINS (cabs, COND, MASK_MIPS3D), \
12006 CMP_PS_BUILTINS (c, COND, MASK_PAIRED_SINGLE_FLOAT), \
12007 CMP_PS_BUILTINS (cabs, COND, MASK_MIPS3D), \
12008 CMP_4S_BUILTINS (c, COND), \
12009 CMP_4S_BUILTINS (cabs, COND)
12011 static const struct builtin_description mips_bdesc[] =
12013 DIRECT_BUILTIN (pll_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
12014 DIRECT_BUILTIN (pul_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
12015 DIRECT_BUILTIN (plu_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
12016 DIRECT_BUILTIN (puu_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_PAIRED_SINGLE_FLOAT),
12017 DIRECT_BUILTIN (cvt_ps_s, MIPS_V2SF_FTYPE_SF_SF, MASK_PAIRED_SINGLE_FLOAT),
12018 DIRECT_BUILTIN (cvt_s_pl, MIPS_SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT),
12019 DIRECT_BUILTIN (cvt_s_pu, MIPS_SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT),
12020 DIRECT_BUILTIN (abs_ps, MIPS_V2SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT),
12022 DIRECT_BUILTIN (alnv_ps, MIPS_V2SF_FTYPE_V2SF_V2SF_INT,
12023 MASK_PAIRED_SINGLE_FLOAT),
12024 DIRECT_BUILTIN (addr_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
12025 DIRECT_BUILTIN (mulr_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
12026 DIRECT_BUILTIN (cvt_pw_ps, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
12027 DIRECT_BUILTIN (cvt_ps_pw, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
12029 DIRECT_BUILTIN (recip1_s, MIPS_SF_FTYPE_SF, MASK_MIPS3D),
12030 DIRECT_BUILTIN (recip1_d, MIPS_DF_FTYPE_DF, MASK_MIPS3D),
12031 DIRECT_BUILTIN (recip1_ps, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
12032 DIRECT_BUILTIN (recip2_s, MIPS_SF_FTYPE_SF_SF, MASK_MIPS3D),
12033 DIRECT_BUILTIN (recip2_d, MIPS_DF_FTYPE_DF_DF, MASK_MIPS3D),
12034 DIRECT_BUILTIN (recip2_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
12036 DIRECT_BUILTIN (rsqrt1_s, MIPS_SF_FTYPE_SF, MASK_MIPS3D),
12037 DIRECT_BUILTIN (rsqrt1_d, MIPS_DF_FTYPE_DF, MASK_MIPS3D),
12038 DIRECT_BUILTIN (rsqrt1_ps, MIPS_V2SF_FTYPE_V2SF, MASK_MIPS3D),
12039 DIRECT_BUILTIN (rsqrt2_s, MIPS_SF_FTYPE_SF_SF, MASK_MIPS3D),
12040 DIRECT_BUILTIN (rsqrt2_d, MIPS_DF_FTYPE_DF_DF, MASK_MIPS3D),
12041 DIRECT_BUILTIN (rsqrt2_ps, MIPS_V2SF_FTYPE_V2SF_V2SF, MASK_MIPS3D),
12043 MIPS_FP_CONDITIONS (CMP_BUILTINS)
12046 /* Builtin functions for the SB-1 processor. */
12048 #define CODE_FOR_mips_sqrt_ps CODE_FOR_sqrtv2sf2
12050 static const struct builtin_description sb1_bdesc[] =
12052 DIRECT_BUILTIN (sqrt_ps, MIPS_V2SF_FTYPE_V2SF, MASK_PAIRED_SINGLE_FLOAT)
12055 /* Builtin functions for DSP ASE. */
12057 #define CODE_FOR_mips_addq_ph CODE_FOR_addv2hi3
12058 #define CODE_FOR_mips_addu_qb CODE_FOR_addv4qi3
12059 #define CODE_FOR_mips_subq_ph CODE_FOR_subv2hi3
12060 #define CODE_FOR_mips_subu_qb CODE_FOR_subv4qi3
12061 #define CODE_FOR_mips_mul_ph CODE_FOR_mulv2hi3
12063 /* Define a MIPS_BUILTIN_DIRECT_NO_TARGET function for instruction
12064 CODE_FOR_mips_<INSN>. FUNCTION_TYPE and TARGET_FLAGS are
12065 builtin_description fields. */
12066 #define DIRECT_NO_TARGET_BUILTIN(INSN, FUNCTION_TYPE, TARGET_FLAGS) \
12067 { CODE_FOR_mips_ ## INSN, 0, "__builtin_mips_" #INSN, \
12068 MIPS_BUILTIN_DIRECT_NO_TARGET, FUNCTION_TYPE, TARGET_FLAGS }
12070 /* Define __builtin_mips_bposge<VALUE>. <VALUE> is 32 for the MIPS32 DSP
12071 branch instruction. TARGET_FLAGS is a builtin_description field. */
12072 #define BPOSGE_BUILTIN(VALUE, TARGET_FLAGS) \
12073 { CODE_FOR_mips_bposge, 0, "__builtin_mips_bposge" #VALUE, \
12074 MIPS_BUILTIN_BPOSGE ## VALUE, MIPS_SI_FTYPE_VOID, TARGET_FLAGS }
12076 static const struct builtin_description dsp_bdesc[] =
12078 DIRECT_BUILTIN (addq_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
12079 DIRECT_BUILTIN (addq_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
12080 DIRECT_BUILTIN (addq_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
12081 DIRECT_BUILTIN (addu_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
12082 DIRECT_BUILTIN (addu_s_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
12083 DIRECT_BUILTIN (subq_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
12084 DIRECT_BUILTIN (subq_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
12085 DIRECT_BUILTIN (subq_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
12086 DIRECT_BUILTIN (subu_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
12087 DIRECT_BUILTIN (subu_s_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
12088 DIRECT_BUILTIN (addsc, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
12089 DIRECT_BUILTIN (addwc, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
12090 DIRECT_BUILTIN (modsub, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
12091 DIRECT_BUILTIN (raddu_w_qb, MIPS_SI_FTYPE_V4QI, MASK_DSP),
12092 DIRECT_BUILTIN (absq_s_ph, MIPS_V2HI_FTYPE_V2HI, MASK_DSP),
12093 DIRECT_BUILTIN (absq_s_w, MIPS_SI_FTYPE_SI, MASK_DSP),
12094 DIRECT_BUILTIN (precrq_qb_ph, MIPS_V4QI_FTYPE_V2HI_V2HI, MASK_DSP),
12095 DIRECT_BUILTIN (precrq_ph_w, MIPS_V2HI_FTYPE_SI_SI, MASK_DSP),
12096 DIRECT_BUILTIN (precrq_rs_ph_w, MIPS_V2HI_FTYPE_SI_SI, MASK_DSP),
12097 DIRECT_BUILTIN (precrqu_s_qb_ph, MIPS_V4QI_FTYPE_V2HI_V2HI, MASK_DSP),
12098 DIRECT_BUILTIN (preceq_w_phl, MIPS_SI_FTYPE_V2HI, MASK_DSP),
12099 DIRECT_BUILTIN (preceq_w_phr, MIPS_SI_FTYPE_V2HI, MASK_DSP),
12100 DIRECT_BUILTIN (precequ_ph_qbl, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
12101 DIRECT_BUILTIN (precequ_ph_qbr, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
12102 DIRECT_BUILTIN (precequ_ph_qbla, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
12103 DIRECT_BUILTIN (precequ_ph_qbra, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
12104 DIRECT_BUILTIN (preceu_ph_qbl, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
12105 DIRECT_BUILTIN (preceu_ph_qbr, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
12106 DIRECT_BUILTIN (preceu_ph_qbla, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
12107 DIRECT_BUILTIN (preceu_ph_qbra, MIPS_V2HI_FTYPE_V4QI, MASK_DSP),
12108 DIRECT_BUILTIN (shll_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSP),
12109 DIRECT_BUILTIN (shll_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
12110 DIRECT_BUILTIN (shll_s_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
12111 DIRECT_BUILTIN (shll_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
12112 DIRECT_BUILTIN (shrl_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSP),
12113 DIRECT_BUILTIN (shra_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
12114 DIRECT_BUILTIN (shra_r_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSP),
12115 DIRECT_BUILTIN (shra_r_w, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
12116 DIRECT_BUILTIN (muleu_s_ph_qbl, MIPS_V2HI_FTYPE_V4QI_V2HI, MASK_DSP),
12117 DIRECT_BUILTIN (muleu_s_ph_qbr, MIPS_V2HI_FTYPE_V4QI_V2HI, MASK_DSP),
12118 DIRECT_BUILTIN (mulq_rs_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
12119 DIRECT_BUILTIN (muleq_s_w_phl, MIPS_SI_FTYPE_V2HI_V2HI, MASK_DSP),
12120 DIRECT_BUILTIN (muleq_s_w_phr, MIPS_SI_FTYPE_V2HI_V2HI, MASK_DSP),
12121 DIRECT_BUILTIN (bitrev, MIPS_SI_FTYPE_SI, MASK_DSP),
12122 DIRECT_BUILTIN (insv, MIPS_SI_FTYPE_SI_SI, MASK_DSP),
12123 DIRECT_BUILTIN (repl_qb, MIPS_V4QI_FTYPE_SI, MASK_DSP),
12124 DIRECT_BUILTIN (repl_ph, MIPS_V2HI_FTYPE_SI, MASK_DSP),
12125 DIRECT_NO_TARGET_BUILTIN (cmpu_eq_qb, MIPS_VOID_FTYPE_V4QI_V4QI, MASK_DSP),
12126 DIRECT_NO_TARGET_BUILTIN (cmpu_lt_qb, MIPS_VOID_FTYPE_V4QI_V4QI, MASK_DSP),
12127 DIRECT_NO_TARGET_BUILTIN (cmpu_le_qb, MIPS_VOID_FTYPE_V4QI_V4QI, MASK_DSP),
12128 DIRECT_BUILTIN (cmpgu_eq_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSP),
12129 DIRECT_BUILTIN (cmpgu_lt_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSP),
12130 DIRECT_BUILTIN (cmpgu_le_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSP),
12131 DIRECT_NO_TARGET_BUILTIN (cmp_eq_ph, MIPS_VOID_FTYPE_V2HI_V2HI, MASK_DSP),
12132 DIRECT_NO_TARGET_BUILTIN (cmp_lt_ph, MIPS_VOID_FTYPE_V2HI_V2HI, MASK_DSP),
12133 DIRECT_NO_TARGET_BUILTIN (cmp_le_ph, MIPS_VOID_FTYPE_V2HI_V2HI, MASK_DSP),
12134 DIRECT_BUILTIN (pick_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSP),
12135 DIRECT_BUILTIN (pick_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
12136 DIRECT_BUILTIN (packrl_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSP),
12137 DIRECT_NO_TARGET_BUILTIN (wrdsp, MIPS_VOID_FTYPE_SI_SI, MASK_DSP),
12138 DIRECT_BUILTIN (rddsp, MIPS_SI_FTYPE_SI, MASK_DSP),
12139 DIRECT_BUILTIN (lbux, MIPS_SI_FTYPE_PTR_SI, MASK_DSP),
12140 DIRECT_BUILTIN (lhx, MIPS_SI_FTYPE_PTR_SI, MASK_DSP),
12141 DIRECT_BUILTIN (lwx, MIPS_SI_FTYPE_PTR_SI, MASK_DSP),
12142 BPOSGE_BUILTIN (32, MASK_DSP),
12144 /* The following are for the MIPS DSP ASE REV 2. */
12145 DIRECT_BUILTIN (absq_s_qb, MIPS_V4QI_FTYPE_V4QI, MASK_DSPR2),
12146 DIRECT_BUILTIN (addu_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
12147 DIRECT_BUILTIN (addu_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
12148 DIRECT_BUILTIN (adduh_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSPR2),
12149 DIRECT_BUILTIN (adduh_r_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSPR2),
12150 DIRECT_BUILTIN (append, MIPS_SI_FTYPE_SI_SI_SI, MASK_DSPR2),
12151 DIRECT_BUILTIN (balign, MIPS_SI_FTYPE_SI_SI_SI, MASK_DSPR2),
12152 DIRECT_BUILTIN (cmpgdu_eq_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSPR2),
12153 DIRECT_BUILTIN (cmpgdu_lt_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSPR2),
12154 DIRECT_BUILTIN (cmpgdu_le_qb, MIPS_SI_FTYPE_V4QI_V4QI, MASK_DSPR2),
12155 DIRECT_BUILTIN (mul_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
12156 DIRECT_BUILTIN (mul_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
12157 DIRECT_BUILTIN (mulq_rs_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
12158 DIRECT_BUILTIN (mulq_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
12159 DIRECT_BUILTIN (mulq_s_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
12160 DIRECT_BUILTIN (precr_qb_ph, MIPS_V4QI_FTYPE_V2HI_V2HI, MASK_DSPR2),
12161 DIRECT_BUILTIN (precr_sra_ph_w, MIPS_V2HI_FTYPE_SI_SI_SI, MASK_DSPR2),
12162 DIRECT_BUILTIN (precr_sra_r_ph_w, MIPS_V2HI_FTYPE_SI_SI_SI, MASK_DSPR2),
12163 DIRECT_BUILTIN (prepend, MIPS_SI_FTYPE_SI_SI_SI, MASK_DSPR2),
12164 DIRECT_BUILTIN (shra_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSPR2),
12165 DIRECT_BUILTIN (shra_r_qb, MIPS_V4QI_FTYPE_V4QI_SI, MASK_DSPR2),
12166 DIRECT_BUILTIN (shrl_ph, MIPS_V2HI_FTYPE_V2HI_SI, MASK_DSPR2),
12167 DIRECT_BUILTIN (subu_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
12168 DIRECT_BUILTIN (subu_s_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
12169 DIRECT_BUILTIN (subuh_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSPR2),
12170 DIRECT_BUILTIN (subuh_r_qb, MIPS_V4QI_FTYPE_V4QI_V4QI, MASK_DSPR2),
12171 DIRECT_BUILTIN (addqh_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
12172 DIRECT_BUILTIN (addqh_r_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
12173 DIRECT_BUILTIN (addqh_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
12174 DIRECT_BUILTIN (addqh_r_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
12175 DIRECT_BUILTIN (subqh_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
12176 DIRECT_BUILTIN (subqh_r_ph, MIPS_V2HI_FTYPE_V2HI_V2HI, MASK_DSPR2),
12177 DIRECT_BUILTIN (subqh_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2),
12178 DIRECT_BUILTIN (subqh_r_w, MIPS_SI_FTYPE_SI_SI, MASK_DSPR2)
12181 static const struct builtin_description dsp_32only_bdesc[] =
12183 DIRECT_BUILTIN (dpau_h_qbl, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
12184 DIRECT_BUILTIN (dpau_h_qbr, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
12185 DIRECT_BUILTIN (dpsu_h_qbl, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
12186 DIRECT_BUILTIN (dpsu_h_qbr, MIPS_DI_FTYPE_DI_V4QI_V4QI, MASK_DSP),
12187 DIRECT_BUILTIN (dpaq_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
12188 DIRECT_BUILTIN (dpsq_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
12189 DIRECT_BUILTIN (mulsaq_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
12190 DIRECT_BUILTIN (dpaq_sa_l_w, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSP),
12191 DIRECT_BUILTIN (dpsq_sa_l_w, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSP),
12192 DIRECT_BUILTIN (maq_s_w_phl, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
12193 DIRECT_BUILTIN (maq_s_w_phr, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
12194 DIRECT_BUILTIN (maq_sa_w_phl, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
12195 DIRECT_BUILTIN (maq_sa_w_phr, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSP),
12196 DIRECT_BUILTIN (extr_w, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
12197 DIRECT_BUILTIN (extr_r_w, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
12198 DIRECT_BUILTIN (extr_rs_w, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
12199 DIRECT_BUILTIN (extr_s_h, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
12200 DIRECT_BUILTIN (extp, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
12201 DIRECT_BUILTIN (extpdp, MIPS_SI_FTYPE_DI_SI, MASK_DSP),
12202 DIRECT_BUILTIN (shilo, MIPS_DI_FTYPE_DI_SI, MASK_DSP),
12203 DIRECT_BUILTIN (mthlip, MIPS_DI_FTYPE_DI_SI, MASK_DSP),
12205 /* The following are for the MIPS DSP ASE REV 2. */
12206 DIRECT_BUILTIN (dpa_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
12207 DIRECT_BUILTIN (dps_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
12208 DIRECT_BUILTIN (madd, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSPR2),
12209 DIRECT_BUILTIN (maddu, MIPS_DI_FTYPE_DI_USI_USI, MASK_DSPR2),
12210 DIRECT_BUILTIN (msub, MIPS_DI_FTYPE_DI_SI_SI, MASK_DSPR2),
12211 DIRECT_BUILTIN (msubu, MIPS_DI_FTYPE_DI_USI_USI, MASK_DSPR2),
12212 DIRECT_BUILTIN (mulsa_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
12213 DIRECT_BUILTIN (mult, MIPS_DI_FTYPE_SI_SI, MASK_DSPR2),
12214 DIRECT_BUILTIN (multu, MIPS_DI_FTYPE_USI_USI, MASK_DSPR2),
12215 DIRECT_BUILTIN (dpax_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
12216 DIRECT_BUILTIN (dpsx_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
12217 DIRECT_BUILTIN (dpaqx_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
12218 DIRECT_BUILTIN (dpaqx_sa_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
12219 DIRECT_BUILTIN (dpsqx_s_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2),
12220 DIRECT_BUILTIN (dpsqx_sa_w_ph, MIPS_DI_FTYPE_DI_V2HI_V2HI, MASK_DSPR2)
12223 /* This helps provide a mapping from builtin function codes to bdesc
12228 /* The builtin function table that this entry describes. */
12229 const struct builtin_description *bdesc;
12231 /* The number of entries in the builtin function table. */
12234 /* The target processor that supports these builtin functions.
12235 PROCESSOR_MAX means we enable them for all processors. */
12236 enum processor_type proc;
12238 /* If the target has these flags, this builtin function table
12239 will not be supported. */
12240 int unsupported_target_flags;
12243 static const struct bdesc_map bdesc_arrays[] =
12245 { mips_bdesc, ARRAY_SIZE (mips_bdesc), PROCESSOR_MAX, 0 },
12246 { sb1_bdesc, ARRAY_SIZE (sb1_bdesc), PROCESSOR_SB1, 0 },
12247 { dsp_bdesc, ARRAY_SIZE (dsp_bdesc), PROCESSOR_MAX, 0 },
12248 { dsp_32only_bdesc, ARRAY_SIZE (dsp_32only_bdesc), PROCESSOR_MAX,
12252 /* Take the argument ARGNUM of the arglist of EXP and convert it into a form
12253 suitable for input operand OP of instruction ICODE. Return the value. */
12256 mips_prepare_builtin_arg (enum insn_code icode,
12257 unsigned int op, tree exp, unsigned int argnum)
12260 enum machine_mode mode;
12262 value = expand_normal (CALL_EXPR_ARG (exp, argnum));
12263 mode = insn_data[icode].operand[op].mode;
12264 if (!insn_data[icode].operand[op].predicate (value, mode))
12266 value = copy_to_mode_reg (mode, value);
12267 /* Check the predicate again. */
12268 if (!insn_data[icode].operand[op].predicate (value, mode))
12270 error ("invalid argument to builtin function");
12278 /* Return an rtx suitable for output operand OP of instruction ICODE.
12279 If TARGET is non-null, try to use it where possible. */
12282 mips_prepare_builtin_target (enum insn_code icode, unsigned int op, rtx target)
12284 enum machine_mode mode;
12286 mode = insn_data[icode].operand[op].mode;
12287 if (target == 0 || !insn_data[icode].operand[op].predicate (target, mode))
12288 target = gen_reg_rtx (mode);
12293 /* Expand builtin functions. This is called from TARGET_EXPAND_BUILTIN. */
12296 mips_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
12297 enum machine_mode mode ATTRIBUTE_UNUSED,
12298 int ignore ATTRIBUTE_UNUSED)
12300 enum insn_code icode;
12301 enum mips_builtin_type type;
12303 unsigned int fcode;
12304 const struct builtin_description *bdesc;
12305 const struct bdesc_map *m;
12307 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
12308 fcode = DECL_FUNCTION_CODE (fndecl);
12312 error ("built-in function %qs not supported for MIPS16",
12313 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
12318 for (m = bdesc_arrays; m < &bdesc_arrays[ARRAY_SIZE (bdesc_arrays)]; m++)
12320 if (fcode < m->size)
12323 icode = bdesc[fcode].icode;
12324 type = bdesc[fcode].builtin_type;
12334 case MIPS_BUILTIN_DIRECT:
12335 return mips_expand_builtin_direct (icode, target, exp, true);
12337 case MIPS_BUILTIN_DIRECT_NO_TARGET:
12338 return mips_expand_builtin_direct (icode, target, exp, false);
12340 case MIPS_BUILTIN_MOVT:
12341 case MIPS_BUILTIN_MOVF:
12342 return mips_expand_builtin_movtf (type, icode, bdesc[fcode].cond,
12345 case MIPS_BUILTIN_CMP_ANY:
12346 case MIPS_BUILTIN_CMP_ALL:
12347 case MIPS_BUILTIN_CMP_UPPER:
12348 case MIPS_BUILTIN_CMP_LOWER:
12349 case MIPS_BUILTIN_CMP_SINGLE:
12350 return mips_expand_builtin_compare (type, icode, bdesc[fcode].cond,
12353 case MIPS_BUILTIN_BPOSGE32:
12354 return mips_expand_builtin_bposge (type, target);
12361 /* Init builtin functions. This is called from TARGET_INIT_BUILTIN. */
12364 mips_init_builtins (void)
12366 const struct builtin_description *d;
12367 const struct bdesc_map *m;
12368 tree types[(int) MIPS_MAX_FTYPE_MAX];
12369 tree V2SF_type_node;
12370 tree V2HI_type_node;
12371 tree V4QI_type_node;
12372 unsigned int offset;
12374 /* We have only builtins for -mpaired-single, -mips3d and -mdsp. */
12375 if (!TARGET_PAIRED_SINGLE_FLOAT && !TARGET_DSP)
12378 if (TARGET_PAIRED_SINGLE_FLOAT)
12380 V2SF_type_node = build_vector_type_for_mode (float_type_node, V2SFmode);
12382 types[MIPS_V2SF_FTYPE_V2SF]
12383 = build_function_type_list (V2SF_type_node, V2SF_type_node, NULL_TREE);
12385 types[MIPS_V2SF_FTYPE_V2SF_V2SF]
12386 = build_function_type_list (V2SF_type_node,
12387 V2SF_type_node, V2SF_type_node, NULL_TREE);
12389 types[MIPS_V2SF_FTYPE_V2SF_V2SF_INT]
12390 = build_function_type_list (V2SF_type_node,
12391 V2SF_type_node, V2SF_type_node,
12392 integer_type_node, NULL_TREE);
12394 types[MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF]
12395 = build_function_type_list (V2SF_type_node,
12396 V2SF_type_node, V2SF_type_node,
12397 V2SF_type_node, V2SF_type_node, NULL_TREE);
12399 types[MIPS_V2SF_FTYPE_SF_SF]
12400 = build_function_type_list (V2SF_type_node,
12401 float_type_node, float_type_node, NULL_TREE);
12403 types[MIPS_INT_FTYPE_V2SF_V2SF]
12404 = build_function_type_list (integer_type_node,
12405 V2SF_type_node, V2SF_type_node, NULL_TREE);
12407 types[MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF]
12408 = build_function_type_list (integer_type_node,
12409 V2SF_type_node, V2SF_type_node,
12410 V2SF_type_node, V2SF_type_node, NULL_TREE);
12412 types[MIPS_INT_FTYPE_SF_SF]
12413 = build_function_type_list (integer_type_node,
12414 float_type_node, float_type_node, NULL_TREE);
12416 types[MIPS_INT_FTYPE_DF_DF]
12417 = build_function_type_list (integer_type_node,
12418 double_type_node, double_type_node, NULL_TREE);
12420 types[MIPS_SF_FTYPE_V2SF]
12421 = build_function_type_list (float_type_node, V2SF_type_node, NULL_TREE);
12423 types[MIPS_SF_FTYPE_SF]
12424 = build_function_type_list (float_type_node,
12425 float_type_node, NULL_TREE);
12427 types[MIPS_SF_FTYPE_SF_SF]
12428 = build_function_type_list (float_type_node,
12429 float_type_node, float_type_node, NULL_TREE);
12431 types[MIPS_DF_FTYPE_DF]
12432 = build_function_type_list (double_type_node,
12433 double_type_node, NULL_TREE);
12435 types[MIPS_DF_FTYPE_DF_DF]
12436 = build_function_type_list (double_type_node,
12437 double_type_node, double_type_node, NULL_TREE);
12442 V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
12443 V4QI_type_node = build_vector_type_for_mode (intQI_type_node, V4QImode);
12445 types[MIPS_V2HI_FTYPE_V2HI_V2HI]
12446 = build_function_type_list (V2HI_type_node,
12447 V2HI_type_node, V2HI_type_node,
12450 types[MIPS_SI_FTYPE_SI_SI]
12451 = build_function_type_list (intSI_type_node,
12452 intSI_type_node, intSI_type_node,
12455 types[MIPS_V4QI_FTYPE_V4QI_V4QI]
12456 = build_function_type_list (V4QI_type_node,
12457 V4QI_type_node, V4QI_type_node,
12460 types[MIPS_SI_FTYPE_V4QI]
12461 = build_function_type_list (intSI_type_node,
12465 types[MIPS_V2HI_FTYPE_V2HI]
12466 = build_function_type_list (V2HI_type_node,
12470 types[MIPS_SI_FTYPE_SI]
12471 = build_function_type_list (intSI_type_node,
12475 types[MIPS_V4QI_FTYPE_V2HI_V2HI]
12476 = build_function_type_list (V4QI_type_node,
12477 V2HI_type_node, V2HI_type_node,
12480 types[MIPS_V2HI_FTYPE_SI_SI]
12481 = build_function_type_list (V2HI_type_node,
12482 intSI_type_node, intSI_type_node,
12485 types[MIPS_SI_FTYPE_V2HI]
12486 = build_function_type_list (intSI_type_node,
12490 types[MIPS_V2HI_FTYPE_V4QI]
12491 = build_function_type_list (V2HI_type_node,
12495 types[MIPS_V4QI_FTYPE_V4QI_SI]
12496 = build_function_type_list (V4QI_type_node,
12497 V4QI_type_node, intSI_type_node,
12500 types[MIPS_V2HI_FTYPE_V2HI_SI]
12501 = build_function_type_list (V2HI_type_node,
12502 V2HI_type_node, intSI_type_node,
12505 types[MIPS_V2HI_FTYPE_V4QI_V2HI]
12506 = build_function_type_list (V2HI_type_node,
12507 V4QI_type_node, V2HI_type_node,
12510 types[MIPS_SI_FTYPE_V2HI_V2HI]
12511 = build_function_type_list (intSI_type_node,
12512 V2HI_type_node, V2HI_type_node,
12515 types[MIPS_DI_FTYPE_DI_V4QI_V4QI]
12516 = build_function_type_list (intDI_type_node,
12517 intDI_type_node, V4QI_type_node, V4QI_type_node,
12520 types[MIPS_DI_FTYPE_DI_V2HI_V2HI]
12521 = build_function_type_list (intDI_type_node,
12522 intDI_type_node, V2HI_type_node, V2HI_type_node,
12525 types[MIPS_DI_FTYPE_DI_SI_SI]
12526 = build_function_type_list (intDI_type_node,
12527 intDI_type_node, intSI_type_node, intSI_type_node,
12530 types[MIPS_V4QI_FTYPE_SI]
12531 = build_function_type_list (V4QI_type_node,
12535 types[MIPS_V2HI_FTYPE_SI]
12536 = build_function_type_list (V2HI_type_node,
12540 types[MIPS_VOID_FTYPE_V4QI_V4QI]
12541 = build_function_type_list (void_type_node,
12542 V4QI_type_node, V4QI_type_node,
12545 types[MIPS_SI_FTYPE_V4QI_V4QI]
12546 = build_function_type_list (intSI_type_node,
12547 V4QI_type_node, V4QI_type_node,
12550 types[MIPS_VOID_FTYPE_V2HI_V2HI]
12551 = build_function_type_list (void_type_node,
12552 V2HI_type_node, V2HI_type_node,
12555 types[MIPS_SI_FTYPE_DI_SI]
12556 = build_function_type_list (intSI_type_node,
12557 intDI_type_node, intSI_type_node,
12560 types[MIPS_DI_FTYPE_DI_SI]
12561 = build_function_type_list (intDI_type_node,
12562 intDI_type_node, intSI_type_node,
12565 types[MIPS_VOID_FTYPE_SI_SI]
12566 = build_function_type_list (void_type_node,
12567 intSI_type_node, intSI_type_node,
12570 types[MIPS_SI_FTYPE_PTR_SI]
12571 = build_function_type_list (intSI_type_node,
12572 ptr_type_node, intSI_type_node,
12575 types[MIPS_SI_FTYPE_VOID]
12576 = build_function_type (intSI_type_node, void_list_node);
12580 types[MIPS_V4QI_FTYPE_V4QI]
12581 = build_function_type_list (V4QI_type_node,
12585 types[MIPS_SI_FTYPE_SI_SI_SI]
12586 = build_function_type_list (intSI_type_node,
12587 intSI_type_node, intSI_type_node,
12588 intSI_type_node, NULL_TREE);
12590 types[MIPS_DI_FTYPE_DI_USI_USI]
12591 = build_function_type_list (intDI_type_node,
12593 unsigned_intSI_type_node,
12594 unsigned_intSI_type_node, NULL_TREE);
12596 types[MIPS_DI_FTYPE_SI_SI]
12597 = build_function_type_list (intDI_type_node,
12598 intSI_type_node, intSI_type_node,
12601 types[MIPS_DI_FTYPE_USI_USI]
12602 = build_function_type_list (intDI_type_node,
12603 unsigned_intSI_type_node,
12604 unsigned_intSI_type_node, NULL_TREE);
12606 types[MIPS_V2HI_FTYPE_SI_SI_SI]
12607 = build_function_type_list (V2HI_type_node,
12608 intSI_type_node, intSI_type_node,
12609 intSI_type_node, NULL_TREE);
12614 /* Iterate through all of the bdesc arrays, initializing all of the
12615 builtin functions. */
12618 for (m = bdesc_arrays; m < &bdesc_arrays[ARRAY_SIZE (bdesc_arrays)]; m++)
12620 if ((m->proc == PROCESSOR_MAX || (m->proc == mips_arch))
12621 && (m->unsupported_target_flags & target_flags) == 0)
12622 for (d = m->bdesc; d < &m->bdesc[m->size]; d++)
12623 if ((d->target_flags & target_flags) == d->target_flags)
12624 add_builtin_function (d->name, types[d->function_type],
12625 d - m->bdesc + offset,
12626 BUILT_IN_MD, NULL, NULL);
12631 /* Expand a MIPS_BUILTIN_DIRECT function. ICODE is the code of the
12632 .md pattern and CALL is the function expr with arguments. TARGET,
12633 if nonnull, suggests a good place to put the result.
12634 HAS_TARGET indicates the function must return something. */
12637 mips_expand_builtin_direct (enum insn_code icode, rtx target, tree exp,
12640 rtx ops[MAX_RECOG_OPERANDS];
12646 /* We save target to ops[0]. */
12647 ops[0] = mips_prepare_builtin_target (icode, 0, target);
12651 /* We need to test if the arglist is not zero. Some instructions have extra
12652 clobber registers. */
12653 for (; i < insn_data[icode].n_operands && i <= call_expr_nargs (exp); i++, j++)
12654 ops[i] = mips_prepare_builtin_arg (icode, i, exp, j);
12659 emit_insn (GEN_FCN (icode) (ops[0], ops[1]));
12663 emit_insn (GEN_FCN (icode) (ops[0], ops[1], ops[2]));
12667 emit_insn (GEN_FCN (icode) (ops[0], ops[1], ops[2], ops[3]));
12671 gcc_unreachable ();
12676 /* Expand a __builtin_mips_movt_*_ps() or __builtin_mips_movf_*_ps()
12677 function (TYPE says which). EXP is the tree for the function
12678 function, ICODE is the instruction that should be used to compare
12679 the first two arguments, and COND is the condition it should test.
12680 TARGET, if nonnull, suggests a good place to put the result. */
12683 mips_expand_builtin_movtf (enum mips_builtin_type type,
12684 enum insn_code icode, enum mips_fp_condition cond,
12685 rtx target, tree exp)
12687 rtx cmp_result, op0, op1;
12689 cmp_result = mips_prepare_builtin_target (icode, 0, 0);
12690 op0 = mips_prepare_builtin_arg (icode, 1, exp, 0);
12691 op1 = mips_prepare_builtin_arg (icode, 2, exp, 1);
12692 emit_insn (GEN_FCN (icode) (cmp_result, op0, op1, GEN_INT (cond)));
12694 icode = CODE_FOR_mips_cond_move_tf_ps;
12695 target = mips_prepare_builtin_target (icode, 0, target);
12696 if (type == MIPS_BUILTIN_MOVT)
12698 op1 = mips_prepare_builtin_arg (icode, 2, exp, 2);
12699 op0 = mips_prepare_builtin_arg (icode, 1, exp, 3);
12703 op0 = mips_prepare_builtin_arg (icode, 1, exp, 2);
12704 op1 = mips_prepare_builtin_arg (icode, 2, exp, 3);
12706 emit_insn (gen_mips_cond_move_tf_ps (target, op0, op1, cmp_result));
12710 /* Move VALUE_IF_TRUE into TARGET if CONDITION is true; move VALUE_IF_FALSE
12711 into TARGET otherwise. Return TARGET. */
12714 mips_builtin_branch_and_move (rtx condition, rtx target,
12715 rtx value_if_true, rtx value_if_false)
12717 rtx true_label, done_label;
12719 true_label = gen_label_rtx ();
12720 done_label = gen_label_rtx ();
12722 /* First assume that CONDITION is false. */
12723 mips_emit_move (target, value_if_false);
12725 /* Branch to TRUE_LABEL if CONDITION is true and DONE_LABEL otherwise. */
12726 emit_jump_insn (gen_condjump (condition, true_label));
12727 emit_jump_insn (gen_jump (done_label));
12730 /* Fix TARGET if CONDITION is true. */
12731 emit_label (true_label);
12732 mips_emit_move (target, value_if_true);
12734 emit_label (done_label);
12738 /* Expand a comparison builtin of type BUILTIN_TYPE. ICODE is the code
12739 of the comparison instruction and COND is the condition it should test.
12740 EXP is the function call and arguments and TARGET, if nonnull,
12741 suggests a good place to put the boolean result. */
12744 mips_expand_builtin_compare (enum mips_builtin_type builtin_type,
12745 enum insn_code icode, enum mips_fp_condition cond,
12746 rtx target, tree exp)
12748 rtx offset, condition, cmp_result, ops[MAX_RECOG_OPERANDS];
12752 if (target == 0 || GET_MODE (target) != SImode)
12753 target = gen_reg_rtx (SImode);
12755 /* Prepare the operands to the comparison. */
12756 cmp_result = mips_prepare_builtin_target (icode, 0, 0);
12757 for (i = 1; i < insn_data[icode].n_operands - 1; i++, j++)
12758 ops[i] = mips_prepare_builtin_arg (icode, i, exp, j);
12760 switch (insn_data[icode].n_operands)
12763 emit_insn (GEN_FCN (icode) (cmp_result, ops[1], ops[2], GEN_INT (cond)));
12767 emit_insn (GEN_FCN (icode) (cmp_result, ops[1], ops[2],
12768 ops[3], ops[4], GEN_INT (cond)));
12772 gcc_unreachable ();
12775 /* If the comparison sets more than one register, we define the result
12776 to be 0 if all registers are false and -1 if all registers are true.
12777 The value of the complete result is indeterminate otherwise. */
12778 switch (builtin_type)
12780 case MIPS_BUILTIN_CMP_ALL:
12781 condition = gen_rtx_NE (VOIDmode, cmp_result, constm1_rtx);
12782 return mips_builtin_branch_and_move (condition, target,
12783 const0_rtx, const1_rtx);
12785 case MIPS_BUILTIN_CMP_UPPER:
12786 case MIPS_BUILTIN_CMP_LOWER:
12787 offset = GEN_INT (builtin_type == MIPS_BUILTIN_CMP_UPPER);
12788 condition = gen_single_cc (cmp_result, offset);
12789 return mips_builtin_branch_and_move (condition, target,
12790 const1_rtx, const0_rtx);
12793 condition = gen_rtx_NE (VOIDmode, cmp_result, const0_rtx);
12794 return mips_builtin_branch_and_move (condition, target,
12795 const1_rtx, const0_rtx);
12799 /* Expand a bposge builtin of type BUILTIN_TYPE. TARGET, if nonnull,
12800 suggests a good place to put the boolean result. */
12803 mips_expand_builtin_bposge (enum mips_builtin_type builtin_type, rtx target)
12805 rtx condition, cmp_result;
12808 if (target == 0 || GET_MODE (target) != SImode)
12809 target = gen_reg_rtx (SImode);
12811 cmp_result = gen_rtx_REG (CCDSPmode, CCDSP_PO_REGNUM);
12813 if (builtin_type == MIPS_BUILTIN_BPOSGE32)
12818 condition = gen_rtx_GE (VOIDmode, cmp_result, GEN_INT (cmp_value));
12819 return mips_builtin_branch_and_move (condition, target,
12820 const1_rtx, const0_rtx);
12823 /* Set SYMBOL_REF_FLAGS for the SYMBOL_REF inside RTL, which belongs to DECL.
12824 FIRST is true if this is the first time handling this decl. */
12827 mips_encode_section_info (tree decl, rtx rtl, int first)
12829 default_encode_section_info (decl, rtl, first);
12831 if (TREE_CODE (decl) == FUNCTION_DECL)
12833 rtx symbol = XEXP (rtl, 0);
12834 tree type = TREE_TYPE (decl);
12836 if ((TARGET_LONG_CALLS && !mips_near_type_p (type))
12837 || mips_far_type_p (type))
12838 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LONG_CALL;
12842 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. Some code models use the incoming
12843 value of PIC_FUNCTION_ADDR_REGNUM to set up the global pointer. */
12846 mips_extra_live_on_entry (bitmap regs)
12848 if (TARGET_USE_GOT && !TARGET_ABSOLUTE_ABICALLS)
12849 bitmap_set_bit (regs, PIC_FUNCTION_ADDR_REGNUM);
12852 /* SImode values are represented as sign-extended to DImode. */
12855 mips_mode_rep_extended (enum machine_mode mode, enum machine_mode mode_rep)
12857 if (TARGET_64BIT && mode == SImode && mode_rep == DImode)
12858 return SIGN_EXTEND;
12863 /* MIPS implementation of TARGET_ASM_OUTPUT_DWARF_DTPREL. */
12866 mips_output_dwarf_dtprel (FILE *file, int size, rtx x)
12871 fputs ("\t.dtprelword\t", file);
12875 fputs ("\t.dtpreldword\t", file);
12879 gcc_unreachable ();
12881 output_addr_const (file, x);
12882 fputs ("+0x8000", file);
12885 #include "gt-mips.h"