1 /* Subroutines for insn-output.c for Matsushita MN10300 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
41 #include "diagnostic-core.h"
44 #include "target-def.h"
47 /* This is used by GOTaddr2picreg to uniquely identify
/* NOTE(review): the tail of the comment above is missing from this extract
   (original line 48); presumably it named the labels this counter numbers.
   The counter feeds UNSPEC_INT_LABEL printing (".%LLIL<n>") further below.  */
49 int mn10300_unspec_int_label_counter;
51 /* This is used in the am33_2.0-linux-gnu port, in which global symbol
52 names are not prefixed by underscores, to tell whether to prefix a
53 label with a plus sign or not, so that the assembler can tell
54 symbol names from register names. */
55 int mn10300_protect_label;
/* Processor selected by -mam33/-mam33-2/-mam34 handling in
   mn10300_handle_option; defaults to the configured PROCESSOR_DEFAULT.  */
57 /* The selected processor. */
58 enum processor_type mn10300_processor = PROCESSOR_DEFAULT;
/* Raw -mtune= argument string; parsed once in mn10300_option_override.  */
60 /* Processor type to select for tuning. */
61 static const char * mn10300_tune_string = NULL;
63 /* Selected processor type for tuning. */
64 enum processor_type mn10300_tune_cpu = PROCESSOR_DEFAULT;
66 /* The size of the callee register save area. Right now we save everything
67 on entry since it costs us nothing in code size. It does cost us from a
68 speed standpoint, so we want to optimize this sooner or later. */
/* 4 bytes each for the callee-saved data/address registers d2 (2), d3 (3),
   a2 (6) and a3 (7), plus a single 16-byte chunk when ANY of the four
   callee-saved extended registers (hard regs 14-17, e4-e7) is live --
   the movm instruction can only save/restore that group as a unit (see
   the 0x3c000 mask checks elsewhere in this file).  */
69 #define REG_SAVE_BYTES (4 * df_regs_ever_live_p (2) \
70 + 4 * df_regs_ever_live_p (3) \
71 + 4 * df_regs_ever_live_p (6) \
72 + 4 * df_regs_ever_live_p (7) \
73 + 16 * (df_regs_ever_live_p (14) \
74 || df_regs_ever_live_p (15) \
75 || df_regs_ever_live_p (16) \
76 || df_regs_ever_live_p (17)))
78 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
/* NOTE(review): this extract is missing interleaved source lines here
   (the table's opening/closing braces and, below, the function's return
   type, `switch (code)` and its case labels).  Tokens kept exactly as
   found; do not treat the layout as compilable.  */
79 static const struct default_options mn10300_option_optimization_table[] =
/* Enable -fomit-frame-pointer at -O1 and above.  */
81 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
82 { OPT_LEVELS_NONE, 0, NULL, 0 }
85 /* Implement TARGET_HANDLE_OPTION. */
/* Dispatches on the option CODE (case labels missing from this view --
   presumably OPT_mam33, OPT_mam33_2, OPT_mam34, OPT_mtune_).  */
88 mn10300_handle_option (size_t code,
89 const char *arg ATTRIBUTE_UNUSED,
/* -mam33 / -mno-am33: select AM33 or fall back to plain MN10300.  */
95 mn10300_processor = value ? PROCESSOR_AM33 : PROCESSOR_MN10300;
/* -mam33-2: the positive arm (missing from view) presumably selects
   PROCESSOR_AM33_2; the negative arm caps at AM33 or the default.  */
99 mn10300_processor = (value
101 : MIN (PROCESSOR_AM33, PROCESSOR_DEFAULT));
105 mn10300_processor = (value ? PROCESSOR_AM34 : PROCESSOR_DEFAULT);
/* -mtune=: stash the string; parsed later in mn10300_option_override.  */
109 mn10300_tune_string = arg;
117 /* Implement TARGET_OPTION_OVERRIDE. */
/* Final per-target option fixups, run after all command-line options are
   processed.  NOTE(review): braces and some guarding conditions (e.g. the
   condition under which MASK_MULT_BUG is cleared) are missing from this
   extract; tokens kept exactly as found.  */
120 mn10300_option_override (void)
123 target_flags &= ~MASK_MULT_BUG;
126 /* Disable scheduling for the MN10300 as we do
127 not have timing information available for it. */
128 flag_schedule_insns = 0;
129 flag_schedule_insns_after_reload = 0;
131 /* Force enable splitting of wide types, as otherwise it is trivial
132 to run out of registers. Indeed, this works so well that register
133 allocation problems are now more common *without* optimization,
134 when this flag is not enabled by default. */
135 flag_split_wide_types = 1;
/* Parse the -mtune= string saved by mn10300_handle_option.  */
138 if (mn10300_tune_string)
140 if (strcasecmp (mn10300_tune_string, "mn10300") == 0)
141 mn10300_tune_cpu = PROCESSOR_MN10300;
142 else if (strcasecmp (mn10300_tune_string, "am33") == 0)
143 mn10300_tune_cpu = PROCESSOR_AM33;
144 else if (strcasecmp (mn10300_tune_string, "am33-2") == 0)
145 mn10300_tune_cpu = PROCESSOR_AM33_2;
146 else if (strcasecmp (mn10300_tune_string, "am34") == 0)
147 mn10300_tune_cpu = PROCESSOR_AM34;
/* Unrecognized -mtune= value: hard error listing the accepted names.  */
149 error ("-mtune= expects mn10300, am33, am33-2, or am34");
/* Emit the per-file assembly prologue: the default header plus an
   .am33_2 / .am33 pseudo-op so the assembler accepts the extended ISA.
   NOTE(review): the leading `if (TARGET_AM33_2)` guard (original line
   ~158) is missing from this extract.  */
154 mn10300_file_start (void)
156 default_file_start ();
159 fprintf (asm_out_file, "\t.am33_2\n");
160 else if (TARGET_AM33)
161 fprintf (asm_out_file, "\t.am33\n");
164 /* Print operand X using operand code CODE to assembly language output file
/* NOTE(review): this extract is missing many interleaved lines (the outer
   `switch (code)`, all inner `case` labels, braces and `break`s), so the
   fprintf calls below appear without the conditions that select them.
   Tokens kept exactly as found.  */
168 mn10300_print_operand (FILE *file, rtx x, int code)
/* 'b'/'B' operand codes: condition-code mnemonics for branches.  When the
   comparison was done in CC_FLOATmode, use the FP mnemonics and the
   unordered-aware reversal for the reversed-branch form.  */
174 if (GET_MODE (XEXP (x, 0)) == CC_FLOATmode)
176 switch (code == 'b' ? GET_CODE (x)
177 : reverse_condition_maybe_unordered (GET_CODE (x)))
180 fprintf (file, "ne");
183 fprintf (file, "eq");
186 fprintf (file, "ge");
189 fprintf (file, "gt");
192 fprintf (file, "le");
195 fprintf (file, "lt");
198 fprintf (file, "lge");
201 fprintf (file, "uo");
204 fprintf (file, "lg");
207 fprintf (file, "ue");
210 fprintf (file, "uge");
213 fprintf (file, "ug");
216 fprintf (file, "ule");
219 fprintf (file, "ul");
226 /* These are normal and reversed branches. */
227 switch (code == 'b' ? GET_CODE (x) : reverse_condition (GET_CODE (x)))
230 fprintf (file, "ne");
233 fprintf (file, "eq");
236 fprintf (file, "ge");
239 fprintf (file, "gt");
242 fprintf (file, "le");
245 fprintf (file, "lt");
248 fprintf (file, "cc");
251 fprintf (file, "hi");
254 fprintf (file, "ls");
257 fprintf (file, "cs");
264 /* This is used for the operand to a call instruction;
265 if it's a REG, enclose it in parens, else output
266 the operand normally. */
270 mn10300_print_operand (file, x, 0);
274 mn10300_print_operand (file, x, 0);
/* FP register pair / memory operand handling (case label missing).  */
278 switch (GET_CODE (x))
282 output_address (XEXP (x, 0));
/* "fd" names a double-precision FP register pair; 18 is presumably
   FIRST_FP_REGNUM -- TODO confirm against mn10300.h.  */
287 fprintf (file, "fd%d", REGNO (x) - 18);
295 /* These are the least significant word in a 64bit value. */
297 switch (GET_CODE (x))
301 output_address (XEXP (x, 0));
306 fprintf (file, "%s", reg_names[REGNO (x)]);
310 fprintf (file, "%s", reg_names[subreg_regno (x)]);
/* CONST_DOUBLE: emit the low word of the target representation.  */
318 switch (GET_MODE (x))
321 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
322 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
323 fprintf (file, "0x%lx", val[0]);
326 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
327 REAL_VALUE_TO_TARGET_SINGLE (rv, val[0]);
328 fprintf (file, "0x%lx", val[0]);
332 mn10300_print_operand_address (file,
333 GEN_INT (CONST_DOUBLE_LOW (x)));
/* CONST_INT: split into word halves and print the low word.  */
344 split_double (x, &low, &high);
345 fprintf (file, "%ld", (long)INTVAL (low));
354 /* Similarly, but for the most significant word. */
356 switch (GET_CODE (x))
/* The high word of a 64-bit MEM lives 4 bytes past the low word.  */
360 x = adjust_address (x, SImode, 4);
361 output_address (XEXP (x, 0));
366 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
370 fprintf (file, "%s", reg_names[subreg_regno (x) + 1]);
378 switch (GET_MODE (x))
381 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
382 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
383 fprintf (file, "0x%lx", val[1]);
389 mn10300_print_operand_address (file,
390 GEN_INT (CONST_DOUBLE_HIGH (x)));
401 split_double (x, &low, &high);
402 fprintf (file, "%ld", (long)INTVAL (high));
/* Force a (reg + 0) form so the assembler sees an explicit offset.  */
413 if (REG_P (XEXP (x, 0)))
414 output_address (gen_rtx_PLUS (SImode, XEXP (x, 0), const0_rtx));
416 output_address (XEXP (x, 0));
/* Byte-sized immediates: one operand code prints the complement,
   the other the raw low byte; both accept -128..255.  */
421 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
422 fprintf (file, "%d", (int)((~INTVAL (x)) & 0xff));
426 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
427 fprintf (file, "%d", (int)(INTVAL (x) & 0xff));
430 /* For shift counts. The hardware ignores the upper bits of
431 any immediate, but the assembler will flag an out of range
432 shift count as an error. So we mask off the high bits
433 of the immediate here. */
437 fprintf (file, "%d", (int)(INTVAL (x) & 0x1f));
/* Default operand printing (no code): registers, memory, constants.  */
443 switch (GET_CODE (x))
447 output_address (XEXP (x, 0));
456 fprintf (file, "%s", reg_names[REGNO (x)]);
460 fprintf (file, "%s", reg_names[subreg_regno (x)]);
463 /* This will only be single precision.... */
469 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
470 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
471 fprintf (file, "0x%lx", val);
481 mn10300_print_operand_address (file, x);
490 /* Output assembly language output for the address ADDR to FILE. */
/* NOTE(review): case labels (POST_INC, REG, PLUS, ...) and braces are
   missing from this extract; tokens kept exactly as found.  */
493 mn10300_print_operand_address (FILE *file, rtx addr)
495 switch (GET_CODE (addr))
/* Post-increment form: print the inner address, then presumably a "+"
   suffix (the fputs for it is not visible here -- TODO confirm).  */
498 mn10300_print_operand_address (file, XEXP (addr, 0));
502 mn10300_print_operand (file, addr, 0);
/* PLUS: pick whichever operand is a valid base register; the other
   becomes the index, printed as "index,base".  */
507 if (REG_P (XEXP (addr, 0))
508 && REG_OK_FOR_BASE_P (XEXP (addr, 0)))
509 base = XEXP (addr, 0), index = XEXP (addr, 1);
510 else if (REG_P (XEXP (addr, 1))
511 && REG_OK_FOR_BASE_P (XEXP (addr, 1)))
512 base = XEXP (addr, 1), index = XEXP (addr, 0);
515 mn10300_print_operand (file, index, 0);
/* NOTE(review): stray double semicolon below -- harmless, worth
   cleaning up when the full file is edited.  */
517 mn10300_print_operand (file, base, 0);;
521 output_addr_const (file, addr);
524 output_addr_const (file, addr);
529 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA.
531 Used for PIC-specific UNSPECs. */
/* Prints the PIC relocation suffix (@GOT, @GOTOFF, @PLT) or the
   GOT-symbol offset form for each UNSPEC wrapper.  NOTE(review): the
   inner `switch (XINT (x, 1))`, several case labels, `break`s and the
   final return are missing from this extract.  */
534 mn10300_asm_output_addr_const_extra (FILE *file, rtx x)
536 if (GET_CODE (x) == UNSPEC)
540 case UNSPEC_INT_LABEL:
/* Local PIC label: ".LLIL<n>" with the counter value as suffix.  */
541 asm_fprintf (file, ".%LLIL" HOST_WIDE_INT_PRINT_DEC,
542 INTVAL (XVECEXP (x, 0, 0)));
545 /* GLOBAL_OFFSET_TABLE or local symbols, no suffix. */
546 output_addr_const (file, XVECEXP (x, 0, 0));
549 output_addr_const (file, XVECEXP (x, 0, 0));
550 fputs ("@GOT", file);
553 output_addr_const (file, XVECEXP (x, 0, 0));
554 fputs ("@GOTOFF", file);
557 output_addr_const (file, XVECEXP (x, 0, 0));
558 fputs ("@PLT", file);
560 case UNSPEC_GOTSYM_OFF:
/* Offset of a symbol from the GOT base: "_GLOBAL_OFFSET_TABLE_-sym"
   (the "-" between the two parts is on a missing line -- TODO confirm).  */
561 assemble_name (file, GOT_SYMBOL_NAME);
563 output_addr_const (file, XVECEXP (x, 0, 0));
575 /* Count the number of FP registers that have to be saved. */
/* Walks FIRST_FP_REGNUM..LAST_FP_REGNUM and counts registers that are
   both live somewhere in the function and not call-clobbered, i.e. the
   callee must preserve them.  NOTE(review): the return type, the early
   return for !TARGET_AM33_2, the counter increment and the final return
   are on lines missing from this extract.  */
577 fp_regs_to_save (void)
584 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
585 if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
591 /* Print a set of registers in the format required by "movm" and "ret".
592 Register K is saved if bit K of MASK is set. The data and address
593 registers can be stored individually, but the extended registers cannot.
594 We assume that the mask already takes that into account. For instance,
595 bits 14 to 17 must have the same value. */
/* NOTE(review): the function header/braces, the "[" / "]" delimiters and
   the comma handling between names are on lines missing from this
   extract; tokens kept exactly as found.  */
598 mn10300_print_reg_list (FILE *file, int mask)
/* Individually printable registers: everything below the extended bank.  */
606 for (i = 0; i < FIRST_EXTENDED_REGNUM; i++)
607 if ((mask & (1 << i)) != 0)
611 fputs (reg_names [i], file);
/* Extended registers e4-e7 (mask bits 14-17, 0x3c000) are all-or-nothing
   and are printed as the single group name "exreg1".  */
615 if ((mask & 0x3c000) != 0)
617 gcc_assert ((mask & 0x3c000) == 0x3c000);
620 fputs ("exreg1", file);
/* Return nonzero when the function can use a plain "ret"/"rets" pattern:
   after reload, with no callee-saved data/address/extended registers
   live, no FP registers to save, and no frame pointer.  NOTE(review):
   the conditions involving SIZE (presumably a size/alignment limit on
   the visible `size` computed below) are on lines missing from this
   extract.  */
628 mn10300_can_use_return_insn (void)
630 /* size includes the fixed stack space needed for function calls. */
631 int size = get_frame_size () + crtl->outgoing_args_size;
633 /* And space for the return pointer. */
634 size += crtl->outgoing_args_size ? 4 : 0;
636 return (reload_completed
638 && !df_regs_ever_live_p (2)
639 && !df_regs_ever_live_p (3)
640 && !df_regs_ever_live_p (6)
641 && !df_regs_ever_live_p (7)
642 && !df_regs_ever_live_p (14)
643 && !df_regs_ever_live_p (15)
644 && !df_regs_ever_live_p (16)
645 && !df_regs_ever_live_p (17)
646 && fp_regs_to_save () == 0
647 && !frame_pointer_needed);
650 /* Returns the set of live, callee-saved registers as a bitmask. The
651 callee-saved extended registers cannot be stored individually, so
652 all of them will be included in the mask if any one of them is used. */
/* NOTE(review): the mask accumulation (`mask |= (1 << i)`), the widening
   of a partial extended-register set to the full 0x3c000 group, and the
   return are on lines missing from this extract.  */
655 mn10300_get_live_callee_saved_regs (void)
661 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
662 if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
664 if ((mask & 0x3c000) != 0)
/* NOTE(review): original line 673 below belongs to a separate small
   helper (presumably the F() frame-note wrapper marking an insn as
   frame-related) whose surrounding lines are missing from this extract.  */
673 RTX_FRAME_RELATED_P (r) = 1;
677 /* Generate an instruction that pushes several registers onto the stack.
678 Register K will be saved if bit K in MASK is set. The function does
679 nothing if MASK is zero.
681 To be compatible with the "movm" instruction, the lowest-numbered
682 register must be stored in the lowest slot. If MASK is the set
683 { R1,...,RN }, where R1...RN are ordered least first, the generated
684 instruction will have the form:
687 (set (reg:SI 9) (plus:SI (reg:SI 9) (const_int -N*4)))
688 (set (mem:SI (plus:SI (reg:SI 9)
692 (set (mem:SI (plus:SI (reg:SI 9)
/* NOTE(review): the function's early-exit on mask==0, the `elts` array
   declaration, the loop body braces and the final emit of the PARALLEL
   are on lines missing from this extract; tokens kept exactly as found.  */
697 mn10300_gen_multiple_store (unsigned int mask)
699 /* The order in which registers are stored, from SP-4 through SP-N*4. */
700 static const unsigned int store_order[8] = {
701 /* e2, e3: never saved */
702 FIRST_EXTENDED_REGNUM + 4,
703 FIRST_EXTENDED_REGNUM + 5,
704 FIRST_EXTENDED_REGNUM + 6,
705 FIRST_EXTENDED_REGNUM + 7,
706 /* e0, e1, mdrq, mcrh, mcrl, mcvf: never saved. */
707 FIRST_DATA_REGNUM + 2,
708 FIRST_DATA_REGNUM + 3,
709 FIRST_ADDRESS_REGNUM + 2,
710 FIRST_ADDRESS_REGNUM + 3,
711 /* d0, d1, a0, a1, mdr, lir, lar: never saved. */
/* Build one SET element per requested register, at successive negative
   SP offsets, in the fixed store order above.  */
721 for (i = count = 0; i < ARRAY_SIZE(store_order); ++i)
723 unsigned regno = store_order[i];
725 if (((mask >> regno) & 1) == 0)
729 x = plus_constant (stack_pointer_rtx, count * -4);
730 x = gen_frame_mem (SImode, x);
731 x = gen_rtx_SET (VOIDmode, x, gen_rtx_REG (SImode, regno));
734 /* Remove the register from the mask so that... */
735 mask &= ~(1u << regno);
738 /* ... we can make sure that we didn't try to use a register
739 not listed in the store order. */
740 gcc_assert (mask == 0);
742 /* Create the instruction that updates the stack pointer. */
743 x = plus_constant (stack_pointer_rtx, count * -4);
744 x = gen_rtx_SET (VOIDmode, stack_pointer_rtx, x);
747 /* We need one PARALLEL element to update the stack pointer and
748 an additional element for each register that is stored. */
749 x = gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (count + 1, elts));
/* Expand the function prologue: push callee-saved general registers via
   one movm-style PARALLEL, then save any live callee-saved FP registers
   (AM33-2 only) using whichever of several addressing strategies yields
   the smallest code, set up the frame pointer if needed, allocate the
   local frame, and load the PIC register when required.  NOTE(review):
   this extract is missing many interleaved lines (declarations of
   `size`, `strategy`, `reg`, `xsize`; several `case` labels, braces and
   `break`s of the two strategy switches), so some statements below
   appear without their selecting conditions.  Tokens kept exactly as
   found.  */
754 mn10300_expand_prologue (void)
758 /* SIZE includes the fixed stack space needed for function calls. */
759 size = get_frame_size () + crtl->outgoing_args_size;
760 size += (crtl->outgoing_args_size ? 4 : 0);
762 /* If we use any of the callee-saved registers, save them now. */
763 mn10300_gen_multiple_store (mn10300_get_live_callee_saved_regs ());
765 if (TARGET_AM33_2 && fp_regs_to_save ())
767 int num_regs_to_save = fp_regs_to_save (), i;
773 save_sp_partial_merge,
777 unsigned int strategy_size = (unsigned)-1, this_strategy_size;
780 /* We have several different strategies to save FP registers.
781 We can store them using SP offsets, which is beneficial if
782 there are just a few registers to save, or we can use `a0' in
783 post-increment mode (`a0' is the only call-clobbered address
784 register that is never used to pass information to a
785 function). Furthermore, if we don't need a frame pointer, we
786 can merge the two SP adds into a single one, but this isn't
787 always beneficial; sometimes we can just split the two adds
788 so that we don't exceed a 16-bit constant size. The code
789 below will select which strategy to use, so as to generate
790 smallest code. Ties are broken in favor of shorter sequences
791 (in terms of number of instructions). */
/* Byte-size estimators for an `add #imm,aN` / `add #imm,sp` insn,
   keyed on how many bytes the immediate needs.  */
793 #define SIZE_ADD_AX(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
794 : (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 2)
795 #define SIZE_ADD_SP(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
796 : (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 3)
798 /* We add 0 * (S) in two places to promote to the type of S,
799 so that all arms of the conditional have the same type. */
800 #define SIZE_FMOV_LIMIT(S,N,L,SIZE1,SIZE2,ELSE) \
801 (((S) >= (L)) ? 0 * (S) + (SIZE1) * (N) \
802 : ((S) + 4 * (N) >= (L)) ? (((L) - (S)) / 4 * (SIZE2) \
803 + ((S) + 4 * (N) - (L)) / 4 * (SIZE1)) \
/* Estimated bytes for N `fmov` stores at SP offsets starting at S.  */
805 #define SIZE_FMOV_SP_(S,N) \
806 (SIZE_FMOV_LIMIT ((S), (N), (1 << 24), 7, 6, \
807 SIZE_FMOV_LIMIT ((S), (N), (1 << 8), 6, 4, \
808 (S) ? 4 * (N) : 3 + 4 * ((N) - 1))))
809 #define SIZE_FMOV_SP(S,N) (SIZE_FMOV_SP_ ((unsigned HOST_WIDE_INT)(S), (N)))
811 /* Consider alternative save_sp_merge only if we don't need the
812 frame pointer and size is nonzero. */
813 if (! frame_pointer_needed && size)
815 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
816 this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
817 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
818 this_strategy_size += SIZE_FMOV_SP (size, num_regs_to_save);
820 if (this_strategy_size < strategy_size)
822 strategy = save_sp_merge;
823 strategy_size = this_strategy_size;
827 /* Consider alternative save_sp_no_merge unconditionally. */
828 /* Insn: add -4 * num_regs_to_save, sp. */
829 this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
830 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
831 this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
834 /* Insn: add -size, sp. */
835 this_strategy_size += SIZE_ADD_SP (-size);
838 if (this_strategy_size < strategy_size)
840 strategy = save_sp_no_merge;
841 strategy_size = this_strategy_size;
844 /* Consider alternative save_sp_partial_merge only if we don't
845 need a frame pointer and size is reasonably large. */
846 if (! frame_pointer_needed && size + 4 * num_regs_to_save > 128)
848 /* Insn: add -128, sp. */
849 this_strategy_size = SIZE_ADD_SP (-128);
850 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
851 this_strategy_size += SIZE_FMOV_SP (128 - 4 * num_regs_to_save,
855 /* Insn: add 128-size, sp. */
856 this_strategy_size += SIZE_ADD_SP (128 - size);
859 if (this_strategy_size < strategy_size)
861 strategy = save_sp_partial_merge;
862 strategy_size = this_strategy_size;
866 /* Consider alternative save_a0_merge only if we don't need a
867 frame pointer, size is nonzero and the user hasn't
868 changed the calling conventions of a0. */
869 if (! frame_pointer_needed && size
870 && call_really_used_regs [FIRST_ADDRESS_REGNUM]
871 && ! fixed_regs[FIRST_ADDRESS_REGNUM])
873 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
874 this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
875 /* Insn: mov sp, a0. */
876 this_strategy_size++;
879 /* Insn: add size, a0. */
880 this_strategy_size += SIZE_ADD_AX (size);
882 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
883 this_strategy_size += 3 * num_regs_to_save;
885 if (this_strategy_size < strategy_size)
887 strategy = save_a0_merge;
888 strategy_size = this_strategy_size;
892 /* Consider alternative save_a0_no_merge if the user hasn't
893 changed the calling conventions of a0. */
894 if (call_really_used_regs [FIRST_ADDRESS_REGNUM]
895 && ! fixed_regs[FIRST_ADDRESS_REGNUM])
897 /* Insn: add -4 * num_regs_to_save, sp. */
898 this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
899 /* Insn: mov sp, a0. */
900 this_strategy_size++;
901 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
902 this_strategy_size += 3 * num_regs_to_save;
905 /* Insn: add -size, sp. */
906 this_strategy_size += SIZE_ADD_SP (-size);
909 if (this_strategy_size < strategy_size)
911 strategy = save_a0_no_merge;
912 strategy_size = this_strategy_size;
916 /* Emit the initial SP add, common to all strategies. */
919 case save_sp_no_merge:
920 case save_a0_no_merge:
921 F (emit_insn (gen_addsi3 (stack_pointer_rtx,
923 GEN_INT (-4 * num_regs_to_save))));
927 case save_sp_partial_merge:
928 F (emit_insn (gen_addsi3 (stack_pointer_rtx,
931 xsize = 128 - 4 * num_regs_to_save;
937 F (emit_insn (gen_addsi3 (stack_pointer_rtx,
939 GEN_INT (-(size + 4 * num_regs_to_save)))));
940 /* We'll have to adjust FP register saves according to the
943 /* Since we've already created the stack frame, don't do it
944 again at the end of the function. */
952 /* Now prepare register a0, if we have decided to use it. */
956 case save_sp_no_merge:
957 case save_sp_partial_merge:
962 case save_a0_no_merge:
963 reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM);
964 F (emit_insn (gen_movsi (reg, stack_pointer_rtx)));
966 F (emit_insn (gen_addsi3 (reg, reg, GEN_INT (xsize))));
967 reg = gen_rtx_POST_INC (SImode, reg);
974 /* Now actually save the FP registers. */
975 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
976 if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
984 /* If we aren't using `a0', use an SP offset. */
987 addr = gen_rtx_PLUS (SImode,
992 addr = stack_pointer_rtx;
997 F (emit_insn (gen_movsf (gen_rtx_MEM (SFmode, addr),
998 gen_rtx_REG (SFmode, i))));
1002 /* Now put the frame pointer into the frame pointer register. */
1003 if (frame_pointer_needed)
1004 F (emit_move_insn (frame_pointer_rtx, stack_pointer_rtx));
1006 /* Allocate stack for this frame. */
1008 F (emit_insn (gen_addsi3 (stack_pointer_rtx,
/* Load the PIC register if this function references the GOT.  */
1012 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
1013 emit_insn (gen_GOTaddr2picreg ());
/* Expand the function epilogue: restore callee-saved FP registers (AM33-2
   only) via the cheapest of several strategies, cut the stack back to
   the register save area, then emit a multiple-restore return insn.
   NOTE(review): this extract is missing many interleaved lines
   (declarations of `size` and `reg`, the strategy `switch`, several
   `case`/`break` lines and braces), so some statements appear without
   their selecting conditions.  Tokens kept exactly as found.  */
1017 mn10300_expand_epilogue (void)
1021 /* SIZE includes the fixed stack space needed for function calls. */
1022 size = get_frame_size () + crtl->outgoing_args_size;
1023 size += (crtl->outgoing_args_size ? 4 : 0);
1025 if (TARGET_AM33_2 && fp_regs_to_save ())
1027 int num_regs_to_save = fp_regs_to_save (), i;
1030 /* We have several options to restore FP registers. We could
1031 load them from SP offsets, but, if there are enough FP
1032 registers to restore, we win if we use a post-increment
1035 /* If we have a frame pointer, it's the best option, because we
1036 already know it has the value we want. */
1037 if (frame_pointer_needed)
1038 reg = gen_rtx_REG (SImode, FRAME_POINTER_REGNUM)'
1039 /* Otherwise, we may use `a1', since it's call-clobbered and
1040 it's never used for return values. But only do so if it's
1041 smaller than using SP offsets. */
1044 enum { restore_sp_post_adjust,
1045 restore_sp_pre_adjust,
1046 restore_sp_partial_adjust,
1047 restore_a1 } strategy;
1048 unsigned int this_strategy_size, strategy_size = (unsigned)-1;
1050 /* Consider using sp offsets before adjusting sp. */
1051 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1052 this_strategy_size = SIZE_FMOV_SP (size, num_regs_to_save);
1053 /* If size is too large, we'll have to adjust SP with an
1055 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1057 /* Insn: add size + 4 * num_regs_to_save, sp. */
1058 this_strategy_size += SIZE_ADD_SP (size + 4 * num_regs_to_save);
1060 /* If we don't have to restore any non-FP registers,
1061 we'll be able to save one byte by using rets. */
1062 if (! REG_SAVE_BYTES)
1063 this_strategy_size--;
1065 if (this_strategy_size < strategy_size)
1067 strategy = restore_sp_post_adjust;
1068 strategy_size = this_strategy_size;
1071 /* Consider using sp offsets after adjusting sp. */
1072 /* Insn: add size, sp. */
1073 this_strategy_size = SIZE_ADD_SP (size);
1074 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1075 this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
1076 /* We're going to use ret to release the FP registers
1077 save area, so, no savings. */
1079 if (this_strategy_size < strategy_size)
1081 strategy = restore_sp_pre_adjust;
1082 strategy_size = this_strategy_size;
1085 /* Consider using sp offsets after partially adjusting sp.
1086 When size is close to 32Kb, we may be able to adjust SP
1087 with an imm16 add instruction while still using fmov
1089 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1091 /* Insn: add size + 4 * num_regs_to_save
1092 + REG_SAVE_BYTES - 252,sp. */
1093 this_strategy_size = SIZE_ADD_SP (size + 4 * num_regs_to_save
1094 + REG_SAVE_BYTES - 252);
1095 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1096 this_strategy_size += SIZE_FMOV_SP (252 - REG_SAVE_BYTES
1097 - 4 * num_regs_to_save,
1099 /* We're going to use ret to release the FP registers
1100 save area, so, no savings. */
1102 if (this_strategy_size < strategy_size)
1104 strategy = restore_sp_partial_adjust;
1105 strategy_size = this_strategy_size;
1109 /* Consider using a1 in post-increment mode, as long as the
1110 user hasn't changed the calling conventions of a1. */
1111 if (call_really_used_regs [FIRST_ADDRESS_REGNUM + 1]
1112 && ! fixed_regs[FIRST_ADDRESS_REGNUM+1])
1114 /* Insn: mov sp,a1. */
1115 this_strategy_size = 1;
1118 /* Insn: add size,a1. */
1119 this_strategy_size += SIZE_ADD_AX (size);
1121 /* Insn: fmov (a1+),fs#, for each fs# to be restored. */
1122 this_strategy_size += 3 * num_regs_to_save;
1123 /* If size is large enough, we may be able to save a
1125 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1127 /* Insn: mov a1,sp. */
1128 this_strategy_size += 2;
1130 /* If we don't have to restore any non-FP registers,
1131 we'll be able to save one byte by using rets. */
1132 if (! REG_SAVE_BYTES)
1133 this_strategy_size--;
1135 if (this_strategy_size < strategy_size)
1137 strategy = restore_a1;
1138 strategy_size = this_strategy_size;
1144 case restore_sp_post_adjust:
1147 case restore_sp_pre_adjust:
1148 emit_insn (gen_addsi3 (stack_pointer_rtx,
1154 case restore_sp_partial_adjust:
1155 emit_insn (gen_addsi3 (stack_pointer_rtx,
1157 GEN_INT (size + 4 * num_regs_to_save
1158 + REG_SAVE_BYTES - 252)));
1159 size = 252 - REG_SAVE_BYTES - 4 * num_regs_to_save;
1163 reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM + 1);
1164 emit_insn (gen_movsi (reg, stack_pointer_rtx));
1166 emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
1174 /* Adjust the selected register, if any, for post-increment. */
1176 reg = gen_rtx_POST_INC (SImode, reg);
1178 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
1179 if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
1187 /* If we aren't using a post-increment register, use an
1189 addr = gen_rtx_PLUS (SImode,
1194 addr = stack_pointer_rtx;
1198 emit_insn (gen_movsf (gen_rtx_REG (SFmode, i),
1199 gen_rtx_MEM (SFmode, addr)));
1202 /* If we were using the restore_a1 strategy and the number of
1203 bytes to be released won't fit in the `ret' byte, copy `a1'
1204 to `sp', to avoid having to use `add' to adjust it. */
1205 if (! frame_pointer_needed && reg && size + REG_SAVE_BYTES > 255)
1207 emit_move_insn (stack_pointer_rtx, XEXP (reg, 0));
1212 /* Maybe cut back the stack, except for the register save area.
1214 If the frame pointer exists, then use the frame pointer to
1217 If the stack size + register save area is more than 255 bytes,
1218 then the stack must be cut back here since the size + register
1219 save size is too big for a ret/retf instruction.
1221 Else leave it alone, it will be cut back as part of the
1222 ret/retf instruction, or there wasn't any stack to begin with.
1224 Under no circumstances should the register save area be
1225 deallocated here, that would leave a window where an interrupt
1226 could occur and trash the register save area. */
1227 if (frame_pointer_needed)
1229 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1232 else if (size + REG_SAVE_BYTES > 255)
1234 emit_insn (gen_addsi3 (stack_pointer_rtx,
1240 /* Adjust the stack and restore callee-saved registers, if any. */
1241 if (size || df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1242 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1243 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1244 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1245 || frame_pointer_needed)
1246 emit_jump_insn (gen_return_internal_regs
1247 (GEN_INT (size + REG_SAVE_BYTES)));
1249 emit_jump_insn (gen_return_internal ());
1252 /* Recognize the PARALLEL rtx generated by mn10300_gen_multiple_store().
1253 This function is for MATCH_PARALLEL and so assumes OP is known to be
1254 parallel. If OP is a multiple store, return a mask indicating which
1255 registers it saves. Return 0 otherwise. */
/* NOTE(review): braces, the `return 0;` failure paths after each check,
   the mask/last declarations and the final `return mask;` are on lines
   missing from this extract; tokens kept exactly as found.  */
1258 mn10300_store_multiple_operation (rtx op,
1259 enum machine_mode mode ATTRIBUTE_UNUSED)
1267 count = XVECLEN (op, 0);
1271 /* Check that first instruction has the form (set (sp) (plus A B)) */
1272 elt = XVECEXP (op, 0, 0);
1273 if (GET_CODE (elt) != SET
1274 || (! REG_P (SET_DEST (elt)))
1275 || REGNO (SET_DEST (elt)) != STACK_POINTER_REGNUM
1276 || GET_CODE (SET_SRC (elt)) != PLUS)
1279 /* Check that A is the stack pointer and B is the expected stack size.
1280 For OP to match, each subsequent instruction should push a word onto
1281 the stack. We therefore expect the first instruction to create
1282 COUNT-1 stack slots. */
1283 elt = SET_SRC (elt);
1284 if ((! REG_P (XEXP (elt, 0)))
1285 || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
1286 || (! CONST_INT_P (XEXP (elt, 1)))
1287 || INTVAL (XEXP (elt, 1)) != -(count - 1) * 4)
/* Validate each store element and accumulate the register mask.  */
1291 for (i = 1; i < count; i++)
1293 /* Check that element i is a (set (mem M) R). */
1294 /* ??? Validate the register order a-la mn10300_gen_multiple_store.
1295 Remember: the ordering is *not* monotonic. */
1296 elt = XVECEXP (op, 0, i);
1297 if (GET_CODE (elt) != SET
1298 || (! MEM_P (SET_DEST (elt)))
1299 || (! REG_P (SET_SRC (elt))))
1302 /* Remember which registers are to be saved. */
1303 last = REGNO (SET_SRC (elt));
1304 mask |= (1 << last);
1306 /* Check that M has the form (plus (sp) (const_int -I*4)) */
1307 elt = XEXP (SET_DEST (elt), 0);
1308 if (GET_CODE (elt) != PLUS
1309 || (! REG_P (XEXP (elt, 0)))
1310 || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
1311 || (! CONST_INT_P (XEXP (elt, 1)))
1312 || INTVAL (XEXP (elt, 1)) != -i * 4)
1316 /* All or none of the callee-saved extended registers must be in the set. */
1317 if ((mask & 0x3c000) != 0
1318 && (mask & 0x3c000) != 0x3c000)
1324 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
/* The stack pointer can only live in SP_REGS, so reloads of it go through
   an address/extended register; pseudos (and subregs of pseudos) get the
   mode-limited class.  NOTE(review): the `if (REG_P (x)` line opening the
   second condition and the fall-through `return rclass;` are on lines
   missing from this extract.  */
1327 mn10300_preferred_reload_class (rtx x, reg_class_t rclass)
1329 if (x == stack_pointer_rtx && rclass != SP_REGS)
1330 return ADDRESS_OR_EXTENDED_REGS;
1333 && !HARD_REGISTER_P (x))
1334 || (GET_CODE (x) == SUBREG
1335 && REG_P (SUBREG_REG (x))
1336 && !HARD_REGISTER_P (SUBREG_REG (x))))
1337 return LIMIT_RELOAD_CLASS (GET_MODE (x), rclass);
1342 /* Implement TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
/* Same SP-register restriction as the input-reload hook above; the
   trailing `return rclass;` is on a line missing from this extract.  */
1345 mn10300_preferred_output_reload_class (rtx x, reg_class_t rclass)
1347 if (x == stack_pointer_rtx && rclass != SP_REGS)
1348 return ADDRESS_OR_EXTENDED_REGS;
1353 /* What (if any) secondary registers are needed to move IN with mode
1354 MODE into a register in register class RCLASS.
1356 We might be able to simplify this. */
/* NOTE(review): the `in` parameter, `rtx inner = in;` initialization,
   parts of the MEM/pseudo conditions and the final return are on lines
   missing from this extract; tokens kept exactly as found.  */
1359 mn10300_secondary_reload_class (enum reg_class rclass, enum machine_mode mode,
1364 /* Strip off any SUBREG expressions from IN. Basically we want
1365 to know if IN is a pseudo or (subreg (pseudo)) as those can
1366 turn into MEMs during reload. */
1367 while (GET_CODE (inner) == SUBREG)
1368 inner = SUBREG_REG (inner);
1370 /* Memory loads less than a full word wide can't have an
1371 address or stack pointer destination. They must use
1372 a data register as an intermediate register. */
1375 && REGNO (inner) >= FIRST_PSEUDO_REGISTER))
1376 && (mode == QImode || mode == HImode)
1377 && (rclass == ADDRESS_REGS || rclass == SP_REGS
1378 || rclass == SP_OR_ADDRESS_REGS))
1381 return DATA_OR_EXTENDED_REGS;
1385 /* We can't directly load sp + const_int into a data register;
1386 we must use an address register as an intermediate. */
1387 if (rclass != SP_REGS
1388 && rclass != ADDRESS_REGS
1389 && rclass != SP_OR_ADDRESS_REGS
1390 && rclass != SP_OR_EXTENDED_REGS
1391 && rclass != ADDRESS_OR_EXTENDED_REGS
1392 && rclass != SP_OR_ADDRESS_OR_EXTENDED_REGS
1393 && (in == stack_pointer_rtx
1394 || (GET_CODE (in) == PLUS
1395 && (XEXP (in, 0) == stack_pointer_rtx
1396 || XEXP (in, 1) == stack_pointer_rtx))))
1397 return ADDRESS_REGS;
/* AM33-2 FP-register cases (the TARGET_AM33_2 guard is on a missing
   line -- TODO confirm).  */
1400 && rclass == FP_REGS)
1402 /* We can't load directly into an FP register from a
1403 constant address. */
1405 && CONSTANT_ADDRESS_P (XEXP (in, 0)))
1406 return DATA_OR_EXTENDED_REGS;
1408 /* Handle case where a pseudo may not get a hard register
1409 but has an equivalent memory location defined. */
1411 && REGNO (inner) >= FIRST_PSEUDO_REGISTER
1412 && reg_equiv_mem [REGNO (inner)]
1413 && CONSTANT_ADDRESS_P (XEXP (reg_equiv_mem [REGNO (inner)], 0)))
1414 return DATA_OR_EXTENDED_REGS;
1417 /* Otherwise assume no secondary reloads are needed. */
/* Compute the ELIMINABLE_REGS offset between register FROM and register
   TO for this function's frame layout.  NOTE(review): the return type,
   braces and the `return 0;` fallbacks (for the no-saved-registers
   cases) are on lines missing from this extract.  */
1422 mn10300_initial_offset (int from, int to)
1424 /* The difference between the argument pointer and the frame pointer
1425 is the size of the callee register save area. */
1426 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
1428 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1429 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1430 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1431 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1432 || fp_regs_to_save ()
1433 || frame_pointer_needed)
1434 return REG_SAVE_BYTES
1435 + 4 * fp_regs_to_save ();
1440 /* The difference between the argument pointer and the stack pointer is
1441 the sum of the size of this function's frame, the callee register save
1442 area, and the fixed stack space needed for function calls (if any). */
1443 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1445 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1446 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1447 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1448 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1449 || fp_regs_to_save ()
1450 || frame_pointer_needed)
/* +4 accounts for the return-pointer slot when outgoing args exist,
   matching the prologue's size computation.  */
1451 return (get_frame_size () + REG_SAVE_BYTES
1452 + 4 * fp_regs_to_save ()
1453 + (crtl->outgoing_args_size
1454 ? crtl->outgoing_args_size + 4 : 0));
1456 return (get_frame_size ()
1457 + (crtl->outgoing_args_size
1458 ? crtl->outgoing_args_size + 4 : 0));
1461 /* The difference between the frame pointer and stack pointer is the sum
1462 of the size of this function's frame and the fixed stack space needed
1463 for function calls (if any). */
1464 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1465 return (get_frame_size ()
1466 + (crtl->outgoing_args_size
1467 ? crtl->outgoing_args_size + 4 : 0));
1472 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1475 mn10300_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1477 /* Return values > 8 bytes in length in memory. */
1478 return (int_size_in_bytes (type) > 8
1479 || int_size_in_bytes (type) == 0
1480 || TYPE_MODE (type) == BLKmode);
1483 /* Flush the argument registers to the stack for a stdarg function;
1484 return the new argument pointer. */
1486 mn10300_builtin_saveregs (void)
1489 tree fntype = TREE_TYPE (current_function_decl);
1490 int argadj = ((!stdarg_p (fntype))
1491 ? UNITS_PER_WORD : 0);
1492 alias_set_type set = get_varargs_alias_set ();
1495 offset = plus_constant (crtl->args.arg_offset_rtx, argadj);
1497 offset = crtl->args.arg_offset_rtx;
1499 mem = gen_rtx_MEM (SImode, crtl->args.internal_arg_pointer);
1500 set_mem_alias_set (mem, set);
1501 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
1503 mem = gen_rtx_MEM (SImode,
1504 plus_constant (crtl->args.internal_arg_pointer, 4));
1505 set_mem_alias_set (mem, set);
1506 emit_move_insn (mem, gen_rtx_REG (SImode, 1));
1508 return copy_to_reg (expand_binop (Pmode, add_optab,
1509 crtl->args.internal_arg_pointer,
1510 offset, 0, 0, OPTAB_LIB_WIDEN));
1514 mn10300_va_start (tree valist, rtx nextarg)
1516 nextarg = expand_builtin_saveregs ();
1517 std_expand_builtin_va_start (valist, nextarg);
1520 /* Return true when a parameter should be passed by reference. */
1523 mn10300_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1524 enum machine_mode mode, const_tree type,
1525 bool named ATTRIBUTE_UNUSED)
1527 unsigned HOST_WIDE_INT size;
1530 size = int_size_in_bytes (type);
1532 size = GET_MODE_SIZE (mode);
1534 return (size > 8 || size == 0);
1537 /* Return an RTX to represent where a value with mode MODE will be returned
1538 from a function. If the result is NULL_RTX, the argument is pushed. */
1541 mn10300_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1542 const_tree type, bool named ATTRIBUTE_UNUSED)
1544 rtx result = NULL_RTX;
1547 /* We only support using 2 data registers as argument registers. */
1550 /* Figure out the size of the object to be passed. */
1551 if (mode == BLKmode)
1552 size = int_size_in_bytes (type);
1554 size = GET_MODE_SIZE (mode);
1556 cum->nbytes = (cum->nbytes + 3) & ~3;
1558 /* Don't pass this arg via a register if all the argument registers
1560 if (cum->nbytes > nregs * UNITS_PER_WORD)
1563 /* Don't pass this arg via a register if it would be split between
1564 registers and memory. */
1565 if (type == NULL_TREE
1566 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1569 switch (cum->nbytes / UNITS_PER_WORD)
1572 result = gen_rtx_REG (mode, FIRST_ARGUMENT_REGNUM);
1575 result = gen_rtx_REG (mode, FIRST_ARGUMENT_REGNUM + 1);
1584 /* Update the data in CUM to advance over an argument
1585 of mode MODE and data type TYPE.
1586 (TYPE is null for libcalls where that information may not be available.) */
1589 mn10300_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1590 const_tree type, bool named ATTRIBUTE_UNUSED)
1592 cum->nbytes += (mode != BLKmode
1593 ? (GET_MODE_SIZE (mode) + 3) & ~3
1594 : (int_size_in_bytes (type) + 3) & ~3);
1597 /* Return the number of bytes of registers to use for an argument passed
1598 partially in registers and partially in memory. */
1601 mn10300_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1602 tree type, bool named ATTRIBUTE_UNUSED)
1606 /* We only support using 2 data registers as argument registers. */
1609 /* Figure out the size of the object to be passed. */
1610 if (mode == BLKmode)
1611 size = int_size_in_bytes (type);
1613 size = GET_MODE_SIZE (mode);
1615 cum->nbytes = (cum->nbytes + 3) & ~3;
1617 /* Don't pass this arg via a register if all the argument registers
1619 if (cum->nbytes > nregs * UNITS_PER_WORD)
1622 if (cum->nbytes + size <= nregs * UNITS_PER_WORD)
1625 /* Don't pass this arg via a register if it would be split between
1626 registers and memory. */
1627 if (type == NULL_TREE
1628 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1631 return nregs * UNITS_PER_WORD - cum->nbytes;
1634 /* Return the location of the function's value. This will be either
1635 $d0 for integer functions, $a0 for pointers, or a PARALLEL of both
1636 $d0 and $a0 if the -mreturn-pointer-on-do flag is set. Note that
1637 we only return the PARALLEL for outgoing values; we do not want
1638 callers relying on this extra copy. */
1641 mn10300_function_value (const_tree valtype,
1642 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1646 enum machine_mode mode = TYPE_MODE (valtype);
1648 if (! POINTER_TYPE_P (valtype))
1649 return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
1650 else if (! TARGET_PTR_A0D0 || ! outgoing
1651 || cfun->returns_struct)
1652 return gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM);
1654 rv = gen_rtx_PARALLEL (mode, rtvec_alloc (2));
1656 = gen_rtx_EXPR_LIST (VOIDmode,
1657 gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM),
1661 = gen_rtx_EXPR_LIST (VOIDmode,
1662 gen_rtx_REG (mode, FIRST_DATA_REGNUM),
1667 /* Implements TARGET_LIBCALL_VALUE. */
1670 mn10300_libcall_value (enum machine_mode mode,
1671 const_rtx fun ATTRIBUTE_UNUSED)
1673 return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
1676 /* Implements FUNCTION_VALUE_REGNO_P. */
1679 mn10300_function_value_regno_p (const unsigned int regno)
1681 return (regno == FIRST_DATA_REGNUM || regno == FIRST_ADDRESS_REGNUM);
1684 /* Output a compare insn. */
1687 mn10300_output_cmp (rtx operand, rtx insn)
1692 /* We can save a byte if we can find a register which has the value
1694 temp = PREV_INSN (insn);
1695 while (optimize && temp)
1699 /* We allow the search to go through call insns. We record
1700 the fact that we've past a CALL_INSN and reject matches which
1701 use call clobbered registers. */
1704 || GET_CODE (temp) == BARRIER)
1710 if (GET_CODE (temp) == NOTE)
1712 temp = PREV_INSN (temp);
1716 /* It must be an insn, see if it is a simple set. */
1717 set = single_set (temp);
1720 temp = PREV_INSN (temp);
1724 /* Are we setting a data register to zero (this does not win for
1727 If it's a call clobbered register, have we past a call?
1729 Make sure the register we find isn't the same as ourself;
1730 the mn10300 can't encode that.
1732 ??? reg_set_between_p return nonzero anytime we pass a CALL_INSN
1733 so the code to detect calls here isn't doing anything useful. */
1734 if (REG_P (SET_DEST (set))
1735 && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
1736 && !reg_set_between_p (SET_DEST (set), temp, insn)
1737 && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
1738 == REGNO_REG_CLASS (REGNO (operand)))
1739 && REGNO_REG_CLASS (REGNO (SET_DEST (set))) != EXTENDED_REGS
1740 && REGNO (SET_DEST (set)) != REGNO (operand)
1742 || ! call_really_used_regs [REGNO (SET_DEST (set))]))
1745 xoperands[0] = operand;
1746 xoperands[1] = SET_DEST (set);
1748 output_asm_insn ("cmp %1,%0", xoperands);
1752 if (REGNO_REG_CLASS (REGNO (operand)) == EXTENDED_REGS
1753 && REG_P (SET_DEST (set))
1754 && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
1755 && !reg_set_between_p (SET_DEST (set), temp, insn)
1756 && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
1757 != REGNO_REG_CLASS (REGNO (operand)))
1758 && REGNO_REG_CLASS (REGNO (SET_DEST (set))) == EXTENDED_REGS
1759 && REGNO (SET_DEST (set)) != REGNO (operand)
1761 || ! call_really_used_regs [REGNO (SET_DEST (set))]))
1764 xoperands[0] = operand;
1765 xoperands[1] = SET_DEST (set);
1767 output_asm_insn ("cmp %1,%0", xoperands);
1770 temp = PREV_INSN (temp);
1775 /* Return 1 if X contains a symbolic expression. We know these
1776 expressions will have one of a few well defined forms, so
1777 we need only check those forms. */
1780 mn10300_symbolic_operand (rtx op,
1781 enum machine_mode mode ATTRIBUTE_UNUSED)
1783 switch (GET_CODE (op))
1790 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1791 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1792 && CONST_INT_P (XEXP (op, 1)));
1798 /* Try machine dependent ways of modifying an illegitimate address
1799 to be legitimate. If we find one, return the new valid address.
1800 This macro is used in only one place: `memory_address' in explow.c.
1802 OLDX is the address as it was before break_out_memory_refs was called.
1803 In some cases it is useful to look at this to decide what needs to be done.
1805 Normally it is always safe for this macro to do nothing. It exists to
1806 recognize opportunities to optimize the output.
1808 But on a few ports with segmented architectures and indexed addressing
1809 (mn10300, hppa) it is used to rewrite certain problematical addresses. */
1812 mn10300_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
1813 enum machine_mode mode ATTRIBUTE_UNUSED)
1815 if (flag_pic && ! mn10300_legitimate_pic_operand_p (x))
1816 x = mn10300_legitimize_pic_address (oldx, NULL_RTX);
1818 /* Uh-oh. We might have an address for x[n-100000]. This needs
1819 special handling to avoid creating an indexed memory address
1820 with x-100000 as the base. */
1821 if (GET_CODE (x) == PLUS
1822 && mn10300_symbolic_operand (XEXP (x, 1), VOIDmode))
1824 /* Ugly. We modify things here so that the address offset specified
1825 by the index expression is computed first, then added to x to form
1826 the entire address. */
1828 rtx regx1, regy1, regy2, y;
1830 /* Strip off any CONST. */
1832 if (GET_CODE (y) == CONST)
1835 if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
1837 regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
1838 regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
1839 regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
1840 regx1 = force_reg (Pmode,
1841 gen_rtx_fmt_ee (GET_CODE (y), Pmode, regx1,
1843 return force_reg (Pmode, gen_rtx_PLUS (Pmode, regx1, regy1));
1849 /* Convert a non-PIC address in `orig' to a PIC address using @GOT or
1850 @GOTOFF in `reg'. */
1853 mn10300_legitimize_pic_address (rtx orig, rtx reg)
1857 if (GET_CODE (orig) == LABEL_REF
1858 || (GET_CODE (orig) == SYMBOL_REF
1859 && (CONSTANT_POOL_ADDRESS_P (orig)
1860 || ! MN10300_GLOBAL_P (orig))))
1863 reg = gen_reg_rtx (Pmode);
1865 x = gen_rtx_UNSPEC (SImode, gen_rtvec (1, orig), UNSPEC_GOTOFF);
1866 x = gen_rtx_CONST (SImode, x);
1867 emit_move_insn (reg, x);
1869 x = emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1871 else if (GET_CODE (orig) == SYMBOL_REF)
1874 reg = gen_reg_rtx (Pmode);
1876 x = gen_rtx_UNSPEC (SImode, gen_rtvec (1, orig), UNSPEC_GOT);
1877 x = gen_rtx_CONST (SImode, x);
1878 x = gen_rtx_PLUS (SImode, pic_offset_table_rtx, x);
1879 x = gen_const_mem (SImode, x);
1881 x = emit_move_insn (reg, x);
1886 set_unique_reg_note (x, REG_EQUAL, orig);
1890 /* Return zero if X references a SYMBOL_REF or LABEL_REF whose symbol
1891 isn't protected by a PIC unspec; nonzero otherwise. */
1894 mn10300_legitimate_pic_operand_p (rtx x)
1899 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1902 if (GET_CODE (x) == UNSPEC
1903 && (XINT (x, 1) == UNSPEC_PIC
1904 || XINT (x, 1) == UNSPEC_GOT
1905 || XINT (x, 1) == UNSPEC_GOTOFF
1906 || XINT (x, 1) == UNSPEC_PLT
1907 || XINT (x, 1) == UNSPEC_GOTSYM_OFF))
1910 fmt = GET_RTX_FORMAT (GET_CODE (x));
1911 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1917 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1918 if (! mn10300_legitimate_pic_operand_p (XVECEXP (x, i, j)))
1921 else if (fmt[i] == 'e'
1922 && ! mn10300_legitimate_pic_operand_p (XEXP (x, i)))
1929 /* Return TRUE if the address X, taken from a (MEM:MODE X) rtx, is
1930 legitimate, and FALSE otherwise.
1932 On the mn10300, the value in the address register must be
1933 in the same memory space/segment as the effective address.
1935 This is problematical for reload since it does not understand
1936 that base+index != index+base in a memory reference.
1938 Note it is still possible to use reg+reg addressing modes,
1939 it's just much more difficult. For a discussion of a possible
1940 workaround and solution, see the comments in pa.c before the
1941 function record_unscaled_index_insn_codes. */
1944 mn10300_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1946 if (CONSTANT_ADDRESS_P (x)
1947 && (! flag_pic || mn10300_legitimate_pic_operand_p (x)))
1950 if (RTX_OK_FOR_BASE_P (x, strict))
1954 && GET_CODE (x) == POST_INC
1955 && RTX_OK_FOR_BASE_P (XEXP (x, 0), strict)
1956 && (mode == SImode || mode == SFmode || mode == HImode))
1959 if (GET_CODE (x) == PLUS)
1961 rtx base = 0, index = 0;
1963 if (REG_P (XEXP (x, 0))
1964 && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 0)), strict))
1967 index = XEXP (x, 1);
1970 if (REG_P (XEXP (x, 1))
1971 && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 1)), strict))
1974 index = XEXP (x, 0);
1977 if (base != 0 && index != 0)
1979 if (CONST_INT_P (index))
1981 if (GET_CODE (index) == CONST
1982 && GET_CODE (XEXP (index, 0)) != PLUS
1984 || (mn10300_legitimate_pic_operand_p (index)
1985 && GET_MODE_SIZE (mode) == 4)))
1993 /* Used by LEGITIMATE_CONSTANT_P(). Returns TRUE if X is a valid
1994 constant. Note that some "constants" aren't valid, such as TLS
1995 symbols and unconverted GOT-based references, so we eliminate
1999 mn10300_legitimate_constant_p (rtx x)
2001 switch (GET_CODE (x))
2006 if (GET_CODE (x) == PLUS)
2008 if (! CONST_INT_P (XEXP (x, 1)))
2013 /* Only some unspecs are valid as "constants". */
2014 if (GET_CODE (x) == UNSPEC)
2016 switch (XINT (x, 1))
2018 case UNSPEC_INT_LABEL:
2029 /* We must have drilled down to a symbol. */
2030 if (! mn10300_symbolic_operand (x, Pmode))
2041 /* Undo pic address legitimization for the benefit of debug info. */
2044 mn10300_delegitimize_address (rtx orig_x)
2046 rtx x = orig_x, ret, addend = NULL;
2051 if (GET_CODE (x) != PLUS || GET_MODE (x) != Pmode)
2054 if (XEXP (x, 0) == pic_offset_table_rtx)
2056 /* With the REG+REG addressing of AM33, var-tracking can re-assemble
2057 some odd-looking "addresses" that were never valid in the first place.
2058 We need to look harder to avoid warnings being emitted. */
2059 else if (GET_CODE (XEXP (x, 0)) == PLUS)
2061 rtx x0 = XEXP (x, 0);
2062 rtx x00 = XEXP (x0, 0);
2063 rtx x01 = XEXP (x0, 1);
2065 if (x00 == pic_offset_table_rtx)
2067 else if (x01 == pic_offset_table_rtx)
2077 if (GET_CODE (x) != CONST)
2080 if (GET_CODE (x) != UNSPEC)
2083 ret = XVECEXP (x, 0, 0);
2084 if (XINT (x, 1) == UNSPEC_GOTOFF)
2086 else if (XINT (x, 1) == UNSPEC_GOT)
2091 gcc_assert (GET_CODE (ret) == SYMBOL_REF);
2092 if (need_mem != MEM_P (orig_x))
2094 if (need_mem && addend)
2097 ret = gen_rtx_PLUS (Pmode, addend, ret);
2101 /* For addresses, costs are relative to "MOV (Rm),Rn". For AM33 this is
2102 the 3-byte fully general instruction; for MN103 this is the 2-byte form
2103 with an address register. */
2106 mn10300_address_cost (rtx x, bool speed)
2111 switch (GET_CODE (x))
2116 /* We assume all of these require a 32-bit constant, even though
2117 some symbol and label references can be relaxed. */
2118 return speed ? 1 : 4;
2126 /* Assume any symbolic offset is a 32-bit constant. */
2127 i = (CONST_INT_P (XEXP (x, 1)) ? INTVAL (XEXP (x, 1)) : 0x12345678);
2128 if (IN_RANGE (i, -128, 127))
2129 return speed ? 0 : 1;
2132 if (IN_RANGE (i, -0x800000, 0x7fffff))
2138 index = XEXP (x, 1);
2139 if (register_operand (index, SImode))
2141 /* Attempt to minimize the number of registers in the address.
2142 This is similar to what other ports do. */
2143 if (register_operand (base, SImode))
2147 index = XEXP (x, 0);
2150 /* Assume any symbolic offset is a 32-bit constant. */
2151 i = (CONST_INT_P (XEXP (x, 1)) ? INTVAL (XEXP (x, 1)) : 0x12345678);
2152 if (IN_RANGE (i, -128, 127))
2153 return speed ? 0 : 1;
2154 if (IN_RANGE (i, -32768, 32767))
2155 return speed ? 0 : 2;
2156 return speed ? 2 : 6;
2159 return rtx_cost (x, MEM, speed);
2163 /* Implement the TARGET_REGISTER_MOVE_COST hook.
2165 Recall that the base value of 2 is required by assumptions elsewhere
2166 in the body of the compiler, and that cost 2 is special-cased as an
2167 early exit from reload meaning no work is required. */
2170 mn10300_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
2171 reg_class_t ifrom, reg_class_t ito)
2173 enum reg_class from = (enum reg_class) ifrom;
2174 enum reg_class to = (enum reg_class) ito;
2175 enum reg_class scratch, test;
2177 /* Simplify the following code by unifying the fp register classes. */
2178 if (to == FP_ACC_REGS)
2180 if (from == FP_ACC_REGS)
2183 /* Diagnose invalid moves by costing them as two moves. */
2188 scratch = (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
2189 else if (to == FP_REGS && to != from)
2190 scratch = GENERAL_REGS;
2194 if (from == SP_REGS)
2195 scratch = (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
2196 else if (from == FP_REGS && to != from)
2197 scratch = GENERAL_REGS;
2199 if (scratch != NO_REGS && !reg_class_subset_p (test, scratch))
2200 return (mn10300_register_move_cost (VOIDmode, from, scratch)
2201 + mn10300_register_move_cost (VOIDmode, scratch, to));
2203 /* From here on, all we need consider are legal combinations. */
2207 /* The scale here is bytes * 2. */
2209 if (from == to && (to == ADDRESS_REGS || to == DATA_REGS))
2212 if (from == SP_REGS)
2213 return (to == ADDRESS_REGS ? 2 : 6);
2215 /* For MN103, all remaining legal moves are two bytes. */
2220 return (from == ADDRESS_REGS ? 4 : 6);
2222 if ((from == ADDRESS_REGS || from == DATA_REGS)
2223 && (to == ADDRESS_REGS || to == DATA_REGS))
2226 if (to == EXTENDED_REGS)
2227 return (to == from ? 6 : 4);
2229 /* What's left are SP_REGS, FP_REGS, or combinations of the above. */
2234 /* The scale here is cycles * 2. */
2238 if (from == FP_REGS)
2241 /* All legal moves between integral registers are single cycle. */
2246 /* Implement the TARGET_MEMORY_MOVE_COST hook.
2248 Given lack of the form of the address, this must be speed-relative,
2249 though we should never be less expensive than a size-relative register
2250 move cost above. This is not a problem. */
2253 mn10300_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
2254 reg_class_t iclass, bool in ATTRIBUTE_UNUSED)
2256 enum reg_class rclass = (enum reg_class) iclass;
2258 if (rclass == FP_REGS)
2263 /* Implement the TARGET_RTX_COSTS hook.
2265 Speed-relative costs are relative to COSTS_N_INSNS, which is intended
2266 to represent cycles. Size-relative costs are in bytes. */
2269 mn10300_rtx_costs (rtx x, int code, int outer_code, int *ptotal, bool speed)
2271 /* This value is used for SYMBOL_REF etc where we want to pretend
2272 we have a full 32-bit constant. */
2273 HOST_WIDE_INT i = 0x12345678;
2283 if (outer_code == SET)
2285 /* 16-bit integer loads have latency 1, 32-bit loads 2. */
2286 if (IN_RANGE (i, -32768, 32767))
2287 total = COSTS_N_INSNS (1);
2289 total = COSTS_N_INSNS (2);
2293 /* 16-bit integer operands don't affect latency;
2294 24-bit and 32-bit operands add a cycle. */
2295 if (IN_RANGE (i, -32768, 32767))
2298 total = COSTS_N_INSNS (1);
2303 if (outer_code == SET)
2307 else if (IN_RANGE (i, -128, 127))
2309 else if (IN_RANGE (i, -32768, 32767))
2316 /* Reference here is ADD An,Dn, vs ADD imm,Dn. */
2317 if (IN_RANGE (i, -128, 127))
2319 else if (IN_RANGE (i, -32768, 32767))
2321 else if (TARGET_AM33 && IN_RANGE (i, -0x01000000, 0x00ffffff))
2333 /* We assume all of these require a 32-bit constant, even though
2334 some symbol and label references can be relaxed. */
2338 switch (XINT (x, 1))
2344 case UNSPEC_GOTSYM_OFF:
2345 /* The PIC unspecs also resolve to a 32-bit constant. */
2349 /* Assume any non-listed unspec is some sort of arithmetic. */
2350 goto do_arith_costs;
2354 /* Notice the size difference of INC and INC4. */
2355 if (!speed && outer_code == SET && CONST_INT_P (XEXP (x, 1)))
2357 i = INTVAL (XEXP (x, 1));
2358 if (i == 1 || i == 4)
2360 total = 1 + rtx_cost (XEXP (x, 0), PLUS, speed);
2364 goto do_arith_costs;
2378 total = (speed ? COSTS_N_INSNS (1) : 2);
2382 /* Notice the size difference of ASL2 and variants. */
2383 if (!speed && CONST_INT_P (XEXP (x, 1)))
2384 switch (INTVAL (XEXP (x, 1)))
2399 total = (speed ? COSTS_N_INSNS (1) : 3);
2403 total = (speed ? COSTS_N_INSNS (3) : 2);
2410 total = (speed ? COSTS_N_INSNS (39)
2411 /* Include space to load+retrieve MDR. */
2412 : code == MOD || code == UMOD ? 6 : 4);
2416 total = mn10300_address_cost (XEXP (x, 0), speed);
2418 total = COSTS_N_INSNS (2 + total);
2422 /* Probably not implemented. Assume external call. */
2423 total = (speed ? COSTS_N_INSNS (10) : 7);
2435 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
2436 may access it using GOTOFF instead of GOT. */
2439 mn10300_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
2445 symbol = XEXP (rtl, 0);
2446 if (GET_CODE (symbol) != SYMBOL_REF)
2450 SYMBOL_REF_FLAG (symbol) = (*targetm.binds_local_p) (decl);
2453 /* Dispatch tables on the mn10300 are extremely expensive in terms of code
2454 and readonly data size. So we crank up the case threshold value to
2455 encourage a series of if/else comparisons to implement many small switch
2456 statements. In theory, this value could be increased much more if we
2457 were solely optimizing for space, but we keep it "reasonable" to avoid
2458 serious code efficiency lossage. */
2461 mn10300_case_values_threshold (void)
2466 /* Worker function for TARGET_TRAMPOLINE_INIT. */
2469 mn10300_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2471 rtx mem, disp, fnaddr = XEXP (DECL_RTL (fndecl), 0);
2473 /* This is a strict alignment target, which means that we play
2474 some games to make sure that the locations at which we need
2475 to store <chain> and <disp> wind up at aligned addresses.
2478 0xfc 0xdd mov chain,a1
2480 0xf8 0xed 0x00 btst 0,d1
2484 Note that the two extra insns are effectively nops; they
2485 clobber the flags but do not affect the contents of D0 or D1. */
2487 disp = expand_binop (SImode, sub_optab, fnaddr,
2488 plus_constant (XEXP (m_tramp, 0), 11),
2489 NULL_RTX, 1, OPTAB_DIRECT);
2491 mem = adjust_address (m_tramp, SImode, 0);
2492 emit_move_insn (mem, gen_int_mode (0xddfc0028, SImode));
2493 mem = adjust_address (m_tramp, SImode, 4);
2494 emit_move_insn (mem, chain_value);
2495 mem = adjust_address (m_tramp, SImode, 8);
2496 emit_move_insn (mem, gen_int_mode (0xdc00edf8, SImode));
2497 mem = adjust_address (m_tramp, SImode, 12);
2498 emit_move_insn (mem, disp);
2501 /* Output the assembler code for a C++ thunk function.
2502 THUNK_DECL is the declaration for the thunk function itself, FUNCTION
2503 is the decl for the target function. DELTA is an immediate constant
2504 offset to be added to the THIS parameter. If VCALL_OFFSET is nonzero
2505 the word at the adjusted address *(*THIS' + VCALL_OFFSET) should be
2506 additionally added to THIS. Finally jump to the entry point of
2510 mn10300_asm_output_mi_thunk (FILE * file,
2511 tree thunk_fndecl ATTRIBUTE_UNUSED,
2512 HOST_WIDE_INT delta,
2513 HOST_WIDE_INT vcall_offset,
2518 /* Get the register holding the THIS parameter. Handle the case
2519 where there is a hidden first argument for a returned structure. */
2520 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
2521 _this = reg_names [FIRST_ARGUMENT_REGNUM + 1];
2523 _this = reg_names [FIRST_ARGUMENT_REGNUM];
2525 fprintf (file, "\t%s Thunk Entry Point:\n", ASM_COMMENT_START);
2528 fprintf (file, "\tadd %d, %s\n", (int) delta, _this);
2532 const char * scratch = reg_names [FIRST_ADDRESS_REGNUM + 1];
2534 fprintf (file, "\tmov %s, %s\n", _this, scratch);
2535 fprintf (file, "\tmov (%s), %s\n", scratch, scratch);
2536 fprintf (file, "\tadd %d, %s\n", (int) vcall_offset, scratch);
2537 fprintf (file, "\tmov (%s), %s\n", scratch, scratch);
2538 fprintf (file, "\tadd %s, %s\n", scratch, _this);
2541 fputs ("\tjmp ", file);
2542 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
2546 /* Return true if mn10300_output_mi_thunk would be able to output the
2547 assembler code for the thunk function specified by the arguments
2548 it is passed, and false otherwise. */
2551 mn10300_can_output_mi_thunk (const_tree thunk_fndecl ATTRIBUTE_UNUSED,
2552 HOST_WIDE_INT delta ATTRIBUTE_UNUSED,
2553 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
2554 const_tree function ATTRIBUTE_UNUSED)
2560 mn10300_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
2562 if (REGNO_REG_CLASS (regno) == FP_REGS
2563 || REGNO_REG_CLASS (regno) == FP_ACC_REGS)
2564 /* Do not store integer values in FP registers. */
2565 return GET_MODE_CLASS (mode) == MODE_FLOAT && ((regno & 1) == 0);
2567 if (((regno) & 1) == 0 || GET_MODE_SIZE (mode) == 4)
2570 if (REGNO_REG_CLASS (regno) == DATA_REGS
2571 || (TARGET_AM33 && REGNO_REG_CLASS (regno) == ADDRESS_REGS)
2572 || REGNO_REG_CLASS (regno) == EXTENDED_REGS)
2573 return GET_MODE_SIZE (mode) <= 4;
2579 mn10300_modes_tieable (enum machine_mode mode1, enum machine_mode mode2)
2581 if (GET_MODE_CLASS (mode1) == MODE_FLOAT
2582 && GET_MODE_CLASS (mode2) != MODE_FLOAT)
2585 if (GET_MODE_CLASS (mode2) == MODE_FLOAT
2586 && GET_MODE_CLASS (mode1) != MODE_FLOAT)
2591 || (GET_MODE_SIZE (mode1) <= 4 && GET_MODE_SIZE (mode2) <= 4))
2598 mn10300_select_cc_mode (rtx x)
2600 return (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) ? CC_FLOATmode : CCmode;
2604 is_load_insn (rtx insn)
2606 if (GET_CODE (PATTERN (insn)) != SET)
2609 return MEM_P (SET_SRC (PATTERN (insn)));
2613 is_store_insn (rtx insn)
2615 if (GET_CODE (PATTERN (insn)) != SET)
2618 return MEM_P (SET_DEST (PATTERN (insn)));
2621 /* Update scheduling costs for situations that cannot be
2622 described using the attributes and DFA machinery.
2623 DEP is the insn being scheduled.
2624 INSN is the previous insn.
2625 COST is the current cycle cost for DEP. */
2628 mn10300_adjust_sched_cost (rtx insn, rtx link, rtx dep, int cost)
2630 int timings = get_attr_timings (insn);
2635 if (GET_CODE (insn) == PARALLEL)
2636 insn = XVECEXP (insn, 0, 0);
2638 if (GET_CODE (dep) == PARALLEL)
2639 dep = XVECEXP (dep, 0, 0);
2641 /* For the AM34 a load instruction that follows a
2642 store instruction incurs an extra cycle of delay. */
2643 if (mn10300_tune_cpu == PROCESSOR_AM34
2644 && is_load_insn (dep)
2645 && is_store_insn (insn))
2648 /* For the AM34 a non-store, non-branch FPU insn that follows
2649 another FPU insn incurs a one cycle throughput increase. */
2650 else if (mn10300_tune_cpu == PROCESSOR_AM34
2651 && ! is_store_insn (insn)
2653 && GET_CODE (PATTERN (dep)) == SET
2654 && GET_CODE (PATTERN (insn)) == SET
2655 && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep)))) == MODE_FLOAT
2656 && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn)))) == MODE_FLOAT)
2659 /* Resolve the conflict described in section 1-7-4 of
2660 Chapter 3 of the MN103E Series Instruction Manual
2663 "When the preceeding instruction is a CPU load or
2664 store instruction, a following FPU instruction
2665 cannot be executed until the CPU completes the
2666 latency period even though there are no register
2667 or flag dependencies between them." */
2669 /* Only the AM33-2 (and later) CPUs have FPU instructions. */
2670 if (! TARGET_AM33_2)
2673 /* If a data dependence already exists then the cost is correct. */
2674 if (REG_NOTE_KIND (link) == 0)
2677 /* Check that the instruction about to scheduled is an FPU instruction. */
2678 if (GET_CODE (PATTERN (dep)) != SET)
2681 if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep)))) != MODE_FLOAT)
2684 /* Now check to see if the previous instruction is a load or store. */
2685 if (! is_load_insn (insn) && ! is_store_insn (insn))
2688 /* XXX: Verify: The text of 1-7-4 implies that the restriction
2689 only applies when an INTEGER load/store preceeds an FPU
2690 instruction, but is this true ? For now we assume that it is. */
2691 if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn)))) != MODE_INT)
2694 /* Extract the latency value from the timings attribute. */
2695 return timings < 100 ? (timings % 10) : (timings % 100);
2699 mn10300_conditional_register_usage (void)
2705 for (i = FIRST_EXTENDED_REGNUM;
2706 i <= LAST_EXTENDED_REGNUM; i++)
2707 fixed_regs[i] = call_used_regs[i] = 1;
2711 for (i = FIRST_FP_REGNUM;
2712 i <= LAST_FP_REGNUM; i++)
2713 fixed_regs[i] = call_used_regs[i] = 1;
2716 fixed_regs[PIC_OFFSET_TABLE_REGNUM] =
2717 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2720 /* Initialize the GCC target structure. */
2722 #undef TARGET_EXCEPT_UNWIND_INFO
2723 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
2725 #undef TARGET_ASM_ALIGNED_HI_OP
2726 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2728 #undef TARGET_LEGITIMIZE_ADDRESS
2729 #define TARGET_LEGITIMIZE_ADDRESS mn10300_legitimize_address
2731 #undef TARGET_ADDRESS_COST
2732 #define TARGET_ADDRESS_COST mn10300_address_cost
2733 #undef TARGET_REGISTER_MOVE_COST
2734 #define TARGET_REGISTER_MOVE_COST mn10300_register_move_cost
2735 #undef TARGET_MEMORY_MOVE_COST
2736 #define TARGET_MEMORY_MOVE_COST mn10300_memory_move_cost
2737 #undef TARGET_RTX_COSTS
2738 #define TARGET_RTX_COSTS mn10300_rtx_costs
2740 #undef TARGET_ASM_FILE_START
2741 #define TARGET_ASM_FILE_START mn10300_file_start
2742 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
2743 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
2745 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
2746 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA mn10300_asm_output_addr_const_extra
2748 #undef TARGET_DEFAULT_TARGET_FLAGS
2749 #define TARGET_DEFAULT_TARGET_FLAGS MASK_MULT_BUG | MASK_PTR_A0D0
2750 #undef TARGET_HANDLE_OPTION
2751 #define TARGET_HANDLE_OPTION mn10300_handle_option
2752 #undef TARGET_OPTION_OVERRIDE
2753 #define TARGET_OPTION_OVERRIDE mn10300_option_override
2754 #undef TARGET_OPTION_OPTIMIZATION_TABLE
2755 #define TARGET_OPTION_OPTIMIZATION_TABLE mn10300_option_optimization_table
2757 #undef TARGET_ENCODE_SECTION_INFO
2758 #define TARGET_ENCODE_SECTION_INFO mn10300_encode_section_info
2760 #undef TARGET_PROMOTE_PROTOTYPES
2761 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2762 #undef TARGET_RETURN_IN_MEMORY
2763 #define TARGET_RETURN_IN_MEMORY mn10300_return_in_memory
2764 #undef TARGET_PASS_BY_REFERENCE
2765 #define TARGET_PASS_BY_REFERENCE mn10300_pass_by_reference
2766 #undef TARGET_CALLEE_COPIES
2767 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
2768 #undef TARGET_ARG_PARTIAL_BYTES
2769 #define TARGET_ARG_PARTIAL_BYTES mn10300_arg_partial_bytes
2770 #undef TARGET_FUNCTION_ARG
2771 #define TARGET_FUNCTION_ARG mn10300_function_arg
2772 #undef TARGET_FUNCTION_ARG_ADVANCE
2773 #define TARGET_FUNCTION_ARG_ADVANCE mn10300_function_arg_advance
2775 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
2776 #define TARGET_EXPAND_BUILTIN_SAVEREGS mn10300_builtin_saveregs
2777 #undef TARGET_EXPAND_BUILTIN_VA_START
2778 #define TARGET_EXPAND_BUILTIN_VA_START mn10300_va_start
2780 #undef TARGET_CASE_VALUES_THRESHOLD
2781 #define TARGET_CASE_VALUES_THRESHOLD mn10300_case_values_threshold
2783 #undef TARGET_LEGITIMATE_ADDRESS_P
2784 #define TARGET_LEGITIMATE_ADDRESS_P mn10300_legitimate_address_p
2785 #undef TARGET_DELEGITIMIZE_ADDRESS
2786 #define TARGET_DELEGITIMIZE_ADDRESS mn10300_delegitimize_address
2788 #undef TARGET_PREFERRED_RELOAD_CLASS
2789 #define TARGET_PREFERRED_RELOAD_CLASS mn10300_preferred_reload_class
2790 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2791 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS mn10300_preferred_output_reload_class
2793 #undef TARGET_TRAMPOLINE_INIT
2794 #define TARGET_TRAMPOLINE_INIT mn10300_trampoline_init
2796 #undef TARGET_FUNCTION_VALUE
2797 #define TARGET_FUNCTION_VALUE mn10300_function_value
2798 #undef TARGET_LIBCALL_VALUE
2799 #define TARGET_LIBCALL_VALUE mn10300_libcall_value
2801 #undef TARGET_ASM_OUTPUT_MI_THUNK
2802 #define TARGET_ASM_OUTPUT_MI_THUNK mn10300_asm_output_mi_thunk
2803 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2804 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK mn10300_can_output_mi_thunk
2806 #undef TARGET_SCHED_ADJUST_COST
2807 #define TARGET_SCHED_ADJUST_COST mn10300_adjust_sched_cost
2809 #undef TARGET_CONDITIONAL_REGISTER_USAGE
2810 #define TARGET_CONDITIONAL_REGISTER_USAGE mn10300_conditional_register_usage
2812 struct gcc_target targetm = TARGET_INITIALIZER;