1 /* Subroutines for insn-output.c for Matsushita MN10300 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
41 #include "diagnostic-core.h"
45 #include "target-def.h"
48 /* This is used by GOTaddr2picreg to uniquely identify
   the labels it emits while loading the PIC register
   (presumably the UNSPEC_INT_LABELs in the machine description
   — TODO confirm against mn10300.md; the rest of this comment
   was lost in extraction).  */
50 int mn10300_unspec_int_label_counter;
52 /* This is used in the am33_2.0-linux-gnu port, in which global symbol
53 names are not prefixed by underscores, to tell whether to prefix a
54 label with a plus sign or not, so that the assembler can tell
55 symbol names from register names. */
56 int mn10300_protect_label;
58 /* The selected processor.  Updated from PROCESSOR_DEFAULT by
   mn10300_handle_option when the user selects a processor variant.  */
59 enum processor_type mn10300_processor = PROCESSOR_DEFAULT;
61 /* Raw argument of the -mtune= option, or NULL if none was given;
   parsed in mn10300_option_override.  */
62 static const char * mn10300_tune_string = NULL;
64 /* Selected processor type for tuning, derived from
   mn10300_tune_string (mn10300, am33, am33-2 or am34).  */
65 enum processor_type mn10300_tune_cpu = PROCESSOR_DEFAULT;
67 /* The size of the callee register save area. Right now we save everything
68 on entry since it costs us nothing in code size. It does cost us from a
69 speed standpoint, so we want to optimize this sooner or later. */
/* Four bytes each for the call-saved registers 2, 3, 6 and 7, plus a
   single 16-byte term for registers 14-17: the callee-saved extended
   registers are saved and restored as a group (see
   mn10300_print_reg_list, which requires bits 14-17 of the save mask
   to be all set or all clear), so if any one of them is live all four
   slots are allocated.  */
70 #define REG_SAVE_BYTES (4 * df_regs_ever_live_p (2) \
71 + 4 * df_regs_ever_live_p (3) \
72 + 4 * df_regs_ever_live_p (6) \
73 + 4 * df_regs_ever_live_p (7) \
74 + 16 * (df_regs_ever_live_p (14) \
75 || df_regs_ever_live_p (15) \
76 || df_regs_ever_live_p (16) \
77 || df_regs_ever_live_p (17)))
79 static int mn10300_address_cost (rtx, bool);
81 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
82 static const struct default_options mn10300_option_optimization_table[] =
84 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
85 { OPT_LEVELS_NONE, 0, NULL, 0 }
88 /* Implement TARGET_HANDLE_OPTION. */
91 mn10300_handle_option (size_t code,
92 const char *arg ATTRIBUTE_UNUSED,
98 mn10300_processor = value ? PROCESSOR_AM33 : PROCESSOR_MN10300;
102 mn10300_processor = (value
104 : MIN (PROCESSOR_AM33, PROCESSOR_DEFAULT));
108 mn10300_processor = (value ? PROCESSOR_AM34 : PROCESSOR_DEFAULT);
112 mn10300_tune_string = arg;
120 /* Implement TARGET_OPTION_OVERRIDE. */
123 mn10300_option_override (void)
126 target_flags &= ~MASK_MULT_BUG;
129 /* Disable scheduling for the MN10300 as we do
130 not have timing information available for it. */
131 flag_schedule_insns = 0;
132 flag_schedule_insns_after_reload = 0;
135 if (mn10300_tune_string)
137 if (strcasecmp (mn10300_tune_string, "mn10300") == 0)
138 mn10300_tune_cpu = PROCESSOR_MN10300;
139 else if (strcasecmp (mn10300_tune_string, "am33") == 0)
140 mn10300_tune_cpu = PROCESSOR_AM33;
141 else if (strcasecmp (mn10300_tune_string, "am33-2") == 0)
142 mn10300_tune_cpu = PROCESSOR_AM33_2;
143 else if (strcasecmp (mn10300_tune_string, "am34") == 0)
144 mn10300_tune_cpu = PROCESSOR_AM34;
146 error ("-mtune= expects mn10300, am33, am33-2, or am34");
151 mn10300_file_start (void)
153 default_file_start ();
156 fprintf (asm_out_file, "\t.am33_2\n");
157 else if (TARGET_AM33)
158 fprintf (asm_out_file, "\t.am33\n");
161 /* Print operand X using operand code CODE to assembly language output file
165 mn10300_print_operand (FILE *file, rtx x, int code)
171 if (GET_MODE (XEXP (x, 0)) == CC_FLOATmode)
173 switch (code == 'b' ? GET_CODE (x)
174 : reverse_condition_maybe_unordered (GET_CODE (x)))
177 fprintf (file, "ne");
180 fprintf (file, "eq");
183 fprintf (file, "ge");
186 fprintf (file, "gt");
189 fprintf (file, "le");
192 fprintf (file, "lt");
195 fprintf (file, "lge");
198 fprintf (file, "uo");
201 fprintf (file, "lg");
204 fprintf (file, "ue");
207 fprintf (file, "uge");
210 fprintf (file, "ug");
213 fprintf (file, "ule");
216 fprintf (file, "ul");
223 /* These are normal and reversed branches. */
224 switch (code == 'b' ? GET_CODE (x) : reverse_condition (GET_CODE (x)))
227 fprintf (file, "ne");
230 fprintf (file, "eq");
233 fprintf (file, "ge");
236 fprintf (file, "gt");
239 fprintf (file, "le");
242 fprintf (file, "lt");
245 fprintf (file, "cc");
248 fprintf (file, "hi");
251 fprintf (file, "ls");
254 fprintf (file, "cs");
261 /* This is used for the operand to a call instruction;
262 if it's a REG, enclose it in parens, else output
263 the operand normally. */
267 mn10300_print_operand (file, x, 0);
271 mn10300_print_operand (file, x, 0);
275 switch (GET_CODE (x))
279 output_address (XEXP (x, 0));
284 fprintf (file, "fd%d", REGNO (x) - 18);
292 /* These are the least significant word in a 64bit value. */
294 switch (GET_CODE (x))
298 output_address (XEXP (x, 0));
303 fprintf (file, "%s", reg_names[REGNO (x)]);
307 fprintf (file, "%s", reg_names[subreg_regno (x)]);
315 switch (GET_MODE (x))
318 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
319 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
320 fprintf (file, "0x%lx", val[0]);
323 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
324 REAL_VALUE_TO_TARGET_SINGLE (rv, val[0]);
325 fprintf (file, "0x%lx", val[0]);
329 mn10300_print_operand_address (file,
330 GEN_INT (CONST_DOUBLE_LOW (x)));
341 split_double (x, &low, &high);
342 fprintf (file, "%ld", (long)INTVAL (low));
351 /* Similarly, but for the most significant word. */
353 switch (GET_CODE (x))
357 x = adjust_address (x, SImode, 4);
358 output_address (XEXP (x, 0));
363 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
367 fprintf (file, "%s", reg_names[subreg_regno (x) + 1]);
375 switch (GET_MODE (x))
378 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
379 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
380 fprintf (file, "0x%lx", val[1]);
386 mn10300_print_operand_address (file,
387 GEN_INT (CONST_DOUBLE_HIGH (x)));
398 split_double (x, &low, &high);
399 fprintf (file, "%ld", (long)INTVAL (high));
410 if (REG_P (XEXP (x, 0)))
411 output_address (gen_rtx_PLUS (SImode, XEXP (x, 0), const0_rtx));
413 output_address (XEXP (x, 0));
418 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
419 fprintf (file, "%d", (int)((~INTVAL (x)) & 0xff));
423 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
424 fprintf (file, "%d", (int)(INTVAL (x) & 0xff));
427 /* For shift counts. The hardware ignores the upper bits of
428 any immediate, but the assembler will flag an out of range
429 shift count as an error. So we mask off the high bits
430 of the immediate here. */
434 fprintf (file, "%d", (int)(INTVAL (x) & 0x1f));
440 switch (GET_CODE (x))
444 output_address (XEXP (x, 0));
453 fprintf (file, "%s", reg_names[REGNO (x)]);
457 fprintf (file, "%s", reg_names[subreg_regno (x)]);
460 /* This will only be single precision.... */
466 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
467 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
468 fprintf (file, "0x%lx", val);
478 mn10300_print_operand_address (file, x);
487 /* Output assembly language output for the address ADDR to FILE. */
490 mn10300_print_operand_address (FILE *file, rtx addr)
492 switch (GET_CODE (addr))
495 mn10300_print_operand_address (file, XEXP (addr, 0));
499 mn10300_print_operand (file, addr, 0);
504 if (REG_P (XEXP (addr, 0))
505 && REG_OK_FOR_BASE_P (XEXP (addr, 0)))
506 base = XEXP (addr, 0), index = XEXP (addr, 1);
507 else if (REG_P (XEXP (addr, 1))
508 && REG_OK_FOR_BASE_P (XEXP (addr, 1)))
509 base = XEXP (addr, 1), index = XEXP (addr, 0);
512 mn10300_print_operand (file, index, 0);
514 mn10300_print_operand (file, base, 0);;
518 output_addr_const (file, addr);
521 output_addr_const (file, addr);
526 /* Count the number of FP registers that have to be saved. */
528 fp_regs_to_save (void)
535 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
536 if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
542 /* Print a set of registers in the format required by "movm" and "ret".
543 Register K is saved if bit K of MASK is set. The data and address
544 registers can be stored individually, but the extended registers cannot.
545 We assume that the mask already takes that into account. For instance,
546 bits 14 to 17 must have the same value. */
549 mn10300_print_reg_list (FILE *file, int mask)
557 for (i = 0; i < FIRST_EXTENDED_REGNUM; i++)
558 if ((mask & (1 << i)) != 0)
562 fputs (reg_names [i], file);
566 if ((mask & 0x3c000) != 0)
568 gcc_assert ((mask & 0x3c000) == 0x3c000);
571 fputs ("exreg1", file);
579 mn10300_can_use_return_insn (void)
581 /* size includes the fixed stack space needed for function calls. */
582 int size = get_frame_size () + crtl->outgoing_args_size;
584 /* And space for the return pointer. */
585 size += crtl->outgoing_args_size ? 4 : 0;
587 return (reload_completed
589 && !df_regs_ever_live_p (2)
590 && !df_regs_ever_live_p (3)
591 && !df_regs_ever_live_p (6)
592 && !df_regs_ever_live_p (7)
593 && !df_regs_ever_live_p (14)
594 && !df_regs_ever_live_p (15)
595 && !df_regs_ever_live_p (16)
596 && !df_regs_ever_live_p (17)
597 && fp_regs_to_save () == 0
598 && !frame_pointer_needed);
601 /* Returns the set of live, callee-saved registers as a bitmask. The
602 callee-saved extended registers cannot be stored individually, so
603 all of them will be included in the mask if any one of them is used. */
606 mn10300_get_live_callee_saved_regs (void)
612 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
613 if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
615 if ((mask & 0x3c000) != 0)
624 RTX_FRAME_RELATED_P (r) = 1;
628 /* Generate an instruction that pushes several registers onto the stack.
629 Register K will be saved if bit K in MASK is set. The function does
630 nothing if MASK is zero.
632 To be compatible with the "movm" instruction, the lowest-numbered
633 register must be stored in the lowest slot. If MASK is the set
634 { R1,...,RN }, where R1...RN are ordered least first, the generated
635 instruction will have the form:
638 (set (reg:SI 9) (plus:SI (reg:SI 9) (const_int -N*4)))
639 (set (mem:SI (plus:SI (reg:SI 9)
643 (set (mem:SI (plus:SI (reg:SI 9)
648 mn10300_gen_multiple_store (int mask)
657 /* Count how many registers need to be saved. */
659 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
660 if ((mask & (1 << i)) != 0)
663 /* We need one PARALLEL element to update the stack pointer and
664 an additional element for each register that is stored. */
665 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count + 1));
667 /* Create the instruction that updates the stack pointer. */
669 = F (gen_rtx_SET (SImode,
671 gen_rtx_PLUS (SImode,
673 GEN_INT (-count * 4))));
675 /* Create each store. */
677 for (i = LAST_EXTENDED_REGNUM; i >= 0; i--)
678 if ((mask & (1 << i)) != 0)
680 rtx address = gen_rtx_PLUS (SImode,
682 GEN_INT (-pari * 4));
683 XVECEXP(par, 0, pari)
684 = F (gen_rtx_SET (VOIDmode,
685 gen_rtx_MEM (SImode, address),
686 gen_rtx_REG (SImode, i)));
695 mn10300_expand_prologue (void)
699 /* SIZE includes the fixed stack space needed for function calls. */
700 size = get_frame_size () + crtl->outgoing_args_size;
701 size += (crtl->outgoing_args_size ? 4 : 0);
703 /* If we use any of the callee-saved registers, save them now. */
704 mn10300_gen_multiple_store (mn10300_get_live_callee_saved_regs ());
706 if (TARGET_AM33_2 && fp_regs_to_save ())
708 int num_regs_to_save = fp_regs_to_save (), i;
714 save_sp_partial_merge,
718 unsigned int strategy_size = (unsigned)-1, this_strategy_size;
721 /* We have several different strategies to save FP registers.
722 We can store them using SP offsets, which is beneficial if
723 there are just a few registers to save, or we can use `a0' in
724 post-increment mode (`a0' is the only call-clobbered address
725 register that is never used to pass information to a
726 function). Furthermore, if we don't need a frame pointer, we
727 can merge the two SP adds into a single one, but this isn't
728 always beneficial; sometimes we can just split the two adds
729 so that we don't exceed a 16-bit constant size. The code
730 below will select which strategy to use, so as to generate
731 smallest code. Ties are broken in favor of shorter sequences
732 (in terms of number of instructions). */
734 #define SIZE_ADD_AX(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
735 : (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 2)
736 #define SIZE_ADD_SP(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
737 : (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 3)
738 #define SIZE_FMOV_LIMIT(S,N,L,SIZE1,SIZE2,ELSE) \
739 (((S) >= (L)) ? (SIZE1) * (N) \
740 : ((S) + 4 * (N) >= (L)) ? (((L) - (S)) / 4 * (SIZE2) \
741 + ((S) + 4 * (N) - (L)) / 4 * (SIZE1)) \
743 #define SIZE_FMOV_SP_(S,N) \
744 (SIZE_FMOV_LIMIT ((S), (N), (1 << 24), 7, 6, \
745 SIZE_FMOV_LIMIT ((S), (N), (1 << 8), 6, 4, \
746 (S) ? 4 * (N) : 3 + 4 * ((N) - 1))))
747 #define SIZE_FMOV_SP(S,N) (SIZE_FMOV_SP_ ((unsigned HOST_WIDE_INT)(S), (N)))
749 /* Consider alternative save_sp_merge only if we don't need the
750 frame pointer and size is nonzero. */
751 if (! frame_pointer_needed && size)
753 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
754 this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
755 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
756 this_strategy_size += SIZE_FMOV_SP (size, num_regs_to_save);
758 if (this_strategy_size < strategy_size)
760 strategy = save_sp_merge;
761 strategy_size = this_strategy_size;
765 /* Consider alternative save_sp_no_merge unconditionally. */
766 /* Insn: add -4 * num_regs_to_save, sp. */
767 this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
768 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
769 this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
772 /* Insn: add -size, sp. */
773 this_strategy_size += SIZE_ADD_SP (-size);
776 if (this_strategy_size < strategy_size)
778 strategy = save_sp_no_merge;
779 strategy_size = this_strategy_size;
782 /* Consider alternative save_sp_partial_merge only if we don't
783 need a frame pointer and size is reasonably large. */
784 if (! frame_pointer_needed && size + 4 * num_regs_to_save > 128)
786 /* Insn: add -128, sp. */
787 this_strategy_size = SIZE_ADD_SP (-128);
788 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
789 this_strategy_size += SIZE_FMOV_SP (128 - 4 * num_regs_to_save,
793 /* Insn: add 128-size, sp. */
794 this_strategy_size += SIZE_ADD_SP (128 - size);
797 if (this_strategy_size < strategy_size)
799 strategy = save_sp_partial_merge;
800 strategy_size = this_strategy_size;
804 /* Consider alternative save_a0_merge only if we don't need a
805 frame pointer, size is nonzero and the user hasn't
806 changed the calling conventions of a0. */
807 if (! frame_pointer_needed && size
808 && call_really_used_regs [FIRST_ADDRESS_REGNUM]
809 && ! fixed_regs[FIRST_ADDRESS_REGNUM])
811 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
812 this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
813 /* Insn: mov sp, a0. */
814 this_strategy_size++;
817 /* Insn: add size, a0. */
818 this_strategy_size += SIZE_ADD_AX (size);
820 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
821 this_strategy_size += 3 * num_regs_to_save;
823 if (this_strategy_size < strategy_size)
825 strategy = save_a0_merge;
826 strategy_size = this_strategy_size;
830 /* Consider alternative save_a0_no_merge if the user hasn't
831 changed the calling conventions of a0. */
832 if (call_really_used_regs [FIRST_ADDRESS_REGNUM]
833 && ! fixed_regs[FIRST_ADDRESS_REGNUM])
835 /* Insn: add -4 * num_regs_to_save, sp. */
836 this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
837 /* Insn: mov sp, a0. */
838 this_strategy_size++;
839 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
840 this_strategy_size += 3 * num_regs_to_save;
843 /* Insn: add -size, sp. */
844 this_strategy_size += SIZE_ADD_SP (-size);
847 if (this_strategy_size < strategy_size)
849 strategy = save_a0_no_merge;
850 strategy_size = this_strategy_size;
854 /* Emit the initial SP add, common to all strategies. */
857 case save_sp_no_merge:
858 case save_a0_no_merge:
859 F (emit_insn (gen_addsi3 (stack_pointer_rtx,
861 GEN_INT (-4 * num_regs_to_save))));
865 case save_sp_partial_merge:
866 F (emit_insn (gen_addsi3 (stack_pointer_rtx,
869 xsize = 128 - 4 * num_regs_to_save;
875 F (emit_insn (gen_addsi3 (stack_pointer_rtx,
877 GEN_INT (-(size + 4 * num_regs_to_save)))));
878 /* We'll have to adjust FP register saves according to the
881 /* Since we've already created the stack frame, don't do it
882 again at the end of the function. */
890 /* Now prepare register a0, if we have decided to use it. */
894 case save_sp_no_merge:
895 case save_sp_partial_merge:
900 case save_a0_no_merge:
901 reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM);
902 F (emit_insn (gen_movsi (reg, stack_pointer_rtx)));
904 F (emit_insn (gen_addsi3 (reg, reg, GEN_INT (xsize))));
905 reg = gen_rtx_POST_INC (SImode, reg);
912 /* Now actually save the FP registers. */
913 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
914 if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
922 /* If we aren't using `a0', use an SP offset. */
925 addr = gen_rtx_PLUS (SImode,
930 addr = stack_pointer_rtx;
935 F (emit_insn (gen_movsf (gen_rtx_MEM (SFmode, addr),
936 gen_rtx_REG (SFmode, i))));
940 /* Now put the frame pointer into the frame pointer register. */
941 if (frame_pointer_needed)
942 F (emit_move_insn (frame_pointer_rtx, stack_pointer_rtx));
944 /* Allocate stack for this frame. */
946 F (emit_insn (gen_addsi3 (stack_pointer_rtx,
950 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
951 emit_insn (gen_GOTaddr2picreg ());
955 mn10300_expand_epilogue (void)
959 /* SIZE includes the fixed stack space needed for function calls. */
960 size = get_frame_size () + crtl->outgoing_args_size;
961 size += (crtl->outgoing_args_size ? 4 : 0);
963 if (TARGET_AM33_2 && fp_regs_to_save ())
965 int num_regs_to_save = fp_regs_to_save (), i;
968 /* We have several options to restore FP registers. We could
969 load them from SP offsets, but, if there are enough FP
970 registers to restore, we win if we use a post-increment
973 /* If we have a frame pointer, it's the best option, because we
974 already know it has the value we want. */
975 if (frame_pointer_needed)
976 reg = gen_rtx_REG (SImode, FRAME_POINTER_REGNUM);
977 /* Otherwise, we may use `a1', since it's call-clobbered and
978 it's never used for return values. But only do so if it's
979 smaller than using SP offsets. */
982 enum { restore_sp_post_adjust,
983 restore_sp_pre_adjust,
984 restore_sp_partial_adjust,
985 restore_a1 } strategy;
986 unsigned int this_strategy_size, strategy_size = (unsigned)-1;
988 /* Consider using sp offsets before adjusting sp. */
989 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
990 this_strategy_size = SIZE_FMOV_SP (size, num_regs_to_save);
991 /* If size is too large, we'll have to adjust SP with an
993 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
995 /* Insn: add size + 4 * num_regs_to_save, sp. */
996 this_strategy_size += SIZE_ADD_SP (size + 4 * num_regs_to_save);
998 /* If we don't have to restore any non-FP registers,
999 we'll be able to save one byte by using rets. */
1000 if (! REG_SAVE_BYTES)
1001 this_strategy_size--;
1003 if (this_strategy_size < strategy_size)
1005 strategy = restore_sp_post_adjust;
1006 strategy_size = this_strategy_size;
1009 /* Consider using sp offsets after adjusting sp. */
1010 /* Insn: add size, sp. */
1011 this_strategy_size = SIZE_ADD_SP (size);
1012 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1013 this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
1014 /* We're going to use ret to release the FP registers
1015 save area, so, no savings. */
1017 if (this_strategy_size < strategy_size)
1019 strategy = restore_sp_pre_adjust;
1020 strategy_size = this_strategy_size;
1023 /* Consider using sp offsets after partially adjusting sp.
1024 When size is close to 32Kb, we may be able to adjust SP
1025 with an imm16 add instruction while still using fmov
1027 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1029 /* Insn: add size + 4 * num_regs_to_save
1030 + REG_SAVE_BYTES - 252,sp. */
1031 this_strategy_size = SIZE_ADD_SP (size + 4 * num_regs_to_save
1032 + REG_SAVE_BYTES - 252);
1033 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1034 this_strategy_size += SIZE_FMOV_SP (252 - REG_SAVE_BYTES
1035 - 4 * num_regs_to_save,
1037 /* We're going to use ret to release the FP registers
1038 save area, so, no savings. */
1040 if (this_strategy_size < strategy_size)
1042 strategy = restore_sp_partial_adjust;
1043 strategy_size = this_strategy_size;
1047 /* Consider using a1 in post-increment mode, as long as the
1048 user hasn't changed the calling conventions of a1. */
1049 if (call_really_used_regs [FIRST_ADDRESS_REGNUM + 1]
1050 && ! fixed_regs[FIRST_ADDRESS_REGNUM+1])
1052 /* Insn: mov sp,a1. */
1053 this_strategy_size = 1;
1056 /* Insn: add size,a1. */
1057 this_strategy_size += SIZE_ADD_AX (size);
1059 /* Insn: fmov (a1+),fs#, for each fs# to be restored. */
1060 this_strategy_size += 3 * num_regs_to_save;
1061 /* If size is large enough, we may be able to save a
1063 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1065 /* Insn: mov a1,sp. */
1066 this_strategy_size += 2;
1068 /* If we don't have to restore any non-FP registers,
1069 we'll be able to save one byte by using rets. */
1070 if (! REG_SAVE_BYTES)
1071 this_strategy_size--;
1073 if (this_strategy_size < strategy_size)
1075 strategy = restore_a1;
1076 strategy_size = this_strategy_size;
1082 case restore_sp_post_adjust:
1085 case restore_sp_pre_adjust:
1086 emit_insn (gen_addsi3 (stack_pointer_rtx,
1092 case restore_sp_partial_adjust:
1093 emit_insn (gen_addsi3 (stack_pointer_rtx,
1095 GEN_INT (size + 4 * num_regs_to_save
1096 + REG_SAVE_BYTES - 252)));
1097 size = 252 - REG_SAVE_BYTES - 4 * num_regs_to_save;
1101 reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM + 1);
1102 emit_insn (gen_movsi (reg, stack_pointer_rtx));
1104 emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
1112 /* Adjust the selected register, if any, for post-increment. */
1114 reg = gen_rtx_POST_INC (SImode, reg);
1116 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
1117 if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
1125 /* If we aren't using a post-increment register, use an
1127 addr = gen_rtx_PLUS (SImode,
1132 addr = stack_pointer_rtx;
1136 emit_insn (gen_movsf (gen_rtx_REG (SFmode, i),
1137 gen_rtx_MEM (SFmode, addr)));
1140 /* If we were using the restore_a1 strategy and the number of
1141 bytes to be released won't fit in the `ret' byte, copy `a1'
1142 to `sp', to avoid having to use `add' to adjust it. */
1143 if (! frame_pointer_needed && reg && size + REG_SAVE_BYTES > 255)
1145 emit_move_insn (stack_pointer_rtx, XEXP (reg, 0));
1150 /* Maybe cut back the stack, except for the register save area.
1152 If the frame pointer exists, then use the frame pointer to
1155 If the stack size + register save area is more than 255 bytes,
1156 then the stack must be cut back here since the size + register
1157 save size is too big for a ret/retf instruction.
1159 Else leave it alone, it will be cut back as part of the
1160 ret/retf instruction, or there wasn't any stack to begin with.
1162 Under no circumstances should the register save area be
1163 deallocated here, that would leave a window where an interrupt
1164 could occur and trash the register save area. */
1165 if (frame_pointer_needed)
1167 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1170 else if (size + REG_SAVE_BYTES > 255)
1172 emit_insn (gen_addsi3 (stack_pointer_rtx,
1178 /* Adjust the stack and restore callee-saved registers, if any. */
1179 if (size || df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1180 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1181 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1182 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1183 || frame_pointer_needed)
1184 emit_jump_insn (gen_return_internal_regs
1185 (GEN_INT (size + REG_SAVE_BYTES)));
1187 emit_jump_insn (gen_return_internal ());
1190 /* Recognize the PARALLEL rtx generated by mn10300_gen_multiple_store().
1191 This function is for MATCH_PARALLEL and so assumes OP is known to be
1192 parallel. If OP is a multiple store, return a mask indicating which
1193 registers it saves. Return 0 otherwise. */
1196 mn10300_store_multiple_operation (rtx op,
1197 enum machine_mode mode ATTRIBUTE_UNUSED)
1205 count = XVECLEN (op, 0);
1209 /* Check that first instruction has the form (set (sp) (plus A B)) */
1210 elt = XVECEXP (op, 0, 0);
1211 if (GET_CODE (elt) != SET
1212 || (! REG_P (SET_DEST (elt)))
1213 || REGNO (SET_DEST (elt)) != STACK_POINTER_REGNUM
1214 || GET_CODE (SET_SRC (elt)) != PLUS)
1217 /* Check that A is the stack pointer and B is the expected stack size.
1218 For OP to match, each subsequent instruction should push a word onto
1219 the stack. We therefore expect the first instruction to create
1220 COUNT-1 stack slots. */
1221 elt = SET_SRC (elt);
1222 if ((! REG_P (XEXP (elt, 0)))
1223 || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
1224 || (! CONST_INT_P (XEXP (elt, 1)))
1225 || INTVAL (XEXP (elt, 1)) != -(count - 1) * 4)
1228 /* Now go through the rest of the vector elements. They must be
1229 ordered so that the first instruction stores the highest-numbered
1230 register to the highest stack slot and that subsequent instructions
1231 store a lower-numbered register to the slot below.
1233 LAST keeps track of the smallest-numbered register stored so far.
1234 MASK is the set of stored registers. */
1235 last = LAST_EXTENDED_REGNUM + 1;
1237 for (i = 1; i < count; i++)
1239 /* Check that element i is a (set (mem M) R) and that R is valid. */
1240 elt = XVECEXP (op, 0, i);
1241 if (GET_CODE (elt) != SET
1242 || (! MEM_P (SET_DEST (elt)))
1243 || (! REG_P (SET_SRC (elt)))
1244 || REGNO (SET_SRC (elt)) >= last)
1247 /* R was OK, so provisionally add it to MASK. We return 0 in any
1248 case if the rest of the instruction has a flaw. */
1249 last = REGNO (SET_SRC (elt));
1250 mask |= (1 << last);
1252 /* Check that M has the form (plus (sp) (const_int -I*4)) */
1253 elt = XEXP (SET_DEST (elt), 0);
1254 if (GET_CODE (elt) != PLUS
1255 || (! REG_P (XEXP (elt, 0)))
1256 || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
1257 || (! CONST_INT_P (XEXP (elt, 1)))
1258 || INTVAL (XEXP (elt, 1)) != -i * 4)
1262 /* All or none of the callee-saved extended registers must be in the set. */
1263 if ((mask & 0x3c000) != 0
1264 && (mask & 0x3c000) != 0x3c000)
1270 /* What (if any) secondary registers are needed to move IN with mode
1271 MODE into a register in register class RCLASS.
1273 We might be able to simplify this. */
1276 mn10300_secondary_reload_class (enum reg_class rclass, enum machine_mode mode,
1281 /* Strip off any SUBREG expressions from IN. Basically we want
1282 to know if IN is a pseudo or (subreg (pseudo)) as those can
1283 turn into MEMs during reload. */
1284 while (GET_CODE (inner) == SUBREG)
1285 inner = SUBREG_REG (inner);
1287 /* Memory loads less than a full word wide can't have an
1288 address or stack pointer destination. They must use
1289 a data register as an intermediate register. */
1292 && REGNO (inner) >= FIRST_PSEUDO_REGISTER))
1293 && (mode == QImode || mode == HImode)
1294 && (rclass == ADDRESS_REGS || rclass == SP_REGS
1295 || rclass == SP_OR_ADDRESS_REGS))
1298 return DATA_OR_EXTENDED_REGS;
1302 /* We can't directly load sp + const_int into a data register;
1303 we must use an address register as an intermediate. */
1304 if (rclass != SP_REGS
1305 && rclass != ADDRESS_REGS
1306 && rclass != SP_OR_ADDRESS_REGS
1307 && rclass != SP_OR_EXTENDED_REGS
1308 && rclass != ADDRESS_OR_EXTENDED_REGS
1309 && rclass != SP_OR_ADDRESS_OR_EXTENDED_REGS
1310 && (in == stack_pointer_rtx
1311 || (GET_CODE (in) == PLUS
1312 && (XEXP (in, 0) == stack_pointer_rtx
1313 || XEXP (in, 1) == stack_pointer_rtx))))
1314 return ADDRESS_REGS;
1317 && rclass == FP_REGS)
1319 /* We can't load directly into an FP register from a
1320 constant address. */
1322 && CONSTANT_ADDRESS_P (XEXP (in, 0)))
1323 return DATA_OR_EXTENDED_REGS;
1325 /* Handle the case where a pseudo may not get a hard register
1326 but has an equivalent memory location defined. */
1328 && REGNO (inner) >= FIRST_PSEUDO_REGISTER
1329 && reg_equiv_mem [REGNO (inner)]
1330 && CONSTANT_ADDRESS_P (XEXP (reg_equiv_mem [REGNO (inner)], 0)))
1331 return DATA_OR_EXTENDED_REGS;
1334 /* Otherwise assume no secondary reloads are needed. */
1339 mn10300_initial_offset (int from, int to)
1341 /* The difference between the argument pointer and the frame pointer
1342 is the size of the callee register save area. */
1343 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
1345 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1346 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1347 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1348 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1349 || fp_regs_to_save ()
1350 || frame_pointer_needed)
1351 return REG_SAVE_BYTES
1352 + 4 * fp_regs_to_save ();
1357 /* The difference between the argument pointer and the stack pointer is
1358 the sum of the size of this function's frame, the callee register save
1359 area, and the fixed stack space needed for function calls (if any). */
1360 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1362 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1363 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1364 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1365 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1366 || fp_regs_to_save ()
1367 || frame_pointer_needed)
1368 return (get_frame_size () + REG_SAVE_BYTES
1369 + 4 * fp_regs_to_save ()
1370 + (crtl->outgoing_args_size
1371 ? crtl->outgoing_args_size + 4 : 0));
1373 return (get_frame_size ()
1374 + (crtl->outgoing_args_size
1375 ? crtl->outgoing_args_size + 4 : 0));
1378 /* The difference between the frame pointer and stack pointer is the sum
1379 of the size of this function's frame and the fixed stack space needed
1380 for function calls (if any). */
1381 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1382 return (get_frame_size ()
1383 + (crtl->outgoing_args_size
1384 ? crtl->outgoing_args_size + 4 : 0));
1389 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1392 mn10300_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1394 /* Return values > 8 bytes in length in memory. */
1395 return (int_size_in_bytes (type) > 8
1396 || int_size_in_bytes (type) == 0
1397 || TYPE_MODE (type) == BLKmode);
1400 /* Flush the argument registers to the stack for a stdarg function;
1401 return the new argument pointer. */
/* TARGET_EXPAND_BUILTIN_SAVEREGS worker.  Spills the two argument
   registers (d0 and d1, hard regs 0 and 1) into the incoming-argument
   area so va_arg can walk all arguments in memory, then returns the
   adjusted argument pointer.  */
1403 mn10300_builtin_saveregs (void)
1406 tree fntype = TREE_TYPE (current_function_decl);
/* For a non-stdarg (unprototyped varargs) function back up one word so
   the unnamed arguments are found where the caller pushed them.  */
1407 int argadj = ((!stdarg_p (fntype))
1408 ? UNITS_PER_WORD : 0);
1409 alias_set_type set = get_varargs_alias_set ();
/* NOTE(review): the condition selecting between these two assignments
   is on an elided line -- presumably "if (argadj)"; confirm against the
   full source.  */
1412 offset = plus_constant (crtl->args.arg_offset_rtx, argadj);
1414 offset = crtl->args.arg_offset_rtx;
/* Store d0 at the base of the incoming argument area ... */
1416 mem = gen_rtx_MEM (SImode, crtl->args.internal_arg_pointer);
1417 set_mem_alias_set (mem, set);
1418 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
/* ... and d1 in the following word.  Both stores use the varargs alias
   set so they do not conflict with ordinary memory references.  */
1420 mem = gen_rtx_MEM (SImode,
1421 plus_constant (crtl->args.internal_arg_pointer, 4));
1422 set_mem_alias_set (mem, set);
1423 emit_move_insn (mem, gen_rtx_REG (SImode, 1));
/* Return internal_arg_pointer + offset as the new argument pointer.  */
1425 return copy_to_reg (expand_binop (Pmode, add_optab,
1426 crtl->args.internal_arg_pointer,
1427 offset, 0, 0, OPTAB_LIB_WIDEN));
/* TARGET_EXPAND_BUILTIN_VA_START worker: flush the argument registers
   first, then fall back on the generic va_start expansion.  */
1431 mn10300_va_start (tree valist, rtx nextarg)
1433 nextarg = expand_builtin_saveregs ();
1434 std_expand_builtin_va_start (valist, nextarg);
1437 /* Return true when a parameter should be passed by reference. */
/* TARGET_PASS_BY_REFERENCE worker: anything wider than 8 bytes or of
   zero/unknown size goes by reference -- mirrors the return-in-memory
   rule above.  NOTE(review): the "if (type)" guard choosing between the
   two size computations below is on an elided line.  */
1440 mn10300_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1441 enum machine_mode mode, const_tree type,
1442 bool named ATTRIBUTE_UNUSED)
1444 unsigned HOST_WIDE_INT size;
1447 size = int_size_in_bytes (type);
1449 size = GET_MODE_SIZE (mode);
1451 return (size > 8 || size == 0);
1454 /* Return an RTX to represent where a value with mode MODE will be returned
1455 from a function. If the result is NULL_RTX, the argument is pushed. */
/* TARGET_FUNCTION_ARG worker.  The first two word-sized slots are
   passed in registers starting at FIRST_ARGUMENT_REGNUM; everything
   else (and anything that would straddle the register/stack boundary)
   is pushed.  NOTE(review): the declarations of "size" and "nregs" are
   on elided lines.  */
1458 mn10300_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1459 const_tree type, bool named ATTRIBUTE_UNUSED)
1461 rtx result = NULL_RTX;
1464 /* We only support using 2 data registers as argument registers. */
1467 /* Figure out the size of the object to be passed. */
1468 if (mode == BLKmode)
1469 size = int_size_in_bytes (type);
1471 size = GET_MODE_SIZE (mode);
1473 /* Figure out the alignment of the object to be passed. */
/* Round the running byte count up to a word boundary before deciding
   which slot this argument occupies.  */
1476 cum->nbytes = (cum->nbytes + 3) & ~3;
1478 /* Don't pass this arg via a register if all the argument registers
1480 if (cum->nbytes > nregs * UNITS_PER_WORD)
1483 /* Don't pass this arg via a register if it would be split between
1484 registers and memory. */
1485 if (type == NULL_TREE
1486 && cum->nbytes + size > nregs * UNITS_PER_WORD)
/* Select d0 or d1 according to which word slot the argument starts in.  */
1489 switch (cum->nbytes / UNITS_PER_WORD)
1492 result = gen_rtx_REG (mode, FIRST_ARGUMENT_REGNUM);
1495 result = gen_rtx_REG (mode, FIRST_ARGUMENT_REGNUM + 1);
1504 /* Update the data in CUM to advance over an argument
1505 of mode MODE and data type TYPE.
1506 (TYPE is null for libcalls where that information may not be available.) */
/* TARGET_FUNCTION_ARG_ADVANCE worker: bump the cumulative byte count by
   the argument's size rounded up to a whole word.  */
1509 mn10300_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1510 const_tree type, bool named ATTRIBUTE_UNUSED)
1512 cum->nbytes += (mode != BLKmode
1513 ? (GET_MODE_SIZE (mode) + 3) & ~3
1514 : (int_size_in_bytes (type) + 3) & ~3);
1517 /* Return the number of bytes of registers to use for an argument passed
1518 partially in registers and partially in memory. */
/* TARGET_ARG_PARTIAL_BYTES worker.  Returns 0 (via elided early
   returns) when the argument is wholly in registers or wholly on the
   stack; otherwise the remaining register bytes.  Deliberately mirrors
   the bookkeeping in mn10300_function_arg above -- keep the two in
   sync.  */
1521 mn10300_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1522 tree type, bool named ATTRIBUTE_UNUSED)
1526 /* We only support using 2 data registers as argument registers. */
1529 /* Figure out the size of the object to be passed. */
1530 if (mode == BLKmode)
1531 size = int_size_in_bytes (type);
1533 size = GET_MODE_SIZE (mode);
1535 /* Figure out the alignment of the object to be passed. */
1538 cum->nbytes = (cum->nbytes + 3) & ~3;
1540 /* Don't pass this arg via a register if all the argument registers
1542 if (cum->nbytes > nregs * UNITS_PER_WORD)
1545 if (cum->nbytes + size <= nregs * UNITS_PER_WORD)
1548 /* Don't pass this arg via a register if it would be split between
1549 registers and memory. */
1550 if (type == NULL_TREE
1551 && cum->nbytes + size > nregs * UNITS_PER_WORD)
/* The argument straddles the boundary: this many bytes go in regs.  */
1554 return nregs * UNITS_PER_WORD - cum->nbytes;
1557 /* Return the location of the function's value. This will be either
1558 $d0 for integer functions, $a0 for pointers, or a PARALLEL of both
1559 $d0 and $a0 if the -mreturn-pointer-on-do flag is set. Note that
1560 we only return the PARALLEL for outgoing values; we do not want
1561 callers relying on this extra copy. */
/* TARGET_FUNCTION_VALUE worker.  NOTE(review): the declaration of "rv"
   and the trailing return of the PARALLEL are on elided lines.  */
1564 mn10300_function_value (const_tree valtype,
1565 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1569 enum machine_mode mode = TYPE_MODE (valtype);
/* Non-pointers always come back in $d0.  */
1571 if (! POINTER_TYPE_P (valtype))
1572 return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
/* Pointers come back in $a0 alone unless we are emitting the outgoing
   copy for -mreturn-pointer-on-d0 (TARGET_PTR_A0D0).  */
1573 else if (! TARGET_PTR_A0D0 || ! outgoing
1574 || cfun->returns_struct)
1575 return gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM);
/* Outgoing pointer under TARGET_PTR_A0D0: duplicate the value into
   both $a0 and $d0 via a PARALLEL.  */
1577 rv = gen_rtx_PARALLEL (mode, rtvec_alloc (2));
1579 = gen_rtx_EXPR_LIST (VOIDmode,
1580 gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM),
1584 = gen_rtx_EXPR_LIST (VOIDmode,
1585 gen_rtx_REG (mode, FIRST_DATA_REGNUM),
1590 /* Implements TARGET_LIBCALL_VALUE. */
/* Library calls always return in $d0, regardless of pointerness.  */
1593 mn10300_libcall_value (enum machine_mode mode,
1594 const_rtx fun ATTRIBUTE_UNUSED)
1596 return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
1599 /* Implements FUNCTION_VALUE_REGNO_P. */
/* Only $d0 and $a0 can carry a function result.  */
1602 mn10300_function_value_regno_p (const unsigned int regno)
1604 return (regno == FIRST_DATA_REGNUM || regno == FIRST_ADDRESS_REGNUM);
1607 /* Output a compare insn. */
/* Emit the assembly for a compare of OPERAND against zero (or another
   value -- the default output path is on elided lines).  Scans backward
   from INSN looking for a register already known to hold zero so a
   shorter "cmp reg,reg" encoding can be used instead of an immediate
   compare.  NOTE(review): the declarations of "temp", "set" and
   "xoperands", the loop-termination returns and the fallback output are
   all on elided lines.  */
1610 mn10300_output_cmp (rtx operand, rtx insn)
1615 /* We can save a byte if we can find a register which has the value
1617 temp = PREV_INSN (insn);
/* Only worth scanning when optimizing; stop at the start of the
   instruction stream.  */
1618 while (optimize && temp)
1622 /* We allow the search to go through call insns. We record
1623 the fact that we've passed a CALL_INSN and reject matches which
1624 use call clobbered registers. */
1627 || GET_CODE (temp) == BARRIER)
/* Notes carry no machine state -- skip them.  */
1633 if (GET_CODE (temp) == NOTE)
1635 temp = PREV_INSN (temp);
1639 /* It must be an insn, see if it is a simple set. */
1640 set = single_set (temp);
1643 temp = PREV_INSN (temp);
1647 /* Are we setting a data register to zero (this does not win for
1650 If it's a call clobbered register, have we passed a call?
1652 Make sure the register we find isn't the same as ourself;
1653 the mn10300 can't encode that.
1655 ??? reg_set_between_p returns nonzero anytime we pass a CALL_INSN
1656 so the code to detect calls here isn't doing anything useful. */
/* First match: a zero-holding register of the SAME class as OPERAND
   (but not an extended register), still live up to INSN.  */
1657 if (REG_P (SET_DEST (set))
1658 && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
1659 && !reg_set_between_p (SET_DEST (set), temp, insn)
1660 && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
1661 == REGNO_REG_CLASS (REGNO (operand)))
1662 && REGNO_REG_CLASS (REGNO (SET_DEST (set))) != EXTENDED_REGS
1663 && REGNO (SET_DEST (set)) != REGNO (operand)
1665 || ! call_really_used_regs [REGNO (SET_DEST (set))]))
1668 xoperands[0] = operand;
1669 xoperands[1] = SET_DEST (set);
1671 output_asm_insn ("cmp %1,%0", xoperands);
/* Second match: when OPERAND itself is an extended register, accept a
   zero-holding register of a DIFFERENT class that is extended.  */
1675 if (REGNO_REG_CLASS (REGNO (operand)) == EXTENDED_REGS
1676 && REG_P (SET_DEST (set))
1677 && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
1678 && !reg_set_between_p (SET_DEST (set), temp, insn)
1679 && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
1680 != REGNO_REG_CLASS (REGNO (operand)))
1681 && REGNO_REG_CLASS (REGNO (SET_DEST (set))) == EXTENDED_REGS
1682 && REGNO (SET_DEST (set)) != REGNO (operand)
1684 || ! call_really_used_regs [REGNO (SET_DEST (set))]))
1687 xoperands[0] = operand;
1688 xoperands[1] = SET_DEST (set);
1690 output_asm_insn ("cmp %1,%0", xoperands);
1693 temp = PREV_INSN (temp);
1698 /* Similarly, but when using a zero_extract pattern for a btst where
1699 the source operand might end up in memory. */
/* Returns nonzero when the (LEN,BIT) zero_extract mask fits entirely in
   one byte of a 32-bit word, so a memory btst can be used.  The loop
   that builds MASK from LEN/BIT is on elided lines.  */
1701 mn10300_mask_ok_for_mem_btst (int len, int bit)
1703 unsigned int mask = 0;
1712 /* MASK must fit into an 8bit value. */
1713 return (((mask & 0xff) == mask)
1714 || ((mask & 0xff00) == mask)
1715 || ((mask & 0xff0000) == mask)
1716 || ((mask & 0xff000000) == mask));
1719 /* Return 1 if X contains a symbolic expression. We know these
1720 expressions will have one of a few well defined forms, so
1721 we need only check those forms. */
/* Accepts SYMBOL_REF/LABEL_REF (cases on elided lines) and
   CONST (symbol-or-label + constant-integer) forms.  */
1724 mn10300_symbolic_operand (rtx op,
1725 enum machine_mode mode ATTRIBUTE_UNUSED)
1727 switch (GET_CODE (op))
1734 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1735 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1736 && CONST_INT_P (XEXP (op, 1)));
1742 /* Try machine dependent ways of modifying an illegitimate address
1743 to be legitimate. If we find one, return the new valid address.
1744 This macro is used in only one place: `memory_address' in explow.c.
1746 OLDX is the address as it was before break_out_memory_refs was called.
1747 In some cases it is useful to look at this to decide what needs to be done.
1749 Normally it is always safe for this macro to do nothing. It exists to
1750 recognize opportunities to optimize the output.
1752 But on a few ports with segmented architectures and indexed addressing
1753 (mn10300, hppa) it is used to rewrite certain problematical addresses. */
1756 mn10300_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
1757 enum machine_mode mode ATTRIBUTE_UNUSED)
/* NOTE(review): the PIC path legitimizes OLDX rather than X -- looks
   intentional per the OLDX commentary above, but worth confirming
   against the upstream source.  */
1759 if (flag_pic && ! mn10300_legitimate_pic_operand_p (x))
1760 x = mn10300_legitimize_pic_address (oldx, NULL_RTX);
1762 /* Uh-oh. We might have an address for x[n-100000]. This needs
1763 special handling to avoid creating an indexed memory address
1764 with x-100000 as the base. */
1765 if (GET_CODE (x) == PLUS
1766 && mn10300_symbolic_operand (XEXP (x, 1), VOIDmode))
1768 /* Ugly. We modify things here so that the address offset specified
1769 by the index expression is computed first, then added to x to form
1770 the entire address. */
1772 rtx regx1, regy1, regy2, y;
1774 /* Strip off any CONST. */
/* NOTE(review): the assignment initializing Y (presumably from
   XEXP (x, 1)) is on an elided line.  */
1776 if (GET_CODE (y) == CONST)
1779 if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
/* Force base and both halves of the symbolic offset into registers,
   combine the offset parts first, then add the base last.  */
1781 regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
1782 regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
1783 regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
1784 regx1 = force_reg (Pmode,
1785 gen_rtx_fmt_ee (GET_CODE (y), Pmode, regx1,
1787 return force_reg (Pmode, gen_rtx_PLUS (Pmode, regx1, regy1));
1793 /* Convert a non-PIC address in `orig' to a PIC address using @GOT or
1794 @GOTOFF in `reg'. */
/* Local symbols and labels use the cheaper @GOTOFF form; global
   SYMBOL_REFs go through the GOT.  A scratch register is allocated when
   REG is null.  The final return of REG/ORIG is on elided lines.  */
1797 mn10300_legitimize_pic_address (rtx orig, rtx reg)
1799 if (GET_CODE (orig) == LABEL_REF
1800 || (GET_CODE (orig) == SYMBOL_REF
1801 && (CONSTANT_POOL_ADDRESS_P (orig)
1802 || ! MN10300_GLOBAL_P (orig))))
1805 reg = gen_reg_rtx (Pmode);
1807 emit_insn (gen_symGOTOFF2reg (reg, orig));
1810 else if (GET_CODE (orig) == SYMBOL_REF)
1813 reg = gen_reg_rtx (Pmode);
1815 emit_insn (gen_symGOT2reg (reg, orig));
1821 /* Return zero if X references a SYMBOL_REF or LABEL_REF whose symbol
1822 isn't protected by a PIC unspec; nonzero otherwise. */
/* Recursive walk over the rtx: bare symbols/labels fail, PIC-related
   UNSPECs succeed immediately, everything else recurses through the
   operands using the rtx format string.  */
1825 mn10300_legitimate_pic_operand_p (rtx x)
1830 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1833 if (GET_CODE (x) == UNSPEC
1834 && (XINT (x, 1) == UNSPEC_PIC
1835 || XINT (x, 1) == UNSPEC_GOT
1836 || XINT (x, 1) == UNSPEC_GOTOFF
1837 || XINT (x, 1) == UNSPEC_PLT
1838 || XINT (x, 1) == UNSPEC_GOTSYM_OFF))
/* Recurse into vector ('E') and expression ('e') operands.  */
1841 fmt = GET_RTX_FORMAT (GET_CODE (x));
1842 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1848 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1849 if (! mn10300_legitimate_pic_operand_p (XVECEXP (x, i, j)))
1852 else if (fmt[i] == 'e'
1853 && ! mn10300_legitimate_pic_operand_p (XEXP (x, i)))
1860 /* Return TRUE if the address X, taken from a (MEM:MODE X) rtx, is
1861 legitimate, and FALSE otherwise.
1863 On the mn10300, the value in the address register must be
1864 in the same memory space/segment as the effective address.
1866 This is problematical for reload since it does not understand
1867 that base+index != index+base in a memory reference.
1869 Note it is still possible to use reg+reg addressing modes,
1870 it's just much more difficult. For a discussion of a possible
1871 workaround and solution, see the comments in pa.c before the
1872 function record_unscaled_index_insn_codes. */
/* TARGET_LEGITIMATE_ADDRESS_P worker.  Accepts constants (PIC-checked),
   base registers, POST_INC of a base for SI/SF/HImode, and base+offset
   PLUS forms.  NOTE(review): the guard before the POST_INC test (line
   1884, elided) presumably checks TARGET_AM33 -- confirm.  */
1875 mn10300_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1877 if (CONSTANT_ADDRESS_P (x)
1878 && (! flag_pic || mn10300_legitimate_pic_operand_p (x)))
1881 if (RTX_OK_FOR_BASE_P (x, strict))
1885 && GET_CODE (x) == POST_INC
1886 && RTX_OK_FOR_BASE_P (XEXP (x, 0), strict)
1887 && (mode == SImode || mode == SFmode || mode == HImode))
1890 if (GET_CODE (x) == PLUS)
1892 rtx base = 0, index = 0;
/* Either operand of the PLUS may serve as the base register.  */
1894 if (REG_P (XEXP (x, 0))
1895 && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 0)), strict))
1898 index = XEXP (x, 1);
1901 if (REG_P (XEXP (x, 1))
1902 && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 1)), strict))
1905 index = XEXP (x, 0);
1908 if (base != 0 && index != 0)
1910 if (CONST_INT_P (index))
1912 if (GET_CODE (index) == CONST
1913 && GET_CODE (XEXP (index, 0)) != PLUS
1915 || (mn10300_legitimate_pic_operand_p (index)
1916 && GET_MODE_SIZE (mode) == 4)))
1924 /* Used by LEGITIMATE_CONSTANT_P(). Returns TRUE if X is a valid
1925 constant. Note that some "constants" aren't valid, such as TLS
1926 symbols and unconverted GOT-based references, so we eliminate
/* Strips CONST/PLUS wrappers, screens UNSPECs against a whitelist, and
   finally requires a plain symbolic operand.  Several case labels and
   returns are on elided lines.  */
1930 mn10300_legitimate_constant_p (rtx x)
1932 switch (GET_CODE (x))
1937 if (GET_CODE (x) == PLUS)
1939 if (! CONST_INT_P (XEXP (x, 1)))
1944 /* Only some unspecs are valid as "constants". */
1945 if (GET_CODE (x) == UNSPEC)
1947 switch (XINT (x, 1))
1949 case UNSPEC_INT_LABEL:
1960 /* We must have drilled down to a symbol. */
1961 if (! mn10300_symbolic_operand (x, Pmode))
/* Recursive helper for mn10300_address_cost: returns a relative cost
   for address rtx X.  *UNSIG is a flag noting that an enclosing context
   treats the constant as unsigned, which widens the cheap immediate
   ranges below.  Individual cost returns for each register class are on
   elided lines.  */
1973 mn10300_address_cost_1 (rtx x, int *unsig)
1975 switch (GET_CODE (x))
1978 switch (REGNO_REG_CLASS (REGNO (x)))
/* PLUS: cost is the sum of the costs of its two operands.  */
2004 return (mn10300_address_cost_1 (XEXP (x, 0), unsig)
2005 + mn10300_address_cost_1 (XEXP (x, 1), unsig))
2010 return mn10300_address_cost (XEXP (x, 0), !optimize_size);
2014 return mn10300_address_cost_1 (XEXP (x, 0), unsig);
/* CONST_INT: zero is cheapest, then 8/16/24-bit immediates in
   increasing cost; the signed ranges are recentered via *UNSIG.  */
2017 if (INTVAL (x) == 0)
2019 if (INTVAL (x) + (*unsig ? 0 : 0x80) < 0x100)
2021 if (INTVAL (x) + (*unsig ? 0 : 0x8000) < 0x10000)
2023 if (INTVAL (x) + (*unsig ? 0 : 0x800000) < 0x1000000)
/* TARGET_ADDRESS_COST worker: start the recursion with the "signed"
   flag clear (declaration of "s" is on an elided line).  */
2039 mn10300_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
2042 return mn10300_address_cost_1 (x, &s);
/* TARGET_RTX_COSTS worker.  Only the CONST_INT / CONST / CONST_DOUBLE /
   ZERO_EXTRACT cases are visible; the case labels, *total assignments
   and returns are on elided lines.  */
2046 mn10300_rtx_costs (rtx x, int code, int outer_code, int *total,
2047 bool speed ATTRIBUTE_UNUSED)
2052 /* Zeros are extremely cheap. */
2053 if (INTVAL (x) == 0 && (outer_code == SET || outer_code == COMPARE))
2055 /* If it fits in 8 bits, then it's still relatively cheap. */
2056 else if (INT_8_BITS (INTVAL (x)))
2058 /* This is the "base" cost, includes constants where either the
2059 upper or lower 16bits are all zeros. */
2060 else if (INT_16_BITS (INTVAL (x))
2061 || (INTVAL (x) & 0xffff) == 0
2062 || (INTVAL (x) & 0xffff0000) == 0)
2071 /* These are more costly than a CONST_INT, but we can relax them,
2072 so they're less costly than a CONST_DOUBLE. */
2077 /* We don't optimize CONST_DOUBLEs well nor do we relax them well,
2078 so their cost is very high. */
2083 /* This is cheap, we can use btst. */
2084 if (outer_code == COMPARE)
2088 /* ??? This probably needs more work. */
2100 /* Check whether a constant used to initialize a DImode or DFmode can
2101 use a clr instruction. The code here must be kept in sync with
/* Splits the 64-bit constant into two 32-bit halves and reports whether
   either half is zero (so a "clr" can be emitted for it).  Only applies
   to data registers.  */
2105 mn10300_wide_const_load_uses_clr (rtx operands[2])
2107 long val[2] = {0, 0};
2109 if ((! REG_P (operands[0]))
2110 || REGNO_REG_CLASS (REGNO (operands[0])) != DATA_REGS)
2113 switch (GET_CODE (operands[1]))
/* CONST_INT case: split_double gives the two word halves directly.
   (Declarations of low/high are on elided lines.)  */
2118 split_double (operands[1], &low, &high);
2119 val[0] = INTVAL (low);
2120 val[1] = INTVAL (high);
/* CONST_DOUBLE case: decode DFmode through the real-value machinery,
   DImode/VOIDmode straight from the CONST_DOUBLE words.  */
2125 if (GET_MODE (operands[1]) == DFmode)
2129 REAL_VALUE_FROM_CONST_DOUBLE (rv, operands[1]);
2130 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
2132 else if (GET_MODE (operands[1]) == VOIDmode
2133 || GET_MODE (operands[1]) == DImode)
2135 val[0] = CONST_DOUBLE_LOW (operands[1]);
2136 val[1] = CONST_DOUBLE_HIGH (operands[1]);
2144 return val[0] == 0 || val[1] == 0;
2146 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
2147 may access it using GOTOFF instead of GOT. */
/* TARGET_ENCODE_SECTION_INFO worker: records binds-locally-ness in
   SYMBOL_REF_FLAG.  The initial default_encode_section_info call and
   the flag_pic guard are on elided lines.  */
2150 mn10300_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
2156 symbol = XEXP (rtl, 0);
2157 if (GET_CODE (symbol) != SYMBOL_REF)
2161 SYMBOL_REF_FLAG (symbol) = (*targetm.binds_local_p) (decl);
2164 /* Dispatch tables on the mn10300 are extremely expensive in terms of code
2165 and readonly data size. So we crank up the case threshold value to
2166 encourage a series of if/else comparisons to implement many small switch
2167 statements. In theory, this value could be increased much more if we
2168 were solely optimizing for space, but we keep it "reasonable" to avoid
2169 serious code efficiency lossage. */
/* TARGET_CASE_VALUES_THRESHOLD worker; the constant returned is on an
   elided line.  */
2172 mn10300_case_values_threshold (void)
2177 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE. */
/* Emits the fixed trampoline code: it loads the static chain from the
   word at offset 13 into a1 and the target address from offset 17 into
   a0, then jumps.  The two trailing .long 0 slots are the words that
   mn10300_trampoline_init fills in at run time.  */
2180 mn10300_asm_trampoline_template (FILE *f)
2182 fprintf (f, "\tadd -4,sp\n");
2183 fprintf (f, "\t.long 0x0004fffa\n");
2184 fprintf (f, "\tmov (0,sp),a0\n");
2185 fprintf (f, "\tadd 4,sp\n");
2186 fprintf (f, "\tmov (13,a0),a1\n");
2187 fprintf (f, "\tmov (17,a0),a0\n");
2188 fprintf (f, "\tjmp (a0)\n");
2189 fprintf (f, "\t.long 0\n");
2190 fprintf (f, "\t.long 0\n");
2193 /* Worker function for TARGET_TRAMPOLINE_INIT. */
/* Copies the template above into M_TRAMP, then patches the static
   chain at offset 0x14 and the function address at offset 0x18 --
   these offsets must stay in sync with the template's (13,a0)/(17,a0)
   loads and its two .long placeholders.  */
2196 mn10300_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2198 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2201 emit_block_move (m_tramp, assemble_trampoline_template (),
2202 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2204 mem = adjust_address (m_tramp, SImode, 0x14);
2205 emit_move_insn (mem, chain_value);
2206 mem = adjust_address (m_tramp, SImode, 0x18);
2207 emit_move_insn (mem, fnaddr);
2210 /* Output the assembler code for a C++ thunk function.
2211 THUNK_DECL is the declaration for the thunk function itself, FUNCTION
2212 is the decl for the target function. DELTA is an immediate constant
2213 offset to be added to the THIS parameter. If VCALL_OFFSET is nonzero
2214 the word at the adjusted address *(*THIS' + VCALL_OFFSET) should be
2215 additionally added to THIS. Finally jump to the entry point of
/* NOTE(review): the declaration of _this and the guards around the
   delta/vcall blocks are on elided lines.  */
2219 mn10300_asm_output_mi_thunk (FILE * file,
2220 tree thunk_fndecl ATTRIBUTE_UNUSED,
2221 HOST_WIDE_INT delta,
2222 HOST_WIDE_INT vcall_offset,
2227 /* Get the register holding the THIS parameter. Handle the case
2228 where there is a hidden first argument for a returned structure. */
/* With a hidden struct-return pointer, THIS is in the second argument
   register; otherwise in the first.  */
2229 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
2230 _this = reg_names [FIRST_ARGUMENT_REGNUM + 1];
2232 _this = reg_names [FIRST_ARGUMENT_REGNUM];
2234 fprintf (file, "\t%s Thunk Entry Point:\n", ASM_COMMENT_START);
/* Apply the constant this-adjustment.  */
2237 fprintf (file, "\tadd %d, %s\n", (int) delta, _this);
/* Apply the virtual-call adjustment: fetch the vtable pointer, index
   it by VCALL_OFFSET, and add the fetched word to THIS.  */
2241 const char * scratch = reg_names [FIRST_ADDRESS_REGNUM + 1];
2243 fprintf (file, "\tmov %s, %s\n", _this, scratch);
2244 fprintf (file, "\tmov (%s), %s\n", scratch, scratch);
2245 fprintf (file, "\tadd %d, %s\n", (int) vcall_offset, scratch);
2246 fprintf (file, "\tmov (%s), %s\n", scratch, scratch);
2247 fprintf (file, "\tadd %s, %s\n", scratch, _this);
/* Tail-jump to the real function.  */
2250 fputs ("\tjmp ", file);
2251 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
2255 /* Return true if mn10300_output_mi_thunk would be able to output the
2256 assembler code for the thunk function specified by the arguments
2257 it is passed, and false otherwise. */
/* The mn10300 thunk emitter handles every case, so this always answers
   true (the return itself is on an elided line).  */
2260 mn10300_can_output_mi_thunk (const_tree thunk_fndecl ATTRIBUTE_UNUSED,
2261 HOST_WIDE_INT delta ATTRIBUTE_UNUSED,
2262 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
2263 const_tree function ATTRIBUTE_UNUSED)
/* HARD_REGNO_MODE_OK worker: FP registers hold only float modes at
   even register numbers; data/extended (and, on AM33, address)
   registers hold values of at most 4 bytes, with multi-register values
   starting on an even register.  */
2269 mn10300_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
2271 if (REGNO_REG_CLASS (regno) == FP_REGS
2272 || REGNO_REG_CLASS (regno) == FP_ACC_REGS)
2273 /* Do not store integer values in FP registers. */
2274 return GET_MODE_CLASS (mode) == MODE_FLOAT && ((regno & 1) == 0);
2276 if (((regno) & 1) == 0 || GET_MODE_SIZE (mode) == 4)
2279 if (REGNO_REG_CLASS (regno) == DATA_REGS
2280 || (TARGET_AM33 && REGNO_REG_CLASS (regno) == ADDRESS_REGS)
2281 || REGNO_REG_CLASS (regno) == EXTENDED_REGS)
2282 return GET_MODE_SIZE (mode) <= 4;
/* MODES_TIEABLE_P worker: float and non-float modes never tie; the
   final condition's first clause is on an elided line.  */
2288 mn10300_modes_tieable (enum machine_mode mode1, enum machine_mode mode2)
2290 if (GET_MODE_CLASS (mode1) == MODE_FLOAT
2291 && GET_MODE_CLASS (mode2) != MODE_FLOAT)
2294 if (GET_MODE_CLASS (mode2) == MODE_FLOAT
2295 && GET_MODE_CLASS (mode1) != MODE_FLOAT)
2300 || (GET_MODE_SIZE (mode1) <= 4 && GET_MODE_SIZE (mode2) <= 4))
/* Pick the condition-code mode for a comparison: CC_FLOATmode for
   float operands, CCmode otherwise.  */
2307 mn10300_select_cc_mode (rtx x)
2309 return (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) ? CC_FLOATmode : CCmode;
/* True when INSN is a single SET whose source is a MEM (a load).  */
2313 is_load_insn (rtx insn)
2315 if (GET_CODE (PATTERN (insn)) != SET)
2318 return MEM_P (SET_SRC (PATTERN (insn)));
/* True when INSN is a single SET whose destination is a MEM (a store).  */
2322 is_store_insn (rtx insn)
2324 if (GET_CODE (PATTERN (insn)) != SET)
2327 return MEM_P (SET_DEST (PATTERN (insn)));
2330 /* Update scheduling costs for situations that cannot be
2331 described using the attributes and DFA machinery.
2332 DEP is the insn being scheduled.
2333 INSN is the previous insn.
2334 COST is the current cycle cost for DEP. */
/* TARGET_SCHED_ADJUST_COST worker.  The "cost + N" returns for the AM34
   adjustments are on elided lines.  */
2337 mn10300_adjust_sched_cost (rtx insn, rtx link, rtx dep, int cost)
2339 int timings = get_attr_timings (insn);
/* Look through PARALLELs at the first real element of each pattern.  */
2344 if (GET_CODE (insn) == PARALLEL)
2345 insn = XVECEXP (insn, 0, 0);
2347 if (GET_CODE (dep) == PARALLEL)
2348 dep = XVECEXP (dep, 0, 0);
2350 /* For the AM34 a load instruction that follows a
2351 store instruction incurs an extra cycle of delay. */
2352 if (mn10300_tune_cpu == PROCESSOR_AM34
2353 && is_load_insn (dep)
2354 && is_store_insn (insn))
2357 /* For the AM34 a non-store, non-branch FPU insn that follows
2358 another FPU insn incurs a one cycle throughput increase. */
2359 else if (mn10300_tune_cpu == PROCESSOR_AM34
2360 && ! is_store_insn (insn)
2362 && GET_CODE (PATTERN (dep)) == SET
2363 && GET_CODE (PATTERN (insn)) == SET
2364 && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep)))) == MODE_FLOAT
2365 && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn)))) == MODE_FLOAT)
2368 /* Resolve the conflict described in section 1-7-4 of
2369 Chapter 3 of the MN103E Series Instruction Manual
2372 "When the preceding instruction is a CPU load or
2373 store instruction, a following FPU instruction
2374 cannot be executed until the CPU completes the
2375 latency period even though there are no register
2376 or flag dependencies between them." */
2378 /* Only the AM33-2 (and later) CPUs have FPU instructions. */
2379 if (! TARGET_AM33_2)
2382 /* If a data dependence already exists then the cost is correct. */
2383 if (REG_NOTE_KIND (link) == 0)
2386 /* Check that the instruction about to be scheduled is an FPU instruction. */
2387 if (GET_CODE (PATTERN (dep)) != SET)
2390 if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep)))) != MODE_FLOAT)
2393 /* Now check to see if the previous instruction is a load or store. */
2394 if (! is_load_insn (insn) && ! is_store_insn (insn))
2397 /* XXX: Verify: The text of 1-7-4 implies that the restriction
2398 only applies when an INTEGER load/store precedes an FPU
2399 instruction, but is this true ? For now we assume that it is. */
2400 if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn)))) != MODE_INT)
2403 /* Extract the latency value from the timings attribute. */
/* The timings attribute encodes throughput*10^k + latency; take the
   low one or two digits as the latency.  */
2404 return timings < 100 ? (timings % 10) : (timings % 100);
2407 /* Initialize the GCC target structure. */
/* Each #undef/#define pair below plugs one of the worker functions
   defined in this file (or a generic hook) into the target vector;
   TARGET_INITIALIZER at the bottom collects them all into targetm.  */
2409 #undef TARGET_EXCEPT_UNWIND_INFO
2410 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
2412 #undef TARGET_ASM_ALIGNED_HI_OP
2413 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2415 #undef TARGET_LEGITIMIZE_ADDRESS
2416 #define TARGET_LEGITIMIZE_ADDRESS mn10300_legitimize_address
2418 #undef TARGET_RTX_COSTS
2419 #define TARGET_RTX_COSTS mn10300_rtx_costs
2420 #undef TARGET_ADDRESS_COST
2421 #define TARGET_ADDRESS_COST mn10300_address_cost
2423 #undef TARGET_ASM_FILE_START
2424 #define TARGET_ASM_FILE_START mn10300_file_start
2425 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
2426 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
2428 #undef TARGET_DEFAULT_TARGET_FLAGS
2429 #define TARGET_DEFAULT_TARGET_FLAGS MASK_MULT_BUG | MASK_PTR_A0D0
2430 #undef TARGET_HANDLE_OPTION
2431 #define TARGET_HANDLE_OPTION mn10300_handle_option
2432 #undef TARGET_OPTION_OVERRIDE
2433 #define TARGET_OPTION_OVERRIDE mn10300_option_override
2434 #undef TARGET_OPTION_OPTIMIZATION_TABLE
2435 #define TARGET_OPTION_OPTIMIZATION_TABLE mn10300_option_optimization_table
2437 #undef TARGET_ENCODE_SECTION_INFO
2438 #define TARGET_ENCODE_SECTION_INFO mn10300_encode_section_info
/* Calling-convention hooks.  */
2440 #undef TARGET_PROMOTE_PROTOTYPES
2441 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2442 #undef TARGET_RETURN_IN_MEMORY
2443 #define TARGET_RETURN_IN_MEMORY mn10300_return_in_memory
2444 #undef TARGET_PASS_BY_REFERENCE
2445 #define TARGET_PASS_BY_REFERENCE mn10300_pass_by_reference
2446 #undef TARGET_CALLEE_COPIES
2447 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
2448 #undef TARGET_ARG_PARTIAL_BYTES
2449 #define TARGET_ARG_PARTIAL_BYTES mn10300_arg_partial_bytes
2450 #undef TARGET_FUNCTION_ARG
2451 #define TARGET_FUNCTION_ARG mn10300_function_arg
2452 #undef TARGET_FUNCTION_ARG_ADVANCE
2453 #define TARGET_FUNCTION_ARG_ADVANCE mn10300_function_arg_advance
2455 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
2456 #define TARGET_EXPAND_BUILTIN_SAVEREGS mn10300_builtin_saveregs
2457 #undef TARGET_EXPAND_BUILTIN_VA_START
2458 #define TARGET_EXPAND_BUILTIN_VA_START mn10300_va_start
2460 #undef TARGET_CASE_VALUES_THRESHOLD
2461 #define TARGET_CASE_VALUES_THRESHOLD mn10300_case_values_threshold
2463 #undef TARGET_LEGITIMATE_ADDRESS_P
2464 #define TARGET_LEGITIMATE_ADDRESS_P mn10300_legitimate_address_p
2466 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
2467 #define TARGET_ASM_TRAMPOLINE_TEMPLATE mn10300_asm_trampoline_template
2468 #undef TARGET_TRAMPOLINE_INIT
2469 #define TARGET_TRAMPOLINE_INIT mn10300_trampoline_init
2471 #undef TARGET_FUNCTION_VALUE
2472 #define TARGET_FUNCTION_VALUE mn10300_function_value
2473 #undef TARGET_LIBCALL_VALUE
2474 #define TARGET_LIBCALL_VALUE mn10300_libcall_value
2476 #undef TARGET_ASM_OUTPUT_MI_THUNK
2477 #define TARGET_ASM_OUTPUT_MI_THUNK mn10300_asm_output_mi_thunk
2478 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2479 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK mn10300_can_output_mi_thunk
2481 #undef TARGET_SCHED_ADJUST_COST
2482 #define TARGET_SCHED_ADJUST_COST mn10300_adjust_sched_cost
2484 struct gcc_target targetm = TARGET_INITIALIZER;