1 /* Subroutines for insn-output.c for Matsushita MN10300 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
34 #include "insn-attr.h"
45 #include "target-def.h"
/* File-scope state for the MN10300 backend.
   NOTE(review): this is a line-numbered listing with gaps -- the comment
   beginning at original line 47 is truncated (line 48 is absent).  */
47 /* This is used by GOTaddr2picreg to uniquely identify
49 int mn10300_unspec_int_label_counter;
51 /* This is used in the am33_2.0-linux-gnu port, in which global symbol
52 names are not prefixed by underscores, to tell whether to prefix a
53 label with a plus sign or not, so that the assembler can tell
54 symbol names from register names. */
55 int mn10300_protect_label;
/* Processor variant selected by -mam33 / -mam33-2 style options; defaults
   to PROCESSOR_DEFAULT and is updated in mn10300_handle_option below.  */
57 /* The selected processor. */
58 enum processor_type mn10300_processor = PROCESSOR_DEFAULT;
/* Byte count of the callee-saved general-register save area: 4 bytes for
   each of d2, d3, a2, a3 (regs 2/3/6/7) that is ever live, plus a lump
   16 bytes if ANY of the extended registers 14-17 is live (they are
   saved/restored as a group -- see mn10300_print_reg_list).  Evaluates
   df_regs_ever_live_p at each use, so it is only valid once dataflow
   info is available.  */
60 /* The size of the callee register save area. Right now we save everything
61 on entry since it costs us nothing in code size. It does cost us from a
62 speed standpoint, so we want to optimize this sooner or later. */
63 #define REG_SAVE_BYTES (4 * df_regs_ever_live_p (2) \
64 + 4 * df_regs_ever_live_p (3) \
65 + 4 * df_regs_ever_live_p (6) \
66 + 4 * df_regs_ever_live_p (7) \
67 + 16 * (df_regs_ever_live_p (14) || df_regs_ever_live_p (15) \
68 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)))
/* Forward declarations for the static target-hook implementations below.
   NOTE(review): the continuation lines of the mn10300_pass_by_reference
   and mn10300_arg_partial_bytes prototypes (original lines 81 and 83)
   are missing from this listing.  */
71 static bool mn10300_handle_option (size_t, const char *, int);
72 static int mn10300_address_cost_1 (rtx, int *);
73 static int mn10300_address_cost (rtx, bool);
74 static bool mn10300_rtx_costs (rtx, int, int, int *, bool);
75 static void mn10300_file_start (void);
76 static bool mn10300_return_in_memory (const_tree, const_tree);
77 static rtx mn10300_builtin_saveregs (void);
78 static void mn10300_va_start (tree, rtx);
79 static rtx mn10300_legitimize_address (rtx, rtx, enum machine_mode);
80 static bool mn10300_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
82 static int mn10300_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
84 static unsigned int mn10300_case_values_threshold (void);
/* Target-hook table: each #undef/#define pair overrides a default hook
   from target-def.h before TARGET_INITIALIZER builds the targetm vector
   at the bottom of this block.  */
86 /* Initialize the GCC target structure. */
87 #undef TARGET_ASM_ALIGNED_HI_OP
88 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
90 #undef TARGET_LEGITIMIZE_ADDRESS
91 #define TARGET_LEGITIMIZE_ADDRESS mn10300_legitimize_address
93 #undef TARGET_RTX_COSTS
94 #define TARGET_RTX_COSTS mn10300_rtx_costs
95 #undef TARGET_ADDRESS_COST
96 #define TARGET_ADDRESS_COST mn10300_address_cost
98 #undef TARGET_ASM_FILE_START
99 #define TARGET_ASM_FILE_START mn10300_file_start
100 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
101 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
103 #undef TARGET_DEFAULT_TARGET_FLAGS
104 #define TARGET_DEFAULT_TARGET_FLAGS MASK_MULT_BUG | MASK_PTR_A0D0
105 #undef TARGET_HANDLE_OPTION
106 #define TARGET_HANDLE_OPTION mn10300_handle_option
108 #undef TARGET_ENCODE_SECTION_INFO
109 #define TARGET_ENCODE_SECTION_INFO mn10300_encode_section_info
111 #undef TARGET_PROMOTE_PROTOTYPES
112 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
113 #undef TARGET_RETURN_IN_MEMORY
114 #define TARGET_RETURN_IN_MEMORY mn10300_return_in_memory
115 #undef TARGET_PASS_BY_REFERENCE
116 #define TARGET_PASS_BY_REFERENCE mn10300_pass_by_reference
117 #undef TARGET_CALLEE_COPIES
118 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
119 #undef TARGET_ARG_PARTIAL_BYTES
120 #define TARGET_ARG_PARTIAL_BYTES mn10300_arg_partial_bytes
122 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
123 #define TARGET_EXPAND_BUILTIN_SAVEREGS mn10300_builtin_saveregs
124 #undef TARGET_EXPAND_BUILTIN_VA_START
125 #define TARGET_EXPAND_BUILTIN_VA_START mn10300_va_start
127 #undef TARGET_CASE_VALUES_THRESHOLD
128 #define TARGET_CASE_VALUES_THRESHOLD mn10300_case_values_threshold
/* Declared here (after the hook table) because the table above refers to
   it by name before its definition appears later in the file.  */
130 static void mn10300_encode_section_info (tree, rtx, int);
131 struct gcc_target targetm = TARGET_INITIALIZER;
/* Process a -m option, updating mn10300_processor.
   NOTE(review): heavily truncated in this listing -- the return type,
   opening brace, the switch over CODE, its case labels (presumably
   OPT_mam33 / OPT_mam33_2 -- TODO confirm against the full file) and the
   return statements are all missing.  Do not edit without the full file.  */
133 /* Implement TARGET_HANDLE_OPTION. */
136 mn10300_handle_option (size_t code,
137 const char *arg ATTRIBUTE_UNUSED,
143 mn10300_processor = value ? PROCESSOR_AM33 : PROCESSOR_MN10300;
146 mn10300_processor = (value
148 : MIN (PROCESSOR_AM33, PROCESSOR_DEFAULT));
/* Option post-processing: clears the MASK_MULT_BUG workaround flag
   (the surrounding condition on original line 160 is missing from this
   listing -- presumably "if (TARGET_AM33)"; TODO confirm).  */
155 /* Implement OVERRIDE_OPTIONS. */
158 mn10300_override_options (void)
161 target_flags &= ~MASK_MULT_BUG;
/* TARGET_ASM_FILE_START hook: emit the standard file prologue, then a
   ".am33_2" or ".am33" pseudo-op selecting the instruction set.  The
   condition guarding the ".am33_2" branch (original line 169, presumably
   "if (TARGET_AM33_2)") is missing from this listing.  */
165 mn10300_file_start (void)
167 default_file_start ();
170 fprintf (asm_out_file, "\t.am33_2\n");
171 else if (TARGET_AM33)
172 fprintf (asm_out_file, "\t.am33\n");
/* Emit operand X to FILE under formatting code CODE.
   Visible structure: 'b'/'B' emit condition-code mnemonics (an FP variant
   when cc_status.mdep.fpCC is set, using reverse_condition_maybe_unordered
   for the reversed form; an integer variant using reverse_condition
   otherwise); 'C' parenthesizes a REG call operand; 'L'/'H' emit the
   low/high word of a 64-bit value; there are codes for inverted and
   masked 8-bit immediates and 5-bit shift counts; the default case falls
   through to print_operand_address.
   NOTE(review): every switch's case labels (EQ/NE/GE/... and the operand
   codes themselves), braces and break statements are missing from this
   listing, so the mapping of each fprintf to its condition is inferred
   from context only -- confirm against the complete file before editing.  */
176 /* Print operand X using operand code CODE to assembly language output file
180 print_operand (FILE *file, rtx x, int code)
186 if (cc_status.mdep.fpCC)
188 switch (code == 'b' ? GET_CODE (x)
189 : reverse_condition_maybe_unordered (GET_CODE (x)))
192 fprintf (file, "ne");
195 fprintf (file, "eq");
198 fprintf (file, "ge");
201 fprintf (file, "gt");
204 fprintf (file, "le");
207 fprintf (file, "lt");
210 fprintf (file, "lge");
213 fprintf (file, "uo");
216 fprintf (file, "lg");
219 fprintf (file, "ue");
222 fprintf (file, "uge");
225 fprintf (file, "ug");
228 fprintf (file, "ule");
231 fprintf (file, "ul");
238 /* These are normal and reversed branches. */
239 switch (code == 'b' ? GET_CODE (x) : reverse_condition (GET_CODE (x)))
242 fprintf (file, "ne");
245 fprintf (file, "eq");
248 fprintf (file, "ge");
251 fprintf (file, "gt");
254 fprintf (file, "le");
257 fprintf (file, "lt");
260 fprintf (file, "cc");
263 fprintf (file, "hi");
266 fprintf (file, "ls");
269 fprintf (file, "cs");
276 /* This is used for the operand to a call instruction;
277 if it's a REG, enclose it in parens, else output
278 the operand normally. */
279 if (GET_CODE (x) == REG)
282 print_operand (file, x, 0);
286 print_operand (file, x, 0);
290 switch (GET_CODE (x))
294 output_address (XEXP (x, 0));
/* FP register pair name: REGNO - 18 maps hard regno to the "fdN" index.  */
299 fprintf (file, "fd%d", REGNO (x) - 18);
307 /* These are the least significant word in a 64bit value. */
309 switch (GET_CODE (x))
313 output_address (XEXP (x, 0));
318 fprintf (file, "%s", reg_names[REGNO (x)]);
322 fprintf (file, "%s", reg_names[subreg_regno (x)]);
330 switch (GET_MODE (x))
333 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
334 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
335 fprintf (file, "0x%lx", val[0]);
338 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
339 REAL_VALUE_TO_TARGET_SINGLE (rv, val[0]);
340 fprintf (file, "0x%lx", val[0]);
344 print_operand_address (file,
345 GEN_INT (CONST_DOUBLE_LOW (x)));
356 split_double (x, &low, &high);
357 fprintf (file, "%ld", (long)INTVAL (low));
366 /* Similarly, but for the most significant word. */
368 switch (GET_CODE (x))
/* High word of a 64-bit MEM lives 4 bytes beyond the low word.  */
372 x = adjust_address (x, SImode, 4);
373 output_address (XEXP (x, 0));
378 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
382 fprintf (file, "%s", reg_names[subreg_regno (x) + 1]);
390 switch (GET_MODE (x))
393 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
394 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
395 fprintf (file, "0x%lx", val[1]);
401 print_operand_address (file,
402 GEN_INT (CONST_DOUBLE_HIGH (x)));
413 split_double (x, &low, &high);
414 fprintf (file, "%ld", (long)INTVAL (high));
/* Plain REG address: print as "(reg + 0)" so the assembler sees an
   explicit displacement form.  */
425 if (GET_CODE (XEXP (x, 0)) == REG)
426 output_address (gen_rtx_PLUS (SImode, XEXP (x, 0), const0_rtx));
428 output_address (XEXP (x, 0));
/* Inverted low byte of an 8-bit immediate (range-checked first).  */
433 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
434 fprintf (file, "%d", (int)((~INTVAL (x)) & 0xff));
438 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
439 fprintf (file, "%d", (int)(INTVAL (x) & 0xff));
442 /* For shift counts. The hardware ignores the upper bits of
443 any immediate, but the assembler will flag an out of range
444 shift count as an error. So we mask off the high bits
445 of the immediate here. */
447 if (GET_CODE (x) == CONST_INT)
449 fprintf (file, "%d", (int)(INTVAL (x) & 0x1f));
455 switch (GET_CODE (x))
459 output_address (XEXP (x, 0));
468 fprintf (file, "%s", reg_names[REGNO (x)]);
472 fprintf (file, "%s", reg_names[subreg_regno (x)]);
475 /* This will only be single precision.... */
481 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
482 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
483 fprintf (file, "0x%lx", val);
493 print_operand_address (file, x);
/* Emit address ADDR to FILE.  Visible cases: recurse on XEXP for one
   address form, print a bare register via print_operand, handle
   reg+index PLUS addresses (picking whichever operand is a valid base),
   and fall back to output_addr_const for symbolic/constant addresses.
   NOTE(review): the case labels (POST_INC/REG/PLUS/... -- TODO confirm)
   and the separator output between index and base are missing from this
   listing.  Note the stray double semicolon on original line 529 --
   harmless, but worth cleaning up when the full file is available.  */
502 /* Output assembly language output for the address ADDR to FILE. */
505 print_operand_address (FILE *file, rtx addr)
507 switch (GET_CODE (addr))
510 print_operand_address (file, XEXP (addr, 0));
514 print_operand (file, addr, 0);
519 if (REG_P (XEXP (addr, 0))
520 && REG_OK_FOR_BASE_P (XEXP (addr, 0)))
521 base = XEXP (addr, 0), index = XEXP (addr, 1);
522 else if (REG_P (XEXP (addr, 1))
523 && REG_OK_FOR_BASE_P (XEXP (addr, 1)))
524 base = XEXP (addr, 1), index = XEXP (addr, 0);
527 print_operand (file, index, 0);
529 print_operand (file, base, 0);;
533 output_addr_const (file, addr);
536 output_addr_const (file, addr);
/* Count the FP registers (FIRST_FP_REGNUM..LAST_FP_REGNUM) that are live
   and not call-clobbered, i.e. that the prologue must save.
   NOTE(review): the return type, local declarations, the increment in the
   loop body and the return statement are missing from this listing.  */
541 /* Count the number of FP registers that have to be saved. */
543 fp_regs_to_save (void)
550 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
551 if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
/* Emit the register-list operand of "movm"/"ret": individually named
   data/address registers for bits below FIRST_EXTENDED_REGNUM, then the
   collective "exreg1" name when the extended-register bits (0x3c000,
   i.e. regs 14-17) are set -- which must be all-or-nothing, enforced by
   the gcc_assert.  NOTE(review): the bracket/comma separator output and
   braces are missing from this listing.  */
557 /* Print a set of registers in the format required by "movm" and "ret".
558 Register K is saved if bit K of MASK is set. The data and address
559 registers can be stored individually, but the extended registers cannot.
560 We assume that the mask already takes that into account. For instance,
561 bits 14 to 17 must have the same value. */
564 mn10300_print_reg_list (FILE *file, int mask)
572 for (i = 0; i < FIRST_EXTENDED_REGNUM; i++)
573 if ((mask & (1 << i)) != 0)
577 fputs (reg_names [i], file);
581 if ((mask & 0x3c000) != 0)
583 gcc_assert ((mask & 0x3c000) == 0x3c000);
586 fputs ("exreg1", file);
/* Return nonzero when the function can end with a simple "ret": reload
   is done, no callee-saved general or FP registers are live, and no
   frame pointer is needed.  NOTE(review): original line 603 is missing
   from this listing -- it very likely constrains SIZE (e.g. size <= 255
   for the ret encoding; SIZE is computed but otherwise unused in the
   visible lines).  Confirm before relying on this predicate.  */
594 can_use_return_insn (void)
596 /* size includes the fixed stack space needed for function calls. */
597 int size = get_frame_size () + crtl->outgoing_args_size;
599 /* And space for the return pointer. */
600 size += crtl->outgoing_args_size ? 4 : 0;
602 return (reload_completed
604 && !df_regs_ever_live_p (2)
605 && !df_regs_ever_live_p (3)
606 && !df_regs_ever_live_p (6)
607 && !df_regs_ever_live_p (7)
608 && !df_regs_ever_live_p (14)
609 && !df_regs_ever_live_p (15)
610 && !df_regs_ever_live_p (16)
611 && !df_regs_ever_live_p (17)
612 && fp_regs_to_save () == 0
613 && !frame_pointer_needed);
/* Build a bitmask of live callee-saved registers; if any extended
   register bit (0x3c000) is set, the whole group is forced on (the
   statement doing so, after the check on original line 630, is missing
   from this listing, as are the mask accumulation and return).  */
616 /* Returns the set of live, callee-saved registers as a bitmask. The
617 callee-saved extended registers cannot be stored individually, so
618 all of them will be included in the mask if any one of them is used. */
621 mn10300_get_live_callee_saved_regs (void)
627 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
628 if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
630 if ((mask & 0x3c000) != 0)
/* Emit a single PARALLEL insn that decrements SP by 4*count and stores
   each register in MASK to its slot, lowest register in the lowest slot
   ("movm"-compatible); the emitted insn is marked frame-related for DWARF
   unwind info.  NOTE(review): the early-exit for MASK == 0, the pari
   initialization/increment and closing braces are missing from this
   listing.  */
636 /* Generate an instruction that pushes several registers onto the stack.
637 Register K will be saved if bit K in MASK is set. The function does
638 nothing if MASK is zero.
640 To be compatible with the "movm" instruction, the lowest-numbered
641 register must be stored in the lowest slot. If MASK is the set
642 { R1,...,RN }, where R1...RN are ordered least first, the generated
643 instruction will have the form:
646 (set (reg:SI 9) (plus:SI (reg:SI 9) (const_int -N*4)))
647 (set (mem:SI (plus:SI (reg:SI 9)
651 (set (mem:SI (plus:SI (reg:SI 9)
656 mn10300_gen_multiple_store (int mask)
665 /* Count how many registers need to be saved. */
667 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
668 if ((mask & (1 << i)) != 0)
671 /* We need one PARALLEL element to update the stack pointer and
672 an additional element for each register that is stored. */
673 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count + 1));
675 /* Create the instruction that updates the stack pointer. */
677 = gen_rtx_SET (SImode,
679 gen_rtx_PLUS (SImode,
681 GEN_INT (-count * 4)));
683 /* Create each store. */
/* Iterate downward so the highest register lands in the highest slot.  */
685 for (i = LAST_EXTENDED_REGNUM; i >= 0; i--)
686 if ((mask & (1 << i)) != 0)
688 rtx address = gen_rtx_PLUS (SImode,
690 GEN_INT (-pari * 4));
691 XVECEXP(par, 0, pari)
692 = gen_rtx_SET (VOIDmode,
693 gen_rtx_MEM (SImode, address),
694 gen_rtx_REG (SImode, i));
698 par = emit_insn (par);
699 RTX_FRAME_RELATED_P (par) = 1;
/* Emit RTL for the function prologue: save callee-saved general registers
   via mn10300_gen_multiple_store, then (on AM33-2 with live FP registers)
   pick the cheapest of five FP-save strategies by estimated byte size
   (SP-offset stores with/without merging the frame allocation, partial
   merge, or post-increment stores through a0), emit the chosen SP
   adjustment and FP stores, set up the frame pointer, allocate the
   remaining frame, and load the PIC register if needed.
   NOTE(review): this listing is missing the switch heads and several case
   labels/braces around the strategy dispatch (e.g. between original lines
   861-864 and 897-901), some argument lines of emit_insn calls, and
   declarations such as `reg' and `xsize'.  Treat the structure annotations
   below as reconstruction hints, not facts.  */
704 expand_prologue (void)
708 /* SIZE includes the fixed stack space needed for function calls. */
709 size = get_frame_size () + crtl->outgoing_args_size;
710 size += (crtl->outgoing_args_size ? 4 : 0);
712 /* If we use any of the callee-saved registers, save them now. */
713 mn10300_gen_multiple_store (mn10300_get_live_callee_saved_regs ());
715 if (TARGET_AM33_2 && fp_regs_to_save ())
717 int num_regs_to_save = fp_regs_to_save (), i;
719 enum { save_sp_merge,
721 save_sp_partial_merge,
723 save_a0_no_merge } strategy;
724 unsigned int strategy_size = (unsigned)-1, this_strategy_size;
728 /* We have several different strategies to save FP registers.
729 We can store them using SP offsets, which is beneficial if
730 there are just a few registers to save, or we can use `a0' in
731 post-increment mode (`a0' is the only call-clobbered address
732 register that is never used to pass information to a
733 function). Furthermore, if we don't need a frame pointer, we
734 can merge the two SP adds into a single one, but this isn't
735 always beneficial; sometimes we can just split the two adds
736 so that we don't exceed a 16-bit constant size. The code
737 below will select which strategy to use, so as to generate
738 smallest code. Ties are broken in favor or shorter sequences
739 (in terms of number of instructions). */
/* Byte-size estimators: add-to-address-reg, add-to-SP, and fmov-via-SP
   sequences, bucketed by immediate width.  */
741 #define SIZE_ADD_AX(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
742 : (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 2)
743 #define SIZE_ADD_SP(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
744 : (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 3)
745 #define SIZE_FMOV_LIMIT(S,N,L,SIZE1,SIZE2,ELSE) \
746 (((S) >= (L)) ? (SIZE1) * (N) \
747 : ((S) + 4 * (N) >= (L)) ? (((L) - (S)) / 4 * (SIZE2) \
748 + ((S) + 4 * (N) - (L)) / 4 * (SIZE1)) \
750 #define SIZE_FMOV_SP_(S,N) \
751 (SIZE_FMOV_LIMIT ((S), (N), (1 << 24), 7, 6, \
752 SIZE_FMOV_LIMIT ((S), (N), (1 << 8), 6, 4, \
753 (S) ? 4 * (N) : 3 + 4 * ((N) - 1))))
754 #define SIZE_FMOV_SP(S,N) (SIZE_FMOV_SP_ ((unsigned HOST_WIDE_INT)(S), (N)))
756 /* Consider alternative save_sp_merge only if we don't need the
757 frame pointer and size is nonzero. */
758 if (! frame_pointer_needed && size)
760 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
761 this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
762 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
763 this_strategy_size += SIZE_FMOV_SP (size, num_regs_to_save);
765 if (this_strategy_size < strategy_size)
767 strategy = save_sp_merge;
768 strategy_size = this_strategy_size;
772 /* Consider alternative save_sp_no_merge unconditionally. */
773 /* Insn: add -4 * num_regs_to_save, sp. */
774 this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
775 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
776 this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
779 /* Insn: add -size, sp. */
780 this_strategy_size += SIZE_ADD_SP (-size);
783 if (this_strategy_size < strategy_size)
785 strategy = save_sp_no_merge;
786 strategy_size = this_strategy_size;
789 /* Consider alternative save_sp_partial_merge only if we don't
790 need a frame pointer and size is reasonably large. */
791 if (! frame_pointer_needed && size + 4 * num_regs_to_save > 128)
793 /* Insn: add -128, sp. */
794 this_strategy_size = SIZE_ADD_SP (-128);
795 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
796 this_strategy_size += SIZE_FMOV_SP (128 - 4 * num_regs_to_save,
800 /* Insn: add 128-size, sp. */
801 this_strategy_size += SIZE_ADD_SP (128 - size);
804 if (this_strategy_size < strategy_size)
806 strategy = save_sp_partial_merge;
807 strategy_size = this_strategy_size;
811 /* Consider alternative save_a0_merge only if we don't need a
812 frame pointer, size is nonzero and the user hasn't
813 changed the calling conventions of a0. */
814 if (! frame_pointer_needed && size
815 && call_really_used_regs [FIRST_ADDRESS_REGNUM]
816 && ! fixed_regs[FIRST_ADDRESS_REGNUM])
818 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
819 this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
820 /* Insn: mov sp, a0. */
821 this_strategy_size++;
824 /* Insn: add size, a0. */
825 this_strategy_size += SIZE_ADD_AX (size);
827 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
828 this_strategy_size += 3 * num_regs_to_save;
830 if (this_strategy_size < strategy_size)
832 strategy = save_a0_merge;
833 strategy_size = this_strategy_size;
837 /* Consider alternative save_a0_no_merge if the user hasn't
838 changed the calling conventions of a0. */
839 if (call_really_used_regs [FIRST_ADDRESS_REGNUM]
840 && ! fixed_regs[FIRST_ADDRESS_REGNUM])
842 /* Insn: add -4 * num_regs_to_save, sp. */
843 this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
844 /* Insn: mov sp, a0. */
845 this_strategy_size++;
846 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
847 this_strategy_size += 3 * num_regs_to_save;
850 /* Insn: add -size, sp. */
851 this_strategy_size += SIZE_ADD_SP (-size);
854 if (this_strategy_size < strategy_size)
856 strategy = save_a0_no_merge;
857 strategy_size = this_strategy_size;
861 /* Emit the initial SP add, common to all strategies. */
864 case save_sp_no_merge:
865 case save_a0_no_merge:
866 emit_insn (gen_addsi3 (stack_pointer_rtx,
868 GEN_INT (-4 * num_regs_to_save)));
872 case save_sp_partial_merge:
873 emit_insn (gen_addsi3 (stack_pointer_rtx,
876 xsize = 128 - 4 * num_regs_to_save;
882 emit_insn (gen_addsi3 (stack_pointer_rtx,
884 GEN_INT (-(size + 4 * num_regs_to_save))));
885 /* We'll have to adjust FP register saves according to the
888 /* Since we've already created the stack frame, don't do it
889 again at the end of the function. */
897 /* Now prepare register a0, if we have decided to use it. */
901 case save_sp_no_merge:
902 case save_sp_partial_merge:
907 case save_a0_no_merge:
908 reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM);
909 emit_insn (gen_movsi (reg, stack_pointer_rtx));
911 emit_insn (gen_addsi3 (reg, reg, GEN_INT (xsize)));
912 reg = gen_rtx_POST_INC (SImode, reg);
919 /* Now actually save the FP registers. */
920 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
921 if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
929 /* If we aren't using `a0', use an SP offset. */
932 addr = gen_rtx_PLUS (SImode,
937 addr = stack_pointer_rtx;
942 insn = emit_insn (gen_movsi (gen_rtx_MEM (SImode, addr),
943 gen_rtx_REG (SImode, i)));
945 RTX_FRAME_RELATED_P (insn) = 1;
949 /* Now put the frame pointer into the frame pointer register. */
950 if (frame_pointer_needed)
951 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
953 /* Allocate stack for this frame. */
955 emit_insn (gen_addsi3 (stack_pointer_rtx,
/* Load the PIC register when position-independent code needs it.  */
958 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
959 emit_insn (gen_GOTaddr2picreg ());
/* Emit RTL for the function epilogue: mirror of expand_prologue.  On
   AM33-2 with saved FP registers it chooses the cheapest of four restore
   strategies (SP offsets before/after/partially-after adjusting SP, or
   post-increment loads through a1, falling back to the frame pointer
   when one exists), restores the FP registers, cuts the stack back
   (never deallocating the general-register save area here, to keep it
   interrupt-safe), and emits a return_internal_regs or return_internal
   jump.  NOTE(review): as with expand_prologue, the strategy-dispatch
   switch head, several case labels/braces, some emit_insn argument lines
   and local declarations (e.g. `reg', `addr') are missing from this
   listing -- verify against the complete file before editing.  */
963 expand_epilogue (void)
967 /* SIZE includes the fixed stack space needed for function calls. */
968 size = get_frame_size () + crtl->outgoing_args_size;
969 size += (crtl->outgoing_args_size ? 4 : 0);
971 if (TARGET_AM33_2 && fp_regs_to_save ())
973 int num_regs_to_save = fp_regs_to_save (), i;
976 /* We have several options to restore FP registers. We could
977 load them from SP offsets, but, if there are enough FP
978 registers to restore, we win if we use a post-increment
981 /* If we have a frame pointer, it's the best option, because we
982 already know it has the value we want. */
983 if (frame_pointer_needed)
984 reg = gen_rtx_REG (SImode, FRAME_POINTER_REGNUM);
985 /* Otherwise, we may use `a1', since it's call-clobbered and
986 it's never used for return values. But only do so if it's
987 smaller than using SP offsets. */
990 enum { restore_sp_post_adjust,
991 restore_sp_pre_adjust,
992 restore_sp_partial_adjust,
993 restore_a1 } strategy;
994 unsigned int this_strategy_size, strategy_size = (unsigned)-1;
996 /* Consider using sp offsets before adjusting sp. */
997 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
998 this_strategy_size = SIZE_FMOV_SP (size, num_regs_to_save);
999 /* If size is too large, we'll have to adjust SP with an
1001 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1003 /* Insn: add size + 4 * num_regs_to_save, sp. */
1004 this_strategy_size += SIZE_ADD_SP (size + 4 * num_regs_to_save);
1006 /* If we don't have to restore any non-FP registers,
1007 we'll be able to save one byte by using rets. */
1008 if (! REG_SAVE_BYTES)
1009 this_strategy_size--;
1011 if (this_strategy_size < strategy_size)
1013 strategy = restore_sp_post_adjust;
1014 strategy_size = this_strategy_size;
1017 /* Consider using sp offsets after adjusting sp. */
1018 /* Insn: add size, sp. */
1019 this_strategy_size = SIZE_ADD_SP (size);
1020 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1021 this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
1022 /* We're going to use ret to release the FP registers
1023 save area, so, no savings. */
1025 if (this_strategy_size < strategy_size)
1027 strategy = restore_sp_pre_adjust;
1028 strategy_size = this_strategy_size;
1031 /* Consider using sp offsets after partially adjusting sp.
1032 When size is close to 32Kb, we may be able to adjust SP
1033 with an imm16 add instruction while still using fmov
1035 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1037 /* Insn: add size + 4 * num_regs_to_save
1038 + REG_SAVE_BYTES - 252,sp. */
1039 this_strategy_size = SIZE_ADD_SP (size + 4 * num_regs_to_save
1040 + REG_SAVE_BYTES - 252);
1041 /* Insn: fmov (##,sp),fs#, fo each fs# to be restored. */
1042 this_strategy_size += SIZE_FMOV_SP (252 - REG_SAVE_BYTES
1043 - 4 * num_regs_to_save,
1045 /* We're going to use ret to release the FP registers
1046 save area, so, no savings. */
1048 if (this_strategy_size < strategy_size)
1050 strategy = restore_sp_partial_adjust;
1051 strategy_size = this_strategy_size;
1055 /* Consider using a1 in post-increment mode, as long as the
1056 user hasn't changed the calling conventions of a1. */
1057 if (call_really_used_regs [FIRST_ADDRESS_REGNUM + 1]
1058 && ! fixed_regs[FIRST_ADDRESS_REGNUM+1])
1060 /* Insn: mov sp,a1. */
1061 this_strategy_size = 1;
1064 /* Insn: add size,a1. */
1065 this_strategy_size += SIZE_ADD_AX (size);
1067 /* Insn: fmov (a1+),fs#, for each fs# to be restored. */
1068 this_strategy_size += 3 * num_regs_to_save;
1069 /* If size is large enough, we may be able to save a
1071 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1073 /* Insn: mov a1,sp. */
1074 this_strategy_size += 2;
1076 /* If we don't have to restore any non-FP registers,
1077 we'll be able to save one byte by using rets. */
1078 if (! REG_SAVE_BYTES)
1079 this_strategy_size--;
1081 if (this_strategy_size < strategy_size)
1083 strategy = restore_a1;
1084 strategy_size = this_strategy_size;
1090 case restore_sp_post_adjust:
1093 case restore_sp_pre_adjust:
1094 emit_insn (gen_addsi3 (stack_pointer_rtx,
1100 case restore_sp_partial_adjust:
1101 emit_insn (gen_addsi3 (stack_pointer_rtx,
1103 GEN_INT (size + 4 * num_regs_to_save
1104 + REG_SAVE_BYTES - 252)));
1105 size = 252 - REG_SAVE_BYTES - 4 * num_regs_to_save;
1109 reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM + 1);
1110 emit_insn (gen_movsi (reg, stack_pointer_rtx));
1112 emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
1120 /* Adjust the selected register, if any, for post-increment. */
1122 reg = gen_rtx_POST_INC (SImode, reg);
1124 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
1125 if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
1133 /* If we aren't using a post-increment register, use an
1135 addr = gen_rtx_PLUS (SImode,
1140 addr = stack_pointer_rtx;
1144 emit_insn (gen_movsi (gen_rtx_REG (SImode, i),
1145 gen_rtx_MEM (SImode, addr)));
1148 /* If we were using the restore_a1 strategy and the number of
1149 bytes to be released won't fit in the `ret' byte, copy `a1'
1150 to `sp', to avoid having to use `add' to adjust it. */
1151 if (! frame_pointer_needed && reg && size + REG_SAVE_BYTES > 255)
1153 emit_move_insn (stack_pointer_rtx, XEXP (reg, 0));
1158 /* Maybe cut back the stack, except for the register save area.
1160 If the frame pointer exists, then use the frame pointer to
1163 If the stack size + register save area is more than 255 bytes,
1164 then the stack must be cut back here since the size + register
1165 save size is too big for a ret/retf instruction.
1167 Else leave it alone, it will be cut back as part of the
1168 ret/retf instruction, or there wasn't any stack to begin with.
1170 Under no circumstances should the register save area be
1171 deallocated here, that would leave a window where an interrupt
1172 could occur and trash the register save area. */
1173 if (frame_pointer_needed)
1175 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1178 else if (size + REG_SAVE_BYTES > 255)
1180 emit_insn (gen_addsi3 (stack_pointer_rtx,
1186 /* Adjust the stack and restore callee-saved registers, if any. */
1187 if (size || df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1188 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1189 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1190 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1191 || frame_pointer_needed)
1192 emit_jump_insn (gen_return_internal_regs
1193 (GEN_INT (size + REG_SAVE_BYTES)));
1195 emit_jump_insn (gen_return_internal ());
/* NOTICE_UPDATE_CC worker: update the cc_status tracking after INSN,
   dispatching on the insn's "cc" attribute.  Visible behaviors: leave CC
   alone; invalidate value1 when operand 0 overlaps it; record operand 0
   with carry (and possibly overflow) flagged unusable; record a COMPARE
   as value1 and set the FP-compare flag for SFmode compares.
   NOTE(review): the case labels (CC_NONE/CC_SET_ZN/... -- TODO confirm),
   CC_STATUS_INIT calls and break statements are missing from this
   listing.  */
1198 /* Update the condition code from the insn. */
1201 notice_update_cc (rtx body, rtx insn)
1203 switch (get_attr_cc (insn))
1206 /* Insn does not affect CC at all. */
1210 /* Insn does not change CC, but the 0'th operand has been changed. */
1211 if (cc_status.value1 != 0
1212 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
1213 cc_status.value1 = 0;
1217 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
1218 V,C are unusable. */
1220 cc_status.flags |= CC_NO_CARRY | CC_OVERFLOW_UNUSABLE;
1221 cc_status.value1 = recog_data.operand[0];
1225 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
1228 cc_status.flags |= CC_NO_CARRY;
1229 cc_status.value1 = recog_data.operand[0];
1233 /* The insn is a compare instruction. */
1235 cc_status.value1 = SET_SRC (body);
/* Remember FP compares so print_operand emits FP condition mnemonics.  */
1236 if (GET_CODE (cc_status.value1) == COMPARE
1237 && GET_MODE (XEXP (cc_status.value1, 0)) == SFmode)
1238 cc_status.mdep.fpCC = 1;
1242 /* Insn doesn't leave CC in a usable state. */
/* Predicate for MATCH_PARALLEL: validate that OP is the PARALLEL shape
   produced by mn10300_gen_multiple_store (SP decrement of (count-1)*4
   followed by descending-register stores at descending offsets) and
   return the register mask, or 0 on any mismatch.  NOTE(review): the
   `return 0' statements after each failed check, the minimum-count
   check, and the final `return mask' are missing from this listing.  */
1251 /* Recognize the PARALLEL rtx generated by mn10300_gen_multiple_store().
1252 This function is for MATCH_PARALLEL and so assumes OP is known to be
1253 parallel. If OP is a multiple store, return a mask indicating which
1254 registers it saves. Return 0 otherwise. */
1257 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1265 count = XVECLEN (op, 0);
1269 /* Check that first instruction has the form (set (sp) (plus A B)) */
1270 elt = XVECEXP (op, 0, 0);
1271 if (GET_CODE (elt) != SET
1272 || GET_CODE (SET_DEST (elt)) != REG
1273 || REGNO (SET_DEST (elt)) != STACK_POINTER_REGNUM
1274 || GET_CODE (SET_SRC (elt)) != PLUS)
1277 /* Check that A is the stack pointer and B is the expected stack size.
1278 For OP to match, each subsequent instruction should push a word onto
1279 the stack. We therefore expect the first instruction to create
1280 COUNT-1 stack slots. */
1281 elt = SET_SRC (elt);
1282 if (GET_CODE (XEXP (elt, 0)) != REG
1283 || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
1284 || GET_CODE (XEXP (elt, 1)) != CONST_INT
1285 || INTVAL (XEXP (elt, 1)) != -(count - 1) * 4)
1288 /* Now go through the rest of the vector elements. They must be
1289 ordered so that the first instruction stores the highest-numbered
1290 register to the highest stack slot and that subsequent instructions
1291 store a lower-numbered register to the slot below.
1293 LAST keeps track of the smallest-numbered register stored so far.
1294 MASK is the set of stored registers. */
1295 last = LAST_EXTENDED_REGNUM + 1;
1297 for (i = 1; i < count; i++)
1299 /* Check that element i is a (set (mem M) R) and that R is valid. */
1300 elt = XVECEXP (op, 0, i);
1301 if (GET_CODE (elt) != SET
1302 || GET_CODE (SET_DEST (elt)) != MEM
1303 || GET_CODE (SET_SRC (elt)) != REG
1304 || REGNO (SET_SRC (elt)) >= last)
1307 /* R was OK, so provisionally add it to MASK. We return 0 in any
1308 case if the rest of the instruction has a flaw. */
1309 last = REGNO (SET_SRC (elt));
1310 mask |= (1 << last);
1312 /* Check that M has the form (plus (sp) (const_int -I*4)) */
1313 elt = XEXP (SET_DEST (elt), 0);
1314 if (GET_CODE (elt) != PLUS
1315 || GET_CODE (XEXP (elt, 0)) != REG
1316 || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
1317 || GET_CODE (XEXP (elt, 1)) != CONST_INT
1318 || INTVAL (XEXP (elt, 1)) != -i * 4)
1322 /* All or none of the callee-saved extended registers must be in the set. */
1323 if ((mask & 0x3c000) != 0
1324 && (mask & 0x3c000) != 0x3c000)
/* Determine the secondary-reload register class needed to move IN (mode
   MODE) into class RCLASS: sub-word memory loads into address/SP classes
   need a data register intermediary; SP-relative values into non-address
   classes need an address register; FP-register loads from constant
   addresses need a data (or data/extended) register.  NOTE(review): the
   declaration of `inner', the TARGET_AM33 guard around line 1357, part
   of the FP_REGS condition at line 1381, and the final `return NO_REGS'
   are missing from this listing.  */
1330 /* What (if any) secondary registers are needed to move IN with mode
1331 MODE into a register in register class RCLASS.
1333 We might be able to simplify this. */
1335 mn10300_secondary_reload_class (enum reg_class rclass, enum machine_mode mode,
1340 /* Strip off any SUBREG expressions from IN. Basically we want
1341 to know if IN is a pseudo or (subreg (pseudo)) as those can
1342 turn into MEMs during reload. */
1343 while (GET_CODE (inner) == SUBREG)
1344 inner = SUBREG_REG (inner);
1346 /* Memory loads less than a full word wide can't have an
1347 address or stack pointer destination. They must use
1348 a data register as an intermediate register. */
1349 if ((GET_CODE (in) == MEM
1350 || (GET_CODE (inner) == REG
1351 && REGNO (inner) >= FIRST_PSEUDO_REGISTER))
1352 && (mode == QImode || mode == HImode)
1353 && (rclass == ADDRESS_REGS || rclass == SP_REGS
1354 || rclass == SP_OR_ADDRESS_REGS))
1357 return DATA_OR_EXTENDED_REGS;
1361 /* We can't directly load sp + const_int into a data register;
1362 we must use an address register as an intermediate. */
1363 if (rclass != SP_REGS
1364 && rclass != ADDRESS_REGS
1365 && rclass != SP_OR_ADDRESS_REGS
1366 && rclass != SP_OR_EXTENDED_REGS
1367 && rclass != ADDRESS_OR_EXTENDED_REGS
1368 && rclass != SP_OR_ADDRESS_OR_EXTENDED_REGS
1369 && (in == stack_pointer_rtx
1370 || (GET_CODE (in) == PLUS
1371 && (XEXP (in, 0) == stack_pointer_rtx
1372 || XEXP (in, 1) == stack_pointer_rtx))))
1373 return ADDRESS_REGS;
1375 if (GET_CODE (in) == PLUS
1376 && (XEXP (in, 0) == stack_pointer_rtx
1377 || XEXP (in, 1) == stack_pointer_rtx))
1378 return GENERAL_REGS;
1381 && rclass == FP_REGS
1383 /* We can't load directly into an FP register from a
1384 constant address. */
1385 if (GET_CODE (in) == MEM
1386 && CONSTANT_ADDRESS_P (XEXP (in, 0)))
1387 return (TARGET_AM33 ? DATA_OR_EXTENDED_REGS : DATA_REGS);
1389 /* Handle case were a pseudo may not get a hard register
1390 but has an equivalent memory location defined. */
1391 if (GET_CODE (inner) == REG
1392 && REGNO (inner) >= FIRST_PSEUDO_REGISTER
1393 && reg_equiv_mem [REGNO (inner)]
1394 && CONSTANT_ADDRESS_P (XEXP (reg_equiv_mem [REGNO (inner)], 0)))
1395 return (TARGET_AM33 ? DATA_OR_EXTENDED_REGS : DATA_REGS);
1398 /* Otherwise assume no secondary reloads are needed. */
1398 /* Otherwise assume no secondary reloads are needed. */
1403 initial_offset (int from, int to)
1405 /* The difference between the argument pointer and the frame pointer
1406 is the size of the callee register save area. */
1407 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
1409 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1410 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1411 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1412 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1413 || fp_regs_to_save ()
1414 || frame_pointer_needed)
1415 return REG_SAVE_BYTES
1416 + 4 * fp_regs_to_save ();
1421 /* The difference between the argument pointer and the stack pointer is
1422 the sum of the size of this function's frame, the callee register save
1423 area, and the fixed stack space needed for function calls (if any). */
1424 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1426 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1427 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1428 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1429 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1430 || fp_regs_to_save ()
1431 || frame_pointer_needed)
1432 return (get_frame_size () + REG_SAVE_BYTES
1433 + 4 * fp_regs_to_save ()
1434 + (crtl->outgoing_args_size
1435 ? crtl->outgoing_args_size + 4 : 0));
1437 return (get_frame_size ()
1438 + (crtl->outgoing_args_size
1439 ? crtl->outgoing_args_size + 4 : 0));
1442 /* The difference between the frame pointer and stack pointer is the sum
1443 of the size of this function's frame and the fixed stack space needed
1444 for function calls (if any). */
1445 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1446 return (get_frame_size ()
1447 + (crtl->outgoing_args_size
1448 ? crtl->outgoing_args_size + 4 : 0));
1453 /* Worker function for TARGET_RETURN_IN_MEMORY.  */
/* TYPE is returned in memory when larger than 8 bytes, when its size
   is reported as 0 (int_size_in_bytes; presumably non-constant size --
   confirm), or when its mode is BLKmode.  FNTYPE is unused.  */
1456 mn10300_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1458 /* Return values > 8 bytes in length in memory. */
1459 return (int_size_in_bytes (type) > 8
1460 || int_size_in_bytes (type) == 0
1461 || TYPE_MODE (type) == BLKmode);
1464 /* Flush the argument registers to the stack for a stdarg function;
1465 return the new argument pointer. */
/* ARGADJ skips one word when the function's last fixed argument list
   does not end in void_type_node (i.e. an unprototyped/varargs tail).
   NOTE(review): the declarations of `offset' and `mem', and the
   conditional around lines 1478/1480, are missing from this extract.  */
1467 mn10300_builtin_saveregs (void)
1470 tree fntype = TREE_TYPE (current_function_decl);
1471 int argadj = ((!(TYPE_ARG_TYPES (fntype) != 0
1472 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1473 != void_type_node)))
1474 ? UNITS_PER_WORD : 0);
1475 alias_set_type set = get_varargs_alias_set ();
1478 offset = plus_constant (crtl->args.arg_offset_rtx, argadj);
1480 offset = crtl->args.arg_offset_rtx;
/* Spill the two SImode argument registers (regnos 0 and 1) to the
   first two words at the incoming argument pointer, tagging both
   stores with the varargs alias set.  */
1482 mem = gen_rtx_MEM (SImode, crtl->args.internal_arg_pointer);
1483 set_mem_alias_set (mem, set);
1484 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
1486 mem = gen_rtx_MEM (SImode,
1487 plus_constant (crtl->args.internal_arg_pointer, 4));
1488 set_mem_alias_set (mem, set);
1489 emit_move_insn (mem, gen_rtx_REG (SImode, 1));
/* The new argument pointer is the incoming one plus OFFSET, copied
   into a fresh pseudo.  */
1491 return copy_to_reg (expand_binop (Pmode, add_optab,
1492 crtl->args.internal_arg_pointer,
1493 offset, 0, 0, OPTAB_LIB_WIDEN));
/* va_start expander: flush the argument registers to the stack via
   expand_builtin_saveregs, then fall back on the standard va_start
   expansion.  The incoming NEXTARG is deliberately overwritten.  */
1497 mn10300_va_start (tree valist, rtx nextarg)
1499 nextarg = expand_builtin_saveregs ();
1500 std_expand_builtin_va_start (valist, nextarg);
1503 /* Return true when a parameter should be passed by reference. */
/* An argument goes by reference when its size exceeds 8 bytes or is 0.
   NOTE(review): the `if (type) ... else ...' framing around lines
   1513/1515 is missing from this extract -- presumably TYPE selects
   int_size_in_bytes, otherwise the mode size is used; confirm.  */
1506 mn10300_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1507 enum machine_mode mode, const_tree type,
1508 bool named ATTRIBUTE_UNUSED)
1510 unsigned HOST_WIDE_INT size;
1513 size = int_size_in_bytes (type);
1515 size = GET_MODE_SIZE (mode);
1517 return (size > 8 || size == 0);
1520 /* Return an RTX to represent where a value with mode MODE will be returned
1521 from a function. If the result is 0, the argument is pushed. */
/* Argument-passing worker: the first two words of arguments may go in
   data registers 0 and 1 (see the switch on cum->nbytes below); all
   later or split arguments are pushed.  NOTE(review): declarations of
   `result', `size' and `nregs', the alignment computation at 1540-1541,
   the early `return 0' arms, and the closing of the comment opened at
   line 1544 are missing from this extract.  */
1524 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1525 tree type, int named ATTRIBUTE_UNUSED)
1530 /* We only support using 2 data registers as argument registers. */
1533 /* Figure out the size of the object to be passed. */
1534 if (mode == BLKmode)
1535 size = int_size_in_bytes (type);
1537 size = GET_MODE_SIZE (mode);
1539 /* Figure out the alignment of the object to be passed. */
1542 cum->nbytes = (cum->nbytes + 3) & ~3;
1544 /* Don't pass this arg via a register if all the argument registers
1546 if (cum->nbytes > nregs * UNITS_PER_WORD)
1549 /* Don't pass this arg via a register if it would be split between
1550 registers and memory. */
1551 if (type == NULL_TREE
1552 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1555 switch (cum->nbytes / UNITS_PER_WORD)
1558 result = gen_rtx_REG (mode, 0);
1561 result = gen_rtx_REG (mode, 1);
1570 /* Return the number of bytes of registers to use for an argument passed
1571 partially in registers and partially in memory. */
/* Mirrors function_arg's decisions: returns 0 (missing arms) when the
   argument is wholly in registers or wholly in memory, otherwise the
   register-resident prefix in bytes.  NOTE(review): declarations of
   `size'/`nregs', the if/else framing, the `return 0' arms, and the
   closing of the comment opened at line 1593 are missing here.  */
1574 mn10300_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1575 tree type, bool named ATTRIBUTE_UNUSED)
1579 /* We only support using 2 data registers as argument registers. */
1582 /* Figure out the size of the object to be passed. */
1583 if (mode == BLKmode)
1584 size = int_size_in_bytes (type);
1586 size = GET_MODE_SIZE (mode);
1588 /* Figure out the alignment of the object to be passed. */
1591 cum->nbytes = (cum->nbytes + 3) & ~3;
1593 /* Don't pass this arg via a register if all the argument registers
1595 if (cum->nbytes > nregs * UNITS_PER_WORD)
1598 if (cum->nbytes + size <= nregs * UNITS_PER_WORD)
1601 /* Don't pass this arg via a register if it would be split between
1602 registers and memory. */
1603 if (type == NULL_TREE
1604 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1607 return nregs * UNITS_PER_WORD - cum->nbytes;
1610 /* Return the location of the function's value. This will be either
1611 $d0 for integer functions, $a0 for pointers, or a PARALLEL of both
1612 $d0 and $a0 if the -mreturn-pointer-on-do flag is set. Note that
1613 we only return the PARALLEL for outgoing values; we do not want
1614 callers relying on this extra copy. */
/* Non-pointer values come back in FIRST_DATA_REGNUM; pointers in
   FIRST_ADDRESS_REGNUM unless TARGET_PTR_A0D0 and OUTGOING ask for the
   two-register PARALLEL built below.  NOTE(review): the declaration of
   `rv', the XVECEXP destinations of the two assignments, and the final
   `return rv' are missing from this extract.  */
1617 mn10300_function_value (const_tree valtype, const_tree func, int outgoing)
1620 enum machine_mode mode = TYPE_MODE (valtype);
1622 if (! POINTER_TYPE_P (valtype))
1623 return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
1624 else if (! TARGET_PTR_A0D0 || ! outgoing
1625 || cfun->returns_struct)
1626 return gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM);
1628 rv = gen_rtx_PARALLEL (mode, rtvec_alloc (2));
1630 = gen_rtx_EXPR_LIST (VOIDmode,
1631 gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM),
1635 = gen_rtx_EXPR_LIST (VOIDmode,
1636 gen_rtx_REG (mode, FIRST_DATA_REGNUM),
1641 /* Output a tst insn. */
/* Emit the assembly for a tst of OPERAND at INSN.  As a size
   optimization, scan backwards from INSN for a register already known
   to hold zero; if one is found (same class for non-extended operands,
   or any extended register for extended operands) emit `cmp zreg,op'
   instead of the default pattern.  The scan stops at labels, jumps and
   barriers, records calls so call-clobbered registers can be rejected,
   and skips notes and non-single-set insns.  NOTE(review): several
   interior lines (locals, braces, the CALL_INSN bookkeeping, the
   past_call tests at 1697/1716, and the fallback return) are missing
   from this extract.  */
1643 output_tst (rtx operand, rtx insn)
1648 /* We can save a byte if we can find a register which has the value
1650 temp = PREV_INSN (insn);
1651 while (optimize && temp)
1655 /* We allow the search to go through call insns. We record
1656 the fact that we've past a CALL_INSN and reject matches which
1657 use call clobbered registers. */
1658 if (GET_CODE (temp) == CODE_LABEL
1659 || GET_CODE (temp) == JUMP_INSN
1660 || GET_CODE (temp) == BARRIER)
1663 if (GET_CODE (temp) == CALL_INSN)
1666 if (GET_CODE (temp) == NOTE)
1668 temp = PREV_INSN (temp);
1672 /* It must be an insn, see if it is a simple set. */
1673 set = single_set (temp);
1676 temp = PREV_INSN (temp);
1680 /* Are we setting a data register to zero (this does not win for
1683 If it's a call clobbered register, have we past a call?
1685 Make sure the register we find isn't the same as ourself;
1686 the mn10300 can't encode that.
1688 ??? reg_set_between_p return nonzero anytime we pass a CALL_INSN
1689 so the code to detect calls here isn't doing anything useful. */
1690 if (REG_P (SET_DEST (set))
1691 && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
1692 && !reg_set_between_p (SET_DEST (set), temp, insn)
1693 && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
1694 == REGNO_REG_CLASS (REGNO (operand)))
1695 && REGNO_REG_CLASS (REGNO (SET_DEST (set))) != EXTENDED_REGS
1696 && REGNO (SET_DEST (set)) != REGNO (operand)
1698 || ! call_really_used_regs [REGNO (SET_DEST (set))]))
1701 xoperands[0] = operand;
1702 xoperands[1] = SET_DEST (set);
1704 output_asm_insn ("cmp %1,%0", xoperands);
1708 if (REGNO_REG_CLASS (REGNO (operand)) == EXTENDED_REGS
1709 && REG_P (SET_DEST (set))
1710 && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
1711 && !reg_set_between_p (SET_DEST (set), temp, insn)
1712 && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
1713 != REGNO_REG_CLASS (REGNO (operand)))
1714 && REGNO_REG_CLASS (REGNO (SET_DEST (set))) == EXTENDED_REGS
1715 && REGNO (SET_DEST (set)) != REGNO (operand)
1717 || ! call_really_used_regs [REGNO (SET_DEST (set))]))
1720 xoperands[0] = operand;
1721 xoperands[1] = SET_DEST (set);
1723 output_asm_insn ("cmp %1,%0", xoperands);
1726 temp = PREV_INSN (temp);
/* Predicate: recognize a PLUS rtx with the stack pointer as either
   operand.  NOTE(review): the return type and the `return 0'/`return 1'
   lines are missing from this extract; presumably it returns nonzero
   for the sp-involving PLUS case -- confirm against the full file.  */
1732 impossible_plus_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1734 if (GET_CODE (op) != PLUS)
1737 if (XEXP (op, 0) == stack_pointer_rtx
1738 || XEXP (op, 1) == stack_pointer_rtx)
1744 /* Similarly, but when using a zero_extract pattern for a btst where
1745 the source operand might end up in memory. */
/* Test whether a LEN-bit field starting at BIT yields a mask usable by
   a memory btst: the mask must occupy a single byte lane of the word.
   NOTE(review): the loop that builds `mask' (lines 1750-1757) is
   missing from this extract.  */
1747 mask_ok_for_mem_btst (int len, int bit)
1749 unsigned int mask = 0;
1758 /* MASK must fit into an 8bit value. */
1759 return (((mask & 0xff) == mask)
1760 || ((mask & 0xff00) == mask)
1761 || ((mask & 0xff0000) == mask)
1762 || ((mask & 0xff000000) == mask));
1765 /* Return 1 if X contains a symbolic expression. We know these
1766 expressions will have one of a few well defined forms, so
1767 we need only check those forms. */
/* NOTE(review): the switch's SYMBOL_REF/LABEL_REF/CONST case labels
   and the default return are missing from this extract; only the
   CONST (PLUS sym const_int) arm is visible.  */
1769 symbolic_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1771 switch (GET_CODE (op))
1778 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1779 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1780 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1786 /* Try machine dependent ways of modifying an illegitimate address
1787 to be legitimate. If we find one, return the new valid address.
1788 This macro is used in only one place: `memory_address' in explow.c.
1790 OLDX is the address as it was before break_out_memory_refs was called.
1791 In some cases it is useful to look at this to decide what needs to be done.
1793 MODE and WIN are passed so that this macro can use
1794 GO_IF_LEGITIMATE_ADDRESS.
1796 Normally it is always safe for this macro to do nothing. It exists to
1797 recognize opportunities to optimize the output.
1799 But on a few ports with segmented architectures and indexed addressing
1800 (mn10300, hppa) it is used to rewrite certain problematical addresses. */
1802 mn10300_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
1803 enum machine_mode mode ATTRIBUTE_UNUSED)
/* NOTE(review): this legitimizes OLDX rather than X even though OLDX
   is marked ATTRIBUTE_UNUSED -- looks suspicious; confirm intent.  */
1805 if (flag_pic && ! legitimate_pic_operand_p (x))
1806 x = legitimize_pic_address (oldx, NULL_RTX);
1808 /* Uh-oh. We might have an address for x[n-100000]. This needs
1809 special handling to avoid creating an indexed memory address
1810 with x-100000 as the base. */
1811 if (GET_CODE (x) == PLUS
1812 && symbolic_operand (XEXP (x, 1), VOIDmode))
1814 /* Ugly. We modify things here so that the address offset specified
1815 by the index expression is computed first, then added to x to form
1816 the entire address. */
1818 rtx regx1, regy1, regy2, y;
1820 /* Strip off any CONST. */
/* NOTE(review): the assignment initializing `y' (line 1821) is
   missing from this extract; presumably y = XEXP (x, 1).  */
1822 if (GET_CODE (y) == CONST)
1825 if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
1827 regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
1828 regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
1829 regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
1830 regx1 = force_reg (Pmode,
1831 gen_rtx_fmt_ee (GET_CODE (y), Pmode, regx1, regy2));
1832 return force_reg (Pmode, gen_rtx_PLUS (Pmode, regx1, regy1));
1838 /* Convert a non-PIC address in `orig' to a PIC address using @GOT or
1839 @GOTOFF in `reg'. */
/* Local symbols (labels, constant-pool entries, non-global symbols per
   MN10300_GLOBAL_P) get the cheaper @GOTOFF form; other SYMBOL_REFs go
   through the GOT.  A scratch REG is allocated when none is supplied.
   NOTE(review): braces and the final return are missing from this
   extract.  */
1841 legitimize_pic_address (rtx orig, rtx reg)
1843 if (GET_CODE (orig) == LABEL_REF
1844 || (GET_CODE (orig) == SYMBOL_REF
1845 && (CONSTANT_POOL_ADDRESS_P (orig)
1846 || ! MN10300_GLOBAL_P (orig))))
1849 reg = gen_reg_rtx (Pmode);
1851 emit_insn (gen_symGOTOFF2reg (reg, orig));
1854 else if (GET_CODE (orig) == SYMBOL_REF)
1857 reg = gen_reg_rtx (Pmode);
1859 emit_insn (gen_symGOT2reg (reg, orig));
1865 /* Return zero if X references a SYMBOL_REF or LABEL_REF whose symbol
1866 isn't protected by a PIC unspec; nonzero otherwise. */
/* Recursive walk: bare SYMBOL_REF/LABEL_REF fails; any of the listed
   PIC unspecs succeeds immediately; otherwise recurse over every `e'
   and `E' operand via the rtx format string.  NOTE(review): the `int
   i, j' declarations, the early returns, and the closing return are
   missing from this extract.  */
1868 legitimate_pic_operand_p (rtx x)
1870 register const char *fmt;
1873 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1876 if (GET_CODE (x) == UNSPEC
1877 && (XINT (x, 1) == UNSPEC_PIC
1878 || XINT (x, 1) == UNSPEC_GOT
1879 || XINT (x, 1) == UNSPEC_GOTOFF
1880 || XINT (x, 1) == UNSPEC_PLT
1881 || XINT (x, 1) == UNSPEC_GOTSYM_OFF))
1884 fmt = GET_RTX_FORMAT (GET_CODE (x));
1885 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1891 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1892 if (! legitimate_pic_operand_p (XVECEXP (x, i, j)))
1895 else if (fmt[i] == 'e' && ! legitimate_pic_operand_p (XEXP (x, i)))
1902 /* Return TRUE if the address X, taken from a (MEM:MODE X) rtx, is
1903 legitimate, and FALSE otherwise. */
/* Accepts: a legitimate constant address; a base register; POST_INC of
   a base register for SImode/SFmode/HImode (guarded by a condition
   whose first line, 1914, is missing here -- presumably AM33 only);
   and base+index where the index is a CONST_INT or a non-PLUS CONST
   that passes the PIC check.  NOTE(review): several returns, braces
   and the line-1944 `!flag_pic ||' arm are missing from this
   extract.  */
1905 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
1907 if (CONSTANT_ADDRESS_P (x)
1908 && (! flag_pic || legitimate_pic_operand_p (x)))
1911 if (RTX_OK_FOR_BASE_P (x, strict))
1915 && GET_CODE (x) == POST_INC
1916 && RTX_OK_FOR_BASE_P (XEXP (x, 0), strict)
1917 && (mode == SImode || mode == SFmode || mode == HImode))
1920 if (GET_CODE (x) == PLUS)
1922 rtx base = 0, index = 0;
1924 if (REG_P (XEXP (x, 0))
1925 && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 0)), strict))
1928 index = XEXP (x, 1);
1931 if (REG_P (XEXP (x, 1))
1932 && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 1)), strict))
1935 index = XEXP (x, 0);
1938 if (base != 0 && index != 0)
1940 if (GET_CODE (index) == CONST_INT)
1942 if (GET_CODE (index) == CONST
1943 && GET_CODE (XEXP (index, 0)) != PLUS
1945 || legitimate_pic_operand_p (index)))
/* Recursive helper for mn10300_address_cost: return a cost for address
   rtx X.  *UNSIG tracks whether the context treats constants as
   unsigned, widening the cheap ranges in the CONST_INT case below.
   NOTE(review): the per-register-class cost returns inside the REG
   case, the case labels (PLUS, CONST, SIGN/ZERO_EXTEND presumably),
   and the final returns are missing from this extract.  */
1954 mn10300_address_cost_1 (rtx x, int *unsig)
1956 switch (GET_CODE (x))
1959 switch (REGNO_REG_CLASS (REGNO (x)))
1985 return (mn10300_address_cost_1 (XEXP (x, 0), unsig)
1986 + mn10300_address_cost_1 (XEXP (x, 1), unsig));
1991 return mn10300_address_cost (XEXP (x, 0), !optimize_size);
1995 return mn10300_address_cost_1 (XEXP (x, 0), unsig);
1998 if (INTVAL (x) == 0)
2000 if (INTVAL (x) + (*unsig ? 0 : 0x80) < 0x100)
2002 if (INTVAL (x) + (*unsig ? 0 : 0x8000) < 0x10000)
2004 if (INTVAL (x) + (*unsig ? 0 : 0x800000) < 0x1000000)
/* TARGET_ADDRESS_COST entry point: delegate to mn10300_address_cost_1
   with a local signedness flag.  NOTE(review): the declaration and
   initialization of `s' are missing from this extract.  */
2020 mn10300_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
2023 return mn10300_address_cost_1 (x, &s);
/* TARGET_RTX_COSTS worker: grade constants and operations by cost into
   *TOTAL.  Visible here is the CONST_INT tiering (free zero under SET,
   cheap 8-bit, base-cost 16-bit patterns) plus comments describing the
   CONST/SYMBOL_REF, CONST_DOUBLE and default tiers.  NOTE(review): the
   outer switch, the *TOTAL assignments, case labels and returns are
   missing from this extract.  */
2027 mn10300_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed ATTRIBUTE_UNUSED)
2032 /* Zeros are extremely cheap. */
2033 if (INTVAL (x) == 0 && outer_code == SET)
2035 /* If it fits in 8 bits, then it's still relatively cheap. */
2036 else if (INT_8_BITS (INTVAL (x)))
2038 /* This is the "base" cost, includes constants where either the
2039 upper or lower 16bits are all zeros. */
2040 else if (INT_16_BITS (INTVAL (x))
2041 || (INTVAL (x) & 0xffff) == 0
2042 || (INTVAL (x) & 0xffff0000) == 0)
2051 /* These are more costly than a CONST_INT, but we can relax them,
2052 so they're less costly than a CONST_DOUBLE. */
2057 /* We don't optimize CONST_DOUBLEs well nor do we relax them well,
2058 so their cost is very high. */
2062 /* ??? This probably needs more work. */
2074 /* Check whether a constant used to initialize a DImode or DFmode can
2075 use a clr instruction. The code here must be kept in sync with
2079 mn10300_wide_const_load_uses_clr (rtx operands[2])
/* Only DATA_REGS destinations qualify; split the 64-bit constant into
   two 32-bit halves in val[] (CONST_INT via split_double, CONST_DOUBLE
   via the real-value or DImode accessors) and report whether either
   half is zero.  NOTE(review): the `long val[2]' declaration, case
   labels, default arms and some returns are missing from this
   extract.  */
2083 if (GET_CODE (operands[0]) != REG
2084 || REGNO_REG_CLASS (REGNO (operands[0])) != DATA_REGS)
2087 switch (GET_CODE (operands[1]))
2092 split_double (operands[1], &low, &high);
2093 val[0] = INTVAL (low);
2094 val[1] = INTVAL (high);
2099 if (GET_MODE (operands[1]) == DFmode)
2103 REAL_VALUE_FROM_CONST_DOUBLE (rv, operands[1]);
2104 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
2106 else if (GET_MODE (operands[1]) == VOIDmode
2107 || GET_MODE (operands[1]) == DImode)
2109 val[0] = CONST_DOUBLE_LOW (operands[1]);
2110 val[1] = CONST_DOUBLE_HIGH (operands[1]);
2118 return val[0] == 0 || val[1] == 0;
2120 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
2121 may access it using GOTOFF instead of GOT. */
/* TARGET_ENCODE_SECTION_INFO worker: record in SYMBOL_REF_FLAG whether
   DECL binds locally, enabling the cheaper @GOTOFF access.
   NOTE(review): the `rtx symbol' declaration, early `return's and the
   default_encode_section_info call (if any) are missing from this
   extract.  */
2124 mn10300_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
2128 if (GET_CODE (rtl) != MEM)
2130 symbol = XEXP (rtl, 0);
2131 if (GET_CODE (symbol) != SYMBOL_REF)
2135 SYMBOL_REF_FLAG (symbol) = (*targetm.binds_local_p) (decl);
2138 /* Dispatch tables on the mn10300 are extremely expensive in terms of code
2139 and readonly data size. So we crank up the case threshold value to
2140 encourage a series of if/else comparisons to implement many small switch
2141 statements. In theory, this value could be increased much more if we
2142 were solely optimizing for space, but we keep it "reasonable" to avoid
2143 serious code efficiency lossage. */
2145 unsigned int mn10300_case_values_threshold (void)