1 /* Subroutines for insn-output.c for Matsushita MN10300 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
34 #include "insn-attr.h"
45 #include "target-def.h"
/* This is used by GOTaddr2picreg to uniquely identify the labels it
   emits.  NOTE(review): the tail of this comment is elided in this
   listing -- confirm the exact wording against the full source.  */
int mn10300_unspec_int_label_counter;

/* This is used in the am33_2.0-linux-gnu port, in which global symbol
   names are not prefixed by underscores, to tell whether to prefix a
   label with a plus sign or not, so that the assembler can tell
   symbol names from register names.  */
int mn10300_protect_label;

/* The selected processor.  */
enum processor_type mn10300_processor = PROCESSOR_DEFAULT;
/* The size of the callee register save area.  Right now we save everything
   on entry since it costs us nothing in code size.  It does cost us from a
   speed standpoint, so we want to optimize this sooner or later.

   Four bytes for each of registers 2, 3, 6 and 7 that is ever live,
   plus 16 bytes if any of registers 14..17 is ever live -- those four
   are saved and restored only as a group (see the 0x3c000 mask checks
   elsewhere in this file).  */
#define REG_SAVE_BYTES (4 * df_regs_ever_live_p (2) \
			+ 4 * df_regs_ever_live_p (3) \
			+ 4 * df_regs_ever_live_p (6) \
			+ 4 * df_regs_ever_live_p (7) \
			+ 16 * (df_regs_ever_live_p (14) || df_regs_ever_live_p (15) \
				|| df_regs_ever_live_p (16) || df_regs_ever_live_p (17)))
/* Forward declarations for the target-hook implementations defined
   later in this file.  NOTE(review): the trailing parameters of the
   last two prototypes are elided in this listing.  */
static bool mn10300_handle_option (size_t, const char *, int);
static int mn10300_address_cost_1 (rtx, int *);
static int mn10300_address_cost (rtx, bool);
static bool mn10300_rtx_costs (rtx, int, int, int *, bool);
static void mn10300_file_start (void);
static bool mn10300_return_in_memory (const_tree, const_tree);
static rtx mn10300_builtin_saveregs (void);
static void mn10300_va_start (tree, rtx);
static bool mn10300_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
static int mn10300_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
/* Initialize the GCC target structure.  Each #undef/#define pair below
   overrides one hook in the default TARGET_INITIALIZER.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS mn10300_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST mn10300_address_cost

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START mn10300_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS MASK_MULT_BUG | MASK_PTR_A0D0
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION mn10300_handle_option

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO mn10300_encode_section_info

/* Argument passing: promote small prototyped args, large return
   values go in memory, and callees copy pass-by-reference args.  */
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY mn10300_return_in_memory
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mn10300_pass_by_reference
#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES mn10300_arg_partial_bytes

#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS mn10300_builtin_saveregs
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START mn10300_va_start

static void mn10300_encode_section_info (tree, rtx, int);

/* The single global vector of target hooks.  */
struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_HANDLE_OPTION.  Select the target processor variant
   in response to command-line options.  NOTE(review): the switch on
   CODE, its case labels, braces and return statements are elided in
   this listing -- only the two assignments below survive.  */
mn10300_handle_option (size_t code,
		       const char *arg ATTRIBUTE_UNUSED,
	/* Plain AM33 vs. base MN10300 selection.  */
	mn10300_processor = value ? PROCESSOR_AM33 : PROCESSOR_MN10300;
	/* NOTE(review): the middle of this conditional expression is
	   elided; presumably the true arm selects an AM33-2 processor
	   -- confirm against the full source.  */
	mn10300_processor = (value
			     : MIN (PROCESSOR_AM33, PROCESSOR_DEFAULT));
/* Implement OVERRIDE_OPTIONS.  Adjust target flags after all options
   have been parsed.  NOTE(review): the condition guarding the clear of
   the MULT-bug workaround flag is elided in this listing -- presumably
   it tests for a processor that does not have the bug.  */
mn10300_override_options (void)
  target_flags &= ~MASK_MULT_BUG;
/* Implement TARGET_ASM_FILE_START: emit the standard file prologue,
   then tell the assembler which ISA extension to accept.  NOTE(review):
   the `if (TARGET_AM33_2)' guard before the first fprintf is elided in
   this listing, as are the braces.  */
mn10300_file_start (void)
  default_file_start ();
  fprintf (asm_out_file, "\t.am33_2\n");
  else if (TARGET_AM33)
    fprintf (asm_out_file, "\t.am33\n");
/* Print operand X using operand code CODE to assembly language output file
   FILE.  NOTE(review): this listing elides many lines of the original --
   the outer switch on CODE, case labels, braces and local declarations.
   The surviving statements are kept verbatim, grouped by the operand
   code they appear to serve.  */
print_operand (FILE *file, rtx x, int code)
      /* Condition-code operands ('b' prints the condition as-is, the
	 other code prints its reverse).  With FP condition codes live,
	 unordered comparisons must be honoured when reversing.  */
      if (cc_status.mdep.fpCC)
	  switch (code == 'b' ? GET_CODE (x)
		  : reverse_condition_maybe_unordered (GET_CODE (x)))
	      fprintf (file, "ne");
	      fprintf (file, "eq");
	      fprintf (file, "ge");
	      fprintf (file, "gt");
	      fprintf (file, "le");
	      fprintf (file, "lt");
	      fprintf (file, "lge");
	      fprintf (file, "uo");
	      fprintf (file, "lg");
	      fprintf (file, "ue");
	      fprintf (file, "uge");
	      fprintf (file, "ug");
	      fprintf (file, "ule");
	      fprintf (file, "ul");
	  /* These are normal and reversed branches.  */
	  switch (code == 'b' ? GET_CODE (x) : reverse_condition (GET_CODE (x)))
	      fprintf (file, "ne");
	      fprintf (file, "eq");
	      fprintf (file, "ge");
	      fprintf (file, "gt");
	      fprintf (file, "le");
	      fprintf (file, "lt");
	      fprintf (file, "cc");
	      fprintf (file, "hi");
	      fprintf (file, "ls");
	      fprintf (file, "cs");
      /* This is used for the operand to a call instruction;
	 if it's a REG, enclose it in parens, else output
	 the operand normally.  */
      if (GET_CODE (x) == REG)
	  print_operand (file, x, 0);
	print_operand (file, x, 0);
      switch (GET_CODE (x))
	  output_address (XEXP (x, 0));
	  /* FP register pair: register numbering starts at 18 for the
	     "fd" names (NOTE(review): assumption from the -18 bias --
	     confirm against FIRST_FP_REGNUM).  */
	  fprintf (file, "fd%d", REGNO (x) - 18);
      /* These are the least significant word in a 64bit value.  */
      switch (GET_CODE (x))
	  output_address (XEXP (x, 0));
	  fprintf (file, "%s", reg_names[REGNO (x)]);
	  fprintf (file, "%s", reg_names[subreg_regno (x)]);
	  switch (GET_MODE (x))
	      /* Double-precision constant: print the low target word.  */
	      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	      REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
	      fprintf (file, "0x%lx", val[0]);
	      /* Single-precision constant.  */
	      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	      REAL_VALUE_TO_TARGET_SINGLE (rv, val[0]);
	      fprintf (file, "0x%lx", val[0]);
	      print_operand_address (file,
				     GEN_INT (CONST_DOUBLE_LOW (x)));
	  /* Integer constant: split and print the low word.  */
	  split_double (x, &low, &high);
	  fprintf (file, "%ld", (long)INTVAL (low));
      /* Similarly, but for the most significant word.  */
      switch (GET_CODE (x))
	  /* Address the second (high) word of the memory operand.  */
	  x = adjust_address (x, SImode, 4);
	  output_address (XEXP (x, 0));
	  fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	  fprintf (file, "%s", reg_names[subreg_regno (x) + 1]);
	  switch (GET_MODE (x))
	      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	      REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
	      fprintf (file, "0x%lx", val[1]);
	      print_operand_address (file,
				     GEN_INT (CONST_DOUBLE_HIGH (x)));
	  split_double (x, &low, &high);
	  fprintf (file, "%ld", (long)INTVAL (high));
      /* Force a (reg + 0) form so the assembler sees an address.  */
      if (GET_CODE (XEXP (x, 0)) == REG)
	output_address (gen_rtx_PLUS (SImode, XEXP (x, 0), const0_rtx));
	output_address (XEXP (x, 0));
      /* Low byte of a constant, complemented.  */
      gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
      fprintf (file, "%d", (int)((~INTVAL (x)) & 0xff));
      /* Low byte of a constant, unmodified.  */
      gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
      fprintf (file, "%d", (int)(INTVAL (x) & 0xff));
      /* For shift counts.  The hardware ignores the upper bits of
	 any immediate, but the assembler will flag an out of range
	 shift count as an error.  So we mask off the high bits
	 of the immediate here.  */
      if (GET_CODE (x) == CONST_INT)
	  fprintf (file, "%d", (int)(INTVAL (x) & 0x1f));
      /* Default operand output.  */
      switch (GET_CODE (x))
	  output_address (XEXP (x, 0));
	  fprintf (file, "%s", reg_names[REGNO (x)]);
	  fprintf (file, "%s", reg_names[subreg_regno (x)]);
	  /* This will only be single precision....  */
	      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	      REAL_VALUE_TO_TARGET_SINGLE (rv, val);
	      fprintf (file, "0x%lx", val);
	  print_operand_address (file, x);
/* Output assembly language output for the address ADDR to FILE.
   NOTE(review): case labels and braces are elided in this listing.  */
print_operand_address (FILE *file, rtx addr)
  switch (GET_CODE (addr))
      /* Post-increment and similar wrappers: print the inner address.  */
      print_operand_address (file, XEXP (addr, 0));
      print_operand (file, addr, 0);
      /* Base+index: whichever operand is a valid base register is the
	 base, the other is the index.  */
      if (REG_P (XEXP (addr, 0))
	  && REG_OK_FOR_BASE_P (XEXP (addr, 0)))
	base = XEXP (addr, 0), index = XEXP (addr, 1);
      else if (REG_P (XEXP (addr, 1))
	       && REG_OK_FOR_BASE_P (XEXP (addr, 1)))
	base = XEXP (addr, 1), index = XEXP (addr, 0);
      print_operand (file, index, 0);
      /* NOTE(review): stray double semicolon below, present in the
	 original -- harmless but worth cleaning up.  */
      print_operand (file, base, 0);;
      output_addr_const (file, addr);
      output_addr_const (file, addr);
/* Count the number of FP registers that have to be saved: those that
   are ever live in this function and not caller-saved.  NOTE(review):
   the local declarations, the count increment and the return statement
   are elided in this listing.  */
fp_regs_to_save (void)
  for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
    if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
/* Print a set of registers in the format required by "movm" and "ret".
   Register K is saved if bit K of MASK is set.  The data and address
   registers can be stored individually, but the extended registers cannot.
   We assume that the mask already takes that into account.  For instance,
   bits 14 to 17 must have the same value.
   NOTE(review): braces and separator-printing lines are elided in this
   listing.  */
mn10300_print_reg_list (FILE *file, int mask)
  for (i = 0; i < FIRST_EXTENDED_REGNUM; i++)
    if ((mask & (1 << i)) != 0)
	fputs (reg_names [i], file);
  /* Extended registers r14..r17 (mask 0x3c000) are all-or-nothing and
     are named collectively.  */
  if ((mask & 0x3c000) != 0)
      gcc_assert ((mask & 0x3c000) == 0x3c000);
      fputs ("exreg1", file);
/* Return nonzero if the function can use a simple `ret'-style return:
   reload must be complete and no callee-saved register (including all
   FP registers) may need saving, nor may a frame pointer be required.
   NOTE(review): at least one condition line (apparently involving
   SIZE -- why else compute it?) is elided in this listing.  */
can_use_return_insn (void)
  /* size includes the fixed stack space needed for function calls.  */
  int size = get_frame_size () + crtl->outgoing_args_size;
  /* And space for the return pointer.  */
  size += crtl->outgoing_args_size ? 4 : 0;
  return (reload_completed
	  && !df_regs_ever_live_p (2)
	  && !df_regs_ever_live_p (3)
	  && !df_regs_ever_live_p (6)
	  && !df_regs_ever_live_p (7)
	  && !df_regs_ever_live_p (14)
	  && !df_regs_ever_live_p (15)
	  && !df_regs_ever_live_p (16)
	  && !df_regs_ever_live_p (17)
	  && fp_regs_to_save () == 0
	  && !frame_pointer_needed);
/* Returns the set of live, callee-saved registers as a bitmask.  The
   callee-saved extended registers cannot be stored individually, so
   all of them will be included in the mask if any one of them is used.
   NOTE(review): the mask accumulation, the all-extended-regs widening
   and the return statement are elided in this listing.  */
mn10300_get_live_callee_saved_regs (void)
  for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
    if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
  /* If any extended register is live, include the whole group.  */
  if ((mask & 0x3c000) != 0)
/* Generate an instruction that pushes several registers onto the stack.
   Register K will be saved if bit K in MASK is set.  The function does
   nothing if MASK is zero.

   To be compatible with the "movm" instruction, the lowest-numbered
   register must be stored in the lowest slot.  If MASK is the set
   { R1,...,RN }, where R1...RN are ordered least first, the generated
   instruction will have the form:

       (set (reg:SI 9) (plus:SI (reg:SI 9) (const_int -N*4)))
       (set (mem:SI (plus:SI (reg:SI 9)
       ...
       (set (mem:SI (plus:SI (reg:SI 9)

   NOTE(review): local declarations, the early-exit on MASK == 0 and
   several loop-body lines are elided in this listing.  */
mn10300_gen_multiple_store (int mask)
  /* Count how many registers need to be saved.  */
  for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
    if ((mask & (1 << i)) != 0)
  /* We need one PARALLEL element to update the stack pointer and
     an additional element for each register that is stored.  */
  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count + 1));
  /* Create the instruction that updates the stack pointer.  */
    = gen_rtx_SET (SImode,
		   gen_rtx_PLUS (SImode,
		   GEN_INT (-count * 4)));
  /* Create each store, highest-numbered register first so the lowest
     register lands in the lowest slot.  */
  for (i = LAST_EXTENDED_REGNUM; i >= 0; i--)
    if ((mask & (1 << i)) != 0)
	rtx address = gen_rtx_PLUS (SImode,
				    GEN_INT (-pari * 4));
	XVECEXP(par, 0, pari)
	  = gen_rtx_SET (VOIDmode,
			 gen_rtx_MEM (SImode, address),
			 gen_rtx_REG (SImode, i));
  /* Mark the whole store group as frame-related for unwind info.  */
  par = emit_insn (par);
  RTX_FRAME_RELATED_P (par) = 1;
/* Expand the function prologue: save callee-saved general registers,
   pick the cheapest strategy for saving FP registers (AM33-2 only),
   allocate the frame and set up the frame pointer and PIC register.
   NOTE(review): this listing elides many lines -- braces, local
   declarations, two enum members (save_sp_no_merge and save_a0_merge
   are referenced below but their declarations are not visible), and
   parts of several emit calls.  The surviving lines are verbatim.  */
expand_prologue (void)
  /* SIZE includes the fixed stack space needed for function calls.  */
  size = get_frame_size () + crtl->outgoing_args_size;
  size += (crtl->outgoing_args_size ? 4 : 0);
  /* If we use any of the callee-saved registers, save them now.  */
  mn10300_gen_multiple_store (mn10300_get_live_callee_saved_regs ());
  if (TARGET_AM33_2 && fp_regs_to_save ())
      int num_regs_to_save = fp_regs_to_save (), i;
      enum { save_sp_merge,
	     save_sp_partial_merge,
	     save_a0_no_merge } strategy;
      unsigned int strategy_size = (unsigned)-1, this_strategy_size;
      /* We have several different strategies to save FP registers.
	 We can store them using SP offsets, which is beneficial if
	 there are just a few registers to save, or we can use `a0' in
	 post-increment mode (`a0' is the only call-clobbered address
	 register that is never used to pass information to a
	 function).  Furthermore, if we don't need a frame pointer, we
	 can merge the two SP adds into a single one, but this isn't
	 always beneficial; sometimes we can just split the two adds
	 so that we don't exceed a 16-bit constant size.  The code
	 below will select which strategy to use, so as to generate
	 smallest code.  Ties are broken in favor or shorter sequences
	 (in terms of number of instructions).  */
      /* Byte-size estimates for an `add' to an address register or SP,
	 and for a sequence of `fmov' stores at SP offsets.  */
#define SIZE_ADD_AX(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
			: (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 2)
#define SIZE_ADD_SP(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
			: (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 3)
#define SIZE_FMOV_LIMIT(S,N,L,SIZE1,SIZE2,ELSE) \
  (((S) >= (L)) ? (SIZE1) * (N) \
   : ((S) + 4 * (N) >= (L)) ? (((L) - (S)) / 4 * (SIZE2) \
			       + ((S) + 4 * (N) - (L)) / 4 * (SIZE1)) \
#define SIZE_FMOV_SP_(S,N) \
  (SIZE_FMOV_LIMIT ((S), (N), (1 << 24), 7, 6, \
   SIZE_FMOV_LIMIT ((S), (N), (1 << 8), 6, 4, \
		    (S) ? 4 * (N) : 3 + 4 * ((N) - 1))))
#define SIZE_FMOV_SP(S,N) (SIZE_FMOV_SP_ ((unsigned HOST_WIDE_INT)(S), (N)))
      /* Consider alternative save_sp_merge only if we don't need the
	 frame pointer and size is nonzero.  */
      if (! frame_pointer_needed && size)
	  /* Insn: add -(size + 4 * num_regs_to_save), sp.  */
	  this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
	  /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
	  this_strategy_size += SIZE_FMOV_SP (size, num_regs_to_save);
	  if (this_strategy_size < strategy_size)
	      strategy = save_sp_merge;
	      strategy_size = this_strategy_size;
      /* Consider alternative save_sp_no_merge unconditionally.  */
      /* Insn: add -4 * num_regs_to_save, sp.  */
      this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
      /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
      this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
	  /* Insn: add -size, sp.  */
	  this_strategy_size += SIZE_ADD_SP (-size);
      if (this_strategy_size < strategy_size)
	  strategy = save_sp_no_merge;
	  strategy_size = this_strategy_size;
      /* Consider alternative save_sp_partial_merge only if we don't
	 need a frame pointer and size is reasonably large.  */
      if (! frame_pointer_needed && size + 4 * num_regs_to_save > 128)
	  /* Insn: add -128, sp.  */
	  this_strategy_size = SIZE_ADD_SP (-128);
	  /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
	  this_strategy_size += SIZE_FMOV_SP (128 - 4 * num_regs_to_save,
	  /* Insn: add 128-size, sp.  */
	  this_strategy_size += SIZE_ADD_SP (128 - size);
	  if (this_strategy_size < strategy_size)
	      strategy = save_sp_partial_merge;
	      strategy_size = this_strategy_size;
      /* Consider alternative save_a0_merge only if we don't need a
	 frame pointer, size is nonzero and the user hasn't
	 changed the calling conventions of a0.  */
      if (! frame_pointer_needed && size
	  && call_really_used_regs [FIRST_ADDRESS_REGNUM]
	  && ! fixed_regs[FIRST_ADDRESS_REGNUM])
	  /* Insn: add -(size + 4 * num_regs_to_save), sp.  */
	  this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
	  /* Insn: mov sp, a0.  */
	  this_strategy_size++;
	  /* Insn: add size, a0.  */
	  this_strategy_size += SIZE_ADD_AX (size);
	  /* Insn: fmov fs#, (a0+), for each fs# to be saved.  */
	  this_strategy_size += 3 * num_regs_to_save;
	  if (this_strategy_size < strategy_size)
	      strategy = save_a0_merge;
	      strategy_size = this_strategy_size;
      /* Consider alternative save_a0_no_merge if the user hasn't
	 changed the calling conventions of a0.  */
      if (call_really_used_regs [FIRST_ADDRESS_REGNUM]
	  && ! fixed_regs[FIRST_ADDRESS_REGNUM])
	  /* Insn: add -4 * num_regs_to_save, sp.  */
	  this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
	  /* Insn: mov sp, a0.  */
	  this_strategy_size++;
	  /* Insn: fmov fs#, (a0+), for each fs# to be saved.  */
	  this_strategy_size += 3 * num_regs_to_save;
	  /* Insn: add -size, sp.  */
	  this_strategy_size += SIZE_ADD_SP (-size);
	  if (this_strategy_size < strategy_size)
	      strategy = save_a0_no_merge;
	      strategy_size = this_strategy_size;
      /* Emit the initial SP add, common to all strategies.  */
	case save_sp_no_merge:
	case save_a0_no_merge:
	  emit_insn (gen_addsi3 (stack_pointer_rtx,
				 GEN_INT (-4 * num_regs_to_save)));
	case save_sp_partial_merge:
	  emit_insn (gen_addsi3 (stack_pointer_rtx,
	  xsize = 128 - 4 * num_regs_to_save;
	  emit_insn (gen_addsi3 (stack_pointer_rtx,
				 GEN_INT (-(size + 4 * num_regs_to_save))));
	  /* We'll have to adjust FP register saves according to the
	  /* Since we've already created the stack frame, don't do it
	     again at the end of the function.  */
      /* Now prepare register a0, if we have decided to use it.  */
	case save_sp_no_merge:
	case save_sp_partial_merge:
	case save_a0_no_merge:
	  reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM);
	  emit_insn (gen_movsi (reg, stack_pointer_rtx));
	    emit_insn (gen_addsi3 (reg, reg, GEN_INT (xsize)));
	  reg = gen_rtx_POST_INC (SImode, reg);
      /* Now actually save the FP registers.  */
      for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
	if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
	    /* If we aren't using `a0', use an SP offset.  */
		addr = gen_rtx_PLUS (SImode,
	      addr = stack_pointer_rtx;
	    insn = emit_insn (gen_movsi (gen_rtx_MEM (SImode, addr),
					 gen_rtx_REG (SImode, i)));
	    RTX_FRAME_RELATED_P (insn) = 1;
  /* Now put the frame pointer into the frame pointer register.  */
  if (frame_pointer_needed)
    emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
  /* Allocate stack for this frame.  */
    emit_insn (gen_addsi3 (stack_pointer_rtx,
  /* Load the PIC register if this function uses it.  */
  if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
    emit_insn (gen_GOTaddr2picreg ());
/* Expand the function epilogue: restore FP registers via the cheapest
   strategy (AM33-2 only), cut back the stack, restore callee-saved
   general registers and emit the return.  NOTE(review): this listing
   elides many lines -- braces, declarations, switch heads and parts of
   several emit calls.  The surviving lines are verbatim.  */
expand_epilogue (void)
  /* SIZE includes the fixed stack space needed for function calls.  */
  size = get_frame_size () + crtl->outgoing_args_size;
  size += (crtl->outgoing_args_size ? 4 : 0);
  if (TARGET_AM33_2 && fp_regs_to_save ())
      int num_regs_to_save = fp_regs_to_save (), i;
      /* We have several options to restore FP registers.  We could
	 load them from SP offsets, but, if there are enough FP
	 registers to restore, we win if we use a post-increment
	 register.  */
      /* If we have a frame pointer, it's the best option, because we
	 already know it has the value we want.  */
      if (frame_pointer_needed)
	reg = gen_rtx_REG (SImode, FRAME_POINTER_REGNUM);
      /* Otherwise, we may use `a1', since it's call-clobbered and
	 it's never used for return values.  But only do so if it's
	 smaller than using SP offsets.  */
	  enum { restore_sp_post_adjust,
		 restore_sp_pre_adjust,
		 restore_sp_partial_adjust,
		 restore_a1 } strategy;
	  unsigned int this_strategy_size, strategy_size = (unsigned)-1;
	  /* Consider using sp offsets before adjusting sp.  */
	  /* Insn: fmov (##,sp),fs#, for each fs# to be restored.  */
	  this_strategy_size = SIZE_FMOV_SP (size, num_regs_to_save);
	  /* If size is too large, we'll have to adjust SP with an
	  if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
	      /* Insn: add size + 4 * num_regs_to_save, sp.  */
	      this_strategy_size += SIZE_ADD_SP (size + 4 * num_regs_to_save);
	  /* If we don't have to restore any non-FP registers,
	     we'll be able to save one byte by using rets.  */
	  if (! REG_SAVE_BYTES)
	    this_strategy_size--;
	  if (this_strategy_size < strategy_size)
	      strategy = restore_sp_post_adjust;
	      strategy_size = this_strategy_size;
	  /* Consider using sp offsets after adjusting sp.  */
	  /* Insn: add size, sp.  */
	  this_strategy_size = SIZE_ADD_SP (size);
	  /* Insn: fmov (##,sp),fs#, for each fs# to be restored.  */
	  this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
	  /* We're going to use ret to release the FP registers
	     save area, so, no savings.  */
	  if (this_strategy_size < strategy_size)
	      strategy = restore_sp_pre_adjust;
	      strategy_size = this_strategy_size;
	  /* Consider using sp offsets after partially adjusting sp.
	     When size is close to 32Kb, we may be able to adjust SP
	     with an imm16 add instruction while still using fmov
	     (d8,sp).  */
	  if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
	      /* Insn: add size + 4 * num_regs_to_save
			    + REG_SAVE_BYTES - 252,sp.  */
	      this_strategy_size = SIZE_ADD_SP (size + 4 * num_regs_to_save
						+ REG_SAVE_BYTES - 252);
	      /* Insn: fmov (##,sp),fs#, fo each fs# to be restored.  */
	      this_strategy_size += SIZE_FMOV_SP (252 - REG_SAVE_BYTES
						  - 4 * num_regs_to_save,
	      /* We're going to use ret to release the FP registers
		 save area, so, no savings.  */
	      if (this_strategy_size < strategy_size)
		  strategy = restore_sp_partial_adjust;
		  strategy_size = this_strategy_size;
	  /* Consider using a1 in post-increment mode, as long as the
	     user hasn't changed the calling conventions of a1.  */
	  if (call_really_used_regs [FIRST_ADDRESS_REGNUM + 1]
	      && ! fixed_regs[FIRST_ADDRESS_REGNUM+1])
	      /* Insn: mov sp,a1.  */
	      this_strategy_size = 1;
	      /* Insn: add size,a1.  */
	      this_strategy_size += SIZE_ADD_AX (size);
	      /* Insn: fmov (a1+),fs#, for each fs# to be restored.  */
	      this_strategy_size += 3 * num_regs_to_save;
	      /* If size is large enough, we may be able to save a
	      if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
		  /* Insn: mov a1,sp.  */
		  this_strategy_size += 2;
	      /* If we don't have to restore any non-FP registers,
		 we'll be able to save one byte by using rets.  */
	      if (! REG_SAVE_BYTES)
		this_strategy_size--;
	      if (this_strategy_size < strategy_size)
		  strategy = restore_a1;
		  strategy_size = this_strategy_size;
	    case restore_sp_post_adjust:
	    case restore_sp_pre_adjust:
	      emit_insn (gen_addsi3 (stack_pointer_rtx,
	    case restore_sp_partial_adjust:
	      emit_insn (gen_addsi3 (stack_pointer_rtx,
				     GEN_INT (size + 4 * num_regs_to_save
					      + REG_SAVE_BYTES - 252)));
	      size = 252 - REG_SAVE_BYTES - 4 * num_regs_to_save;
	      reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM + 1);
	      emit_insn (gen_movsi (reg, stack_pointer_rtx));
		emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
      /* Adjust the selected register, if any, for post-increment.  */
	reg = gen_rtx_POST_INC (SImode, reg);
      for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
	if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
	    /* If we aren't using a post-increment register, use an
		addr = gen_rtx_PLUS (SImode,
	      addr = stack_pointer_rtx;
	    emit_insn (gen_movsi (gen_rtx_REG (SImode, i),
				  gen_rtx_MEM (SImode, addr)));
      /* If we were using the restore_a1 strategy and the number of
	 bytes to be released won't fit in the `ret' byte, copy `a1'
	 to `sp', to avoid having to use `add' to adjust it.  */
      if (! frame_pointer_needed && reg && size + REG_SAVE_BYTES > 255)
	  emit_move_insn (stack_pointer_rtx, XEXP (reg, 0));
  /* Maybe cut back the stack, except for the register save area.

     If the frame pointer exists, then use the frame pointer to
     restore SP.

     If the stack size + register save area is more than 255 bytes,
     then the stack must be cut back here since the size + register
     save size is too big for a ret/retf instruction.

     Else leave it alone, it will be cut back as part of the
     ret/retf instruction, or there wasn't any stack to begin with.

     Under no circumstances should the register save area be
     deallocated here, that would leave a window where an interrupt
     could occur and trash the register save area.  */
  if (frame_pointer_needed)
      emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
  else if (size + REG_SAVE_BYTES > 255)
      emit_insn (gen_addsi3 (stack_pointer_rtx,
  /* Adjust the stack and restore callee-saved registers, if any.  */
  if (size || df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
      || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
      || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
      || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
      || frame_pointer_needed)
    emit_jump_insn (gen_return_internal_regs
		    (GEN_INT (size + REG_SAVE_BYTES)));
    emit_jump_insn (gen_return_internal ());
/* Update the condition code from the insn.  Called after each insn is
   output; records in cc_status what the insn left in the flags so
   redundant compares can be elided.  NOTE(review): case labels and
   braces are elided in this listing.  */
notice_update_cc (rtx body, rtx insn)
  switch (get_attr_cc (insn))
      /* Insn does not affect CC at all.  */
      /* Insn does not change CC, but the 0'th operand has been changed.  */
      if (cc_status.value1 != 0
	  && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
	cc_status.value1 = 0;
      /* Insn sets the Z,N flags of CC to recog_data.operand[0].
	 V,C are unusable.  */
      cc_status.flags |= CC_NO_CARRY | CC_OVERFLOW_UNUSABLE;
      cc_status.value1 = recog_data.operand[0];
      /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].  */
      cc_status.flags |= CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      /* The insn is a compare instruction.  An SFmode compare means the
	 FP condition codes are live (see print_operand's fpCC path).  */
      cc_status.value1 = SET_SRC (body);
      if (GET_CODE (cc_status.value1) == COMPARE
	  && GET_MODE (XEXP (cc_status.value1, 0)) == SFmode)
	cc_status.mdep.fpCC = 1;
      /* Insn doesn't leave CC in a usable state.  */
/* Recognize the PARALLEL rtx generated by mn10300_gen_multiple_store().
   This function is for MATCH_PARALLEL and so assumes OP is known to be
   parallel.  If OP is a multiple store, return a mask indicating which
   registers it saves.  Return 0 otherwise.
   NOTE(review): local declarations, `return 0' lines and the final
   `return mask' are elided in this listing.  */
store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
  count = XVECLEN (op, 0);
  /* Check that first instruction has the form (set (sp) (plus A B)) */
  elt = XVECEXP (op, 0, 0);
  if (GET_CODE (elt) != SET
      || GET_CODE (SET_DEST (elt)) != REG
      || REGNO (SET_DEST (elt)) != STACK_POINTER_REGNUM
      || GET_CODE (SET_SRC (elt)) != PLUS)
  /* Check that A is the stack pointer and B is the expected stack size.
     For OP to match, each subsequent instruction should push a word onto
     the stack.  We therefore expect the first instruction to create
     COUNT-1 stack slots.  */
  elt = SET_SRC (elt);
  if (GET_CODE (XEXP (elt, 0)) != REG
      || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
      || GET_CODE (XEXP (elt, 1)) != CONST_INT
      || INTVAL (XEXP (elt, 1)) != -(count - 1) * 4)
  /* Now go through the rest of the vector elements.  They must be
     ordered so that the first instruction stores the highest-numbered
     register to the highest stack slot and that subsequent instructions
     store a lower-numbered register to the slot below.

     LAST keeps track of the smallest-numbered register stored so far.
     MASK is the set of stored registers.  */
  last = LAST_EXTENDED_REGNUM + 1;
  for (i = 1; i < count; i++)
      /* Check that element i is a (set (mem M) R) and that R is valid.  */
      elt = XVECEXP (op, 0, i);
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_CODE (SET_SRC (elt)) != REG
	  || REGNO (SET_SRC (elt)) >= last)
      /* R was OK, so provisionally add it to MASK.  We return 0 in any
	 case if the rest of the instruction has a flaw.  */
      last = REGNO (SET_SRC (elt));
      mask |= (1 << last);
      /* Check that M has the form (plus (sp) (const_int -I*4)) */
      elt = XEXP (SET_DEST (elt), 0);
      if (GET_CODE (elt) != PLUS
	  || GET_CODE (XEXP (elt, 0)) != REG
	  || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
	  || GET_CODE (XEXP (elt, 1)) != CONST_INT
	  || INTVAL (XEXP (elt, 1)) != -i * 4)
  /* All or none of the callee-saved extended registers must be in the set.  */
  if ((mask & 0x3c000) != 0
      && (mask & 0x3c000) != 0x3c000)
/* What (if any) secondary registers are needed to move IN with mode
   MODE into a register in register class RCLASS.

   We might be able to simplify this.
   NOTE(review): braces, the `inner' declaration and some `return'
   paths are elided in this listing.  */
mn10300_secondary_reload_class (enum reg_class rclass, enum machine_mode mode,
  /* Strip off any SUBREG expressions from IN.  Basically we want
     to know if IN is a pseudo or (subreg (pseudo)) as those can
     turn into MEMs during reload.  */
  while (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);
  /* Memory loads less than a full word wide can't have an
     address or stack pointer destination.  They must use
     a data register as an intermediate register.  */
  if ((GET_CODE (in) == MEM
       || (GET_CODE (inner) == REG
	   && REGNO (inner) >= FIRST_PSEUDO_REGISTER))
      && (mode == QImode || mode == HImode)
      && (rclass == ADDRESS_REGS || rclass == SP_REGS
	  || rclass == SP_OR_ADDRESS_REGS))
      return DATA_OR_EXTENDED_REGS;
  /* We can't directly load sp + const_int into a data register;
     we must use an address register as an intermediate.  */
  if (rclass != SP_REGS
      && rclass != ADDRESS_REGS
      && rclass != SP_OR_ADDRESS_REGS
      && rclass != SP_OR_EXTENDED_REGS
      && rclass != ADDRESS_OR_EXTENDED_REGS
      && rclass != SP_OR_ADDRESS_OR_EXTENDED_REGS
      && (in == stack_pointer_rtx
	  || (GET_CODE (in) == PLUS
	      && (XEXP (in, 0) == stack_pointer_rtx
		  || XEXP (in, 1) == stack_pointer_rtx))))
    return ADDRESS_REGS;
  if (GET_CODE (in) == PLUS
      && (XEXP (in, 0) == stack_pointer_rtx
	  || XEXP (in, 1) == stack_pointer_rtx))
    return GENERAL_REGS;
  /* FP-register special cases (NOTE(review): the first half of this
     condition is elided in this listing).  */
      && rclass == FP_REGS)
      /* We can't load directly into an FP register from a
	 constant address.  */
      if (GET_CODE (in) == MEM
	  && CONSTANT_ADDRESS_P (XEXP (in, 0)))
	return (TARGET_AM33 ? DATA_OR_EXTENDED_REGS : DATA_REGS);
      /* Handle case were a pseudo may not get a hard register
	 but has an equivalent memory location defined.  */
      if (GET_CODE (inner) == REG
	  && REGNO (inner) >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_mem [REGNO (inner)]
	  && CONSTANT_ADDRESS_P (XEXP (reg_equiv_mem [REGNO (inner)], 0)))
	return (TARGET_AM33 ? DATA_OR_EXTENDED_REGS : DATA_REGS);
  /* Otherwise assume no secondary reloads are needed.  */
/* Compute the offset between elimination register pair FROM/TO, used
   by INITIAL_ELIMINATION_OFFSET.  NOTE(review): braces and the
   `return 0'-style fall-through lines are elided in this listing.  */
initial_offset (int from, int to)
  /* The difference between the argument pointer and the frame pointer
     is the size of the callee register save area.  */
  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
      if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
	  || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
	  || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
	  || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
	  || fp_regs_to_save ()
	  || frame_pointer_needed)
	return REG_SAVE_BYTES
	  + 4 * fp_regs_to_save ();
  /* The difference between the argument pointer and the stack pointer is
     the sum of the size of this function's frame, the callee register save
     area, and the fixed stack space needed for function calls (if any).  */
  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
      if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
	  || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
	  || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
	  || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
	  || fp_regs_to_save ()
	  || frame_pointer_needed)
	return (get_frame_size () + REG_SAVE_BYTES
		+ 4 * fp_regs_to_save ()
		+ (crtl->outgoing_args_size
		   ? crtl->outgoing_args_size + 4 : 0));
	return (get_frame_size ()
		+ (crtl->outgoing_args_size
		   ? crtl->outgoing_args_size + 4 : 0));
  /* The difference between the frame pointer and stack pointer is the sum
     of the size of this function's frame and the fixed stack space needed
     for function calls (if any).  */
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return (get_frame_size ()
	    + (crtl->outgoing_args_size
	       ? crtl->outgoing_args_size + 4 : 0));
1445 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* Decide whether a value of type TYPE is returned in memory rather than
   in registers; FNTYPE is unused on this target.  */
1448 mn10300_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1450 /* Return values > 8 bytes in length in memory. */
/* Zero-sized values and anything whose mode is BLKmode also go in
   memory.  */
1451 return (int_size_in_bytes (type) > 8
1452 || int_size_in_bytes (type) == 0
1453 || TYPE_MODE (type) == BLKmode);
1456 /* Flush the argument registers to the stack for a stdarg function;
1457 return the new argument pointer. */
1459 mn10300_builtin_saveregs (void)
/* ARGADJ adds one word when the declared parameter list is empty or does
   not end with void -- NOTE(review): the double negation below is easy to
   misread; confirm the intended behavior for unprototyped functions.  */
1462 tree fntype = TREE_TYPE (current_function_decl);
1463 int argadj = ((!(TYPE_ARG_TYPES (fntype) != 0
1464 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1465 != void_type_node)))
1466 ? UNITS_PER_WORD : 0);
1467 alias_set_type set = get_varargs_alias_set ();
1470 offset = plus_constant (crtl->args.arg_offset_rtx, argadj);
1472 offset = crtl->args.arg_offset_rtx;
/* Spill the two argument registers (regnos 0 and 1; cf. function_arg)
   into the first two words at the internal argument pointer, tagged with
   the varargs alias set.  */
1474 mem = gen_rtx_MEM (SImode, crtl->args.internal_arg_pointer);
1475 set_mem_alias_set (mem, set);
1476 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
1478 mem = gen_rtx_MEM (SImode,
1479 plus_constant (crtl->args.internal_arg_pointer, 4));
1480 set_mem_alias_set (mem, set);
1481 emit_move_insn (mem, gen_rtx_REG (SImode, 1));
/* Return internal_arg_pointer + offset, copied into a fresh register.  */
1483 return copy_to_reg (expand_binop (Pmode, add_optab,
1484 crtl->args.internal_arg_pointer,
1485 offset, 0, 0, OPTAB_LIB_WIDEN));
/* Implement va_start: flush the argument registers to the stack via
   expand_builtin_saveregs, then defer to the standard expansion.  The
   incoming NEXTARG is discarded and recomputed.  */
1489 mn10300_va_start (tree valist, rtx nextarg)
1491 nextarg = expand_builtin_saveregs ();
1492 std_expand_builtin_va_start (valist, nextarg);
1495 /* Return true when a parameter should be passed by reference. */
/* Arguments larger than 8 bytes, or of zero size, are passed by
   reference.  NOTE(review): this listing appears to omit the
   `if (type) ... else' selection between the two size assignments
   below -- confirm against the full source.  */
1498 mn10300_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1499 enum machine_mode mode, const_tree type,
1500 bool named ATTRIBUTE_UNUSED)
1502 unsigned HOST_WIDE_INT size;
1505 size = int_size_in_bytes (type);
1507 size = GET_MODE_SIZE (mode);
1509 return (size > 8 || size == 0);
1512 /* Return an RTX to represent where a value with mode MODE will be returned
1513 from a function. If the result is 0, the argument is pushed. */
/* NOTE(review): despite the wording above, this computes where an
   argument is *passed* (register 0, register 1, or the stack), not where
   a value is returned.  */
1516 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1517 tree type, int named ATTRIBUTE_UNUSED)
1522 /* We only support using 2 data registers as argument registers. */
1525 /* Figure out the size of the object to be passed. */
1526 if (mode == BLKmode)
1527 size = int_size_in_bytes (type);
1529 size = GET_MODE_SIZE (mode);
1531 /* Figure out the alignment of the object to be passed. */
/* Arguments occupy whole words: round the running byte count up to the
   next word boundary.  */
1534 cum->nbytes = (cum->nbytes + 3) & ~3;
1536 /* Don't pass this arg via a register if all the argument registers
   are already in use.  */
1538 if (cum->nbytes > nregs * UNITS_PER_WORD)
1541 /* Don't pass this arg via a register if it would be split between
1542 registers and memory. */
1543 if (type == NULL_TREE
1544 && cum->nbytes + size > nregs * UNITS_PER_WORD)
/* Otherwise pick register 0 or 1 according to how many argument bytes
   have already been consumed.  */
1547 switch (cum->nbytes / UNITS_PER_WORD)
1550 result = gen_rtx_REG (mode, 0);
1553 result = gen_rtx_REG (mode, 1);
1562 /* Return the number of bytes of registers to use for an argument passed
1563 partially in registers and partially in memory. */
1566 mn10300_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1567 tree type, bool named ATTRIBUTE_UNUSED)
1571 /* We only support using 2 data registers as argument registers. */
1574 /* Figure out the size of the object to be passed. */
1575 if (mode == BLKmode)
1576 size = int_size_in_bytes (type);
1578 size = GET_MODE_SIZE (mode);
1580 /* Figure out the alignment of the object to be passed. */
/* Round the running byte count up to a word boundary, matching
   function_arg.  */
1583 cum->nbytes = (cum->nbytes + 3) & ~3;
1585 /* Don't pass this arg via a register if all the argument registers
   are already in use.  */
1587 if (cum->nbytes > nregs * UNITS_PER_WORD)
/* If the whole argument fits in the remaining registers, none of it is
   partial.  */
1590 if (cum->nbytes + size <= nregs * UNITS_PER_WORD)
1593 /* Don't pass this arg via a register if it would be split between
1594 registers and memory. */
1595 if (type == NULL_TREE
1596 && cum->nbytes + size > nregs * UNITS_PER_WORD)
/* Otherwise the argument straddles the boundary: the register part is
   whatever register space remains.  */
1599 return nregs * UNITS_PER_WORD - cum->nbytes;
1602 /* Return the location of the function's value. This will be either
1603 $d0 for integer functions, $a0 for pointers, or a PARALLEL of both
1604 $d0 and $a0 if the -mreturn-pointer-on-d0 flag is set. Note that
1605 we only return the PARALLEL for outgoing values; we do not want
1606 callers relying on this extra copy. */
1609 mn10300_function_value (const_tree valtype, const_tree func, int outgoing)
1612 enum machine_mode mode = TYPE_MODE (valtype);
/* Non-pointer values always come back in the first data register.  */
1614 if (! POINTER_TYPE_P (valtype))
1615 return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
1616 else if (! TARGET_PTR_A0D0 || ! outgoing
1617 || cfun->returns_struct)
1618 return gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM);
/* Outgoing pointer with -mreturn-pointer-on-d0: build a two-entry
   PARALLEL naming both the address and data registers.
   NOTE(review): the XVECEXP lvalue lines for the two assignments below
   appear to be elided from this listing.  */
1620 rv = gen_rtx_PARALLEL (mode, rtvec_alloc (2));
1622 = gen_rtx_EXPR_LIST (VOIDmode,
1623 gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM),
1627 = gen_rtx_EXPR_LIST (VOIDmode,
1628 gen_rtx_REG (mode, FIRST_DATA_REGNUM),
1633 /* Output a tst insn. */
/* Emit the assembly for a tst of OPERAND at INSN.  Where possible, scan
   backwards for a register already holding zero and emit a shorter
   "cmp zero_reg,operand" instead.  */
1635 output_tst (rtx operand, rtx insn)
1640 /* We can save a byte if we can find a register which has the value
   of zero in it.  */
1642 temp = PREV_INSN (insn);
1643 while (optimize && temp)
1647 /* We allow the search to go through call insns. We record
1648 the fact that we've passed a CALL_INSN and reject matches which
1649 use call clobbered registers. */
/* Stop the backwards scan at control-flow boundaries.  */
1650 if (GET_CODE (temp) == CODE_LABEL
1651 || GET_CODE (temp) == JUMP_INSN
1652 || GET_CODE (temp) == BARRIER)
1655 if (GET_CODE (temp) == CALL_INSN)
/* Notes are skipped without affecting the search.  */
1658 if (GET_CODE (temp) == NOTE)
1660 temp = PREV_INSN (temp);
1664 /* It must be an insn, see if it is a simple set. */
1665 set = single_set (temp);
1668 temp = PREV_INSN (temp);
1672 /* Are we setting a data register to zero (this does not win for
1675 If it's a call clobbered register, have we passed a call?
1677 Make sure the register we find isn't the same as ourself;
1678 the mn10300 can't encode that.
1680 ??? reg_set_between_p returns nonzero anytime we pass a CALL_INSN
1681 so the code to detect calls here isn't doing anything useful. */
/* First candidate: a zero register in the same (non-extended) class as
   OPERAND, still holding zero at INSN.  */
1682 if (REG_P (SET_DEST (set))
1683 && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
1684 && !reg_set_between_p (SET_DEST (set), temp, insn)
1685 && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
1686 == REGNO_REG_CLASS (REGNO (operand)))
1687 && REGNO_REG_CLASS (REGNO (SET_DEST (set))) != EXTENDED_REGS
1688 && REGNO (SET_DEST (set)) != REGNO (operand)
1690 || ! call_really_used_regs [REGNO (SET_DEST (set))]))
1693 xoperands[0] = operand;
1694 xoperands[1] = SET_DEST (set);
1696 output_asm_insn ("cmp %1,%0", xoperands);
/* Second candidate: when OPERAND is an extended register, a zero held
   in an extended register of a different class also works.  */
1700 if (REGNO_REG_CLASS (REGNO (operand)) == EXTENDED_REGS
1701 && REG_P (SET_DEST (set))
1702 && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
1703 && !reg_set_between_p (SET_DEST (set), temp, insn)
1704 && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
1705 != REGNO_REG_CLASS (REGNO (operand)))
1706 && REGNO_REG_CLASS (REGNO (SET_DEST (set))) == EXTENDED_REGS
1707 && REGNO (SET_DEST (set)) != REGNO (operand)
1709 || ! call_really_used_regs [REGNO (SET_DEST (set))]))
1712 xoperands[0] = operand;
1713 xoperands[1] = SET_DEST (set);
1715 output_asm_insn ("cmp %1,%0", xoperands);
1718 temp = PREV_INSN (temp);
/* Predicate: detect a PLUS with the stack pointer as either operand, an
   address form this port must reject.
   NOTE(review): the return statements are elided from this listing --
   confirm polarity (presumably nonzero when sp appears) against the
   full source.  */
1724 impossible_plus_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1726 if (GET_CODE (op) != PLUS)
1729 if (XEXP (op, 0) == stack_pointer_rtx
1730 || XEXP (op, 1) == stack_pointer_rtx)
1736 /* Similarly, but when using a zero_extract pattern for a btst where
1737 the source operand might end up in memory. */
/* LEN/BIT describe the zero_extract field.  NOTE(review): the loop that
   builds MASK from LEN and BIT is elided from this listing.  */
1739 mask_ok_for_mem_btst (int len, int bit)
1741 unsigned int mask = 0;
1750 /* MASK must fit into an 8bit value. */
/* i.e. all set bits must fall within one byte of the 32-bit word.  */
1751 return (((mask & 0xff) == mask)
1752 || ((mask & 0xff00) == mask)
1753 || ((mask & 0xff0000) == mask)
1754 || ((mask & 0xff000000) == mask));
1757 /* Return 1 if X contains a symbolic expression. We know these
1758 expressions will have one of a few well defined forms, so
1759 we need only check those forms. */
1761 symbolic_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
/* NOTE(review): the case labels (SYMBOL_REF/LABEL_REF/CONST etc.) are
   elided from this listing; the visible return handles the
   (symbol-or-label + const_int) form.  */
1763 switch (GET_CODE (op))
1770 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1771 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1772 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1778 /* Try machine dependent ways of modifying an illegitimate address
1779 to be legitimate. If we find one, return the new valid address.
1780 This macro is used in only one place: `memory_address' in explow.c.
1782 OLDX is the address as it was before break_out_memory_refs was called.
1783 In some cases it is useful to look at this to decide what needs to be done.
1785 MODE and WIN are passed so that this macro can use
1786 GO_IF_LEGITIMATE_ADDRESS.
1788 Normally it is always safe for this macro to do nothing. It exists to
1789 recognize opportunities to optimize the output.
1791 But on a few ports with segmented architectures and indexed addressing
1792 (mn10300, hppa) it is used to rewrite certain problematical addresses. */
1794 legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
1795 enum machine_mode mode ATTRIBUTE_UNUSED)
/* NOTE(review): OLDX is marked ATTRIBUTE_UNUSED yet is read in the PIC
   branch below (and X, not OLDX, was the operand just tested) --
   confirm this is intentional.  */
1797 if (flag_pic && ! legitimate_pic_operand_p (x))
1798 x = legitimize_pic_address (oldx, NULL_RTX);
1800 /* Uh-oh. We might have an address for x[n-100000]. This needs
1801 special handling to avoid creating an indexed memory address
1802 with x-100000 as the base. */
1803 if (GET_CODE (x) == PLUS
1804 && symbolic_operand (XEXP (x, 1), VOIDmode))
1806 /* Ugly. We modify things here so that the address offset specified
1807 by the index expression is computed first, then added to x to form
1808 the entire address. */
1810 rtx regx1, regy1, regy2, y;
1812 /* Strip off any CONST. */
1814 if (GET_CODE (y) == CONST)
1817 if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
/* Force each sub-expression into a register, apply y's operation to
   (x0 op y1), then add y0 to form the final address.  */
1819 regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
1820 regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
1821 regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
1822 regx1 = force_reg (Pmode,
1823 gen_rtx_fmt_ee (GET_CODE (y), Pmode, regx1, regy2));
1824 return force_reg (Pmode, gen_rtx_PLUS (Pmode, regx1, regy1));
1830 /* Convert a non-PIC address in `orig' to a PIC address using @GOT or
1831 @GOTOFF in `reg'. */
1833 legitimize_pic_address (rtx orig, rtx reg)
/* Labels, constant-pool entries, and non-global symbols are reachable
   PC-relatively: use @GOTOFF.  */
1835 if (GET_CODE (orig) == LABEL_REF
1836 || (GET_CODE (orig) == SYMBOL_REF
1837 && (CONSTANT_POOL_ADDRESS_P (orig)
1838 || ! MN10300_GLOBAL_P (orig))))
1841 reg = gen_reg_rtx (Pmode);
1843 emit_insn (gen_symGOTOFF2reg (reg, orig));
/* Any other symbol must go through the GOT.  */
1846 else if (GET_CODE (orig) == SYMBOL_REF)
1849 reg = gen_reg_rtx (Pmode);
1851 emit_insn (gen_symGOT2reg (reg, orig));
1857 /* Return zero if X references a SYMBOL_REF or LABEL_REF whose symbol
1858 isn't protected by a PIC unspec; nonzero otherwise. */
1860 legitimate_pic_operand_p (rtx x)
1862 register const char *fmt;
/* A bare symbol or label is not PIC-safe.  */
1865 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
/* Any of the PIC-related unspecs wraps (protects) its operand.  */
1868 if (GET_CODE (x) == UNSPEC
1869 && (XINT (x, 1) == UNSPEC_PIC
1870 || XINT (x, 1) == UNSPEC_GOT
1871 || XINT (x, 1) == UNSPEC_GOTOFF
1872 || XINT (x, 1) == UNSPEC_PLT
1873 || XINT (x, 1) == UNSPEC_GOTSYM_OFF))
/* Otherwise recurse over X's operands, including vector elements.  */
1876 fmt = GET_RTX_FORMAT (GET_CODE (x));
1877 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1883 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1884 if (! legitimate_pic_operand_p (XVECEXP (x, i, j)))
1887 else if (fmt[i] == 'e' && ! legitimate_pic_operand_p (XEXP (x, i)))
1894 /* Return TRUE if the address X, taken from a (MEM:MODE X) rtx, is
1895 legitimate, and FALSE otherwise. */
/* STRICT selects strict register checking (hard regs only), as with the
   standard GO_IF_LEGITIMATE_ADDRESS contract.  */
1897 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
/* Constant addresses are fine, unless PIC requires protection.  */
1899 if (CONSTANT_ADDRESS_P (x)
1900 && (! flag_pic || legitimate_pic_operand_p (x)))
1903 if (RTX_OK_FOR_BASE_P (x, strict))
/* Post-increment of a base register, for word/float/halfword modes
   only.  NOTE(review): the leading condition of this `if' appears to be
   elided from this listing.  */
1907 && GET_CODE (x) == POST_INC
1908 && RTX_OK_FOR_BASE_P (XEXP (x, 0), strict)
1909 && (mode == SImode || mode == SFmode || mode == HImode))
/* base+index / base+displacement: classify each PLUS operand.  */
1912 if (GET_CODE (x) == PLUS)
1914 rtx base = 0, index = 0;
1916 if (REG_P (XEXP (x, 0))
1917 && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 0)), strict))
1920 index = XEXP (x, 1);
1923 if (REG_P (XEXP (x, 1))
1924 && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 1)), strict))
1927 index = XEXP (x, 0);
1930 if (base != 0 && index != 0)
1932 if (GET_CODE (index) == CONST_INT)
1934 if (GET_CODE (index) == CONST
1935 && GET_CODE (XEXP (index, 0)) != PLUS
1937 || legitimate_pic_operand_p (index)))
/* Recursive helper for mn10300_address_cost: return a relative cost for
   address expression X.  *UNSIG tracks whether the enclosing context
   treats constants as unsigned, which widens the cheap constant ranges
   below.  NOTE(review): case labels and most return statements are
   elided from this listing.  */
1946 mn10300_address_cost_1 (rtx x, int *unsig)
1948 switch (GET_CODE (x))
/* Register cost depends on its register class.  */
1951 switch (REGNO_REG_CLASS (REGNO (x)))
/* PLUS: sum the costs of both operands.  */
1977 return (mn10300_address_cost_1 (XEXP (x, 0), unsig)
1978 + mn10300_address_cost_1 (XEXP (x, 1), unsig))
1983 return mn10300_address_cost (XEXP (x, 0), !optimize_size);
1987 return mn10300_address_cost_1 (XEXP (x, 0), unsig);
/* CONST_INT: zero is free; otherwise cost rises with the number of
   bytes needed to encode the (possibly sign-extended) value.  */
1990 if (INTVAL (x) == 0)
1992 if (INTVAL (x) + (*unsig ? 0 : 0x80) < 0x100)
1994 if (INTVAL (x) + (*unsig ? 0 : 0x8000) < 0x10000)
1996 if (INTVAL (x) + (*unsig ? 0 : 0x800000) < 0x1000000)
/* TARGET_ADDRESS_COST hook: cost of address X, delegating to the
   recursive worker with a signed-context flag.  NOTE(review): the
   declaration of the local `s' is elided from this listing.  */
2012 mn10300_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
2015 return mn10300_address_cost_1 (x, &s);
/* TARGET_RTX_COSTS hook: store the relative cost of X in *TOTAL.
   NOTE(review): the case labels and the `*total = ...' assignments are
   elided from this listing; only the classification conditions and
   their commentary remain visible.  */
2019 mn10300_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed ATTRIBUTE_UNUSED)
2024 /* Zeros are extremely cheap. */
2025 if (INTVAL (x) == 0 && outer_code == SET)
2027 /* If it fits in 8 bits, then it's still relatively cheap. */
2028 else if (INT_8_BITS (INTVAL (x)))
2030 /* This is the "base" cost, includes constants where either the
2031 upper or lower 16bits are all zeros. */
2032 else if (INT_16_BITS (INTVAL (x))
2033 || (INTVAL (x) & 0xffff) == 0
2034 || (INTVAL (x) & 0xffff0000) == 0)
2043 /* These are more costly than a CONST_INT, but we can relax them,
2044 so they're less costly than a CONST_DOUBLE. */
2049 /* We don't optimize CONST_DOUBLEs well nor do we relax them well,
2050 so their cost is very high. */
2054 /* ??? This probably needs more work. */
2066 /* Check whether a constant used to initialize a DImode or DFmode can
2067 use a clr instruction. The code here must be kept in sync with
/* NOTE(review): the continuation of this comment (naming the pattern it
   must stay in sync with) is elided from this listing.  */
2071 mn10300_wide_const_load_uses_clr (rtx operands[2])
/* Only loads into a data register can use clr.  */
2075 if (GET_CODE (operands[0]) != REG
2076 || REGNO_REG_CLASS (REGNO (operands[0])) != DATA_REGS)
2079 switch (GET_CODE (operands[1]))
/* CONST_INT: split into low/high words.  */
2084 split_double (operands[1], &low, &high);
2085 val[0] = INTVAL (low);
2086 val[1] = INTVAL (high);
/* CONST_DOUBLE: extract the two target-format words, for either a
   DFmode value or a wide (DImode/VOIDmode) integer.  */
2091 if (GET_MODE (operands[1]) == DFmode)
2095 REAL_VALUE_FROM_CONST_DOUBLE (rv, operands[1]);
2096 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
2098 else if (GET_MODE (operands[1]) == VOIDmode
2099 || GET_MODE (operands[1]) == DImode)
2101 val[0] = CONST_DOUBLE_LOW (operands[1]);
2102 val[1] = CONST_DOUBLE_HIGH (operands[1]);
/* clr is usable when either word of the constant is zero.  */
2110 return val[0] == 0 || val[1] == 0;
2112 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
2113 may access it using GOTOFF instead of GOT. */
2116 mn10300_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
2120 if (GET_CODE (rtl) != MEM)
2122 symbol = XEXP (rtl, 0);
2123 if (GET_CODE (symbol) != SYMBOL_REF)
2127 SYMBOL_REF_FLAG (symbol) = (*targetm.binds_local_p) (decl);