1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
38 #include "diagnostic-core.h"
41 #include "integrate.h"
44 #include "target-def.h"
/* True iff the NUL-terminated strings A and B are identical.  */
48 #define streq(a,b) (strcmp (a, b) == 0)
51 /* Function prototypes for stupid compilers: */
52 static bool v850_handle_option (size_t, const char *, int);
53 static void const_double_split (rtx, HOST_WIDE_INT *, HOST_WIDE_INT *);
54 static int const_costs_int (HOST_WIDE_INT, int);
55 static int const_costs (rtx, enum rtx_code);
56 static bool v850_rtx_costs (rtx, int, int, int *, bool);
57 static void substitute_ep_register (rtx, rtx, int, int, rtx *, rtx *);
58 static void v850_reorg (void);
59 static int ep_memory_offset (enum machine_mode, int);
60 static void v850_set_data_area (tree, v850_data_area);
61 static tree v850_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
62 static tree v850_handle_data_area_attribute (tree *, tree, tree, int, bool *);
63 static void v850_insert_attributes (tree, tree *);
64 static void v850_asm_init_sections (void);
65 static section *v850_select_section (tree, int, unsigned HOST_WIDE_INT);
66 static void v850_encode_data_area (tree, rtx);
67 static void v850_encode_section_info (tree, rtx, int);
68 static int v850_issue_rate (void);
69 static bool v850_return_in_memory (const_tree, const_tree);
70 static rtx v850_function_value (const_tree, const_tree, bool);
71 static void v850_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
73 static bool v850_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
75 static int v850_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
77 static bool v850_strict_argument_naming (CUMULATIVE_ARGS *);
78 static bool v850_can_eliminate (const int, const int);
79 static void v850_asm_trampoline_template (FILE *);
80 static void v850_trampoline_init (rtx, tree, rtx);
81 static void v850_print_operand (FILE *, rtx, int);
82 static void v850_print_operand_address (FILE *, rtx);
83 static bool v850_print_operand_punct_valid_p (unsigned char code);
85 /* Information about the various small memory areas. */
86 struct small_memory_info small_memory[ (int)SMALL_MEMORY_max ] =
88 /* name max physical max */
94 /* Names of the various data areas used on the v850. */
95 tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
96 tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
98 /* Track the current data area set by the data area pragma (which
99 can be nested). Tested by check_default_data_area. */
100 data_area_stack_element * data_area_stack = NULL;
102 /* True if we don't need to check any more if the current
103 function is an interrupt handler. */
104 static int v850_interrupt_cache_p = FALSE;
/* Operands of a pending comparison.  NOTE(review): presumably stashed by
   the cbranch/cstore expanders in v850.md and consumed later -- confirm.  */
106 rtx v850_compare_op0, v850_compare_op1;
108 /* Whether current function is an interrupt handler. */
109 static int v850_interrupt_p = FALSE;
/* Lazily-created sections for the v850's small data areas (read-only
   sdata, read-only zdata, tiny data, zero data, zero bss).  */
111 static GTY(()) section *rosdata_section;
112 static GTY(()) section *rozdata_section;
113 static GTY(()) section *tdata_section;
114 static GTY(()) section *zdata_section;
115 static GTY(()) section *zbss_section;
117 /* V850 specific attributes. */
119 static const struct attribute_spec v850_attribute_table[] =
121 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
122 { "interrupt_handler", 0, 0, true, false, false, v850_handle_interrupt_attribute },
123 { "interrupt", 0, 0, true, false, false, v850_handle_interrupt_attribute },
124 { "sda", 0, 0, true, false, false, v850_handle_data_area_attribute },
125 { "tda", 0, 0, true, false, false, v850_handle_data_area_attribute },
126 { "zda", 0, 0, true, false, false, v850_handle_data_area_attribute },
/* The attribute table must end with this all-NULL sentinel entry.  */
127 { NULL, 0, 0, false, false, false, NULL }
131 /* Initialize the GCC target structure. */
132 #undef TARGET_ASM_ALIGNED_HI_OP
133 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
135 #undef TARGET_PRINT_OPERAND
136 #define TARGET_PRINT_OPERAND v850_print_operand
137 #undef TARGET_PRINT_OPERAND_ADDRESS
138 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
139 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
140 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
142 #undef TARGET_ATTRIBUTE_TABLE
143 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
145 #undef TARGET_INSERT_ATTRIBUTES
146 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
148 #undef TARGET_ASM_SELECT_SECTION
149 #define TARGET_ASM_SELECT_SECTION v850_select_section
151 /* The assembler supports switchable .bss sections, but
152 v850_select_section doesn't yet make use of them. */
153 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
154 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
156 #undef TARGET_ENCODE_SECTION_INFO
157 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
159 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
160 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
162 #undef TARGET_DEFAULT_TARGET_FLAGS
163 #define TARGET_DEFAULT_TARGET_FLAGS (MASK_DEFAULT | MASK_APP_REGS)
164 #undef TARGET_HANDLE_OPTION
165 #define TARGET_HANDLE_OPTION v850_handle_option
167 #undef TARGET_RTX_COSTS
168 #define TARGET_RTX_COSTS v850_rtx_costs
170 #undef TARGET_ADDRESS_COST
171 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
173 #undef TARGET_MACHINE_DEPENDENT_REORG
174 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
176 #undef TARGET_SCHED_ISSUE_RATE
177 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
179 #undef TARGET_PROMOTE_PROTOTYPES
180 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
182 #undef TARGET_RETURN_IN_MEMORY
183 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
185 #undef TARGET_FUNCTION_VALUE
186 #define TARGET_FUNCTION_VALUE v850_function_value
188 #undef TARGET_PASS_BY_REFERENCE
189 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
191 #undef TARGET_CALLEE_COPIES
192 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
194 #undef TARGET_SETUP_INCOMING_VARARGS
195 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
197 #undef TARGET_ARG_PARTIAL_BYTES
198 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
200 #undef TARGET_CAN_ELIMINATE
201 #define TARGET_CAN_ELIMINATE v850_can_eliminate
203 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
204 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
205 #undef TARGET_TRAMPOLINE_INIT
206 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
208 #undef TARGET_STRICT_ARGUMENT_NAMING
209 #define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming
/* Instantiate the target vector from the hook macros defined above.  */
211 struct gcc_target targetm = TARGET_INITIALIZER;
213 /* Set the maximum size of small memory area TYPE to the value given
214 by VALUE. Return true if VALUE was syntactically correct. VALUE
215 starts with the argument separator: either "-" or "=". */
218 v850_handle_memory_option (enum small_memory_type type, const char *value)
/* The argument must be introduced by '-' or '=', e.g. "-mtda=4".  */
222 if (*value != '-' && *value != '=')
/* Everything after the separator must be a run of decimal digits.  */
226 for (i = 0; value[i]; i++)
227 if (!ISDIGIT (value[i]))
/* Diagnose values beyond what the hardware addressing can reach.  */
231 if (size > small_memory[type].physical_max)
232 error ("value passed to %<-m%s%> is too large", small_memory[type].name);
/* Record the user-selected limit for this small memory area.  */
234 small_memory[type].max = size;
238 /* Implement TARGET_HANDLE_OPTION. */
241 v850_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
/* NOTE(review): enabling a CPU option also turns on ep usage and
   out-of-line prologue functions -- confirm which OPT_m case this is.  */
246 target_flags |= MASK_EP | MASK_PROLOG_FUNCTION;
/* Clear all CPU-selection bits except the one being chosen.  */
250 target_flags &= ~(MASK_CPU ^ MASK_V850);
255 target_flags &= ~(MASK_CPU ^ MASK_V850E);
/* The -mtda/-msda/-mzda options delegate to the shared size parser.  */
259 return v850_handle_memory_option (SMALL_MEMORY_TDA, arg);
262 return v850_handle_memory_option (SMALL_MEMORY_SDA, arg);
265 return v850_handle_memory_option (SMALL_MEMORY_ZDA, arg);
272 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
273 Specify whether to pass the argument by reference. */
276 v850_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
277 enum machine_mode mode, const_tree type,
278 bool named ATTRIBUTE_UNUSED)
280 unsigned HOST_WIDE_INT size;
/* Size comes from the tree type when one is available, otherwise
   from the machine mode.  */
283 size = int_size_in_bytes (type);
285 size = GET_MODE_SIZE (mode);
290 /* Implementing the Varargs Macros. */
/* Implement TARGET_STRICT_ARGUMENT_NAMING: strict naming everywhere
   except under the GHS ABI.  */
293 v850_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
/* NOTE(review): "!TARGET_GHS ? true : false" is just "!TARGET_GHS".  */
295 return !TARGET_GHS ? true : false;
298 /* Return an RTX to represent where an argument with mode MODE
299 and type TYPE will be passed to a function. If the result
300 is NULL_RTX, the argument will be pushed. */
303 function_arg (CUMULATIVE_ARGS * cum,
304 enum machine_mode mode,
308 rtx result = NULL_RTX;
/* Determine the argument's size, preferring the tree type.  */
315 size = int_size_in_bytes (type);
317 size = GET_MODE_SIZE (mode);
/* Round the size up to a whole number of words.  */
319 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
323 /* Once we have stopped using argument registers, do not start up again. */
324 cum->nbytes = 4 * UNITS_PER_WORD;
/* Small arguments use their natural alignment; larger ones use
   word alignment (presumably -- the else arm is not visible here).  */
328 if (size <= UNITS_PER_WORD && type)
329 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
/* Align the running byte count before assigning a register.  */
333 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
/* Only the first four words (r6-r9) are passed in registers.  */
335 if (cum->nbytes > 4 * UNITS_PER_WORD)
338 if (type == NULL_TREE
339 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Map each word slot to its argument register, r6..r9.  */
342 switch (cum->nbytes / UNITS_PER_WORD)
345 result = gen_rtx_REG (mode, 6);
348 result = gen_rtx_REG (mode, 7);
351 result = gen_rtx_REG (mode, 8);
354 result = gen_rtx_REG (mode, 9);
363 /* Return the number of bytes which must be put into registers
364 for values which are part in registers and part in memory. */
366 v850_arg_partial_bytes (CUMULATIVE_ARGS * cum, enum machine_mode mode,
367 tree type, bool named)
/* Under GHS, unnamed (variadic) arguments never occupy registers.  */
371 if (TARGET_GHS && !named)
375 size = int_size_in_bytes (type);
377 size = GET_MODE_SIZE (mode);
383 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
/* Align the running byte count, mirroring function_arg above.  */
387 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
/* Entirely in memory: no partial register bytes.  */
389 if (cum->nbytes > 4 * UNITS_PER_WORD)
/* Entirely in registers: also no partial bytes.  */
392 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
395 if (type == NULL_TREE
396 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Straddling case: the remaining register space is passed in regs.  */
399 return 4 * UNITS_PER_WORD - cum->nbytes;
402 /* Return the high and low words of a CONST_DOUBLE */
405 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
407 if (GET_CODE (x) == CONST_DOUBLE)
412 switch (GET_MODE (x))
/* DFmode: convert to target representation, then split words.  */
415 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
416 REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
417 *p_high = t[1]; /* since v850 is little endian */
418 *p_low = t[0]; /* high is second word */
/* SFmode: the single word goes into *p_high (and, presumably,
   *p_low is set in code not visible here -- verify).  */
422 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
423 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
/* Integral CONST_DOUBLE: the two halves are stored directly.  */
429 *p_high = CONST_DOUBLE_HIGH (x);
430 *p_low = CONST_DOUBLE_LOW (x);
/* Anything other than a CONST_DOUBLE is a caller bug.  */
438 fatal_insn ("const_double_split got a bad insn:", x);
442 /* Return the cost of the rtx R with code CODE. */
/* Cost of materializing integer VALUE: cheapest constraints first
   (I, then J, then K); ZERO_COST is presumably returned for the
   cheapest case -- the return statements are not visible here.  */
445 const_costs_int (HOST_WIDE_INT value, int zero_cost)
447 if (CONST_OK_FOR_I (value))
449 else if (CONST_OK_FOR_J (value))
451 else if (CONST_OK_FOR_K (value))
/* Cost of constant R with rtx code C, delegating integer pieces to
   const_costs_int.  */
458 const_costs (rtx r, enum rtx_code c)
460 HOST_WIDE_INT high, low;
465 return const_costs_int (INTVAL (r), 0);
/* CONST_DOUBLE: cost each word; SFmode has only one significant word.  */
468 const_double_split (r, &high, &low);
469 if (GET_MODE (r) == SFmode)
470 return const_costs_int (high, 1);
472 return const_costs_int (high, 1) + const_costs_int (low, 1);
/* Implement TARGET_RTX_COSTS.  Writes the estimated cost of X into
   *TOTAL; SPEED distinguishes optimizing for speed vs. size.  */
488 v850_rtx_costs (rtx x,
490 int outer_code ATTRIBUTE_UNUSED,
491 int * total, bool speed)
493 enum rtx_code code = (enum rtx_code) codearg;
/* Constants: delegate to the const_costs helpers above.  */
502 *total = COSTS_N_INSNS (const_costs (x, code));
/* Presumably multiply/divide handling: V850E has cheaper forms when
   optimizing for size -- surrounding cases are not visible here.  */
509 if (TARGET_V850E && !speed)
517 && ( GET_MODE (x) == SImode
518 || GET_MODE (x) == HImode
519 || GET_MODE (x) == QImode))
/* Shift-by-register vs. shift-by-constant cost split.  */
521 if (GET_CODE (XEXP (x, 1)) == REG)
523 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
525 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
527 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
536 if (outer_code == COMPARE)
545 /* Print operand X using operand code CODE to assembly language output file
549 v850_print_operand (FILE * file, rtx x, int code)
551 HOST_WIDE_INT high, low;
556 /* We use 'c' operands with symbols for .vtinherit */
557 if (GET_CODE (x) == SYMBOL_REF)
559 output_addr_const(file, x);
/* Condition codes: 'B'/'C' print the reversed condition.  */
566 switch ((code == 'B' || code == 'C')
567 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
570 if (code == 'c' || code == 'C')
571 fprintf (file, "nz");
573 fprintf (file, "ne");
576 if (code == 'c' || code == 'C')
582 fprintf (file, "ge");
585 fprintf (file, "gt");
588 fprintf (file, "le");
591 fprintf (file, "lt");
594 fprintf (file, "nl");
600 fprintf (file, "nh");
609 case 'F': /* high word of CONST_DOUBLE */
610 switch (GET_CODE (x))
/* For a plain CONST_INT the "high word" is just the sign extension.  */
613 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
617 const_double_split (x, &high, &low);
618 fprintf (file, "%ld", (long) high);
625 case 'G': /* low word of CONST_DOUBLE */
626 switch (GET_CODE (x))
629 fprintf (file, "%ld", (long) INTVAL (x));
633 const_double_split (x, &high, &low);
634 fprintf (file, "%ld", (long) low);
/* NOTE(review): the trailing "\n" here looks wrong for an operand
   printer (it would split the instruction line) -- verify upstream.  */
642 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
643 fprintf (file, "%d", exact_log2 (INTVAL (x)));
648 gcc_assert (special_symbolref_operand (x, VOIDmode));
/* Strip the CONST wrapper to reach the underlying SYMBOL_REF.  */
650 if (GET_CODE (x) == CONST)
651 x = XEXP (XEXP (x, 0), 0);
653 gcc_assert (GET_CODE (x) == SYMBOL_REF);
/* Emit the relocation operator for the symbol's small data area.  */
655 if (SYMBOL_REF_ZDA_P (x))
656 fprintf (file, "zdaoff");
657 else if (SYMBOL_REF_SDA_P (x))
658 fprintf (file, "sdaoff");
659 else if (SYMBOL_REF_TDA_P (x))
660 fprintf (file, "tdaoff");
665 gcc_assert (special_symbolref_operand (x, VOIDmode));
666 output_addr_const (file, x);
669 gcc_assert (special_symbolref_operand (x, VOIDmode));
671 if (GET_CODE (x) == CONST)
672 x = XEXP (XEXP (x, 0), 0);
674 gcc_assert (GET_CODE (x) == SYMBOL_REF);
/* Emit the base register backing the symbol's small data area.  */
676 if (SYMBOL_REF_ZDA_P (x))
677 fprintf (file, "r0");
678 else if (SYMBOL_REF_SDA_P (x))
679 fprintf (file, "gp");
680 else if (SYMBOL_REF_TDA_P (x))
681 fprintf (file, "ep");
685 case 'R': /* 2nd word of a double. */
686 switch (GET_CODE (x))
/* NOTE(review): non-literal format string; fputs would be safer.  */
689 fprintf (file, reg_names[REGNO (x) + 1]);
/* Address of the second word of a double-word memory operand.  */
692 x = XEXP (adjust_address (x, SImode, 4), 0);
693 v850_print_operand_address (file, x);
694 if (GET_CODE (x) == CONST_INT)
695 fprintf (file, "[r0]");
704 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
705 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
712 /* Like an 'S' operand above, but for unsigned loads only. */
713 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
718 case 'W': /* print the instruction suffix */
719 switch (GET_MODE (x))
724 case QImode: fputs (".b", file); break;
725 case HImode: fputs (".h", file); break;
726 case SImode: fputs (".w", file); break;
727 case SFmode: fputs (".w", file); break;
730 case '.': /* register r0 */
731 fputs (reg_names[0], file);
733 case 'z': /* reg or zero */
734 if (GET_CODE (x) == REG)
735 fputs (reg_names[REGNO (x)], file);
/* Zero constants of word/float modes also print as r0.  */
736 else if ((GET_MODE(x) == SImode
737 || GET_MODE(x) == DFmode
738 || GET_MODE(x) == SFmode)
739 && x == CONST0_RTX(GET_MODE(x)))
740 fputs (reg_names[0], file);
743 gcc_assert (x == const0_rtx);
744 fputs (reg_names[0], file);
/* Default: print the operand according to its rtx code.  */
748 switch (GET_CODE (x))
/* Bare constant addresses are based off r0.  */
751 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
752 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
755 output_address (XEXP (x, 0));
759 fputs (reg_names[REGNO (x)], file);
762 fputs (reg_names[subreg_regno (x)], file);
769 v850_print_operand_address (file, x);
780 /* Output assembly language output for the address ADDR to FILE. */
783 v850_print_operand_address (FILE * file, rtx addr)
785 switch (GET_CODE (addr))
/* Plain register: "0[reg]".  */
788 fprintf (file, "0[");
789 v850_print_operand (file, addr, 0);
/* LO_SUM (presumably): "lo(sym)[reg]".  */
793 if (GET_CODE (XEXP (addr, 0)) == REG)
796 fprintf (file, "lo(");
797 v850_print_operand (file, XEXP (addr, 1), 0);
798 fprintf (file, ")[");
799 v850_print_operand (file, XEXP (addr, 0), 0);
/* PLUS with a register/subreg base: "offset[reg]".  */
804 if (GET_CODE (XEXP (addr, 0)) == REG
805 || GET_CODE (XEXP (addr, 0)) == SUBREG
808 v850_print_operand (file, XEXP (addr, 1), 0);
810 v850_print_operand (file, XEXP (addr, 0), 0);
815 v850_print_operand (file, XEXP (addr, 0), 0);
817 v850_print_operand (file, XEXP (addr, 1), 0);
/* SYMBOL_REF in a small data area: "areaoff(sym)[basereg]".  */
822 const char *off_name = NULL;
823 const char *reg_name = NULL;
825 if (SYMBOL_REF_ZDA_P (addr))
830 else if (SYMBOL_REF_SDA_P (addr))
835 else if (SYMBOL_REF_TDA_P (addr))
842 fprintf (file, "%s(", off_name);
843 output_addr_const (file, addr);
845 fprintf (file, ")[%s]", reg_name);
/* CONST wrapping a small-data symbol: same form as above.  */
849 if (special_symbolref_operand (addr, VOIDmode))
851 rtx x = XEXP (XEXP (addr, 0), 0);
852 const char *off_name;
853 const char *reg_name;
855 if (SYMBOL_REF_ZDA_P (x))
860 else if (SYMBOL_REF_SDA_P (x))
865 else if (SYMBOL_REF_TDA_P (x))
873 fprintf (file, "%s(", off_name);
874 output_addr_const (file, addr);
875 fprintf (file, ")[%s]", reg_name);
878 output_addr_const (file, addr);
881 output_addr_const (file, addr);
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P: report which punctuation
   operand codes v850_print_operand accepts (body not visible here).  */
887 v850_print_operand_punct_valid_p (unsigned char code)
892 /* When assemble_integer is used to emit the offsets for a switch
893 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
894 output_addr_const will normally barf at this, but it is OK to omit
895 the truncate and just emit the difference of the two labels. The
896 .hword directive will automatically handle the truncation for us.
898 Returns 1 if rtx was handled, 0 otherwise. */
901 v850_output_addr_const_extra (FILE * file, rtx x)
/* Only the TRUNCATE form described above is handled here.  */
903 if (GET_CODE (x) != TRUNCATE)
908 /* We must also handle the case where the switch table was passed a
909 constant value and so has been collapsed. In this case the first
910 label will have been deleted. In such a case it is OK to emit
911 nothing, since the table will not be used.
912 (cf gcc.c-torture/compile/990801-1.c). */
913 if (GET_CODE (x) == MINUS
914 && GET_CODE (XEXP (x, 0)) == LABEL_REF
915 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
916 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
/* Emit the label difference with the TRUNCATE stripped.  */
919 output_addr_const (file, x);
923 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
927 output_move_single (rtx * operands)
929 rtx dst = operands[0];
930 rtx src = operands[1];
/* Integer immediate sources, cheapest encoding first.  */
937 else if (GET_CODE (src) == CONST_INT)
939 HOST_WIDE_INT value = INTVAL (src);
941 if (CONST_OK_FOR_J (value)) /* Signed 5-bit immediate. */
944 else if (CONST_OK_FOR_K (value)) /* Signed 16-bit immediate. */
945 return "movea %1,%.,%0";
947 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
948 return "movhi hi0(%1),%.,%0";
950 /* A random constant. */
951 else if (TARGET_V850E || TARGET_V850E2_ALL)
/* Older cores need a two-instruction movhi/movea pair.  */
954 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
/* SFmode float constants: same ladder, keyed on the bit pattern.  */
957 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
959 HOST_WIDE_INT high, low;
961 const_double_split (src, &high, &low);
963 if (CONST_OK_FOR_J (high)) /* Signed 5-bit immediate. */
966 else if (CONST_OK_FOR_K (high)) /* Signed 16-bit immediate. */
967 return "movea %F1,%.,%0";
969 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
970 return "movhi hi0(%F1),%.,%0";
972 /* A random constant. */
973 else if (TARGET_V850E || TARGET_V850E2_ALL)
977 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
/* Memory load: %S/%W pick the short-form prefix and size suffix.  */
980 else if (GET_CODE (src) == MEM)
981 return "%S1ld%W1 %1,%0";
/* Small-data symbol: one movea off the area's base register.  */
983 else if (special_symbolref_operand (src, VOIDmode))
984 return "movea %O1(%P1),%Q1,%0";
/* Generic symbolic addresses.  */
986 else if (GET_CODE (src) == LABEL_REF
987 || GET_CODE (src) == SYMBOL_REF
988 || GET_CODE (src) == CONST)
990 if (TARGET_V850E || TARGET_V850E2_ALL)
991 return "mov hilo(%1),%0";
993 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
996 else if (GET_CODE (src) == HIGH)
997 return "movhi hi(%1),%.,%0";
999 else if (GET_CODE (src) == LO_SUM)
1001 operands[2] = XEXP (src, 0);
1002 operands[3] = XEXP (src, 1);
1003 return "movea lo(%3),%2,%0";
/* Stores; zero stores use r0 (%.) as the source.  */
1007 else if (GET_CODE (dst) == MEM)
1010 return "%S0st%W0 %1,%0";
1012 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
1013 return "%S0st%W0 %.,%0";
1015 else if (GET_CODE (src) == CONST_DOUBLE
1016 && CONST0_RTX (GET_MODE (dst)) == src)
1017 return "%S0st%W0 %.,%0";
/* Any unhandled combination is an internal error.  */
1020 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
1024 /* Generate comparison code. */
/* Predicate: OP is an FPU comparison whose truth is signalled by the
   Z-style FCC modes (LT/LE/EQ).  */
1026 v850_float_z_comparison_operator (rtx op, enum machine_mode mode)
1028 enum rtx_code code = GET_CODE (op);
1030 if (GET_RTX_CLASS (code) != RTX_COMPARE
1031 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
1034 if (mode != GET_MODE (op) && mode != VOIDmode)
/* Must be a comparison of the CC register against zero.  */
1037 if ((GET_CODE (XEXP (op, 0)) != REG
1038 || REGNO (XEXP (op, 0)) != CC_REGNUM)
1039 || XEXP (op, 1) != const0_rtx)
1042 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LTmode)
1044 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LEmode)
1046 if (GET_MODE (XEXP (op, 0)) == CC_FPU_EQmode)
/* Predicate: OP is an FPU comparison signalled by the NZ-style FCC
   modes (GT/GE/NE).  Mirrors the Z-variant above.  */
1053 v850_float_nz_comparison_operator (rtx op, enum machine_mode mode)
1055 enum rtx_code code = GET_CODE (op);
1057 if (GET_RTX_CLASS (code) != RTX_COMPARE
1058 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
1061 if (mode != GET_MODE (op) && mode != VOIDmode)
/* Must be a comparison of the CC register against zero.  */
1064 if ((GET_CODE (XEXP (op, 0)) != REG
1065 || REGNO (XEXP (op, 0)) != CC_REGNUM)
1066 || XEXP (op, 1) != const0_rtx)
1069 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GTmode)
1071 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GEmode)
1073 if (GET_MODE (XEXP (op, 0)) == CC_FPU_NEmode)
/* Choose the condition-code mode for comparing OP0 and OP1 under
   rtx code COND; float compares map to per-condition FCC modes.  */
1080 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1)
1082 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
1087 return CC_FPU_LEmode;
1089 return CC_FPU_GEmode;
1091 return CC_FPU_LTmode;
1093 return CC_FPU_GTmode;
1095 return CC_FPU_EQmode;
1097 return CC_FPU_NEmode;
/* Emit the FPU compare insn for COND on OP0/OP1 and return the CC
   mode the result lives in (via v850_select_cc_mode).  */
1106 v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
1108 if (GET_MODE(op0) == DFmode)
1113 emit_insn (gen_cmpdf_le_insn (op0, op1));
1116 emit_insn (gen_cmpdf_ge_insn (op0, op1));
1119 emit_insn (gen_cmpdf_lt_insn (op0, op1));
1122 emit_insn (gen_cmpdf_gt_insn (op0, op1));
1125 emit_insn (gen_cmpdf_eq_insn (op0, op1));
1128 emit_insn (gen_cmpdf_ne_insn (op0, op1));
/* NOTE(review): this arm tests the global v850_compare_op0 while the
   DFmode arm tests the OP0 parameter -- looks inconsistent; verify.  */
1134 else if (GET_MODE(v850_compare_op0) == SFmode)
1139 emit_insn (gen_cmpsf_le_insn(op0, op1));
1142 emit_insn (gen_cmpsf_ge_insn(op0, op1));
1145 emit_insn (gen_cmpsf_lt_insn(op0, op1));
1148 emit_insn (gen_cmpsf_gt_insn(op0, op1));
1151 emit_insn (gen_cmpsf_eq_insn(op0, op1));
1154 emit_insn (gen_cmpsf_ne_insn(op0, op1));
1165 return v850_select_cc_mode (cond, op0, op1);
/* Emit a compare of OP0/OP1 and return the COND test rtx on the
   resulting condition-code register.  */
1169 v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
/* Integer compare: one cmpsi, tested in plain CCmode.  */
1171 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
1173 emit_insn (gen_cmpsi_insn (op0, op1));
1174 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
/* Float compare: emit the FPU compare, then copy FCC into CC.  */
1179 mode = v850_gen_float_compare (cond, mode, op0, op1);
1180 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1181 emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1183 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1187 /* Return maximum offset supported for a short EP memory reference of mode
1188 MODE and signedness UNSIGNEDP. */
1191 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
/* QImode (presumably): short sld.b reaches fewer bytes when the
   small-sld errata workaround is active.  */
1198 if (TARGET_SMALL_SLD)
1199 max_offset = (1 << 4);
1200 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1202 max_offset = (1 << 4);
1204 max_offset = (1 << 7);
/* HImode (presumably): same structure, one bit wider.  */
1208 if (TARGET_SMALL_SLD)
1209 max_offset = (1 << 5);
1210 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1212 max_offset = (1 << 5);
1214 max_offset = (1 << 8);
/* Word-sized access.  */
1219 max_offset = (1 << 8);
1229 /* Return true if OP is a valid short EP memory reference */
1232 ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
1238 /* If we are not using the EP register on a per-function basis
1239 then do not allow this optimization at all. This is to
1240 prevent the use of the SLD/SST instructions which cannot be
1241 guaranteed to work properly due to a hardware bug. */
1245 if (GET_CODE (op) != MEM)
/* Offsets must be within range and aligned to the access size.  */
1248 max_offset = ep_memory_offset (mode, unsigned_load);
1250 mask = GET_MODE_SIZE (mode) - 1;
1252 addr = XEXP (op, 0);
1253 if (GET_CODE (addr) == CONST)
1254 addr = XEXP (addr, 0);
1256 switch (GET_CODE (addr))
/* A bare symbol qualifies only if it lives in the tiny data area.  */
1262 return SYMBOL_REF_TDA_P (addr);
/* A bare register qualifies only if it is ep itself.  */
1265 return REGNO (addr) == EP_REGNUM;
/* reg+const / sym+const: base must be ep or a TDA symbol, and the
   offset in range, non-negative, and aligned.  */
1268 op0 = XEXP (addr, 0);
1269 op1 = XEXP (addr, 1);
1270 if (GET_CODE (op1) == CONST_INT
1271 && INTVAL (op1) < max_offset
1272 && INTVAL (op1) >= 0
1273 && (INTVAL (op1) & mask) == 0)
1275 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1278 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1287 /* Substitute memory references involving a pointer, to use the ep pointer,
1288 taking care to save and preserve the ep. */
1291 substitute_ep_register (rtx first_insn,
1298 rtx reg = gen_rtx_REG (Pmode, regno);
/* r1 is used as the save slot for ep; mark it live and cache rtxes
   for r1 and ep (r30) for the caller.  */
1303 df_set_regs_ever_live (1, true);
1304 *p_r1 = gen_rtx_REG (Pmode, 1);
1305 *p_ep = gen_rtx_REG (Pmode, 30);
/* Optional statistics dump: each substituted reference saves 2 bytes,
   minus the 3-insn save/copy/restore overhead.  */
1310 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1311 2 * (uses - 3), uses, reg_names[regno],
1312 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1313 INSN_UID (first_insn), INSN_UID (last_insn));
/* Normalize the range to real insns, then walk it.  */
1315 if (GET_CODE (first_insn) == NOTE)
1316 first_insn = next_nonnote_insn (first_insn);
1318 last_insn = next_nonnote_insn (last_insn);
1319 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1321 if (GET_CODE (insn) == INSN)
1323 rtx pattern = single_set (insn);
1325 /* Replace the memory references. */
1329 /* Memory operands are signed by default. */
1330 int unsignedp = FALSE;
/* Locate the single MEM in this SET (dest, src, or under a
   sign/zero extension of the source).  */
1332 if (GET_CODE (SET_DEST (pattern)) == MEM
1333 && GET_CODE (SET_SRC (pattern)) == MEM)
1336 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1337 p_mem = &SET_DEST (pattern);
1339 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1340 p_mem = &SET_SRC (pattern);
1342 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1343 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1344 p_mem = &XEXP (SET_SRC (pattern), 0);
1346 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1347 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1349 p_mem = &XEXP (SET_SRC (pattern), 0);
/* Rewrite [REGNO] and [REGNO+offset] addresses to use ep, provided
   the offset fits the short sld/sst encoding.  */
1357 rtx addr = XEXP (*p_mem, 0);
1359 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1360 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1362 else if (GET_CODE (addr) == PLUS
1363 && GET_CODE (XEXP (addr, 0)) == REG
1364 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1365 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1366 && ((INTVAL (XEXP (addr, 1)))
1367 < ep_memory_offset (GET_MODE (*p_mem),
1369 && ((INTVAL (XEXP (addr, 1))) >= 0))
1370 *p_mem = change_address (*p_mem, VOIDmode,
1371 gen_rtx_PLUS (Pmode,
1379 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1380 insn = prev_nonnote_insn (first_insn);
1381 if (insn && GET_CODE (insn) == INSN
1382 && GET_CODE (PATTERN (insn)) == SET
1383 && SET_DEST (PATTERN (insn)) == *p_ep
1384 && SET_SRC (PATTERN (insn)) == *p_r1)
/* Bracket the region: save ep into r1, load ep with the pointer,
   and restore ep from r1 afterwards.  */
1387 emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);
1389 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
1390 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
1394 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1395 the -mep mode to copy heavily used pointers to ep to use the implicit
/* Per-hard-register bookkeeping: use count plus the first/last insn
   of the candidate region (fields partly not visible here).  */
1407 regs[FIRST_PSEUDO_REGISTER];
1416 /* If not ep mode, just return now. */
/* Reset all per-register tracking state.  */
1420 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1423 regs[i].first_insn = NULL_RTX;
1424 regs[i].last_insn = NULL_RTX;
/* Scan every insn in the function.  */
1427 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1429 switch (GET_CODE (insn))
1431 /* End of basic block */
/* At a block boundary, commit the best candidate seen so far.  */
1438 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1440 if (max_uses < regs[i].uses)
1442 max_uses = regs[i].uses;
1448 substitute_ep_register (regs[max_regno].first_insn,
1449 regs[max_regno].last_insn,
1450 max_uses, max_regno, &r1, &ep);
/* Start tracking afresh for the next block.  */
1454 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1457 regs[i].first_insn = NULL_RTX;
1458 regs[i].last_insn = NULL_RTX;
1466 pattern = single_set (insn);
1468 /* See if there are any memory references we can shorten */
1471 rtx src = SET_SRC (pattern);
1472 rtx dest = SET_DEST (pattern);
1474 /* Memory operands are signed by default. */
1475 int unsignedp = FALSE;
1477 /* We might have (SUBREG (MEM)) here, so just get rid of the
1478 subregs to make this code simpler. */
1479 if (GET_CODE (dest) == SUBREG
1480 && (GET_CODE (SUBREG_REG (dest)) == MEM
1481 || GET_CODE (SUBREG_REG (dest)) == REG))
1482 alter_subreg (&dest);
1483 if (GET_CODE (src) == SUBREG
1484 && (GET_CODE (SUBREG_REG (src)) == MEM
1485 || GET_CODE (SUBREG_REG (src)) == REG))
1486 alter_subreg (&src);
/* Identify the MEM in this SET, as in substitute_ep_register.  */
1488 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1491 else if (GET_CODE (dest) == MEM)
1494 else if (GET_CODE (src) == MEM)
1497 else if (GET_CODE (src) == SIGN_EXTEND
1498 && GET_CODE (XEXP (src, 0)) == MEM)
1499 mem = XEXP (src, 0);
1501 else if (GET_CODE (src) == ZERO_EXTEND
1502 && GET_CODE (XEXP (src, 0)) == MEM)
1504 mem = XEXP (src, 0);
1510 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
/* Count references through a base register that could become ep.  */
1513 else if (!use_ep && mem
1514 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1516 rtx addr = XEXP (mem, 0);
1520 if (GET_CODE (addr) == REG)
1523 regno = REGNO (addr);
1526 else if (GET_CODE (addr) == PLUS
1527 && GET_CODE (XEXP (addr, 0)) == REG
1528 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1529 && ((INTVAL (XEXP (addr, 1)))
1530 < ep_memory_offset (GET_MODE (mem), unsignedp))
1531 && ((INTVAL (XEXP (addr, 1))) >= 0))
1534 regno = REGNO (XEXP (addr, 0));
/* Extend (or start) this register's candidate region.  */
1543 regs[regno].last_insn = insn;
1544 if (!regs[regno].first_insn)
1545 regs[regno].first_insn = insn;
1549 /* Loading up a register in the basic block zaps any savings
1551 if (GET_CODE (dest) == REG)
1553 enum machine_mode mode = GET_MODE (dest);
1557 regno = REGNO (dest);
1558 endregno = regno + HARD_REGNO_NREGS (regno, mode);
1562 /* See if we can use the pointer before this
1567 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1569 if (max_uses < regs[i].uses)
1571 max_uses = regs[i].uses;
/* If the clobbered register was the best candidate, substitute
   its region now, before the value changes.  */
1577 && max_regno >= regno
1578 && max_regno < endregno)
1580 substitute_ep_register (regs[max_regno].first_insn,
1581 regs[max_regno].last_insn,
1582 max_uses, max_regno, &r1,
1585 /* Since we made a substitution, zap all remembered
1587 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1590 regs[i].first_insn = NULL_RTX;
1591 regs[i].last_insn = NULL_RTX;
/* Otherwise just forget the clobbered registers.  */
1596 for (i = regno; i < endregno; i++)
1599 regs[i].first_insn = NULL_RTX;
1600 regs[i].last_insn = NULL_RTX;
1608 /* # of registers saved by the interrupt handler. */
1609 #define INTERRUPT_FIXED_NUM 5
1611 /* # of bytes for registers saved by the interrupt handler. */
1612 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1614 /* # of words saved for other registers. */
1615 #define INTERRUPT_ALL_SAVE_NUM \
1616 (30 - INTERRUPT_FIXED_NUM)
/* Bytes needed for the "save everything" interrupt register block.  */
1618 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
/* Return the number of bytes of stack needed to save registers in the
   current function's prologue; the bitmask of saved registers is
   written to *P_REG_SAVED.  */
1621 compute_register_save_size (long * p_reg_saved)
1625 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1626 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1629 /* Count the return pointer if we need to save it. */
1630 if (crtl->profile && !call_p)
1632 df_set_regs_ever_live (LINK_POINTER_REGNUM, true);
1636 /* Count space for the register saves. */
1637 if (interrupt_handler)
/* Interrupt handlers must preserve every live register, caller-saved
   or not, since the interrupted code expects them intact.  */
1639 for (i = 0; i <= 31; i++)
1643 if (df_regs_ever_live_p (i) || call_p)
1646 reg_saved |= 1L << i;
1650 /* We don't save/restore r0 or the stack pointer */
1652 case STACK_POINTER_REGNUM:
1655 /* For registers with fixed use, we save them, set them to the
1656 appropriate value, and then restore them.
1657 These registers are handled specially, so don't list them
1658 on the list of registers to save in the prologue. */
1659 case 1: /* temp used to hold ep */
1661 case 10: /* temp used to call interrupt save/restore */
1662 case 11: /* temp used to call interrupt save/restore (long call) */
1663 case EP_REGNUM: /* ep */
1670 /* Find the first register that needs to be saved. */
1671 for (i = 0; i <= 31; i++)
1672 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1673 || i == LINK_POINTER_REGNUM))
1676 /* If it is possible that an out-of-line helper function might be
1677 used to generate the prologue for the current function, then we
1678 need to cover the possibility that such a helper function will
1679 be used, despite the fact that there might be gaps in the list of
1680 registers that need to be saved. To detect this we note that the
1681 helper functions always push at least register r29 (provided
1682 that the function is not an interrupt handler). */
1684 if (TARGET_PROLOG_FUNCTION
1685 && (i == 2 || ((i >= 20) && (i < 30))))
1690 reg_saved |= 1L << i;
1695 /* Helper functions save all registers between the starting
1696 register and the last register, regardless of whether they
1697 are actually used by the function or not. */
1698 for (; i <= 29; i++)
1701 reg_saved |= 1L << i;
1704 if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1707 reg_saved |= 1L << LINK_POINTER_REGNUM;
/* Non-helper path: count each live callee-saved register (plus the
   link pointer) individually.  */
1712 for (; i <= 31; i++)
1713 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1714 || i == LINK_POINTER_REGNUM))
1717 reg_saved |= 1L << i;
/* Hand the save mask back to the caller.  */
1723 *p_reg_saved = reg_saved;
/* Return the total frame size in bytes for the current function:
   the local frame SIZE, plus the register save area (reported via
   *P_REG_SAVED), plus space for outgoing arguments.
   NOTE(review): elided copy -- lines missing between the embedded
   original line numbers below.  */
1729 compute_frame_size (int size, long * p_reg_saved)
1732 + compute_register_save_size (p_reg_saved)
1733 + crtl->outgoing_args_size);
/* Size heuristic: return nonzero when calling the out-of-line
   prologue/epilogue helper routines (to save/restore NUM_SAVE
   registers plus adjust the stack by FRAME_SIZE bytes) is estimated
   to be smaller than the equivalent inline save/restore sequence.
   Byte lengths of both code sequences are estimated and compared.
   NOTE(review): elided copy -- e.g. the `else` arms between the
   paired assignments below are missing from this view.  */
1737 use_prolog_function (int num_save, int frame_size)
1739 int alloc_stack = (4 * num_save);
1740 int unalloc_stack = frame_size - alloc_stack;
1741 int save_func_len, restore_func_len;
1742 int save_normal_len, restore_normal_len;
1744 if (! TARGET_DISABLE_CALLT)
1745 save_func_len = restore_func_len = 2;
1747 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
/* Account for the extra stack adjustment the helper cannot do.  */
1751 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1752 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1755 /* See if we would have used ep to save the stack. */
1756 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1757 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1759 save_normal_len = restore_normal_len = 4 * num_save;
1761 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1762 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1764 /* Don't bother checking if we don't actually save any space.
1765 This happens for instance if one register is saved and additional
1766 stack space is allocated. */
1767 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
/* Emit RTL for the function prologue: interrupt-handler register
   setup, an optional single PARALLEL insn that calls an out-of-line
   save helper (validated via recog ()), otherwise one-by-one register
   stores, stack allocation (chunked when the offset will not fit a
   16-bit constant), and finally the frame pointer setup.
   NOTE(review): heavily elided copy -- declarations (i, reg_saved,
   save_regs, save_all, offset, code), braces and several statements
   are missing between the lines below.  */
1771 expand_prologue (void)
1774 unsigned int size = get_frame_size ();
1775 unsigned int actual_fsize;
1776 unsigned int init_stack_alloc = 0;
1779 unsigned int num_save;
1781 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
/* NOTE(review): "®_saved" below is mojibake -- "&reg" was corrupted
   into "®".  The argument should read "&reg_saved"; fix the encoding
   when restoring this file.  */
1784 actual_fsize = compute_frame_size (size, ®_saved);
1786 /* Save/setup global registers for interrupt functions right now. */
1787 if (interrupt_handler)
1789 if (! TARGET_DISABLE_CALLT)
1790 emit_insn (gen_callt_save_interrupt ());
1792 emit_insn (gen_save_interrupt ());
1794 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1796 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1797 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1800 /* Identify all of the saved registers. */
1802 for (i = 1; i < 32; i++)
1804 if (((1L << i) & reg_saved) != 0)
1805 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1808 /* See if we have an insn that allocates stack space and saves the particular
1809 registers we want to. */
1810 save_all = NULL_RTX;
1811 if (TARGET_PROLOG_FUNCTION && num_save > 0)
1813 if (use_prolog_function (num_save, actual_fsize))
1815 int alloc_stack = 4 * num_save;
/* Build the PARALLEL: element 0 is the stack adjustment, elements
   1..num_save are the register stores, optional trailing CLOBBERs
   cover the helper-call scratch registers r10/r11.  */
1818 save_all = gen_rtx_PARALLEL
1820 rtvec_alloc (num_save + 1
1821 + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1823 XVECEXP (save_all, 0, 0)
1824 = gen_rtx_SET (VOIDmode,
1826 gen_rtx_PLUS (Pmode,
1828 GEN_INT(-alloc_stack)));
1829 for (i = 0; i < num_save; i++)
1832 XVECEXP (save_all, 0, i+1)
1833 = gen_rtx_SET (VOIDmode,
1835 gen_rtx_PLUS (Pmode,
1841 if (TARGET_DISABLE_CALLT)
1843 XVECEXP (save_all, 0, num_save + 1)
1844 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1846 if (TARGET_LONG_CALLS)
1847 XVECEXP (save_all, 0, num_save + 2)
1848 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
/* Only use the PARALLEL if some pattern actually matches it.  */
1851 code = recog (save_all, NULL_RTX, NULL);
1854 rtx insn = emit_insn (save_all);
1855 INSN_CODE (insn) = code;
1856 actual_fsize -= alloc_stack;
1860 save_all = NULL_RTX;
1864 /* If no prolog save function is available, store the registers the old
1865 fashioned way (one by one). */
1868 /* Special case interrupt functions that save all registers for a call. */
1869 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1871 if (! TARGET_DISABLE_CALLT)
1872 emit_insn (gen_callt_save_all_interrupt ());
1874 emit_insn (gen_save_all_interrupt ());
1879 /* If the stack is too big, allocate it in chunks so we can do the
1880 register saves. We use the register save size so we use the ep
1882 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1883 init_stack_alloc = compute_register_save_size (NULL);
1885 init_stack_alloc = actual_fsize;
1887 /* Save registers at the beginning of the stack frame. */
1888 offset = init_stack_alloc - 4;
1890 if (init_stack_alloc)
1891 emit_insn (gen_addsi3 (stack_pointer_rtx,
1893 GEN_INT (- (signed) init_stack_alloc)));
1895 /* Save the return pointer first. */
1896 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1898 emit_move_insn (gen_rtx_MEM (SImode,
1899 plus_constant (stack_pointer_rtx,
1901 save_regs[--num_save]);
1905 for (i = 0; i < num_save; i++)
1907 emit_move_insn (gen_rtx_MEM (SImode,
1908 plus_constant (stack_pointer_rtx,
1916 /* Allocate the rest of the stack that was not allocated above (either it is
1917 > 32K or we just called a function to save the registers and needed more
1919 if (actual_fsize > init_stack_alloc)
1921 int diff = actual_fsize - init_stack_alloc;
1922 if (CONST_OK_FOR_K (-diff))
1923 emit_insn (gen_addsi3 (stack_pointer_rtx,
/* Offset too big for one add: materialize it in scratch r12.  */
1928 rtx reg = gen_rtx_REG (Pmode, 12);
1929 emit_move_insn (reg, GEN_INT (-diff));
1930 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
1934 /* If we need a frame pointer, set it up now. */
1935 if (frame_pointer_needed)
1936 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
/* Emit RTL for the function epilogue: undo the interrupt-handler
   save area accounting, tear down the frame pointer, optionally emit
   a single PARALLEL (return + stack adjust + register loads) matched
   via recog () against an out-of-line restore helper, otherwise
   restore registers one by one, release the stack (in two steps when
   the offset is too large), and emit the appropriate return insn.
   Mirrors expand_prologue above.
   NOTE(review): heavily elided copy -- declarations (i, reg_saved,
   restore_all, insn, code, diff), braces and statements are missing
   between the lines below.  */
1941 expand_epilogue (void)
1944 unsigned int size = get_frame_size ();
/* NOTE(review): "®_saved" is mojibake for "&reg_saved" -- fix the
   encoding when restoring this file (same defect as in
   expand_prologue).  */
1946 int actual_fsize = compute_frame_size (size, ®_saved);
1947 rtx restore_regs[32];
1949 unsigned int num_restore;
1951 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1953 /* Eliminate the initial stack stored by interrupt functions. */
1954 if (interrupt_handler)
1956 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1957 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1958 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1961 /* Cut off any dynamic stack created. */
1962 if (frame_pointer_needed)
1963 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1965 /* Identify all of the saved registers. */
1967 for (i = 1; i < 32; i++)
1969 if (((1L << i) & reg_saved) != 0)
1970 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1973 /* See if we have an insn that restores the particular registers we
1975 restore_all = NULL_RTX;
1977 if (TARGET_PROLOG_FUNCTION
1979 && !interrupt_handler)
1981 int alloc_stack = (4 * num_restore);
1982 int restore_func_len;
1983 int restore_normal_len;
1985 /* Don't bother checking if we don't actually save any space. */
1986 if (use_prolog_function (num_restore, actual_fsize))
/* Element 0: return; element 1: stack adjust; 2..: register loads.  */
1989 restore_all = gen_rtx_PARALLEL (VOIDmode,
1990 rtvec_alloc (num_restore + 2));
1991 XVECEXP (restore_all, 0, 0) = gen_rtx_RETURN (VOIDmode);
1992 XVECEXP (restore_all, 0, 1)
1993 = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1994 gen_rtx_PLUS (Pmode,
1996 GEN_INT (alloc_stack)));
1998 offset = alloc_stack - 4;
1999 for (i = 0; i < num_restore; i++)
2001 XVECEXP (restore_all, 0, i+2)
2002 = gen_rtx_SET (VOIDmode,
2005 gen_rtx_PLUS (Pmode,
/* Only emit the PARALLEL if some pattern recognizes it.  */
2011 code = recog (restore_all, NULL_RTX, NULL);
2017 actual_fsize -= alloc_stack;
/* Release any stack the helper will not release itself.  */
2020 if (CONST_OK_FOR_K (actual_fsize))
2021 emit_insn (gen_addsi3 (stack_pointer_rtx,
2023 GEN_INT (actual_fsize)));
2026 rtx reg = gen_rtx_REG (Pmode, 12);
2027 emit_move_insn (reg, GEN_INT (actual_fsize));
2028 emit_insn (gen_addsi3 (stack_pointer_rtx,
2034 insn = emit_jump_insn (restore_all);
2035 INSN_CODE (insn) = code;
2039 restore_all = NULL_RTX;
2043 /* If no epilogue save function is available, restore the registers the
2044 old fashioned way (one by one). */
2047 unsigned int init_stack_free;
2049 /* If the stack is large, we need to cut it down in 2 pieces. */
2050 if (interrupt_handler)
2051 init_stack_free = 0;
2052 else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
2053 init_stack_free = 4 * num_restore;
2055 init_stack_free = (signed) actual_fsize;
2057 /* Deallocate the rest of the stack if it is > 32K. */
2058 if ((unsigned int) actual_fsize > init_stack_free)
2062 diff = actual_fsize - init_stack_free;
2064 if (CONST_OK_FOR_K (diff))
2065 emit_insn (gen_addsi3 (stack_pointer_rtx,
/* Offset too big for one add: materialize it in scratch r12.  */
2070 rtx reg = gen_rtx_REG (Pmode, 12);
2071 emit_move_insn (reg, GEN_INT (diff));
2072 emit_insn (gen_addsi3 (stack_pointer_rtx,
2078 /* Special case interrupt functions that save all registers
2080 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
2082 if (! TARGET_DISABLE_CALLT)
2083 emit_insn (gen_callt_restore_all_interrupt ());
2085 emit_insn (gen_restore_all_interrupt ());
2089 /* Restore registers from the beginning of the stack frame. */
2090 int offset = init_stack_free - 4;
2092 /* Restore the return pointer first. */
2094 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
2096 emit_move_insn (restore_regs[--num_restore],
2097 gen_rtx_MEM (SImode,
2098 plus_constant (stack_pointer_rtx,
2103 for (i = 0; i < num_restore; i++)
2105 emit_move_insn (restore_regs[i],
2106 gen_rtx_MEM (SImode,
2107 plus_constant (stack_pointer_rtx,
/* Keep the restored value live so it is not deleted as dead.  */
2110 emit_use (restore_regs[i]);
2114 /* Cut back the remainder of the stack. */
2115 if (init_stack_free)
2116 emit_insn (gen_addsi3 (stack_pointer_rtx,
2118 GEN_INT (init_stack_free)));
2121 /* And return or use reti for interrupt handlers. */
2122 if (interrupt_handler)
2124 if (! TARGET_DISABLE_CALLT)
2125 emit_insn (gen_callt_return_interrupt ())
2127 emit_jump_insn (gen_return_interrupt ());
2129 else if (actual_fsize)
2130 emit_jump_insn (gen_return_internal ());
2132 emit_jump_insn (gen_return_simple ());
/* Invalidate the cached interrupt-ness for the next function.  */
2135 v850_interrupt_cache_p = FALSE;
2136 v850_interrupt_p = FALSE;
2139 /* Update the condition code from the insn. */
/* NOTICE_UPDATE_CC hook: adjust cc_status according to INSN's "cc"
   attribute so redundant compares can be elided.
   NOTE(review): elided copy -- the case labels and CC_STATUS_INIT
   calls between the comments below are missing from this view.  */
2141 notice_update_cc (rtx body, rtx insn)
2143 switch (get_attr_cc (insn))
2146 /* Insn does not affect CC at all. */
2150 /* Insn does not change CC, but the 0'th operand has been changed. */
2151 if (cc_status.value1 != 0
2152 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
2153 cc_status.value1 = 0;
2157 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2158 V,C is in an unusable state. */
2160 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
2161 cc_status.value1 = recog_data.operand[0];
2165 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2166 C is in an unusable state. */
2168 cc_status.flags |= CC_NO_CARRY;
2169 cc_status.value1 = recog_data.operand[0];
2173 /* The insn is a compare instruction. */
2175 cc_status.value1 = SET_SRC (body);
2179 /* Insn doesn't leave CC in a usable state. */
2185 /* Retrieve the data area that has been chosen for the given decl. */
/* Checks DECL's attribute list for "sda"/"tda"/"zda" (in that order)
   and returns the matching v850_data_area; DATA_AREA_NORMAL when
   none is present.  */
2188 v850_get_data_area (tree decl)
2190 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2191 return DATA_AREA_SDA;
2193 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2194 return DATA_AREA_TDA;
2196 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2197 return DATA_AREA_ZDA;
2199 return DATA_AREA_NORMAL;
2202 /* Store the indicated data area in the decl's attributes. */
/* Prepends the attribute ("sda"/"tda"/"zda") matching DATA_AREA onto
   DECL's attribute list.
   NOTE(review): elided copy -- the switch header, default case and
   braces are missing from this view.  */
2205 v850_set_data_area (tree decl, v850_data_area data_area)
2211 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2212 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2213 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2218 DECL_ATTRIBUTES (decl) = tree_cons
2219 (name, NULL, DECL_ATTRIBUTES (decl));
2222 /* Handle an "interrupt" attribute; arguments as in
2223 struct attribute_spec.handler. */
/* Rejects the attribute (with a -Wattributes warning and
   *NO_ADD_ATTRS = true) when applied to anything other than a
   function declaration.  */
2225 v850_handle_interrupt_attribute (tree * node,
2227 tree args ATTRIBUTE_UNUSED,
2228 int flags ATTRIBUTE_UNUSED,
2229 bool * no_add_attrs)
2231 if (TREE_CODE (*node) != FUNCTION_DECL)
2233 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2235 *no_add_attrs = true;
2241 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2242 struct attribute_spec.handler. */
/* Validates data-area attributes: maps the attribute name to a
   v850_data_area, rejects placement on local (auto) variables, and
   diagnoses conflicts with a previously chosen data area.
   NOTE(review): elided copy -- the gcc_unreachable () default, case
   labels (VAR_DECL etc.) and braces are missing from this view.  */
2244 v850_handle_data_area_attribute (tree* node,
2246 tree args ATTRIBUTE_UNUSED,
2247 int flags ATTRIBUTE_UNUSED,
2248 bool * no_add_attrs)
2250 v850_data_area data_area;
2251 v850_data_area area;
2254 /* Implement data area attribute. */
2255 if (is_attribute_p ("sda", name))
2256 data_area = DATA_AREA_SDA;
2257 else if (is_attribute_p ("tda", name))
2258 data_area = DATA_AREA_TDA;
2259 else if (is_attribute_p ("zda", name))
2260 data_area = DATA_AREA_ZDA;
2264 switch (TREE_CODE (decl))
2267 if (current_function_decl != NULL_TREE)
2269 error_at (DECL_SOURCE_LOCATION (decl),
2270 "data area attributes cannot be specified for "
2272 *no_add_attrs = true;
2278 area = v850_get_data_area (decl);
2279 if (area != DATA_AREA_NORMAL && data_area != area)
2281 error ("data area of %q+D conflicts with previous declaration",
2283 *no_add_attrs = true;
2295 /* Return nonzero if FUNC is an interrupt function as specified
2296 by the "interrupt" attribute. */
/* Also accepts "interrupt_handler".  The result is cached in
   v850_interrupt_p / v850_interrupt_cache_p, but only once reload
   has started, since attributes may still change before then.  */
2299 v850_interrupt_function_p (tree func)
2304 if (v850_interrupt_cache_p)
2305 return v850_interrupt_p;
2307 if (TREE_CODE (func) != FUNCTION_DECL)
2310 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2316 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2317 ret = a != NULL_TREE;
2320 /* Its not safe to trust global variables until after function inlining has
2322 if (reload_completed | reload_in_progress)
/* NOTE(review): bitwise "|" above works on these flags but "||" is
   the idiomatic spelling; also "Its" should read "It's".  */
2323 v850_interrupt_p = ret;
/* Decide which small data area DECL lives in and record the choice as
   SYMBOL_FLAG_{ZDA,TDA,SDA} on SYMBOL (its SYMBOL_REF).  An explicit
   section name or the -m{zda,sda,tda}=N size limits select the area.
   NOTE(review): elided copy -- the "if (size <= 0)" style guards and
   braces between the else-if ladder below are missing from this
   view.  */
2330 v850_encode_data_area (tree decl, rtx symbol)
2334 /* Map explicit sections into the appropriate attribute */
2335 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2337 if (DECL_SECTION_NAME (decl))
2339 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
2341 if (streq (name, ".zdata") || streq (name, ".zbss"))
2342 v850_set_data_area (decl, DATA_AREA_ZDA);
2344 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2345 v850_set_data_area (decl, DATA_AREA_SDA);
2347 else if (streq (name, ".tdata"))
2348 v850_set_data_area (decl, DATA_AREA_TDA);
2351 /* If no attribute, support -m{zda,sda,tda}=n */
2354 int size = int_size_in_bytes (TREE_TYPE (decl));
2358 else if (size <= small_memory [(int) SMALL_MEMORY_TDA].max)
2359 v850_set_data_area (decl, DATA_AREA_TDA);
2361 else if (size <= small_memory [(int) SMALL_MEMORY_SDA].max)
2362 v850_set_data_area (decl, DATA_AREA_SDA);
2364 else if (size <= small_memory [(int) SMALL_MEMORY_ZDA].max)
2365 v850_set_data_area (decl, DATA_AREA_ZDA);
/* Nothing to encode if the decl stayed in the normal data area.  */
2368 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2372 flags = SYMBOL_REF_FLAGS (symbol);
2373 switch (v850_get_data_area (decl))
2375 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2376 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2377 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2378 default: gcc_unreachable ();
2380 SYMBOL_REF_FLAGS (symbol) = flags;
/* TARGET_ENCODE_SECTION_INFO hook: run the default encoding, then
   attach v850 data-area flags to static/external variables.  */
2384 v850_encode_section_info (tree decl, rtx rtl, int first)
2386 default_encode_section_info (decl, rtl, first);
2388 if (TREE_CODE (decl) == VAR_DECL
2389 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2390 v850_encode_data_area (decl, XEXP (rtl, 0));
2393 /* Construct a JR instruction to a routine that will perform the equivalent of
2394 the RTL passed in as an argument. This RTL is a function epilogue that
2395 pops registers off the stack and possibly releases some extra stack space
2396 as well. The code has already verified that the RTL matches these
/* Returns a pointer to a static buffer holding the assembly text;
   callers must consume it before the next call.
   NOTE(review): elided copy -- declarations (i, stack_bytes), braces
   and some statements are missing between the lines below.  */
2400 construct_restore_jr (rtx op)
2402 int count = XVECLEN (op, 0);
2404 unsigned long int mask;
2405 unsigned long int first;
2406 unsigned long int last;
2408 static char buff [100]; /* XXX */
2412 error ("bogus JR construction: %d", count);
2416 /* Work out how many bytes to pop off the stack before retrieving
2418 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2419 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2420 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2422 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2424 /* Each pop will remove 4 bytes from the stack.... */
2425 stack_bytes -= (count - 2) * 4;
2427 /* Make sure that the amount we are popping either 0 or 16 bytes. */
/* NOTE(review): the comment above is stale -- the check below only
   accepts 0 bytes of extra stack removal.  */
2428 if (stack_bytes != 0)
2430 error ("bad amount of stack space removal: %d", stack_bytes);
2434 /* Now compute the bit mask of registers to push. */
2436 for (i = 2; i < count; i++)
2438 rtx vector_element = XVECEXP (op, 0, i);
2440 gcc_assert (GET_CODE (vector_element) == SET);
2441 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2442 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2445 mask |= 1 << REGNO (SET_DEST (vector_element));
2448 /* Scan for the first register to pop. */
2449 for (first = 0; first < 32; first++)
2451 if (mask & (1 << first))
2455 gcc_assert (first < 32);
2457 /* Discover the last register to pop. */
2458 if (mask & (1 << LINK_POINTER_REGNUM))
2460 last = LINK_POINTER_REGNUM;
2464 gcc_assert (!stack_bytes);
2465 gcc_assert (mask & (1 << 29));
2470 /* Note, it is possible to have gaps in the register mask.
2471 We ignore this here, and generate a JR anyway. We will
2472 be popping more registers than is strictly necessary, but
2473 it does save code space. */
2475 if (TARGET_LONG_CALLS)
2480 sprintf (name, "__return_%s", reg_names [first]);
2482 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2484 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2490 sprintf (buff, "jr __return_%s", reg_names [first]);
2492 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2499 /* Construct a JARL instruction to a routine that will perform the equivalent
2500 of the RTL passed as a parameter. This RTL is a function prologue that
2501 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2502 some stack space as well. The code has already verified that the RTL
2503 matches these requirements. */
/* Returns a pointer to a static buffer holding the assembly text.
   NOTE(review): elided copy -- declarations (i, stack_bytes), braces
   and some statements are missing between the lines below.  */
2505 construct_save_jarl (rtx op)
2507 int count = XVECLEN (op, 0);
2509 unsigned long int mask;
2510 unsigned long int first;
2511 unsigned long int last;
2513 static char buff [100]; /* XXX */
2515 if (count <= (TARGET_LONG_CALLS ? 3 : 2))
/* NOTE(review): GCC error () messages conventionally have no
   trailing "\n" -- the one below looks like an oversight.  */
2517 error ("bogus JARL construction: %d\n", count);
2522 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2523 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2524 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2525 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2527 /* Work out how many bytes to push onto the stack after storing the
2529 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2531 /* Each push will put 4 bytes from the stack.... */
2532 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2534 /* Make sure that the amount we are popping either 0 or 16 bytes. */
/* NOTE(review): stale comment and message below -- this routine
   PUSHES; the wording was copied from construct_restore_jr.  */
2535 if (stack_bytes != 0)
2537 error ("bad amount of stack space removal: %d", stack_bytes);
2541 /* Now compute the bit mask of registers to push. */
2543 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2545 rtx vector_element = XVECEXP (op, 0, i);
2547 gcc_assert (GET_CODE (vector_element) == SET);
2548 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2549 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2552 mask |= 1 << REGNO (SET_SRC (vector_element));
2555 /* Scan for the first register to push. */
2556 for (first = 0; first < 32; first++)
2558 if (mask & (1 << first))
2562 gcc_assert (first < 32);
2564 /* Discover the last register to push. */
2565 if (mask & (1 << LINK_POINTER_REGNUM))
2567 last = LINK_POINTER_REGNUM;
2571 gcc_assert (!stack_bytes);
2572 gcc_assert (mask & (1 << 29));
2577 /* Note, it is possible to have gaps in the register mask.
2578 We ignore this here, and generate a JARL anyway. We will
2579 be pushing more registers than is strictly necessary, but
2580 it does save code space. */
2582 if (TARGET_LONG_CALLS)
2587 sprintf (name, "__save_%s", reg_names [first]);
2589 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2591 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2597 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2599 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2606 extern tree last_assemble_variable_decl;
2607 extern int size_directive_output;
2609 /* A version of asm_output_aligned_bss() that copes with the special
2610 data areas of the v850. */
/* Switches to the bss-style section matching DECL's data area
   (zbss/sbss/tdata/bss; tiny data has no separate bss section),
   aligns, emits the object's label and reserves SIZE bytes.
   NOTE(review): elided copy -- the case labels, the #else between
   the two label-emitting forms, and braces are missing below; the
   visible #ifdef/#endif structure is incomplete in this view.  */
2612 v850_output_aligned_bss (FILE * file,
2615 unsigned HOST_WIDE_INT size,
2618 switch (v850_get_data_area (decl))
2621 switch_to_section (zbss_section);
2625 switch_to_section (sbss_section);
2629 switch_to_section (tdata_section);
2632 switch_to_section (bss_section);
2636 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2637 #ifdef ASM_DECLARE_OBJECT_NAME
2638 last_assemble_variable_decl = decl;
2639 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2641 /* Standard thing is just output label for the object. */
2642 ASM_OUTPUT_LABEL (file, name);
2643 #endif /* ASM_DECLARE_OBJECT_NAME */
2644 ASM_OUTPUT_SKIP (file, size ? size : 1);
2647 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
/* Emits the data-area-specific .zcomm/.scomm/.tcomm (or plain .comm)
   directive for a common symbol, followed by name, size and byte
   alignment.
   NOTE(review): elided copy -- parameter list and case labels are
   missing from this view.  */
2649 v850_output_common (FILE * file,
2655 if (decl == NULL_TREE)
2657 fprintf (file, "%s", COMMON_ASM_OP);
2661 switch (v850_get_data_area (decl))
2664 fprintf (file, "%s", ZCOMMON_ASM_OP);
2668 fprintf (file, "%s", SCOMMON_ASM_OP);
2672 fprintf (file, "%s", TCOMMON_ASM_OP);
2676 fprintf (file, "%s", COMMON_ASM_OP);
2681 assemble_name (file, name);
2682 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2685 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
/* Emits a .local directive for NAME, then defers to the common
   handler (above) so the symbol lands in the right data area.  */
2687 v850_output_local (FILE * file,
2693 fprintf (file, "%s", LOCAL_ASM_OP);
2694 assemble_name (file, name);
2695 fprintf (file, "\n");
2697 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2700 /* Add data area to the given declaration if a ghs data area pragma is
2701 currently in effect (#pragma ghs startXXX/endXXX). */
/* TARGET_INSERT_ATTRIBUTES hook.  Also lazily initializes the default
   GHS section-name table and, for file-scope decls, attaches a
   section attribute when a GHS pragma renamed the section kind.
   NOTE(review): elided copy -- the `if (data_area_stack` header,
   case labels and braces are missing between the lines below.  */
2703 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
2706 && data_area_stack->data_area
2707 && current_function_decl == NULL_TREE
2708 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2709 && v850_get_data_area (decl) == DATA_AREA_NORMAL)
2710 v850_set_data_area (decl, data_area_stack->data_area)
2712 /* Initialize the default names of the v850 specific sections,
2713 if this has not been done before. */
2715 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2717 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2718 = build_string (sizeof (".sdata")-1, ".sdata");
2720 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2721 = build_string (sizeof (".rosdata")-1, ".rosdata");
2723 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2724 = build_string (sizeof (".tdata")-1, ".tdata");
2726 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2727 = build_string (sizeof (".zdata")-1, ".zdata");
2729 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2730 = build_string (sizeof (".rozdata")-1, ".rozdata");
2733 if (current_function_decl == NULL_TREE
2734 && (TREE_CODE (decl) == VAR_DECL
2735 || TREE_CODE (decl) == CONST_DECL
2736 || TREE_CODE (decl) == FUNCTION_DECL)
2737 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2738 && !DECL_SECTION_NAME (decl))
2740 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2741 tree chosen_section;
2743 if (TREE_CODE (decl) == FUNCTION_DECL)
2744 kind = GHS_SECTION_KIND_TEXT;
2747 /* First choose a section kind based on the data area of the decl. */
2748 switch (v850_get_data_area (decl))
2754 kind = ((TREE_READONLY (decl))
2755 ? GHS_SECTION_KIND_ROSDATA
2756 : GHS_SECTION_KIND_SDATA);
2760 kind = GHS_SECTION_KIND_TDATA;
2764 kind = ((TREE_READONLY (decl))
2765 ? GHS_SECTION_KIND_ROZDATA
2766 : GHS_SECTION_KIND_ZDATA);
2769 case DATA_AREA_NORMAL: /* default data area */
2770 if (TREE_READONLY (decl))
2771 kind = GHS_SECTION_KIND_RODATA;
2772 else if (DECL_INITIAL (decl))
2773 kind = GHS_SECTION_KIND_DATA;
2775 kind = GHS_SECTION_KIND_BSS;
2779 /* Now, if the section kind has been explicitly renamed,
2780 then attach a section attribute. */
2781 chosen_section = GHS_current_section_names [(int) kind];
2783 /* Otherwise, if this kind of section needs an explicit section
2784 attribute, then also attach one. */
2785 if (chosen_section == NULL)
2786 chosen_section = GHS_default_section_names [(int) kind];
2790 /* Only set the section name if specified by a pragma, because
2791 otherwise it will force those variables to get allocated storage
2792 in this module, rather than by the linker. */
2793 DECL_SECTION_NAME (decl) = chosen_section;
2798 /* Construct a DISPOSE instruction that is the equivalent of
2799 the given RTX. We have already verified that this should
/* Returns assembly text in a static buffer: either a "callt" to an
   out-of-line restore helper (when callt is enabled and applicable)
   or a "dispose" instruction with a textual register-range list.
   NOTE(review): elided copy -- declarations (i, stack_bytes,
   use_callt, first), braces and statements are missing below.  */
2803 construct_dispose_instruction (rtx op)
2805 int count = XVECLEN (op, 0);
2807 unsigned long int mask;
2809 static char buff[ 100 ]; /* XXX */
2814 error ("bogus DISPOSE construction: %d", count);
2818 /* Work out how many bytes to pop off the
2819 stack before retrieving registers. */
2820 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2821 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2822 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2824 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2826 /* Each pop will remove 4 bytes from the stack.... */
2827 stack_bytes -= (count - 2) * 4;
2829 /* Make sure that the amount we are popping
2830 will fit into the DISPOSE instruction. */
2831 if (stack_bytes > 128)
2833 error ("too much stack space to dispose of: %d", stack_bytes);
2837 /* Now compute the bit mask of registers to push. */
/* NOTE(review): stale wording above -- this is a pop (restore)
   routine; the comment was copied from the save path.  */
2840 for (i = 2; i < count; i++)
2842 rtx vector_element = XVECEXP (op, 0, i);
2844 gcc_assert (GET_CODE (vector_element) == SET);
2845 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2846 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2849 if (REGNO (SET_DEST (vector_element)) == 2)
2852 mask |= 1 << REGNO (SET_DEST (vector_element));
2855 if (! TARGET_DISABLE_CALLT
2856 && (use_callt || stack_bytes == 0))
2860 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2865 for (i = 20; i < 32; i++)
2866 if (mask & (1 << i))
2870 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2872 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2873 i, (mask & (1 << 31)) ? "31c" : "29");
2878 static char regs [100]; /* XXX */
2881 /* Generate the DISPOSE instruction. Note we could just issue the
2882 bit mask as a number as the assembler can cope with this, but for
2883 the sake of our readers we turn it into a textual description. */
2887 for (i = 20; i < 32; i++)
2889 if (mask & (1 << i))
2894 strcat (regs, ", ");
2899 strcat (regs, reg_names[ first ]);
/* Extend the run: find where the contiguous mask bits stop and emit
   "rN - rM" for the range.  */
2901 for (i++; i < 32; i++)
2902 if ((mask & (1 << i)) == 0)
2907 strcat (regs, " - ");
2908 strcat (regs, reg_names[ i - 1 ] );
2913 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2919 /* Construct a PREPARE instruction that is the equivalent of
2920 the given RTL. We have already verified that this should
/* Returns assembly text in a static buffer: either a "callt" to an
   out-of-line save helper or a "prepare" instruction with a textual
   register-range list.  Mirror image of
   construct_dispose_instruction above.
   NOTE(review): elided copy -- declarations (count, stack_bytes, i,
   use_callt, first), braces and statements are missing below.  */
2924 construct_prepare_instruction (rtx op)
2928 unsigned long int mask;
2930 static char buff[ 100 ]; /* XXX */
2933 if (XVECLEN (op, 0) <= 1)
/* NOTE(review): "PREPEARE" below is a typo for "PREPARE" in this
   diagnostic string -- fix when the file can be edited for real
   (string text is program output, so left untouched here).  */
2935 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2939 /* Work out how many bytes to push onto
2940 the stack after storing the registers. */
2941 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2942 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2943 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2945 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2948 /* Make sure that the amount we are popping
2949 will fit into the DISPOSE instruction. */
/* NOTE(review): stale comment above -- should say "pushing" and
   "PREPARE"; copied from the dispose routine.  */
2950 if (stack_bytes < -128)
2952 error ("too much stack space to prepare: %d", stack_bytes);
2956 /* Now compute the bit mask of registers to push. */
2959 for (i = 1; i < XVECLEN (op, 0); i++)
2961 rtx vector_element = XVECEXP (op, 0, i);
2963 if (GET_CODE (vector_element) == CLOBBER)
2966 gcc_assert (GET_CODE (vector_element) == SET);
2967 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2968 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2971 if (REGNO (SET_SRC (vector_element)) == 2)
2974 mask |= 1 << REGNO (SET_SRC (vector_element));
2978 stack_bytes += count * 4;
2980 if ((! TARGET_DISABLE_CALLT)
2981 && (use_callt || stack_bytes == 0))
2985 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2989 for (i = 20; i < 32; i++)
2990 if (mask & (1 << i))
2994 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2996 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2997 i, (mask & (1 << 31)) ? "31c" : "29");
3001 static char regs [100]; /* XXX */
3005 /* Generate the PREPARE instruction. Note we could just issue the
3006 bit mask as a number as the assembler can cope with this, but for
3007 the sake of our readers we turn it into a textual description. */
3011 for (i = 20; i < 32; i++)
3013 if (mask & (1 << i))
3018 strcat (regs, ", ");
3023 strcat (regs, reg_names[ first ]);
/* Extend the contiguous run and emit "rN - rM" for the range.  */
3025 for (i++; i < 32; i++)
3026 if ((mask & (1 << i)) == 0)
3031 strcat (regs, " - ");
3032 strcat (regs, reg_names[ i - 1 ] );
3037 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
3043 /* Return an RTX indicating where the return address to the
3044 calling function can be found. */
/* Only the current frame (COUNT == 0, presumably -- the guard is
   elided from this copy) is supported; the value comes from the
   link pointer register's entry value.  */
3047 v850_return_addr (int count)
3052 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
3055 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Creates the v850-specific output sections: read-only small/zero
   data (.rosdata/.rozdata), tiny data (.tdata), zero data (.zdata)
   and zero bss (.zbss).  Section variable assignments (left-hand
   sides) are elided from this copy.  */
3058 v850_asm_init_sections (void)
3061 = get_unnamed_section (0, output_section_asm_op,
3062 "\t.section .rosdata,\"a\"");
3065 = get_unnamed_section (0, output_section_asm_op,
3066 "\t.section .rozdata,\"a\"");
3069 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
3070 "\t.section .tdata,\"aw\"");
3073 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
3074 "\t.section .zdata,\"aw\"");
3077 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
3078 output_section_asm_op,
3079 "\t.section .zbss,\"aw\"");
/* TARGET_ASM_SELECT_SECTION hook: choose an output section for EXP
   based on its data area and const-ness.  Non-variables default to
   readonly_data_section.
   NOTE(review): elided copy -- the is_const declaration/assignment
   and case labels are missing from this view.  */
3083 v850_select_section (tree exp,
3084 int reloc ATTRIBUTE_UNUSED,
3085 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
3087 if (TREE_CODE (exp) == VAR_DECL)
3090 if (!TREE_READONLY (exp)
3091 || TREE_SIDE_EFFECTS (exp)
3092 || !DECL_INITIAL (exp)
3093 || (DECL_INITIAL (exp) != error_mark_node
3094 && !TREE_CONSTANT (DECL_INITIAL (exp))))
3099 switch (v850_get_data_area (exp))
3102 return is_const ? rozdata_section : zdata_section;
3105 return tdata_section;
3108 return is_const ? rosdata_section : sdata_section;
3111 return is_const ? readonly_data_section : data_section;
3114 return readonly_data_section;
3117 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3120 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3122 /* Return values > 8 bytes in length in memory. */
/* BLKmode aggregates are also forced to memory regardless of size.  */
3123 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
3126 /* Worker function for TARGET_FUNCTION_VALUE. */
/* Scalar return values always come back in r10.  */
3129 v850_function_value (const_tree valtype,
3130 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3131 bool outgoing ATTRIBUTE_UNUSED)
3133 return gen_rtx_REG (TYPE_MODE (valtype), 10);
3137 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
/* Just records that the function takes anonymous arguments (except
   under the GHS ABI); no registers are spilled here.  */
3140 v850_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
3141 enum machine_mode mode ATTRIBUTE_UNUSED,
3142 tree type ATTRIBUTE_UNUSED,
3143 int *pretend_arg_size ATTRIBUTE_UNUSED,
3144 int second_time ATTRIBUTE_UNUSED)
3146 ca->anonymous_args = (!TARGET_GHS ? 1 : 0);
3149 /* Worker function for TARGET_CAN_ELIMINATE. */
/* Elimination to the stack pointer is only possible when no frame
   pointer is needed; elimination to any other register is always
   allowed.  */
3152 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3154 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
3158 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE. */
/* The template loads the static chain (offset 12) into r20 and the
   target address (offset 16) into r12 relative to the jarl-captured
   PC, then jumps; the two .long slots are patched by
   v850_trampoline_init below.  */
3161 v850_asm_trampoline_template (FILE *f)
3163 fprintf (f, "\tjarl .+4,r12\n");
3164 fprintf (f, "\tld.w 12[r12],r20\n");
3165 fprintf (f, "\tld.w 16[r12],r12\n");
3166 fprintf (f, "\tjmp [r12]\n");
3167 fprintf (f, "\tnop\n");
3168 fprintf (f, "\t.long 0\n");
3169 fprintf (f, "\t.long 0\n");
3172 /* Worker function for TARGET_TRAMPOLINE_INIT. */
/* Copies the code template into M_TRAMP and patches the two data
   slots: the static chain at byte offset 16 and the target function
   address at byte offset 20.  */
3175 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3177 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3179 emit_block_move (m_tramp, assemble_trampoline_template (),
3180 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3182 mem = adjust_address (m_tramp, SImode, 16);
3183 emit_move_insn (mem, chain_value);
3184 mem = adjust_address (m_tramp, SImode, 20);
3185 emit_move_insn (mem, fnaddr);
/* Scheduler issue rate: the V850E2 family is dual-issue, earlier
   cores are single-issue.  */
3189 v850_issue_rate (void)
3191 return (TARGET_V850E2_ALL? 2 : 1);
3193 #include "gt-v850.h"