1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
38 #include "diagnostic-core.h"
41 #include "integrate.h"
44 #include "target-def.h"
/* NOTE(review): the original line numbers embedded in this dump are
   non-contiguous, so source lines are missing throughout this file;
   verify any change against the complete source.  */
48 #define streq(a,b) (strcmp (a, b) == 0)
51 static void v850_print_operand_address (FILE *, rtx);
53 /* Information about the various small memory areas. */
54 struct small_memory_info small_memory[ (int)SMALL_MEMORY_max ] =
56 /* Name Max Physical max. */
62 /* Names of the various data areas used on the v850. */
63 tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
64 tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
66 /* Track the current data area set by the data area pragma (which
67 can be nested). Tested by check_default_data_area. */
68 data_area_stack_element * data_area_stack = NULL;
70 /* True if we don't need to check any more if the current
71 function is an interrupt handler. */
72 static int v850_interrupt_cache_p = FALSE;
/* Pending comparison operands; presumably set by the compare/branch
   expanders (v850_gen_float_compare reads v850_compare_op0) -- TODO confirm.  */
74 rtx v850_compare_op0, v850_compare_op1;
76 /* Whether current function is an interrupt handler. */
77 static int v850_interrupt_p = FALSE;
/* Lazily-created section objects for the v850-specific data areas,
   rooted for the garbage collector via GTY(()).  */
79 static GTY(()) section * rosdata_section;
80 static GTY(()) section * rozdata_section;
81 static GTY(()) section * tdata_section;
82 static GTY(()) section * zdata_section;
83 static GTY(()) section * zbss_section;
85 /* Set the maximum size of small memory area TYPE to the value given
86 by VALUE. Return true if VALUE was syntactically correct. VALUE
87 starts with the argument separator: either "-" or "=". */
90 v850_handle_memory_option (enum small_memory_type type, const char *value)
/* Reject anything that does not start with the option separator.  */
94 if (*value != '-' && *value != '=')
/* Every remaining character must be a decimal digit.  */
98 for (i = 0; value[i]; i++)
99 if (!ISDIGIT (value[i]))
/* NOTE(review): the parse of `size' from VALUE is on lines elided from
   this dump.  Diagnose values exceeding the hardware limit but still
   record the (clamped?) maximum -- confirm against full source.  */
103 if (size > small_memory[type].physical_max)
104 error ("value passed to %<-m%s%> is too large", small_memory[type].name);
106 small_memory[type].max = size;
110 /* Implement TARGET_HANDLE_OPTION. */
113 v850_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
/* NOTE(review): the switch-case labels selecting which option CODE each
   action belongs to are elided from this dump.  */
118 target_flags |= MASK_EP | MASK_PROLOG_FUNCTION;
/* Clear the CPU-selection bits, keeping only the requested CPU mask.  */
122 target_flags &= ~(MASK_CPU ^ MASK_V850);
127 target_flags &= ~(MASK_CPU ^ MASK_V850E);
/* -mtda=/-msda=/-mzda= : delegate to the small-memory-area parser.  */
131 return v850_handle_memory_option (SMALL_MEMORY_TDA, arg);
134 return v850_handle_memory_option (SMALL_MEMORY_SDA, arg);
137 return v850_handle_memory_option (SMALL_MEMORY_ZDA, arg);
144 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
146 static const struct default_options v850_option_optimization_table[] =
148 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
149 /* Note - we no longer enable MASK_EP when optimizing. This is
150 because of a hardware bug which stops the SLD and SST instructions
151 from correctly detecting some hazards. If the user is sure that
152 their hardware is fixed or that their program will not encounter
153 the conditions that trigger the bug then they can enable -mep by
155 { OPT_LEVELS_1_PLUS, OPT_mprolog_function, NULL, 1 },
/* Table terminator.  */
156 { OPT_LEVELS_NONE, 0, NULL, 0 }
159 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
160 Specify whether to pass the argument by reference. */
163 v850_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
164 enum machine_mode mode, const_tree type,
165 bool named ATTRIBUTE_UNUSED)
167 unsigned HOST_WIDE_INT size;
/* Use the type's size when available, else fall back to the mode size.
   NOTE(review): the guarding if/else and final size comparison are on
   lines elided from this dump.  */
170 size = int_size_in_bytes (type);
172 size = GET_MODE_SIZE (mode);
177 /* Implementing the Varargs Macros. */
/* Strict argument naming is enabled for every ABI except GHS.  */
180 v850_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
182 return !TARGET_GHS ? true : false;
185 /* Return an RTX to represent where an argument with mode MODE
186 and type TYPE will be passed to a function. If the result
187 is NULL_RTX, the argument will be pushed. */
190 v850_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
191 const_tree type, bool named)
193 rtx result = NULL_RTX;
/* Compute the argument size (type size if known, mode size otherwise)
   and round it up to a whole number of words.  */
200 size = int_size_in_bytes (type);
202 size = GET_MODE_SIZE (mode);
204 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
208 /* Once we have stopped using argument registers, do not start up again. */
209 cum->nbytes = 4 * UNITS_PER_WORD;
/* Small typed arguments are aligned to their type's alignment;
   NOTE(review): the else branch choosing the default alignment is
   elided from this dump.  */
213 if (size <= UNITS_PER_WORD && type)
214 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
218 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
/* Only the first four words (r6-r9) are passed in registers.  */
220 if (cum->nbytes > 4 * UNITS_PER_WORD)
223 if (type == NULL_TREE
224 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Select the argument register corresponding to the next free word.  */
227 switch (cum->nbytes / UNITS_PER_WORD)
230 result = gen_rtx_REG (mode, 6);
233 result = gen_rtx_REG (mode, 7);
236 result = gen_rtx_REG (mode, 8);
239 result = gen_rtx_REG (mode, 9);
248 /* Return the number of bytes which must be put into registers
249 for values which are part in registers and part in memory. */
251 v850_arg_partial_bytes (CUMULATIVE_ARGS * cum, enum machine_mode mode,
252 tree type, bool named)
/* Under the GHS ABI, unnamed (variadic) arguments are never split.  */
256 if (TARGET_GHS && !named)
260 size = int_size_in_bytes (type);
262 size = GET_MODE_SIZE (mode);
268 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
/* Round the running byte count up to the argument's alignment.  */
272 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
/* Entirely in memory, or entirely in registers: nothing is partial.  */
274 if (cum->nbytes > 4 * UNITS_PER_WORD)
277 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
280 if (type == NULL_TREE
281 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Otherwise the registers hold whatever fits below the 4-word limit.  */
284 return 4 * UNITS_PER_WORD - cum->nbytes;
287 /* Update the data in CUM to advance over an argument
288 of mode MODE and data type TYPE.
289 (TYPE is null for libcalls where that information may not be available.) */
292 v850_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
293 const_tree type, bool named ATTRIBUTE_UNUSED)
/* Large (> 8 byte) typed arguments advance by a pointer's size, since
   they are presumably passed by reference; otherwise advance by the
   argument's own rounded size.  NOTE(review): one conditional line of
   this expression is elided from this dump.  */
295 cum->nbytes += (((type && int_size_in_bytes (type) > 8
296 ? GET_MODE_SIZE (Pmode)
298 ? GET_MODE_SIZE (mode)
299 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
303 /* Return the high and low words of a CONST_DOUBLE */
306 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
308 if (GET_CODE (x) == CONST_DOUBLE)
/* Dispatch on the constant's machine mode (case labels such as
   DFmode/SFmode are elided from this dump).  */
313 switch (GET_MODE (x))
/* Double-precision float: convert to the two target words.  */
316 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
317 REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
318 *p_high = t[1]; /* since v850 is little endian */
319 *p_low = t[0]; /* high is second word */
/* Single-precision float: the entire value fits in *P_HIGH.  */
323 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
324 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
/* Integral CONST_DOUBLE (VOIDmode): use the stored word pair.  */
330 *p_high = CONST_DOUBLE_HIGH (x);
331 *p_low = CONST_DOUBLE_LOW (x);
/* Anything else is a bug in the caller.  */
339 fatal_insn ("const_double_split got a bad insn:", x);
343 /* Return the cost of the rtx R with code CODE. */
/* Cost of an integer constant VALUE: cheapest constraints first
   (I = zero, J = 5-bit signed, K = 16-bit signed).  The actual cost
   values returned per branch are elided from this dump.  */
346 const_costs_int (HOST_WIDE_INT value, int zero_cost)
348 if (CONST_OK_FOR_I (value))
350 else if (CONST_OK_FOR_J (value))
352 else if (CONST_OK_FOR_K (value))
/* Cost of constant rtx R with outer code C; dispatches on the kind of
   constant (switch labels elided from this dump).  */
359 const_costs (rtx r, enum rtx_code c)
361 HOST_WIDE_INT high, low;
/* CONST_INT: cost of the raw value, zero being free.  */
366 return const_costs_int (INTVAL (r), 0);
/* CONST_DOUBLE: SFmode needs only one word loaded, others need both.  */
369 const_double_split (r, &high, &low);
370 if (GET_MODE (r) == SFmode)
371 return const_costs_int (high, 1);
373 return const_costs_int (high, 1) + const_costs_int (low, 1);
/* Implement TARGET_RTX_COSTS: write the cost of X into *TOTAL.
   NOTE(review): the switch over `code' and several case labels are
   elided from this dump.  */
389 v850_rtx_costs (rtx x,
391 int outer_code ATTRIBUTE_UNUSED,
392 int * total, bool speed)
394 enum rtx_code code = (enum rtx_code) codearg;
/* Constants: reuse the constant cost model above.  */
403 *total = COSTS_N_INSNS (const_costs (x, code));
/* V850E has hardware shifts/mul; when optimizing for size they are
   costed differently (branch bodies elided here).  */
410 if (TARGET_V850E && !speed)
418 && ( GET_MODE (x) == SImode
419 || GET_MODE (x) == HImode
420 || GET_MODE (x) == QImode))
422 if (GET_CODE (XEXP (x, 1)) == REG)
424 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
426 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
428 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
437 if (outer_code == COMPARE)
446 /* Print operand X using operand code CODE to assembly language output file
/* NOTE(review): many case labels and break statements of this operand
   printer are elided from this dump; the comments below describe only
   the visible actions.  */
450 v850_print_operand (FILE * file, rtx x, int code)
452 HOST_WIDE_INT high, low;
457 /* We use 'c' operands with symbols for .vtinherit */
458 if (GET_CODE (x) == SYMBOL_REF)
460 output_addr_const(file, x);
/* Condition-code operands: 'B'/'C' print the reversed condition.  */
467 switch ((code == 'B' || code == 'C')
468 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
471 if (code == 'c' || code == 'C')
472 fprintf (file, "nz");
474 fprintf (file, "ne");
477 if (code == 'c' || code == 'C')
483 fprintf (file, "ge");
486 fprintf (file, "gt");
489 fprintf (file, "le");
492 fprintf (file, "lt");
495 fprintf (file, "nl");
501 fprintf (file, "nh");
510 case 'F': /* high word of CONST_DOUBLE */
511 switch (GET_CODE (x))
/* CONST_INT: high word is just the sign extension.  */
514 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
518 const_double_split (x, &high, &low);
519 fprintf (file, "%ld", (long) high);
526 case 'G': /* low word of CONST_DOUBLE */
527 switch (GET_CODE (x))
530 fprintf (file, "%ld", (long) INTVAL (x));
534 const_double_split (x, &high, &low);
535 fprintf (file, "%ld", (long) low);
/* Low 16 bits of a CONST_INT (operand letter elided in this dump).  */
543 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
/* log2 of a power-of-two CONST_INT.  */
546 fprintf (file, "%d", exact_log2 (INTVAL (x)));
/* 'O': print the small-data-area offset directive for a symbol.  */
549 gcc_assert (special_symbolref_operand (x, VOIDmode));
551 if (GET_CODE (x) == CONST)
552 x = XEXP (XEXP (x, 0), 0);
554 gcc_assert (GET_CODE (x) == SYMBOL_REF);
556 if (SYMBOL_REF_ZDA_P (x))
557 fprintf (file, "zdaoff");
558 else if (SYMBOL_REF_SDA_P (x))
559 fprintf (file, "sdaoff");
560 else if (SYMBOL_REF_TDA_P (x))
561 fprintf (file, "tdaoff");
/* 'P': the symbol itself.  */
566 gcc_assert (special_symbolref_operand (x, VOIDmode));
567 output_addr_const (file, x);
/* 'Q': the base register implied by the symbol's data area.  */
570 gcc_assert (special_symbolref_operand (x, VOIDmode));
572 if (GET_CODE (x) == CONST)
573 x = XEXP (XEXP (x, 0), 0);
575 gcc_assert (GET_CODE (x) == SYMBOL_REF);
577 if (SYMBOL_REF_ZDA_P (x))
578 fprintf (file, "r0");
579 else if (SYMBOL_REF_SDA_P (x))
580 fprintf (file, "gp");
581 else if (SYMBOL_REF_TDA_P (x))
582 fprintf (file, "ep");
586 case 'R': /* 2nd word of a double. */
587 switch (GET_CODE (x))
/* REG: the pairing register is simply REGNO + 1.  */
590 fprintf (file, reg_names[REGNO (x) + 1]);
/* MEM: address of the second word (offset by 4).  */
593 x = XEXP (adjust_address (x, SImode, 4), 0);
594 v850_print_operand_address (file, x);
595 if (GET_CODE (x) == CONST_INT)
596 fprintf (file, "[r0]");
605 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
606 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
613 /* Like an 'S' operand above, but for unsigned loads only. */
614 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
619 case 'W': /* print the instruction suffix */
620 switch (GET_MODE (x))
625 case QImode: fputs (".b", file); break;
626 case HImode: fputs (".h", file); break;
627 case SImode: fputs (".w", file); break;
628 case SFmode: fputs (".w", file); break;
631 case '.': /* register r0 */
632 fputs (reg_names[0], file);
634 case 'z': /* reg or zero */
635 if (GET_CODE (x) == REG)
636 fputs (reg_names[REGNO (x)], file);
/* Floating/SImode zero can also be rendered as r0.  */
637 else if ((GET_MODE(x) == SImode
638 || GET_MODE(x) == DFmode
639 || GET_MODE(x) == SFmode)
640 && x == CONST0_RTX(GET_MODE(x)))
641 fputs (reg_names[0], file);
644 gcc_assert (x == const0_rtx);
645 fputs (reg_names[0], file);
/* Default (no code): print the operand as-is.  */
649 switch (GET_CODE (x))
/* MEM with a constant address: force an explicit r0 base.  */
652 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
653 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
656 output_address (XEXP (x, 0));
660 fputs (reg_names[REGNO (x)], file);
663 fputs (reg_names[subreg_regno (x)], file);
670 v850_print_operand_address (file, x);
681 /* Output assembly language output for the address ADDR to FILE. */
/* NOTE(review): case labels (REG, LO_SUM, PLUS, SYMBOL_REF, CONST, ...)
   and several closing brackets are elided from this dump.  */
684 v850_print_operand_address (FILE * file, rtx addr)
686 switch (GET_CODE (addr))
/* Plain register: zero displacement, "0[reg]".  */
689 fprintf (file, "0[");
690 v850_print_operand (file, addr, 0);
/* LO_SUM: "lo(sym)[reg]".  */
694 if (GET_CODE (XEXP (addr, 0)) == REG)
697 fprintf (file, "lo(");
698 v850_print_operand (file, XEXP (addr, 1), 0);
699 fprintf (file, ")[");
700 v850_print_operand (file, XEXP (addr, 0), 0);
/* PLUS with a register base: "disp[reg]"; otherwise reversed order.  */
705 if (GET_CODE (XEXP (addr, 0)) == REG
706 || GET_CODE (XEXP (addr, 0)) == SUBREG)
709 v850_print_operand (file, XEXP (addr, 1), 0)
711 v850_print_operand (file, XEXP (addr, 0), 0);
716 v850_print_operand (file, XEXP (addr, 0), 0);
718 v850_print_operand (file, XEXP (addr, 1), 0);
/* Bare symbol in a small data area: "areaoff(sym)[basereg]".  */
723 const char *off_name = NULL;
724 const char *reg_name = NULL;
726 if (SYMBOL_REF_ZDA_P (addr))
731 else if (SYMBOL_REF_SDA_P (addr))
736 else if (SYMBOL_REF_TDA_P (addr))
743 fprintf (file, "%s(", off_name);
744 output_addr_const (file, addr);
746 fprintf (file, ")[%s]", reg_name);
/* CONST wrapping a small-area symbol: same rendering as above.  */
750 if (special_symbolref_operand (addr, VOIDmode))
752 rtx x = XEXP (XEXP (addr, 0), 0);
753 const char *off_name;
754 const char *reg_name;
756 if (SYMBOL_REF_ZDA_P (x))
761 else if (SYMBOL_REF_SDA_P (x))
766 else if (SYMBOL_REF_TDA_P (x))
774 fprintf (file, "%s(", off_name);
775 output_addr_const (file, addr);
776 fprintf (file, ")[%s]", reg_name);
779 output_addr_const (file, addr);
/* Default: let the generic printer handle it.  */
782 output_addr_const (file, addr);
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P; body elided from this
   dump (presumably accepts '.' -- see the '.' case in
   v850_print_operand -- TODO confirm).  */
788 v850_print_operand_punct_valid_p (unsigned char code)
793 /* When assemble_integer is used to emit the offsets for a switch
794 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
795 output_addr_const will normally barf at this, but it is OK to omit
796 the truncate and just emit the difference of the two labels. The
797 .hword directive will automatically handle the truncation for us.
799 Returns 1 if rtx was handled, 0 otherwise. */
802 v850_output_addr_const_extra (FILE * file, rtx x)
/* Only TRUNCATE needs special handling here.  */
804 if (GET_CODE (x) != TRUNCATE)
/* NOTE(review): the line stripping the TRUNCATE from X is elided from
   this dump.  */
809 /* We must also handle the case where the switch table was passed a
810 constant value and so has been collapsed. In this case the first
811 label will have been deleted. In such a case it is OK to emit
812 nothing, since the table will not be used.
813 (cf gcc.c-torture/compile/990801-1.c). */
814 if (GET_CODE (x) == MINUS
815 && GET_CODE (XEXP (x, 0)) == LABEL_REF
816 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
817 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
/* Emit the label difference without the truncation.  */
820 output_addr_const (file, x);
824 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
/* Returns an assembler template string; operand %0 is the destination
   and %1 the source.  NOTE(review): the leading register-destination
   branch and some else-arms are elided from this dump.  */
828 output_move_single (rtx * operands)
830 rtx dst = operands[0];
831 rtx src = operands[1];
/* Integer constant source: pick the shortest load sequence.  */
838 else if (GET_CODE (src) == CONST_INT)
840 HOST_WIDE_INT value = INTVAL (src);
842 if (CONST_OK_FOR_J (value)) /* Signed 5-bit immediate. */
845 else if (CONST_OK_FOR_K (value)) /* Signed 16-bit immediate. */
846 return "movea %1,%.,%0";
848 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
849 return "movhi hi0(%1),%.,%0";
851 /* A random constant. */
852 else if (TARGET_V850E || TARGET_V850E2_ALL)
855 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
/* SFmode float constant: same ladder, keyed on its single word.  */
858 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
860 HOST_WIDE_INT high, low;
862 const_double_split (src, &high, &low);
864 if (CONST_OK_FOR_J (high)) /* Signed 5-bit immediate. */
867 else if (CONST_OK_FOR_K (high)) /* Signed 16-bit immediate. */
868 return "movea %F1,%.,%0";
870 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
871 return "movhi hi0(%F1),%.,%0";
873 /* A random constant. */
874 else if (TARGET_V850E || TARGET_V850E2_ALL)
878 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
/* Memory source: load, with %S/%W selecting short form and suffix.  */
881 else if (GET_CODE (src) == MEM)
882 return "%S1ld%W1 %1,%0";
/* Small-data-area symbol: one movea off the area's base register.  */
884 else if (special_symbolref_operand (src, VOIDmode))
885 return "movea %O1(%P1),%Q1,%0";
887 else if (GET_CODE (src) == LABEL_REF
888 || GET_CODE (src) == SYMBOL_REF
889 || GET_CODE (src) == CONST)
891 if (TARGET_V850E || TARGET_V850E2_ALL)
892 return "mov hilo(%1),%0";
894 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
897 else if (GET_CODE (src) == HIGH)
898 return "movhi hi(%1),%.,%0";
900 else if (GET_CODE (src) == LO_SUM)
902 operands[2] = XEXP (src, 0);
903 operands[3] = XEXP (src, 1);
904 return "movea lo(%3),%2,%0";
/* Memory destination: store; zero sources store r0 directly.  */
908 else if (GET_CODE (dst) == MEM)
911 return "%S0st%W0 %1,%0";
913 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
914 return "%S0st%W0 %.,%0";
916 else if (GET_CODE (src) == CONST_DOUBLE
917 && CONST0_RTX (GET_MODE (dst)) == src)
918 return "%S0st%W0 %.,%0";
/* Unhandled operand combination is an internal error.  */
921 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
925 /* Generate comparison code. */
/* Predicate: OP is a comparison of the FP condition register against
   zero whose mode implies the "Z" flavour of FPU compare results
   (LT/LE/EQ).  Return values on match are elided from this dump.  */
927 v850_float_z_comparison_operator (rtx op, enum machine_mode mode)
929 enum rtx_code code = GET_CODE (op);
931 if (GET_RTX_CLASS (code) != RTX_COMPARE
932 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
935 if (mode != GET_MODE (op) && mode != VOIDmode)
/* Must be (compare CC_REGNUM 0).  */
938 if ((GET_CODE (XEXP (op, 0)) != REG
939 || REGNO (XEXP (op, 0)) != CC_REGNUM)
940 || XEXP (op, 1) != const0_rtx)
943 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LTmode)
945 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LEmode)
947 if (GET_MODE (XEXP (op, 0)) == CC_FPU_EQmode)
/* Predicate: like v850_float_z_comparison_operator but for the "NZ"
   flavour of FPU compare modes (GT/GE/NE).  */
954 v850_float_nz_comparison_operator (rtx op, enum machine_mode mode)
956 enum rtx_code code = GET_CODE (op);
958 if (GET_RTX_CLASS (code) != RTX_COMPARE
959 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
962 if (mode != GET_MODE (op) && mode != VOIDmode)
/* Must be (compare CC_REGNUM 0).  */
965 if ((GET_CODE (XEXP (op, 0)) != REG
966 || REGNO (XEXP (op, 0)) != CC_REGNUM)
967 || XEXP (op, 1) != const0_rtx)
970 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GTmode)
972 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GEmode)
974 if (GET_MODE (XEXP (op, 0)) == CC_FPU_NEmode)
/* Choose the condition-code mode for comparison COND on OP0/OP1.
   Floating-point comparisons get a dedicated CC_FPU_* mode per
   condition; the case labels and the non-FP fallback are elided from
   this dump.  */
981 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
983 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
988 return CC_FPU_LEmode;
990 return CC_FPU_GEmode;
992 return CC_FPU_LTmode;
994 return CC_FPU_GTmode;
996 return CC_FPU_EQmode;
998 return CC_FPU_NEmode;
/* Emit the FPU compare instruction for COND on OP0/OP1 (DFmode or
   SFmode) and return the CC mode that describes its result.  */
1007 v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
1009 if (GET_MODE(op0) == DFmode)
1014 emit_insn (gen_cmpdf_le_insn (op0, op1));
1017 emit_insn (gen_cmpdf_ge_insn (op0, op1));
1020 emit_insn (gen_cmpdf_lt_insn (op0, op1));
1023 emit_insn (gen_cmpdf_gt_insn (op0, op1));
1026 emit_insn (gen_cmpdf_eq_insn (op0, op1));
1029 emit_insn (gen_cmpdf_ne_insn (op0, op1));
/* NOTE(review): this tests the global v850_compare_op0 while the DFmode
   branch above and every emit use the OP0 parameter -- looks like a
   copy/paste inconsistency; should probably be GET_MODE (op0).  Verify
   against callers before changing.  */
1035 else if (GET_MODE(v850_compare_op0) == SFmode)
1040 emit_insn (gen_cmpsf_le_insn(op0, op1));
1043 emit_insn (gen_cmpsf_ge_insn(op0, op1));
1046 emit_insn (gen_cmpsf_lt_insn(op0, op1));
1049 emit_insn (gen_cmpsf_gt_insn(op0, op1));
1052 emit_insn (gen_cmpsf_eq_insn(op0, op1));
1055 emit_insn (gen_cmpsf_ne_insn(op0, op1));
/* The chosen CC mode doubles as the return value.  */
1066 return v850_select_cc_mode (cond, op0, op1);
/* Emit a compare of OP0 and OP1 and return the comparison rtx to use in
   a conditional branch/set.  Integer compares set the CC register
   directly; float compares go through the FPU path and copy FCC into a
   CC register of the FPU-specific mode.  */
1070 v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
1072 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
1074 emit_insn (gen_cmpsi_insn (op0, op1));
1075 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
1080 mode = v850_gen_float_compare (cond, mode, op0, op1);
1081 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1082 emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1084 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1088 /* Return maximum offset supported for a short EP memory reference of mode
1089 MODE and signedness UNSIGNEDP. */
/* NOTE(review): the switch over GET_MODE_SIZE and its case labels are
   elided from this dump; branches below appear to cover byte, halfword
   and word accesses in turn.  */
1092 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
/* Byte accesses: 4-bit offset with -msmall-sld or on V850E variants
   (extra condition elided), else 7-bit.  */
1099 if (TARGET_SMALL_SLD)
1100 max_offset = (1 << 4);
1101 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1103 max_offset = (1 << 4);
1105 max_offset = (1 << 7);
/* Halfword accesses: 5-bit limited forms, else 8-bit.  */
1109 if (TARGET_SMALL_SLD)
1110 max_offset = (1 << 5);
1111 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1113 max_offset = (1 << 5);
1115 max_offset = (1 << 8);
/* Word accesses: 8-bit offset.  */
1120 max_offset = (1 << 8);
1130 /* Return true if OP is a valid short EP memory reference */
1133 ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
1139 /* If we are not using the EP register on a per-function basis
1140 then do not allow this optimization at all. This is to
1141 prevent the use of the SLD/SST instructions which cannot be
1142 guaranteed to work properly due to a hardware bug. */
1146 if (GET_CODE (op) != MEM)
1149 max_offset = ep_memory_offset (mode, unsigned_load);
/* Offsets must be naturally aligned for the access size.  */
1151 mask = GET_MODE_SIZE (mode) - 1;
1153 addr = XEXP (op, 0);
/* Look through a CONST wrapper.  */
1154 if (GET_CODE (addr) == CONST)
1155 addr = XEXP (addr, 0);
1157 switch (GET_CODE (addr))
/* Bare TDA symbol is addressable off EP.  */
1163 return SYMBOL_REF_TDA_P (addr);
/* Direct register base must be EP itself.  */
1166 return REGNO (addr) == EP_REGNUM;
/* base + const: constant must be small, non-negative, and aligned.  */
1169 op0 = XEXP (addr, 0);
1170 op1 = XEXP (addr, 1);
1171 if (GET_CODE (op1) == CONST_INT
1172 && INTVAL (op1) < max_offset
1173 && INTVAL (op1) >= 0
1174 && (INTVAL (op1) & mask) == 0)
1176 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1179 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1188 /* Substitute memory references involving a pointer, to use the ep pointer,
1189 taking care to save and preserve the ep. */
/* NOTE(review): several parameter lines (last_insn, uses, regno, p_r1,
   p_ep from the call sites in v850_reorg) are elided from this dump.  */
1192 substitute_ep_register (rtx first_insn,
1199 rtx reg = gen_rtx_REG (Pmode, regno);
/* r1 is used as the temporary that preserves the old EP value.  */
1204 df_set_regs_ever_live (1, true);
1205 *p_r1 = gen_rtx_REG (Pmode, 1);
1206 *p_ep = gen_rtx_REG (Pmode, 30);
/* Optional statistics dump (guarding condition elided).  */
1211 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1212 2 * (uses - 3), uses, reg_names[regno],
1213 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1214 INSN_UID (first_insn), INSN_UID (last_insn));
1216 if (GET_CODE (first_insn) == NOTE)
1217 first_insn = next_nonnote_insn (first_insn);
1219 last_insn = next_nonnote_insn (last_insn);
/* Rewrite every single_set in [first_insn, last_insn) whose memory
   operand is based on REGNO so that it uses EP instead.  */
1220 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1222 if (GET_CODE (insn) == INSN)
1224 rtx pattern = single_set (insn);
1226 /* Replace the memory references. */
1230 /* Memory operands are signed by default. */
1231 int unsignedp = FALSE;
/* mem-to-mem sets are left alone (body of this arm elided).  */
1233 if (GET_CODE (SET_DEST (pattern)) == MEM
1234 && GET_CODE (SET_SRC (pattern)) == MEM)
1237 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1238 p_mem = &SET_DEST (pattern);
1240 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1241 p_mem = &SET_SRC (pattern);
1243 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1244 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1245 p_mem = &XEXP (SET_SRC (pattern), 0);
1247 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1248 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1250 p_mem = &XEXP (SET_SRC (pattern), 0);
/* Swap the base register for EP where the address qualifies.  */
1258 rtx addr = XEXP (*p_mem, 0);
1260 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1261 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1263 else if (GET_CODE (addr) == PLUS
1264 && GET_CODE (XEXP (addr, 0)) == REG
1265 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1266 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1267 && ((INTVAL (XEXP (addr, 1)))
1268 < ep_memory_offset (GET_MODE (*p_mem),
1270 && ((INTVAL (XEXP (addr, 1))) >= 0))
1271 *p_mem = change_address (*p_mem, VOIDmode,
1272 gen_rtx_PLUS (Pmode,
1280 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1281 insn = prev_nonnote_insn (first_insn);
1282 if (insn && GET_CODE (insn) == INSN
1283 && GET_CODE (PATTERN (insn)) == SET
1284 && SET_DEST (PATTERN (insn)) == *p_ep
1285 && SET_SRC (PATTERN (insn)) == *p_r1)
/* Bracket the region: save EP into r1, load EP from the pointer, and
   restore EP from r1 after the last use.  */
1288 emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);
1290 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
1291 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
1295 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1296 the -mep mode to copy heavily used pointers to ep to use the implicit
/* NOTE(review): the function header itself (name/signature) and the
   declaration of the per-register tracking struct are elided from this
   dump; only the body fragments below are visible.  */
1308 regs[FIRST_PSEUDO_REGISTER];
1317 /* If not ep mode, just return now. */
/* Reset per-register use tracking.  */
1321 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1324 regs[i].first_insn = NULL_RTX;
1325 regs[i].last_insn = NULL_RTX;
/* Scan the whole insn stream.  */
1328 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1330 switch (GET_CODE (insn))
1332 /* End of basic block */
/* At a block boundary, pick the most-used base register ...  */
1339 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1341 if (max_uses < regs[i].uses)
1343 max_uses = regs[i].uses;
/* ... and rewrite its references to use EP (profitability check
   elided from this dump).  */
1349 substitute_ep_register (regs[max_regno].first_insn,
1350 regs[max_regno].last_insn,
1351 max_uses, max_regno, &r1, &ep);
/* Then restart the tracking for the next block.  */
1355 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1358 regs[i].first_insn = NULL_RTX;
1359 regs[i].last_insn = NULL_RTX;
1367 pattern = single_set (insn);
1369 /* See if there are any memory references we can shorten */
1372 rtx src = SET_SRC (pattern);
1373 rtx dest = SET_DEST (pattern);
1375 /* Memory operands are signed by default. */
1376 int unsignedp = FALSE;
1378 /* We might have (SUBREG (MEM)) here, so just get rid of the
1379 subregs to make this code simpler. */
1380 if (GET_CODE (dest) == SUBREG
1381 && (GET_CODE (SUBREG_REG (dest)) == MEM
1382 || GET_CODE (SUBREG_REG (dest)) == REG))
1383 alter_subreg (&dest);
1384 if (GET_CODE (src) == SUBREG
1385 && (GET_CODE (SUBREG_REG (src)) == MEM
1386 || GET_CODE (SUBREG_REG (src)) == REG))
1387 alter_subreg (&src);
/* Locate the (at most one) interesting MEM in this set.  */
1389 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1392 else if (GET_CODE (dest) == MEM)
1395 else if (GET_CODE (src) == MEM)
1398 else if (GET_CODE (src) == SIGN_EXTEND
1399 && GET_CODE (XEXP (src, 0)) == MEM)
1400 mem = XEXP (src, 0);
1402 else if (GET_CODE (src) == ZERO_EXTEND
1403 && GET_CODE (XEXP (src, 0)) == MEM)
1405 mem = XEXP (src, 0);
/* Already a short EP reference: nothing to track.  */
1411 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
/* Otherwise count this as a potential use of its base register.  */
1414 else if (!use_ep && mem
1415 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1417 rtx addr = XEXP (mem, 0);
1421 if (GET_CODE (addr) == REG)
1424 regno = REGNO (addr);
1427 else if (GET_CODE (addr) == PLUS
1428 && GET_CODE (XEXP (addr, 0)) == REG
1429 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1430 && ((INTVAL (XEXP (addr, 1)))
1431 < ep_memory_offset (GET_MODE (mem), unsignedp))
1432 && ((INTVAL (XEXP (addr, 1))) >= 0))
1435 regno = REGNO (XEXP (addr, 0));
1444 regs[regno].last_insn = insn;
1445 if (!regs[regno].first_insn)
1446 regs[regno].first_insn = insn;
1450 /* Loading up a register in the basic block zaps any savings
1452 if (GET_CODE (dest) == REG)
1454 enum machine_mode mode = GET_MODE (dest);
1458 regno = REGNO (dest);
1459 endregno = regno + HARD_REGNO_NREGS (regno, mode);
1463 /* See if we can use the pointer before this
1468 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1470 if (max_uses < regs[i].uses)
1472 max_uses = regs[i].uses;
/* If the clobbered register is the current best candidate,
   substitute now before its tracking window is invalidated.  */
1478 && max_regno >= regno
1479 && max_regno < endregno)
1481 substitute_ep_register (regs[max_regno].first_insn,
1482 regs[max_regno].last_insn,
1483 max_uses, max_regno, &r1,
1486 /* Since we made a substitution, zap all remembered
1488 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1491 regs[i].first_insn = NULL_RTX;
1492 regs[i].last_insn = NULL_RTX;
/* In any event the overwritten registers can no longer be bases.  */
1497 for (i = regno; i < endregno; i++)
1500 regs[i].first_insn = NULL_RTX;
1501 regs[i].last_insn = NULL_RTX;
1509 /* # of registers saved by the interrupt handler. */
1510 #define INTERRUPT_FIXED_NUM 5
1512 /* # of bytes for registers saved by the interrupt handler. */
1513 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1515 /* # of words saved for other registers. */
1516 #define INTERRUPT_ALL_SAVE_NUM \
1517 (30 - INTERRUPT_FIXED_NUM)
/* # of bytes for those other registers (4 bytes per word).  */
1519 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
/* Compute the number of bytes of register-save area needed by the
   current function, and record in *P_REG_SAVED (if non-NULL) a bitmask
   of the registers to be saved.  NOTE(review): return type, local
   declarations and the accumulation of `size' are elided from this
   dump.  */
1522 compute_register_save_size (long * p_reg_saved)
1526 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1527 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1530 /* Count the return pointer if we need to save it. */
1531 if (crtl->profile && !call_p)
1533 df_set_regs_ever_live (LINK_POINTER_REGNUM, true);
1537 /* Count space for the register saves. */
1538 if (interrupt_handler)
/* Interrupt handlers must preserve every live register ...  */
1540 for (i = 0; i <= 31; i++)
1544 if (df_regs_ever_live_p (i) || call_p)
1547 reg_saved |= 1L << i;
1551 /* We don't save/restore r0 or the stack pointer */
1553 case STACK_POINTER_REGNUM:
1556 /* For registers with fixed use, we save them, set them to the
1557 appropriate value, and then restore them.
1558 These registers are handled specially, so don't list them
1559 on the list of registers to save in the prologue. */
1560 case 1: /* temp used to hold ep */
1562 case 10: /* temp used to call interrupt save/restore */
1563 case 11: /* temp used to call interrupt save/restore (long call) */
1564 case EP_REGNUM: /* ep */
/* Normal functions: only call-saved registers that are live.  */
1571 /* Find the first register that needs to be saved. */
1572 for (i = 0; i <= 31; i++)
1573 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1574 || i == LINK_POINTER_REGNUM))
1577 /* If it is possible that an out-of-line helper function might be
1578 used to generate the prologue for the current function, then we
1579 need to cover the possibility that such a helper function will
1580 be used, despite the fact that there might be gaps in the list of
1581 registers that need to be saved. To detect this we note that the
1582 helper functions always push at least register r29 (provided
1583 that the function is not an interrupt handler). */
1585 if (TARGET_PROLOG_FUNCTION
1586 && (i == 2 || ((i >= 20) && (i < 30))))
1591 reg_saved |= 1L << i;
1596 /* Helper functions save all registers between the starting
1597 register and the last register, regardless of whether they
1598 are actually used by the function or not. */
1599 for (; i <= 29; i++)
1602 reg_saved |= 1L << i;
1605 if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1608 reg_saved |= 1L << LINK_POINTER_REGNUM;
/* Without the helper, finish scanning the remaining registers.  */
1613 for (; i <= 31; i++)
1614 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1615 || i == LINK_POINTER_REGNUM))
1618 reg_saved |= 1L << i;
1624 *p_reg_saved = reg_saved;
/* Total stack frame size: local variables (SIZE, rounded -- rounding
   line elided from this dump) plus register-save area plus outgoing
   argument space.  Also sets *P_REG_SAVED via
   compute_register_save_size.  */
1630 compute_frame_size (int size, long * p_reg_saved)
1633 + compute_register_save_size (p_reg_saved)
1634 + crtl->outgoing_args_size);
/* Decide (by comparing estimated code sizes in bytes) whether calling
   an out-of-line prologue/epilogue helper beats inline register
   saves/restores for NUM_SAVE registers and a FRAME_SIZE-byte frame.  */
1638 use_prolog_function (int num_save, int frame_size)
1640 int alloc_stack = (4 * num_save);
1641 int unalloc_stack = frame_size - alloc_stack;
1642 int save_func_len, restore_func_len;
1643 int save_normal_len, restore_normal_len;
/* Helper call cost: 2 bytes with CALLT, else 4 (or the long-call
   sequence).  */
1645 if (! TARGET_DISABLE_CALLT)
1646 save_func_len = restore_func_len = 2;
1648 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
/* Extra stack beyond what the helper allocates needs an add each way.  */
1652 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1653 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1656 /* See if we would have used ep to save the stack. */
1657 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1658 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1660 save_normal_len = restore_normal_len = 4 * num_save;
/* Plus the inline stack adjustment (and the extra return insn).  */
1662 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1663 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1665 /* Don't bother checking if we don't actually save any space.
1666 This happens for instance if one register is saved and additional
1667 stack space is allocated. */
1668 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
/* Emit RTL for the current function's prologue: save the interrupt
   context for interrupt handlers, save the call-saved registers, and
   allocate the stack frame.  When TARGET_PROLOG_FUNCTION is in effect
   and use_prolog_function() says it is profitable, a single PARALLEL
   insn (matched against an out-of-line "__save_*" helper pattern) is
   emitted instead of one store per register.
   NOTE(review): many original source lines are elided in this excerpt;
   the comments below describe only the statements that are visible.  */
1672 expand_prologue (void)
1675 unsigned int size = get_frame_size ();
1676 unsigned int actual_fsize;
1677 unsigned int init_stack_alloc = 0;
1680 unsigned int num_save;
1682 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
/* NOTE(review): "®_saved" below appears to be mojibake for "&reg_saved"
   (the HTML entity "&reg;" rendered as the (R) sign) — confirm against the
   original v850.c and repair the encoding in a code change.  */
1685 actual_fsize = compute_frame_size (size, ®_saved);
1687 /* Save/setup global registers for interrupt functions right now. */
1688 if (interrupt_handler)
1690 if (! TARGET_DISABLE_CALLT)
1691 emit_insn (gen_callt_save_interrupt ());
1693 emit_insn (gen_save_interrupt ());
/* The fixed interrupt context is saved by the insns above, so that many
   bytes no longer need to be allocated below.  */
1695 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1697 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1698 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1701 /* Identify all of the saved registers. */
1703 for (i = 1; i < 32; i++)
1705 if (((1L << i) & reg_saved) != 0)
1706 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1709 /* See if we have an insn that allocates stack space and saves the particular
1710 registers we want to. */
1711 save_all = NULL_RTX;
1712 if (TARGET_PROLOG_FUNCTION && num_save > 0)
1714 if (use_prolog_function (num_save, actual_fsize))
1716 int alloc_stack = 4 * num_save;
/* Build a PARALLEL: element 0 adjusts SP, elements 1..num_save store the
   registers, plus clobbers of r10/r11 when the helper is reached via
   JARL (and a long call needs both scratch registers).  */
1719 save_all = gen_rtx_PARALLEL
1721 rtvec_alloc (num_save + 1
1722 + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1724 XVECEXP (save_all, 0, 0)
1725 = gen_rtx_SET (VOIDmode,
1727 gen_rtx_PLUS (Pmode,
1729 GEN_INT(-alloc_stack)));
1730 for (i = 0; i < num_save; i++)
1733 XVECEXP (save_all, 0, i+1)
1734 = gen_rtx_SET (VOIDmode,
1736 gen_rtx_PLUS (Pmode,
1742 if (TARGET_DISABLE_CALLT)
1744 XVECEXP (save_all, 0, num_save + 1)
1745 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1747 if (TARGET_LONG_CALLS)
1748 XVECEXP (save_all, 0, num_save + 2)
1749 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
/* Only use the PARALLEL if some save_* pattern actually matches it.  */
1752 code = recog (save_all, NULL_RTX, NULL);
1755 rtx insn = emit_insn (save_all);
1756 INSN_CODE (insn) = code;
1757 actual_fsize -= alloc_stack;
1761 save_all = NULL_RTX;
1765 /* If no prolog save function is available, store the registers the old
1766 fashioned way (one by one). */
1769 /* Special case interrupt functions that save all registers for a call. */
1770 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1772 if (! TARGET_DISABLE_CALLT)
1773 emit_insn (gen_callt_save_all_interrupt ());
1775 emit_insn (gen_save_all_interrupt ());
1780 /* If the stack is too big, allocate it in chunks so we can do the
1781 register saves. We use the register save size so we use the ep
1783 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1784 init_stack_alloc = compute_register_save_size (NULL);
1786 init_stack_alloc = actual_fsize;
1788 /* Save registers at the beginning of the stack frame. */
1789 offset = init_stack_alloc - 4;
1791 if (init_stack_alloc)
1792 emit_insn (gen_addsi3 (stack_pointer_rtx,
1794 GEN_INT (- (signed) init_stack_alloc)));
1796 /* Save the return pointer first. */
1797 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1799 emit_move_insn (gen_rtx_MEM (SImode,
1800 plus_constant (stack_pointer_rtx,
1802 save_regs[--num_save]);
1806 for (i = 0; i < num_save; i++)
1808 emit_move_insn (gen_rtx_MEM (SImode,
1809 plus_constant (stack_pointer_rtx,
1817 /* Allocate the rest of the stack that was not allocated above (either it is
1818 > 32K or we just called a function to save the registers and needed more
1820 if (actual_fsize > init_stack_alloc)
1822 int diff = actual_fsize - init_stack_alloc;
1823 if (CONST_OK_FOR_K (-diff))
1824 emit_insn (gen_addsi3 (stack_pointer_rtx,
/* Adjustment too large for an immediate add: load it into scratch r12.  */
1829 rtx reg = gen_rtx_REG (Pmode, 12);
1830 emit_move_insn (reg, GEN_INT (-diff));
1831 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
1835 /* If we need a frame pointer, set it up now. */
1836 if (frame_pointer_needed)
1837 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
/* Emit RTL for the current function's epilogue: undo the prologue's
   stack allocation, restore the call-saved registers and return — via
   RETI-style insns for interrupt handlers.  Mirrors expand_prologue:
   an out-of-line "__return_*" helper is used via a PARALLEL when
   profitable and not an interrupt handler.
   NOTE(review): many original source lines are elided in this excerpt;
   the comments describe only the visible statements.  */
1842 expand_epilogue (void)
1845 unsigned int size = get_frame_size ();
/* NOTE(review): "®_saved" appears to be mojibake for "&reg_saved" —
   confirm against the original source.  */
1847 int actual_fsize = compute_frame_size (size, ®_saved);
1848 rtx restore_regs[32];
1850 unsigned int num_restore;
1852 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1854 /* Eliminate the initial stack stored by interrupt functions. */
1855 if (interrupt_handler)
1857 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1858 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1859 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1862 /* Cut off any dynamic stack created. */
1863 if (frame_pointer_needed)
1864 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1866 /* Identify all of the saved registers. */
1868 for (i = 1; i < 32; i++)
1870 if (((1L << i) & reg_saved) != 0)
1871 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1874 /* See if we have an insn that restores the particular registers we
1876 restore_all = NULL_RTX;
1878 if (TARGET_PROLOG_FUNCTION
1880 && !interrupt_handler)
1882 int alloc_stack = (4 * num_restore);
1884 /* Don't bother checking if we don't actually save any space. */
1885 if (use_prolog_function (num_restore, actual_fsize))
/* PARALLEL layout: element 0 is the RETURN, element 1 pops the register
   block off the stack, elements 2.. reload the registers.  */
1888 restore_all = gen_rtx_PARALLEL (VOIDmode,
1889 rtvec_alloc (num_restore + 2));
1890 XVECEXP (restore_all, 0, 0) = gen_rtx_RETURN (VOIDmode);
1891 XVECEXP (restore_all, 0, 1)
1892 = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1893 gen_rtx_PLUS (Pmode,
1895 GEN_INT (alloc_stack)));
1897 offset = alloc_stack - 4;
1898 for (i = 0; i < num_restore; i++)
1900 XVECEXP (restore_all, 0, i+2)
1901 = gen_rtx_SET (VOIDmode,
1904 gen_rtx_PLUS (Pmode,
/* Only use the PARALLEL if a return_* pattern actually matches it.  */
1910 code = recog (restore_all, NULL_RTX, NULL);
1916 actual_fsize -= alloc_stack;
/* Release the part of the frame the helper does not pop.  */
1919 if (CONST_OK_FOR_K (actual_fsize))
1920 emit_insn (gen_addsi3 (stack_pointer_rtx,
1922 GEN_INT (actual_fsize)));
/* Too large for an immediate add: go through scratch r12.  */
1925 rtx reg = gen_rtx_REG (Pmode, 12);
1926 emit_move_insn (reg, GEN_INT (actual_fsize));
1927 emit_insn (gen_addsi3 (stack_pointer_rtx,
1933 insn = emit_jump_insn (restore_all);
1934 INSN_CODE (insn) = code;
1938 restore_all = NULL_RTX;
1942 /* If no epilogue save function is available, restore the registers the
1943 old fashioned way (one by one). */
1946 unsigned int init_stack_free;
1948 /* If the stack is large, we need to cut it down in 2 pieces. */
1949 if (interrupt_handler)
1950 init_stack_free = 0;
1951 else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1952 init_stack_free = 4 * num_restore;
1954 init_stack_free = (signed) actual_fsize;
1956 /* Deallocate the rest of the stack if it is > 32K. */
1957 if ((unsigned int) actual_fsize > init_stack_free)
1961 diff = actual_fsize - init_stack_free;
1963 if (CONST_OK_FOR_K (diff))
1964 emit_insn (gen_addsi3 (stack_pointer_rtx,
1969 rtx reg = gen_rtx_REG (Pmode, 12);
1970 emit_move_insn (reg, GEN_INT (diff));
1971 emit_insn (gen_addsi3 (stack_pointer_rtx,
1977 /* Special case interrupt functions that save all registers
1979 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1981 if (! TARGET_DISABLE_CALLT)
1982 emit_insn (gen_callt_restore_all_interrupt ());
1984 emit_insn (gen_restore_all_interrupt ());
1988 /* Restore registers from the beginning of the stack frame. */
1989 int offset = init_stack_free - 4;
1991 /* Restore the return pointer first. */
1993 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
1995 emit_move_insn (restore_regs[--num_restore],
1996 gen_rtx_MEM (SImode,
1997 plus_constant (stack_pointer_rtx,
2002 for (i = 0; i < num_restore; i++)
2004 emit_move_insn (restore_regs[i],
2005 gen_rtx_MEM (SImode,
2006 plus_constant (stack_pointer_rtx,
/* Keep the restores alive so later passes do not delete them.  */
2009 emit_use (restore_regs[i]);
2013 /* Cut back the remainder of the stack. */
2014 if (init_stack_free)
2015 emit_insn (gen_addsi3 (stack_pointer_rtx,
2017 GEN_INT (init_stack_free)));
2020 /* And return or use reti for interrupt handlers. */
2021 if (interrupt_handler)
2023 if (! TARGET_DISABLE_CALLT)
2024 emit_insn (gen_callt_return_interrupt ());
2026 emit_jump_insn (gen_return_interrupt ());
2028 else if (actual_fsize)
2029 emit_jump_insn (gen_return_internal ());
2031 emit_jump_insn (gen_return_simple ());
/* Invalidate the cached interrupt-handler query for the next function.  */
2034 v850_interrupt_cache_p = FALSE;
2035 v850_interrupt_p = FALSE;
2038 /* Update the condition code from the insn.  Dispatches on the insn's
     "cc" attribute and records in cc_status which value, if any, the
     condition-code flags now reflect, so redundant compares can be
     elided by final.  */
2040 notice_update_cc (rtx body, rtx insn)
2042 switch (get_attr_cc (insn))
2045 /* Insn does not affect CC at all. */
2049 /* Insn does not change CC, but the 0'th operand has been changed. */
2050 if (cc_status.value1 != 0
2051 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
2052 cc_status.value1 = 0;
2056 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2057 V,C is in an unusable state. */
2059 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
2060 cc_status.value1 = recog_data.operand[0];
2064 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2065 C is in an unusable state. */
2067 cc_status.flags |= CC_NO_CARRY;
2068 cc_status.value1 = recog_data.operand[0];
2072 /* The insn is a compare instruction. */
2074 cc_status.value1 = SET_SRC (body);
2078 /* Insn doesn't leave CC in a usable state. */
2087 /* Retrieve the data area that has been chosen for the given decl:
     checks for "sda"/"tda"/"zda" attributes in that order and falls
     back to DATA_AREA_NORMAL when none is present.  */
2090 v850_get_data_area (tree decl)
2092 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2093 return DATA_AREA_SDA;
2095 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2096 return DATA_AREA_TDA;
2098 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2099 return DATA_AREA_ZDA;
2101 return DATA_AREA_NORMAL;
2104 /* Store the indicated data area in the decl's attributes, by consing
     the corresponding identifier ("sda"/"tda"/"zda") onto the decl's
     attribute list.  */
2107 v850_set_data_area (tree decl, v850_data_area data_area)
2113 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2114 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2115 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2120 DECL_ATTRIBUTES (decl) = tree_cons
2121 (name, NULL, DECL_ATTRIBUTES (decl));
2124 /* Handle an "interrupt" attribute; arguments as in
2125 struct attribute_spec.handler.  Rejects the attribute (with a
     warning, not an error) when applied to anything that is not a
     function declaration.  */
2127 v850_handle_interrupt_attribute (tree * node,
2129 tree args ATTRIBUTE_UNUSED,
2130 int flags ATTRIBUTE_UNUSED,
2131 bool * no_add_attrs)
2133 if (TREE_CODE (*node) != FUNCTION_DECL)
2135 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2137 *no_add_attrs = true;
2143 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2144 struct attribute_spec.handler.  Diagnoses attributes placed on
     local variables and conflicts with a previously chosen data area.  */
2146 v850_handle_data_area_attribute (tree* node,
2148 tree args ATTRIBUTE_UNUSED,
2149 int flags ATTRIBUTE_UNUSED,
2150 bool * no_add_attrs)
2152 v850_data_area data_area;
2153 v850_data_area area;
2156 /* Implement data area attribute. */
2157 if (is_attribute_p ("sda", name))
2158 data_area = DATA_AREA_SDA;
2159 else if (is_attribute_p ("tda", name))
2160 data_area = DATA_AREA_TDA;
2161 else if (is_attribute_p ("zda", name))
2162 data_area = DATA_AREA_ZDA;
2166 switch (TREE_CODE (decl))
/* Data area attributes only make sense for objects with static
   storage: reject them inside a function body.  */
2169 if (current_function_decl != NULL_TREE)
2171 error_at (DECL_SOURCE_LOCATION (decl),
2172 "data area attributes cannot be specified for "
2174 *no_add_attrs = true;
2180 area = v850_get_data_area (decl);
2181 if (area != DATA_AREA_NORMAL && data_area != area)
2183 error ("data area of %q+D conflicts with previous declaration",
2185 *no_add_attrs = true;
2197 /* Return nonzero if FUNC is an interrupt function as specified
2198 by the "interrupt" attribute.  Also accepts "interrupt_handler".
     The result is cached in v850_interrupt_p once it is safe to do so
     (see below).  */
2201 v850_interrupt_function_p (tree func)
2206 if (v850_interrupt_cache_p)
2207 return v850_interrupt_p;
2209 if (TREE_CODE (func) != FUNCTION_DECL)
2212 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2218 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2219 ret = a != NULL_TREE;
2222 /* It's not safe to trust global variables until after function inlining has
2224 if (reload_completed | reload_in_progress)
2225 v850_interrupt_p = ret;
/* Choose a data area for DECL (from explicit section names or the
   -mzda=/-msda=/-mtda= size thresholds) and record the choice as
   SYMBOL_FLAG_{ZDA,TDA,SDA} flags on SYMBOL's SYMBOL_REF.  */
2232 v850_encode_data_area (tree decl, rtx symbol)
2236 /* Map explicit sections into the appropriate attribute */
2237 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2239 if (DECL_SECTION_NAME (decl))
2241 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
2243 if (streq (name, ".zdata") || streq (name, ".zbss"))
2244 v850_set_data_area (decl, DATA_AREA_ZDA);
2246 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2247 v850_set_data_area (decl, DATA_AREA_SDA);
2249 else if (streq (name, ".tdata"))
2250 v850_set_data_area (decl, DATA_AREA_TDA);
2253 /* If no attribute, support -m{zda,sda,tda}=n */
2256 int size = int_size_in_bytes (TREE_TYPE (decl));
/* Smallest-area-first: TDA is checked before SDA before ZDA.  */
2260 else if (size <= small_memory [(int) SMALL_MEMORY_TDA].max)
2261 v850_set_data_area (decl, DATA_AREA_TDA);
2263 else if (size <= small_memory [(int) SMALL_MEMORY_SDA].max)
2264 v850_set_data_area (decl, DATA_AREA_SDA);
2266 else if (size <= small_memory [(int) SMALL_MEMORY_ZDA].max)
2267 v850_set_data_area (decl, DATA_AREA_ZDA);
/* Nothing to encode for objects left in the normal data area.  */
2270 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2274 flags = SYMBOL_REF_FLAGS (symbol);
2275 switch (v850_get_data_area (decl))
2277 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2278 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2279 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2280 default: gcc_unreachable ();
2282 SYMBOL_REF_FLAGS (symbol) = flags;
/* Implement TARGET_ENCODE_SECTION_INFO: after the default processing,
   encode the chosen small-data area into the SYMBOL_REF of static and
   external variables.  */
2286 v850_encode_section_info (tree decl, rtx rtl, int first)
2288 default_encode_section_info (decl, rtl, first);
2290 if (TREE_CODE (decl) == VAR_DECL
2291 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2292 v850_encode_data_area (decl, XEXP (rtl, 0));
2295 /* Construct a JR instruction to a routine that will perform the equivalent of
2296 the RTL passed in as an argument. This RTL is a function epilogue that
2297 pops registers off the stack and possibly releases some extra stack space
2298 as well. The code has already verified that the RTL matches these
     requirements.  Returns assembler text held in a static buffer, so
     the result must be consumed before the next call.  */
2302 construct_restore_jr (rtx op)
2304 int count = XVECLEN (op, 0);
2306 unsigned long int mask;
2307 unsigned long int first;
2308 unsigned long int last;
2310 static char buff [100]; /* XXX */
2314 error ("bogus JR construction: %d", count);
2318 /* Work out how many bytes to pop off the stack before retrieving
2320 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2321 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2322 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2324 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2326 /* Each pop will remove 4 bytes from the stack.... */
2327 stack_bytes -= (count - 2) * 4;
2329 /* Make sure that the amount we are popping is either 0 or 16 bytes. */
2330 if (stack_bytes != 0)
2332 error ("bad amount of stack space removal: %d", stack_bytes);
2336 /* Now compute the bit mask of registers to push. */
2338 for (i = 2; i < count; i++)
2340 rtx vector_element = XVECEXP (op, 0, i);
2342 gcc_assert (GET_CODE (vector_element) == SET);
2343 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2344 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2347 mask |= 1 << REGNO (SET_DEST (vector_element));
2350 /* Scan for the first register to pop. */
2351 for (first = 0; first < 32; first++)
2353 if (mask & (1 << first))
2357 gcc_assert (first < 32);
2359 /* Discover the last register to pop. */
2360 if (mask & (1 << LINK_POINTER_REGNUM))
2362 last = LINK_POINTER_REGNUM;
2366 gcc_assert (!stack_bytes);
2367 gcc_assert (mask & (1 << 29));
2372 /* Note, it is possible to have gaps in the register mask.
2373 We ignore this here, and generate a JR anyway. We will
2374 be popping more registers than is strictly necessary, but
2375 it does save code space. */
/* Long-call targets cannot reach __return_* with a JR: build the
   address in r6 and JMP instead.  */
2377 if (TARGET_LONG_CALLS)
2382 sprintf (name, "__return_%s", reg_names [first]);
2384 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2386 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2392 sprintf (buff, "jr __return_%s", reg_names [first]);
2394 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2401 /* Construct a JARL instruction to a routine that will perform the equivalent
2402 of the RTL passed as a parameter. This RTL is a function prologue that
2403 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2404 some stack space as well. The code has already verified that the RTL
2405 matches these requirements.  Returns assembler text in a static buffer.  */
2407 construct_save_jarl (rtx op)
2409 int count = XVECLEN (op, 0);
2411 unsigned long int mask;
2412 unsigned long int first;
2413 unsigned long int last;
2415 static char buff [100]; /* XXX */
/* A long-call sequence carries one extra CLOBBER element.  */
2417 if (count <= (TARGET_LONG_CALLS ? 3 : 2))
2419 error ("bogus JARL construction: %d", count);
2424 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2425 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2426 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2427 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2429 /* Work out how many bytes to push onto the stack after storing the
2431 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2433 /* Each push will put 4 bytes onto the stack.... */
2434 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2436 /* Make sure that the amount we are pushing is either 0 or 16 bytes. */
/* NOTE(review): the error string below says "removal" although this is
   the save/push path — consider rewording in a code change.  */
2437 if (stack_bytes != 0)
2439 error ("bad amount of stack space removal: %d", stack_bytes);
2443 /* Now compute the bit mask of registers to push. */
2445 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2447 rtx vector_element = XVECEXP (op, 0, i);
2449 gcc_assert (GET_CODE (vector_element) == SET);
2450 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2451 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2454 mask |= 1 << REGNO (SET_SRC (vector_element));
2457 /* Scan for the first register to push. */
2458 for (first = 0; first < 32; first++)
2460 if (mask & (1 << first))
2464 gcc_assert (first < 32);
2466 /* Discover the last register to push. */
2467 if (mask & (1 << LINK_POINTER_REGNUM))
2469 last = LINK_POINTER_REGNUM;
2473 gcc_assert (!stack_bytes);
2474 gcc_assert (mask & (1 << 29));
2479 /* Note, it is possible to have gaps in the register mask.
2480 We ignore this here, and generate a JARL anyway. We will
2481 be pushing more registers than is strictly necessary, but
2482 it does save code space. */
/* Long calls: materialize the helper address in r11, fix up the return
   address in r10 past the jmp, then jump.  */
2484 if (TARGET_LONG_CALLS)
2489 sprintf (name, "__save_%s", reg_names [first]);
2491 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2493 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2499 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2501 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2508 extern tree last_assemble_variable_decl;
2509 extern int size_directive_output;
2511 /* A version of asm_output_aligned_bss() that copes with the special
2512 data areas of the v850: the object is placed in .zbss/.sbss/.tdata
     or the normal .bss section according to its chosen data area.  */
2514 v850_output_aligned_bss (FILE * file,
2517 unsigned HOST_WIDE_INT size,
2520 switch (v850_get_data_area (decl))
2523 switch_to_section (zbss_section);
2527 switch_to_section (sbss_section);
/* NOTE(review): the TDA case selects tdata_section (there is no tbss) —
   presumably intentional; confirm against the original source.  */
2531 switch_to_section (tdata_section);
2534 switch_to_section (bss_section);
2538 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2539 #ifdef ASM_DECLARE_OBJECT_NAME
2540 last_assemble_variable_decl = decl;
2541 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2543 /* Standard thing is just output label for the object. */
2544 ASM_OUTPUT_LABEL (file, name);
2545 #endif /* ASM_DECLARE_OBJECT_NAME */
/* Emit at least one byte so the label refers to real storage.  */
2546 ASM_OUTPUT_SKIP (file, size ? size : 1);
2549 /* Called via the macro ASM_OUTPUT_DECL_COMMON: emit the common-symbol
     directive appropriate to the decl's data area (.zcomm/.scomm/.tcomm
     or plain .comm), then the name, size and byte alignment.  */
2551 v850_output_common (FILE * file,
2557 if (decl == NULL_TREE)
2559 fprintf (file, "%s", COMMON_ASM_OP);
2563 switch (v850_get_data_area (decl))
2566 fprintf (file, "%s", ZCOMMON_ASM_OP);
2570 fprintf (file, "%s", SCOMMON_ASM_OP);
2574 fprintf (file, "%s", TCOMMON_ASM_OP);
2578 fprintf (file, "%s", COMMON_ASM_OP);
2583 assemble_name (file, name);
2584 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2587 /* Called via the macro ASM_OUTPUT_DECL_LOCAL: mark the symbol local
     with .lcomm-style LOCAL_ASM_OP, then emit it as a common symbol so
     the data-area handling above is reused.  */
2589 v850_output_local (FILE * file,
2595 fprintf (file, "%s", LOCAL_ASM_OP);
2596 assemble_name (file, name);
2597 fprintf (file, "\n");
2599 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2602 /* Add data area to the given declaration if a ghs data area pragma is
2603 currently in effect (#pragma ghs startXXX/endXXX), and attach a GHS
     section name when one has been configured for the decl's section
     kind.  Implements TARGET_INSERT_ATTRIBUTES.  */
2605 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
2608 && data_area_stack->data_area
2609 && current_function_decl == NULL_TREE
2610 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2611 && v850_get_data_area (decl) == DATA_AREA_NORMAL)
2612 v850_set_data_area (decl, data_area_stack->data_area)
2614 /* Initialize the default names of the v850 specific sections,
2615 if this has not been done before. */
2617 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2619 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2620 = build_string (sizeof (".sdata")-1, ".sdata");
2622 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2623 = build_string (sizeof (".rosdata")-1, ".rosdata");
2625 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2626 = build_string (sizeof (".tdata")-1, ".tdata");
2628 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2629 = build_string (sizeof (".zdata")-1, ".zdata");
2631 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2632 = build_string (sizeof (".rozdata")-1, ".rozdata");
/* Only file-scope decls that will actually be emitted here (defined,
   or external with an initializer) and have no explicit section get a
   GHS section name.  */
2635 if (current_function_decl == NULL_TREE
2636 && (TREE_CODE (decl) == VAR_DECL
2637 || TREE_CODE (decl) == CONST_DECL
2638 || TREE_CODE (decl) == FUNCTION_DECL)
2639 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2640 && !DECL_SECTION_NAME (decl))
2642 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2643 tree chosen_section;
2645 if (TREE_CODE (decl) == FUNCTION_DECL)
2646 kind = GHS_SECTION_KIND_TEXT;
2649 /* First choose a section kind based on the data area of the decl. */
2650 switch (v850_get_data_area (decl))
2656 kind = ((TREE_READONLY (decl))
2657 ? GHS_SECTION_KIND_ROSDATA
2658 : GHS_SECTION_KIND_SDATA);
2662 kind = GHS_SECTION_KIND_TDATA;
2666 kind = ((TREE_READONLY (decl))
2667 ? GHS_SECTION_KIND_ROZDATA
2668 : GHS_SECTION_KIND_ZDATA);
2671 case DATA_AREA_NORMAL: /* default data area */
2672 if (TREE_READONLY (decl))
2673 kind = GHS_SECTION_KIND_RODATA;
2674 else if (DECL_INITIAL (decl))
2675 kind = GHS_SECTION_KIND_DATA;
2677 kind = GHS_SECTION_KIND_BSS;
2681 /* Now, if the section kind has been explicitly renamed,
2682 then attach a section attribute. */
2683 chosen_section = GHS_current_section_names [(int) kind];
2685 /* Otherwise, if this kind of section needs an explicit section
2686 attribute, then also attach one. */
2687 if (chosen_section == NULL)
2688 chosen_section = GHS_default_section_names [(int) kind];
2692 /* Only set the section name if specified by a pragma, because
2693 otherwise it will force those variables to get allocated storage
2694 in this module, rather than by the linker. */
2695 DECL_SECTION_NAME (decl) = chosen_section;
2700 /* Construct a DISPOSE instruction that is the equivalent of
2701 the given RTX. We have already verified that this should
     be possible.  Emits a CALLT to a __callt_return_* helper when
     available, otherwise a textual "dispose" with a register list.
     Returns assembler text held in a static buffer.  */
2705 construct_dispose_instruction (rtx op)
2707 int count = XVECLEN (op, 0);
2709 unsigned long int mask;
2711 static char buff[ 100 ]; /* XXX */
2716 error ("bogus DISPOSE construction: %d", count);
2720 /* Work out how many bytes to pop off the
2721 stack before retrieving registers. */
2722 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2723 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2724 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2726 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2728 /* Each pop will remove 4 bytes from the stack.... */
2729 stack_bytes -= (count - 2) * 4;
2731 /* Make sure that the amount we are popping
2732 will fit into the DISPOSE instruction. */
2733 if (stack_bytes > 128)
2735 error ("too much stack space to dispose of: %d", stack_bytes);
2739 /* Now compute the bit mask of registers to push. */
2742 for (i = 2; i < count; i++)
2744 rtx vector_element = XVECEXP (op, 0, i);
2746 gcc_assert (GET_CODE (vector_element) == SET);
2747 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2748 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
/* r2 is handled specially (use_callt path) rather than via the mask.  */
2751 if (REGNO (SET_DEST (vector_element)) == 2)
2754 mask |= 1 << REGNO (SET_DEST (vector_element));
2757 if (! TARGET_DISABLE_CALLT
2758 && (use_callt || stack_bytes == 0))
2762 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
/* Find the lowest saved register in r20..r31 for the helper's name.  */
2767 for (i = 20; i < 32; i++)
2768 if (mask & (1 << i))
2772 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2774 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2775 i, (mask & (1 << 31)) ? "31c" : "29");
2780 static char regs [100]; /* XXX */
2783 /* Generate the DISPOSE instruction. Note we could just issue the
2784 bit mask as a number as the assembler can cope with this, but for
2785 the sake of our readers we turn it into a textual description. */
2789 for (i = 20; i < 32; i++)
2791 if (mask & (1 << i))
2796 strcat (regs, ", ");
2801 strcat (regs, reg_names[ first ]);
/* Collapse a run of consecutive registers into "rA - rB".  */
2803 for (i++; i < 32; i++)
2804 if ((mask & (1 << i)) == 0)
2809 strcat (regs, " - ");
2810 strcat (regs, reg_names[ i - 1 ] );
2815 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2821 /* Construct a PREPARE instruction that is the equivalent of
2822 the given RTL. We have already verified that this should
     be possible.  Mirror image of construct_dispose_instruction:
     emits a CALLT to a __callt_save_* helper when available, else a
     textual "prepare" with a register list.  Returns assembler text
     in a static buffer.  */
2826 construct_prepare_instruction (rtx op)
2830 unsigned long int mask;
2832 static char buff[ 100 ]; /* XXX */
2835 if (XVECLEN (op, 0) <= 1)
/* NOTE(review): "PREPEARE" in the error string is a typo for "PREPARE" —
   a string literal cannot be changed in a documentation-only edit.  */
2837 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2841 /* Work out how many bytes to push onto
2842 the stack after storing the registers. */
2843 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2844 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2845 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2847 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2850 /* Make sure that the amount we are popping
2851 will fit into the PREPARE instruction. */
2852 if (stack_bytes < -128)
2854 error ("too much stack space to prepare: %d", stack_bytes);
2858 /* Now compute the bit mask of registers to push. */
2861 for (i = 1; i < XVECLEN (op, 0); i++)
2863 rtx vector_element = XVECEXP (op, 0, i);
/* CLOBBER elements (scratch registers) carry no register to save.  */
2865 if (GET_CODE (vector_element) == CLOBBER)
2868 gcc_assert (GET_CODE (vector_element) == SET);
2869 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2870 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2873 if (REGNO (SET_SRC (vector_element)) == 2)
2876 mask |= 1 << REGNO (SET_SRC (vector_element));
2880 stack_bytes += count * 4;
2882 if ((! TARGET_DISABLE_CALLT)
2883 && (use_callt || stack_bytes == 0))
2887 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2891 for (i = 20; i < 32; i++)
2892 if (mask & (1 << i))
2896 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2898 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2899 i, (mask & (1 << 31)) ? "31c" : "29");
2903 static char regs [100]; /* XXX */
2907 /* Generate the PREPARE instruction. Note we could just issue the
2908 bit mask as a number as the assembler can cope with this, but for
2909 the sake of our readers we turn it into a textual description. */
2913 for (i = 20; i < 32; i++)
2915 if (mask & (1 << i))
2920 strcat (regs, ", ");
2925 strcat (regs, reg_names[ first ]);
/* Collapse a run of consecutive registers into "rA - rB".  */
2927 for (i++; i < 32; i++)
2928 if ((mask & (1 << i)) == 0)
2933 strcat (regs, " - ");
2934 strcat (regs, reg_names[ i - 1 ] );
2939 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2945 /* Return an RTX indicating where the return address to the
2946 calling function can be found: the pseudo holding the entry value
     of the link pointer register (r31).  */
2949 v850_return_addr (int count)
2954 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2957 /* Implement TARGET_ASM_INIT_SECTIONS: create the v850-specific
     small-data sections (.rosdata, .rozdata, .tdata, .zdata, .zbss)
     as unnamed sections with their assembler switch directives.  */
2960 v850_asm_init_sections (void)
2963 = get_unnamed_section (0, output_section_asm_op,
2964 "\t.section .rosdata,\"a\"");
2967 = get_unnamed_section (0, output_section_asm_op,
2968 "\t.section .rozdata,\"a\"");
2971 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2972 "\t.section .tdata,\"aw\"");
2975 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2976 "\t.section .zdata,\"aw\"");
2979 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2980 output_section_asm_op,
2981 "\t.section .zbss,\"aw\"");
/* Implement TARGET_ASM_SELECT_SECTION: pick the output section for EXP
   based on its data area and whether it is a compile-time constant
   (read-only data goes to the ro* variant of each area).  */
2985 v850_select_section (tree exp,
2986 int reloc ATTRIBUTE_UNUSED,
2987 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2989 if (TREE_CODE (exp) == VAR_DECL)
/* An object is "const" here only if it is readonly, side-effect free
   and fully initialized with a constant.  */
2992 if (!TREE_READONLY (exp)
2993 || TREE_SIDE_EFFECTS (exp)
2994 || !DECL_INITIAL (exp)
2995 || (DECL_INITIAL (exp) != error_mark_node
2996 && !TREE_CONSTANT (DECL_INITIAL (exp))))
3001 switch (v850_get_data_area (exp))
3004 return is_const ? rozdata_section : zdata_section;
3007 return tdata_section;
3010 return is_const ? rosdata_section : sdata_section;
3013 return is_const ? readonly_data_section : data_section;
/* Non-VAR_DECLs (e.g. constant pool entries) go to read-only data.  */
3016 return readonly_data_section;
3019 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P: only r10 carries
     function return values on the v850.  */
3022 v850_function_value_regno_p (const unsigned int regno)
3024 return (regno == 10);
3027 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3030 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3032 /* Return values > 8 bytes in length in memory. */
3033 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
3036 /* Worker function for TARGET_FUNCTION_VALUE: values are returned in
     r10, in the mode of VALTYPE.  */
3039 v850_function_value (const_tree valtype,
3040 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3041 bool outgoing ATTRIBUTE_UNUSED)
3043 return gen_rtx_REG (TYPE_MODE (valtype), 10);
3047 /* Worker function for TARGET_SETUP_INCOMING_VARARGS: merely record
     whether the function takes anonymous arguments (suppressed under
     the GHS ABI).  */
3050 v850_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
3051 enum machine_mode mode ATTRIBUTE_UNUSED,
3052 tree type ATTRIBUTE_UNUSED,
3053 int *pretend_arg_size ATTRIBUTE_UNUSED,
3054 int second_time ATTRIBUTE_UNUSED)
3056 ca->anonymous_args = (!TARGET_GHS ? 1 : 0);
3059 /* Worker function for TARGET_CAN_ELIMINATE: elimination to the stack
     pointer is only possible when no frame pointer is needed.  */
3062 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3064 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
3068 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  The jarl loads
     the trampoline's own address into r12; the two ld.w insns then load
     the static chain (into r20) and the target address (into r12) from
     the two .long slots, which v850_trampoline_init fills in.  */
3071 v850_asm_trampoline_template (FILE *f)
3073 fprintf (f, "\tjarl .+4,r12\n");
3074 fprintf (f, "\tld.w 12[r12],r20\n");
3075 fprintf (f, "\tld.w 16[r12],r12\n");
3076 fprintf (f, "\tjmp [r12]\n");
3077 fprintf (f, "\tnop\n");
3078 fprintf (f, "\t.long 0\n");
3079 fprintf (f, "\t.long 0\n");
3082 /* Worker function for TARGET_TRAMPOLINE_INIT: copy the template into
     M_TRAMP, then patch the static chain at offset 16 and the target
     function address at offset 20 (the two .long slots above).  */
3085 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3087 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3089 emit_block_move (m_tramp, assemble_trampoline_template (),
3090 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3092 mem = adjust_address (m_tramp, SImode, 16);
3093 emit_move_insn (mem, chain_value);
3094 mem = adjust_address (m_tramp, SImode, 20);
3095 emit_move_insn (mem, fnaddr);
/* Scheduler issue rate (TARGET_SCHED_ISSUE_RATE): the V850E2 family is
   dual-issue, earlier cores issue one insn per cycle.  */
3099 v850_issue_rate (void)
3101 return (TARGET_V850E2_ALL? 2 : 1);
3104 /* V850 specific attributes.  "interrupt"/"interrupt_handler" mark
     interrupt service routines; "sda"/"tda"/"zda" choose a small
     data area.  All require a decl and take no arguments.  */
3106 static const struct attribute_spec v850_attribute_table[] =
3108 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
3109 { "interrupt_handler", 0, 0, true, false, false, v850_handle_interrupt_attribute },
3110 { "interrupt", 0, 0, true, false, false, v850_handle_interrupt_attribute },
3111 { "sda", 0, 0, true, false, false, v850_handle_data_area_attribute },
3112 { "tda", 0, 0, true, false, false, v850_handle_data_area_attribute },
3113 { "zda", 0, 0, true, false, false, v850_handle_data_area_attribute },
3114 { NULL, 0, 0, false, false, false, NULL }
3117 /* Initialize the GCC target structure.  Each #undef/#define pair
     overrides one hook's default before TARGET_INITIALIZER expands
     into the targetm definition at the bottom of the file.  */
3118 #undef TARGET_ASM_ALIGNED_HI_OP
3119 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3121 #undef TARGET_PRINT_OPERAND
3122 #define TARGET_PRINT_OPERAND v850_print_operand
3123 #undef TARGET_PRINT_OPERAND_ADDRESS
3124 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3125 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3126 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3128 #undef TARGET_ATTRIBUTE_TABLE
3129 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3131 #undef TARGET_INSERT_ATTRIBUTES
3132 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3134 #undef TARGET_ASM_SELECT_SECTION
3135 #define TARGET_ASM_SELECT_SECTION v850_select_section
3137 /* The assembler supports switchable .bss sections, but
3138 v850_select_section doesn't yet make use of them. */
3139 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3140 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3142 #undef TARGET_ENCODE_SECTION_INFO
3143 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3145 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3146 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3148 #undef TARGET_DEFAULT_TARGET_FLAGS
3149 #define TARGET_DEFAULT_TARGET_FLAGS (MASK_DEFAULT | MASK_APP_REGS)
3150 #undef TARGET_HANDLE_OPTION
3151 #define TARGET_HANDLE_OPTION v850_handle_option
3153 #undef TARGET_RTX_COSTS
3154 #define TARGET_RTX_COSTS v850_rtx_costs
3156 #undef TARGET_ADDRESS_COST
3157 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
3159 #undef TARGET_MACHINE_DEPENDENT_REORG
3160 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3162 #undef TARGET_SCHED_ISSUE_RATE
3163 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3165 #undef TARGET_FUNCTION_VALUE_REGNO_P
3166 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3167 #undef TARGET_FUNCTION_VALUE
3168 #define TARGET_FUNCTION_VALUE v850_function_value
3170 #undef TARGET_PROMOTE_PROTOTYPES
3171 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3173 #undef TARGET_RETURN_IN_MEMORY
3174 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3176 #undef TARGET_PASS_BY_REFERENCE
3177 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3179 #undef TARGET_CALLEE_COPIES
3180 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3182 #undef TARGET_SETUP_INCOMING_VARARGS
3183 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
3185 #undef TARGET_ARG_PARTIAL_BYTES
3186 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3188 #undef TARGET_FUNCTION_ARG
3189 #define TARGET_FUNCTION_ARG v850_function_arg
3191 #undef TARGET_FUNCTION_ARG_ADVANCE
3192 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3194 #undef TARGET_CAN_ELIMINATE
3195 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3197 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3198 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3199 #undef TARGET_TRAMPOLINE_INIT
3200 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3202 #undef TARGET_STRICT_ARGUMENT_NAMING
3203 #define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming
3205 #undef TARGET_OPTION_OPTIMIZATION_TABLE
3206 #define TARGET_OPTION_OPTIMIZATION_TABLE v850_option_optimization_table
3208 struct gcc_target targetm = TARGET_INITIALIZER;
3210 #include "gt-v850.h"