1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
38 #include "diagnostic-core.h"
40 #include "integrate.h"
43 #include "target-def.h"
48 #define streq(a,b) (strcmp (a, b) == 0)
51 static void v850_print_operand_address (FILE *, rtx);
53 /* Names of the various data areas used on the v850. */
54 tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
55 tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
57 /* Track the current data area set by the data area pragma (which
58 can be nested). Tested by check_default_data_area. */
59 data_area_stack_element * data_area_stack = NULL;
61 /* True if we don't need to check any more if the current
62 function is an interrupt handler. */
63 static int v850_interrupt_cache_p = FALSE;
/* Operands of a pending comparison, stashed here for a later
   compare-and-branch expansion.  */
65 rtx v850_compare_op0, v850_compare_op1;
67 /* Whether current function is an interrupt handler. */
68 static int v850_interrupt_p = FALSE;
/* Cached section objects for the v850 small-data areas
   (read-only sdata/zdata, tiny data, zero data, zero bss).  */
70 static GTY(()) section * rosdata_section;
71 static GTY(()) section * rozdata_section;
72 static GTY(()) section * tdata_section;
73 static GTY(()) section * zdata_section;
74 static GTY(()) section * zbss_section;
76 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
77 Specify whether to pass the argument by reference. */
/* NOTE(review): the embedded line numbers jump (83, 85-86, 88 are
   missing), so the return type, braces and the final size comparison
   are not visible in this listing.  */
80 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
81 enum machine_mode mode, const_tree type,
82 bool named ATTRIBUTE_UNUSED)
84 unsigned HOST_WIDE_INT size;
/* Prefer the front-end type's size when a type is available ...  */
87 size = int_size_in_bytes (type);
/* ... otherwise fall back to the machine mode's size (libcalls
   have no type information).  */
89 size = GET_MODE_SIZE (mode);
94 /* Implementing the Varargs Macros. */
/* TARGET_STRICT_ARGUMENT_NAMING hook: use strict naming except under
   the GHS ABI.  (Return type line is missing from this listing.)  */
97 v850_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED)
99 return !TARGET_GHS ? true : false;
102 /* Return an RTX to represent where an argument with mode MODE
103 and type TYPE will be passed to a function. If the result
104 is NULL_RTX, the argument will be pushed. */
/* NOTE(review): interior lines are missing from this listing (gaps in
   the embedded numbering), including the `if (!named)` guard and the
   switch's case labels.  */
107 v850_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
108 const_tree type, bool named)
110 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
111 rtx result = NULL_RTX;
118 size = int_size_in_bytes (type);
120 size = GET_MODE_SIZE (mode);
/* Round the argument size up to a whole number of words.  */
122 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
126 /* Once we have stopped using argument registers, do not start up again. */
127 cum->nbytes = 4 * UNITS_PER_WORD;
/* Small arguments honour the type's alignment when deciding which
   argument register slot to use.  */
131 if (size <= UNITS_PER_WORD && type)
132 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
136 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
/* Only the first four words (r6-r9) are passed in registers.  */
138 if (cum->nbytes > 4 * UNITS_PER_WORD)
141 if (type == NULL_TREE
142 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Map the word offset onto argument registers r6..r9.  */
145 switch (cum->nbytes / UNITS_PER_WORD)
148 result = gen_rtx_REG (mode, 6);
151 result = gen_rtx_REG (mode, 7);
154 result = gen_rtx_REG (mode, 8);
157 result = gen_rtx_REG (mode, 9);
166 /* Return the number of bytes which must be put into registers
167 for values which are part in registers and part in memory. */
/* NOTE(review): several lines missing from this listing (embedded
   numbering jumps); the early `return 0` paths are implied but not
   visible.  */
169 v850_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
170 tree type, bool named)
172 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Under GHS, unnamed (variadic) arguments never go in registers.  */
175 if (TARGET_GHS && !named)
179 size = int_size_in_bytes (type);
181 size = GET_MODE_SIZE (mode);
187 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
191 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
/* Already past the four register words: nothing in registers.  */
193 if (cum->nbytes > 4 * UNITS_PER_WORD)
/* Fits entirely in registers: nothing is split.  */
196 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
199 if (type == NULL_TREE
200 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Straddles the boundary: the leading part goes in registers.  */
203 return 4 * UNITS_PER_WORD - cum->nbytes;
206 /* Update the data in CUM to advance over an argument
207 of mode MODE and data type TYPE.
208 (TYPE is null for libcalls where that information may not be available.) */
/* NOTE(review): line 218 and the trailing rounding expression are
   missing from this listing; the visible conditional chain picks a
   size (pointer size for large aggregates, else mode/type size) and
   rounds it up by UNITS_PER_WORD.  */
211 v850_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
212 const_tree type, bool named ATTRIBUTE_UNUSED)
214 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
216 cum->nbytes += (((type && int_size_in_bytes (type) > 8
217 ? GET_MODE_SIZE (Pmode)
219 ? GET_MODE_SIZE (mode)
220 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
224 /* Return the high and low words of a CONST_DOUBLE */
/* Splits X into *P_HIGH / *P_LOW.  The mode's case labels (DFmode,
   SFmode, VOIDmode) are missing from this listing.  */
227 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
229 if (GET_CODE (x) == CONST_DOUBLE)
234 switch (GET_MODE (x))
237 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
238 REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
239 *p_high = t[1]; /* since v850 is little endian */
240 *p_low = t[0]; /* high is second word */
/* Single-precision case: the whole value fits in *P_HIGH.  */
244 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
245 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
/* Integer CONST_DOUBLE: take the two halves directly.  */
251 *p_high = CONST_DOUBLE_HIGH (x);
252 *p_low = CONST_DOUBLE_LOW (x);
260 fatal_insn ("const_double_split got a bad insn:", x);
264 /* Return the cost of the rtx R with code CODE. */
/* Cost of materialising the integer VALUE; the returned cost values
   themselves are on lines missing from this listing.  ZERO_COST is
   presumably the cost of a 5-bit immediate -- TODO confirm.  */
267 const_costs_int (HOST_WIDE_INT value, int zero_cost)
269 if (CONST_OK_FOR_I (value))
271 else if (CONST_OK_FOR_J (value))
273 else if (CONST_OK_FOR_K (value))
/* Cost of the constant R (the CONST_INT / CONST_DOUBLE / symbolic
   case labels are missing from this listing).  */
280 const_costs (rtx r, enum rtx_code c)
282 HOST_WIDE_INT high, low;
287 return const_costs_int (INTVAL (r), 0);
290 const_double_split (r, &high, &low);
/* SFmode fits in one word, so only the high word costs anything.  */
291 if (GET_MODE (r) == SFmode)
292 return const_costs_int (high, 1);
294 return const_costs_int (high, 1) + const_costs_int (low, 1);
/* TARGET_RTX_COSTS hook.  NOTE(review): many lines are missing from
   this listing, including the `codearg` parameter declaration and the
   outer switch's case labels (constants, MULT/DIV, etc.).  */
310 v850_rtx_costs (rtx x,
312 int outer_code ATTRIBUTE_UNUSED,
313 int opno ATTRIBUTE_UNUSED,
314 int * total, bool speed)
316 enum rtx_code code = (enum rtx_code) codearg;
325 *total = COSTS_N_INSNS (const_costs (x, code));
/* When optimising for size on V850E, multiply is cheap enough.  */
332 if (TARGET_V850E && !speed)
340 && ( GET_MODE (x) == SImode
341 || GET_MODE (x) == HImode
342 || GET_MODE (x) == QImode))
/* Cost of a multiply depends on whether the second operand is a
   register or an immediate of a given range.  */
344 if (GET_CODE (XEXP (x, 1)) == REG)
346 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
348 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
350 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
359 if (outer_code == COMPARE)
368 /* Print operand X using operand code CODE to assembly language output file
/* NOTE(review): this listing has gaps -- the function's case labels for
   the operand codes ('b'/'B'/'c'/'C', 'O', 'P', 'Q', 'S', 'T', default)
   and most `break`s fall on missing lines.  Visible logic only is
   commented below.  */
372 v850_print_operand (FILE * file, rtx x, int code)
374 HOST_WIDE_INT high, low;
379 /* We use 'c' operands with symbols for .vtinherit */
380 if (GET_CODE (x) == SYMBOL_REF)
382 output_addr_const(file, x);
/* Condition codes: 'B'/'C' print the reversed condition.  */
389 switch ((code == 'B' || code == 'C')
390 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
393 if (code == 'c' || code == 'C')
394 fprintf (file, "nz");
396 fprintf (file, "ne");
399 if (code == 'c' || code == 'C')
405 fprintf (file, "ge");
408 fprintf (file, "gt");
411 fprintf (file, "le");
414 fprintf (file, "lt");
417 fprintf (file, "nl");
423 fprintf (file, "nh");
432 case 'F': /* high word of CONST_DOUBLE */
433 switch (GET_CODE (x))
/* CONST_INT: the "high word" is just the sign extension.  */
436 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
440 const_double_split (x, &high, &low);
441 fprintf (file, "%ld", (long) high);
448 case 'G': /* low word of CONST_DOUBLE */
449 switch (GET_CODE (x))
452 fprintf (file, "%ld", (long) INTVAL (x));
456 const_double_split (x, &high, &low);
457 fprintf (file, "%ld", (long) low);
/* Low 16 bits of a constant (code letter on a missing line).  */
465 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
/* log2 of a power-of-two constant (code letter on a missing line).  */
468 fprintf (file, "%d", exact_log2 (INTVAL (x)));
/* Print the small-data-area relocation prefix for a symbol.  */
471 gcc_assert (special_symbolref_operand (x, VOIDmode));
473 if (GET_CODE (x) == CONST)
474 x = XEXP (XEXP (x, 0), 0);
476 gcc_assert (GET_CODE (x) == SYMBOL_REF);
478 if (SYMBOL_REF_ZDA_P (x))
479 fprintf (file, "zdaoff");
480 else if (SYMBOL_REF_SDA_P (x))
481 fprintf (file, "sdaoff");
482 else if (SYMBOL_REF_TDA_P (x))
483 fprintf (file, "tdaoff");
488 gcc_assert (special_symbolref_operand (x, VOIDmode));
489 output_addr_const (file, x);
/* Print the base register matching the symbol's data area.  */
492 gcc_assert (special_symbolref_operand (x, VOIDmode));
494 if (GET_CODE (x) == CONST)
495 x = XEXP (XEXP (x, 0), 0);
497 gcc_assert (GET_CODE (x) == SYMBOL_REF);
499 if (SYMBOL_REF_ZDA_P (x))
500 fprintf (file, "r0");
501 else if (SYMBOL_REF_SDA_P (x))
502 fprintf (file, "gp");
503 else if (SYMBOL_REF_TDA_P (x))
504 fprintf (file, "ep");
508 case 'R': /* 2nd word of a double. */
509 switch (GET_CODE (x))
512 fprintf (file, reg_names[REGNO (x) + 1]);
/* MEM: form the address of the second word (offset 4).  */
515 x = XEXP (adjust_address (x, SImode, 4), 0);
516 v850_print_operand_address (file, x);
517 if (GET_CODE (x) == CONST_INT)
518 fprintf (file, "[r0]");
527 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
528 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
535 /* Like an 'S' operand above, but for unsigned loads only. */
536 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
541 case 'W': /* print the instruction suffix */
542 switch (GET_MODE (x))
547 case QImode: fputs (".b", file); break;
548 case HImode: fputs (".h", file); break;
549 case SImode: fputs (".w", file); break;
550 case SFmode: fputs (".w", file); break;
553 case '.': /* register r0 */
554 fputs (reg_names[0], file);
556 case 'z': /* reg or zero */
557 if (GET_CODE (x) == REG)
558 fputs (reg_names[REGNO (x)], file);
559 else if ((GET_MODE(x) == SImode
560 || GET_MODE(x) == DFmode
561 || GET_MODE(x) == SFmode)
562 && x == CONST0_RTX(GET_MODE(x)))
563 fputs (reg_names[0], file);
566 gcc_assert (x == const0_rtx);
567 fputs (reg_names[0], file);
/* Default case: print the operand according to its rtx code.  */
571 switch (GET_CODE (x))
574 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
575 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
578 output_address (XEXP (x, 0));
582 fputs (reg_names[REGNO (x)], file);
585 fputs (reg_names[subreg_regno (x)], file);
592 v850_print_operand_address (file, x);
603 /* Output assembly language output for the address ADDR to FILE. */
/* NOTE(review): case labels (REG, LO_SUM, PLUS, SYMBOL_REF, CONST,
   default) and closing brackets fall on lines missing from this
   listing.  */
606 v850_print_operand_address (FILE * file, rtx addr)
608 switch (GET_CODE (addr))
/* Plain register: emit "0[reg]".  */
611 fprintf (file, "0[");
612 v850_print_operand (file, addr, 0);
/* LO_SUM: emit "lo(sym)[reg]".  */
616 if (GET_CODE (XEXP (addr, 0)) == REG)
619 fprintf (file, "lo(");
620 v850_print_operand (file, XEXP (addr, 1), 0);
621 fprintf (file, ")[");
622 v850_print_operand (file, XEXP (addr, 0), 0);
/* PLUS with a register base: emit "offset[reg]".  */
627 if (GET_CODE (XEXP (addr, 0)) == REG
628 || GET_CODE (XEXP (addr, 0)) == SUBREG
631 v850_print_operand (file, XEXP (addr, 1), 0);
633 v850_print_operand (file, XEXP (addr, 0), 0);
638 v850_print_operand (file, XEXP (addr, 0), 0);
640 v850_print_operand (file, XEXP (addr, 1), 0);
/* Small-data-area symbol: emit "areaoff(sym)[basereg]".  */
645 const char *off_name = NULL;
646 const char *reg_name = NULL;
648 if (SYMBOL_REF_ZDA_P (addr))
653 else if (SYMBOL_REF_SDA_P (addr))
658 else if (SYMBOL_REF_TDA_P (addr))
665 fprintf (file, "%s(", off_name);
666 output_addr_const (file, addr);
668 fprintf (file, ")[%s]", reg_name);
/* CONST wrapping a small-data symbol: same "areaoff(...)[reg]" form.  */
672 if (special_symbolref_operand (addr, VOIDmode))
674 rtx x = XEXP (XEXP (addr, 0), 0);
675 const char *off_name;
676 const char *reg_name;
678 if (SYMBOL_REF_ZDA_P (x))
683 else if (SYMBOL_REF_SDA_P (x))
688 else if (SYMBOL_REF_TDA_P (x))
696 fprintf (file, "%s(", off_name);
697 output_addr_const (file, addr);
698 fprintf (file, ")[%s]", reg_name);
701 output_addr_const (file, addr);
704 output_addr_const (file, addr);
/* TARGET_PRINT_OPERAND_PUNCT_VALID_P hook; its body (presumably
   accepting '.' used above) is on lines missing from this listing.  */
710 v850_print_operand_punct_valid_p (unsigned char code)
715 /* When assemble_integer is used to emit the offsets for a switch
716 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
717 output_addr_const will normally barf at this, but it is OK to omit
718 the truncate and just emit the difference of the two labels. The
719 .hword directive will automatically handle the truncation for us.
721 Returns true if rtx was handled, false otherwise. */
724 v850_output_addr_const_extra (FILE * file, rtx x)
/* Only TRUNCATE expressions are handled here.  */
726 if (GET_CODE (x) != TRUNCATE)
731 /* We must also handle the case where the switch table was passed a
732 constant value and so has been collapsed. In this case the first
733 label will have been deleted. In such a case it is OK to emit
734 nothing, since the table will not be used.
735 (cf gcc.c-torture/compile/990801-1.c). */
736 if (GET_CODE (x) == MINUS
737 && GET_CODE (XEXP (x, 0)) == LABEL_REF
738 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
739 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
/* Otherwise strip the TRUNCATE and emit the label difference.  */
742 output_addr_const (file, x);
746 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
/* Returns the assembler template for a single-word move; several
   `return` strings and the reg-reg case fall on lines missing from
   this listing.  */
750 output_move_single (rtx * operands)
752 rtx dst = operands[0];
753 rtx src = operands[1];
760 else if (GET_CODE (src) == CONST_INT)
762 HOST_WIDE_INT value = INTVAL (src);
764 if (CONST_OK_FOR_J (value)) /* Signed 5-bit immediate. */
767 else if (CONST_OK_FOR_K (value)) /* Signed 16-bit immediate. */
768 return "movea %1,%.,%0";
770 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
771 return "movhi hi0(%1),%.,%0";
773 /* A random constant. */
774 else if (TARGET_V850E || TARGET_V850E2_ALL)
/* Older cores need a two-instruction hi/lo sequence.  */
777 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
/* SFmode constants: reuse the integer paths on the bit pattern.  */
780 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
782 HOST_WIDE_INT high, low;
784 const_double_split (src, &high, &low);
786 if (CONST_OK_FOR_J (high)) /* Signed 5-bit immediate. */
789 else if (CONST_OK_FOR_K (high)) /* Signed 16-bit immediate. */
790 return "movea %F1,%.,%0";
792 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
793 return "movhi hi0(%F1),%.,%0";
795 /* A random constant. */
796 else if (TARGET_V850E || TARGET_V850E2_ALL)
800 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
803 else if (GET_CODE (src) == MEM)
804 return "%S1ld%W1 %1,%0";
806 else if (special_symbolref_operand (src, VOIDmode))
807 return "movea %O1(%P1),%Q1,%0";
809 else if (GET_CODE (src) == LABEL_REF
810 || GET_CODE (src) == SYMBOL_REF
811 || GET_CODE (src) == CONST)
813 if (TARGET_V850E || TARGET_V850E2_ALL)
814 return "mov hilo(%1),%0";
816 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
819 else if (GET_CODE (src) == HIGH)
820 return "movhi hi(%1),%.,%0";
821 else if (GET_CODE (src) == LO_SUM)
824 operands[2] = XEXP (src, 0);
825 operands[3] = XEXP (src, 1);
826 return "movea lo(%3),%2,%0";
/* Stores: zero stores use r0 (%.) as the source.  */
830 else if (GET_CODE (dst) == MEM)
833 return "%S0st%W0 %1,%0";
835 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
836 return "%S0st%W0 %.,%0";
838 else if (GET_CODE (src) == CONST_DOUBLE
839 && CONST0_RTX (GET_MODE (dst)) == src)
840 return "%S0st%W0 %.,%0";
843 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
847 /* Generate comparison code. */
/* Predicate: OP is a comparison of the FPU CC register against zero in
   one of the "Z-flag" FP condition modes (LT/LE/EQ).  The actual
   return statements are on lines missing from this listing.  */
849 v850_float_z_comparison_operator (rtx op, enum machine_mode mode)
851 enum rtx_code code = GET_CODE (op);
853 if (GET_RTX_CLASS (code) != RTX_COMPARE
854 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
857 if (mode != GET_MODE (op) && mode != VOIDmode)
/* Must compare CC_REGNUM against const0.  */
860 if ((GET_CODE (XEXP (op, 0)) != REG
861 || REGNO (XEXP (op, 0)) != CC_REGNUM)
862 || XEXP (op, 1) != const0_rtx)
865 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LTmode)
867 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LEmode)
869 if (GET_MODE (XEXP (op, 0)) == CC_FPU_EQmode)
/* Predicate: like v850_float_z_comparison_operator, but for the
   "NZ-flag" FP condition modes (GT/GE/NE).  Return statements are on
   lines missing from this listing.  */
876 v850_float_nz_comparison_operator (rtx op, enum machine_mode mode)
878 enum rtx_code code = GET_CODE (op);
880 if (GET_RTX_CLASS (code) != RTX_COMPARE
881 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
884 if (mode != GET_MODE (op) && mode != VOIDmode)
887 if ((GET_CODE (XEXP (op, 0)) != REG
888 || REGNO (XEXP (op, 0)) != CC_REGNUM)
889 || XEXP (op, 1) != const0_rtx)
892 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GTmode)
894 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GEmode)
896 if (GET_MODE (XEXP (op, 0)) == CC_FPU_NEmode)
/* TARGET_CC_MODES / SELECT_CC_MODE: map a float comparison code to the
   matching CC_FPU_* mode.  The `case` labels (LE/GE/LT/GT/EQ/NE) and
   the non-float fallthrough are on lines missing from this listing.  */
903 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
905 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
910 return CC_FPU_LEmode;
912 return CC_FPU_GEmode;
914 return CC_FPU_LTmode;
916 return CC_FPU_GTmode;
918 return CC_FPU_EQmode;
920 return CC_FPU_NEmode;
/* Emit the FPU compare insn for COND on OP0/OP1 (DFmode or SFmode) and
   return the CC mode to test.  The switch's `case` labels are on lines
   missing from this listing.
   NOTE(review): the SFmode branch tests GET_MODE of the global
   v850_compare_op0 rather than of OP0 -- inconsistent with the DFmode
   branch above; verify against the full source.  */
929 v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
931 if (GET_MODE(op0) == DFmode)
936 emit_insn (gen_cmpdf_le_insn (op0, op1));
939 emit_insn (gen_cmpdf_ge_insn (op0, op1));
942 emit_insn (gen_cmpdf_lt_insn (op0, op1));
945 emit_insn (gen_cmpdf_gt_insn (op0, op1));
948 emit_insn (gen_cmpdf_eq_insn (op0, op1));
951 emit_insn (gen_cmpdf_ne_insn (op0, op1));
957 else if (GET_MODE(v850_compare_op0) == SFmode)
962 emit_insn (gen_cmpsf_le_insn(op0, op1));
965 emit_insn (gen_cmpsf_ge_insn(op0, op1));
968 emit_insn (gen_cmpsf_lt_insn(op0, op1));
971 emit_insn (gen_cmpsf_gt_insn(op0, op1));
974 emit_insn (gen_cmpsf_eq_insn(op0, op1));
977 emit_insn (gen_cmpsf_ne_insn(op0, op1));
988 return v850_select_cc_mode (cond, op0, op1);
/* Emit a compare of OP0/OP1 and return an rtx of the form
   (COND cc-reg 0) suitable for a branch or set insn.  Integer
   comparisons use the plain CC register; float comparisons go through
   v850_gen_float_compare and copy FCC into CC.  */
992 v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
994 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
996 emit_insn (gen_cmpsi_insn (op0, op1));
997 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
1002 mode = v850_gen_float_compare (cond, mode, op0, op1);
1003 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1004 emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1006 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1010 /* Return maximum offset supported for a short EP memory reference of mode
1011 MODE and signedness UNSIGNEDP. */
/* The mode `case` labels (QImode/HImode/SImode-or-SFmode) and the final
   return are on lines missing from this listing; the visible bodies
   select the sld/sst displacement limits per mode and core.  */
1014 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
/* Byte accesses: 4-bit displacement with -msmall-sld or unsigned
   loads on V850E, else 7-bit.  */
1021 if (TARGET_SMALL_SLD)
1022 max_offset = (1 << 4);
1023 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1025 max_offset = (1 << 4);
1027 max_offset = (1 << 7);
/* Halfword accesses: 5-bit displacement in the same cases, else 8-bit.  */
1031 if (TARGET_SMALL_SLD)
1032 max_offset = (1 << 5);
1033 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1035 max_offset = (1 << 5);
1037 max_offset = (1 << 8);
/* Word accesses: 8-bit displacement.  */
1042 max_offset = (1 << 8);
1052 /* Return true if OP is a valid short EP memory reference */
/* Several lines (the TARGET_EP guard, case labels, returns) are
   missing from this listing.  */
1055 ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
1061 /* If we are not using the EP register on a per-function basis
1062 then do not allow this optimization at all. This is to
1063 prevent the use of the SLD/SST instructions which cannot be
1064 guaranteed to work properly due to a hardware bug. */
1068 if (GET_CODE (op) != MEM)
1071 max_offset = ep_memory_offset (mode, unsigned_load);
/* Offsets must be naturally aligned for the access size.  */
1073 mask = GET_MODE_SIZE (mode) - 1;
1075 addr = XEXP (op, 0);
1076 if (GET_CODE (addr) == CONST)
1077 addr = XEXP (addr, 0);
1079 switch (GET_CODE (addr))
/* A bare TDA symbol is ep-relative by definition.  */
1085 return SYMBOL_REF_TDA_P (addr);
/* A bare register is OK only if it IS the ep register.  */
1088 return REGNO (addr) == EP_REGNUM;
/* reg+const / sym+const: the offset must be small, non-negative
   and aligned, and the base must be ep or a TDA symbol.  */
1091 op0 = XEXP (addr, 0);
1092 op1 = XEXP (addr, 1);
1093 if (GET_CODE (op1) == CONST_INT
1094 && INTVAL (op1) < max_offset
1095 && INTVAL (op1) >= 0
1096 && (INTVAL (op1) & mask) == 0)
1098 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1101 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1110 /* Substitute memory references involving a pointer, to use the ep pointer,
1111 taking care to save and preserve the ep. */
/* NOTE(review): parameter lines 1115-1120 (last_insn, uses, regno,
   p_r1, p_ep) and several statements are missing from this listing.  */
1114 substitute_ep_register (rtx first_insn,
1121 rtx reg = gen_rtx_REG (Pmode, regno);
/* Lazily create the r1/ep rtxes the first time through.  */
1126 df_set_regs_ever_live (1, true);
1127 *p_r1 = gen_rtx_REG (Pmode, 1);
1128 *p_ep = gen_rtx_REG (Pmode, 30);
/* Optional statistics dump: each substituted use saves 2 bytes, minus
   the 3-insn save/load/restore overhead.  */
1133 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1134 2 * (uses - 3), uses, reg_names[regno],
1135 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1136 INSN_UID (first_insn), INSN_UID (last_insn));
1138 if (GET_CODE (first_insn) == NOTE)
1139 first_insn = next_nonnote_insn (first_insn);
1141 last_insn = next_nonnote_insn (last_insn);
1142 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1144 if (GET_CODE (insn) == INSN)
1146 rtx pattern = single_set (insn);
1148 /* Replace the memory references. */
1152 /* Memory operands are signed by default. */
1153 int unsignedp = FALSE;
/* Locate the (at most one) MEM operand; mem-to-mem sets are skipped.  */
1155 if (GET_CODE (SET_DEST (pattern)) == MEM
1156 && GET_CODE (SET_SRC (pattern)) == MEM)
1159 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1160 p_mem = &SET_DEST (pattern);
1162 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1163 p_mem = &SET_SRC (pattern);
1165 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1166 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1167 p_mem = &XEXP (SET_SRC (pattern), 0);
1169 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1170 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1172 p_mem = &XEXP (SET_SRC (pattern), 0);
/* Rewrite addresses based on REGNO to be ep-relative.  */
1180 rtx addr = XEXP (*p_mem, 0);
1182 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1183 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1185 else if (GET_CODE (addr) == PLUS
1186 && GET_CODE (XEXP (addr, 0)) == REG
1187 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1188 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1189 && ((INTVAL (XEXP (addr, 1)))
1190 < ep_memory_offset (GET_MODE (*p_mem),
1192 && ((INTVAL (XEXP (addr, 1))) >= 0))
1193 *p_mem = change_address (*p_mem, VOIDmode,
1194 gen_rtx_PLUS (Pmode,
1202 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1203 insn = prev_nonnote_insn (first_insn);
1204 if (insn && GET_CODE (insn) == INSN
1205 && GET_CODE (PATTERN (insn)) == SET
1206 && SET_DEST (PATTERN (insn)) == *p_ep
1207 && SET_SRC (PATTERN (insn)) == *p_r1)
/* Bracket the region: save ep into r1, load ep with the pointer,
   then restore ep from r1 afterwards.  */
1210 emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);
1212 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
1213 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
1217 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1218 the -mep mode to copy heavily used pointers to ep to use the implicit
/* NOTE(review): the function's name/signature line and many interior
   lines are missing from this listing; `regs[]` tracks, per hard
   register, the use count and the first/last insn of a candidate
   ep-substitution region within a basic block.  */
1230 regs[FIRST_PSEUDO_REGISTER];
1239 /* If not ep mode, just return now. */
1243 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1246 regs[i].first_insn = NULL_RTX;
1247 regs[i].last_insn = NULL_RTX;
1250 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1252 switch (GET_CODE (insn))
1254 /* End of basic block */
/* At a block boundary, substitute the most-used pointer (if any)
   and reset all per-register tracking.  */
1261 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1263 if (max_uses < regs[i].uses)
1265 max_uses = regs[i].uses;
1271 substitute_ep_register (regs[max_regno].first_insn,
1272 regs[max_regno].last_insn,
1273 max_uses, max_regno, &r1, &ep);
1277 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1280 regs[i].first_insn = NULL_RTX;
1281 regs[i].last_insn = NULL_RTX;
1289 pattern = single_set (insn);
1291 /* See if there are any memory references we can shorten */
1294 rtx src = SET_SRC (pattern);
1295 rtx dest = SET_DEST (pattern);
1297 /* Memory operands are signed by default. */
1298 int unsignedp = FALSE;
1300 /* We might have (SUBREG (MEM)) here, so just get rid of the
1301 subregs to make this code simpler. */
1302 if (GET_CODE (dest) == SUBREG
1303 && (GET_CODE (SUBREG_REG (dest)) == MEM
1304 || GET_CODE (SUBREG_REG (dest)) == REG))
1305 alter_subreg (&dest);
1306 if (GET_CODE (src) == SUBREG
1307 && (GET_CODE (SUBREG_REG (src)) == MEM
1308 || GET_CODE (SUBREG_REG (src)) == REG))
1309 alter_subreg (&src);
/* Find the MEM operand, looking through sign/zero extensions.  */
1311 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1314 else if (GET_CODE (dest) == MEM)
1317 else if (GET_CODE (src) == MEM)
1320 else if (GET_CODE (src) == SIGN_EXTEND
1321 && GET_CODE (XEXP (src, 0)) == MEM)
1322 mem = XEXP (src, 0);
1324 else if (GET_CODE (src) == ZERO_EXTEND
1325 && GET_CODE (XEXP (src, 0)) == MEM)
1327 mem = XEXP (src, 0);
1333 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
/* Not already short: count this as a use of the base register if
   the address is reg or reg+small-offset.  */
1336 else if (!use_ep && mem
1337 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1339 rtx addr = XEXP (mem, 0);
1343 if (GET_CODE (addr) == REG)
1346 regno = REGNO (addr);
1349 else if (GET_CODE (addr) == PLUS
1350 && GET_CODE (XEXP (addr, 0)) == REG
1351 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1352 && ((INTVAL (XEXP (addr, 1)))
1353 < ep_memory_offset (GET_MODE (mem), unsignedp))
1354 && ((INTVAL (XEXP (addr, 1))) >= 0))
1357 regno = REGNO (XEXP (addr, 0));
1366 regs[regno].last_insn = insn;
1367 if (!regs[regno].first_insn)
1368 regs[regno].first_insn = insn;
1372 /* Loading up a register in the basic block zaps any savings
1374 if (GET_CODE (dest) == REG)
1376 enum machine_mode mode = GET_MODE (dest);
1380 regno = REGNO (dest);
1381 endregno = regno + HARD_REGNO_NREGS (regno, mode);
1385 /* See if we can use the pointer before this
1390 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1392 if (max_uses < regs[i].uses)
1394 max_uses = regs[i].uses;
/* Substitute now if the clobbered register was the best candidate.  */
1400 && max_regno >= regno
1401 && max_regno < endregno)
1403 substitute_ep_register (regs[max_regno].first_insn,
1404 regs[max_regno].last_insn,
1405 max_uses, max_regno, &r1,
1408 /* Since we made a substitution, zap all remembered
1410 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1413 regs[i].first_insn = NULL_RTX;
1414 regs[i].last_insn = NULL_RTX;
/* In any case, forget tracking for the overwritten registers.  */
1419 for (i = regno; i < endregno; i++)
1422 regs[i].first_insn = NULL_RTX;
1423 regs[i].last_insn = NULL_RTX;
1431 /* # of registers saved by the interrupt handler. */
1432 #define INTERRUPT_FIXED_NUM 5
1434 /* # of bytes for registers saved by the interrupt handler. */
1435 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1437 /* # of words saved for other registers. */
1438 #define INTERRUPT_ALL_SAVE_NUM \
1439 (30 - INTERRUPT_FIXED_NUM)
/* Bytes for the "save everything" interrupt register block.  */
1441 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
/* Compute the number of bytes of register save area needed by the
   current function; if P_REG_SAVED is non-NULL also return a bitmask
   of the saved registers.  NOTE(review): several lines (size
   accumulation, some case labels) are missing from this listing.  */
1444 compute_register_save_size (long * p_reg_saved)
1448 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1449 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1452 /* Count the return pointer if we need to save it. */
1453 if (crtl->profile && !call_p)
1455 df_set_regs_ever_live (LINK_POINTER_REGNUM, true);
1459 /* Count space for the register saves. */
1460 if (interrupt_handler)
1462 for (i = 0; i <= 31; i++)
1466 if (df_regs_ever_live_p (i) || call_p)
1469 reg_saved |= 1L << i;
1473 /* We don't save/restore r0 or the stack pointer */
1475 case STACK_POINTER_REGNUM:
1478 /* For registers with fixed use, we save them, set them to the
1479 appropriate value, and then restore them.
1480 These registers are handled specially, so don't list them
1481 on the list of registers to save in the prologue. */
1482 case 1: /* temp used to hold ep */
1484 case 10: /* temp used to call interrupt save/restore */
1485 case 11: /* temp used to call interrupt save/restore (long call) */
1486 case EP_REGNUM: /* ep */
1493 /* Find the first register that needs to be saved. */
1494 for (i = 0; i <= 31; i++)
1495 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1496 || i == LINK_POINTER_REGNUM))
1499 /* If it is possible that an out-of-line helper function might be
1500 used to generate the prologue for the current function, then we
1501 need to cover the possibility that such a helper function will
1502 be used, despite the fact that there might be gaps in the list of
1503 registers that need to be saved. To detect this we note that the
1504 helper functions always push at least register r29 (provided
1505 that the function is not an interrupt handler). */
1507 if (TARGET_PROLOG_FUNCTION
1508 && (i == 2 || ((i >= 20) && (i < 30))))
1513 reg_saved |= 1L << i;
1518 /* Helper functions save all registers between the starting
1519 register and the last register, regardless of whether they
1520 are actually used by the function or not. */
1521 for (; i <= 29; i++)
1524 reg_saved |= 1L << i;
1527 if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1530 reg_saved |= 1L << LINK_POINTER_REGNUM;
/* Non-helper path: save only the live call-saved registers.  */
1535 for (; i <= 31; i++)
1536 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1537 || i == LINK_POINTER_REGNUM))
1540 reg_saved |= 1L << i;
1546 *p_reg_saved = reg_saved;
/* Total frame size: local variables (SIZE, rounded on a missing line),
   plus the register save area, plus outgoing argument space.  */
1552 compute_frame_size (int size, long * p_reg_saved)
1555 + compute_register_save_size (p_reg_saved)
1556 + crtl->outgoing_args_size);
/* Decide whether calling an out-of-line prologue/epilogue helper is
   smaller than inline saves, by comparing estimated byte lengths of
   the two sequences.  */
1560 use_prolog_function (int num_save, int frame_size)
1562 int alloc_stack = (4 * num_save);
1563 int unalloc_stack = frame_size - alloc_stack;
1564 int save_func_len, restore_func_len;
1565 int save_normal_len, restore_normal_len;
/* callt-based helpers are 2 bytes; jarl-based 4, or longer with
   -mlong-calls.  */
1567 if (! TARGET_DISABLE_CALLT)
1568 save_func_len = restore_func_len = 2;
1570 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
/* Stack not covered by the helper needs an extra add (2 or 4 bytes).  */
1574 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1575 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1578 /* See if we would have used ep to save the stack. */
1579 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1580 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1582 save_normal_len = restore_normal_len = 4 * num_save;
1584 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1585 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1587 /* Don't bother checking if we don't actually save any space.
1588 This happens for instance if one register is saved and additional
1589 stack space is allocated. */
1590 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
/* Expand the function prologue: save interrupt state, save registers
   (inline or via a helper insn), allocate the stack frame and set up
   the frame pointer.  NOTE(review): interior lines are missing from
   this listing (declarations of offset/reg_saved/save_regs/save_all,
   loop bodies of the register-save loops, etc.).  */
1594 expand_prologue (void)
1597 unsigned int size = get_frame_size ();
1598 unsigned int actual_fsize;
1599 unsigned int init_stack_alloc = 0;
1602 unsigned int num_save;
1604 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1607 actual_fsize = compute_frame_size (size, &reg_saved);
1609 /* Save/setup global registers for interrupt functions right now. */
1610 if (interrupt_handler)
1612 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1613 emit_insn (gen_callt_save_interrupt ());
1615 emit_insn (gen_save_interrupt ());
/* The interrupt save insns already accounted for this space.  */
1617 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1619 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1620 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1623 /* Identify all of the saved registers. */
1625 for (i = 1; i < 32; i++)
1627 if (((1L << i) & reg_saved) != 0)
1628 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1631 /* See if we have an insn that allocates stack space and saves the particular
1632 registers we want to. */
1633 save_all = NULL_RTX;
1634 if (TARGET_PROLOG_FUNCTION && num_save > 0)
1636 if (use_prolog_function (num_save, actual_fsize))
1638 int alloc_stack = 4 * num_save;
/* Build a PARALLEL: sp adjustment, one store per register, plus
   clobbers of r10 (and r11 for long calls) for the helper call.  */
1641 save_all = gen_rtx_PARALLEL
1643 rtvec_alloc (num_save + 1
1644 + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1646 XVECEXP (save_all, 0, 0)
1647 = gen_rtx_SET (VOIDmode,
1649 gen_rtx_PLUS (Pmode,
1651 GEN_INT(-alloc_stack)));
1652 for (i = 0; i < num_save; i++)
1655 XVECEXP (save_all, 0, i+1)
1656 = gen_rtx_SET (VOIDmode,
1658 gen_rtx_PLUS (Pmode,
1664 if (TARGET_DISABLE_CALLT)
1666 XVECEXP (save_all, 0, num_save + 1)
1667 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1669 if (TARGET_LONG_CALLS)
1670 XVECEXP (save_all, 0, num_save + 2)
1671 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
/* Only use the PARALLEL if some insn pattern recognises it.  */
1674 code = recog (save_all, NULL_RTX, NULL);
1677 rtx insn = emit_insn (save_all);
1678 INSN_CODE (insn) = code;
1679 actual_fsize -= alloc_stack;
1683 save_all = NULL_RTX;
1687 /* If no prolog save function is available, store the registers the old
1688 fashioned way (one by one). */
1691 /* Special case interrupt functions that save all registers for a call. */
1692 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1694 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1695 emit_insn (gen_callt_save_all_interrupt ());
1697 emit_insn (gen_save_all_interrupt ());
1702 /* If the stack is too big, allocate it in chunks so we can do the
1703 register saves. We use the register save size so we use the ep
1705 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1706 init_stack_alloc = compute_register_save_size (NULL);
1708 init_stack_alloc = actual_fsize;
1710 /* Save registers at the beginning of the stack frame. */
1711 offset = init_stack_alloc - 4;
1713 if (init_stack_alloc)
1714 emit_insn (gen_addsi3 (stack_pointer_rtx,
1716 GEN_INT (- (signed) init_stack_alloc)));
1718 /* Save the return pointer first. */
1719 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1721 emit_move_insn (gen_rtx_MEM (SImode,
1722 plus_constant (stack_pointer_rtx,
1724 save_regs[--num_save]);
1728 for (i = 0; i < num_save; i++)
1730 emit_move_insn (gen_rtx_MEM (SImode,
1731 plus_constant (stack_pointer_rtx,
1739 /* Allocate the rest of the stack that was not allocated above (either it is
1740 > 32K or we just called a function to save the registers and needed more
1742 if (actual_fsize > init_stack_alloc)
1744 int diff = actual_fsize - init_stack_alloc;
1745 if (CONST_OK_FOR_K (-diff))
1746 emit_insn (gen_addsi3 (stack_pointer_rtx,
/* Too big for an immediate: go through a scratch register (r12).  */
1751 rtx reg = gen_rtx_REG (Pmode, 12);
1752 emit_move_insn (reg, GEN_INT (-diff));
1753 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
1757 /* If we need a frame pointer, set it up now. */
1758 if (frame_pointer_needed)
1759 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
/* Emit RTL for the current function's epilogue: pop the saved
   registers, release the stack frame, and emit a return (RETI-style
   sequences for interrupt handlers).  This is the mirror image of
   expand_prologue.  */
1764 expand_epilogue (void)
1767 unsigned int size = get_frame_size ();
/* NOTE(review): "®_saved" below looks like a mojibake of "&reg_saved"
   ("&re" + "g" collapsed to the 0xAE registered-sign byte) — confirm
   against the upstream source and restore the address-of expression.  */
1769 int actual_fsize = compute_frame_size (size, ®_saved);
1770 rtx restore_regs[32];
1772 unsigned int num_restore;
1774 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1776 /* Eliminate the initial stack stored by interrupt functions. */
1777 if (interrupt_handler)
1779 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1780 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1781 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1784 /* Cut off any dynamic stack created. */
1785 if (frame_pointer_needed)
1786 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1788 /* Identify all of the saved registers. */
/* r0 is hard-wired to zero and never saved, so start scanning at 1.  */
1790 for (i = 1; i < 32; i++)
1792 if (((1L << i) & reg_saved) != 0)
1793 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1796 /* See if we have an insn that restores the particular registers we
1798 restore_all = NULL_RTX;
1800 if (TARGET_PROLOG_FUNCTION
1802 && !interrupt_handler)
1804 int alloc_stack = (4 * num_restore);
1806 /* Don't bother checking if we don't actually save any space. */
1807 if (use_prolog_function (num_restore, actual_fsize))
/* Build a PARALLEL of the form:
     (return) (set sp (plus sp alloc)) (set reg (mem ...)) ...
   so recog can match one of the out-of-line restore patterns.  */
1810 restore_all = gen_rtx_PARALLEL (VOIDmode,
1811 rtvec_alloc (num_restore + 2));
1812 XVECEXP (restore_all, 0, 0) = ret_rtx;
1813 XVECEXP (restore_all, 0, 1)
1814 = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1815 gen_rtx_PLUS (Pmode,
1817 GEN_INT (alloc_stack)));
1819 offset = alloc_stack - 4;
1820 for (i = 0; i < num_restore; i++)
1822 XVECEXP (restore_all, 0, i+2)
1823 = gen_rtx_SET (VOIDmode,
1826 gen_rtx_PLUS (Pmode,
1832 code = recog (restore_all, NULL_RTX, NULL);
/* Release the part of the frame not covered by the restore insn
   before emitting it.  */
1838 actual_fsize -= alloc_stack;
1841 if (CONST_OK_FOR_K (actual_fsize))
1842 emit_insn (gen_addsi3 (stack_pointer_rtx,
1844 GEN_INT (actual_fsize)));
/* Adjustment too large for an immediate: stage it through r12.  */
1847 rtx reg = gen_rtx_REG (Pmode, 12);
1848 emit_move_insn (reg, GEN_INT (actual_fsize));
1849 emit_insn (gen_addsi3 (stack_pointer_rtx,
1855 insn = emit_jump_insn (restore_all);
1856 INSN_CODE (insn) = code;
1860 restore_all = NULL_RTX;
1864 /* If no epilogue save function is available, restore the registers the
1865 old fashioned way (one by one). */
1868 unsigned int init_stack_free;
1870 /* If the stack is large, we need to cut it down in 2 pieces. */
1871 if (interrupt_handler)
1872 init_stack_free = 0;
1873 else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1874 init_stack_free = 4 * num_restore;
1876 init_stack_free = (signed) actual_fsize;
1878 /* Deallocate the rest of the stack if it is > 32K. */
1879 if ((unsigned int) actual_fsize > init_stack_free)
1883 diff = actual_fsize - init_stack_free;
1885 if (CONST_OK_FOR_K (diff))
1886 emit_insn (gen_addsi3 (stack_pointer_rtx,
/* Again, stage an over-large adjustment through scratch r12.  */
1891 rtx reg = gen_rtx_REG (Pmode, 12);
1892 emit_move_insn (reg, GEN_INT (diff));
1893 emit_insn (gen_addsi3 (stack_pointer_rtx,
1899 /* Special case interrupt functions that save all registers
1901 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1903 if (! TARGET_DISABLE_CALLT)
1904 emit_insn (gen_callt_restore_all_interrupt ());
1906 emit_insn (gen_restore_all_interrupt ());
1910 /* Restore registers from the beginning of the stack frame. */
1911 int offset = init_stack_free - 4;
1913 /* Restore the return pointer first. */
1915 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
1917 emit_move_insn (restore_regs[--num_restore],
1918 gen_rtx_MEM (SImode,
1919 plus_constant (stack_pointer_rtx,
1924 for (i = 0; i < num_restore; i++)
1926 emit_move_insn (restore_regs[i],
1927 gen_rtx_MEM (SImode,
1928 plus_constant (stack_pointer_rtx,
/* Keep dataflow from deleting the restores as dead stores.  */
1931 emit_use (restore_regs[i]);
1935 /* Cut back the remainder of the stack. */
1936 if (init_stack_free)
1937 emit_insn (gen_addsi3 (stack_pointer_rtx,
1939 GEN_INT (init_stack_free)));
1942 /* And return or use reti for interrupt handlers. */
1943 if (interrupt_handler)
1945 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1946 emit_insn (gen_callt_return_interrupt ())
1948 emit_jump_insn (gen_return_interrupt ());
1950 else if (actual_fsize)
1951 emit_jump_insn (gen_return_internal ());
1953 emit_jump_insn (gen_return_simple ());
/* Reset the per-function interrupt caching (see
   v850_interrupt_function_p) for the next function.  */
1956 v850_interrupt_cache_p = FALSE;
1957 v850_interrupt_p = FALSE;
1960 /* Update the condition code from the insn. */
/* Implements the cc0-style NOTICE_UPDATE_CC hook: records in the
   global cc_status what INSN (whose pattern is BODY) leaves in the
   condition-code register, so later branches can reuse or must
   recompute it.  (The switch's case labels are elided in this
   excerpt; each comment below documents one get_attr_cc case.)  */
1962 notice_update_cc (rtx body, rtx insn)
1964 switch (get_attr_cc (insn))
1967 /* Insn does not affect CC at all. */
1971 /* Insn does not change CC, but the 0'th operand has been changed. */
/* If the changed operand overlaps a cached CC value, invalidate it.  */
1972 if (cc_status.value1 != 0
1973 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
1974 cc_status.value1 = 0;
1978 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
1979 V,C is in an unusable state. */
1981 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
1982 cc_status.value1 = recog_data.operand[0];
1986 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
1987 C is in an unusable state. */
1989 cc_status.flags |= CC_NO_CARRY;
1990 cc_status.value1 = recog_data.operand[0];
1994 /* The insn is a compare instruction. */
1996 cc_status.value1 = SET_SRC (body);
2000 /* Insn doesn't leave CC in a usable state. */
2009 /* Retrieve the data area that has been chosen for the given decl. */
2012 v850_get_data_area (tree decl)
2014 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2015 return DATA_AREA_SDA;
2017 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2018 return DATA_AREA_TDA;
2020 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2021 return DATA_AREA_ZDA;
2023 return DATA_AREA_NORMAL;
2026 /* Store the indicated data area in the decl's attributes. */
/* Prepend a "sda"/"tda"/"zda" attribute to DECL so that a later
   v850_get_data_area will report DATA_AREA.  (The switch header and
   default case are elided in this excerpt.)  */
2029 v850_set_data_area (tree decl, v850_data_area data_area)
2035 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2036 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2037 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2042 DECL_ATTRIBUTES (decl) = tree_cons
2043 (name, NULL, DECL_ATTRIBUTES (decl));
2046 /* Handle an "interrupt" attribute; arguments as in
2047 struct attribute_spec.handler. */
/* Rejects the attribute (with a warning, not an error) when applied
   to anything other than a function declaration.  */
2049 v850_handle_interrupt_attribute (tree * node,
2051 tree args ATTRIBUTE_UNUSED,
2052 int flags ATTRIBUTE_UNUSED,
2053 bool * no_add_attrs)
2055 if (TREE_CODE (*node) != FUNCTION_DECL)
2057 warning (OPT_Wattributes, "%qE attribute only applies to functions",
/* Setting *no_add_attrs makes the front end drop the attribute.  */
2059 *no_add_attrs = true;
2065 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2066 struct attribute_spec.handler. */
/* Validates a data-area attribute: maps the attribute name to a
   v850_data_area value, forbids its use on local variables, and
   rejects it if it conflicts with a data area chosen by an earlier
   declaration of the same decl.  */
2068 v850_handle_data_area_attribute (tree* node,
2070 tree args ATTRIBUTE_UNUSED,
2071 int flags ATTRIBUTE_UNUSED,
2072 bool * no_add_attrs)
2074 v850_data_area data_area;
2075 v850_data_area area;
2078 /* Implement data area attribute. */
2079 if (is_attribute_p ("sda", name))
2080 data_area = DATA_AREA_SDA;
2081 else if (is_attribute_p ("tda", name))
2082 data_area = DATA_AREA_TDA;
2083 else if (is_attribute_p ("zda", name))
2084 data_area = DATA_AREA_ZDA;
2088 switch (TREE_CODE (decl))
/* A VAR_DECL inside a function body: data areas are only meaningful
   for objects with static storage.  */
2091 if (current_function_decl != NULL_TREE)
2093 error_at (DECL_SOURCE_LOCATION (decl),
2094 "data area attributes cannot be specified for "
2096 *no_add_attrs = true;
/* Check for a conflict with a data area already recorded on DECL.  */
2102 area = v850_get_data_area (decl);
2103 if (area != DATA_AREA_NORMAL && data_area != area)
2105 error ("data area of %q+D conflicts with previous declaration",
2107 *no_add_attrs = true;
2119 /* Return nonzero if FUNC is an interrupt function as specified
2120 by the "interrupt" attribute. */
/* Accepts either the "interrupt_handler" or the "interrupt"
   attribute.  The result is cached in the file-scope pair
   v850_interrupt_p / v850_interrupt_cache_p once it is safe to do so
   (see the reload check below).  */
2123 v850_interrupt_function_p (tree func)
2128 if (v850_interrupt_cache_p)
2129 return v850_interrupt_p;
2131 if (TREE_CODE (func) != FUNCTION_DECL)
2134 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2140 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2141 ret = a != NULL_TREE;
2144 /* Its not safe to trust global variables until after function inlining has
/* Bitwise | is intentional here: both flags are plain ints used as
   booleans, so no short-circuit is needed.  */
2146 if (reload_completed | reload_in_progress)
2147 v850_interrupt_p = ret;
/* Decide which data area DECL belongs to and record the choice in the
   flags of its SYMBOL_REF.  The area comes, in priority order, from:
   an explicit sda/tda/zda attribute, an explicit section name, or the
   -m{zda,sda,tda}=N size thresholds.  */
2154 v850_encode_data_area (tree decl, rtx symbol)
2158 /* Map explicit sections into the appropriate attribute */
2159 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2161 if (DECL_SECTION_NAME (decl))
2163 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
2165 if (streq (name, ".zdata") || streq (name, ".zbss"))
2166 v850_set_data_area (decl, DATA_AREA_ZDA);
2168 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2169 v850_set_data_area (decl, DATA_AREA_SDA);
2171 else if (streq (name, ".tdata"))
2172 v850_set_data_area (decl, DATA_AREA_TDA);
2175 /* If no attribute, support -m{zda,sda,tda}=n */
2178 int size = int_size_in_bytes (TREE_TYPE (decl));
/* Smallest area (TDA) is tried first so an object lands in the
   tightest area whose size limit it satisfies.  */
2182 else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
2183 v850_set_data_area (decl, DATA_AREA_TDA);
2185 else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
2186 v850_set_data_area (decl, DATA_AREA_SDA);
2188 else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
2189 v850_set_data_area (decl, DATA_AREA_ZDA);
/* Nothing to encode for objects left in the normal data area.  */
2192 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2196 flags = SYMBOL_REF_FLAGS (symbol);
2197 switch (v850_get_data_area (decl))
2199 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2200 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2201 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2202 default: gcc_unreachable ();
2204 SYMBOL_REF_FLAGS (symbol) = flags;
2208 v850_encode_section_info (tree decl, rtx rtl, int first)
2210 default_encode_section_info (decl, rtl, first);
2212 if (TREE_CODE (decl) == VAR_DECL
2213 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2214 v850_encode_data_area (decl, XEXP (rtl, 0));
2217 /* Construct a JR instruction to a routine that will perform the equivalent of
2218 the RTL passed in as an argument. This RTL is a function epilogue that
2219 pops registers off the stack and possibly releases some extra stack space
2220 as well. The code has already verified that the RTL matches these
/* Returns a pointer to a static buffer holding the assembly text; the
   caller must use it before the next call to this function.  */
2224 construct_restore_jr (rtx op)
2226 int count = XVECLEN (op, 0);
2228 unsigned long int mask;
2229 unsigned long int first;
2230 unsigned long int last;
2232 static char buff [100]; /* XXX */
2236 error ("bogus JR construction: %d", count);
2240 /* Work out how many bytes to pop off the stack before retrieving
2242 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2243 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2244 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2246 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2248 /* Each pop will remove 4 bytes from the stack.... */
2249 stack_bytes -= (count - 2) * 4;
/* After accounting for the pops the net adjustment must be zero;
   the out-of-line __return_* helpers handle no extra deallocation.  */
2252 if (stack_bytes != 0)
2254 error ("bad amount of stack space removal: %d", stack_bytes);
2258 /* Now compute the bit mask of registers to push. */
2260 for (i = 2; i < count; i++)
2262 rtx vector_element = XVECEXP (op, 0, i);
2264 gcc_assert (GET_CODE (vector_element) == SET);
2265 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2266 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2269 mask |= 1 << REGNO (SET_DEST (vector_element));
2272 /* Scan for the first register to pop. */
2273 for (first = 0; first < 32; first++)
2275 if (mask & (1 << first))
2279 gcc_assert (first < 32);
2281 /* Discover the last register to pop. */
2282 if (mask & (1 << LINK_POINTER_REGNUM))
2284 last = LINK_POINTER_REGNUM;
2288 gcc_assert (!stack_bytes);
2289 gcc_assert (mask & (1 << 29));
2294 /* Note, it is possible to have gaps in the register mask.
2295 We ignore this here, and generate a JR anyway. We will
2296 be popping more registers than is strictly necessary, but
2297 it does save code space. */
/* With -mlong-calls the helper may be out of JR range, so load its
   address into r6 and jump indirectly.  */
2299 if (TARGET_LONG_CALLS)
2304 sprintf (name, "__return_%s", reg_names [first]);
2306 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2308 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2314 sprintf (buff, "jr __return_%s", reg_names [first]);
2316 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2323 /* Construct a JARL instruction to a routine that will perform the equivalent
2324 of the RTL passed as a parameter. This RTL is a function prologue that
2325 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2326 some stack space as well. The code has already verified that the RTL
2327 matches these requirements. */
/* Returns a pointer to a static buffer holding the assembly text; the
   caller must use it before the next call to this function.  */
2329 construct_save_jarl (rtx op)
2331 int count = XVECLEN (op, 0);
2333 unsigned long int mask;
2334 unsigned long int first;
2335 unsigned long int last;
2337 static char buff [100]; /* XXX */
/* With -mlong-calls the PARALLEL carries an extra clobber, hence the
   different minimum element count.  */
2339 if (count <= (TARGET_LONG_CALLS ? 3 : 2))
2341 error ("bogus JARL construction: %d", count);
2346 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2347 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2348 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2349 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2351 /* Work out how many bytes to push onto the stack after storing the
2353 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2355 /* Each push will put 4 bytes from the stack.... */
2356 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
/* After accounting for the pushes the net adjustment must be zero;
   the out-of-line __save_* helpers handle no extra allocation.
   NOTE(review): the error text below says "removal" even though this
   is the save path — kept as-is since it is a runtime string.  */
2359 if (stack_bytes != 0)
2361 error ("bad amount of stack space removal: %d", stack_bytes);
2365 /* Now compute the bit mask of registers to push. */
2367 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2369 rtx vector_element = XVECEXP (op, 0, i);
2371 gcc_assert (GET_CODE (vector_element) == SET);
2372 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2373 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2376 mask |= 1 << REGNO (SET_SRC (vector_element));
2379 /* Scan for the first register to push. */
2380 for (first = 0; first < 32; first++)
2382 if (mask & (1 << first))
2386 gcc_assert (first < 32);
2388 /* Discover the last register to push. */
2389 if (mask & (1 << LINK_POINTER_REGNUM))
2391 last = LINK_POINTER_REGNUM;
2395 gcc_assert (!stack_bytes);
2396 gcc_assert (mask & (1 << 29));
2401 /* Note, it is possible to have gaps in the register mask.
2402 We ignore this here, and generate a JARL anyway. We will
2403 be pushing more registers than is strictly necessary, but
2404 it does save code space. */
/* Long-call variant: load the helper's address into r11, compute the
   return address into r10 by hand, then jump.  */
2406 if (TARGET_LONG_CALLS)
2411 sprintf (name, "__save_%s", reg_names [first]);
2413 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2415 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2421 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2423 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2430 extern tree last_assemble_variable_decl;
2431 extern int size_directive_output;
2433 /* A version of asm_output_aligned_bss() that copes with the special
2434 data areas of the v850. */
/* Selects the zbss/sbss/tdata section matching DECL's data area
   before emitting the alignment, label, and skip directives.  (The
   switch's case labels are elided in this excerpt.)  */
2436 v850_output_aligned_bss (FILE * file,
2439 unsigned HOST_WIDE_INT size,
2442 switch (v850_get_data_area (decl))
2445 switch_to_section (zbss_section);
2449 switch_to_section (sbss_section);
/* The TDA area has no separate bss section, so tdata is used.  */
2453 switch_to_section (tdata_section);
2456 switch_to_section (bss_section);
2460 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2461 #ifdef ASM_DECLARE_OBJECT_NAME
2462 last_assemble_variable_decl = decl;
2463 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2465 /* Standard thing is just output label for the object. */
2466 ASM_OUTPUT_LABEL (file, name);
2467 #endif /* ASM_DECLARE_OBJECT_NAME */
/* Emit at least one byte so the object has a distinct address.  */
2468 ASM_OUTPUT_SKIP (file, size ? size : 1);
2471 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
/* Emits the data-area-specific .comm variant (.zcomm/.scomm/.tcomm)
   followed by "name,size,alignment-in-bytes".  (The switch's case
   labels are elided in this excerpt.)  */
2473 v850_output_common (FILE * file,
2479 if (decl == NULL_TREE)
2481 fprintf (file, "%s", COMMON_ASM_OP);
2485 switch (v850_get_data_area (decl))
2488 fprintf (file, "%s", ZCOMMON_ASM_OP);
2492 fprintf (file, "%s", SCOMMON_ASM_OP);
2496 fprintf (file, "%s", TCOMMON_ASM_OP);
2500 fprintf (file, "%s", COMMON_ASM_OP);
2505 assemble_name (file, name);
2506 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2509 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
/* Emits a .local directive for NAME, then delegates to the common
   output path so the data-area-specific .comm variant is used.
   (The remaining parameter declarations are elided in this excerpt.)  */
2511 v850_output_local (FILE * file,
2517 fprintf (file, "%s", LOCAL_ASM_OP);
2518 assemble_name (file, name);
2519 fprintf (file, "\n");
2521 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2524 /* Add data area to the given declaration if a ghs data area pragma is
2525 currently in effect (#pragma ghs startXXX/endXXX). */
/* Also (for GHS section pragmas) picks a GHS section kind for the
   decl and attaches the corresponding section name.  */
2527 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
2530 && data_area_stack->data_area
2531 && current_function_decl == NULL_TREE
2532 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2533 && v850_get_data_area (decl) == DATA_AREA_NORMAL)
2534 v850_set_data_area (decl, data_area_stack->data_area)
2536 /* Initialize the default names of the v850 specific sections,
2537 if this has not been done before. */
/* Lazily initialized: a non-NULL SDATA entry implies all five
   defaults have been set.  */
2539 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2541 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2542 = build_string (sizeof (".sdata")-1, ".sdata");
2544 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2545 = build_string (sizeof (".rosdata")-1, ".rosdata");
2547 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2548 = build_string (sizeof (".tdata")-1, ".tdata");
2550 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2551 = build_string (sizeof (".zdata")-1, ".zdata");
2553 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2554 = build_string (sizeof (".rozdata")-1, ".rozdata");
/* Only file-scope definitions (or externs with initializers) that do
   not already carry a section name get one chosen here.  */
2557 if (current_function_decl == NULL_TREE
2558 && (TREE_CODE (decl) == VAR_DECL
2559 || TREE_CODE (decl) == CONST_DECL
2560 || TREE_CODE (decl) == FUNCTION_DECL)
2561 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2562 && !DECL_SECTION_NAME (decl))
2564 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2565 tree chosen_section;
2567 if (TREE_CODE (decl) == FUNCTION_DECL)
2568 kind = GHS_SECTION_KIND_TEXT;
2571 /* First choose a section kind based on the data area of the decl. */
2572 switch (v850_get_data_area (decl))
2578 kind = ((TREE_READONLY (decl))
2579 ? GHS_SECTION_KIND_ROSDATA
2580 : GHS_SECTION_KIND_SDATA);
2584 kind = GHS_SECTION_KIND_TDATA;
2588 kind = ((TREE_READONLY (decl))
2589 ? GHS_SECTION_KIND_ROZDATA
2590 : GHS_SECTION_KIND_ZDATA);
2593 case DATA_AREA_NORMAL: /* default data area */
2594 if (TREE_READONLY (decl))
2595 kind = GHS_SECTION_KIND_RODATA;
2596 else if (DECL_INITIAL (decl))
2597 kind = GHS_SECTION_KIND_DATA;
2599 kind = GHS_SECTION_KIND_BSS;
2603 /* Now, if the section kind has been explicitly renamed,
2604 then attach a section attribute. */
2605 chosen_section = GHS_current_section_names [(int) kind];
2607 /* Otherwise, if this kind of section needs an explicit section
2608 attribute, then also attach one. */
2609 if (chosen_section == NULL)
2610 chosen_section = GHS_default_section_names [(int) kind];
2614 /* Only set the section name if specified by a pragma, because
2615 otherwise it will force those variables to get allocated storage
2616 in this module, rather than by the linker. */
2617 DECL_SECTION_NAME (decl) = chosen_section;
2622 /* Construct a DISPOSE instruction that is the equivalent of
2623 the given RTX. We have already verified that this should
/* Returns a pointer to a static buffer holding the assembly text
   (either a "callt" to an out-of-line helper or a "dispose"
   instruction with an explicit register list).  */
2627 construct_dispose_instruction (rtx op)
2629 int count = XVECLEN (op, 0);
2631 unsigned long int mask;
2633 static char buff[ 100 ]; /* XXX */
2638 error ("bogus DISPOSE construction: %d", count);
2642 /* Work out how many bytes to pop off the
2643 stack before retrieving registers. */
2644 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2645 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2646 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2648 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2650 /* Each pop will remove 4 bytes from the stack.... */
2651 stack_bytes -= (count - 2) * 4;
2653 /* Make sure that the amount we are popping
2654 will fit into the DISPOSE instruction. */
2655 if (stack_bytes > 128)
2657 error ("too much stack space to dispose of: %d", stack_bytes);
2661 /* Now compute the bit mask of registers to push. */
2664 for (i = 2; i < count; i++)
2666 rtx vector_element = XVECEXP (op, 0, i);
2668 gcc_assert (GET_CODE (vector_element) == SET);
2669 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2670 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
/* r2 is handled by the callt helper naming, not by the mask.  */
2673 if (REGNO (SET_DEST (vector_element)) == 2)
2676 mask |= 1 << REGNO (SET_DEST (vector_element));
2679 if (! TARGET_DISABLE_CALLT
2680 && (use_callt || stack_bytes == 0))
2684 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
/* Find the lowest saved register in r20..r31 to name the helper.  */
2689 for (i = 20; i < 32; i++)
2690 if (mask & (1 << i))
2694 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2696 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2697 i, (mask & (1 << 31)) ? "31c" : "29");
2702 static char regs [100]; /* XXX */
2705 /* Generate the DISPOSE instruction. Note we could just issue the
2706 bit mask as a number as the assembler can cope with this, but for
2707 the sake of our readers we turn it into a textual description. */
2711 for (i = 20; i < 32; i++)
2713 if (mask & (1 << i))
2718 strcat (regs, ", ");
2723 strcat (regs, reg_names[ first ]);
/* Extend the run to the last consecutive saved register so ranges
   print as "rA - rB".  */
2725 for (i++; i < 32; i++)
2726 if ((mask & (1 << i)) == 0)
2731 strcat (regs, " - ");
2732 strcat (regs, reg_names[ i - 1 ] );
2737 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2743 /* Construct a PREPARE instruction that is the equivalent of
2744 the given RTL. We have already verified that this should
/* Returns a pointer to a static buffer holding the assembly text
   (either a "callt" to an out-of-line helper or a "prepare"
   instruction with an explicit register list).  */
2748 construct_prepare_instruction (rtx op)
2752 unsigned long int mask;
2754 static char buff[ 100 ]; /* XXX */
2757 if (XVECLEN (op, 0) <= 1)
/* NOTE(review): "PREPEARE" below is a typo for "PREPARE"; it is a
   runtime diagnostic string, so fixing it is a behavior change to be
   made deliberately, not silently.  */
2759 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2763 /* Work out how many bytes to push onto
2764 the stack after storing the registers. */
2765 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2766 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2767 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2769 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2772 /* Make sure that the amount we are popping
2773 will fit into the DISPOSE instruction. */
2774 if (stack_bytes < -128)
2776 error ("too much stack space to prepare: %d", stack_bytes);
2780 /* Now compute the bit mask of registers to push. */
2783 for (i = 1; i < XVECLEN (op, 0); i++)
2785 rtx vector_element = XVECEXP (op, 0, i);
/* Clobbers in the PARALLEL carry no register-save information.  */
2787 if (GET_CODE (vector_element) == CLOBBER)
2790 gcc_assert (GET_CODE (vector_element) == SET);
2791 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2792 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
/* r2 is handled by the callt helper naming, not by the mask.  */
2795 if (REGNO (SET_SRC (vector_element)) == 2)
2798 mask |= 1 << REGNO (SET_SRC (vector_element));
2802 stack_bytes += count * 4;
2804 if ((! TARGET_DISABLE_CALLT)
2805 && (use_callt || stack_bytes == 0))
2809 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
/* Find the lowest saved register in r20..r31 to name the helper.  */
2813 for (i = 20; i < 32; i++)
2814 if (mask & (1 << i))
2818 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2820 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2821 i, (mask & (1 << 31)) ? "31c" : "29");
2825 static char regs [100]; /* XXX */
2829 /* Generate the PREPARE instruction. Note we could just issue the
2830 bit mask as a number as the assembler can cope with this, but for
2831 the sake of our readers we turn it into a textual description. */
2835 for (i = 20; i < 32; i++)
2837 if (mask & (1 << i))
2842 strcat (regs, ", ");
2847 strcat (regs, reg_names[ first ]);
/* Extend the run to the last consecutive saved register so ranges
   print as "rA - rB".  */
2849 for (i++; i < 32; i++)
2850 if ((mask & (1 << i)) == 0)
2855 strcat (regs, " - ");
2856 strcat (regs, reg_names[ i - 1 ] );
2861 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2867 /* Return an RTX indicating where the return address to the
2868 calling function can be found. */
/* NOTE(review): the handling of COUNT != 0 (frames above the current
   one) is not visible in this excerpt — confirm against the full
   source before assuming COUNT is ignored.  */
2871 v850_return_addr (int count)
2876 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2879 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Creates the five v850-specific sections (rosdata, rozdata, tdata,
   zdata, zbss).  The assignment targets for each get_unnamed_section
   call are elided in this excerpt; they are the GTY'd file-scope
   section pointers declared at the top of the file.  */
2882 v850_asm_init_sections (void)
2885 = get_unnamed_section (0, output_section_asm_op,
2886 "\t.section .rosdata,\"a\"");
2889 = get_unnamed_section (0, output_section_asm_op,
2890 "\t.section .rozdata,\"a\"");
2893 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2894 "\t.section .tdata,\"aw\"");
2897 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2898 "\t.section .zdata,\"aw\"");
2901 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2902 output_section_asm_op,
2903 "\t.section .zbss,\"aw\"");
/* Implement TARGET_ASM_SELECT_SECTION: pick the output section for
   EXP based on its data area and on whether it is a constant
   (read-only, side-effect free, with a constant initializer).  (The
   switch's case labels are elided in this excerpt.)  */
2907 v850_select_section (tree exp,
2908 int reloc ATTRIBUTE_UNUSED,
2909 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2911 if (TREE_CODE (exp) == VAR_DECL)
/* A decl is "const" for section purposes only if it is read-only,
   has no side effects, and its initializer is a known constant.  */
2914 if (!TREE_READONLY (exp)
2915 || TREE_SIDE_EFFECTS (exp)
2916 || !DECL_INITIAL (exp)
2917 || (DECL_INITIAL (exp) != error_mark_node
2918 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2923 switch (v850_get_data_area (exp))
2926 return is_const ? rozdata_section : zdata_section;
2929 return tdata_section;
2932 return is_const ? rosdata_section : sdata_section;
2935 return is_const ? readonly_data_section : data_section;
/* Non-VAR_DECLs (e.g. constant pool entries) go to rodata.  */
2938 return readonly_data_section;
/* Implement TARGET_FUNCTION_VALUE_REGNO_P: on the v850, r10 is the
   only register that can carry a function's return value.  */

static bool
v850_function_value_regno_p (const unsigned int regno)
{
  return regno == 10;
}
2949 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2952 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2954 /* Return values > 8 bytes in length in memory. */
2955 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
2958 /* Worker function for TARGET_FUNCTION_VALUE. */
2961 v850_function_value (const_tree valtype,
2962 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
2963 bool outgoing ATTRIBUTE_UNUSED)
2965 return gen_rtx_REG (TYPE_MODE (valtype), 10);
2969 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2972 v850_setup_incoming_varargs (cumulative_args_t ca,
2973 enum machine_mode mode ATTRIBUTE_UNUSED,
2974 tree type ATTRIBUTE_UNUSED,
2975 int *pretend_arg_size ATTRIBUTE_UNUSED,
2976 int second_time ATTRIBUTE_UNUSED)
2978 get_cumulative_args (ca)->anonymous_args = (!TARGET_GHS ? 1 : 0);
2981 /* Worker function for TARGET_CAN_ELIMINATE. */
2984 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2986 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
2989 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
2991 If TARGET_APP_REGS is not defined then add r2 and r5 to
2992 the pool of fixed registers. See PR 14505. */
/* r2 and r5 default to fixed; with -mapp-regs they are released for
   general use (r2 callee-saved, r5 caller-saved).  */
2995 v850_conditional_register_usage (void)
2997 if (TARGET_APP_REGS)
2999 fixed_regs[2] = 0; call_used_regs[2] = 0;
3000 fixed_regs[5] = 0; call_used_regs[5] = 1;
/* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE.  Emit the fixed
   instruction sequence used as the trampoline template; the two
   trailing .long words are patched at run time by
   v850_trampoline_init (static chain at offset 16, target address at
   offset 20).  */

static void
v850_asm_trampoline_template (FILE *f)
{
  static const char *const template_lines[] =
    {
      "\tjarl .+4,r12\n",
      "\tld.w 12[r12],r20\n",
      "\tld.w 16[r12],r12\n",
      "\tjmp [r12]\n",
      "\tnop\n",
      "\t.long 0\n",
      "\t.long 0\n"
    };
  size_t i;

  for (i = 0; i < sizeof template_lines / sizeof template_lines[0]; i++)
    fputs (template_lines[i], f);
}
3018 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3021 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3023 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3025 emit_block_move (m_tramp, assemble_trampoline_template (),
3026 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3028 mem = adjust_address (m_tramp, SImode, 16);
3029 emit_move_insn (mem, chain_value);
3030 mem = adjust_address (m_tramp, SImode, 20);
3031 emit_move_insn (mem, fnaddr);
3035 v850_issue_rate (void)
3037 return (TARGET_V850E2_ALL? 2 : 1);
3040 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3043 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3045 return (GET_CODE (x) == CONST_DOUBLE
3046 || !(GET_CODE (x) == CONST
3047 && GET_CODE (XEXP (x, 0)) == PLUS
3048 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3049 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3050 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
/* Implement TARGET_MEMORY_MOVE_COST: relative cost of moving MODE
   between a register and memory.  (The switch's small-size case
   labels and their returns are elided in this excerpt; only the
   fall-back arm for wide modes is visible.)  */
3054 v850_memory_move_cost (enum machine_mode mode,
3055 reg_class_t reg_class ATTRIBUTE_UNUSED,
3058 switch (GET_MODE_SIZE (mode))
/* Wide modes: cost scales with the number of 2-byte pieces; loads
   (in) are costed higher than stores.  */
3068 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3072 /* V850 specific attributes. */
/* All five attributes require a decl (decl_req) and take no
   arguments; "interrupt_handler" and "interrupt" are synonyms.  */
3074 static const struct attribute_spec v850_attribute_table[] =
3076 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3077 affects_type_identity } */
3078 { "interrupt_handler", 0, 0, true, false, false,
3079 v850_handle_interrupt_attribute, false },
3080 { "interrupt", 0, 0, true, false, false,
3081 v850_handle_interrupt_attribute, false },
3082 { "sda", 0, 0, true, false, false,
3083 v850_handle_data_area_attribute, false },
3084 { "tda", 0, 0, true, false, false,
3085 v850_handle_data_area_attribute, false },
3086 { "zda", 0, 0, true, false, false,
3087 v850_handle_data_area_attribute, false },
/* Sentinel terminating the table.  */
3088 { NULL, 0, 0, false, false, false, NULL, false }
3091 /* Initialize the GCC target structure. */
/* Each #undef/#define pair overrides one hook in TARGET_INITIALIZER
   with its v850-specific implementation defined above.  */
3093 #undef TARGET_MEMORY_MOVE_COST
3094 #define TARGET_MEMORY_MOVE_COST v850_memory_move_cost
/* Assembly output hooks.  */
3096 #undef TARGET_ASM_ALIGNED_HI_OP
3097 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3099 #undef TARGET_PRINT_OPERAND
3100 #define TARGET_PRINT_OPERAND v850_print_operand
3101 #undef TARGET_PRINT_OPERAND_ADDRESS
3102 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3103 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3104 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3106 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3107 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
/* Attribute and section handling.  */
3109 #undef TARGET_ATTRIBUTE_TABLE
3110 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3112 #undef TARGET_INSERT_ATTRIBUTES
3113 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3115 #undef TARGET_ASM_SELECT_SECTION
3116 #define TARGET_ASM_SELECT_SECTION v850_select_section
3118 /* The assembler supports switchable .bss sections, but
3119 v850_select_section doesn't yet make use of them. */
3120 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3121 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3123 #undef TARGET_ENCODE_SECTION_INFO
3124 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3126 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3127 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
/* Costs and scheduling.  */
3129 #undef TARGET_RTX_COSTS
3130 #define TARGET_RTX_COSTS v850_rtx_costs
3132 #undef TARGET_ADDRESS_COST
3133 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
3135 #undef TARGET_MACHINE_DEPENDENT_REORG
3136 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3138 #undef TARGET_SCHED_ISSUE_RATE
3139 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
/* Calling conventions and argument passing.  */
3141 #undef TARGET_FUNCTION_VALUE_REGNO_P
3142 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3143 #undef TARGET_FUNCTION_VALUE
3144 #define TARGET_FUNCTION_VALUE v850_function_value
3146 #undef TARGET_PROMOTE_PROTOTYPES
3147 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3149 #undef TARGET_RETURN_IN_MEMORY
3150 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3152 #undef TARGET_PASS_BY_REFERENCE
3153 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3155 #undef TARGET_CALLEE_COPIES
3156 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3158 #undef TARGET_SETUP_INCOMING_VARARGS
3159 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
3161 #undef TARGET_ARG_PARTIAL_BYTES
3162 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3164 #undef TARGET_FUNCTION_ARG
3165 #define TARGET_FUNCTION_ARG v850_function_arg
3167 #undef TARGET_FUNCTION_ARG_ADVANCE
3168 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
/* Frame, registers, and trampolines.  */
3170 #undef TARGET_CAN_ELIMINATE
3171 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3173 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3174 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3176 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3177 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3178 #undef TARGET_TRAMPOLINE_INIT
3179 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3181 #undef TARGET_STRICT_ARGUMENT_NAMING
3182 #define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming
3184 #undef TARGET_LEGITIMATE_CONSTANT_P
3185 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
/* Instantiate the target hook vector with the overrides above.  */
3187 struct gcc_target targetm = TARGET_INITIALIZER;
3189 #include "gt-v850.h"