1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
38 #include "diagnostic-core.h"
40 #include "integrate.h"
43 #include "target-def.h"
48 #define streq(a,b) (strcmp (a, b) == 0)
51 static void v850_print_operand_address (FILE *, rtx);
53 /* Information about the various small memory areas. */
54 static const int small_memory_physical_max[(int) SMALL_MEMORY_max] =
/* NOTE(review): the initializer list of this table is not visible in this
   listing (lines elided) -- verify against the original source.  */
61 /* Names of the various data areas used on the v850. */
62 tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
63 tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
65 /* Track the current data area set by the data area pragma (which
66 can be nested). Tested by check_default_data_area. */
67 data_area_stack_element * data_area_stack = NULL;
69 /* True if we don't need to check any more if the current
70 function is an interrupt handler. */
71 static int v850_interrupt_cache_p = FALSE;
/* Operands of a pending comparison -- presumably saved by the compare/branch
   expanders in the machine description; TODO confirm.  */
73 rtx v850_compare_op0, v850_compare_op1;
75 /* Whether current function is an interrupt handler. */
76 static int v850_interrupt_p = FALSE;
/* Cached section objects for the V850 small-data areas (GC-rooted).  */
78 static GTY(()) section * rosdata_section;
79 static GTY(()) section * rozdata_section;
80 static GTY(()) section * tdata_section;
81 static GTY(()) section * zdata_section;
82 static GTY(()) section * zbss_section;
84 /* Set the maximum size of small memory area TYPE to the value given
85 by SIZE in structure OPTS (option text OPT passed at location LOC). */
88 v850_handle_memory_option (enum small_memory_type type,
89 struct gcc_options *opts, const char *opt,
90 int size, location_t loc)
/* Diagnose a request beyond the physical limit of the area, then record
   the new maximum.  NOTE(review): the recorded value appears to be stored
   even after the error -- confirm that is intentional in the original.  */
92 if (size > small_memory_physical_max[type])
93 error_at (loc, "value passed in %qs is too large", opt);
95 opts->x_small_memory_max[type] = size;
98 /* Implement TARGET_HANDLE_OPTION. */
101 v850_handle_option (struct gcc_options *opts,
102 struct gcc_options *opts_set ATTRIBUTE_UNUSED,
103 const struct cl_decoded_option *decoded,
106 size_t code = decoded->opt_index;
107 int value = decoded->value;
/* NOTE(review): the switch on CODE and its case labels are elided from this
   listing; the lines below are the per-option actions.  */
/* Enable the EP pointer optimization and out-of-line prologues.  */
112 opts->x_target_flags |= MASK_EP | MASK_PROLOG_FUNCTION;
/* Select the base V850 CPU: clear all CPU bits except MASK_V850.  */
116 opts->x_target_flags &= ~(MASK_CPU ^ MASK_V850);
/* Select the V850E CPU.  */
121 opts->x_target_flags &= ~(MASK_CPU ^ MASK_V850E);
/* -mtda=/-msda=/-mzda=: delegate to the small-memory-area size handler.  */
125 v850_handle_memory_option (SMALL_MEMORY_TDA, opts,
126 decoded->orig_option_with_args_text,
131 v850_handle_memory_option (SMALL_MEMORY_SDA, opts,
132 decoded->orig_option_with_args_text,
137 v850_handle_memory_option (SMALL_MEMORY_ZDA, opts,
138 decoded->orig_option_with_args_text,
147 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
149 static const struct default_options v850_option_optimization_table[] =
151 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
152 /* Note - we no longer enable MASK_EP when optimizing. This is
153 because of a hardware bug which stops the SLD and SST instructions
154 from correctly detecting some hazards. If the user is sure that
155 their hardware is fixed or that their program will not encounter
156 the conditions that trigger the bug then they can enable -mep by
158 { OPT_LEVELS_1_PLUS, OPT_mprolog_function, NULL, 1 },
/* Table terminator.  */
159 { OPT_LEVELS_NONE, 0, NULL, 0 }
162 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
163 Specify whether to pass the argument by reference. */
166 v850_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
167 enum machine_mode mode, const_tree type,
168 bool named ATTRIBUTE_UNUSED)
170 unsigned HOST_WIDE_INT size;
/* Use the type size when TYPE is available; fall back to the mode size
   for libcalls.  The final size test/return is elided from this listing.  */
173 size = int_size_in_bytes (type);
175 size = GET_MODE_SIZE (mode);
180 /* Implementing the Varargs Macros. */
/* TARGET_STRICT_ARGUMENT_NAMING hook: strict naming everywhere except
   under the GHS ABI.  */
183 v850_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
185 return !TARGET_GHS ? true : false;
188 /* Return an RTX to represent where an argument with mode MODE
189 and type TYPE will be passed to a function. If the result
190 is NULL_RTX, the argument will be pushed. */
193 v850_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
194 const_tree type, bool named)
196 rtx result = NULL_RTX;
/* Compute the argument size, rounded up to a whole number of words.  */
203 size = int_size_in_bytes (type);
205 size = GET_MODE_SIZE (mode);
207 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
211 /* Once we have stopped using argument registers, do not start up again. */
212 cum->nbytes = 4 * UNITS_PER_WORD;
/* Small typed arguments are aligned per their type; align the running
   byte count accordingly.  */
216 if (size <= UNITS_PER_WORD && type)
217 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
221 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
/* Only the first four words (r6-r9) are passed in registers.  */
223 if (cum->nbytes > 4 * UNITS_PER_WORD)
226 if (type == NULL_TREE
227 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Map word offset 0..3 to argument registers r6..r9.  */
230 switch (cum->nbytes / UNITS_PER_WORD)
233 result = gen_rtx_REG (mode, 6);
236 result = gen_rtx_REG (mode, 7);
239 result = gen_rtx_REG (mode, 8);
242 result = gen_rtx_REG (mode, 9);
251 /* Return the number of bytes which must be put into registers
252 for values which are part in registers and part in memory. */
254 v850_arg_partial_bytes (CUMULATIVE_ARGS * cum, enum machine_mode mode,
255 tree type, bool named)
/* Under the GHS ABI unnamed (variadic) arguments never use registers.  */
259 if (TARGET_GHS && !named)
263 size = int_size_in_bytes (type);
265 size = GET_MODE_SIZE (mode);
/* Align the running byte count the same way v850_function_arg does.  */
271 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
275 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
/* Wholly in memory, wholly in registers, or untyped overflow: no split.  */
277 if (cum->nbytes > 4 * UNITS_PER_WORD)
280 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
283 if (type == NULL_TREE
284 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Otherwise the register portion is whatever is left of r6-r9.  */
287 return 4 * UNITS_PER_WORD - cum->nbytes;
290 /* Update the data in CUM to advance over an argument
291 of mode MODE and data type TYPE.
292 (TYPE is null for libcalls where that information may not be available.) */
295 v850_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
296 const_tree type, bool named ATTRIBUTE_UNUSED)
/* Large (> 8 byte) typed arguments advance by a pointer's worth --
   they are passed by reference; everything else advances by its
   word-rounded size.  */
298 cum->nbytes += (((type && int_size_in_bytes (type) > 8
299 ? GET_MODE_SIZE (Pmode)
301 ? GET_MODE_SIZE (mode)
302 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
306 /* Return the high and low words of a CONST_DOUBLE */
309 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
311 if (GET_CODE (x) == CONST_DOUBLE)
/* Dispatch on the mode: DFmode splits into two target words, SFmode
   yields a single word, integer CONST_DOUBLEs use the stored halves.  */
316 switch (GET_MODE (x))
319 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
320 REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
321 *p_high = t[1]; /* since v850 is little endian */
322 *p_low = t[0]; /* high is second word */
326 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
327 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
333 *p_high = CONST_DOUBLE_HIGH (x);
334 *p_low = CONST_DOUBLE_LOW (x);
/* Anything that is not a CONST_DOUBLE is a hard error.  */
342 fatal_insn ("const_double_split got a bad insn:", x);
346 /* Return the cost of the rtx R with code CODE. */
/* Cost of loading integer VALUE: smaller immediates are cheaper;
   ZERO_COST is the cost charged when VALUE fits the tightest class.  */
349 const_costs_int (HOST_WIDE_INT value, int zero_cost)
351 if (CONST_OK_FOR_I (value))
353 else if (CONST_OK_FOR_J (value))
355 else if (CONST_OK_FOR_K (value))
/* Cost of a constant rtx R appearing under code C.  */
362 const_costs (rtx r, enum rtx_code c)
364 HOST_WIDE_INT high, low;
369 return const_costs_int (INTVAL (r), 0);
/* CONST_DOUBLE: SFmode needs one word, DFmode needs both halves.  */
372 const_double_split (r, &high, &low);
373 if (GET_MODE (r) == SFmode)
374 return const_costs_int (high, 1);
376 return const_costs_int (high, 1) + const_costs_int (low, 1);
/* TARGET_RTX_COSTS hook: estimate the cost of rtx X.  */
392 v850_rtx_costs (rtx x,
394 int outer_code ATTRIBUTE_UNUSED,
395 int * total, bool speed)
397 enum rtx_code code = (enum rtx_code) codearg;
/* Constants: delegate to const_costs.  */
406 *total = COSTS_N_INSNS (const_costs (x, code));
/* NOTE(review): case labels between the fragments below are elided from
   this listing.  */
413 if (TARGET_V850E && !speed)
/* Shifts of narrow integer modes: register counts as one insn, small
   immediates are cheap.  */
421 && ( GET_MODE (x) == SImode
422 || GET_MODE (x) == HImode
423 || GET_MODE (x) == QImode))
425 if (GET_CODE (XEXP (x, 1)) == REG)
427 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
429 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
431 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
440 if (outer_code == COMPARE)
449 /* Print operand X using operand code CODE to assembly language output file
/* Implements TARGET_PRINT_OPERAND.  Codes include condition suffixes
   ('b'/'B'/'c'/'C'), CONST_DOUBLE halves ('F'/'G'), small-data-area
   offsets ('O') and base registers ('Q'), second word of a double ('R'),
   short load/store selection ('S'/'T'), mode suffix ('W'), r0 ('.') and
   reg-or-zero ('z').  NOTE(review): many case labels and break statements
   are elided from this listing.  */
453 v850_print_operand (FILE * file, rtx x, int code)
455 HOST_WIDE_INT high, low;
460 /* We use 'c' operands with symbols for .vtinherit */
461 if (GET_CODE (x) == SYMBOL_REF)
463 output_addr_const(file, x);
/* Condition codes: 'B'/'C' print the reversed condition.  */
470 switch ((code == 'B' || code == 'C')
471 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
474 if (code == 'c' || code == 'C')
475 fprintf (file, "nz");
477 fprintf (file, "ne");
480 if (code == 'c' || code == 'C')
486 fprintf (file, "ge");
489 fprintf (file, "gt");
492 fprintf (file, "le");
495 fprintf (file, "lt");
498 fprintf (file, "nl");
504 fprintf (file, "nh");
513 case 'F': /* high word of CONST_DOUBLE */
514 switch (GET_CODE (x))
517 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
521 const_double_split (x, &high, &low);
522 fprintf (file, "%ld", (long) high);
529 case 'G': /* low word of CONST_DOUBLE */
530 switch (GET_CODE (x))
533 fprintf (file, "%ld", (long) INTVAL (x));
537 const_double_split (x, &high, &low);
538 fprintf (file, "%ld", (long) low);
/* Low 16 bits of a constant.  NOTE(review): the trailing "\n" here looks
   suspicious for an operand printer -- verify against the original.  */
546 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
/* log2 of a power-of-two constant.  */
549 fprintf (file, "%d", exact_log2 (INTVAL (x)));
/* 'O': print the offset directive for a small-data-area symbol.  */
552 gcc_assert (special_symbolref_operand (x, VOIDmode));
554 if (GET_CODE (x) == CONST)
555 x = XEXP (XEXP (x, 0), 0);
557 gcc_assert (GET_CODE (x) == SYMBOL_REF);
559 if (SYMBOL_REF_ZDA_P (x))
560 fprintf (file, "zdaoff");
561 else if (SYMBOL_REF_SDA_P (x))
562 fprintf (file, "sdaoff");
563 else if (SYMBOL_REF_TDA_P (x))
564 fprintf (file, "tdaoff");
569 gcc_assert (special_symbolref_operand (x, VOIDmode));
570 output_addr_const (file, x);
/* 'Q': print the base register matching the symbol's data area.  */
573 gcc_assert (special_symbolref_operand (x, VOIDmode));
575 if (GET_CODE (x) == CONST)
576 x = XEXP (XEXP (x, 0), 0);
578 gcc_assert (GET_CODE (x) == SYMBOL_REF);
580 if (SYMBOL_REF_ZDA_P (x))
581 fprintf (file, "r0");
582 else if (SYMBOL_REF_SDA_P (x))
583 fprintf (file, "gp");
584 else if (SYMBOL_REF_TDA_P (x))
585 fprintf (file, "ep");
589 case 'R': /* 2nd word of a double. */
590 switch (GET_CODE (x))
593 fprintf (file, reg_names[REGNO (x) + 1]);
596 x = XEXP (adjust_address (x, SImode, 4), 0);
597 v850_print_operand_address (file, x);
598 if (GET_CODE (x) == CONST_INT)
599 fprintf (file, "[r0]");
608 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
609 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
616 /* Like an 'S' operand above, but for unsigned loads only. */
617 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
622 case 'W': /* print the instruction suffix */
623 switch (GET_MODE (x))
628 case QImode: fputs (".b", file); break;
629 case HImode: fputs (".h", file); break;
630 case SImode: fputs (".w", file); break;
631 case SFmode: fputs (".w", file); break;
634 case '.': /* register r0 */
635 fputs (reg_names[0], file);
637 case 'z': /* reg or zero */
638 if (GET_CODE (x) == REG)
639 fputs (reg_names[REGNO (x)], file);
640 else if ((GET_MODE(x) == SImode
641 || GET_MODE(x) == DFmode
642 || GET_MODE(x) == SFmode)
643 && x == CONST0_RTX(GET_MODE(x)))
644 fputs (reg_names[0], file);
647 gcc_assert (x == const0_rtx);
648 fputs (reg_names[0], file);
/* Default: print the operand as a plain register/memory/address.  */
652 switch (GET_CODE (x))
655 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
656 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
659 output_address (XEXP (x, 0));
663 fputs (reg_names[REGNO (x)], file);
666 fputs (reg_names[subreg_regno (x)], file);
673 v850_print_operand_address (file, x);
684 /* Output assembly language output for the address ADDR to FILE. */
/* Implements TARGET_PRINT_OPERAND_ADDRESS.  NOTE(review): several case
   labels and closing braces are elided from this listing.  */
687 v850_print_operand_address (FILE * file, rtx addr)
689 switch (GET_CODE (addr))
/* Plain register: printed as "0[reg]".  */
692 fprintf (file, "0[");
693 v850_print_operand (file, addr, 0);
/* LO_SUM: "lo(sym)[reg]".  */
697 if (GET_CODE (XEXP (addr, 0)) == REG)
700 fprintf (file, "lo(");
701 v850_print_operand (file, XEXP (addr, 1), 0);
702 fprintf (file, ")[");
703 v850_print_operand (file, XEXP (addr, 0), 0);
/* PLUS: "offset[reg]" when operand 0 is the register.  */
708 if (GET_CODE (XEXP (addr, 0)) == REG
709 || GET_CODE (XEXP (addr, 0)) == SUBREG
712 v850_print_operand (file, XEXP (addr, 1), 0);
714 v850_print_operand (file, XEXP (addr, 0), 0);
719 v850_print_operand (file, XEXP (addr, 0), 0);
721 v850_print_operand (file, XEXP (addr, 1), 0);
/* SYMBOL_REF in a small data area: "areaoff(sym)[basereg]".  */
726 const char *off_name = NULL;
727 const char *reg_name = NULL;
729 if (SYMBOL_REF_ZDA_P (addr))
734 else if (SYMBOL_REF_SDA_P (addr))
739 else if (SYMBOL_REF_TDA_P (addr))
746 fprintf (file, "%s(", off_name);
747 output_addr_const (file, addr);
749 fprintf (file, ")[%s]", reg_name);
/* CONST wrapping a small-data symbol: same "areaoff(sym)[reg]" form.  */
753 if (special_symbolref_operand (addr, VOIDmode))
755 rtx x = XEXP (XEXP (addr, 0), 0);
756 const char *off_name;
757 const char *reg_name;
759 if (SYMBOL_REF_ZDA_P (x))
764 else if (SYMBOL_REF_SDA_P (x))
769 else if (SYMBOL_REF_TDA_P (x))
777 fprintf (file, "%s(", off_name);
778 output_addr_const (file, addr);
779 fprintf (file, ")[%s]", reg_name);
782 output_addr_const (file, addr);
/* Fallback: any other address form.  */
785 output_addr_const (file, addr);
/* TARGET_PRINT_OPERAND_PUNCT_VALID_P hook; body elided from this listing.  */
791 v850_print_operand_punct_valid_p (unsigned char code)
796 /* When assemble_integer is used to emit the offsets for a switch
797 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
798 output_addr_const will normally barf at this, but it is OK to omit
799 the truncate and just emit the difference of the two labels. The
800 .hword directive will automatically handle the truncation for us.
802 Returns true if rtx was handled, false otherwise. */
805 v850_output_addr_const_extra (FILE * file, rtx x)
/* Only TRUNCATE forms are handled here.  */
807 if (GET_CODE (x) != TRUNCATE)
812 /* We must also handle the case where the switch table was passed a
813 constant value and so has been collapsed. In this case the first
814 label will have been deleted. In such a case it is OK to emit
815 nothing, since the table will not be used.
816 (cf gcc.c-torture/compile/990801-1.c). */
817 if (GET_CODE (x) == MINUS
818 && GET_CODE (XEXP (x, 0)) == LABEL_REF
819 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
820 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
/* Emit the label difference without the TRUNCATE wrapper.  */
823 output_addr_const (file, x);
827 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
/* Returns the assembler template for a single-word move; operand 0 is
   the destination, operand 1 the source.  NOTE(review): several
   short-return lines are elided from this listing.  */
831 output_move_single (rtx * operands)
833 rtx dst = operands[0];
834 rtx src = operands[1];
/* Integer constant sources, by increasing cost.  */
841 else if (GET_CODE (src) == CONST_INT)
843 HOST_WIDE_INT value = INTVAL (src);
845 if (CONST_OK_FOR_J (value)) /* Signed 5-bit immediate. */
848 else if (CONST_OK_FOR_K (value)) /* Signed 16-bit immediate. */
849 return "movea %1,%.,%0";
851 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
852 return "movhi hi0(%1),%.,%0";
854 /* A random constant. */
855 else if (TARGET_V850E || TARGET_V850E2_ALL)
858 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
/* SFmode CONST_DOUBLE: same ladder, using the single target word.  */
861 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
863 HOST_WIDE_INT high, low;
865 const_double_split (src, &high, &low);
867 if (CONST_OK_FOR_J (high)) /* Signed 5-bit immediate. */
870 else if (CONST_OK_FOR_K (high)) /* Signed 16-bit immediate. */
871 return "movea %F1,%.,%0";
873 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
874 return "movhi hi0(%F1),%.,%0";
876 /* A random constant. */
877 else if (TARGET_V850E || TARGET_V850E2_ALL)
881 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
/* Memory source: load, with %S/%W choosing sld/ld and the size suffix.  */
884 else if (GET_CODE (src) == MEM)
885 return "%S1ld%W1 %1,%0";
887 else if (special_symbolref_operand (src, VOIDmode))
888 return "movea %O1(%P1),%Q1,%0";
/* Symbolic addresses: one insn on V850E+, two otherwise.  */
890 else if (GET_CODE (src) == LABEL_REF
891 || GET_CODE (src) == SYMBOL_REF
892 || GET_CODE (src) == CONST)
894 if (TARGET_V850E || TARGET_V850E2_ALL)
895 return "mov hilo(%1),%0";
897 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
900 else if (GET_CODE (src) == HIGH)
901 return "movhi hi(%1),%.,%0";
903 else if (GET_CODE (src) == LO_SUM)
905 operands[2] = XEXP (src, 0);
906 operands[3] = XEXP (src, 1);
907 return "movea lo(%3),%2,%0";
/* Memory destination: store; zero sources use r0 via "%.".  */
911 else if (GET_CODE (dst) == MEM)
914 return "%S0st%W0 %1,%0";
916 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
917 return "%S0st%W0 %.,%0";
919 else if (GET_CODE (src) == CONST_DOUBLE
920 && CONST0_RTX (GET_MODE (dst)) == src)
921 return "%S0st%W0 %.,%0";
/* Anything else is a bug in the move patterns.  */
924 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
928 /* Generate comparison code. */
/* Predicate: OP is a float comparison testing the FPU "zero" condition
   (CC_FPU_LT/LE/EQ modes) of the CC register against 0.  */
930 v850_float_z_comparison_operator (rtx op, enum machine_mode mode)
932 enum rtx_code code = GET_CODE (op);
934 if (GET_RTX_CLASS (code) != RTX_COMPARE
935 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
938 if (mode != GET_MODE (op) && mode != VOIDmode)
/* Must compare the CC register against const0.  */
941 if ((GET_CODE (XEXP (op, 0)) != REG
942 || REGNO (XEXP (op, 0)) != CC_REGNUM)
943 || XEXP (op, 1) != const0_rtx)
946 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LTmode)
948 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LEmode)
950 if (GET_MODE (XEXP (op, 0)) == CC_FPU_EQmode)
/* Predicate: OP is a float comparison testing the FPU "non-zero"
   condition (CC_FPU_GT/GE/NE modes) of the CC register against 0.
   Mirror of v850_float_z_comparison_operator above.  */
957 v850_float_nz_comparison_operator (rtx op, enum machine_mode mode)
959 enum rtx_code code = GET_CODE (op);
961 if (GET_RTX_CLASS (code) != RTX_COMPARE
962 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
965 if (mode != GET_MODE (op) && mode != VOIDmode)
968 if ((GET_CODE (XEXP (op, 0)) != REG
969 || REGNO (XEXP (op, 0)) != CC_REGNUM)
970 || XEXP (op, 1) != const0_rtx)
973 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GTmode)
975 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GEmode)
977 if (GET_MODE (XEXP (op, 0)) == CC_FPU_NEmode)
/* Pick the CC mode for comparing OP0 against OP1 under condition COND;
   float compares get a condition-specific CC_FPU_* mode.  NOTE(review):
   the case labels and the non-float return are elided from this listing.  */
984 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
986 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
991 return CC_FPU_LEmode;
993 return CC_FPU_GEmode;
995 return CC_FPU_LTmode;
997 return CC_FPU_GTmode;
999 return CC_FPU_EQmode;
1001 return CC_FPU_NEmode;
/* Emit the FPU compare insn for COND on OP0/OP1 (DFmode or SFmode) and
   return the CC mode the branch should test.  NOTE(review): the case
   labels are elided; also note the SFmode arm tests GET_MODE of
   v850_compare_op0 rather than op0 -- verify against the original.  */
1010 v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
1012 if (GET_MODE(op0) == DFmode)
1017 emit_insn (gen_cmpdf_le_insn (op0, op1));
1020 emit_insn (gen_cmpdf_ge_insn (op0, op1));
1023 emit_insn (gen_cmpdf_lt_insn (op0, op1));
1026 emit_insn (gen_cmpdf_gt_insn (op0, op1));
1029 emit_insn (gen_cmpdf_eq_insn (op0, op1));
1032 emit_insn (gen_cmpdf_ne_insn (op0, op1));
1038 else if (GET_MODE(v850_compare_op0) == SFmode)
1043 emit_insn (gen_cmpsf_le_insn(op0, op1));
1046 emit_insn (gen_cmpsf_ge_insn(op0, op1));
1049 emit_insn (gen_cmpsf_lt_insn(op0, op1));
1052 emit_insn (gen_cmpsf_gt_insn(op0, op1));
1055 emit_insn (gen_cmpsf_eq_insn(op0, op1));
1058 emit_insn (gen_cmpsf_ne_insn(op0, op1));
1069 return v850_select_cc_mode (cond, op0, op1);
/* Emit a compare of OP0/OP1 and return the condition rtx to branch on.
   Integer compares use the plain CC register; float compares go through
   v850_gen_float_compare and copy FCC into CC.  */
1073 v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
1075 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
1077 emit_insn (gen_cmpsi_insn (op0, op1));
1078 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
1083 mode = v850_gen_float_compare (cond, mode, op0, op1);
1084 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1085 emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1087 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1091 /* Return maximum offset supported for a short EP memory reference of mode
1092 MODE and signedness UNSIGNEDP. */
1095 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
/* QImode: 4-bit offset with -msmall-sld or on V850E (condition partly
   elided here), otherwise 7-bit.  */
1102 if (TARGET_SMALL_SLD)
1103 max_offset = (1 << 4);
1104 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1106 max_offset = (1 << 4);
1108 max_offset = (1 << 7);
/* HImode: 5-bit under the same conditions, otherwise 8-bit.  */
1112 if (TARGET_SMALL_SLD)
1113 max_offset = (1 << 5);
1114 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1116 max_offset = (1 << 5);
1118 max_offset = (1 << 8);
/* Word-sized modes: 8-bit offset.  */
1123 max_offset = (1 << 8);
1133 /* Return true if OP is a valid short EP memory reference */
1136 ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
1142 /* If we are not using the EP register on a per-function basis
1143 then do not allow this optimization at all. This is to
1144 prevent the use of the SLD/SST instructions which cannot be
1145 guaranteed to work properly due to a hardware bug. */
1149 if (GET_CODE (op) != MEM)
/* MASK enforces natural alignment of the offset for this mode.  */
1152 max_offset = ep_memory_offset (mode, unsigned_load);
1154 mask = GET_MODE_SIZE (mode) - 1;
1156 addr = XEXP (op, 0);
1157 if (GET_CODE (addr) == CONST)
1158 addr = XEXP (addr, 0);
1160 switch (GET_CODE (addr))
/* Bare TDA symbols and the EP register itself are valid bases.  */
1166 return SYMBOL_REF_TDA_P (addr);
1169 return REGNO (addr) == EP_REGNUM;
/* PLUS: base must be EP or a TDA symbol, offset a small aligned
   non-negative constant.  */
1172 op0 = XEXP (addr, 0);
1173 op1 = XEXP (addr, 1);
1174 if (GET_CODE (op1) == CONST_INT
1175 && INTVAL (op1) < max_offset
1176 && INTVAL (op1) >= 0
1177 && (INTVAL (op1) & mask) == 0)
1179 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1182 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1191 /* Substitute memory references involving a pointer, to use the ep pointer,
1192 taking care to save and preserve the ep. */
1195 substitute_ep_register (rtx first_insn,
1202 rtx reg = gen_rtx_REG (Pmode, regno);
/* r1 is used as a scratch to save/restore ep; mark it live and cache
   the two register rtxes in the caller-provided slots.  */
1207 df_set_regs_ever_live (1, true);
1208 *p_r1 = gen_rtx_REG (Pmode, 1);
1209 *p_ep = gen_rtx_REG (Pmode, 30);
/* Optional statistics dump (guard elided from this listing).  */
1214 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1215 2 * (uses - 3), uses, reg_names[regno],
1216 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1217 INSN_UID (first_insn), INSN_UID (last_insn));
1219 if (GET_CODE (first_insn) == NOTE)
1220 first_insn = next_nonnote_insn (first_insn);
1222 last_insn = next_nonnote_insn (last_insn);
/* Walk the window and rewrite each memory reference that uses REGNO as
   its base so it uses ep instead.  */
1223 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1225 if (GET_CODE (insn) == INSN)
1227 rtx pattern = single_set (insn);
1229 /* Replace the memory references. */
1233 /* Memory operands are signed by default. */
1234 int unsignedp = FALSE;
/* Locate the single MEM operand (store, load, or extended load).  */
1236 if (GET_CODE (SET_DEST (pattern)) == MEM
1237 && GET_CODE (SET_SRC (pattern)) == MEM)
1240 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1241 p_mem = &SET_DEST (pattern);
1243 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1244 p_mem = &SET_SRC (pattern);
1246 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1247 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1248 p_mem = &XEXP (SET_SRC (pattern), 0);
1250 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1251 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1253 p_mem = &XEXP (SET_SRC (pattern), 0);
1261 rtx addr = XEXP (*p_mem, 0);
1263 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1264 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1266 else if (GET_CODE (addr) == PLUS
1267 && GET_CODE (XEXP (addr, 0)) == REG
1268 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1269 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1270 && ((INTVAL (XEXP (addr, 1)))
1271 < ep_memory_offset (GET_MODE (*p_mem),
1273 && ((INTVAL (XEXP (addr, 1))) >= 0))
1274 *p_mem = change_address (*p_mem, VOIDmode,
1275 gen_rtx_PLUS (Pmode,
1283 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1284 insn = prev_nonnote_insn (first_insn);
1285 if (insn && GET_CODE (insn) == INSN
1286 && GET_CODE (PATTERN (insn)) == SET
1287 && SET_DEST (PATTERN (insn)) == *p_ep
1288 && SET_SRC (PATTERN (insn)) == *p_r1)
/* Emit: save ep in r1, load ep with the pointer, and restore ep after
   the window.  */
1291 emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);
1293 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
1294 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
1298 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1299 the -mep mode to copy heavily used pointers to ep to use the implicit
/* NOTE(review): the function definition line and several control-flow
   lines are elided from this listing; REGS tracks per-register use
   counts and the insn window over which ep substitution would pay off.  */
1311 regs[FIRST_PSEUDO_REGISTER];
1320 /* If not ep mode, just return now. */
/* Reset the per-register tracking state.  */
1324 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1327 regs[i].first_insn = NULL_RTX;
1328 regs[i].last_insn = NULL_RTX;
1331 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1333 switch (GET_CODE (insn))
1335 /* End of basic block */
/* At a block boundary, substitute for the most-used pointer if it
   meets the threshold, then reset the tracking state.  */
1342 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1344 if (max_uses < regs[i].uses)
1346 max_uses = regs[i].uses;
1352 substitute_ep_register (regs[max_regno].first_insn,
1353 regs[max_regno].last_insn,
1354 max_uses, max_regno, &r1, &ep);
1358 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1361 regs[i].first_insn = NULL_RTX;
1362 regs[i].last_insn = NULL_RTX;
1370 pattern = single_set (insn);
1372 /* See if there are any memory references we can shorten */
1375 rtx src = SET_SRC (pattern);
1376 rtx dest = SET_DEST (pattern);
1378 /* Memory operands are signed by default. */
1379 int unsignedp = FALSE;
1381 /* We might have (SUBREG (MEM)) here, so just get rid of the
1382 subregs to make this code simpler. */
1383 if (GET_CODE (dest) == SUBREG
1384 && (GET_CODE (SUBREG_REG (dest)) == MEM
1385 || GET_CODE (SUBREG_REG (dest)) == REG))
1386 alter_subreg (&dest);
1387 if (GET_CODE (src) == SUBREG
1388 && (GET_CODE (SUBREG_REG (src)) == MEM
1389 || GET_CODE (SUBREG_REG (src)) == REG))
1390 alter_subreg (&src);
/* Find the MEM in the set, mirroring substitute_ep_register.  */
1392 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1395 else if (GET_CODE (dest) == MEM)
1398 else if (GET_CODE (src) == MEM)
1401 else if (GET_CODE (src) == SIGN_EXTEND
1402 && GET_CODE (XEXP (src, 0)) == MEM)
1403 mem = XEXP (src, 0);
1405 else if (GET_CODE (src) == ZERO_EXTEND
1406 && GET_CODE (XEXP (src, 0)) == MEM)
1408 mem = XEXP (src, 0);
1414 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
/* A candidate reference: base register, or base plus a small
   non-negative offset within the short-form range.  */
1417 else if (!use_ep && mem
1418 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1420 rtx addr = XEXP (mem, 0);
1424 if (GET_CODE (addr) == REG)
1427 regno = REGNO (addr);
1430 else if (GET_CODE (addr) == PLUS
1431 && GET_CODE (XEXP (addr, 0)) == REG
1432 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1433 && ((INTVAL (XEXP (addr, 1)))
1434 < ep_memory_offset (GET_MODE (mem), unsignedp))
1435 && ((INTVAL (XEXP (addr, 1))) >= 0))
1438 regno = REGNO (XEXP (addr, 0));
/* Record the use and extend the live window for this base.  */
1447 regs[regno].last_insn = insn;
1448 if (!regs[regno].first_insn)
1449 regs[regno].first_insn = insn;
1453 /* Loading up a register in the basic block zaps any savings
1455 if (GET_CODE (dest) == REG)
1457 enum machine_mode mode = GET_MODE (dest);
1461 regno = REGNO (dest);
1462 endregno = regno + HARD_REGNO_NREGS (regno, mode);
1466 /* See if we can use the pointer before this
/* If the clobbered register was the current best candidate,
   substitute for it first, then clear all state.  */
1471 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1473 if (max_uses < regs[i].uses)
1475 max_uses = regs[i].uses;
1481 && max_regno >= regno
1482 && max_regno < endregno)
1484 substitute_ep_register (regs[max_regno].first_insn,
1485 regs[max_regno].last_insn,
1486 max_uses, max_regno, &r1,
1489 /* Since we made a substitution, zap all remembered
1491 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1494 regs[i].first_insn = NULL_RTX;
1495 regs[i].last_insn = NULL_RTX;
/* Otherwise just forget the overwritten registers.  */
1500 for (i = regno; i < endregno; i++)
1503 regs[i].first_insn = NULL_RTX;
1504 regs[i].last_insn = NULL_RTX;
1512 /* # of registers saved by the interrupt handler. */
1513 #define INTERRUPT_FIXED_NUM 5
1515 /* # of bytes for registers saved by the interrupt handler. */
1516 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1518 /* # of words saved for other registers. */
1519 #define INTERRUPT_ALL_SAVE_NUM \
1520 (30 - INTERRUPT_FIXED_NUM)
/* Bytes for the "save everything" interrupt path (4 bytes per word).  */
1522 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
/* Compute the number of bytes of stack needed to save call-saved
   registers, setting *P_REG_SAVED to a bitmask of the registers saved.
   NOTE(review): the size accumulation statements are elided from this
   listing.  */
1525 compute_register_save_size (long * p_reg_saved)
1529 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1530 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1533 /* Count the return pointer if we need to save it. */
1534 if (crtl->profile && !call_p)
1536 df_set_regs_ever_live (LINK_POINTER_REGNUM, true);
1540 /* Count space for the register saves. */
1541 if (interrupt_handler)
1543 for (i = 0; i <= 31; i++)
1547 if (df_regs_ever_live_p (i) || call_p)
1550 reg_saved |= 1L << i;
1554 /* We don't save/restore r0 or the stack pointer */
1556 case STACK_POINTER_REGNUM:
1559 /* For registers with fixed use, we save them, set them to the
1560 appropriate value, and then restore them.
1561 These registers are handled specially, so don't list them
1562 on the list of registers to save in the prologue. */
1563 case 1: /* temp used to hold ep */
1565 case 10: /* temp used to call interrupt save/restore */
1566 case 11: /* temp used to call interrupt save/restore (long call) */
1567 case EP_REGNUM: /* ep */
1574 /* Find the first register that needs to be saved. */
1575 for (i = 0; i <= 31; i++)
1576 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1577 || i == LINK_POINTER_REGNUM))
1580 /* If it is possible that an out-of-line helper function might be
1581 used to generate the prologue for the current function, then we
1582 need to cover the possibility that such a helper function will
1583 be used, despite the fact that there might be gaps in the list of
1584 registers that need to be saved. To detect this we note that the
1585 helper functions always push at least register r29 (provided
1586 that the function is not an interrupt handler). */
1588 if (TARGET_PROLOG_FUNCTION
1589 && (i == 2 || ((i >= 20) && (i < 30))))
1594 reg_saved |= 1L << i;
1599 /* Helper functions save all registers between the starting
1600 register and the last register, regardless of whether they
1601 are actually used by the function or not. */
1602 for (; i <= 29; i++)
1605 reg_saved |= 1L << i;
1608 if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1611 reg_saved |= 1L << LINK_POINTER_REGNUM;
/* Non-helper path: save each live call-saved register individually.  */
1616 for (; i <= 31; i++)
1617 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1618 || i == LINK_POINTER_REGNUM))
1621 reg_saved |= 1L << i;
/* Report the mask to the caller when requested.  */
1627 *p_reg_saved = reg_saved;
/* Total frame size: local variables (SIZE), saved registers, and
   outgoing argument space.  */
1633 compute_frame_size (int size, long * p_reg_saved)
1636 + compute_register_save_size (p_reg_saved)
1637 + crtl->outgoing_args_size);
/* Decide whether calling an out-of-line prologue/epilogue helper is
   smaller than emitting the register saves inline, by comparing
   estimated byte lengths of both sequences.  */
1641 use_prolog_function (int num_save, int frame_size)
1643 int alloc_stack = (4 * num_save);
1644 int unalloc_stack = frame_size - alloc_stack;
1645 int save_func_len, restore_func_len;
1646 int save_normal_len, restore_normal_len;
/* Helper-call cost: 2 bytes with CALLT, otherwise a jarl (longer for
   -mlong-calls).  */
1648 if (! TARGET_DISABLE_CALLT)
1649 save_func_len = restore_func_len = 2;
1651 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
/* Extra stack not covered by the helper costs an add/addi each way.  */
1655 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1656 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1659 /* See if we would have used ep to save the stack. */
1660 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1661 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1663 save_normal_len = restore_normal_len = 4 * num_save;
/* Plus the stack adjustment itself in each direction.  */
1665 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1666 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1668 /* Don't bother checking if we don't actually save any space.
1669 This happens for instance if one register is saved and additional
1670 stack space is allocated. */
1671 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
/* Emit the RTL for the prologue of the current function.

   NOTE(review): this extract is elided -- braces, else-branches and some
   declarations are missing between the visible lines; the comments below
   describe only what the visible code shows.

   Visible responsibilities:
     - compute the frame size and the set of callee-saved registers;
     - for interrupt handlers, emit the fixed save sequence up front
       (callt-based on V850E/V850E2 when callt is enabled);
     - when profitable, build a single PARALLEL that matches an
       out-of-line prologue helper (saves registers and allocates
       stack in one recognized insn);
     - otherwise save registers one at a time, allocating the stack in
       up to two steps when the frame is too large for one add;
     - finally set up the frame pointer if one is needed.  */
1675 expand_prologue (void)
1678 unsigned int size = get_frame_size ();
1679 unsigned int actual_fsize;
1680 unsigned int init_stack_alloc = 0;
1683 unsigned int num_save;
1685 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
/* NOTE(review): "®_saved" below looks like mojibake for "&reg_saved"
   (reg_saved is used as a plain mask a few lines later) -- confirm
   against the upstream source.  */
1688 actual_fsize = compute_frame_size (size, ®_saved);
1690 /* Save/setup global registers for interrupt functions right now. */
1691 if (interrupt_handler)
1693 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1694 emit_insn (gen_callt_save_interrupt ())
1696 emit_insn (gen_save_interrupt ());
/* The fixed-register save space was emitted above, so it no longer
   counts towards the frame we must allocate here.  */
1698 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1700 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1701 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1704 /* Identify all of the saved registers. */
1706 for (i = 1; i < 32; i++)
1708 if (((1L << i) & reg_saved) != 0)
1709 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1712 /* See if we have an insn that allocates stack space and saves the particular
1713 registers we want to. */
1714 save_all = NULL_RTX;
1715 if (TARGET_PROLOG_FUNCTION && num_save > 0)
1717 if (use_prolog_function (num_save, actual_fsize))
1719 int alloc_stack = 4 * num_save;
/* Vector layout: element 0 adjusts sp; elements 1..num_save store the
   registers; trailing CLOBBERs (r10, and r11 for long calls) model the
   scratch registers the out-of-line helper uses.  */
1722 save_all = gen_rtx_PARALLEL
1724 rtvec_alloc (num_save + 1
1725 + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1727 XVECEXP (save_all, 0, 0)
1728 = gen_rtx_SET (VOIDmode,
1730 gen_rtx_PLUS (Pmode,
1732 GEN_INT(-alloc_stack)));
1733 for (i = 0; i < num_save; i++)
1736 XVECEXP (save_all, 0, i+1)
1737 = gen_rtx_SET (VOIDmode,
1739 gen_rtx_PLUS (Pmode,
1745 if (TARGET_DISABLE_CALLT)
1747 XVECEXP (save_all, 0, num_save + 1)
1748 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1750 if (TARGET_LONG_CALLS)
1751 XVECEXP (save_all, 0, num_save + 2)
1752 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
/* Only use the PARALLEL if it actually matches a pattern; otherwise
   fall through to the one-by-one path below.  */
1755 code = recog (save_all, NULL_RTX, NULL);
1758 rtx insn = emit_insn (save_all);
1759 INSN_CODE (insn) = code;
1760 actual_fsize -= alloc_stack;
1764 save_all = NULL_RTX;
1768 /* If no prolog save function is available, store the registers the old
1769 fashioned way (one by one). */
1772 /* Special case interrupt functions that save all registers for a call. */
1773 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1775 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1776 emit_insn (gen_callt_save_all_interrupt ());
1778 emit_insn (gen_save_all_interrupt ());
1783 /* If the stack is too big, allocate it in chunks so we can do the
1784 register saves. We use the register save size so we use the ep
1786 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1787 init_stack_alloc = compute_register_save_size (NULL);
1789 init_stack_alloc = actual_fsize;
1791 /* Save registers at the beginning of the stack frame. */
1792 offset = init_stack_alloc - 4;
1794 if (init_stack_alloc)
1795 emit_insn (gen_addsi3 (stack_pointer_rtx,
1797 GEN_INT (- (signed) init_stack_alloc)));
1799 /* Save the return pointer first. */
1800 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1802 emit_move_insn (gen_rtx_MEM (SImode,
1803 plus_constant (stack_pointer_rtx,
1805 save_regs[--num_save]);
1809 for (i = 0; i < num_save; i++)
1811 emit_move_insn (gen_rtx_MEM (SImode,
1812 plus_constant (stack_pointer_rtx,
1820 /* Allocate the rest of the stack that was not allocated above (either it is
1821 > 32K or we just called a function to save the registers and needed more
1823 if (actual_fsize > init_stack_alloc)
1825 int diff = actual_fsize - init_stack_alloc;
1826 if (CONST_OK_FOR_K (-diff))
1827 emit_insn (gen_addsi3 (stack_pointer_rtx,
/* Too large for an add immediate: materialize the displacement in
   r12 (a caller-clobbered scratch) and add that instead.  */
1832 rtx reg = gen_rtx_REG (Pmode, 12);
1833 emit_move_insn (reg, GEN_INT (-diff));
1834 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
1838 /* If we need a frame pointer, set it up now. */
1839 if (frame_pointer_needed)
1840 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
/* Emit the RTL for the epilogue of the current function -- the mirror
   image of expand_prologue.

   NOTE(review): this extract is elided; missing lines (braces, else
   branches, declarations) fall between the visible ones.  Comments
   describe only what is visible.

   Visible responsibilities:
     - undo the interrupt-handler frame adjustments;
     - tear down any dynamic stack via the frame pointer;
     - when profitable, emit one PARALLEL matching an out-of-line
       epilogue helper that restores registers, pops the stack and
       returns in a single jump insn;
     - otherwise restore registers one at a time and pop the stack in
       up to two steps;
     - finish with a return (reti-style for interrupt handlers) and
       reset the per-function interrupt cache.  */
1845 expand_epilogue (void)
1848 unsigned int size = get_frame_size ();
/* NOTE(review): "®_saved" looks like mojibake for "&reg_saved" --
   confirm against the upstream source.  */
1850 int actual_fsize = compute_frame_size (size, ®_saved);
1851 rtx restore_regs[32];
1853 unsigned int num_restore;
1855 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1857 /* Eliminate the initial stack stored by interrupt functions. */
1858 if (interrupt_handler)
1860 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1861 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1862 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1865 /* Cut off any dynamic stack created. */
1866 if (frame_pointer_needed)
1867 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1869 /* Identify all of the saved registers. */
1871 for (i = 1; i < 32; i++)
1873 if (((1L << i) & reg_saved) != 0)
1874 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1877 /* See if we have an insn that restores the particular registers we
1879 restore_all = NULL_RTX;
1881 if (TARGET_PROLOG_FUNCTION
1883 && !interrupt_handler)
1885 int alloc_stack = (4 * num_restore);
1887 /* Don't bother checking if we don't actually save any space. */
1888 if (use_prolog_function (num_restore, actual_fsize))
/* Vector layout: element 0 is the RETURN, element 1 pops the register
   save area, elements 2..num_restore+1 reload the registers.  */
1891 restore_all = gen_rtx_PARALLEL (VOIDmode,
1892 rtvec_alloc (num_restore + 2));
1893 XVECEXP (restore_all, 0, 0) = gen_rtx_RETURN (VOIDmode);
1894 XVECEXP (restore_all, 0, 1)
1895 = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1896 gen_rtx_PLUS (Pmode,
1898 GEN_INT (alloc_stack)));
1900 offset = alloc_stack - 4;
1901 for (i = 0; i < num_restore; i++)
1903 XVECEXP (restore_all, 0, i+2)
1904 = gen_rtx_SET (VOIDmode,
1907 gen_rtx_PLUS (Pmode,
/* Only use the PARALLEL if it matches a recognized pattern.  */
1913 code = recog (restore_all, NULL_RTX, NULL);
1919 actual_fsize -= alloc_stack;
/* Release the remaining frame before the combined restore-and-return,
   using r12 as scratch if the amount does not fit an add immediate.  */
1922 if (CONST_OK_FOR_K (actual_fsize))
1923 emit_insn (gen_addsi3 (stack_pointer_rtx,
1925 GEN_INT (actual_fsize)));
1928 rtx reg = gen_rtx_REG (Pmode, 12);
1929 emit_move_insn (reg, GEN_INT (actual_fsize));
1930 emit_insn (gen_addsi3 (stack_pointer_rtx,
1936 insn = emit_jump_insn (restore_all);
1937 INSN_CODE (insn) = code;
1941 restore_all = NULL_RTX;
1945 /* If no epilogue save function is available, restore the registers the
1946 old fashioned way (one by one). */
1949 unsigned int init_stack_free;
1951 /* If the stack is large, we need to cut it down in 2 pieces. */
1952 if (interrupt_handler)
1953 init_stack_free = 0;
1954 else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1955 init_stack_free = 4 * num_restore;
1957 init_stack_free = (signed) actual_fsize;
1959 /* Deallocate the rest of the stack if it is > 32K. */
1960 if ((unsigned int) actual_fsize > init_stack_free)
1964 diff = actual_fsize - init_stack_free;
1966 if (CONST_OK_FOR_K (diff))
1967 emit_insn (gen_addsi3 (stack_pointer_rtx,
1972 rtx reg = gen_rtx_REG (Pmode, 12);
1973 emit_move_insn (reg, GEN_INT (diff));
1974 emit_insn (gen_addsi3 (stack_pointer_rtx,
1980 /* Special case interrupt functions that save all registers
1982 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1984 if (! TARGET_DISABLE_CALLT)
1985 emit_insn (gen_callt_restore_all_interrupt ());
1987 emit_insn (gen_restore_all_interrupt ());
1991 /* Restore registers from the beginning of the stack frame. */
1992 int offset = init_stack_free - 4;
1994 /* Restore the return pointer first. */
1996 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
1998 emit_move_insn (restore_regs[--num_restore],
1999 gen_rtx_MEM (SImode,
2000 plus_constant (stack_pointer_rtx,
2005 for (i = 0; i < num_restore; i++)
2007 emit_move_insn (restore_regs[i],
2008 gen_rtx_MEM (SImode,
2009 plus_constant (stack_pointer_rtx,
/* Keep the reloads live up to the return so they are not deleted.  */
2012 emit_use (restore_regs[i]);
2016 /* Cut back the remainder of the stack. */
2017 if (init_stack_free)
2018 emit_insn (gen_addsi3 (stack_pointer_rtx,
2020 GEN_INT (init_stack_free)));
2023 /* And return or use reti for interrupt handlers. */
2024 if (interrupt_handler)
2026 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
2027 emit_insn (gen_callt_return_interrupt ());
2029 emit_jump_insn (gen_return_interrupt ());
2031 else if (actual_fsize)
2032 emit_jump_insn (gen_return_internal ());
2034 emit_jump_insn (gen_return_simple ());
/* Invalidate the cached interrupt-handler flag for the next function.  */
2037 v850_interrupt_cache_p = FALSE;
2038 v850_interrupt_p = FALSE;
2041 /* Update the condition code from the insn. */
/* Dispatches on the insn's "cc" machine attribute and updates the
   global cc_status tracking structure accordingly.
   NOTE(review): the case labels (CC_NONE, CC_SET_ZN, etc.) are elided
   in this extract; only the case bodies are visible.  */
2043 notice_update_cc (rtx body, rtx insn)
2045 switch (get_attr_cc (insn))
2048 /* Insn does not affect CC at all. */
2052 /* Insn does not change CC, but the 0'th operand has been changed. */
2053 if (cc_status.value1 != 0
2054 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
2055 cc_status.value1 = 0;
2059 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2060 V,C is in an unusable state. */
2062 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
2063 cc_status.value1 = recog_data.operand[0];
2067 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2068 C is in an unusable state. */
2070 cc_status.flags |= CC_NO_CARRY;
2071 cc_status.value1 = recog_data.operand[0];
2075 /* The insn is a compare instruction. */
2077 cc_status.value1 = SET_SRC (body);
2081 /* Insn doesn't leave CC in a usable state. */
2090 /* Retrieve the data area that has been chosen for the given decl. */
/* Checks the decl's attribute list for "sda"/"tda"/"zda" in that order
   and returns the corresponding v850_data_area enumerator, or
   DATA_AREA_NORMAL when none of the three attributes is present.  */
2093 v850_get_data_area (tree decl)
2095 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2096 return DATA_AREA_SDA;
2098 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2099 return DATA_AREA_TDA;
2101 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2102 return DATA_AREA_ZDA;
2104 return DATA_AREA_NORMAL;
2107 /* Store the indicated data area in the decl's attributes. */
/* Translates the v850_data_area enumerator into the matching attribute
   identifier and prepends it to DECL's attribute list.
   NOTE(review): the switch header and default case are elided in this
   extract; only the three mapping cases are visible.  */
2110 v850_set_data_area (tree decl, v850_data_area data_area)
2116 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2117 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2118 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2123 DECL_ATTRIBUTES (decl) = tree_cons
2124 (name, NULL, DECL_ATTRIBUTES (decl));
2127 /* Handle an "interrupt" attribute; arguments as in
2128 struct attribute_spec.handler. */
/* Rejects the attribute (with a warning, not an error) when applied to
   anything other than a function declaration.  */
2130 v850_handle_interrupt_attribute (tree * node,
2132 tree args ATTRIBUTE_UNUSED,
2133 int flags ATTRIBUTE_UNUSED,
2134 bool * no_add_attrs)
2136 if (TREE_CODE (*node) != FUNCTION_DECL)
2138 warning (OPT_Wattributes, "%qE attribute only applies to functions",
/* Setting *no_add_attrs keeps the bogus attribute off the decl.  */
2140 *no_add_attrs = true;
2146 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2147 struct attribute_spec.handler. */
/* Validates a data-area attribute: maps the attribute name to a
   v850_data_area value, rejects local (function-scope) variables, and
   rejects a decl whose previously chosen data area conflicts.
   NOTE(review): parts of the switch (case labels, default handling)
   are elided in this extract.  */
2149 v850_handle_data_area_attribute (tree* node,
2151 tree args ATTRIBUTE_UNUSED,
2152 int flags ATTRIBUTE_UNUSED,
2153 bool * no_add_attrs)
2155 v850_data_area data_area;
2156 v850_data_area area;
2159 /* Implement data area attribute. */
2160 if (is_attribute_p ("sda", name))
2161 data_area = DATA_AREA_SDA;
2162 else if (is_attribute_p ("tda", name))
2163 data_area = DATA_AREA_TDA;
2164 else if (is_attribute_p ("zda", name))
2165 data_area = DATA_AREA_ZDA;
2169 switch (TREE_CODE (decl))
/* Data-area placement only makes sense for objects with static
   storage; inside a function the attribute is an error.  */
2172 if (current_function_decl != NULL_TREE)
2174 error_at (DECL_SOURCE_LOCATION (decl),
2175 "data area attributes cannot be specified for "
2177 *no_add_attrs = true;
2183 area = v850_get_data_area (decl);
2184 if (area != DATA_AREA_NORMAL && data_area != area)
2186 error ("data area of %q+D conflicts with previous declaration",
2188 *no_add_attrs = true;
2200 /* Return nonzero if FUNC is an interrupt function as specified
2201 by the "interrupt" attribute. */
/* Answers from the per-function cache when it is valid; otherwise
   looks for the "interrupt_handler" or "interrupt" attribute on FUNC.  */
2204 v850_interrupt_function_p (tree func)
2209 if (v850_interrupt_cache_p)
2210 return v850_interrupt_p;
2212 if (TREE_CODE (func) != FUNCTION_DECL)
2215 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2221 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2222 ret = a != NULL_TREE;
2225 /* Its not safe to trust global variables until after function inlining has
/* Bitwise OR of the two flags is deliberate-looking but equivalent to
   logical OR here; only then is the cached answer recorded.  */
2227 if (reload_completed | reload_in_progress)
2228 v850_interrupt_p = ret;
/* Decide which small data area DECL lives in and record the choice as
   SYMBOL_FLAG_{ZDA,TDA,SDA} on its SYMBOL_REF.
   Order of preference when sizing against -m{tda,sda,zda}=n limits:
   TDA, then SDA, then ZDA (see the chained else-ifs below).
   NOTE(review): some surrounding lines (braces, early returns) are
   elided in this extract.  */
2235 v850_encode_data_area (tree decl, rtx symbol)
2239 /* Map explicit sections into the appropriate attribute */
2240 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2242 if (DECL_SECTION_NAME (decl))
2244 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
2246 if (streq (name, ".zdata") || streq (name, ".zbss"))
2247 v850_set_data_area (decl, DATA_AREA_ZDA);
2249 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2250 v850_set_data_area (decl, DATA_AREA_SDA);
2252 else if (streq (name, ".tdata"))
2253 v850_set_data_area (decl, DATA_AREA_TDA);
2256 /* If no attribute, support -m{zda,sda,tda}=n */
2259 int size = int_size_in_bytes (TREE_TYPE (decl));
2263 else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
2264 v850_set_data_area (decl, DATA_AREA_TDA);
2266 else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
2267 v850_set_data_area (decl, DATA_AREA_SDA);
2269 else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
2270 v850_set_data_area (decl, DATA_AREA_ZDA);
/* Nothing to flag if the decl still lives in the normal data area.  */
2273 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2277 flags = SYMBOL_REF_FLAGS (symbol);
2278 switch (v850_get_data_area (decl))
2280 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2281 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2282 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2283 default: gcc_unreachable ();
2285 SYMBOL_REF_FLAGS (symbol) = flags;
/* Implement TARGET_ENCODE_SECTION_INFO: after the default processing,
   tag static/external variables with their v850 data-area flags.  */
2289 v850_encode_section_info (tree decl, rtx rtl, int first)
2291 default_encode_section_info (decl, rtl, first);
2293 if (TREE_CODE (decl) == VAR_DECL
2294 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2295 v850_encode_data_area (decl, XEXP (rtl, 0));
2298 /* Construct a JR instruction to a routine that will perform the equivalent of
2299 the RTL passed in as an argument. This RTL is a function epilogue that
2300 pops registers off the stack and possibly releases some extra stack space
2301 as well. The code has already verified that the RTL matches these
/* Returns (from a static buffer -- not reentrant, see the XXX) the
   assembly text for the jump to the out-of-line __return_* helper.
   NOTE(review): some lines of this function are elided in this extract.  */
2305 construct_restore_jr (rtx op)
2307 int count = XVECLEN (op, 0);
2309 unsigned long int mask;
2310 unsigned long int first;
2311 unsigned long int last;
2313 static char buff [100]; /* XXX */
2317 error ("bogus JR construction: %d", count);
2321 /* Work out how many bytes to pop off the stack before retrieving
2323 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2324 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2325 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2327 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2329 /* Each pop will remove 4 bytes from the stack.... */
2330 stack_bytes -= (count - 2) * 4;
2332 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2333 if (stack_bytes != 0)
2335 error ("bad amount of stack space removal: %d", stack_bytes);
2339 /* Now compute the bit mask of registers to push. */
2341 for (i = 2; i < count; i++)
2343 rtx vector_element = XVECEXP (op, 0, i);
2345 gcc_assert (GET_CODE (vector_element) == SET);
2346 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2347 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2350 mask |= 1 << REGNO (SET_DEST (vector_element));
2353 /* Scan for the first register to pop. */
2354 for (first = 0; first < 32; first++)
2356 if (mask & (1 << first))
2360 gcc_assert (first < 32);
2362 /* Discover the last register to pop. */
2363 if (mask & (1 << LINK_POINTER_REGNUM))
2365 last = LINK_POINTER_REGNUM;
2369 gcc_assert (!stack_bytes);
2370 gcc_assert (mask & (1 << 29));
2375 /* Note, it is possible to have gaps in the register mask.
2376 We ignore this here, and generate a JR anyway. We will
2377 be popping more registers than is strictly necessary, but
2378 it does save code space. */
/* With -mlong-calls a plain JR may not reach the helper, so load its
   address into r6 and jump through the register instead.  */
2380 if (TARGET_LONG_CALLS)
2385 sprintf (name, "__return_%s", reg_names [first]);
2387 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2389 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2395 sprintf (buff, "jr __return_%s", reg_names [first]);
2397 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2404 /* Construct a JARL instruction to a routine that will perform the equivalent
2405 of the RTL passed as a parameter. This RTL is a function prologue that
2406 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2407 some stack space as well. The code has already verified that the RTL
2408 matches these requirements. */
/* Mirror of construct_restore_jr for the prologue side: returns (from
   a static buffer -- not reentrant) the assembly for the call to the
   out-of-line __save_* helper.
   NOTE(review): some lines of this function are elided in this extract.  */
2410 construct_save_jarl (rtx op)
2412 int count = XVECLEN (op, 0);
2414 unsigned long int mask;
2415 unsigned long int first;
2416 unsigned long int last;
2418 static char buff [100]; /* XXX */
/* The PARALLEL carries one extra CLOBBER when -mlong-calls is on,
   hence the 3-vs-2 minimum element count.  */
2420 if (count <= (TARGET_LONG_CALLS ? 3 : 2))
2422 error ("bogus JARL construction: %d", count);
2427 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2428 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2429 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2430 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2432 /* Work out how many bytes to push onto the stack after storing the
2434 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2436 /* Each push will put 4 bytes from the stack.... */
2437 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2439 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2440 if (stack_bytes != 0)
2442 error ("bad amount of stack space removal: %d", stack_bytes);
2446 /* Now compute the bit mask of registers to push. */
2448 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2450 rtx vector_element = XVECEXP (op, 0, i);
2452 gcc_assert (GET_CODE (vector_element) == SET);
2453 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2454 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2457 mask |= 1 << REGNO (SET_SRC (vector_element));
2460 /* Scan for the first register to push. */
2461 for (first = 0; first < 32; first++)
2463 if (mask & (1 << first))
2467 gcc_assert (first < 32);
2469 /* Discover the last register to push. */
2470 if (mask & (1 << LINK_POINTER_REGNUM))
2472 last = LINK_POINTER_REGNUM;
2476 gcc_assert (!stack_bytes);
2477 gcc_assert (mask & (1 << 29));
2482 /* Note, it is possible to have gaps in the register mask.
2483 We ignore this here, and generate a JARL anyway. We will
2484 be pushing more registers than is strictly necessary, but
2485 it does save code space. */
/* Long-call form: load the helper's address into r11 and fake the
   return address in r10 before jumping, since JARL cannot reach.  */
2487 if (TARGET_LONG_CALLS)
2492 sprintf (name, "__save_%s", reg_names [first]);
2494 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2496 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2502 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2504 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2511 extern tree last_assemble_variable_decl;
2512 extern int size_directive_output;
2514 /* A version of asm_output_aligned_bss() that copes with the special
2515 data areas of the v850. */
/* Switches to the BSS-like section matching the decl's data area
   (zbss/sbss/tdata, falling back to plain .bss), then emits the usual
   alignment, label and skip directives.  */
2517 v850_output_aligned_bss (FILE * file,
2520 unsigned HOST_WIDE_INT size,
2523 switch (v850_get_data_area (decl))
2526 switch_to_section (zbss_section);
2530 switch_to_section (sbss_section);
/* NOTE(review): the TDA case switches to tdata_section (the v850 has
   no separate tbss section in this file).  */
2534 switch_to_section (tdata_section);
2537 switch_to_section (bss_section);
2541 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2542 #ifdef ASM_DECLARE_OBJECT_NAME
2543 last_assemble_variable_decl = decl;
2544 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2546 /* Standard thing is just output label for the object. */
2547 ASM_OUTPUT_LABEL (file, name);
2548 #endif /* ASM_DECLARE_OBJECT_NAME */
/* A zero-sized object still reserves one byte so its label is unique.  */
2549 ASM_OUTPUT_SKIP (file, size ? size : 1);
2552 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
/* Emits the data-area-specific .comm-style directive (zcomm/scomm/
   tcomm, or plain comm) followed by "name,size,alignment".  */
2554 v850_output_common (FILE * file,
2560 if (decl == NULL_TREE)
2562 fprintf (file, "%s", COMMON_ASM_OP);
2566 switch (v850_get_data_area (decl))
2569 fprintf (file, "%s", ZCOMMON_ASM_OP);
2573 fprintf (file, "%s", SCOMMON_ASM_OP);
2577 fprintf (file, "%s", TCOMMON_ASM_OP);
2581 fprintf (file, "%s", COMMON_ASM_OP);
2586 assemble_name (file, name);
2587 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2590 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
/* Marks the symbol .local and then reuses the common-output path so
   the data-area-specific directive is still chosen.  */
2592 v850_output_local (FILE * file,
2598 fprintf (file, "%s", LOCAL_ASM_OP);
2599 assemble_name (file, name);
2600 fprintf (file, "\n");
2602 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2605 /* Add data area to the given declaration if a ghs data area pragma is
2606 currently in effect (#pragma ghs startXXX/endXXX). */
/* Implement TARGET_INSERT_ATTRIBUTES.  Three jobs are visible here:
   1) apply the data area from any active GHS pragma to file-scope
      VAR/CONST decls that have no explicit data area yet;
   2) lazily build the default GHS section-name strings;
   3) attach a section name to decls whose GHS section kind has been
      renamed by pragma (GHS_current_section_names).  */
2608 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
2611 && data_area_stack->data_area
2612 && current_function_decl == NULL_TREE
2613 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2614 && v850_get_data_area (decl) == DATA_AREA_NORMAL)
2615 v850_set_data_area (decl, data_area_stack->data_area)
2617 /* Initialize the default names of the v850 specific sections,
2618 if this has not been done before. */
2620 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2622 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2623 = build_string (sizeof (".sdata")-1, ".sdata");
2625 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2626 = build_string (sizeof (".rosdata")-1, ".rosdata");
2628 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2629 = build_string (sizeof (".tdata")-1, ".tdata");
2631 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2632 = build_string (sizeof (".zdata")-1, ".zdata");
2634 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2635 = build_string (sizeof (".rozdata")-1, ".rozdata");
/* Only decls that will actually be emitted in this translation unit
   (defined, or external with an initializer) and that have no explicit
   section name are considered.  */
2638 if (current_function_decl == NULL_TREE
2639 && (TREE_CODE (decl) == VAR_DECL
2640 || TREE_CODE (decl) == CONST_DECL
2641 || TREE_CODE (decl) == FUNCTION_DECL)
2642 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2643 && !DECL_SECTION_NAME (decl))
2645 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2646 tree chosen_section;
2648 if (TREE_CODE (decl) == FUNCTION_DECL)
2649 kind = GHS_SECTION_KIND_TEXT;
2652 /* First choose a section kind based on the data area of the decl. */
2653 switch (v850_get_data_area (decl))
2659 kind = ((TREE_READONLY (decl))
2660 ? GHS_SECTION_KIND_ROSDATA
2661 : GHS_SECTION_KIND_SDATA);
2665 kind = GHS_SECTION_KIND_TDATA;
2669 kind = ((TREE_READONLY (decl))
2670 ? GHS_SECTION_KIND_ROZDATA
2671 : GHS_SECTION_KIND_ZDATA);
2674 case DATA_AREA_NORMAL: /* default data area */
2675 if (TREE_READONLY (decl))
2676 kind = GHS_SECTION_KIND_RODATA;
2677 else if (DECL_INITIAL (decl))
2678 kind = GHS_SECTION_KIND_DATA;
2680 kind = GHS_SECTION_KIND_BSS;
2684 /* Now, if the section kind has been explicitly renamed,
2685 then attach a section attribute. */
2686 chosen_section = GHS_current_section_names [(int) kind];
2688 /* Otherwise, if this kind of section needs an explicit section
2689 attribute, then also attach one. */
2690 if (chosen_section == NULL)
2691 chosen_section = GHS_default_section_names [(int) kind];
2695 /* Only set the section name if specified by a pragma, because
2696 otherwise it will force those variables to get allocated storage
2697 in this module, rather than by the linker. */
2698 DECL_SECTION_NAME (decl) = chosen_section;
2703 /* Construct a DISPOSE instruction that is the equivalent of
2704 the given RTX. We have already verified that this should
/* Returns (from a static buffer -- not reentrant, see the XXXs) either
   a callt to a __callt_return_* helper or a textual DISPOSE insn with
   the register list spelled out.
   NOTE(review): several lines of this function are elided in this
   extract (declarations of i, stack_bytes, use_callt, etc.).  */
2708 construct_dispose_instruction (rtx op)
2710 int count = XVECLEN (op, 0);
2712 unsigned long int mask;
2714 static char buff[ 100 ]; /* XXX */
2719 error ("bogus DISPOSE construction: %d", count);
2723 /* Work out how many bytes to pop off the
2724 stack before retrieving registers. */
2725 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2726 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2727 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2729 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2731 /* Each pop will remove 4 bytes from the stack.... */
2732 stack_bytes -= (count - 2) * 4;
2734 /* Make sure that the amount we are popping
2735 will fit into the DISPOSE instruction. */
2736 if (stack_bytes > 128)
2738 error ("too much stack space to dispose of: %d", stack_bytes);
2742 /* Now compute the bit mask of registers to push. */
2745 for (i = 2; i < count; i++)
2747 rtx vector_element = XVECEXP (op, 0, i);
2749 gcc_assert (GET_CODE (vector_element) == SET);
2750 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2751 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
/* r2 restores are handled by the callt helper variant.  */
2754 if (REGNO (SET_DEST (vector_element)) == 2)
2757 mask |= 1 << REGNO (SET_DEST (vector_element));
2760 if (! TARGET_DISABLE_CALLT
2761 && (use_callt || stack_bytes == 0))
2765 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
/* Find the lowest saved register in r20..r31 to name the helper.  */
2770 for (i = 20; i < 32; i++)
2771 if (mask & (1 << i))
2775 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2777 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2778 i, (mask & (1 << 31)) ? "31c" : "29");
2783 static char regs [100]; /* XXX */
2786 /* Generate the DISPOSE instruction. Note we could just issue the
2787 bit mask as a number as the assembler can cope with this, but for
2788 the sake of our readers we turn it into a textual description. */
2792 for (i = 20; i < 32; i++)
2794 if (mask & (1 << i))
2799 strcat (regs, ", ");
2804 strcat (regs, reg_names[ first ]);
/* Collapse a run of consecutive registers into "first - last".  */
2806 for (i++; i < 32; i++)
2807 if ((mask & (1 << i)) == 0)
2812 strcat (regs, " - ");
2813 strcat (regs, reg_names[ i - 1 ] );
2818 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2824 /* Construct a PREPARE instruction that is the equivalent of
2825 the given RTL. We have already verified that this should
/* Prologue-side twin of construct_dispose_instruction: returns (from a
   static buffer -- not reentrant) a callt to a __callt_save_* helper
   or a textual PREPARE insn.
   NOTE(review): several lines are elided in this extract; also
   "PREPEARE" in the diagnostic below appears to be a typo for
   "PREPARE" -- it is a runtime string, left untouched here.  */
2829 construct_prepare_instruction (rtx op)
2833 unsigned long int mask;
2835 static char buff[ 100 ]; /* XXX */
2838 if (XVECLEN (op, 0) <= 1)
2840 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2844 /* Work out how many bytes to push onto
2845 the stack after storing the registers. */
2846 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2847 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2848 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2850 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2853 /* Make sure that the amount we are popping
2854 will fit into the DISPOSE instruction. */
2855 if (stack_bytes < -128)
2857 error ("too much stack space to prepare: %d", stack_bytes);
2861 /* Now compute the bit mask of registers to push. */
2864 for (i = 1; i < XVECLEN (op, 0); i++)
2866 rtx vector_element = XVECEXP (op, 0, i);
/* CLOBBER elements (scratch registers) carry no register to save.  */
2868 if (GET_CODE (vector_element) == CLOBBER)
2871 gcc_assert (GET_CODE (vector_element) == SET);
2872 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2873 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
/* r2 saves are handled by the callt helper variant.  */
2876 if (REGNO (SET_SRC (vector_element)) == 2)
2879 mask |= 1 << REGNO (SET_SRC (vector_element));
2883 stack_bytes += count * 4;
2885 if ((! TARGET_DISABLE_CALLT)
2886 && (use_callt || stack_bytes == 0))
2890 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2894 for (i = 20; i < 32; i++)
2895 if (mask & (1 << i))
2899 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2901 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2902 i, (mask & (1 << 31)) ? "31c" : "29");
2906 static char regs [100]; /* XXX */
2910 /* Generate the PREPARE instruction. Note we could just issue the
2911 bit mask as a number as the assembler can cope with this, but for
2912 the sake of our readers we turn it into a textual description. */
2916 for (i = 20; i < 32; i++)
2918 if (mask & (1 << i))
2923 strcat (regs, ", ");
2928 strcat (regs, reg_names[ first ]);
/* Collapse a run of consecutive registers into "first - last".  */
2930 for (i++; i < 32; i++)
2931 if ((mask & (1 << i)) == 0)
2936 strcat (regs, " - ");
2937 strcat (regs, reg_names[ i - 1 ] );
2942 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2948 /* Return an RTX indicating where the return address to the
2949 calling function can be found. */
/* NOTE(review): COUNT is unused in the visible lines; the usual
   "count != 0 => const0_rtx" guard is presumably in the elided part --
   confirm against the upstream source.  */
2952 v850_return_addr (int count)
2957 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2960 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Creates the five v850-specific sections: read-only small/zero data
   (rosdata, rozdata), tiny data (tdata), zero data (zdata) and zero
   bss (zbss).  The assignment targets (rosdata_section, etc.) sit in
   the elided lines between the calls.  */
2963 v850_asm_init_sections (void)
2966 = get_unnamed_section (0, output_section_asm_op,
2967 "\t.section .rosdata,\"a\"");
2970 = get_unnamed_section (0, output_section_asm_op,
2971 "\t.section .rozdata,\"a\"");
2974 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2975 "\t.section .tdata,\"aw\"");
2978 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2979 "\t.section .zdata,\"aw\"");
2982 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2983 output_section_asm_op,
2984 "\t.section .zbss,\"aw\"");
/* Implement TARGET_ASM_SELECT_SECTION: pick the output section for EXP
   based on its data area, using the read-only variant (rozdata /
   rosdata / .rodata) when the variable is a compile-time constant.  */
2988 v850_select_section (tree exp,
2989 int reloc ATTRIBUTE_UNUSED,
2990 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2992 if (TREE_CODE (exp) == VAR_DECL)
/* is_const is computed from this negated test in the elided lines.  */
2995 if (!TREE_READONLY (exp)
2996 || TREE_SIDE_EFFECTS (exp)
2997 || !DECL_INITIAL (exp)
2998 || (DECL_INITIAL (exp) != error_mark_node
2999 && !TREE_CONSTANT (DECL_INITIAL (exp))))
3004 switch (v850_get_data_area (exp))
3007 return is_const ? rozdata_section : zdata_section;
3010 return tdata_section;
3013 return is_const ? rosdata_section : sdata_section;
3016 return is_const ? readonly_data_section : data_section;
/* Non-VAR_DECLs always go to read-only data.  */
3019 return readonly_data_section;
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
   r10 is the only register in which function values are returned.  */
static bool
v850_function_value_regno_p (const unsigned int regno)
{
  return regno == 10;
}
3030 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3033 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3035 /* Return values > 8 bytes in length in memory. */
3036 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
3039 /* Worker function for TARGET_FUNCTION_VALUE. */
3042 v850_function_value (const_tree valtype,
3043 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3044 bool outgoing ATTRIBUTE_UNUSED)
3046 return gen_rtx_REG (TYPE_MODE (valtype), 10);
3050 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
3053 v850_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
3054 enum machine_mode mode ATTRIBUTE_UNUSED,
3055 tree type ATTRIBUTE_UNUSED,
3056 int *pretend_arg_size ATTRIBUTE_UNUSED,
3057 int second_time ATTRIBUTE_UNUSED)
3059 ca->anonymous_args = (!TARGET_GHS ? 1 : 0);
3062 /* Worker function for TARGET_CAN_ELIMINATE. */
3065 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3067 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
3070 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3072 If TARGET_APP_REGS is not defined then add r2 and r5 to
3073 the pool of fixed registers. See PR 14505. */
3076 v850_conditional_register_usage (void)
3078 if (TARGET_APP_REGS)
3080 fixed_regs[2] = 0; call_used_regs[2] = 0;
3081 fixed_regs[5] = 0; call_used_regs[5] = 1;
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.
   Emit the fixed trampoline code: jarl captures the trampoline's own
   address in r12, the two ld.w insns pull the static chain and target
   address out of the trailing .long slots, and jmp transfers control.  */
static void
v850_asm_trampoline_template (FILE *f)
{
  static const char *const template_lines[] = {
    "\tjarl .+4,r12\n",
    "\tld.w 12[r12],r20\n",
    "\tld.w 16[r12],r12\n",
    "\tjmp [r12]\n",
    "\tnop\n",
    "\t.long 0\n",
    "\t.long 0\n",
  };
  size_t i;

  for (i = 0; i < sizeof template_lines / sizeof template_lines[0]; i++)
    fputs (template_lines[i], f);
}
3099 /* Worker function for TARGET_TRAMPOLINE_INIT. */
/* Copies the assembled trampoline template into M_TRAMP, then patches
   the two word slots at offsets 16 and 20 with the static chain value
   and the target function's address respectively (the slots the
   template's ld.w insns read, relative to the jarl-captured r12 --
   see v850_asm_trampoline_template).  */
3102 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3104 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3106 emit_block_move (m_tramp, assemble_trampoline_template (),
3107 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3109 mem = adjust_address (m_tramp, SImode, 16);
3110 emit_move_insn (mem, chain_value);
3111 mem = adjust_address (m_tramp, SImode, 20);
3112 emit_move_insn (mem, fnaddr);
3116 v850_issue_rate (void)
3118 return (TARGET_V850E2_ALL? 2 : 1);
3121 /* V850 specific attributes. */
/* All five attributes take no arguments, require a decl (not a type),
   and are validated by the two handler functions defined above:
   interrupt handling by v850_handle_interrupt_attribute, data-area
   placement by v850_handle_data_area_attribute.  */
3123 static const struct attribute_spec v850_attribute_table[] =
3125 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3126 affects_type_identity } */
3127 { "interrupt_handler", 0, 0, true, false, false,
3128 v850_handle_interrupt_attribute, false },
3129 { "interrupt", 0, 0, true, false, false,
3130 v850_handle_interrupt_attribute, false },
3131 { "sda", 0, 0, true, false, false,
3132 v850_handle_data_area_attribute, false },
3133 { "tda", 0, 0, true, false, false,
3134 v850_handle_data_area_attribute, false },
3135 { "zda", 0, 0, true, false, false,
3136 v850_handle_data_area_attribute, false },
3137 { NULL, 0, 0, false, false, false, NULL, false }
3140 /* Initialize the GCC target structure. */
/* Each #undef/#define pair below overrides one default target hook
   with the v850-specific implementation defined earlier in this file
   (or in the .md/option machinery).  */
3141 #undef TARGET_ASM_ALIGNED_HI_OP
3142 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3144 #undef TARGET_PRINT_OPERAND
3145 #define TARGET_PRINT_OPERAND v850_print_operand
3146 #undef TARGET_PRINT_OPERAND_ADDRESS
3147 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3148 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3149 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3151 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3152 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3154 #undef TARGET_ATTRIBUTE_TABLE
3155 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3157 #undef TARGET_INSERT_ATTRIBUTES
3158 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3160 #undef TARGET_ASM_SELECT_SECTION
3161 #define TARGET_ASM_SELECT_SECTION v850_select_section
3163 /* The assembler supports switchable .bss sections, but
3164 v850_select_section doesn't yet make use of them. */
3165 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3166 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3168 #undef TARGET_ENCODE_SECTION_INFO
3169 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3171 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3172 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3174 #undef TARGET_DEFAULT_TARGET_FLAGS
3175 #define TARGET_DEFAULT_TARGET_FLAGS (MASK_DEFAULT | MASK_APP_REGS)
3176 #undef TARGET_HANDLE_OPTION
3177 #define TARGET_HANDLE_OPTION v850_handle_option
3179 #undef TARGET_RTX_COSTS
3180 #define TARGET_RTX_COSTS v850_rtx_costs
3182 #undef TARGET_ADDRESS_COST
3183 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
3185 #undef TARGET_MACHINE_DEPENDENT_REORG
3186 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3188 #undef TARGET_SCHED_ISSUE_RATE
3189 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
/* Calling-convention hooks.  */
3191 #undef TARGET_FUNCTION_VALUE_REGNO_P
3192 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3193 #undef TARGET_FUNCTION_VALUE
3194 #define TARGET_FUNCTION_VALUE v850_function_value
3196 #undef TARGET_PROMOTE_PROTOTYPES
3197 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3199 #undef TARGET_RETURN_IN_MEMORY
3200 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3202 #undef TARGET_PASS_BY_REFERENCE
3203 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3205 #undef TARGET_CALLEE_COPIES
3206 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3208 #undef TARGET_SETUP_INCOMING_VARARGS
3209 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
3211 #undef TARGET_ARG_PARTIAL_BYTES
3212 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3214 #undef TARGET_FUNCTION_ARG
3215 #define TARGET_FUNCTION_ARG v850_function_arg
3217 #undef TARGET_FUNCTION_ARG_ADVANCE
3218 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3220 #undef TARGET_CAN_ELIMINATE
3221 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3223 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3224 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3226 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3227 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3228 #undef TARGET_TRAMPOLINE_INIT
3229 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3231 #undef TARGET_STRICT_ARGUMENT_NAMING
3232 #define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming
3234 #undef TARGET_OPTION_OPTIMIZATION_TABLE
3235 #define TARGET_OPTION_OPTIMIZATION_TABLE v850_option_optimization_table
3237 struct gcc_target targetm = TARGET_INITIALIZER;
3239 #include "gt-v850.h"