1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
46 #include "diagnostic-core.h"
48 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
55 /* Structure of this file:
57 + Command Line Option Support
58 + Pattern support - constraints, predicates, expanders
61 + Functions to save and restore machine-specific function data.
62 + Frame/Epilog/Prolog Related
64 + Function args in registers
65 + Handle pipeline hazards
68 + Machine-dependent Reorg
73 Symbols are encoded as @ <char> . <name> where <char> is one of these:
81 c - cb (control bus) */
83 struct GTY(()) machine_function
85 int mep_frame_pointer_needed;
93 /* Records __builtin_return address. */
97 int reg_save_slot[FIRST_PSEUDO_REGISTER];
98 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
100 /* 2 if the current function has an interrupt attribute, 1 if not, 0
101 if unknown. This is here because resource.c uses EPILOGUE_USES
103 int interrupt_handler;
105 /* Likewise, for disinterrupt attribute. */
106 int disable_interrupts;
108 /* Number of doloop tags used so far. */
111 /* True if the last tag was allocated to a doloop_end. */
112 bool doloop_tag_from_end;
114 /* True if reload changes $TP. */
115 bool reload_changes_tp;
117 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
118 We only set this if the function is an interrupt handler. */
119 int asms_without_operands;
122 #define MEP_CONTROL_REG(x) \
123 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
125 static GTY(()) section * based_section;
126 static GTY(()) section * tinybss_section;
127 static GTY(()) section * far_section;
128 static GTY(()) section * farbss_section;
129 static GTY(()) section * frodata_section;
130 static GTY(()) section * srodata_section;
132 static GTY(()) section * vtext_section;
133 static GTY(()) section * vftext_section;
134 static GTY(()) section * ftext_section;
136 static void mep_set_leaf_registers (int);
137 static bool symbol_p (rtx);
138 static bool symbolref_p (rtx);
139 static void encode_pattern_1 (rtx);
140 static void encode_pattern (rtx);
141 static bool const_in_range (rtx, int, int);
142 static void mep_rewrite_mult (rtx, rtx);
143 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
144 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
145 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
146 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
147 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
148 static bool mep_nongeneral_reg (rtx);
149 static bool mep_general_copro_reg (rtx);
150 static bool mep_nonregister (rtx);
151 static struct machine_function* mep_init_machine_status (void);
152 static rtx mep_tp_rtx (void);
153 static rtx mep_gp_rtx (void);
154 static bool mep_interrupt_p (void);
155 static bool mep_disinterrupt_p (void);
156 static bool mep_reg_set_p (rtx, rtx);
157 static bool mep_reg_set_in_function (int);
158 static bool mep_interrupt_saved_reg (int);
159 static bool mep_call_saves_register (int);
161 static void add_constant (int, int, int, int);
162 static rtx maybe_dead_move (rtx, rtx, bool);
163 static void mep_reload_pointer (int, const char *);
164 static void mep_start_function (FILE *, HOST_WIDE_INT);
165 static bool mep_function_ok_for_sibcall (tree, tree);
166 static int unique_bit_in (HOST_WIDE_INT);
167 static int bit_size_for_clip (HOST_WIDE_INT);
168 static int bytesize (const_tree, enum machine_mode);
169 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
170 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
171 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
172 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
173 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
174 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
175 static bool mep_function_attribute_inlinable_p (const_tree);
176 static bool mep_can_inline_p (tree, tree);
177 static bool mep_lookup_pragma_disinterrupt (const char *);
178 static int mep_multiple_address_regions (tree, bool);
179 static int mep_attrlist_to_encoding (tree, tree);
180 static void mep_insert_attributes (tree, tree *);
181 static void mep_encode_section_info (tree, rtx, int);
182 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
183 static void mep_unique_section (tree, int);
184 static unsigned int mep_section_type_flags (tree, const char *, int);
185 static void mep_asm_named_section (const char *, unsigned int, tree);
186 static bool mep_mentioned_p (rtx, rtx, int);
187 static void mep_reorg_regmove (rtx);
188 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
189 static void mep_reorg_repeat (rtx);
190 static bool mep_invertable_branch_p (rtx);
191 static void mep_invert_branch (rtx, rtx);
192 static void mep_reorg_erepeat (rtx);
193 static void mep_jmp_return_reorg (rtx);
194 static void mep_reorg_addcombine (rtx);
195 static void mep_reorg (void);
196 static void mep_init_intrinsics (void);
197 static void mep_init_builtins (void);
198 static void mep_intrinsic_unavailable (int);
199 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
200 static bool mep_get_move_insn (int, const struct cgen_insn **);
201 static rtx mep_convert_arg (enum machine_mode, rtx);
202 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
203 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
204 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
205 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
206 static int mep_adjust_cost (rtx, rtx, rtx, int);
207 static int mep_issue_rate (void);
208 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
209 static void mep_move_ready_insn (rtx *, int, rtx);
210 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
211 static rtx mep_make_bundle (rtx, rtx);
212 static void mep_bundle_insns (rtx);
213 static bool mep_rtx_cost (rtx, int, int, int *, bool);
214 static int mep_address_cost (rtx, bool);
215 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
217 static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
219 static rtx mep_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
221 static void mep_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
223 static bool mep_vector_mode_supported_p (enum machine_mode);
224 static bool mep_handle_option (size_t, const char *, int);
225 static rtx mep_allocate_initial_value (rtx);
226 static void mep_asm_init_sections (void);
227 static int mep_comp_type_attributes (const_tree, const_tree);
228 static bool mep_narrow_volatile_bitfield (void);
229 static rtx mep_expand_builtin_saveregs (void);
230 static tree mep_build_builtin_va_list (void);
231 static void mep_expand_va_start (tree, rtx);
232 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
233 static bool mep_can_eliminate (const int, const int);
234 static void mep_trampoline_init (rtx, tree, rtx);
236 #define WANT_GCC_DEFINITIONS
237 #include "mep-intrin.h"
238 #undef WANT_GCC_DEFINITIONS
241 /* Command Line Option Support. */
243 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
245 /* True if we can use cmov instructions to move values back and forth
246 between core and coprocessor registers. */
247 bool mep_have_core_copro_moves_p;
249 /* True if we can use cmov instructions (or a work-alike) to move
250 values between coprocessor registers. */
251 bool mep_have_copro_copro_moves_p;
253 /* A table of all coprocessor instructions that can act like
254 a coprocessor-to-coprocessor cmov. */
255 static const int mep_cmov_insns[] = {
268 static int option_mtiny_specified = 0;
272 mep_set_leaf_registers (int enable)
276 if (mep_leaf_registers[0] != enable)
277 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
278 mep_leaf_registers[i] = enable;
282 mep_conditional_register_usage (void)
286 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
288 fixed_regs[HI_REGNO] = 1;
289 fixed_regs[LO_REGNO] = 1;
290 call_used_regs[HI_REGNO] = 1;
291 call_used_regs[LO_REGNO] = 1;
294 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
299 static const struct default_options mep_option_optimization_table[] =
301 /* The first scheduling pass often increases register pressure and
302 tends to result in more spill code. Only run it when
303 specifically asked. */
304 { OPT_LEVELS_ALL, OPT_fschedule_insns, NULL, 0 },
306 /* Using $fp doesn't gain us much, even when debugging is
308 { OPT_LEVELS_ALL, OPT_fomit_frame_pointer, NULL, 1 },
310 { OPT_LEVELS_NONE, 0, NULL, 0 }
314 mep_option_override (void)
317 warning (OPT_fpic, "-fpic is not supported");
319 warning (OPT_fPIC, "-fPIC is not supported");
320 if (TARGET_S && TARGET_M)
321 error ("only one of -ms and -mm may be given");
322 if (TARGET_S && TARGET_L)
323 error ("only one of -ms and -ml may be given");
324 if (TARGET_M && TARGET_L)
325 error ("only one of -mm and -ml may be given");
326 if (TARGET_S && option_mtiny_specified)
327 error ("only one of -ms and -mtiny= may be given");
328 if (TARGET_M && option_mtiny_specified)
329 error ("only one of -mm and -mtiny= may be given");
330 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
331 warning (0, "-mclip currently has no effect without -mminmax");
333 if (mep_const_section)
335 if (strcmp (mep_const_section, "tiny") != 0
336 && strcmp (mep_const_section, "near") != 0
337 && strcmp (mep_const_section, "far") != 0)
338 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
342 mep_tiny_cutoff = 65536;
345 if (TARGET_L && ! option_mtiny_specified)
348 if (TARGET_64BIT_CR_REGS)
349 flag_split_wide_types = 0;
351 init_machine_status = mep_init_machine_status;
352 mep_init_intrinsics ();
355 /* Pattern Support - constraints, predicates, expanders. */
357 /* MEP has very few instructions that can refer to the span of
358 addresses used by symbols, so it's common to check for them. */
363 int c = GET_CODE (x);
365 return (c == CONST_INT
375 if (GET_CODE (x) != MEM)
378 c = GET_CODE (XEXP (x, 0));
379 return (c == CONST_INT
384 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
386 #define GEN_REG(R, STRICT) \
389 && ((R) == ARG_POINTER_REGNUM \
390 || (R) >= FIRST_PSEUDO_REGISTER)))
392 static char pattern[12], *patternp;
393 static GTY(()) rtx patternr[12];
394 #define RTX_IS(x) (strcmp (pattern, x) == 0)
397 encode_pattern_1 (rtx x)
401 if (patternp == pattern + sizeof (pattern) - 2)
407 patternr[patternp-pattern] = x;
409 switch (GET_CODE (x))
417 encode_pattern_1 (XEXP(x, 0));
421 encode_pattern_1 (XEXP(x, 0));
422 encode_pattern_1 (XEXP(x, 1));
426 encode_pattern_1 (XEXP(x, 0));
427 encode_pattern_1 (XEXP(x, 1));
431 encode_pattern_1 (XEXP(x, 0));
445 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
446 for (i=0; i<XVECLEN (x, 0); i++)
447 encode_pattern_1 (XVECEXP (x, 0, i));
455 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
464 encode_pattern (rtx x)
467 encode_pattern_1 (x);
472 mep_section_tag (rtx x)
478 switch (GET_CODE (x))
485 x = XVECEXP (x, 0, 0);
488 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
497 if (GET_CODE (x) != SYMBOL_REF)
500 if (name[0] == '@' && name[2] == '.')
502 if (name[1] == 'i' || name[1] == 'I')
505 return 'f'; /* near */
506 return 'n'; /* far */
514 mep_regno_reg_class (int regno)
518 case SP_REGNO: return SP_REGS;
519 case TP_REGNO: return TP_REGS;
520 case GP_REGNO: return GP_REGS;
521 case 0: return R0_REGS;
522 case HI_REGNO: return HI_REGS;
523 case LO_REGNO: return LO_REGS;
524 case ARG_POINTER_REGNUM: return GENERAL_REGS;
527 if (GR_REGNO_P (regno))
528 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
529 if (CONTROL_REGNO_P (regno))
532 if (CR_REGNO_P (regno))
536 /* Search for the register amongst user-defined subclasses of
537 the coprocessor registers. */
538 for (i = USER0_REGS; i <= USER3_REGS; ++i)
540 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
542 for (j = 0; j < N_REG_CLASSES; ++j)
544 enum reg_class sub = reg_class_subclasses[i][j];
546 if (sub == LIM_REG_CLASSES)
548 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
553 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
556 if (CCR_REGNO_P (regno))
559 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
565 mep_reg_class_from_constraint (int c, const char *str)
582 return LOADABLE_CR_REGS;
584 return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
586 return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
613 enum reg_class which = c - 'A' + USER0_REGS;
614 return (reg_class_size[which] > 0 ? which : NO_REGS);
623 mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
627 case 'I': return value >= -32768 && value < 32768;
628 case 'J': return value >= 0 && value < 65536;
629 case 'K': return value >= 0 && value < 0x01000000;
630 case 'L': return value >= -32 && value < 32;
631 case 'M': return value >= 0 && value < 32;
632 case 'N': return value >= 0 && value < 16;
636 return value >= -2147483647-1 && value <= 2147483647;
643 mep_extra_constraint (rtx value, int c)
645 encode_pattern (value);
650 /* For near symbols, like what call uses. */
651 if (GET_CODE (value) == REG)
653 return mep_call_address_operand (value, GET_MODE (value));
656 /* For signed 8-bit immediates. */
657 return (GET_CODE (value) == CONST_INT
658 && INTVAL (value) >= -128
659 && INTVAL (value) <= 127);
662 /* For tp/gp relative symbol values. */
663 return (RTX_IS ("u3s") || RTX_IS ("u2s")
664 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
667 /* Non-absolute memories. */
668 return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
672 return RTX_IS ("Hs");
675 /* Register indirect. */
676 return RTX_IS ("mr");
679 return mep_section_tag (value) == 'c' && RTX_IS ("ms");
690 const_in_range (rtx x, int minv, int maxv)
692 return (GET_CODE (x) == CONST_INT
693 && INTVAL (x) >= minv
694 && INTVAL (x) <= maxv);
697 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
698 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
699 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
700 at the end of the insn stream. */
703 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
705 if (rtx_equal_p (dest, src1))
707 else if (rtx_equal_p (dest, src2))
712 emit_insn (gen_movsi (copy_rtx (dest), src1));
714 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
719 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
720 Change the last element of PATTERN from (clobber (scratch:SI))
721 to (clobber (reg:SI HI_REGNO)). */
724 mep_rewrite_mult (rtx insn, rtx pattern)
728 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
729 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
730 PATTERN (insn) = pattern;
731 INSN_CODE (insn) = -1;
734 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
735 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
736 store the result in DEST if nonnull. */
739 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
743 lo = gen_rtx_REG (SImode, LO_REGNO);
745 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
746 mep_mulr_source (insn, dest, src1, src2));
748 pattern = gen_mulsi3_lo (lo, src1, src2);
749 mep_rewrite_mult (insn, pattern);
752 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
753 SRC3 into $lo, then use either madd or maddr. The move into $lo will
754 be deleted by a peephole2 if SRC3 is already in $lo. */
757 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
761 lo = gen_rtx_REG (SImode, LO_REGNO);
762 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
764 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
765 mep_mulr_source (insn, dest, src1, src2),
768 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
769 mep_rewrite_mult (insn, pattern);
772 /* Return true if $lo has the same value as integer register GPR when
773 instruction INSN is reached. If necessary, rewrite the instruction
774 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
775 rtx for (reg:SI LO_REGNO).
777 This function is intended to be used by the peephole2 pass. Since
778 that pass goes from the end of a basic block to the beginning, and
779 propagates liveness information on the way, there is no need to
780 update register notes here.
782 If GPR_DEAD_P is true on entry, and this function returns true,
783 then the caller will replace _every_ use of GPR in and after INSN
784 with LO. This means that if the instruction that sets $lo is a
785 mulr- or maddr-type instruction, we can rewrite it to use mul or
786 madd instead. In combination with the copy progagation pass,
787 this allows us to replace sequences like:
796 if GPR is no longer used. */
799 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
803 insn = PREV_INSN (insn);
805 switch (recog_memoized (insn))
807 case CODE_FOR_mulsi3_1:
809 if (rtx_equal_p (recog_data.operand[0], gpr))
811 mep_rewrite_mulsi3 (insn,
812 gpr_dead_p ? NULL : recog_data.operand[0],
813 recog_data.operand[1],
814 recog_data.operand[2]);
819 case CODE_FOR_maddsi3:
821 if (rtx_equal_p (recog_data.operand[0], gpr))
823 mep_rewrite_maddsi3 (insn,
824 gpr_dead_p ? NULL : recog_data.operand[0],
825 recog_data.operand[1],
826 recog_data.operand[2],
827 recog_data.operand[3]);
832 case CODE_FOR_mulsi3r:
833 case CODE_FOR_maddsi3r:
835 return rtx_equal_p (recog_data.operand[1], gpr);
838 if (reg_set_p (lo, insn)
839 || reg_set_p (gpr, insn)
840 || volatile_insn_p (PATTERN (insn)))
843 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
848 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
852 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
855 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
857 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
862 /* Return true if SET can be turned into a post-modify load or store
863 that adds OFFSET to GPR. In other words, return true if SET can be
866 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
868 It's OK to change SET to an equivalent operation in order to
872 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
875 unsigned int reg_bytes, mem_bytes;
876 enum machine_mode reg_mode, mem_mode;
878 /* Only simple SETs can be converted. */
879 if (GET_CODE (set) != SET)
882 /* Point REG to what we hope will be the register side of the set and
883 MEM to what we hope will be the memory side. */
884 if (GET_CODE (SET_DEST (set)) == MEM)
886 mem = &SET_DEST (set);
887 reg = &SET_SRC (set);
891 reg = &SET_DEST (set);
892 mem = &SET_SRC (set);
893 if (GET_CODE (*mem) == SIGN_EXTEND)
894 mem = &XEXP (*mem, 0);
897 /* Check that *REG is a suitable coprocessor register. */
898 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
901 /* Check that *MEM is a suitable memory reference. */
902 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
905 /* Get the number of bytes in each operand. */
906 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
907 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
909 /* Check that OFFSET is suitably aligned. */
910 if (INTVAL (offset) & (mem_bytes - 1))
913 /* Convert *MEM to a normal integer mode. */
914 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
915 *mem = change_address (*mem, mem_mode, NULL);
917 /* Adjust *REG as well. */
918 *reg = shallow_copy_rtx (*reg);
919 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
921 /* SET is a subword load. Convert it to an explicit extension. */
922 PUT_MODE (*reg, SImode);
923 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
927 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
928 PUT_MODE (*reg, reg_mode);
933 /* Return the effect of frame-related instruction INSN. */
936 mep_frame_expr (rtx insn)
940 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
941 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
942 RTX_FRAME_RELATED_P (expr) = 1;
946 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
947 new pattern in INSN1; INSN2 will be deleted by the caller. */
950 mep_make_parallel (rtx insn1, rtx insn2)
954 if (RTX_FRAME_RELATED_P (insn2))
956 expr = mep_frame_expr (insn2);
957 if (RTX_FRAME_RELATED_P (insn1))
958 expr = gen_rtx_SEQUENCE (VOIDmode,
959 gen_rtvec (2, mep_frame_expr (insn1), expr));
960 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
961 RTX_FRAME_RELATED_P (insn1) = 1;
964 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
965 gen_rtvec (2, PATTERN (insn1),
967 INSN_CODE (insn1) = -1;
970 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
971 the basic block to see if any previous load or store instruction can
972 be persuaded to do SET_INSN as a side-effect. Return true if so. */
975 mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
982 insn = PREV_INSN (insn);
985 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
987 mep_make_parallel (insn, set_insn);
991 if (reg_set_p (reg, insn)
992 || reg_referenced_p (reg, PATTERN (insn))
993 || volatile_insn_p (PATTERN (insn)))
997 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
1001 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1004 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1006 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1007 extract_insn (insn);
1012 mep_allow_clip (rtx ux, rtx lx, int s)
1014 HOST_WIDE_INT u = INTVAL (ux);
1015 HOST_WIDE_INT l = INTVAL (lx);
1018 if (!TARGET_OPT_CLIP)
1023 for (i = 0; i < 30; i ++)
1024 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1025 && (l == - ((HOST_WIDE_INT) 1 << i)))
1033 for (i = 0; i < 30; i ++)
1034 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1041 mep_bit_position_p (rtx x, bool looking_for)
1043 if (GET_CODE (x) != CONST_INT)
1045 switch ((int) INTVAL(x) & 0xff)
1047 case 0x01: case 0x02: case 0x04: case 0x08:
1048 case 0x10: case 0x20: case 0x40: case 0x80:
1050 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1051 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1052 return !looking_for;
1058 move_needs_splitting (rtx dest, rtx src,
1059 enum machine_mode mode ATTRIBUTE_UNUSED)
1061 int s = mep_section_tag (src);
1065 if (GET_CODE (src) == CONST
1066 || GET_CODE (src) == MEM)
1067 src = XEXP (src, 0);
1068 else if (GET_CODE (src) == SYMBOL_REF
1069 || GET_CODE (src) == LABEL_REF
1070 || GET_CODE (src) == PLUS)
1076 || (GET_CODE (src) == PLUS
1077 && GET_CODE (XEXP (src, 1)) == CONST_INT
1078 && (INTVAL (XEXP (src, 1)) < -65536
1079 || INTVAL (XEXP (src, 1)) > 0xffffff))
1080 || (GET_CODE (dest) == REG
1081 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1087 mep_split_mov (rtx *operands, int symbolic)
1091 if (move_needs_splitting (operands[0], operands[1], SImode))
1096 if (GET_CODE (operands[1]) != CONST_INT)
1099 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1100 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1101 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1104 if (((!reload_completed && !reload_in_progress)
1105 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1106 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1112 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1113 it to one specific value. So the insn chosen depends on whether
1114 the source and destination modes match. */
1117 mep_vliw_mode_match (rtx tgt)
1119 bool src_vliw = mep_vliw_function_p (cfun->decl);
1120 bool tgt_vliw = INTVAL (tgt);
1122 return src_vliw == tgt_vliw;
1125 /* Like the above, but also test for near/far mismatches. */
1128 mep_vliw_jmp_match (rtx tgt)
1130 bool src_vliw = mep_vliw_function_p (cfun->decl);
1131 bool tgt_vliw = INTVAL (tgt);
1133 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1136 return src_vliw == tgt_vliw;
1140 mep_multi_slot (rtx x)
1142 return get_attr_slot (x) == SLOT_MULTI;
1147 mep_legitimate_constant_p (rtx x)
1149 /* We can't convert symbol values to gp- or tp-rel values after
1150 reload, as reload might have used $gp or $tp for other
1152 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1154 char e = mep_section_tag (x);
1155 return (e != 't' && e != 'b');
1160 /* Be careful not to use macros that need to be compiled one way for
1161 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1164 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1168 #define DEBUG_LEGIT 0
1170 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1174 if (GET_CODE (x) == LO_SUM
1175 && GET_CODE (XEXP (x, 0)) == REG
1176 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1177 && CONSTANT_P (XEXP (x, 1)))
1179 if (GET_MODE_SIZE (mode) > 4)
1181 /* We will end up splitting this, and lo_sums are not
1182 offsettable for us. */
1184 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1189 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1194 if (GET_CODE (x) == REG
1195 && GEN_REG (REGNO (x), strict))
1198 fprintf (stderr, " - yup, [reg]\n");
1203 if (GET_CODE (x) == PLUS
1204 && GET_CODE (XEXP (x, 0)) == REG
1205 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1206 && const_in_range (XEXP (x, 1), -32768, 32767))
1209 fprintf (stderr, " - yup, [reg+const]\n");
1214 if (GET_CODE (x) == PLUS
1215 && GET_CODE (XEXP (x, 0)) == REG
1216 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1217 && GET_CODE (XEXP (x, 1)) == CONST
1218 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1219 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1220 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1221 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1224 fprintf (stderr, " - yup, [reg+unspec]\n");
1229 the_tag = mep_section_tag (x);
1234 fprintf (stderr, " - nope, [far]\n");
1239 if (mode == VOIDmode
1240 && GET_CODE (x) == SYMBOL_REF)
1243 fprintf (stderr, " - yup, call [symbol]\n");
1248 if ((mode == SImode || mode == SFmode)
1250 && LEGITIMATE_CONSTANT_P (x)
1251 && the_tag != 't' && the_tag != 'b')
1253 if (GET_CODE (x) != CONST_INT
1254 || (INTVAL (x) <= 0xfffff
1256 && (INTVAL (x) % 4) == 0))
1259 fprintf (stderr, " - yup, [const]\n");
1266 fprintf (stderr, " - nope.\n");
1272 mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
1273 enum reload_type type,
1274 int ind_levels ATTRIBUTE_UNUSED)
1276 if (GET_CODE (*x) == PLUS
1277 && GET_CODE (XEXP (*x, 0)) == MEM
1278 && GET_CODE (XEXP (*x, 1)) == REG)
1280 /* GCC will by default copy the MEM into a REG, which results in
1281 an invalid address. For us, the best thing to do is move the
1282 whole expression to a REG. */
1283 push_reload (*x, NULL_RTX, x, NULL,
1284 GENERAL_REGS, mode, VOIDmode,
1289 if (GET_CODE (*x) == PLUS
1290 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1291 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1293 char e = mep_section_tag (XEXP (*x, 0));
1295 if (e != 't' && e != 'b')
1297 /* GCC thinks that (sym+const) is a valid address. Well,
1298 sometimes it is, this time it isn't. The best thing to
1299 do is reload the symbol to a register, since reg+int
1300 tends to work, and we can't just add the symbol and
1302 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1303 GENERAL_REGS, mode, VOIDmode,
1312 mep_core_address_length (rtx insn, int opn)
1314 rtx set = single_set (insn);
1315 rtx mem = XEXP (set, opn);
1316 rtx other = XEXP (set, 1-opn);
1317 rtx addr = XEXP (mem, 0);
1319 if (register_operand (addr, Pmode))
1321 if (GET_CODE (addr) == PLUS)
1323 rtx addend = XEXP (addr, 1);
1325 gcc_assert (REG_P (XEXP (addr, 0)));
1327 switch (REGNO (XEXP (addr, 0)))
1329 case STACK_POINTER_REGNUM:
1330 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1331 && mep_imm7a4_operand (addend, VOIDmode))
1336 gcc_assert (REG_P (other));
1338 if (REGNO (other) >= 8)
1341 if (GET_CODE (addend) == CONST
1342 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1343 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1346 if (GET_CODE (addend) == CONST_INT
1347 && INTVAL (addend) >= 0
1348 && INTVAL (addend) <= 127
1349 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1359 mep_cop_address_length (rtx insn, int opn)
1361 rtx set = single_set (insn);
1362 rtx mem = XEXP (set, opn);
1363 rtx addr = XEXP (mem, 0);
1365 if (GET_CODE (mem) != MEM)
1367 if (register_operand (addr, Pmode))
1369 if (GET_CODE (addr) == POST_INC)
1375 #define DEBUG_EXPAND_MOV 0
1377 mep_expand_mov (rtx *operands, enum machine_mode mode)
1382 int post_reload = 0;
1384 tag[0] = mep_section_tag (operands[0]);
1385 tag[1] = mep_section_tag (operands[1]);
1387 if (!reload_in_progress
1388 && !reload_completed
1389 && GET_CODE (operands[0]) != REG
1390 && GET_CODE (operands[0]) != SUBREG
1391 && GET_CODE (operands[1]) != REG
1392 && GET_CODE (operands[1]) != SUBREG)
1393 operands[1] = copy_to_mode_reg (mode, operands[1]);
1395 #if DEBUG_EXPAND_MOV
1396 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1397 reload_in_progress || reload_completed);
1398 debug_rtx (operands[0]);
1399 debug_rtx (operands[1]);
1402 if (mode == DImode || mode == DFmode)
1405 if (reload_in_progress || reload_completed)
1409 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1410 cfun->machine->reload_changes_tp = true;
1412 if (tag[0] == 't' || tag[1] == 't')
1414 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1415 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1418 if (tag[0] == 'b' || tag[1] == 'b')
1420 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1421 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1424 if (cfun->machine->reload_changes_tp == true)
1431 if (symbol_p (operands[1]))
1433 t = mep_section_tag (operands[1]);
1434 if (t == 'b' || t == 't')
1437 if (GET_CODE (operands[1]) == SYMBOL_REF)
1439 tpsym = operands[1];
1440 n = gen_rtx_UNSPEC (mode,
1441 gen_rtvec (1, operands[1]),
1442 t == 'b' ? UNS_TPREL : UNS_GPREL);
1443 n = gen_rtx_CONST (mode, n);
1445 else if (GET_CODE (operands[1]) == CONST
1446 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1447 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1448 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1450 tpsym = XEXP (XEXP (operands[1], 0), 0);
1451 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1452 n = gen_rtx_UNSPEC (mode,
1453 gen_rtvec (1, tpsym),
1454 t == 'b' ? UNS_TPREL : UNS_GPREL);
1455 n = gen_rtx_PLUS (mode, n, tpoffs);
1456 n = gen_rtx_CONST (mode, n);
1458 else if (GET_CODE (operands[1]) == CONST
1459 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1463 error ("unusual TP-relative address");
1467 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1468 : mep_gp_rtx ()), n);
1469 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1470 #if DEBUG_EXPAND_MOV
1471 fprintf(stderr, "mep_expand_mov emitting ");
1478 for (i=0; i < 2; i++)
1480 t = mep_section_tag (operands[i]);
1481 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1486 sym = XEXP (operands[i], 0);
1487 if (GET_CODE (sym) == CONST
1488 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1489 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1502 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1503 n = gen_rtx_CONST (Pmode, n);
1504 n = gen_rtx_PLUS (Pmode, r, n);
1505 operands[i] = replace_equiv_address (operands[i], n);
1510 if ((GET_CODE (operands[1]) != REG
1511 && MEP_CONTROL_REG (operands[0]))
1512 || (GET_CODE (operands[0]) != REG
1513 && MEP_CONTROL_REG (operands[1])))
1516 #if DEBUG_EXPAND_MOV
1517 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1519 temp = gen_reg_rtx (mode);
1520 emit_move_insn (temp, operands[1]);
1524 if (symbolref_p (operands[0])
1525 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1526 || (GET_MODE_SIZE (mode) != 4)))
1530 gcc_assert (!reload_in_progress && !reload_completed);
1532 temp = force_reg (Pmode, XEXP (operands[0], 0));
1533 operands[0] = replace_equiv_address (operands[0], temp);
1534 emit_move_insn (operands[0], operands[1]);
1538 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
1541 if (symbol_p (operands[1])
1542 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1544 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1545 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1549 if (symbolref_p (operands[1])
1550 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1554 if (reload_in_progress || reload_completed)
1557 temp = gen_reg_rtx (Pmode);
1559 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1560 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1561 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1568 /* Cases where the pattern can't be made to use at all. */
1571 mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1575 #define DEBUG_MOV_OK 0
1577 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1578 mep_section_tag (operands[1]));
1579 debug_rtx (operands[0]);
1580 debug_rtx (operands[1]);
1583 /* We want the movh patterns to get these. */
1584 if (GET_CODE (operands[1]) == HIGH)
1587 /* We can't store a register to a far variable without using a
1588 scratch register to hold the address. Using far variables should
1589 be split by mep_emit_mov anyway. */
1590 if (mep_section_tag (operands[0]) == 'f'
1591 || mep_section_tag (operands[1]) == 'f')
1594 fprintf (stderr, " - no, f\n");
1598 i = mep_section_tag (operands[1]);
1599 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1600 /* These are supposed to be generated with adds of the appropriate
1601 register. During and after reload, however, we allow them to
1602 be accessed as normal symbols because adding a dependency on
1603 the base register now might cause problems. */
1606 fprintf (stderr, " - no, bt\n");
1611 /* The only moves we can allow involve at least one general
1612 register, so require it. */
1613 for (i = 0; i < 2; i ++)
1615 /* Allow subregs too, before reload. */
1616 rtx x = operands[i];
1618 if (GET_CODE (x) == SUBREG)
1620 if (GET_CODE (x) == REG
1621 && ! MEP_CONTROL_REG (x))
1624 fprintf (stderr, " - ok\n");
1630 fprintf (stderr, " - no, no gen reg\n");
1635 #define DEBUG_SPLIT_WIDE_MOVE 0
1637 mep_split_wide_move (rtx *operands, enum machine_mode mode)
1641 #if DEBUG_SPLIT_WIDE_MOVE
1642 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1643 debug_rtx (operands[0]);
1644 debug_rtx (operands[1]);
1647 for (i = 0; i <= 1; i++)
1649 rtx op = operands[i], hi, lo;
1651 switch (GET_CODE (op))
1655 unsigned int regno = REGNO (op);
1657 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1661 lo = gen_rtx_REG (SImode, regno);
1663 hi = gen_rtx_ZERO_EXTRACT (SImode,
1664 gen_rtx_REG (DImode, regno),
1669 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1670 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1678 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1679 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1686 /* The high part of CR <- GPR moves must be done after the low part. */
1687 operands [i + 4] = lo;
1688 operands [i + 2] = hi;
1691 if (reg_mentioned_p (operands[2], operands[5])
1692 || GET_CODE (operands[2]) == ZERO_EXTRACT
1693 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1697 /* Overlapping register pairs -- make sure we don't
1698 early-clobber ourselves. */
1700 operands[2] = operands[4];
1703 operands[3] = operands[5];
1707 #if DEBUG_SPLIT_WIDE_MOVE
1708 fprintf(stderr, "\033[34m");
1709 debug_rtx (operands[2]);
1710 debug_rtx (operands[3]);
1711 debug_rtx (operands[4]);
1712 debug_rtx (operands[5]);
1713 fprintf(stderr, "\033[0m");
1717 /* Emit a setcc instruction in its entirity. */
1720 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1728 tmp = op1, op1 = op2, op2 = tmp;
1729 code = swap_condition (code);
1734 op1 = force_reg (SImode, op1);
1735 emit_insn (gen_rtx_SET (VOIDmode, dest,
1736 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1740 if (op2 != const0_rtx)
1741 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1742 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1746 /* Branchful sequence:
1748 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1751 Branchless sequence:
1752 add3 tmp, op1, -op2 32-bit (or mov + sub)
1753 sltu3 tmp, tmp, 1 16-bit
1754 xor3 dest, tmp, 1 32-bit
1756 if (optimize_size && op2 != const0_rtx)
1759 if (op2 != const0_rtx)
1760 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1762 op2 = gen_reg_rtx (SImode);
1763 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1765 emit_insn (gen_rtx_SET (VOIDmode, dest,
1766 gen_rtx_XOR (SImode, op2, const1_rtx)));
1770 if (GET_CODE (op2) != CONST_INT
1771 || INTVAL (op2) == 0x7ffffff)
1773 op2 = GEN_INT (INTVAL (op2) + 1);
1774 return mep_expand_setcc_1 (LT, dest, op1, op2);
1777 if (GET_CODE (op2) != CONST_INT
1778 || INTVAL (op2) == -1)
1780 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1781 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1784 if (GET_CODE (op2) != CONST_INT
1785 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1787 op2 = GEN_INT (INTVAL (op2) - 1);
1788 return mep_expand_setcc_1 (GT, dest, op1, op2);
1791 if (GET_CODE (op2) != CONST_INT
1792 || op2 == const0_rtx)
1794 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1795 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1803 mep_expand_setcc (rtx *operands)
1805 rtx dest = operands[0];
1806 enum rtx_code code = GET_CODE (operands[1]);
1807 rtx op0 = operands[2];
1808 rtx op1 = operands[3];
1810 return mep_expand_setcc_1 (code, dest, op0, op1);
1814 mep_expand_cbranch (rtx *operands)
1816 enum rtx_code code = GET_CODE (operands[0]);
1817 rtx op0 = operands[1];
1818 rtx op1 = operands[2];
1825 if (mep_imm4_operand (op1, SImode))
1828 tmp = gen_reg_rtx (SImode);
1829 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1836 if (mep_imm4_operand (op1, SImode))
1839 tmp = gen_reg_rtx (SImode);
1840 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1849 if (! mep_reg_or_imm4_operand (op1, SImode))
1850 op1 = force_reg (SImode, op1);
1855 if (GET_CODE (op1) == CONST_INT
1856 && INTVAL (op1) != 0x7fffffff)
1858 op1 = GEN_INT (INTVAL (op1) + 1);
1859 code = (code == LE ? LT : GE);
1863 tmp = gen_reg_rtx (SImode);
1864 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1866 code = (code == LE ? EQ : NE);
1872 if (op1 == const1_rtx)
1879 tmp = gen_reg_rtx (SImode);
1880 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1887 tmp = gen_reg_rtx (SImode);
1888 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1890 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1899 tmp = gen_reg_rtx (SImode);
1900 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1901 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1908 tmp = gen_reg_rtx (SImode);
1909 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1911 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1923 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1927 mep_emit_cbranch (rtx *operands, int ne)
1929 if (GET_CODE (operands[1]) == REG)
1930 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1931 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1932 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1934 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1938 mep_expand_call (rtx *operands, int returns_value)
1940 rtx addr = operands[returns_value];
1941 rtx tp = mep_tp_rtx ();
1942 rtx gp = mep_gp_rtx ();
1944 gcc_assert (GET_CODE (addr) == MEM);
1946 addr = XEXP (addr, 0);
1948 if (! mep_call_address_operand (addr, VOIDmode))
1949 addr = force_reg (SImode, addr);
1951 if (! operands[returns_value+2])
1952 operands[returns_value+2] = const0_rtx;
1955 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
1956 operands[3], tp, gp));
1958 emit_call_insn (gen_call_internal (addr, operands[1],
1959 operands[2], tp, gp));
1962 /* Aliasing Support. */
1964 /* If X is a machine specific address (i.e. a symbol or label being
1965 referenced as a displacement from the GOT implemented using an
1966 UNSPEC), then return the base term. Otherwise return X. */
1969 mep_find_base_term (rtx x)
1974 if (GET_CODE (x) != PLUS)
1979 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
1980 && base == mep_tp_rtx ())
1982 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
1983 && base == mep_gp_rtx ())
1988 if (GET_CODE (term) != CONST)
1990 term = XEXP (term, 0);
1992 if (GET_CODE (term) != UNSPEC
1993 || XINT (term, 1) != unspec)
1996 return XVECEXP (term, 0, 0);
1999 /* Reload Support. */
2001 /* Return true if the registers in CLASS cannot represent the change from
2002 modes FROM to TO. */
2005 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2006 enum reg_class regclass)
2011 /* 64-bit COP regs must remain 64-bit COP regs. */
2012 if (TARGET_64BIT_CR_REGS
2013 && (regclass == CR_REGS
2014 || regclass == LOADABLE_CR_REGS)
2015 && (GET_MODE_SIZE (to) < 8
2016 || GET_MODE_SIZE (from) < 8))
2022 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2025 mep_general_reg (rtx x)
2027 while (GET_CODE (x) == SUBREG)
2029 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
2033 mep_nongeneral_reg (rtx x)
2035 while (GET_CODE (x) == SUBREG)
2037 return (GET_CODE (x) == REG
2038 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2042 mep_general_copro_reg (rtx x)
2044 while (GET_CODE (x) == SUBREG)
2046 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2050 mep_nonregister (rtx x)
2052 while (GET_CODE (x) == SUBREG)
2054 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2057 #define DEBUG_RELOAD 0
2059 /* Return the secondary reload class needed for moving value X to or
2060 from a register in coprocessor register class CLASS. */
2062 static enum reg_class
2063 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2065 if (mep_general_reg (x))
2066 /* We can do the move directly if mep_have_core_copro_moves_p,
2067 otherwise we need to go through memory. Either way, no secondary
2068 register is needed. */
2071 if (mep_general_copro_reg (x))
2073 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2074 if (mep_have_copro_copro_moves_p)
2077 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2078 if (mep_have_core_copro_moves_p)
2079 return GENERAL_REGS;
2081 /* Otherwise we need to do it through memory. No secondary
2082 register is needed. */
2086 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2087 && constraint_satisfied_p (x, CONSTRAINT_U))
2088 /* X is a memory value that we can access directly. */
2091 /* We have to move X into a GPR first and then copy it to
2092 the coprocessor register. The move from the GPR to the
2093 coprocessor might be done directly or through memory,
2094 depending on mep_have_core_copro_moves_p. */
2095 return GENERAL_REGS;
2098 /* Copying X to register in RCLASS. */
2101 mep_secondary_input_reload_class (enum reg_class rclass,
2102 enum machine_mode mode ATTRIBUTE_UNUSED,
2108 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2112 if (reg_class_subset_p (rclass, CR_REGS))
2113 rv = mep_secondary_copro_reload_class (rclass, x);
2114 else if (MEP_NONGENERAL_CLASS (rclass)
2115 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2119 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2124 /* Copying register in RCLASS to X. */
2127 mep_secondary_output_reload_class (enum reg_class rclass,
2128 enum machine_mode mode ATTRIBUTE_UNUSED,
2134 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2138 if (reg_class_subset_p (rclass, CR_REGS))
2139 rv = mep_secondary_copro_reload_class (rclass, x);
2140 else if (MEP_NONGENERAL_CLASS (rclass)
2141 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2145 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2151 /* Implement SECONDARY_MEMORY_NEEDED. */
2154 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2155 enum machine_mode mode ATTRIBUTE_UNUSED)
2157 if (!mep_have_core_copro_moves_p)
2159 if (reg_classes_intersect_p (rclass1, CR_REGS)
2160 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2162 if (reg_classes_intersect_p (rclass2, CR_REGS)
2163 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2165 if (!mep_have_copro_copro_moves_p
2166 && reg_classes_intersect_p (rclass1, CR_REGS)
2167 && reg_classes_intersect_p (rclass2, CR_REGS))
2174 mep_expand_reload (rtx *operands, enum machine_mode mode)
2176 /* There are three cases for each direction:
2181 int s0 = mep_section_tag (operands[0]) == 'f';
2182 int s1 = mep_section_tag (operands[1]) == 'f';
2183 int c0 = mep_nongeneral_reg (operands[0]);
2184 int c1 = mep_nongeneral_reg (operands[1]);
2185 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2188 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2189 debug_rtx (operands[0]);
2190 debug_rtx (operands[1]);
2195 case 00: /* Don't know why this gets here. */
2196 case 02: /* general = far */
2197 emit_move_insn (operands[0], operands[1]);
2200 case 10: /* cr = mem */
2201 case 11: /* cr = cr */
2202 case 01: /* mem = cr */
2203 case 12: /* cr = far */
2204 emit_move_insn (operands[2], operands[1]);
2205 emit_move_insn (operands[0], operands[2]);
2208 case 20: /* far = general */
2209 emit_move_insn (operands[2], XEXP (operands[1], 0));
2210 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2213 case 21: /* far = cr */
2214 case 22: /* far = far */
2216 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2217 which, mode_name[mode]);
2218 debug_rtx (operands[0]);
2219 debug_rtx (operands[1]);
2224 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2225 can be moved directly into registers 0 to 7, but not into the rest.
2226 If so, and if the required class includes registers 0 to 7, restrict
2227 it to those registers. */
2230 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2232 switch (GET_CODE (x))
2235 if (INTVAL (x) >= 0x10000
2236 && INTVAL (x) < 0x01000000
2237 && (INTVAL (x) & 0xffff) != 0
2238 && reg_class_subset_p (TPREL_REGS, rclass))
2239 rclass = TPREL_REGS;
2245 if (mep_section_tag (x) != 'f'
2246 && reg_class_subset_p (TPREL_REGS, rclass))
2247 rclass = TPREL_REGS;
2256 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2257 moves, 4 for direct double-register moves, and 1000 for anything
2258 that requires a temporary register or temporary stack slot. */
2261 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
2263 if (mep_have_copro_copro_moves_p
2264 && reg_class_subset_p (from, CR_REGS)
2265 && reg_class_subset_p (to, CR_REGS))
2267 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2271 if (reg_class_subset_p (from, CR_REGS)
2272 && reg_class_subset_p (to, CR_REGS))
2274 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2278 if (reg_class_subset_p (from, CR_REGS)
2279 || reg_class_subset_p (to, CR_REGS))
2281 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2285 if (mep_secondary_memory_needed (from, to, mode))
2287 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2290 if (GET_MODE_SIZE (mode) > 4)
2297 /* Functions to save and restore machine-specific function data. */
2299 static struct machine_function *
2300 mep_init_machine_status (void)
2302 return ggc_alloc_cleared_machine_function ();
2306 mep_allocate_initial_value (rtx reg)
2310 if (GET_CODE (reg) != REG)
2313 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2316 /* In interrupt functions, the "initial" values of $gp and $tp are
2317 provided by the prologue. They are not necessarily the same as
2318 the values that the caller was using. */
2319 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2320 if (mep_interrupt_p ())
2323 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2325 cfun->machine->reg_save_size += 4;
2326 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2329 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2330 return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
2334 mep_return_addr_rtx (int count)
2339 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2345 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2351 return get_hard_reg_initial_val (Pmode, GP_REGNO);
2355 mep_interrupt_p (void)
2357 if (cfun->machine->interrupt_handler == 0)
2359 int interrupt_handler
2360 = (lookup_attribute ("interrupt",
2361 DECL_ATTRIBUTES (current_function_decl))
2363 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2365 return cfun->machine->interrupt_handler == 2;
2369 mep_disinterrupt_p (void)
2371 if (cfun->machine->disable_interrupts == 0)
2373 int disable_interrupts
2374 = (lookup_attribute ("disinterrupt",
2375 DECL_ATTRIBUTES (current_function_decl))
2377 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2379 return cfun->machine->disable_interrupts == 2;
2383 /* Frame/Epilog/Prolog Related. */
2386 mep_reg_set_p (rtx reg, rtx insn)
2388 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2391 if (FIND_REG_INC_NOTE (insn, reg))
2393 insn = PATTERN (insn);
2396 if (GET_CODE (insn) == SET
2397 && GET_CODE (XEXP (insn, 0)) == REG
2398 && GET_CODE (XEXP (insn, 1)) == REG
2399 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2402 return set_of (reg, insn) != NULL_RTX;
2406 #define MEP_SAVES_UNKNOWN 0
2407 #define MEP_SAVES_YES 1
2408 #define MEP_SAVES_MAYBE 2
2409 #define MEP_SAVES_NO 3
2412 mep_reg_set_in_function (int regno)
2416 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2419 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2422 push_topmost_sequence ();
2423 insn = get_insns ();
2424 pop_topmost_sequence ();
2429 reg = gen_rtx_REG (SImode, regno);
2431 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2432 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2438 mep_asm_without_operands_p (void)
2440 if (cfun->machine->asms_without_operands == 0)
2444 push_topmost_sequence ();
2445 insn = get_insns ();
2446 pop_topmost_sequence ();
2448 cfun->machine->asms_without_operands = 1;
2452 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2454 cfun->machine->asms_without_operands = 2;
2457 insn = NEXT_INSN (insn);
2461 return cfun->machine->asms_without_operands == 2;
2464 /* Interrupt functions save/restore every call-preserved register, and
2465 any call-used register it uses (or all if it calls any function,
2466 since they may get clobbered there too). Here we check to see
2467 which call-used registers need saving. */
2469 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2470 && (r == FIRST_CCR_REGNO + 1 \
2471 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2472 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2475 mep_interrupt_saved_reg (int r)
2477 if (!mep_interrupt_p ())
2479 if (r == REGSAVE_CONTROL_TEMP
2480 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2482 if (mep_asm_without_operands_p ()
2484 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2485 || IVC2_ISAVED_REG (r)))
2487 if (!current_function_is_leaf)
2488 /* Function calls mean we need to save $lp. */
2489 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2491 if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
2492 /* The interrupt handler might use these registers for repeat blocks,
2493 or it might call a function that does so. */
2494 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2496 if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2498 /* Functions we call might clobber these. */
2499 if (call_used_regs[r] && !fixed_regs[r])
2501 /* Additional registers that need to be saved for IVC2. */
2502 if (IVC2_ISAVED_REG (r))
2509 mep_call_saves_register (int r)
2511 if (! cfun->machine->frame_locked)
2513 int rv = MEP_SAVES_NO;
2515 if (cfun->machine->reg_save_slot[r])
2517 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2519 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2521 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2523 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2524 /* We need these to have stack slots so that they can be set during
2527 else if (mep_interrupt_saved_reg (r))
2529 cfun->machine->reg_saved[r] = rv;
2531 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2534 /* Return true if epilogue uses register REGNO. */
2537 mep_epilogue_uses (int regno)
2539 /* Since $lp is a call-saved register, the generic code will normally
2540 mark it used in the epilogue if it needs to be saved and restored.
2541 However, when profiling is enabled, the profiling code will implicitly
2542 clobber $11. This case has to be handled specially both here and in
2543 mep_call_saves_register. */
2544 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2546 /* Interrupt functions save/restore pretty much everything. */
2547 return (reload_completed && mep_interrupt_saved_reg (regno));
2551 mep_reg_size (int regno)
2553 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2558 /* Worker function for TARGET_CAN_ELIMINATE. */
2561 mep_can_eliminate (const int from, const int to)
2563 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2564 ? ! frame_pointer_needed
2569 mep_elimination_offset (int from, int to)
2573 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2576 if (!cfun->machine->frame_locked)
2577 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2579 /* We don't count arg_regs_to_save in the arg pointer offset, because
2580 gcc thinks the arg pointer has moved along with the saved regs.
2581 However, we do count it when we adjust $sp in the prologue. */
2583 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2584 if (mep_call_saves_register (i))
2585 reg_save_size += mep_reg_size (i);
2587 if (reg_save_size % 8)
2588 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2590 cfun->machine->regsave_filler = 0;
2592 /* This is what our total stack adjustment looks like. */
2593 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2596 cfun->machine->frame_filler = 8 - (total_size % 8);
2598 cfun->machine->frame_filler = 0;
2601 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2602 return reg_save_size + cfun->machine->regsave_filler;
2604 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2605 return cfun->machine->frame_filler + frame_size;
2607 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2608 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2616 RTX_FRAME_RELATED_P (x) = 1;
2620 /* Since the prologue/epilogue code is generated after optimization,
2621 we can't rely on gcc to split constants for us. So, this code
2622 captures all the ways to add a constant to a register in one logic
2623 chunk, including optimizing away insns we just don't need. This
2624 makes the prolog/epilog code easier to follow. */
2626 add_constant (int dest, int src, int value, int mark_frame)
2631 if (src == dest && value == 0)
2636 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2637 gen_rtx_REG (SImode, src));
2639 RTX_FRAME_RELATED_P(insn) = 1;
2643 if (value >= -32768 && value <= 32767)
2645 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2646 gen_rtx_REG (SImode, src),
2649 RTX_FRAME_RELATED_P(insn) = 1;
2653 /* Big constant, need to use a temp register. We use
2654 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2655 area is always small enough to directly add to). */
2657 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2658 lo = value & 0xffff;
2660 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2665 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2666 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2670 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2671 gen_rtx_REG (SImode, src),
2672 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
2675 RTX_FRAME_RELATED_P(insn) = 1;
2676 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2677 gen_rtx_SET (SImode,
2678 gen_rtx_REG (SImode, dest),
2679 gen_rtx_PLUS (SImode,
2680 gen_rtx_REG (SImode, dest),
2685 /* Move SRC to DEST. Mark the move as being potentially dead if
2689 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2691 rtx insn = emit_move_insn (dest, src);
2694 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2699 /* Used for interrupt functions, which can't assume that $tp and $gp
2700 contain the correct pointers. */
2703 mep_reload_pointer (int regno, const char *symbol)
2707 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2710 reg = gen_rtx_REG (SImode, regno);
2711 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2712 emit_insn (gen_movsi_topsym_s (reg, sym));
2713 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2716 /* Assign save slots for any register not already saved. DImode
2717 registers go at the end of the reg save area; the rest go at the
2718 beginning. This is for alignment purposes. Returns true if a frame
2719 is really needed. */
2721 mep_assign_save_slots (int reg_save_size)
2723 bool really_need_stack_frame = false;
2727 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2728 if (mep_call_saves_register(i))
2730 int regsize = mep_reg_size (i);
2732 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2733 || mep_reg_set_in_function (i))
2734 really_need_stack_frame = true;
2736 if (cfun->machine->reg_save_slot[i])
2741 cfun->machine->reg_save_size += regsize;
2742 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2746 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2750 cfun->machine->frame_locked = 1;
2751 return really_need_stack_frame;
2755 mep_expand_prologue (void)
2757 int i, rss, sp_offset = 0;
2760 int really_need_stack_frame;
2762 /* We must not allow register renaming in interrupt functions,
2763 because that invalidates the correctness of the set of call-used
2764 registers we're going to save/restore. */
2765 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2767 if (mep_disinterrupt_p ())
2768 emit_insn (gen_mep_disable_int ());
2770 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2772 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2773 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2774 really_need_stack_frame = frame_size;
2776 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
2778 sp_offset = reg_save_size;
2779 if (sp_offset + frame_size < 128)
2780 sp_offset += frame_size ;
2782 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2784 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2785 if (mep_call_saves_register(i))
2789 enum machine_mode rmode;
2791 rss = cfun->machine->reg_save_slot[i];
2793 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2794 && (!mep_reg_set_in_function (i)
2795 && !mep_interrupt_p ()))
2798 if (mep_reg_size (i) == 8)
2803 /* If there is a pseudo associated with this register's initial value,
2804 reload might have already spilt it to the stack slot suggested by
2805 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2807 mem = gen_rtx_MEM (rmode,
2808 plus_constant (stack_pointer_rtx, sp_offset - rss));
2809 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2811 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2812 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2813 else if (rmode == DImode)
2816 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2818 mem = gen_rtx_MEM (SImode,
2819 plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2821 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2822 gen_rtx_REG (SImode, i),
2824 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2825 gen_rtx_ZERO_EXTRACT (SImode,
2826 gen_rtx_REG (DImode, i),
2830 insn = maybe_dead_move (mem,
2831 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2833 RTX_FRAME_RELATED_P (insn) = 1;
2835 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2836 gen_rtx_SET (VOIDmode,
2838 gen_rtx_REG (rmode, i)));
2839 mem = gen_rtx_MEM (SImode,
2840 plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2841 insn = maybe_dead_move (mem,
2842 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2848 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2849 gen_rtx_REG (rmode, i),
2851 insn = maybe_dead_move (mem,
2852 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2854 RTX_FRAME_RELATED_P (insn) = 1;
2856 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2857 gen_rtx_SET (VOIDmode,
2859 gen_rtx_REG (rmode, i)));
2863 if (frame_pointer_needed)
2865 /* We've already adjusted down by sp_offset. Total $sp change
2866 is reg_save_size + frame_size. We want a net change here of
2867 just reg_save_size. */
2868 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2871 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2873 if (mep_interrupt_p ())
2875 mep_reload_pointer(GP_REGNO, "__sdabase");
2876 mep_reload_pointer(TP_REGNO, "__tpbase");
2881 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2883 int local = hwi_local;
2884 int frame_size = local + crtl->outgoing_args_size;
2889 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2891 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2892 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2893 sp_offset = reg_save_size + frame_size;
2895 ffill = cfun->machine->frame_filler;
2897 if (cfun->machine->mep_frame_pointer_needed)
2898 reg_names[FP_REGNO] = "$fp";
2900 reg_names[FP_REGNO] = "$8";
2905 if (debug_info_level == DINFO_LEVEL_NONE)
2907 fprintf (file, "\t# frame: %d", sp_offset);
2909 fprintf (file, " %d regs", reg_save_size);
2911 fprintf (file, " %d locals", local);
2912 if (crtl->outgoing_args_size)
2913 fprintf (file, " %d args", crtl->outgoing_args_size);
2914 fprintf (file, "\n");
2918 fprintf (file, "\t#\n");
2919 fprintf (file, "\t# Initial Frame Information:\n");
2920 if (sp_offset || !frame_pointer_needed)
2921 fprintf (file, "\t# Entry ---------- 0\n");
2923 /* Sort registers by save slots, so they're printed in the order
2924 they appear in memory, not the order they're saved in. */
2925 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2927 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2928 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2929 if (cfun->machine->reg_save_slot[slot_map[si]]
2930 > cfun->machine->reg_save_slot[slot_map[sj]])
2932 int t = slot_map[si];
2933 slot_map[si] = slot_map[sj];
2938 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2941 int r = slot_map[i];
2942 int rss = cfun->machine->reg_save_slot[r];
2944 if (!mep_call_saves_register (r))
2947 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2948 && (!mep_reg_set_in_function (r)
2949 && !mep_interrupt_p ()))
2952 rsize = mep_reg_size(r);
2953 skip = rss - (sp+rsize);
2955 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2956 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2957 rsize, reg_names[r], sp_offset - rss);
2961 skip = reg_save_size - sp;
2963 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2965 if (frame_pointer_needed)
2966 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
2968 fprintf (file, "\t# %3d bytes for local vars\n", local);
2970 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
2971 if (crtl->outgoing_args_size)
2972 fprintf (file, "\t# %3d bytes for outgoing args\n",
2973 crtl->outgoing_args_size);
2974 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
2975 fprintf (file, "\t#\n");
2979 static int mep_prevent_lp_restore = 0;
2980 static int mep_sibcall_epilogue = 0;
2983 mep_expand_epilogue (void)
2985 int i, sp_offset = 0;
2986 int reg_save_size = 0;
2988 int lp_temp = LP_REGNO, lp_slot = -1;
2989 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
2990 int interrupt_handler = mep_interrupt_p ();
2992 if (profile_arc_flag == 2)
2993 emit_insn (gen_mep_bb_trace_ret ());
2995 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2996 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2998 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
3000 if (frame_pointer_needed)
3002 /* If we have a frame pointer, we won't have a reliable stack
3003 pointer (alloca, you know), so rebase SP from FP */
3004 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
3005 gen_rtx_REG (SImode, FP_REGNO));
3006 sp_offset = reg_save_size;
3010 /* SP is right under our local variable space. Adjust it if
3012 sp_offset = reg_save_size + frame_size;
3013 if (sp_offset >= 128)
3015 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
3016 sp_offset -= frame_size;
3020 /* This is backwards so that we restore the control and coprocessor
3021 registers before the temporary registers we use to restore
3023 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
3024 if (mep_call_saves_register (i))
3026 enum machine_mode rmode;
3027 int rss = cfun->machine->reg_save_slot[i];
3029 if (mep_reg_size (i) == 8)
3034 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
3035 && !(mep_reg_set_in_function (i) || interrupt_handler))
3037 if (mep_prevent_lp_restore && i == LP_REGNO)
3039 if (!mep_prevent_lp_restore
3040 && !interrupt_handler
3041 && (i == 10 || i == 11))
3044 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
3045 emit_move_insn (gen_rtx_REG (rmode, i),
3047 plus_constant (stack_pointer_rtx,
3051 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3052 /* Defer this one so we can jump indirect rather than
3053 copying the RA to $lp and "ret". EH epilogues
3054 automatically skip this anyway. */
3055 lp_slot = sp_offset-rss;
3058 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3060 plus_constant (stack_pointer_rtx,
3062 emit_move_insn (gen_rtx_REG (rmode, i),
3063 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3069 /* Restore this one last so we know it will be in the temp
3070 register when we return by jumping indirectly via the temp. */
3071 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3072 gen_rtx_MEM (SImode,
3073 plus_constant (stack_pointer_rtx,
3075 lp_temp = REGSAVE_CONTROL_TEMP;
3079 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
3081 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3082 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3083 gen_rtx_REG (SImode, SP_REGNO),
3084 cfun->machine->eh_stack_adjust));
3086 if (mep_sibcall_epilogue)
3089 if (mep_disinterrupt_p ())
3090 emit_insn (gen_mep_enable_int ());
3092 if (mep_prevent_lp_restore)
3094 emit_jump_insn (gen_eh_return_internal ());
3097 else if (interrupt_handler)
3098 emit_jump_insn (gen_mep_reti ());
3100 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
3104 mep_expand_eh_return (rtx *operands)
3106 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3108 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3109 emit_move_insn (ra, operands[0]);
3113 emit_insn (gen_eh_epilogue (operands[0]));
3117 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3119 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3120 mep_prevent_lp_restore = 1;
3121 mep_expand_epilogue ();
3122 mep_prevent_lp_restore = 0;
3126 mep_expand_sibcall_epilogue (void)
3128 mep_sibcall_epilogue = 1;
3129 mep_expand_epilogue ();
3130 mep_sibcall_epilogue = 0;
3134 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3139 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3142 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3143 if (mep_interrupt_p () || mep_disinterrupt_p ())
3150 mep_return_stackadj_rtx (void)
3152 return gen_rtx_REG (SImode, 10);
3156 mep_return_handler_rtx (void)
3158 return gen_rtx_REG (SImode, LP_REGNO);
3162 mep_function_profiler (FILE *file)
3164 /* Always right at the beginning of the function. */
3165 fprintf (file, "\t# mep function profiler\n");
3166 fprintf (file, "\tadd\t$sp, -8\n");
3167 fprintf (file, "\tsw\t$0, ($sp)\n");
3168 fprintf (file, "\tldc\t$0, $lp\n");
3169 fprintf (file, "\tsw\t$0, 4($sp)\n");
3170 fprintf (file, "\tbsr\t__mep_mcount\n");
3171 fprintf (file, "\tlw\t$0, 4($sp)\n");
3172 fprintf (file, "\tstc\t$0, $lp\n");
3173 fprintf (file, "\tlw\t$0, ($sp)\n");
3174 fprintf (file, "\tadd\t$sp, 8\n\n");
3178 mep_emit_bb_trace_ret (void)
3180 fprintf (asm_out_file, "\t# end of block profiling\n");
3181 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3182 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3183 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3184 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3185 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3186 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3187 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3188 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3189 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3196 /* Operand Printing. */
3199 mep_print_operand_address (FILE *stream, rtx address)
3201 if (GET_CODE (address) == MEM)
3202 address = XEXP (address, 0);
3204 /* cf: gcc.dg/asm-4.c. */
3205 gcc_assert (GET_CODE (address) == REG);
3207 mep_print_operand (stream, address, 0);
3213 const char *pattern;
3216 const conversions[] =
3219 { 0, "m+ri", "3(2)" },
3223 { 0, "mLrs", "%lo(3)(2)" },
3224 { 0, "mLr+si", "%lo(4+5)(2)" },
3225 { 0, "m+ru2s", "%tpoff(5)(2)" },
3226 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3227 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3228 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3229 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3230 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3232 { 0, "m+si", "(2+3)" },
3233 { 0, "m+li", "(2+3)" },
3236 { 0, "+si", "1+2" },
3237 { 0, "+u2si", "%tpoff(3+4)" },
3238 { 0, "+u3si", "%sdaoff(3+4)" },
3244 { 'h', "Hs", "%hi(1)" },
3246 { 'I', "u2s", "%tpoff(2)" },
3247 { 'I', "u3s", "%sdaoff(2)" },
3248 { 'I', "+u2si", "%tpoff(3+4)" },
3249 { 'I', "+u3si", "%sdaoff(3+4)" },
3251 { 'P', "mr", "(1\\+),\\0" },
3257 unique_bit_in (HOST_WIDE_INT i)
3261 case 0x01: case 0xfe: return 0;
3262 case 0x02: case 0xfd: return 1;
3263 case 0x04: case 0xfb: return 2;
3264 case 0x08: case 0xf7: return 3;
3265 case 0x10: case 0x7f: return 4;
3266 case 0x20: case 0xbf: return 5;
3267 case 0x40: case 0xdf: return 6;
3268 case 0x80: case 0xef: return 7;
3275 bit_size_for_clip (HOST_WIDE_INT i)
3279 for (rv = 0; rv < 31; rv ++)
3280 if (((HOST_WIDE_INT) 1 << rv) > i)
3285 /* Print an operand to a assembler instruction. */
3288 mep_print_operand (FILE *file, rtx x, int code)
3291 const char *real_name;
3295 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3296 we're using, then skip over the "mep_" part of its name. */
3297 const struct cgen_insn *insn;
3299 if (mep_get_move_insn (mep_cmov, &insn))
3300 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3302 mep_intrinsic_unavailable (mep_cmov);
3307 switch (GET_CODE (x))
3310 fputs ("clr", file);
3313 fputs ("set", file);
3316 fputs ("not", file);
3319 output_operand_lossage ("invalid %%L code");
3324 /* Print the second operand of a CR <- CR move. If we're using
3325 a two-operand instruction (i.e., a real cmov), then just print
3326 the operand normally. If we're using a "reg, reg, immediate"
3327 instruction such as caddi3, print the operand followed by a
3328 zero field. If we're using a three-register instruction,
3329 print the operand twice. */
3330 const struct cgen_insn *insn;
3332 mep_print_operand (file, x, 0);
3333 if (mep_get_move_insn (mep_cmov, &insn)
3334 && insn_data[insn->icode].n_operands == 3)
3337 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3338 mep_print_operand (file, x, 0);
3340 mep_print_operand (file, const0_rtx, 0);
3346 for (i = 0; conversions[i].pattern; i++)
3347 if (conversions[i].code == code
3348 && strcmp(conversions[i].pattern, pattern) == 0)
3350 for (j = 0; conversions[i].format[j]; j++)
3351 if (conversions[i].format[j] == '\\')
3353 fputc (conversions[i].format[j+1], file);
3356 else if (ISDIGIT(conversions[i].format[j]))
3358 rtx r = patternr[conversions[i].format[j] - '0'];
3359 switch (GET_CODE (r))
3362 fprintf (file, "%s", reg_names [REGNO (r)]);
3368 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3371 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3374 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3377 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3380 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3383 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3384 && !(INTVAL (r) & 0xff))
3385 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3387 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3390 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3391 && conversions[i].format[j+1] == 0)
3393 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3394 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3397 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3400 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3405 fprintf(file, "[const_double 0x%lx]",
3406 (unsigned long) CONST_DOUBLE_HIGH(r));
3409 real_name = targetm.strip_name_encoding (XSTR (r, 0));
3410 assemble_name (file, real_name);
3413 output_asm_label (r);
3416 fprintf (stderr, "don't know how to print this operand:");
3423 if (conversions[i].format[j] == '+'
3424 && (!code || code == 'I')
3425 && ISDIGIT (conversions[i].format[j+1])
3426 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3427 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3429 fputc(conversions[i].format[j], file);
3433 if (!conversions[i].pattern)
3435 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3443 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3444 int noperands ATTRIBUTE_UNUSED)
3446 /* Despite the fact that MeP is perfectly capable of branching and
3447 doing something else in the same bundle, gcc does jump
3448 optimization *after* scheduling, so we cannot trust the bundling
3449 flags on jump instructions. */
3450 if (GET_MODE (insn) == BImode
3451 && get_attr_slots (insn) != SLOTS_CORE)
3452 fputc ('+', asm_out_file);
3455 /* Function args in registers. */
3458 mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
3459 enum machine_mode mode ATTRIBUTE_UNUSED,
3460 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3461 int second_time ATTRIBUTE_UNUSED)
3463 int nsave = 4 - (cum->nregs + 1);
3466 cfun->machine->arg_regs_to_save = nsave;
3467 *pretend_size = nsave * 4;
3471 bytesize (const_tree type, enum machine_mode mode)
3473 if (mode == BLKmode)
3474 return int_size_in_bytes (type);
3475 return GET_MODE_SIZE (mode);
3479 mep_expand_builtin_saveregs (void)
3484 ns = cfun->machine->arg_regs_to_save;
3487 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3488 regbuf = assign_stack_local (SImode, bufsize, 64);
3493 regbuf = assign_stack_local (SImode, bufsize, 32);
3496 move_block_from_reg (5-ns, regbuf, ns);
3500 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3501 int ofs = 8 * ((ns+1)/2);
3503 for (i=0; i<ns; i++)
3505 int rn = (4-ns) + i + 49;
3508 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3509 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3513 return XEXP (regbuf, 0);
3516 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3519 mep_build_builtin_va_list (void)
3521 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3525 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3527 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3528 get_identifier ("__va_next_gp"), ptr_type_node);
3529 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3530 get_identifier ("__va_next_gp_limit"),
3532 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3534 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3537 DECL_FIELD_CONTEXT (f_next_gp) = record;
3538 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3539 DECL_FIELD_CONTEXT (f_next_cop) = record;
3540 DECL_FIELD_CONTEXT (f_next_stack) = record;
3542 TYPE_FIELDS (record) = f_next_gp;
3543 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3544 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3545 DECL_CHAIN (f_next_cop) = f_next_stack;
3547 layout_type (record);
3553 mep_expand_va_start (tree valist, rtx nextarg)
3555 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3556 tree next_gp, next_gp_limit, next_cop, next_stack;
3560 ns = cfun->machine->arg_regs_to_save;
3562 f_next_gp = TYPE_FIELDS (va_list_type_node);
3563 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3564 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3565 f_next_stack = DECL_CHAIN (f_next_cop);
3567 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3569 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3570 valist, f_next_gp_limit, NULL_TREE);
3571 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3573 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3574 valist, f_next_stack, NULL_TREE);
3576 /* va_list.next_gp = expand_builtin_saveregs (); */
3577 u = make_tree (sizetype, expand_builtin_saveregs ());
3578 u = fold_convert (ptr_type_node, u);
3579 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3580 TREE_SIDE_EFFECTS (t) = 1;
3581 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3583 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3584 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3586 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3587 TREE_SIDE_EFFECTS (t) = 1;
3588 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3590 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3591 size_int (8 * ((ns+1)/2)));
3592 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3593 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3594 TREE_SIDE_EFFECTS (t) = 1;
3595 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3597 /* va_list.next_stack = nextarg; */
3598 u = make_tree (ptr_type_node, nextarg);
3599 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3600 TREE_SIDE_EFFECTS (t) = 1;
3601 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3605 mep_gimplify_va_arg_expr (tree valist, tree type,
3607 gimple_seq *post_p ATTRIBUTE_UNUSED)
3609 HOST_WIDE_INT size, rsize;
3610 bool by_reference, ivc2_vec;
3611 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3612 tree next_gp, next_gp_limit, next_cop, next_stack;
3613 tree label_sover, label_selse;
3616 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3618 size = int_size_in_bytes (type);
3619 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3623 type = build_pointer_type (type);
3626 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3628 f_next_gp = TYPE_FIELDS (va_list_type_node);
3629 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3630 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3631 f_next_stack = DECL_CHAIN (f_next_cop);
3633 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3635 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3636 valist, f_next_gp_limit, NULL_TREE);
3637 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3639 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3640 valist, f_next_stack, NULL_TREE);
3642 /* if f_next_gp < f_next_gp_limit
3643 IF (VECTOR_P && IVC2)
3651 val = *f_next_stack;
3652 f_next_stack += rsize;
3656 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3657 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3658 res_addr = create_tmp_var (ptr_type_node, NULL);
3660 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3661 unshare_expr (next_gp_limit));
3662 tmp = build3 (COND_EXPR, void_type_node, tmp,
3663 build1 (GOTO_EXPR, void_type_node,
3664 unshare_expr (label_selse)),
3666 gimplify_and_add (tmp, pre_p);
3670 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3671 gimplify_and_add (tmp, pre_p);
3675 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3676 gimplify_and_add (tmp, pre_p);
3679 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3680 unshare_expr (next_gp), size_int (4));
3681 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3683 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3684 unshare_expr (next_cop), size_int (8));
3685 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3687 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3688 gimplify_and_add (tmp, pre_p);
3692 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3693 gimplify_and_add (tmp, pre_p);
3695 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3696 gimplify_and_add (tmp, pre_p);
3698 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3699 unshare_expr (next_stack), size_int (rsize));
3700 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3704 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3705 gimplify_and_add (tmp, pre_p);
3707 res_addr = fold_convert (build_pointer_type (type), res_addr);
3710 res_addr = build_va_arg_indirect_ref (res_addr);
3712 return build_va_arg_indirect_ref (res_addr);
3716 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3717 rtx libname ATTRIBUTE_UNUSED,
3718 tree fndecl ATTRIBUTE_UNUSED)
3722 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3728 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3729 larger than 4 bytes are passed indirectly. Return value in 0,
3730 unless bigger than 4 bytes, then the caller passes a pointer as the
3731 first arg. For varargs, we copy $1..$4 to the stack. */
3734 mep_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3735 const_tree type ATTRIBUTE_UNUSED,
3736 bool named ATTRIBUTE_UNUSED)
3738 /* VOIDmode is a signal for the backend to pass data to the call
3739 expander via the second operand to the call pattern. We use
3740 this to determine whether to use "jsr" or "jsrv". */
3741 if (mode == VOIDmode)
3742 return GEN_INT (cum->vliw);
3744 /* If we havn't run out of argument registers, return the next. */
3747 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3748 return gen_rtx_REG (mode, cum->nregs + 49);
3750 return gen_rtx_REG (mode, cum->nregs + 1);
3753 /* Otherwise the argument goes on the stack. */
3758 mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3759 enum machine_mode mode,
3761 bool named ATTRIBUTE_UNUSED)
3763 int size = bytesize (type, mode);
3765 /* This is non-obvious, but yes, large values passed after we've run
3766 out of registers are *still* passed by reference - we put the
3767 address of the parameter on the stack, as well as putting the
3768 parameter itself elsewhere on the stack. */
3770 if (size <= 0 || size > 8)
3774 if (TARGET_IVC2 && cum->nregs < 4 && type != NULL_TREE && VECTOR_TYPE_P (type))
3780 mep_function_arg_advance (CUMULATIVE_ARGS *pcum,
3781 enum machine_mode mode ATTRIBUTE_UNUSED,
3782 const_tree type ATTRIBUTE_UNUSED,
3783 bool named ATTRIBUTE_UNUSED)
3789 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3791 int size = bytesize (type, BLKmode);
3792 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3793 return size > 0 && size <= 8 ? 0 : 1;
3794 return size > 0 && size <= 4 ? 0 : 1;
3798 mep_narrow_volatile_bitfield (void)
3804 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3807 mep_function_value (tree type, tree func ATTRIBUTE_UNUSED)
3809 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3810 return gen_rtx_REG (TYPE_MODE (type), 48);
3811 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3814 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3817 mep_libcall_value (enum machine_mode mode)
3819 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3822 /* Handle pipeline hazards. */
3824 typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
3825 static const char *opnames[] = { "", "stc", "fsft", "ret" };
3827 static int prev_opcode = 0;
3829 /* This isn't as optimal as it could be, because we don't know what
3830 control register the STC opcode is storing in. We only need to add
3831 the nop if it's the relevent register, but we add it for irrelevent
3835 mep_asm_output_opcode (FILE *file, const char *ptr)
3837 int this_opcode = op_none;
3838 const char *hazard = 0;
3843 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3844 this_opcode = op_fsft;
3847 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3848 this_opcode = op_ret;
3851 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3852 this_opcode = op_stc;
3856 if (prev_opcode == op_stc && this_opcode == op_fsft)
3858 if (prev_opcode == op_stc && this_opcode == op_ret)
3862 fprintf(file, "%s\t# %s-%s hazard\n\t",
3863 hazard, opnames[prev_opcode], opnames[this_opcode]);
3865 prev_opcode = this_opcode;
3868 /* Handle attributes. */
3871 mep_validate_based_tiny (tree *node, tree name, tree args,
3872 int flags ATTRIBUTE_UNUSED, bool *no_add)
3874 if (TREE_CODE (*node) != VAR_DECL
3875 && TREE_CODE (*node) != POINTER_TYPE
3876 && TREE_CODE (*node) != TYPE_DECL)
3878 warning (0, "%qE attribute only applies to variables", name);
3881 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3883 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3885 warning (0, "address region attributes not allowed with auto storage class");
3888 /* Ignore storage attribute of pointed to variable: char __far * x; */
3889 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3891 warning (0, "address region attributes on pointed-to types ignored");
3900 mep_multiple_address_regions (tree list, bool check_section_attr)
3903 int count_sections = 0;
3904 int section_attr_count = 0;
3906 for (a = list; a; a = TREE_CHAIN (a))
3908 if (is_attribute_p ("based", TREE_PURPOSE (a))
3909 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3910 || is_attribute_p ("near", TREE_PURPOSE (a))
3911 || is_attribute_p ("far", TREE_PURPOSE (a))
3912 || is_attribute_p ("io", TREE_PURPOSE (a)))
3914 if (check_section_attr)
3915 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3918 if (check_section_attr)
3919 return section_attr_count;
3921 return count_sections;
3924 #define MEP_ATTRIBUTES(decl) \
3925 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3926 : DECL_ATTRIBUTES (decl) \
3927 ? (DECL_ATTRIBUTES (decl)) \
3928 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3931 mep_validate_near_far (tree *node, tree name, tree args,
3932 int flags ATTRIBUTE_UNUSED, bool *no_add)
3934 if (TREE_CODE (*node) != VAR_DECL
3935 && TREE_CODE (*node) != FUNCTION_DECL
3936 && TREE_CODE (*node) != METHOD_TYPE
3937 && TREE_CODE (*node) != POINTER_TYPE
3938 && TREE_CODE (*node) != TYPE_DECL)
3940 warning (0, "%qE attribute only applies to variables and functions",
3944 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3946 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3948 warning (0, "address region attributes not allowed with auto storage class");
3951 /* Ignore storage attribute of pointed to variable: char __far * x; */
3952 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3954 warning (0, "address region attributes on pointed-to types ignored");
3958 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3960 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3961 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3962 DECL_ATTRIBUTES (*node) = NULL_TREE;
3968 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3969 int flags ATTRIBUTE_UNUSED, bool *no_add)
3971 if (TREE_CODE (*node) != FUNCTION_DECL
3972 && TREE_CODE (*node) != METHOD_TYPE)
3974 warning (0, "%qE attribute only applies to functions", name);
3981 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3982 int flags ATTRIBUTE_UNUSED, bool *no_add)
3986 if (TREE_CODE (*node) != FUNCTION_DECL)
3988 warning (0, "%qE attribute only applies to functions", name);
3993 if (DECL_DECLARED_INLINE_P (*node))
3994 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3995 DECL_UNINLINABLE (*node) = 1;
3997 function_type = TREE_TYPE (*node);
3999 if (TREE_TYPE (function_type) != void_type_node)
4000 error ("interrupt function must have return type of void");
4002 if (TYPE_ARG_TYPES (function_type)
4003 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
4004 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
4005 error ("interrupt function must have no arguments");
4011 mep_validate_io_cb (tree *node, tree name, tree args,
4012 int flags ATTRIBUTE_UNUSED, bool *no_add)
4014 if (TREE_CODE (*node) != VAR_DECL)
4016 warning (0, "%qE attribute only applies to variables", name);
4020 if (args != NULL_TREE)
4022 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
4023 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
4024 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
4026 warning (0, "%qE attribute allows only an integer constant argument",
4032 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
4033 TREE_THIS_VOLATILE (*node) = 1;
4039 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4040 int flags ATTRIBUTE_UNUSED, bool *no_add)
4042 if (TREE_CODE (*node) != FUNCTION_TYPE
4043 && TREE_CODE (*node) != FUNCTION_DECL
4044 && TREE_CODE (*node) != METHOD_TYPE
4045 && TREE_CODE (*node) != FIELD_DECL
4046 && TREE_CODE (*node) != TYPE_DECL)
4048 static int gave_pointer_note = 0;
4049 static int gave_array_note = 0;
4050 static const char * given_type = NULL;
4052 given_type = tree_code_name[TREE_CODE (*node)];
4053 if (TREE_CODE (*node) == POINTER_TYPE)
4054 given_type = "pointers";
4055 if (TREE_CODE (*node) == ARRAY_TYPE)
4056 given_type = "arrays";
4059 warning (0, "%qE attribute only applies to functions, not %s",
4062 warning (0, "%qE attribute only applies to functions",
4066 if (TREE_CODE (*node) == POINTER_TYPE
4067 && !gave_pointer_note)
4069 inform (input_location, "to describe a pointer to a VLIW function, use syntax like this:");
4070 inform (input_location, " typedef int (__vliw *vfuncptr) ();");
4071 gave_pointer_note = 1;
4074 if (TREE_CODE (*node) == ARRAY_TYPE
4075 && !gave_array_note)
4077 inform (input_location, "to describe an array of VLIW function pointers, use syntax like this:");
4078 inform (input_location, " typedef int (__vliw *vfuncptr[]) ();");
4079 gave_array_note = 1;
4083 error ("VLIW functions are not allowed without a VLIW configuration");
4087 static const struct attribute_spec mep_attribute_table[11] =
4089 /* name min max decl type func handler */
4090 { "based", 0, 0, false, false, false, mep_validate_based_tiny },
4091 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny },
4092 { "near", 0, 0, false, false, false, mep_validate_near_far },
4093 { "far", 0, 0, false, false, false, mep_validate_near_far },
4094 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt },
4095 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt },
4096 { "io", 0, 1, false, false, false, mep_validate_io_cb },
4097 { "cb", 0, 1, false, false, false, mep_validate_io_cb },
4098 { "vliw", 0, 0, false, true, false, mep_validate_vliw },
4099 { NULL, 0, 0, false, false, false, NULL }
4103 mep_function_attribute_inlinable_p (const_tree callee)
4105 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4106 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4107 return (lookup_attribute ("disinterrupt", attrs) == 0
4108 && lookup_attribute ("interrupt", attrs) == 0);
4112 mep_can_inline_p (tree caller, tree callee)
4114 if (TREE_CODE (callee) == ADDR_EXPR)
4115 callee = TREE_OPERAND (callee, 0);
4117 if (!mep_vliw_function_p (caller)
4118 && mep_vliw_function_p (callee))
4126 #define FUNC_DISINTERRUPT 2
4129 struct GTY(()) pragma_entry {
4132 const char *funcname;
4134 typedef struct pragma_entry pragma_entry;
4136 /* Hash table of farcall-tagged sections. */
4137 static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4140 pragma_entry_eq (const void *p1, const void *p2)
4142 const pragma_entry *old = (const pragma_entry *) p1;
4143 const char *new_name = (const char *) p2;
4145 return strcmp (old->funcname, new_name) == 0;
4149 pragma_entry_hash (const void *p)
4151 const pragma_entry *old = (const pragma_entry *) p;
4152 return htab_hash_string (old->funcname);
4156 mep_note_pragma_flag (const char *funcname, int flag)
4158 pragma_entry **slot;
4161 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4162 pragma_entry_eq, NULL);
4164 slot = (pragma_entry **)
4165 htab_find_slot_with_hash (pragma_htab, funcname,
4166 htab_hash_string (funcname), INSERT);
4170 *slot = ggc_alloc_pragma_entry ();
4173 (*slot)->funcname = ggc_strdup (funcname);
4175 (*slot)->flag |= flag;
4179 mep_lookup_pragma_flag (const char *funcname, int flag)
4181 pragma_entry **slot;
4186 if (funcname[0] == '@' && funcname[2] == '.')
4189 slot = (pragma_entry **)
4190 htab_find_slot_with_hash (pragma_htab, funcname,
4191 htab_hash_string (funcname), NO_INSERT);
4192 if (slot && *slot && ((*slot)->flag & flag))
4194 (*slot)->used |= flag;
4201 mep_lookup_pragma_call (const char *funcname)
4203 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4207 mep_note_pragma_call (const char *funcname)
4209 mep_note_pragma_flag (funcname, FUNC_CALL);
4213 mep_lookup_pragma_disinterrupt (const char *funcname)
4215 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4219 mep_note_pragma_disinterrupt (const char *funcname)
4221 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
4225 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4227 const pragma_entry *d = (const pragma_entry *)(*slot);
4229 if ((d->flag & FUNC_DISINTERRUPT)
4230 && !(d->used & FUNC_DISINTERRUPT))
4231 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
4236 mep_file_cleanups (void)
4239 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
4242 /* These three functions provide a bridge between the pramgas that
4243 affect register classes, and the functions that maintain them. We
4244 can't call those functions directly as pragma handling is part of
4245 the front end and doesn't have direct access to them. */
4248 mep_save_register_info (void)
4250 save_register_info ();
4254 mep_reinit_regs (void)
4260 mep_init_regs (void)
4268 mep_attrlist_to_encoding (tree list, tree decl)
4270 if (mep_multiple_address_regions (list, false) > 1)
4272 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4273 TREE_PURPOSE (TREE_CHAIN (list)),
4275 DECL_SOURCE_LINE (decl));
4276 TREE_CHAIN (list) = NULL_TREE;
4281 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4283 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4285 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4287 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4289 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4291 if (TREE_VALUE (list)
4292 && TREE_VALUE (TREE_VALUE (list))
4293 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4295 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4297 && location <= 0x1000000)
4302 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4304 list = TREE_CHAIN (list);
4307 && TREE_CODE (decl) == FUNCTION_DECL
4308 && DECL_SECTION_NAME (decl) == 0)
4314 mep_comp_type_attributes (const_tree t1, const_tree t2)
4318 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4319 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4328 mep_insert_attributes (tree decl, tree *attributes)
4331 const char *secname = 0;
4332 tree attrib, attrlist;
4335 if (TREE_CODE (decl) == FUNCTION_DECL)
4337 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4339 if (mep_lookup_pragma_disinterrupt (funcname))
4341 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4342 *attributes = chainon (*attributes, attrib);
4346 if (TREE_CODE (decl) != VAR_DECL
4347 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4350 if (TREE_READONLY (decl) && TARGET_DC)
4351 /* -mdc means that const variables default to the near section,
4352 regardless of the size cutoff. */
4355 /* User specified an attribute, so override the default.
4356 Ignore storage attribute of pointed to variable. char __far * x; */
4357 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4359 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4360 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4361 else if (DECL_ATTRIBUTES (decl) && *attributes)
4362 DECL_ATTRIBUTES (decl) = NULL_TREE;
4365 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4366 encoding = mep_attrlist_to_encoding (attrlist, decl);
4367 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4369 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4370 encoding = mep_attrlist_to_encoding (attrlist, decl);
4374 /* This means that the declaration has a specific section
4375 attribute, so we should not apply the default rules. */
4377 if (encoding == 'i' || encoding == 'I')
4379 tree attr = lookup_attribute ("io", attrlist);
4381 && TREE_VALUE (attr)
4382 && TREE_VALUE (TREE_VALUE(attr)))
4384 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4385 static tree previous_value = 0;
4386 static int previous_location = 0;
4387 static tree previous_name = 0;
4389 /* We take advantage of the fact that gcc will reuse the
4390 same tree pointer when applying an attribute to a
4391 list of decls, but produce a new tree for attributes
4392 on separate source lines, even when they're textually
4393 identical. This is the behavior we want. */
4394 if (TREE_VALUE (attr) == previous_value
4395 && location == previous_location)
4397 warning(0, "__io address 0x%x is the same for %qE and %qE",
4398 location, previous_name, DECL_NAME (decl));
4400 previous_name = DECL_NAME (decl);
4401 previous_location = location;
4402 previous_value = TREE_VALUE (attr);
4409 /* Declarations of arrays can change size. Don't trust them. */
4410 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4413 size = int_size_in_bytes (TREE_TYPE (decl));
4415 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4417 if (TREE_PUBLIC (decl)
4418 || DECL_EXTERNAL (decl)
4419 || TREE_STATIC (decl))
4421 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4445 if (size <= mep_based_cutoff && size > 0)
4447 else if (size <= mep_tiny_cutoff && size > 0)
4453 if (mep_const_section && TREE_READONLY (decl))
4455 if (strcmp (mep_const_section, "tiny") == 0)
4457 else if (strcmp (mep_const_section, "near") == 0)
4459 else if (strcmp (mep_const_section, "far") == 0)
4466 if (!mep_multiple_address_regions (*attributes, true)
4467 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4469 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4471 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4472 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4473 and mep_validate_based_tiny. */
4474 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4479 mep_encode_section_info (tree decl, rtx rtl, int first)
4482 const char *oldname;
4483 const char *secname;
4489 tree mep_attributes;
4494 if (TREE_CODE (decl) != VAR_DECL
4495 && TREE_CODE (decl) != FUNCTION_DECL)
4498 rtlname = XEXP (rtl, 0);
4499 if (GET_CODE (rtlname) == SYMBOL_REF)
4500 oldname = XSTR (rtlname, 0);
4501 else if (GET_CODE (rtlname) == MEM
4502 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4503 oldname = XSTR (XEXP (rtlname, 0), 0);
4507 type = TREE_TYPE (decl);
4508 if (type == error_mark_node)
4510 mep_attributes = MEP_ATTRIBUTES (decl);
4512 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
4516 newname = (char *) alloca (strlen (oldname) + 4);
4517 sprintf (newname, "@%c.%s", encoding, oldname);
4518 idp = get_identifier (newname);
4520 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4521 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4522 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4535 maxsize = 0x1000000;
4543 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4545 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4547 (long) int_size_in_bytes (TREE_TYPE (decl)),
4555 mep_strip_name_encoding (const char *sym)
4561 else if (*sym == '@' && sym[2] == '.')
4569 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4570 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4575 switch (TREE_CODE (decl))
4578 if (!TREE_READONLY (decl)
4579 || TREE_SIDE_EFFECTS (decl)
4580 || !DECL_INITIAL (decl)
4581 || (DECL_INITIAL (decl) != error_mark_node
4582 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4586 if (! TREE_CONSTANT (decl))
4594 if (TREE_CODE (decl) == FUNCTION_DECL)
4596 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4598 if (name[0] == '@' && name[2] == '.')
4603 if (flag_function_sections || DECL_ONE_ONLY (decl))
4604 mep_unique_section (decl, 0);
4605 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4607 if (encoding == 'f')
4608 return vftext_section;
4610 return vtext_section;
4612 else if (encoding == 'f')
4613 return ftext_section;
4615 return text_section;
4618 if (TREE_CODE (decl) == VAR_DECL)
4620 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4622 if (name[0] == '@' && name[2] == '.')
4626 return based_section;
4630 return srodata_section;
4631 if (DECL_INITIAL (decl))
4632 return sdata_section;
4633 return tinybss_section;
4637 return frodata_section;
4642 error_at (DECL_SOURCE_LOCATION (decl),
4643 "variable %D of type %<io%> must be uninitialized", decl);
4644 return data_section;
4647 error_at (DECL_SOURCE_LOCATION (decl),
4648 "variable %D of type %<cb%> must be uninitialized", decl);
4649 return data_section;
4654 return readonly_data_section;
4656 return data_section;
4660 mep_unique_section (tree decl, int reloc)
4662 static const char *prefixes[][2] =
4664 { ".text.", ".gnu.linkonce.t." },
4665 { ".rodata.", ".gnu.linkonce.r." },
4666 { ".data.", ".gnu.linkonce.d." },
4667 { ".based.", ".gnu.linkonce.based." },
4668 { ".sdata.", ".gnu.linkonce.s." },
4669 { ".far.", ".gnu.linkonce.far." },
4670 { ".ftext.", ".gnu.linkonce.ft." },
4671 { ".frodata.", ".gnu.linkonce.frd." },
4672 { ".srodata.", ".gnu.linkonce.srd." },
4673 { ".vtext.", ".gnu.linkonce.v." },
4674 { ".vftext.", ".gnu.linkonce.vf." }
4676 int sec = 2; /* .data */
4678 const char *name, *prefix;
4681 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4682 if (DECL_RTL (decl))
4683 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4685 if (TREE_CODE (decl) == FUNCTION_DECL)
4687 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4688 sec = 9; /* .vtext */
4690 sec = 0; /* .text */
4692 else if (decl_readonly_section (decl, reloc))
4693 sec = 1; /* .rodata */
4695 if (name[0] == '@' && name[2] == '.')
4700 sec = 3; /* .based */
4704 sec = 8; /* .srodata */
4706 sec = 4; /* .sdata */
4710 sec = 6; /* .ftext */
4712 sec = 10; /* .vftext */
4714 sec = 7; /* .frodata */
4716 sec = 5; /* .far. */
4722 prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
4723 len = strlen (name) + strlen (prefix);
4724 string = (char *) alloca (len + 1);
4726 sprintf (string, "%s%s", prefix, name);
4728 DECL_SECTION_NAME (decl) = build_string (len, string);
4731 /* Given a decl, a section name, and whether the decl initializer
4732 has relocs, choose attributes for the section. */
4734 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4737 mep_section_type_flags (tree decl, const char *name, int reloc)
4739 unsigned int flags = default_section_type_flags (decl, name, reloc);
4741 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4742 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4743 flags |= SECTION_MEP_VLIW;
4748 /* Switch to an arbitrary section NAME with attributes as specified
4749 by FLAGS. ALIGN specifies any known alignment requirements for
4750 the section; 0 if the default should be used.
4752 Differs from the standard ELF version only in support of VLIW mode. */
4755 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4757 char flagchars[8], *f = flagchars;
4760 if (!(flags & SECTION_DEBUG))
4762 if (flags & SECTION_WRITE)
4764 if (flags & SECTION_CODE)
4766 if (flags & SECTION_SMALL)
4768 if (flags & SECTION_MEP_VLIW)
4772 if (flags & SECTION_BSS)
4777 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4778 name, flagchars, type);
4780 if (flags & SECTION_CODE)
4781 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4786 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4787 int size, int align, int global)
4789 /* We intentionally don't use mep_section_tag() here. */
4791 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4795 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4796 DECL_ATTRIBUTES (decl));
4798 && TREE_VALUE (attr)
4799 && TREE_VALUE (TREE_VALUE(attr)))
4800 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4805 fprintf (stream, "\t.globl\t");
4806 assemble_name (stream, name);
4807 fprintf (stream, "\n");
4809 assemble_name (stream, name);
4810 fprintf (stream, " = %d\n", location);
4813 if (name[0] == '@' && name[2] == '.')
4815 const char *sec = 0;
4819 switch_to_section (based_section);
4823 switch_to_section (tinybss_section);
4827 switch_to_section (farbss_section);
4836 while (align > BITS_PER_UNIT)
4841 name2 = targetm.strip_name_encoding (name);
4843 fprintf (stream, "\t.globl\t%s\n", name2);
4844 fprintf (stream, "\t.p2align %d\n", p2align);
4845 fprintf (stream, "\t.type\t%s,@object\n", name2);
4846 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4847 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4854 fprintf (stream, "\t.local\t");
4855 assemble_name (stream, name);
4856 fprintf (stream, "\n");
4858 fprintf (stream, "\t.comm\t");
4859 assemble_name (stream, name);
4860 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4866 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4868 rtx addr = XEXP (m_tramp, 0);
4869 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4871 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4872 LCT_NORMAL, VOIDmode, 3,
4875 static_chain, Pmode);
4878 /* Experimental Reorg. */
4881 mep_mentioned_p (rtx in,
4882 rtx reg, /* NULL for mem */
4883 int modes_too) /* if nonzero, modes must match also. */
4891 if (reg && GET_CODE (reg) != REG)
4894 if (GET_CODE (in) == LABEL_REF)
4897 code = GET_CODE (in);
4903 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4909 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4911 return (REGNO (in) == REGNO (reg));
4924 /* Set's source should be read-only. */
4925 if (code == SET && !reg)
4926 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
4928 fmt = GET_RTX_FORMAT (code);
4930 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4935 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4936 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4939 else if (fmt[i] == 'e'
4940 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4946 #define EXPERIMENTAL_REGMOVE_REORG 1
4948 #if EXPERIMENTAL_REGMOVE_REORG
4951 mep_compatible_reg_class (int r1, int r2)
4953 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
4955 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
4961 mep_reorg_regmove (rtx insns)
4963 rtx insn, next, pat, follow, *where;
4964 int count = 0, done = 0, replace, before = 0;
4967 for (insn = insns; insn; insn = NEXT_INSN (insn))
4968 if (GET_CODE (insn) == INSN)
4971 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4972 set that uses the r2 and r2 dies there. We replace r2 with r1
4973 and see if it's still a valid insn. If so, delete the first set.
4974 Copied from reorg.c. */
4979 for (insn = insns; insn; insn = next)
4981 next = NEXT_INSN (insn);
4982 if (GET_CODE (insn) != INSN)
4984 pat = PATTERN (insn);
4988 if (GET_CODE (pat) == SET
4989 && GET_CODE (SET_SRC (pat)) == REG
4990 && GET_CODE (SET_DEST (pat)) == REG
4991 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4992 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4994 follow = next_nonnote_insn (insn);
4996 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4998 while (follow && GET_CODE (follow) == INSN
4999 && GET_CODE (PATTERN (follow)) == SET
5000 && !dead_or_set_p (follow, SET_SRC (pat))
5001 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
5002 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
5005 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
5006 follow = next_nonnote_insn (follow);
5010 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
5011 if (follow && GET_CODE (follow) == INSN
5012 && GET_CODE (PATTERN (follow)) == SET
5013 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
5015 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5017 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5020 where = & SET_SRC (PATTERN (follow));
5023 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5025 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5028 where = & PATTERN (follow);
5034 /* If so, follow is the corresponding insn */
5041 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5042 for (x = insn; x ;x = NEXT_INSN (x))
5044 print_rtl_single (dump_file, x);
5047 fprintf (dump_file, "\n");
5051 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5055 next = delete_insn (insn);
5058 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5059 print_rtl_single (dump_file, follow);
5069 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5070 fprintf (dump_file, "=====\n");
5076 /* Figure out where to put LABEL, which is the label for a repeat loop.
5077 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5078 the loop ends just before LAST_INSN. If SHARED, insns other than the
5079 "repeat" might use LABEL to jump to the loop's continuation point.
5081 Return the last instruction in the adjusted loop. */
5084 mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
5088 int count = 0, code, icode;
5091 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5092 INSN_UID (last_insn));
5094 /* Set PREV to the last insn in the loop. */
5097 prev = PREV_INSN (prev);
5099 /* Set NEXT to the next insn after the repeat label. */
5104 code = GET_CODE (prev);
5105 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
5110 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
5111 prev = XVECEXP (PATTERN (prev), 0, 1);
5113 /* Other insns that should not be in the last two opcodes. */
5114 icode = recog_memoized (prev);
5116 || icode == CODE_FOR_repeat
5117 || icode == CODE_FOR_erepeat
5118 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5121 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5122 is the second instruction in a VLIW bundle. In that case,
5123 loop again: if the first instruction also satisfies the
5124 conditions above then we will reach here again and put
5125 both of them into the repeat epilogue. Otherwise both
5126 should remain outside. */
5127 if (GET_MODE (prev) != BImode)
5132 print_rtl_single (dump_file, next);
5137 prev = PREV_INSN (prev);
5140 /* See if we're adding the label immediately after the repeat insn.
5141 If so, we need to separate them with a nop. */
5142 prev = prev_real_insn (next);
5144 switch (recog_memoized (prev))
5146 case CODE_FOR_repeat:
5147 case CODE_FOR_erepeat:
5149 fprintf (dump_file, "Adding nop inside loop\n");
5150 emit_insn_before (gen_nop (), next);
5157 /* Insert the label. */
5158 emit_label_before (label, next);
5160 /* Insert the nops. */
5161 if (dump_file && count < 2)
5162 fprintf (dump_file, "Adding %d nop%s\n\n",
5163 2 - count, count == 1 ? "" : "s");
5165 for (; count < 2; count++)
5167 last_insn = emit_insn_after (gen_nop (), last_insn);
5169 emit_insn_before (gen_nop (), last_insn);
5176 mep_emit_doloop (rtx *operands, int is_end)
5180 if (cfun->machine->doloop_tags == 0
5181 || cfun->machine->doloop_tag_from_end == is_end)
5183 cfun->machine->doloop_tags++;
5184 cfun->machine->doloop_tag_from_end = is_end;
5187 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5189 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5191 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5195 /* Code for converting doloop_begins and doloop_ends into valid
5196 MeP instructions. A doloop_begin is just a placeholder:
5198 $count = unspec ($count)
5200 where $count is initially the number of iterations - 1.
5201 doloop_end has the form:
5203 if ($count-- == 0) goto label
5205 The counter variable is private to the doloop insns, nothing else
5206 relies on its value.
5208 There are three cases, in decreasing order of preference:
5210 1. A loop has exactly one doloop_begin and one doloop_end.
5211 The doloop_end branches to the first instruction after
5214 In this case we can replace the doloop_begin with a repeat
5215 instruction and remove the doloop_end. I.e.:
5217 $count1 = unspec ($count1)
5222 if ($count2-- == 0) goto label
5226 repeat $count1,repeat_label
5234 2. As for (1), except there are several doloop_ends. One of them
5235 (call it X) falls through to a label L. All the others fall
5236 through to branches to L.
5238 In this case, we remove X and replace the other doloop_ends
5239 with branches to the repeat label. For example:
5241 $count1 = unspec ($count1)
5244 if ($count2-- == 0) goto label
5247 if ($count3-- == 0) goto label
5252 repeat $count1,repeat_label
5263 3. The fallback case. Replace doloop_begins with:
5267 Replace doloop_ends with the equivalent of:
5270 if ($count == 0) goto label
5272 Note that this might need a scratch register if $count
5273 is stored in memory. */
5275 /* A structure describing one doloop_begin. */
5276 struct mep_doloop_begin {
5277 /* The next doloop_begin with the same tag. */
5278 struct mep_doloop_begin *next;
5280 /* The instruction itself. */
5283 /* The initial counter value. This is known to be a general register. */
5287 /* A structure describing a doloop_end. */
5288 struct mep_doloop_end {
5289 /* The next doloop_end with the same loop tag. */
5290 struct mep_doloop_end *next;
5292 /* The instruction itself. */
5295 /* The first instruction after INSN when the branch isn't taken. */
5298 /* The location of the counter value. Since doloop_end_internal is a
5299 jump instruction, it has to allow the counter to be stored anywhere
5300 (any non-fixed register or memory location). */
5303 /* The target label (the place where the insn branches when the counter
5307 /* A scratch register. Only available when COUNTER isn't stored
5308 in a general register. */
5313 /* One do-while loop. */
5315 /* All the doloop_begins for this loop (in no particular order). */
5316 struct mep_doloop_begin *begin;
5318 /* All the doloop_ends. When there is more than one, arrange things
5319 so that the first one is the most likely to be X in case (2) above. */
5320 struct mep_doloop_end *end;
5324 /* Return true if LOOP can be converted into repeat/repeat_end form
5325 (that is, if it matches cases (1) or (2) above). */
5328 mep_repeat_loop_p (struct mep_doloop *loop)
5330 struct mep_doloop_end *end;
5333 /* There must be exactly one doloop_begin and at least one doloop_end. */
5334 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5337 /* The first doloop_end (X) must branch back to the insn after
5338 the doloop_begin. */
5339 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5342 /* All the other doloop_ends must branch to the same place as X.
5343 When the branch isn't taken, they must jump to the instruction
5345 fallthrough = loop->end->fallthrough;
5346 for (end = loop->end->next; end != 0; end = end->next)
5347 if (end->label != loop->end->label
5348 || !simplejump_p (end->fallthrough)
5349 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5356 /* The main repeat reorg function. See comment above for details. */
5359 mep_reorg_repeat (rtx insns)
5362 struct mep_doloop *loops, *loop;
5363 struct mep_doloop_begin *begin;
5364 struct mep_doloop_end *end;
5366 /* Quick exit if we haven't created any loops. */
5367 if (cfun->machine->doloop_tags == 0)
5370 /* Create an array of mep_doloop structures. */
5371 loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
5372 memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);
5374 /* Search the function for do-while insns and group them by loop tag. */
5375 for (insn = insns; insn; insn = NEXT_INSN (insn))
5377 switch (recog_memoized (insn))
5379 case CODE_FOR_doloop_begin_internal:
5380 insn_extract (insn);
5381 loop = &loops[INTVAL (recog_data.operand[2])];
5383 begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
5384 begin->next = loop->begin;
5386 begin->counter = recog_data.operand[0];
5388 loop->begin = begin;
5391 case CODE_FOR_doloop_end_internal:
5392 insn_extract (insn);
5393 loop = &loops[INTVAL (recog_data.operand[2])];
5395 end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
5397 end->fallthrough = next_real_insn (insn);
5398 end->counter = recog_data.operand[0];
5399 end->label = recog_data.operand[1];
5400 end->scratch = recog_data.operand[3];
5402 /* If this insn falls through to an unconditional jump,
5403 give it a lower priority than the others. */
5404 if (loop->end != 0 && simplejump_p (end->fallthrough))
5406 end->next = loop->end->next;
5407 loop->end->next = end;
5411 end->next = loop->end;
5417 /* Convert the insns for each loop in turn. */
5418 for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
5419 if (mep_repeat_loop_p (loop))
5421 /* Case (1) or (2). */
5422 rtx repeat_label, label_ref;
5424 /* Create a new label for the repeat insn. */
5425 repeat_label = gen_label_rtx ();
5427 /* Replace the doloop_begin with a repeat. */
5428 label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
5429 emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
5431 delete_insn (loop->begin->insn);
5433 /* Insert the repeat label before the first doloop_end.
5434 Fill the gap with nops if there are other doloop_ends. */
5435 mep_insert_repeat_label_last (loop->end->insn, repeat_label,
5436 false, loop->end->next != 0);
5438 /* Emit a repeat_end (to improve the readability of the output). */
5439 emit_insn_before (gen_repeat_end (), loop->end->insn);
5441 /* Delete the first doloop_end. */
5442 delete_insn (loop->end->insn);
5444 /* Replace the others with branches to REPEAT_LABEL. */
5445 for (end = loop->end->next; end != 0; end = end->next)
5447 emit_jump_insn_before (gen_jump (repeat_label), end->insn);
5448 delete_insn (end->insn);
5449 delete_insn (end->fallthrough);
5454 /* Case (3). First replace all the doloop_begins with increment
5456 for (begin = loop->begin; begin != 0; begin = begin->next)
5458 emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
5459 begin->counter, const1_rtx),
5461 delete_insn (begin->insn);
5464 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5465 for (end = loop->end; end != 0; end = end->next)
5471 /* Load the counter value into a general register. */
5473 if (!REG_P (reg) || REGNO (reg) > 15)
5476 emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
5479 /* Decrement the counter. */
5480 emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
5483 /* Copy it back to its original location. */
5484 if (reg != end->counter)
5485 emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));
5487 /* Jump back to the start label. */
5488 insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
5490 JUMP_LABEL (insn) = end->label;
5491 LABEL_NUSES (end->label)++;
5493 /* Emit the whole sequence before the doloop_end. */
5494 insn = get_insns ();
5496 emit_insn_before (insn, end->insn);
5498 /* Delete the doloop_end. */
5499 delete_insn (end->insn);
5506 mep_invertable_branch_p (rtx insn)
5509 enum rtx_code old_code;
5512 set = PATTERN (insn);
5513 if (GET_CODE (set) != SET)
5515 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5517 cond = XEXP (XEXP (set, 1), 0);
5518 old_code = GET_CODE (cond);
5522 PUT_CODE (cond, NE);
5525 PUT_CODE (cond, EQ);
5528 PUT_CODE (cond, GE);
5531 PUT_CODE (cond, LT);
5536 INSN_CODE (insn) = -1;
5537 i = recog_memoized (insn);
5538 PUT_CODE (cond, old_code);
5539 INSN_CODE (insn) = -1;
5544 mep_invert_branch (rtx insn, rtx after)
5546 rtx cond, set, label;
5549 set = PATTERN (insn);
5551 gcc_assert (GET_CODE (set) == SET);
5552 gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);
5554 cond = XEXP (XEXP (set, 1), 0);
5555 switch (GET_CODE (cond))
5558 PUT_CODE (cond, NE);
5561 PUT_CODE (cond, EQ);
5564 PUT_CODE (cond, GE);
5567 PUT_CODE (cond, LT);
5572 label = gen_label_rtx ();
5573 emit_label_after (label, after);
5574 for (i=1; i<=2; i++)
5575 if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
5577 rtx ref = XEXP (XEXP (set, 1), i);
5578 if (LABEL_NUSES (XEXP (ref, 0)) == 1)
5579 delete_insn (XEXP (ref, 0));
5580 XEXP (ref, 0) = label;
5581 LABEL_NUSES (label) ++;
5582 JUMP_LABEL (insn) = label;
5584 INSN_CODE (insn) = -1;
5585 i = recog_memoized (insn);
5586 gcc_assert (i >= 0);
5590 mep_reorg_erepeat (rtx insns)
5592 rtx insn, prev, l, x;
5595 for (insn = insns; insn; insn = NEXT_INSN (insn))
5597 && ! JUMP_TABLE_DATA_P (insn)
5598 && mep_invertable_branch_p (insn))
5602 fprintf (dump_file, "\n------------------------------\n");
5603 fprintf (dump_file, "erepeat: considering this jump:\n");
5604 print_rtl_single (dump_file, insn);
5606 count = simplejump_p (insn) ? 0 : 1;
5607 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
5609 if (GET_CODE (prev) == CALL_INSN
5610 || BARRIER_P (prev))
5613 if (prev == JUMP_LABEL (insn))
5617 fprintf (dump_file, "found loop top, %d insns\n", count);
5619 if (LABEL_NUSES (prev) == 1)
5620 /* We're the only user, always safe */ ;
5621 else if (LABEL_NUSES (prev) == 2)
5623 /* See if there's a barrier before this label. If
5624 so, we know nobody inside the loop uses it.
5625 But we must be careful to put the erepeat
5626 *after* the label. */
5628 for (barrier = PREV_INSN (prev);
5629 barrier && GET_CODE (barrier) == NOTE;
5630 barrier = PREV_INSN (barrier))
5632 if (barrier && GET_CODE (barrier) != BARRIER)
5637 /* We don't know who else, within or without our loop, uses this */
5639 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5643 /* Generate a label to be used by the erepat insn. */
5644 l = gen_label_rtx ();
5646 /* Insert the erepeat after INSN's target label. */
5647 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5649 emit_insn_after (x, prev);
5651 /* Insert the erepeat label. */
5652 newlast = (mep_insert_repeat_label_last
5653 (insn, l, !simplejump_p (insn), false));
5654 if (simplejump_p (insn))
5656 emit_insn_before (gen_erepeat_end (), insn);
5661 mep_invert_branch (insn, newlast);
5662 emit_insn_after (gen_erepeat_end (), newlast);
5669 /* A label is OK if there is exactly one user, and we
5670 can find that user before the next label. */
5673 if (LABEL_NUSES (prev) == 1)
5675 for (user = PREV_INSN (prev);
5676 user && (INSN_P (user) || GET_CODE (user) == NOTE);
5677 user = PREV_INSN (user))
5678 if (GET_CODE (user) == JUMP_INSN
5679 && JUMP_LABEL (user) == prev)
5681 safe = INSN_UID (user);
5688 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5689 safe, INSN_UID (prev));
5699 fprintf (dump_file, "\n==============================\n");
5702 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5703 always do this on its own. */
5706 mep_jmp_return_reorg (rtx insns)
5708 rtx insn, label, ret;
5711 for (insn = insns; insn; insn = NEXT_INSN (insn))
5712 if (simplejump_p (insn))
5714 /* Find the fist real insn the jump jumps to. */
5715 label = ret = JUMP_LABEL (insn);
5717 && (GET_CODE (ret) == NOTE
5718 || GET_CODE (ret) == CODE_LABEL
5719 || GET_CODE (PATTERN (ret)) == USE))
5720 ret = NEXT_INSN (ret);
5724 /* Is it a return? */
5725 ret_code = recog_memoized (ret);
5726 if (ret_code == CODE_FOR_return_internal
5727 || ret_code == CODE_FOR_eh_return_internal)
5729 /* It is. Replace the jump with a return. */
5730 LABEL_NUSES (label) --;
5731 if (LABEL_NUSES (label) == 0)
5732 delete_insn (label);
5733 PATTERN (insn) = copy_rtx (PATTERN (ret));
5734 INSN_CODE (insn) = -1;
5742 mep_reorg_addcombine (rtx insns)
5746 for (i = insns; i; i = NEXT_INSN (i))
5748 && INSN_CODE (i) == CODE_FOR_addsi3
5749 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5750 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5751 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5752 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5756 && INSN_CODE (n) == CODE_FOR_addsi3
5757 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5758 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5759 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5760 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5762 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5763 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5764 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5766 && ic + nc > -32768)
5768 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5769 NEXT_INSN (i) = NEXT_INSN (n);
5771 PREV_INSN (NEXT_INSN (i)) = i;
5777 /* If this insn adjusts the stack, return the adjustment, else return
5780 add_sp_insn_p (rtx insn)
5784 if (! single_set (insn))
5786 pat = PATTERN (insn);
5787 if (GET_CODE (SET_DEST (pat)) != REG)
5789 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5791 if (GET_CODE (SET_SRC (pat)) != PLUS)
5793 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5795 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5797 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5799 return INTVAL (XEXP (SET_SRC (pat), 1));
5802 /* Check for trivial functions that set up an unneeded stack
5805 mep_reorg_noframe (rtx insns)
5807 rtx start_frame_insn;
5808 rtx end_frame_insn = 0;
5812 /* The first insn should be $sp = $sp + N */
5813 while (insns && ! INSN_P (insns))
5814 insns = NEXT_INSN (insns);
5818 sp_adjust = add_sp_insn_p (insns);
5822 start_frame_insn = insns;
5823 sp = SET_DEST (PATTERN (start_frame_insn));
5825 insns = next_real_insn (insns);
5829 rtx next = next_real_insn (insns);
5833 sp2 = add_sp_insn_p (insns);
5838 end_frame_insn = insns;
5839 if (sp2 != -sp_adjust)
5842 else if (mep_mentioned_p (insns, sp, 0))
5844 else if (CALL_P (insns))
5852 delete_insn (start_frame_insn);
5853 delete_insn (end_frame_insn);
5860 rtx insns = get_insns ();
5862 /* We require accurate REG_DEAD notes. */
5863 compute_bb_for_insn ();
5864 df_note_add_problem ();
5867 mep_reorg_addcombine (insns);
5868 #if EXPERIMENTAL_REGMOVE_REORG
5869 /* VLIW packing has been done already, so we can't just delete things. */
5870 if (!mep_vliw_function_p (cfun->decl))
5871 mep_reorg_regmove (insns);
5873 mep_jmp_return_reorg (insns);
5874 mep_bundle_insns (insns);
5875 mep_reorg_repeat (insns);
5878 && !profile_arc_flag
5879 && TARGET_OPT_REPEAT
5880 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
5881 mep_reorg_erepeat (insns);
5883 /* This may delete *insns so make sure it's last. */
5884 mep_reorg_noframe (insns);
5886 df_finish_pass (false);
5891 /*----------------------------------------------------------------------*/
5893 /*----------------------------------------------------------------------*/
5895 /* Element X gives the index into cgen_insns[] of the most general
5896 implementation of intrinsic X. Unimplemented intrinsics are
5898 int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];
5900 /* Element X gives the index of another instruction that is mapped to
5901 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5904 Things are set up so that mep_intrinsic_chain[X] < X. */
5905 static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];
5907 /* The bitmask for the current ISA. The ISA masks are declared
5909 unsigned int mep_selected_isa;
5912 const char *config_name;
5916 static struct mep_config mep_configs[] = {
5917 #ifdef COPROC_SELECTION_TABLE
5918 COPROC_SELECTION_TABLE,
5923 /* Initialize the global intrinsics variables above. */
5926 mep_init_intrinsics (void)
5930 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5931 mep_selected_isa = mep_configs[0].isa;
5932 if (mep_config_string != 0)
5933 for (i = 0; mep_configs[i].config_name; i++)
5934 if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
5936 mep_selected_isa = mep_configs[i].isa;
5940 /* Assume all intrinsics are unavailable. */
5941 for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
5942 mep_intrinsic_insn[i] = -1;
5944 /* Build up the global intrinsic tables. */
5945 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5946 if ((cgen_insns[i].isas & mep_selected_isa) != 0)
5948 mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
5949 mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
5951 /* See whether we can directly move values between one coprocessor
5952 register and another. */
5953 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
5954 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
5955 mep_have_copro_copro_moves_p = true;
5957 /* See whether we can directly move values between core and
5958 coprocessor registers. */
5959 mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
5960 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));
5962 mep_have_core_copro_moves_p = 1;
5965 /* Declare all available intrinsic functions. Called once only. */
5967 static tree cp_data_bus_int_type_node;
5968 static tree opaque_vector_type_node;
5969 static tree v8qi_type_node;
5970 static tree v4hi_type_node;
5971 static tree v2si_type_node;
5972 static tree v8uqi_type_node;
5973 static tree v4uhi_type_node;
5974 static tree v2usi_type_node;
5977 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5981 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5982 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5983 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5984 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5985 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5986 case cgen_regnum_operand_type_CHAR: return char_type_node;
5987 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5988 case cgen_regnum_operand_type_SI: return intSI_type_node;
5989 case cgen_regnum_operand_type_DI: return intDI_type_node;
5990 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5991 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5992 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5993 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5994 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5995 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5996 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5997 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5999 return void_type_node;
6004 mep_init_builtins (void)
6008 if (TARGET_64BIT_CR_REGS)
6009 cp_data_bus_int_type_node = long_long_integer_type_node;
6011 cp_data_bus_int_type_node = long_integer_type_node;
6013 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
6014 v8qi_type_node = build_vector_type (intQI_type_node, 8);
6015 v4hi_type_node = build_vector_type (intHI_type_node, 4);
6016 v2si_type_node = build_vector_type (intSI_type_node, 2);
6017 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
6018 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
6019 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
6021 (*lang_hooks.decls.pushdecl)
6022 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
6023 cp_data_bus_int_type_node));
6025 (*lang_hooks.decls.pushdecl)
6026 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
6027 opaque_vector_type_node));
6029 (*lang_hooks.decls.pushdecl)
6030 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
6032 (*lang_hooks.decls.pushdecl)
6033 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
6035 (*lang_hooks.decls.pushdecl)
6036 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
6039 (*lang_hooks.decls.pushdecl)
6040 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
6042 (*lang_hooks.decls.pushdecl)
6043 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
6045 (*lang_hooks.decls.pushdecl)
6046 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
6049 /* Intrinsics like mep_cadd3 are implemented with two groups of
6050 instructions, one which uses UNSPECs and one which uses a specific
6051 rtl code such as PLUS. Instructions in the latter group belong
6052 to GROUP_KNOWN_CODE.
6054 In such cases, the intrinsic will have two entries in the global
6055 tables above. The unspec form is accessed using builtin functions
6056 while the specific form is accessed using the mep_* enum in
6059 The idea is that __cop arithmetic and builtin functions have
6060 different optimization requirements. If mep_cadd3() appears in
6061 the source code, the user will surely except gcc to use cadd3
6062 rather than a work-alike such as add3. However, if the user
6063 just writes "a + b", where a or b are __cop variables, it is
6064 reasonable for gcc to choose a core instruction rather than
6065 cadd3 if it believes that is more optimal. */
6066 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6067 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
6068 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
6070 tree ret_type = void_type_node;
6073 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
6076 if (cgen_insns[i].cret_p)
6077 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
6079 bi_type = build_function_type (ret_type, 0);
6080 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
6082 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
6086 /* Report the unavailablity of the given intrinsic. */
6090 mep_intrinsic_unavailable (int intrinsic)
6092 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6094 if (already_reported_p[intrinsic])
6097 if (mep_intrinsic_insn[intrinsic] < 0)
6098 error ("coprocessor intrinsic %qs is not available in this configuration",
6099 cgen_intrinsics[intrinsic]);
6100 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6101 error ("%qs is not available in VLIW functions",
6102 cgen_intrinsics[intrinsic]);
6104 error ("%qs is not available in non-VLIW functions",
6105 cgen_intrinsics[intrinsic]);
6107 already_reported_p[intrinsic] = 1;
6112 /* See if any implementation of INTRINSIC is available to the
6113 current function. If so, store the most general implementation
6114 in *INSN_PTR and return true. Return false otherwise. */
6117 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6121 i = mep_intrinsic_insn[intrinsic];
6122 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6123 i = mep_intrinsic_chain[i];
6127 *insn_ptr = &cgen_insns[i];
6134 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6135 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6136 try using a work-alike instead. In this case, the returned insn
6137 may have three operands rather than two. */
6140 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6144 if (intrinsic == mep_cmov)
6146 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6147 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6151 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6155 /* If ARG is a register operand that is the same size as MODE, convert it
6156 to MODE using a subreg. Otherwise return ARG as-is. */
6159 mep_convert_arg (enum machine_mode mode, rtx arg)
6161 if (GET_MODE (arg) != mode
6162 && register_operand (arg, VOIDmode)
6163 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6164 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6169 /* Apply regnum conversions to ARG using the description given by REGNUM.
6170 Return the new argument on success and null on failure. */
6173 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6175 if (regnum->count == 0)
6178 if (GET_CODE (arg) != CONST_INT
6180 || INTVAL (arg) >= regnum->count)
6183 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6187 /* Try to make intrinsic argument ARG match the given operand.
6188 UNSIGNED_P is true if the argument has an unsigned type. */
6191 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6194 if (GET_CODE (arg) == CONST_INT)
6196 /* CONST_INTs can only be bound to integer operands. */
6197 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6200 else if (GET_CODE (arg) == CONST_DOUBLE)
6201 /* These hold vector constants. */;
6202 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6204 /* If the argument is a different size from what's expected, we must
6205 have a value in the right mode class in order to convert it. */
6206 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6209 /* If the operand is an rvalue, promote or demote it to match the
6210 operand's size. This might not need extra instructions when
6211 ARG is a register value. */
6212 if (operand->constraint[0] != '=')
6213 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6216 /* If the operand is an lvalue, bind the operand to a new register.
6217 The caller will copy this value into ARG after the main
6218 instruction. By doing this always, we produce slightly more
6220 /* But not for control registers. */
6221 if (operand->constraint[0] == '='
6223 || ! (CONTROL_REGNO_P (REGNO (arg))
6224 || CCR_REGNO_P (REGNO (arg))
6225 || CR_REGNO_P (REGNO (arg)))
6227 return gen_reg_rtx (operand->mode);
6229 /* Try simple mode punning. */
6230 arg = mep_convert_arg (operand->mode, arg);
6231 if (operand->predicate (arg, operand->mode))
6234 /* See if forcing the argument into a register will make it match. */
6235 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6236 arg = force_reg (operand->mode, arg);
6238 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6239 if (operand->predicate (arg, operand->mode))
6246 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6247 function FNNAME. OPERAND describes the operand to which ARGNUM
6251 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6252 int argnum, tree fnname)
6256 if (GET_CODE (arg) == CONST_INT)
6257 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6258 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6260 const struct cgen_immediate_predicate *predicate;
6261 HOST_WIDE_INT argval;
6263 predicate = &cgen_immediate_predicates[i];
6264 argval = INTVAL (arg);
6265 if (argval < predicate->lower || argval >= predicate->upper)
6266 error ("argument %d of %qE must be in the range %d...%d",
6267 argnum, fnname, predicate->lower, predicate->upper - 1);
6269 error ("argument %d of %qE must be a multiple of %d",
6270 argnum, fnname, predicate->align);
6274 error ("incompatible type for argument %d of %qE", argnum, fnname);
6278 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6279 rtx subtarget ATTRIBUTE_UNUSED,
6280 enum machine_mode mode ATTRIBUTE_UNUSED,
6281 int ignore ATTRIBUTE_UNUSED)
6283 rtx pat, op[10], arg[10];
6285 int opindex, unsigned_p[10];
6287 unsigned int n_args;
6289 const struct cgen_insn *cgen_insn;
6290 const struct insn_data_d *idata;
6291 unsigned int first_arg = 0;
6292 unsigned int builtin_n_args;
6294 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6295 fnname = DECL_NAME (fndecl);
6297 /* Find out which instruction we should emit. Note that some coprocessor
6298 intrinsics may only be available in VLIW mode, or only in normal mode. */
6299 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6301 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6304 idata = &insn_data[cgen_insn->icode];
6306 builtin_n_args = cgen_insn->num_args;
6308 if (cgen_insn->cret_p)
6310 if (cgen_insn->cret_p > 1)
6313 mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6317 /* Evaluate each argument. */
6318 n_args = call_expr_nargs (exp);
6320 if (n_args < builtin_n_args)
6322 error ("too few arguments to %qE", fnname);
6325 if (n_args > builtin_n_args)
6327 error ("too many arguments to %qE", fnname);
6331 for (a = first_arg; a < builtin_n_args + first_arg; a++)
6335 args = CALL_EXPR_ARG (exp, a - first_arg);
6340 if (cgen_insn->regnums[a].reference_p)
6342 if (TREE_CODE (value) != ADDR_EXPR)
6345 error ("argument %d of %qE must be an address", a+1, fnname);
6348 value = TREE_OPERAND (value, 0);
6352 /* If the argument has been promoted to int, get the unpromoted
6353 value. This is necessary when sub-int memory values are bound
6354 to reference parameters. */
6355 if (TREE_CODE (value) == NOP_EXPR
6356 && TREE_TYPE (value) == integer_type_node
6357 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6358 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6359 < TYPE_PRECISION (TREE_TYPE (value))))
6360 value = TREE_OPERAND (value, 0);
6362 /* If the argument has been promoted to double, get the unpromoted
6363 SFmode value. This is necessary for FMAX support, for example. */
6364 if (TREE_CODE (value) == NOP_EXPR
6365 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6366 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6367 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6368 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6369 value = TREE_OPERAND (value, 0);
6371 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6372 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6373 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6374 if (cgen_insn->regnums[a].reference_p)
6376 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6377 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6379 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6383 error ("argument %d of %qE must be in the range %d...%d",
6384 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6389 for (a = 0; a < first_arg; a++)
6391 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6394 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6397 /* Convert the arguments into a form suitable for the intrinsic.
6398 Report an error if this isn't possible. */
6399 for (opindex = 0; opindex < idata->n_operands; opindex++)
6401 a = cgen_insn->op_mapping[opindex];
6402 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6403 arg[a], unsigned_p[a]);
6404 if (op[opindex] == 0)
6406 mep_incompatible_arg (&idata->operand[opindex],
6407 arg[a], a + 1 - first_arg, fnname);
6412 /* Emit the instruction. */
6413 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6414 op[5], op[6], op[7], op[8], op[9]);
6416 if (GET_CODE (pat) == SET
6417 && GET_CODE (SET_DEST (pat)) == PC
6418 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6419 emit_jump_insn (pat);
6423 /* Copy lvalues back to their final locations. */
6424 for (opindex = 0; opindex < idata->n_operands; opindex++)
6425 if (idata->operand[opindex].constraint[0] == '=')
6427 a = cgen_insn->op_mapping[opindex];
6430 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6431 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6432 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6436 /* First convert the operand to the right mode, then copy it
6437 into the destination. Doing the conversion as a separate
6438 step (rather than using convert_move) means that we can
6439 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6440 refer to the same register. */
6441 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6442 op[opindex], unsigned_p[a]);
6443 if (!rtx_equal_p (arg[a], op[opindex]))
6444 emit_move_insn (arg[a], op[opindex]);
6449 if (first_arg > 0 && target && target != op[0])
6451 emit_move_insn (target, op[0]);
6458 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6463 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6464 a global register. */
6467 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6475 switch (GET_CODE (x))
6478 if (REG_P (SUBREG_REG (x)))
6480 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6481 && global_regs[subreg_regno (x)])
6489 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6503 /* A non-constant call might use a global register. */
6513 /* Returns nonzero if X mentions a global register. */
6516 global_reg_mentioned_p (rtx x)
6522 if (! RTL_CONST_OR_PURE_CALL_P (x))
6524 x = CALL_INSN_FUNCTION_USAGE (x);
6532 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6534 /* Scheduling hooks for VLIW mode.
6536 Conceptually this is very simple: we have a two-pack architecture
6537 that takes one core insn and one coprocessor insn to make up either
6538 a 32- or 64-bit instruction word (depending on the option bit set in
6539 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6540 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6541 and one 48-bit cop insn or two 32-bit core/cop insns.
6543 In practice, instruction selection will be a bear. Consider in
6544 VL64 mode the following insns
6549 these cannot pack, since the add is a 16-bit core insn and cmov
6550 is a 32-bit cop insn. However,
6555 packs just fine. For good VLIW code generation in VL64 mode, we
6556 will have to have 32-bit alternatives for many of the common core
6557 insns. Not implemented. */
6560 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6564 if (REG_NOTE_KIND (link) != 0)
6566 /* See whether INSN and DEP_INSN are intrinsics that set the same
6567 hard register. If so, it is more important to free up DEP_INSN
6568 than it is to free up INSN.
6570 Note that intrinsics like mep_mulr are handled differently from
6571 the equivalent mep.md patterns. In mep.md, if we don't care
6572 about the value of $lo and $hi, the pattern will just clobber
6573 the registers, not set them. Since clobbers don't count as
6574 output dependencies, it is often possible to reorder two mulrs,
6577 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6578 so any pair of mep_mulr()s will be inter-dependent. We should
6579 therefore give the first mep_mulr() a higher priority. */
6580 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6581 && global_reg_mentioned_p (PATTERN (insn))
6582 && global_reg_mentioned_p (PATTERN (dep_insn)))
6585 /* If the dependence is an anti or output dependence, assume it
6590 /* If we can't recognize the insns, we can't really do anything. */
6591 if (recog_memoized (dep_insn) < 0)
6594 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6595 attribute instead. */
6598 cost_specified = get_attr_latency (dep_insn);
6599 if (cost_specified != 0)
6600 return cost_specified;
6606 /* ??? We don't properly compute the length of a load/store insn,
6607 taking into account the addressing mode. */
6610 mep_issue_rate (void)
6612 return TARGET_IVC2 ? 3 : 2;
6615 /* Return true if function DECL was declared with the vliw attribute. */
6618 mep_vliw_function_p (tree decl)
6620 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6624 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6628 for (i = nready - 1; i >= 0; --i)
6630 rtx insn = ready[i];
6631 if (recog_memoized (insn) >= 0
6632 && get_attr_slot (insn) == slot
6633 && get_attr_length (insn) == length)
6641 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6645 for (i = 0; i < nready; ++i)
6646 if (ready[i] == insn)
6648 for (; i < nready - 1; ++i)
6649 ready[i] = ready[i + 1];
6658 mep_print_sched_insn (FILE *dump, rtx insn)
6660 const char *slots = "none";
6661 const char *name = NULL;
6665 if (GET_CODE (PATTERN (insn)) == SET
6666 || GET_CODE (PATTERN (insn)) == PARALLEL)
6668 switch (get_attr_slots (insn))
6670 case SLOTS_CORE: slots = "core"; break;
6671 case SLOTS_C3: slots = "c3"; break;
6672 case SLOTS_P0: slots = "p0"; break;
6673 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6674 case SLOTS_P0_P1: slots = "p0,p1"; break;
6675 case SLOTS_P0S: slots = "p0s"; break;
6676 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6677 case SLOTS_P1: slots = "p1"; break;
6679 sprintf(buf, "%d", get_attr_slots (insn));
6684 if (GET_CODE (PATTERN (insn)) == USE)
6687 code = INSN_CODE (insn);
6689 name = get_insn_name (code);
6694 "insn %4d %4d %8s %s\n",
6702 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6703 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6704 int *pnready, int clock ATTRIBUTE_UNUSED)
6706 int nready = *pnready;
6707 rtx core_insn, cop_insn;
6710 if (dump && sched_verbose > 1)
6712 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6713 for (i=0; i<nready; i++)
6714 mep_print_sched_insn (dump, ready[i]);
6715 fprintf (dump, "\n");
6718 if (!mep_vliw_function_p (cfun->decl))
6723 /* IVC2 uses a DFA to determine what's ready and what's not. */
6727 /* We can issue either a core or coprocessor instruction.
6728 Look for a matched pair of insns to reorder. If we don't
6729 find any, don't second-guess the scheduler's priorities. */
6731 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6732 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6733 TARGET_OPT_VL64 ? 6 : 2)))
6735 else if (TARGET_OPT_VL64
6736 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6737 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6740 /* We didn't find a pair. Issue the single insn at the head
6741 of the ready list. */
6744 /* Reorder the two insns first. */
6745 mep_move_ready_insn (ready, nready, core_insn);
6746 mep_move_ready_insn (ready, nready - 1, cop_insn);
6750 /* A for_each_rtx callback. Return true if *X is a register that is
6751 set by insn PREV. */
6754 mep_store_find_set (rtx *x, void *prev)
6756 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6759 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6760 not the containing insn. */
6763 mep_store_data_bypass_1 (rtx prev, rtx pat)
6765 /* Cope with intrinsics like swcpa. */
6766 if (GET_CODE (pat) == PARALLEL)
6770 for (i = 0; i < XVECLEN (pat, 0); i++)
6771 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6777 /* Check for some sort of store. */
6778 if (GET_CODE (pat) != SET
6779 || GET_CODE (SET_DEST (pat)) != MEM)
6782 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6783 The first operand to the unspec is the store data and the other operands
6784 are used to calculate the address. */
6785 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6790 src = SET_SRC (pat);
6791 for (i = 1; i < XVECLEN (src, 0); i++)
6792 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6798 /* Otherwise just check that PREV doesn't modify any register mentioned
6799 in the memory destination. */
6800 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6803 /* Return true if INSN is a store instruction and if the store address
6804 has no true dependence on PREV. */
6807 mep_store_data_bypass_p (rtx prev, rtx insn)
6809 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6812 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6813 is a register other than LO or HI and if PREV sets *X. */
6816 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6819 && REGNO (*x) != LO_REGNO
6820 && REGNO (*x) != HI_REGNO
6821 && reg_set_p (*x, (const_rtx) prev));
6824 /* Return true if, apart from HI/LO, there are no true dependencies
6825 between multiplication instructions PREV and INSN. */
6828 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6832 pat = PATTERN (insn);
6833 if (GET_CODE (pat) == PARALLEL)
6834 pat = XVECEXP (pat, 0, 0);
6835 return (GET_CODE (pat) == SET
6836 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6839 /* Return true if INSN is an ldc instruction that issues to the
6840 MeP-h1 integer pipeline. This is true for instructions that
6841 read from PSW, LP, SAR, HI and LO. */
6844 mep_ipipe_ldc_p (rtx insn)
6848 pat = PATTERN (insn);
6850 /* Cope with instrinsics that set both a hard register and its shadow.
6851 The set of the hard register comes first. */
6852 if (GET_CODE (pat) == PARALLEL)
6853 pat = XVECEXP (pat, 0, 0);
6855 if (GET_CODE (pat) == SET)
6857 src = SET_SRC (pat);
6859 /* Cope with intrinsics. The first operand to the unspec is
6860 the source register. */
6861 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6862 src = XVECEXP (src, 0, 0);
6865 switch (REGNO (src))
6878 /* Create a VLIW bundle from core instruction CORE and coprocessor
6879 instruction COP. COP always satisfies INSN_P, but CORE can be
6880 either a new pattern or an existing instruction.
6882 Emit the bundle in place of COP and return it. */
6885 mep_make_bundle (rtx core, rtx cop)
6889 /* If CORE is an existing instruction, remove it, otherwise put
6890 the new pattern in an INSN harness. */
6894 core = make_insn_raw (core);
6896 /* Generate the bundle sequence and replace COP with it. */
6897 insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6898 insn = emit_insn_after (insn, cop);
6901 /* Set up the links of the insns inside the SEQUENCE. */
6902 PREV_INSN (core) = PREV_INSN (insn);
6903 NEXT_INSN (core) = cop;
6904 PREV_INSN (cop) = core;
6905 NEXT_INSN (cop) = NEXT_INSN (insn);
6907 /* Set the VLIW flag for the coprocessor instruction. */
6908 PUT_MODE (core, VOIDmode);
6909 PUT_MODE (cop, BImode);
6911 /* Derive a location for the bundle. Individual instructions cannot
6912 have their own location because there can be no assembler labels
6913 between CORE and COP. */
6914 INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
6915 INSN_LOCATOR (core) = 0;
6916 INSN_LOCATOR (cop) = 0;
6921 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6924 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6926 rtx * pinsn = (rtx *) data;
6928 if (*pinsn && reg_mentioned_p (x, *pinsn))
6932 /* Return true if anything in insn X is (anti,output,true) dependent on
6933 anything in insn Y. */
6936 mep_insn_dependent_p (rtx x, rtx y)
6940 gcc_assert (INSN_P (x));
6941 gcc_assert (INSN_P (y));
6944 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6945 if (tmp == NULL_RTX)
6949 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6950 if (tmp == NULL_RTX)
6957 core_insn_p (rtx insn)
6959 if (GET_CODE (PATTERN (insn)) == USE)
6961 if (get_attr_slot (insn) == SLOT_CORE)
6966 /* Mark coprocessor instructions that can be bundled together with
6967 the immediately preceeding core instruction. This is later used
6968 to emit the "+" that tells the assembler to create a VLIW insn.
6970 For unbundled insns, the assembler will automatically add coprocessor
6971 nops, and 16-bit core nops. Due to an apparent oversight in the
6972 spec, the assembler will _not_ automatically add 32-bit core nops,
6973 so we have to emit those here.
6975 Called from mep_insn_reorg. */
6978 mep_bundle_insns (rtx insns)
6980 rtx insn, last = NULL_RTX, first = NULL_RTX;
6981 int saw_scheduling = 0;
6983 /* Only do bundling if we're in vliw mode. */
6984 if (!mep_vliw_function_p (cfun->decl))
6987 /* The first insn in a bundle are TImode, the remainder are
6988 VOIDmode. After this function, the first has VOIDmode and the
6989 rest have BImode. */
6991 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6993 /* First, move any NOTEs that are within a bundle, to the beginning
6995 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6997 if (NOTE_P (insn) && first)
6998 /* Don't clear FIRST. */;
7000 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
7003 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
7007 /* INSN is part of a bundle; FIRST is the first insn in that
7008 bundle. Move all intervening notes out of the bundle.
7009 In addition, since the debug pass may insert a label
7010 whenever the current line changes, set the location info
7011 for INSN to match FIRST. */
7013 INSN_LOCATOR (insn) = INSN_LOCATOR (first);
7015 note = PREV_INSN (insn);
7016 while (note && note != first)
7018 prev = PREV_INSN (note);
7022 /* Remove NOTE from here... */
7023 PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
7024 NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
7025 /* ...and put it in here. */
7026 NEXT_INSN (note) = first;
7027 PREV_INSN (note) = PREV_INSN (first);
7028 NEXT_INSN (PREV_INSN (note)) = note;
7029 PREV_INSN (NEXT_INSN (note)) = note;
7036 else if (!NONJUMP_INSN_P (insn))
7040 /* Now fix up the bundles. */
7041 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7046 if (!NONJUMP_INSN_P (insn))
7052 /* If we're not optimizing enough, there won't be scheduling
7053 info. We detect that here. */
7054 if (GET_MODE (insn) == TImode)
7056 if (!saw_scheduling)
7061 rtx core_insn = NULL_RTX;
7063 /* IVC2 slots are scheduled by DFA, so we just accept
7064 whatever the scheduler gives us. However, we must make
7065 sure the core insn (if any) is the first in the bundle.
7066 The IVC2 assembler can insert whatever NOPs are needed,
7067 and allows a COP insn to be first. */
7069 if (NONJUMP_INSN_P (insn)
7070 && GET_CODE (PATTERN (insn)) != USE
7071 && GET_MODE (insn) == TImode)
7075 && GET_MODE (NEXT_INSN (last)) == VOIDmode
7076 && NONJUMP_INSN_P (NEXT_INSN (last));
7077 last = NEXT_INSN (last))
7079 if (core_insn_p (last))
7082 if (core_insn_p (last))
7085 if (core_insn && core_insn != insn)
7087 /* Swap core insn to first in the bundle. */
7089 /* Remove core insn. */
7090 if (PREV_INSN (core_insn))
7091 NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7092 if (NEXT_INSN (core_insn))
7093 PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7095 /* Re-insert core insn. */
7096 PREV_INSN (core_insn) = PREV_INSN (insn);
7097 NEXT_INSN (core_insn) = insn;
7099 if (PREV_INSN (core_insn))
7100 NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7101 PREV_INSN (insn) = core_insn;
7103 PUT_MODE (core_insn, TImode);
7104 PUT_MODE (insn, VOIDmode);
7108 /* The first insn has TImode, the rest have VOIDmode */
7109 if (GET_MODE (insn) == TImode)
7110 PUT_MODE (insn, VOIDmode);
7112 PUT_MODE (insn, BImode);
7116 PUT_MODE (insn, VOIDmode);
7117 if (recog_memoized (insn) >= 0
7118 && get_attr_slot (insn) == SLOT_COP)
7120 if (GET_CODE (insn) == JUMP_INSN
7122 || recog_memoized (last) < 0
7123 || get_attr_slot (last) != SLOT_CORE
7124 || (get_attr_length (insn)
7125 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7126 || mep_insn_dependent_p (insn, last))
7128 switch (get_attr_length (insn))
7133 insn = mep_make_bundle (gen_nop (), insn);
7136 if (TARGET_OPT_VL64)
7137 insn = mep_make_bundle (gen_nop32 (), insn);
7140 if (TARGET_OPT_VL64)
7141 error ("2 byte cop instructions are"
7142 " not allowed in 64-bit VLIW mode");
7144 insn = mep_make_bundle (gen_nop (), insn);
7147 error ("unexpected %d byte cop instruction",
7148 get_attr_length (insn));
7153 insn = mep_make_bundle (last, insn);
7161 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7162 Return true on success. This function can fail if the intrinsic
7163 is unavailable or if the operands don't satisfy their predicates. */
7166 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7168 const struct cgen_insn *cgen_insn;
7169 const struct insn_data_d *idata;
7173 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7176 idata = &insn_data[cgen_insn->icode];
7177 for (i = 0; i < idata->n_operands; i++)
7179 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7180 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7184 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7185 newop[3], newop[4], newop[5],
7186 newop[6], newop[7], newop[8]));
7192 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7193 OPERANDS[0]. Report an error if the instruction could not
7194 be synthesized. OPERANDS[1] is a register_operand. For sign
7195 and zero extensions, it may be smaller than SImode. */
7198 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7199 rtx * operands ATTRIBUTE_UNUSED)
7205 /* Likewise, but apply a binary operation to OPERANDS[1] and
7206 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7207 can be a general_operand.
7209 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7210 third operand. REG and REG3 take register operands only. */
7213 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7214 int ATTRIBUTE_UNUSED immediate3,
7215 int ATTRIBUTE_UNUSED reg,
7216 int ATTRIBUTE_UNUSED reg3,
7217 rtx * operands ATTRIBUTE_UNUSED)
7223 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
7228 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7230 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7237 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7241 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7243 : COSTS_N_INSNS (2));
7250 mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
7256 mep_handle_option (size_t code,
7257 const char *arg ATTRIBUTE_UNUSED,
7258 int value ATTRIBUTE_UNUSED)
7265 target_flags |= MEP_ALL_OPTS;
7269 target_flags &= ~ MEP_ALL_OPTS;
7273 target_flags |= MASK_COP;
7274 target_flags |= MASK_64BIT_CR_REGS;
7278 option_mtiny_specified = 1;
7281 target_flags |= MASK_COP;
7282 target_flags |= MASK_64BIT_CR_REGS;
7283 target_flags |= MASK_VLIW;
7284 target_flags |= MASK_OPT_VL64;
7285 target_flags |= MASK_IVC2;
7287 for (i=0; i<32; i++)
7288 fixed_regs[i+48] = 0;
7289 for (i=0; i<32; i++)
7290 call_used_regs[i+48] = 1;
7292 call_used_regs[i+48] = 0;
7294 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7331 mep_asm_init_sections (void)
7334 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7335 "\t.section .based,\"aw\"");
7338 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7339 "\t.section .sbss,\"aw\"");
7342 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7343 "\t.section .sdata,\"aw\",@progbits");
7346 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7347 "\t.section .far,\"aw\"");
7350 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7351 "\t.section .farbss,\"aw\"");
7354 = get_unnamed_section (0, output_section_asm_op,
7355 "\t.section .frodata,\"a\"");
7358 = get_unnamed_section (0, output_section_asm_op,
7359 "\t.section .srodata,\"a\"");
7362 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7363 "\t.section .vtext,\"axv\"\n\t.vliw");
7366 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7367 "\t.section .vftext,\"axv\"\n\t.vliw");
7370 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7371 "\t.section .ftext,\"ax\"\n\t.core");
7375 /* Initialize the GCC target structure. */
7377 #undef TARGET_ASM_FUNCTION_PROLOGUE
7378 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7379 #undef TARGET_ATTRIBUTE_TABLE
7380 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7381 #undef TARGET_COMP_TYPE_ATTRIBUTES
7382 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7383 #undef TARGET_INSERT_ATTRIBUTES
7384 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7385 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7386 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7387 #undef TARGET_CAN_INLINE_P
7388 #define TARGET_CAN_INLINE_P mep_can_inline_p
7389 #undef TARGET_SECTION_TYPE_FLAGS
7390 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7391 #undef TARGET_ASM_NAMED_SECTION
7392 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
7393 #undef TARGET_INIT_BUILTINS
7394 #define TARGET_INIT_BUILTINS mep_init_builtins
7395 #undef TARGET_EXPAND_BUILTIN
7396 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
7397 #undef TARGET_SCHED_ADJUST_COST
7398 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7399 #undef TARGET_SCHED_ISSUE_RATE
7400 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7401 #undef TARGET_SCHED_REORDER
7402 #define TARGET_SCHED_REORDER mep_sched_reorder
7403 #undef TARGET_STRIP_NAME_ENCODING
7404 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7405 #undef TARGET_ASM_SELECT_SECTION
7406 #define TARGET_ASM_SELECT_SECTION mep_select_section
7407 #undef TARGET_ASM_UNIQUE_SECTION
7408 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7409 #undef TARGET_ENCODE_SECTION_INFO
7410 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
7411 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
7412 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7413 #undef TARGET_RTX_COSTS
7414 #define TARGET_RTX_COSTS mep_rtx_cost
7415 #undef TARGET_ADDRESS_COST
7416 #define TARGET_ADDRESS_COST mep_address_cost
7417 #undef TARGET_MACHINE_DEPENDENT_REORG
7418 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
7419 #undef TARGET_SETUP_INCOMING_VARARGS
7420 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7421 #undef TARGET_PASS_BY_REFERENCE
7422 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
7423 #undef TARGET_FUNCTION_ARG
7424 #define TARGET_FUNCTION_ARG mep_function_arg
7425 #undef TARGET_FUNCTION_ARG_ADVANCE
7426 #define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
7427 #undef TARGET_VECTOR_MODE_SUPPORTED_P
7428 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
7429 #undef TARGET_HANDLE_OPTION
7430 #define TARGET_HANDLE_OPTION mep_handle_option
7431 #undef TARGET_OPTION_OVERRIDE
7432 #define TARGET_OPTION_OVERRIDE mep_option_override
7433 #undef TARGET_OPTION_OPTIMIZATION_TABLE
7434 #define TARGET_OPTION_OPTIMIZATION_TABLE mep_option_optimization_table
7435 #undef TARGET_DEFAULT_TARGET_FLAGS
7436 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
7437 #undef TARGET_ALLOCATE_INITIAL_VALUE
7438 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7439 #undef TARGET_ASM_INIT_SECTIONS
7440 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7441 #undef TARGET_RETURN_IN_MEMORY
7442 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7443 #undef TARGET_NARROW_VOLATILE_BITFIELD
7444 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7445 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
7446 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7447 #undef TARGET_BUILD_BUILTIN_VA_LIST
7448 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7449 #undef TARGET_EXPAND_BUILTIN_VA_START
7450 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7451 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
7452 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
7453 #undef TARGET_CAN_ELIMINATE
7454 #define TARGET_CAN_ELIMINATE mep_can_eliminate
7455 #undef TARGET_TRAMPOLINE_INIT
7456 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
7458 struct gcc_target targetm = TARGET_INITIALIZER;