1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
46 #include "diagnostic-core.h"
48 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
55 /* Structure of this file:
57 + Command Line Option Support
58 + Pattern support - constraints, predicates, expanders
61 + Functions to save and restore machine-specific function data.
62 + Frame/Epilog/Prolog Related
64 + Function args in registers
65 + Handle pipeline hazards
68 + Machine-dependent Reorg
73 Symbols are encoded as @ <char> . <name> where <char> is one of these:
81 c - cb (control bus) */
/* Per-function, garbage-collected machine-specific state.
   NOTE(review): this listing is elided (the embedded source line numbers
   jump), so several fields, the braces, and trailing members are not
   visible here.  */
83 struct GTY(()) machine_function
85 int mep_frame_pointer_needed;
93 /* Records __builtin_return address. */
/* Per-hard-register save metadata used by the prologue/epilogue code:
   the stack slot assigned to each register and whether it is saved.  */
97 int reg_save_slot[FIRST_PSEUDO_REGISTER];
98 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
100 /* 2 if the current function has an interrupt attribute, 1 if not, 0
101 if unknown. This is here because resource.c uses EPILOGUE_USES
103 int interrupt_handler;
105 /* Likewise, for disinterrupt attribute. */
106 int disable_interrupts;
108 /* Number of doloop tags used so far. */
111 /* True if the last tag was allocated to a doloop_end. */
112 bool doloop_tag_from_end;
114 /* True if reload changes $TP. */
115 bool reload_changes_tp;
117 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
118 We only set this if the function is an interrupt handler. */
119 int asms_without_operands;
/* True if X is a hard REG in one of the MeP control-register classes.  */
122 #define MEP_CONTROL_REG(x) \
123 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
/* GC-rooted handles for the MeP-specific output sections (based, tiny
   bss, far, vliw text, etc.), initialized lazily elsewhere in the file
   (presumably by mep_asm_init_sections — not visible in this listing).  */
125 static GTY(()) section * based_section;
126 static GTY(()) section * tinybss_section;
127 static GTY(()) section * far_section;
128 static GTY(()) section * farbss_section;
129 static GTY(()) section * frodata_section;
130 static GTY(()) section * srodata_section;
132 static GTY(()) section * vtext_section;
133 static GTY(()) section * vftext_section;
134 static GTY(()) section * ftext_section;
/* Forward declarations for the file-local helpers and target hooks
   defined below.  NOTE(review): some prototypes here are visibly
   truncated mid-parameter-list because this listing is elided.  */
136 static void mep_set_leaf_registers (int);
137 static bool symbol_p (rtx);
138 static bool symbolref_p (rtx);
139 static void encode_pattern_1 (rtx);
140 static void encode_pattern (rtx);
141 static bool const_in_range (rtx, int, int);
142 static void mep_rewrite_mult (rtx, rtx);
143 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
144 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
145 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
146 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
147 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
148 static bool mep_nongeneral_reg (rtx);
149 static bool mep_general_copro_reg (rtx);
150 static bool mep_nonregister (rtx);
151 static struct machine_function* mep_init_machine_status (void);
152 static rtx mep_tp_rtx (void);
153 static rtx mep_gp_rtx (void);
154 static bool mep_interrupt_p (void);
155 static bool mep_disinterrupt_p (void);
156 static bool mep_reg_set_p (rtx, rtx);
157 static bool mep_reg_set_in_function (int);
158 static bool mep_interrupt_saved_reg (int);
159 static bool mep_call_saves_register (int);
161 static void add_constant (int, int, int, int);
162 static rtx maybe_dead_move (rtx, rtx, bool);
163 static void mep_reload_pointer (int, const char *);
164 static void mep_start_function (FILE *, HOST_WIDE_INT);
165 static bool mep_function_ok_for_sibcall (tree, tree);
166 static int unique_bit_in (HOST_WIDE_INT);
167 static int bit_size_for_clip (HOST_WIDE_INT);
168 static int bytesize (const_tree, enum machine_mode);
169 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
170 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
171 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
172 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
173 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
174 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
175 static bool mep_function_attribute_inlinable_p (const_tree);
176 static bool mep_can_inline_p (tree, tree);
177 static bool mep_lookup_pragma_disinterrupt (const char *);
178 static int mep_multiple_address_regions (tree, bool);
179 static int mep_attrlist_to_encoding (tree, tree);
180 static void mep_insert_attributes (tree, tree *);
181 static void mep_encode_section_info (tree, rtx, int);
182 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
183 static void mep_unique_section (tree, int);
184 static unsigned int mep_section_type_flags (tree, const char *, int);
185 static void mep_asm_named_section (const char *, unsigned int, tree);
186 static bool mep_mentioned_p (rtx, rtx, int);
187 static void mep_reorg_regmove (rtx);
188 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
189 static void mep_reorg_repeat (rtx);
190 static bool mep_invertable_branch_p (rtx);
191 static void mep_invert_branch (rtx, rtx);
192 static void mep_reorg_erepeat (rtx);
193 static void mep_jmp_return_reorg (rtx);
194 static void mep_reorg_addcombine (rtx);
195 static void mep_reorg (void);
196 static void mep_init_intrinsics (void);
197 static void mep_init_builtins (void);
198 static void mep_intrinsic_unavailable (int);
199 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
200 static bool mep_get_move_insn (int, const struct cgen_insn **);
201 static rtx mep_convert_arg (enum machine_mode, rtx);
202 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
203 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
204 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
205 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
206 static int mep_adjust_cost (rtx, rtx, rtx, int);
207 static int mep_issue_rate (void);
208 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
209 static void mep_move_ready_insn (rtx *, int, rtx);
210 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
211 static rtx mep_make_bundle (rtx, rtx);
212 static void mep_bundle_insns (rtx);
213 static bool mep_rtx_cost (rtx, int, int, int *, bool);
214 static int mep_address_cost (rtx, bool);
215 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
217 static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
219 static rtx mep_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
221 static void mep_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
223 static bool mep_vector_mode_supported_p (enum machine_mode);
224 static bool mep_handle_option (size_t, const char *, int);
225 static rtx mep_allocate_initial_value (rtx);
226 static void mep_asm_init_sections (void);
227 static int mep_comp_type_attributes (const_tree, const_tree);
228 static bool mep_narrow_volatile_bitfield (void);
229 static rtx mep_expand_builtin_saveregs (void);
230 static tree mep_build_builtin_va_list (void);
231 static void mep_expand_va_start (tree, rtx);
232 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
233 static bool mep_can_eliminate (const int, const int);
234 static void mep_trampoline_init (rtx, tree, rtx);
/* Pull in the generated intrinsic tables/definitions for the compiler
   side (as opposed to the runtime side) of mep-intrin.h.  */
236 #define WANT_GCC_DEFINITIONS
237 #include "mep-intrin.h"
238 #undef WANT_GCC_DEFINITIONS
241 /* Command Line Option Support. */
/* Nonzero entries mark registers usable by leaf functions; all entries
   are kept in lockstep (see mep_set_leaf_registers below).  */
243 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
245 /* True if we can use cmov instructions to move values back and forth
246 between core and coprocessor registers. */
247 bool mep_have_core_copro_moves_p;
249 /* True if we can use cmov instructions (or a work-alike) to move
250 values between coprocessor registers. */
251 bool mep_have_copro_copro_moves_p;
253 /* A table of all coprocessor instructions that can act like
254 a coprocessor-to-coprocessor cmov. */
255 static const int mep_cmov_insns[] = {
/* Set when the user passed -mtiny= explicitly, so -ms/-mm conflicts can
   be diagnosed in mep_option_override.  */
268 static int option_mtiny_specified = 0;
/* Set every entry of mep_leaf_registers to ENABLE; entry 0 is used as
   the "already set" sentinel to avoid rewriting the table needlessly.
   NOTE(review): the return type, braces and the declaration of `i` are
   elided from this listing.  */
272 mep_set_leaf_registers (int enable)
276 if (mep_leaf_registers[0] != enable)
277 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
278 mep_leaf_registers[i] = enable;
/* Adjust fixed/call-used register sets for the selected options: when
   neither hardware multiply nor divide is enabled, $hi/$lo are never
   used and are made fixed.  NOTE(review): braces and surrounding lines
   are elided from this listing.  */
282 mep_conditional_register_usage (void)
286 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
288 fixed_regs[HI_REGNO] = 1;
289 fixed_regs[LO_REGNO] = 1;
290 call_used_regs[HI_REGNO] = 1;
291 call_used_regs[LO_REGNO] = 1;
294 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
/* Per-optimization-level default option tweaks for this target.  */
299 static const struct default_options mep_option_optimization_table[] =
301 /* The first scheduling pass often increases register pressure and
302 tends to result in more spill code. Only run it when
303 specifically asked. */
304 { OPT_LEVELS_ALL, OPT_fschedule_insns, NULL, 0 },
306 /* Using $fp doesn't gain us much, even when debugging is
308 { OPT_LEVELS_ALL, OPT_fomit_frame_pointer, NULL, 1 },
310 { OPT_LEVELS_NONE, 0, NULL, 0 }
/* Validate the command-line option combination: PIC is unsupported, the
   -ms/-mm/-ml/-mtiny= memory-model options are mutually exclusive, and
   -mc= takes only tiny/near/far.  Also finalizes derived state
   (tiny cutoff, machine_function init hook, intrinsics tables).  */
314 mep_option_override (void)
317 warning (OPT_fpic, "-fpic is not supported");
319 warning (OPT_fPIC, "-fPIC is not supported");
320 if (TARGET_S && TARGET_M)
321 error ("only one of -ms and -mm may be given");
322 if (TARGET_S && TARGET_L)
323 error ("only one of -ms and -ml may be given");
324 if (TARGET_M && TARGET_L)
325 error ("only one of -mm and -ml may be given");
326 if (TARGET_S && option_mtiny_specified)
327 error ("only one of -ms and -mtiny= may be given");
328 if (TARGET_M && option_mtiny_specified)
329 error ("only one of -mm and -mtiny= may be given");
330 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
331 warning (0, "-mclip currently has no effect without -mminmax");
333 if (mep_const_section)
335 if (strcmp (mep_const_section, "tiny") != 0
336 && strcmp (mep_const_section, "near") != 0
337 && strcmp (mep_const_section, "far") != 0)
338 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
342 mep_tiny_cutoff = 65536;
345 if (TARGET_L && ! option_mtiny_specified)
/* 64-bit coprocessor registers must not be split into word pairs.  */
348 if (TARGET_64BIT_CR_REGS)
349 flag_split_wide_types = 0;
351 init_machine_status = mep_init_machine_status;
352 mep_init_intrinsics ();
355 /* Pattern Support - constraints, predicates, expanders. */
357 /* MEP has very few instructions that can refer to the span of
358 addresses used by symbols, so it's common to check for them. */
/* symbol_p fragment: true if X is a constant/symbolic operand.
   NOTE(review): the function header and part of the return expression
   are elided from this listing.  */
363 int c = GET_CODE (x);
365 return (c == CONST_INT
/* symbolref_p fragment: like symbol_p, but for a MEM whose address is
   symbolic.  Header and remainder elided from this listing.  */
375 if (GET_CODE (x) != MEM)
378 c = GET_CODE (XEXP (x, 0));
379 return (c == CONST_INT
384 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
/* True if R is usable as a base/general register, honoring STRICT
   checking.  Part of the expansion is elided from this listing.  */
386 #define GEN_REG(R, STRICT) \
389 && ((R) == ARG_POINTER_REGNUM \
390 || (R) >= FIRST_PSEUDO_REGISTER)))
/* Scratch state for encode_pattern: a small string summarizing the
   shape of an rtx, plus the corresponding sub-rtxes, matched with
   RTX_IS.  */
392 static char pattern[12], *patternp;
393 static GTY(()) rtx patternr[12];
394 #define RTX_IS(x) (strcmp (pattern, x) == 0)
/* Recursively append a one-character code per rtx to `pattern` and
   record each visited rtx in `patternr`.  NOTE(review): the case labels
   and appended characters for most codes are elided from this
   listing.  */
397 encode_pattern_1 (rtx x)
401 if (patternp == pattern + sizeof (pattern) - 2)
407 patternr[patternp-pattern] = x;
409 switch (GET_CODE (x))
417 encode_pattern_1 (XEXP(x, 0));
421 encode_pattern_1 (XEXP(x, 0));
422 encode_pattern_1 (XEXP(x, 1));
426 encode_pattern_1 (XEXP(x, 0));
427 encode_pattern_1 (XEXP(x, 1));
431 encode_pattern_1 (XEXP(x, 0));
445 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
446 for (i=0; i<XVECLEN (x, 0); i++)
447 encode_pattern_1 (XVECEXP (x, 0, i));
455 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
/* Reset the pattern buffer and encode X into it (see encode_pattern_1).
   NOTE(review): the reset of `patternp` and the terminating NUL are
   elided from this listing.  */
464 encode_pattern (rtx x)
467 encode_pattern_1 (x);
/* Return the section-encoding character ('b', 't', 'n', 'f', 'i', 'c',
   per the table in the file header) for symbolic operand X, looking
   through CONST/UNSPEC/PLUS wrappers.  Elided lines include the default
   returns.  */
472 mep_section_tag (rtx x)
478 switch (GET_CODE (x))
485 x = XVECEXP (x, 0, 0);
488 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
497 if (GET_CODE (x) != SYMBOL_REF)
/* `name` is presumably XSTR (x, 0) set on an elided line; the '@c.name'
   encoding is produced by mep_encode_section_info.  */
500 if (name[0] == '@' && name[2] == '.')
502 if (name[1] == 'i' || name[1] == 'I')
505 return 'f'; /* near */
506 return 'n'; /* far */
/* Map a hard register number to its (smallest useful) register class.  */
514 mep_regno_reg_class (int regno)
518 case SP_REGNO: return SP_REGS;
519 case TP_REGNO: return TP_REGS;
520 case GP_REGNO: return GP_REGS;
521 case 0: return R0_REGS;
522 case HI_REGNO: return HI_REGS;
523 case LO_REGNO: return LO_REGS;
524 case ARG_POINTER_REGNUM: return GENERAL_REGS;
527 if (GR_REGNO_P (regno))
528 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
529 if (CONTROL_REGNO_P (regno))
532 if (CR_REGNO_P (regno))
536 /* Search for the register amongst user-defined subclasses of
537 the coprocessor registers. */
538 for (i = USER0_REGS; i <= USER3_REGS; ++i)
540 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
542 for (j = 0; j < N_REG_CLASSES; ++j)
544 enum reg_class sub = reg_class_subclasses[i][j];
546 if (sub == LIM_REG_CLASSES)
548 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
553 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
556 if (CCR_REGNO_P (regno))
559 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
/* Map a register-constraint letter (and its string tail) to a register
   class; coprocessor classes degrade to NO_REGS when the required cmov
   support is absent.  Most cases are elided from this listing.  */
565 mep_reg_class_from_constraint (int c, const char *str)
582 return LOADABLE_CR_REGS;
584 return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
586 return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
613 enum reg_class which = c - 'A' + USER0_REGS;
614 return (reg_class_size[which] > 0 ? which : NO_REGS);
/* Integer-constant constraint letters: I=s16, J=u16, K=u24, L=s6,
   M=u5, N=u4; the final case accepts any 32-bit value.  */
623 mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
627 case 'I': return value >= -32768 && value < 32768;
628 case 'J': return value >= 0 && value < 65536;
629 case 'K': return value >= 0 && value < 0x01000000;
630 case 'L': return value >= -32 && value < 32;
631 case 'M': return value >= 0 && value < 32;
632 case 'N': return value >= 0 && value < 16;
/* -2147483647-1 avoids the C constant-parsing pitfall with INT32_MIN.  */
636 return value >= -2147483647-1 && value <= 2147483647;
/* Extra (non-register, non-integer) constraint letters, matched against
   the encode_pattern summary of VALUE.  The letters themselves are on
   elided case labels in this listing.  */
643 mep_extra_constraint (rtx value, int c)
645 encode_pattern (value);
650 /* For near symbols, like what call uses. */
651 if (GET_CODE (value) == REG)
653 return mep_call_address_operand (value, GET_MODE (value));
656 /* For signed 8-bit immediates. */
657 return (GET_CODE (value) == CONST_INT
658 && INTVAL (value) >= -128
659 && INTVAL (value) <= 127);
662 /* For tp/gp relative symbol values. */
663 return (RTX_IS ("u3s") || RTX_IS ("u2s")
664 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
667 /* Non-absolute memories. */
668 return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
672 return RTX_IS ("Hs");
675 /* Register indirect. */
676 return RTX_IS ("mr");
679 return mep_section_tag (value) == 'c' && RTX_IS ("ms");
/* True if X is a CONST_INT within [MINV, MAXV] inclusive.  */
690 const_in_range (rtx x, int minv, int maxv)
692 return (GET_CODE (x) == CONST_INT
693 && INTVAL (x) >= minv
694 && INTVAL (x) <= maxv);
697 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
698 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
699 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
700 at the end of the insn stream. */
703 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
705 if (rtx_equal_p (dest, src1))
707 else if (rtx_equal_p (dest, src2))
/* Neither source equals DEST: copy SRC1 into DEST first (the elided
   code presumably returns SRC2 as the mulr operand).  */
712 emit_insn (gen_movsi (copy_rtx (dest), src1));
714 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
719 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
720 Change the last element of PATTERN from (clobber (scratch:SI))
721 to (clobber (reg:SI HI_REGNO)). */
724 mep_rewrite_mult (rtx insn, rtx pattern)
728 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
729 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
730 PATTERN (insn) = pattern;
/* Force re-recognition of the rewritten insn.  */
731 INSN_CODE (insn) = -1;
734 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
735 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
736 store the result in DEST if nonnull. */
739 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
743 lo = gen_rtx_REG (SImode, LO_REGNO);
745 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
746 mep_mulr_source (insn, dest, src1, src2));
748 pattern = gen_mulsi3_lo (lo, src1, src2);
749 mep_rewrite_mult (insn, pattern);
752 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
753 SRC3 into $lo, then use either madd or maddr. The move into $lo will
754 be deleted by a peephole2 if SRC3 is already in $lo. */
757 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
761 lo = gen_rtx_REG (SImode, LO_REGNO);
762 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
764 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
765 mep_mulr_source (insn, dest, src1, src2),
768 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
769 mep_rewrite_mult (insn, pattern);
772 /* Return true if $lo has the same value as integer register GPR when
773 instruction INSN is reached. If necessary, rewrite the instruction
774 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
775 rtx for (reg:SI LO_REGNO).
777 This function is intended to be used by the peephole2 pass. Since
778 that pass goes from the end of a basic block to the beginning, and
779 propagates liveness information on the way, there is no need to
780 update register notes here.
782 If GPR_DEAD_P is true on entry, and this function returns true,
783 then the caller will replace _every_ use of GPR in and after INSN
784 with LO. This means that if the instruction that sets $lo is a
785 mulr- or maddr-type instruction, we can rewrite it to use mul or
786 madd instead. In combination with the copy progagation pass,
787 this allows us to replace sequences like:
796 if GPR is no longer used. */
799 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
/* Walk backwards from INSN to the start of the basic block, looking
   for the instruction that set $lo (loop head elided in this
   listing).  */
803 insn = PREV_INSN (insn);
805 switch (recog_memoized (insn))
807 case CODE_FOR_mulsi3_1:
809 if (rtx_equal_p (recog_data.operand[0], gpr))
811 mep_rewrite_mulsi3 (insn,
812 gpr_dead_p ? NULL : recog_data.operand[0],
813 recog_data.operand[1],
814 recog_data.operand[2]);
819 case CODE_FOR_maddsi3:
821 if (rtx_equal_p (recog_data.operand[0], gpr))
823 mep_rewrite_maddsi3 (insn,
824 gpr_dead_p ? NULL : recog_data.operand[0],
825 recog_data.operand[1],
826 recog_data.operand[2],
827 recog_data.operand[3]);
832 case CODE_FOR_mulsi3r:
833 case CODE_FOR_maddsi3r:
835 return rtx_equal_p (recog_data.operand[1], gpr);
/* Any intervening insn that clobbers $lo or GPR, or is volatile,
   invalidates the reuse.  */
838 if (reg_set_p (lo, insn)
839 || reg_set_p (gpr, insn)
840 || volatile_insn_p (PATTERN (insn)))
843 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
848 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
852 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
855 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
857 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
862 /* Return true if SET can be turned into a post-modify load or store
863 that adds OFFSET to GPR. In other words, return true if SET can be
866 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
868 It's OK to change SET to an equivalent operation in order to
872 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
875 unsigned int reg_bytes, mem_bytes;
876 enum machine_mode reg_mode, mem_mode;
878 /* Only simple SETs can be converted. */
879 if (GET_CODE (set) != SET)
882 /* Point REG to what we hope will be the register side of the set and
883 MEM to what we hope will be the memory side. */
884 if (GET_CODE (SET_DEST (set)) == MEM)
886 mem = &SET_DEST (set);
887 reg = &SET_SRC (set);
891 reg = &SET_DEST (set);
892 mem = &SET_SRC (set);
893 if (GET_CODE (*mem) == SIGN_EXTEND)
894 mem = &XEXP (*mem, 0);
897 /* Check that *REG is a suitable coprocessor register. */
898 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
901 /* Check that *MEM is a suitable memory reference. */
902 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
905 /* Get the number of bytes in each operand. */
906 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
907 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
909 /* Check that OFFSET is suitably aligned. */
910 if (INTVAL (offset) & (mem_bytes - 1))
913 /* Convert *MEM to a normal integer mode. */
914 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
915 *mem = change_address (*mem, mem_mode, NULL);
917 /* Adjust *REG as well. */
918 *reg = shallow_copy_rtx (*reg);
919 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
921 /* SET is a subword load. Convert it to an explicit extension. */
922 PUT_MODE (*reg, SImode);
923 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
927 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
928 PUT_MODE (*reg, reg_mode);
933 /* Return the effect of frame-related instruction INSN. */
936 mep_frame_expr (rtx insn)
940 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
941 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
942 RTX_FRAME_RELATED_P (expr) = 1;
946 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
947 new pattern in INSN1; INSN2 will be deleted by the caller. */
950 mep_make_parallel (rtx insn1, rtx insn2)
/* Preserve frame-related bookkeeping when either insn carries it.  */
954 if (RTX_FRAME_RELATED_P (insn2))
956 expr = mep_frame_expr (insn2);
957 if (RTX_FRAME_RELATED_P (insn1))
958 expr = gen_rtx_SEQUENCE (VOIDmode,
959 gen_rtvec (2, mep_frame_expr (insn1), expr));
960 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
961 RTX_FRAME_RELATED_P (insn1) = 1;
964 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
965 gen_rtvec (2, PATTERN (insn1),
/* Force re-recognition of the merged insn.  */
967 INSN_CODE (insn1) = -1;
970 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
971 the basic block to see if any previous load or store instruction can
972 be persuaded to do SET_INSN as a side-effect. Return true if so. */
975 mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
982 insn = PREV_INSN (insn);
985 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
987 mep_make_parallel (insn, set_insn);
/* Stop at any insn that sets or uses REG, or is volatile; the
   post-modify cannot be hoisted past it.  */
991 if (reg_set_p (reg, insn)
992 || reg_referenced_p (reg, PATTERN (insn))
993 || volatile_insn_p (PATTERN (insn)))
997 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
1001 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1004 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1006 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1007 extract_insn (insn);
/* Decide whether a clip instruction can implement a min/max against the
   bounds UX (upper) and LX (lower); S presumably selects the signed
   form.  Only power-of-two-aligned ranges qualify.  */
1012 mep_allow_clip (rtx ux, rtx lx, int s)
1014 HOST_WIDE_INT u = INTVAL (ux);
1015 HOST_WIDE_INT l = INTVAL (lx);
1018 if (!TARGET_OPT_CLIP)
/* Signed range: [-2^i, 2^i - 1].  */
1023 for (i = 0; i < 30; i ++)
1024 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1025 && (l == - ((HOST_WIDE_INT) 1 << i)))
/* Unsigned range: [0 (checked on an elided line), 2^i - 1].  */
1033 for (i = 0; i < 30; i ++)
1034 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
/* True if X is a CONST_INT whose low byte has exactly one bit set
   (LOOKING_FOR true) or exactly one bit clear (LOOKING_FOR false) —
   used by the bit-manipulation insn predicates.  */
1041 mep_bit_position_p (rtx x, bool looking_for)
1043 if (GET_CODE (x) != CONST_INT)
1045 switch ((int) INTVAL(x) & 0xff)
1047 case 0x01: case 0x02: case 0x04: case 0x08:
1048 case 0x10: case 0x20: case 0x40: case 0x80:
1050 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1051 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1052 return !looking_for;
/* True if a (symbolic or large-constant) move from SRC to DEST must be
   split into a movh/add-style two-insn sequence.  */
1058 move_needs_splitting (rtx dest, rtx src,
1059 enum machine_mode mode ATTRIBUTE_UNUSED)
1061 int s = mep_section_tag (src);
1065 if (GET_CODE (src) == CONST
1066 || GET_CODE (src) == MEM)
1067 src = XEXP (src, 0);
1068 else if (GET_CODE (src) == SYMBOL_REF
1069 || GET_CODE (src) == LABEL_REF
1070 || GET_CODE (src) == PLUS)
1076 || (GET_CODE (src) == PLUS
1077 && GET_CODE (XEXP (src, 1)) == CONST_INT
1078 && (INTVAL (XEXP (src, 1)) < -65536
1079 || INTVAL (XEXP (src, 1)) > 0xffffff))
1080 || (GET_CODE (dest) == REG
1081 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
/* Return nonzero if the mov in OPERANDS should be split (constants that
   fit the I/J/O constraints, or K into a low register, stay whole).  */
1087 mep_split_mov (rtx *operands, int symbolic)
1091 if (move_needs_splitting (operands[0], operands[1], SImode))
1096 if (GET_CODE (operands[1]) != CONST_INT)
1099 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1100 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1101 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1104 if (((!reload_completed && !reload_in_progress)
1105 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1106 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1112 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1113 it to one specific value. So the insn chosen depends on whether
1114 the source and destination modes match. */
1117 mep_vliw_mode_match (rtx tgt)
1119 bool src_vliw = mep_vliw_function_p (cfun->decl);
1120 bool tgt_vliw = INTVAL (tgt);
1122 return src_vliw == tgt_vliw;
1125 /* Like the above, but also test for near/far mismatches. */
1128 mep_vliw_jmp_match (rtx tgt)
1130 bool src_vliw = mep_vliw_function_p (cfun->decl);
1131 bool tgt_vliw = INTVAL (tgt);
/* A far-section caller cannot use the short jmp form (elided return
   presumably false here).  */
1133 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1136 return src_vliw == tgt_vliw;
/* True if insn X occupies the full (multi-slot) VLIW bundle.  */
1140 mep_multi_slot (rtx x)
1142 return get_attr_slot (x) == SLOT_MULTI;
/* Target hook: whether X is a legitimate constant operand.  */
1147 mep_legitimate_constant_p (rtx x)
1149 /* We can't convert symbol values to gp- or tp-rel values after
1150 reload, as reload might have used $gp or $tp for other
1152 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1154 char e = mep_section_tag (x);
1155 return (e != 't' && e != 'b');
1160 /* Be careful not to use macros that need to be compiled one way for
1161 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
/* Address legitimacy check.  Accepted forms, in order: %lo(sym)[reg]
   LO_SUMs (only for modes <= 4 bytes), plain [reg], [reg+s16],
   [reg+unspec] tp/gp-relative addends, bare symbols for calls
   (VOIDmode), and certain constants.  The DEBUG_LEGIT fprintf calls are
   inside elided "#if DEBUG_LEGIT" guards.  */
1164 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1168 #define DEBUG_LEGIT 0
1170 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1174 if (GET_CODE (x) == LO_SUM
1175 && GET_CODE (XEXP (x, 0)) == REG
1176 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1177 && CONSTANT_P (XEXP (x, 1)))
1179 if (GET_MODE_SIZE (mode) > 4)
1181 /* We will end up splitting this, and lo_sums are not
1182 offsettable for us. */
1184 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1189 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1194 if (GET_CODE (x) == REG
1195 && GEN_REG (REGNO (x), strict))
1198 fprintf (stderr, " - yup, [reg]\n");
1203 if (GET_CODE (x) == PLUS
1204 && GET_CODE (XEXP (x, 0)) == REG
1205 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1206 && const_in_range (XEXP (x, 1), -32768, 32767))
1209 fprintf (stderr, " - yup, [reg+const]\n");
1214 if (GET_CODE (x) == PLUS
1215 && GET_CODE (XEXP (x, 0)) == REG
1216 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1217 && GET_CODE (XEXP (x, 1)) == CONST
1218 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1219 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1220 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1221 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1224 fprintf (stderr, " - yup, [reg+unspec]\n");
1229 the_tag = mep_section_tag (x);
1234 fprintf (stderr, " - nope, [far]\n");
/* VOIDmode here indicates a call target (see the bare-SYMBOL_REF
   acceptance below).  */
1239 if (mode == VOIDmode
1240 && GET_CODE (x) == SYMBOL_REF)
1243 fprintf (stderr, " - yup, call [symbol]\n");
1248 if ((mode == SImode || mode == SFmode)
1250 && LEGITIMATE_CONSTANT_P (x)
1251 && the_tag != 't' && the_tag != 'b')
1253 if (GET_CODE (x) != CONST_INT
1254 || (INTVAL (x) <= 0xfffff
1256 && (INTVAL (x) % 4) == 0))
1259 fprintf (stderr, " - yup, [const]\n");
1266 fprintf (stderr, " - nope.\n");
/* LEGITIMIZE_RELOAD_ADDRESS worker: repair two address shapes that
   reload would otherwise mishandle (see the inline comments).  Returns
   nonzero when it pushed a reload.  NOTE(review): the `type_i`
   parameter line is elided from this listing.  */
1272 mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
1274 int ind_levels ATTRIBUTE_UNUSED)
1276 enum reload_type type = (enum reload_type) type_i;
1278 if (GET_CODE (*x) == PLUS
1279 && GET_CODE (XEXP (*x, 0)) == MEM
1280 && GET_CODE (XEXP (*x, 1)) == REG)
1282 /* GCC will by default copy the MEM into a REG, which results in
1283 an invalid address. For us, the best thing to do is move the
1284 whole expression to a REG. */
1285 push_reload (*x, NULL_RTX, x, NULL,
1286 GENERAL_REGS, mode, VOIDmode,
1291 if (GET_CODE (*x) == PLUS
1292 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1293 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1295 char e = mep_section_tag (XEXP (*x, 0));
1297 if (e != 't' && e != 'b')
1299 /* GCC thinks that (sym+const) is a valid address. Well,
1300 sometimes it is, this time it isn't. The best thing to
1301 do is reload the symbol to a register, since reg+int
1302 tends to work, and we can't just add the symbol and
1304 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1305 GENERAL_REGS, mode, VOIDmode,
/* Return the encoded length (presumably in bytes) of the core
   load/store INSN whose memory operand is operand OPN; short forms
   exist for SP-, TP-relative, and small-offset addresses.  */
1314 mep_core_address_length (rtx insn, int opn)
1316 rtx set = single_set (insn);
1317 rtx mem = XEXP (set, opn);
1318 rtx other = XEXP (set, 1-opn);
1319 rtx addr = XEXP (mem, 0);
1321 if (register_operand (addr, Pmode))
1323 if (GET_CODE (addr) == PLUS)
1325 rtx addend = XEXP (addr, 1);
1327 gcc_assert (REG_P (XEXP (addr, 0)));
1329 switch (REGNO (XEXP (addr, 0)))
1331 case STACK_POINTER_REGNUM:
1332 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1333 && mep_imm7a4_operand (addend, VOIDmode))
1338 gcc_assert (REG_P (other));
/* Short TP-relative forms only reach the first eight registers.  */
1340 if (REGNO (other) >= 8)
1343 if (GET_CODE (addend) == CONST
1344 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1345 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1348 if (GET_CODE (addend) == CONST_INT
1349 && INTVAL (addend) >= 0
1350 && INTVAL (addend) <= 127
1351 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
/* Likewise for coprocessor load/store insns (register, post-increment,
   and the remaining elided cases).  */
1361 mep_cop_address_length (rtx insn, int opn)
1363 rtx set = single_set (insn);
1364 rtx mem = XEXP (set, opn);
1365 rtx addr = XEXP (mem, 0);
1367 if (GET_CODE (mem) != MEM)
1369 if (register_operand (addr, Pmode))
1371 if (GET_CODE (addr) == POST_INC)
1377 #define DEBUG_EXPAND_MOV 0
/* Expander for the mov patterns.  Forces mem-to-mem moves through a
   register, rewrites tiny ('t', $gp-relative) and based ('b',
   $tp-relative) symbol references into explicit UNSPEC+pointer-register
   sums, handles control-register and far-symbol destinations, and emits
   movh/add pairs for symbolic immediates.  Returns (on elided lines)
   whether the move was fully emitted.  */
1379 mep_expand_mov (rtx *operands, enum machine_mode mode)
1384 int post_reload = 0;
1386 tag[0] = mep_section_tag (operands[0]);
1387 tag[1] = mep_section_tag (operands[1]);
/* No mem-to-mem moves: before reload, force the source into a reg.  */
1389 if (!reload_in_progress
1390 && !reload_completed
1391 && GET_CODE (operands[0]) != REG
1392 && GET_CODE (operands[0]) != SUBREG
1393 && GET_CODE (operands[1]) != REG
1394 && GET_CODE (operands[1]) != SUBREG)
1395 operands[1] = copy_to_mode_reg (mode, operands[1]);
1397 #if DEBUG_EXPAND_MOV
1398 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1399 reload_in_progress || reload_completed);
1400 debug_rtx (operands[0]);
1401 debug_rtx (operands[1]);
1404 if (mode == DImode || mode == DFmode)
1407 if (reload_in_progress || reload_completed)
1411 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1412 cfun->machine->reload_changes_tp = true;
/* tiny/based references need $gp/$tp to still hold their incoming
   values; check the hard-reg initial-value machinery.  */
1414 if (tag[0] == 't' || tag[1] == 't')
1416 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1417 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1420 if (tag[0] == 'b' || tag[1] == 'b')
1422 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1423 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1426 if (cfun->machine->reload_changes_tp == true)
/* Rewrite a symbolic source in a tiny/based section as
   (plus pointer-reg (const (unspec [sym] UNS_TPREL/UNS_GPREL))).  */
1433 if (symbol_p (operands[1]))
1435 t = mep_section_tag (operands[1]);
1436 if (t == 'b' || t == 't')
1439 if (GET_CODE (operands[1]) == SYMBOL_REF)
1441 tpsym = operands[1];
1442 n = gen_rtx_UNSPEC (mode,
1443 gen_rtvec (1, operands[1]),
1444 t == 'b' ? UNS_TPREL : UNS_GPREL);
1445 n = gen_rtx_CONST (mode, n);
1447 else if (GET_CODE (operands[1]) == CONST
1448 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1449 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1450 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1452 tpsym = XEXP (XEXP (operands[1], 0), 0);
1453 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1454 n = gen_rtx_UNSPEC (mode,
1455 gen_rtvec (1, tpsym),
1456 t == 'b' ? UNS_TPREL : UNS_GPREL);
1457 n = gen_rtx_PLUS (mode, n, tpoffs);
1458 n = gen_rtx_CONST (mode, n);
1460 else if (GET_CODE (operands[1]) == CONST
1461 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1465 error ("unusual TP-relative address");
1469 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1470 : mep_gp_rtx ()), n);
1471 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1472 #if DEBUG_EXPAND_MOV
1473 fprintf(stderr, "mep_expand_mov emitting ");
/* Similarly rewrite tiny/based MEM operands on either side.  */
1480 for (i=0; i < 2; i++)
1482 t = mep_section_tag (operands[i]);
1483 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1488 sym = XEXP (operands[i], 0);
1489 if (GET_CODE (sym) == CONST
1490 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1491 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1504 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1505 n = gen_rtx_CONST (Pmode, n);
1506 n = gen_rtx_PLUS (Pmode, r, n);
1507 operands[i] = replace_equiv_address (operands[i], n);
/* Control registers cannot be loaded from / stored to memory directly;
   bounce through a general register.  */
1512 if ((GET_CODE (operands[1]) != REG
1513 && MEP_CONTROL_REG (operands[0]))
1514 || (GET_CODE (operands[0]) != REG
1515 && MEP_CONTROL_REG (operands[1])))
1518 #if DEBUG_EXPAND_MOV
1519 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1521 temp = gen_reg_rtx (mode);
1522 emit_move_insn (temp, operands[1]);
/* Far-symbol (or non-word-sized symbolic) destinations: materialize
   the address in a register first.  */
1526 if (symbolref_p (operands[0])
1527 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1528 || (GET_MODE_SIZE (mode) != 4)))
1532 gcc_assert (!reload_in_progress && !reload_completed);
1534 temp = force_reg (Pmode, XEXP (operands[0], 0));
1535 operands[0] = replace_equiv_address (operands[0], temp);
1536 emit_move_insn (operands[0], operands[1]);
1540 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
/* Symbolic immediates: build the value with a movh/add (top/bottom
   halves of the symbol) pair.  */
1543 if (symbol_p (operands[1])
1544 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1546 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1547 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1551 if (symbolref_p (operands[1])
1552 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1556 if (reload_in_progress || reload_completed)
1559 temp = gen_reg_rtx (Pmode);
1561 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1562 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1563 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1570 /* Cases where the pattern can't be made to use at all. */
/* Predicate used by the mov expanders/patterns: decide whether the
   operand pair can be handled by a plain mov insn.  Rejects HIGH
   (handled by movh patterns), far-section operands, and pre-reload
   tp/gp-relative symbols; requires at least one general register.
   NOTE(review): this excerpt is non-contiguous (several lines elided);
   comments describe only the visible code.  */
1573 mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1577 #define DEBUG_MOV_OK 0
1579 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1580 mep_section_tag (operands[1]));
1581 debug_rtx (operands[0]);
1582 debug_rtx (operands[1]);
1585 /* We want the movh patterns to get these. */
1586 if (GET_CODE (operands[1]) == HIGH)
1589 /* We can't store a register to a far variable without using a
1590 scratch register to hold the address. Using far variables should
1591 be split by mep_emit_mov anyway. */
1592 if (mep_section_tag (operands[0]) == 'f'
1593 || mep_section_tag (operands[1]) == 'f')
1596 fprintf (stderr, " - no, f\n");
1600 i = mep_section_tag (operands[1]);
1601 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1602 /* These are supposed to be generated with adds of the appropriate
1603 register. During and after reload, however, we allow them to
1604 be accessed as normal symbols because adding a dependency on
1605 the base register now might cause problems. */
1608 fprintf (stderr, " - no, bt\n");
1613 /* The only moves we can allow involve at least one general
1614 register, so require it. */
1615 for (i = 0; i < 2; i ++)
1617 /* Allow subregs too, before reload. */
1618 rtx x = operands[i];
1620 if (GET_CODE (x) == SUBREG)
1622 if (GET_CODE (x) == REG
1623 && ! MEP_CONTROL_REG (x))
1626 fprintf (stderr, " - ok\n");
1632 fprintf (stderr, " - no, no gen reg\n");
1637 #define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a DImode/DFmode move in OPERANDS[0..1] into two SImode halves,
   storing them in OPERANDS[2..5] (hi parts in 2/3, lo parts in 4/5).
   Swaps the two half-moves when the register pairs overlap so we do
   not clobber a source half before it is read.
   NOTE(review): excerpt is non-contiguous; comments cover visible
   lines only.  */
1639 mep_split_wide_move (rtx *operands, enum machine_mode mode)
1643 #if DEBUG_SPLIT_WIDE_MOVE
1644 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1645 debug_rtx (operands[0]);
1646 debug_rtx (operands[1]);
1649 for (i = 0; i <= 1; i++)
1651 rtx op = operands[i], hi, lo;
1653 switch (GET_CODE (op))
1657 unsigned int regno = REGNO (op);
1659 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1663 lo = gen_rtx_REG (SImode, regno);
1665 hi = gen_rtx_ZERO_EXTRACT (SImode,
1666 gen_rtx_REG (DImode, regno),
/* For GPR pairs, endianness selects which register of the pair is
   the high word (TARGET_LITTLE_ENDIAN/TARGET_BIG_ENDIAN are 0/1).  */
1671 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1672 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1680 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1681 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1688 /* The high part of CR <- GPR moves must be done after the low part. */
1689 operands [i + 4] = lo;
1690 operands [i + 2] = hi;
1693 if (reg_mentioned_p (operands[2], operands[5])
1694 || GET_CODE (operands[2]) == ZERO_EXTRACT
1695 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1699 /* Overlapping register pairs -- make sure we don't
1700 early-clobber ourselves. */
1702 operands[2] = operands[4];
1705 operands[3] = operands[5];
1709 #if DEBUG_SPLIT_WIDE_MOVE
1710 fprintf(stderr, "\033[34m");
1711 debug_rtx (operands[2]);
1712 debug_rtx (operands[3]);
1713 debug_rtx (operands[4]);
1714 debug_rtx (operands[5]);
1715 fprintf(stderr, "\033[0m");
1719 /* Emit a setcc instruction in its entirety. */
/* Expand DEST = (OP1 CODE OP2) as SImode RTL.  Handles the natively
   supported comparisons directly and canonicalizes the rest (NE via
   LTU-of-1, LE/LEU/GE/GEU by adjusting a constant operand and
   recursing).  Returns false (fall through) for cases the caller must
   handle; excerpt elides the returns.  */
1722 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1730 tmp = op1, op1 = op2, op2 = tmp;
1731 code = swap_condition (code);
1736 op1 = force_reg (SImode, op1);
1737 emit_insn (gen_rtx_SET (VOIDmode, dest,
1738 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1742 if (op2 != const0_rtx)
1743 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1744 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1748 /* Branchful sequence:
1750 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1753 Branchless sequence:
1754 add3 tmp, op1, -op2 32-bit (or mov + sub)
1755 sltu3 tmp, tmp, 1 16-bit
1756 xor3 dest, tmp, 1 32-bit
1758 if (optimize_size && op2 != const0_rtx)
1761 if (op2 != const0_rtx)
1762 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1764 op2 = gen_reg_rtx (SImode);
1765 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1767 emit_insn (gen_rtx_SET (VOIDmode, dest,
1768 gen_rtx_XOR (SImode, op2, const1_rtx)));
/* NOTE(review): 0x7ffffff below has only seven 'f's; INT_MAX is
   0x7fffffff (compare the eight-'f' constant used in
   mep_expand_cbranch).  Looks like a long-standing typo -- verify
   against the upstream source before changing.  */
1772 if (GET_CODE (op2) != CONST_INT
1773 || INTVAL (op2) == 0x7ffffff)
1775 op2 = GEN_INT (INTVAL (op2) + 1);
1776 return mep_expand_setcc_1 (LT, dest, op1, op2);
1779 if (GET_CODE (op2) != CONST_INT
1780 || INTVAL (op2) == -1)
1782 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1783 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1786 if (GET_CODE (op2) != CONST_INT
1787 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1789 op2 = GEN_INT (INTVAL (op2) - 1);
1790 return mep_expand_setcc_1 (GT, dest, op1, op2);
1793 if (GET_CODE (op2) != CONST_INT
1794 || op2 == const0_rtx)
1796 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1797 return mep_expand_setcc_1 (GTU, dest, op1, op2);
/* cstore expander entry point: unpack the comparison from OPERANDS
   (dest, comparison rtx, the two compare operands) and delegate to
   mep_expand_setcc_1.  */
1805 mep_expand_setcc (rtx *operands)
1807 rtx dest = operands[0];
1808 enum rtx_code code = GET_CODE (operands[1]);
1809 rtx op0 = operands[2];
1810 rtx op1 = operands[3];
1812 return mep_expand_setcc_1 (code, dest, op0, op1);
/* Expand a conditional branch comparison.  Rewrites comparisons the
   hardware lacks (LT/GE with big immediates, LE, LEU, GTU, GEU) into
   a setcc into a temporary plus an EQ/NE test, then returns the
   comparison rtx for the branch pattern.
   NOTE(review): excerpt is non-contiguous; several case labels and
   assignments between the visible lines are elided.  */
1816 mep_expand_cbranch (rtx *operands)
1818 enum rtx_code code = GET_CODE (operands[0]);
1819 rtx op0 = operands[1];
1820 rtx op1 = operands[2];
1827 if (mep_imm4_operand (op1, SImode))
1830 tmp = gen_reg_rtx (SImode);
1831 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1838 if (mep_imm4_operand (op1, SImode))
1841 tmp = gen_reg_rtx (SImode);
1842 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1851 if (! mep_reg_or_imm4_operand (op1, SImode))
1852 op1 = force_reg (SImode, op1);
/* a <= C becomes a < C+1 (safe unless C is INT_MAX).  */
1857 if (GET_CODE (op1) == CONST_INT
1858 && INTVAL (op1) != 0x7fffffff)
1860 op1 = GEN_INT (INTVAL (op1) + 1);
1861 code = (code == LE ? LT : GE);
1865 tmp = gen_reg_rtx (SImode);
1866 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1868 code = (code == LE ? EQ : NE);
1874 if (op1 == const1_rtx)
1881 tmp = gen_reg_rtx (SImode);
1882 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1889 tmp = gen_reg_rtx (SImode);
1890 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1892 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1901 tmp = gen_reg_rtx (SImode);
1902 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1903 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1910 tmp = gen_reg_rtx (SImode);
1911 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1913 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1925 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
/* Return the assembler template for a conditional branch: register
   compare (bne/beq), compare-with-zero (bnez/beqz, not in VLIW
   functions), or immediate compare (bnei/beqi).  NE selects the
   "not equal" form.  */
1929 mep_emit_cbranch (rtx *operands, int ne)
1931 if (GET_CODE (operands[1]) == REG)
1932 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1933 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1934 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1936 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
/* Expand a call (RETURNS_VALUE nonzero for call_value).  Forces a
   non-immediate call address into a register and emits the internal
   call pattern, which carries $tp and $gp as uses so their values
   survive across the call.  */
1940 mep_expand_call (rtx *operands, int returns_value)
1942 rtx addr = operands[returns_value];
1943 rtx tp = mep_tp_rtx ();
1944 rtx gp = mep_gp_rtx ();
1946 gcc_assert (GET_CODE (addr) == MEM);
1948 addr = XEXP (addr, 0);
1950 if (! mep_call_address_operand (addr, VOIDmode))
1951 addr = force_reg (SImode, addr);
1953 if (! operands[returns_value+2])
1954 operands[returns_value+2] = const0_rtx;
1957 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
1958 operands[3], tp, gp));
1960 emit_call_insn (gen_call_internal (addr, operands[1],
1961 operands[2], tp, gp));
1964 /* Aliasing Support. */
1966 /* If X is a machine specific address (i.e. a symbol or label being
1967 referenced as a displacement from the GOT implemented using an
1968 UNSPEC), then return the base term. Otherwise return X. */
/* Recognizes (plus base const(unspec ...)) where base is the $tp or
   $gp pseudo and the UNSPEC matches; returns the wrapped symbol.
   NOTE(review): excerpt elides the early returns and the assignment
   of `base'/`term'/`unspec'.  */
1971 mep_find_base_term (rtx x)
1976 if (GET_CODE (x) != PLUS)
1981 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
1982 && base == mep_tp_rtx ())
1984 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
1985 && base == mep_gp_rtx ())
1990 if (GET_CODE (term) != CONST)
1992 term = XEXP (term, 0);
1994 if (GET_CODE (term) != UNSPEC
1995 || XINT (term, 1) != unspec)
1998 return XVECEXP (term, 0, 0);
2001 /* Reload Support. */
2003 /* Return true if the registers in CLASS cannot represent the change from
2004 modes FROM to TO. */
2007 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2008 enum reg_class regclass)
2013 /* 64-bit COP regs must remain 64-bit COP regs. */
2014 if (TARGET_64BIT_CR_REGS
2015 && (regclass == CR_REGS
2016 || regclass == LOADABLE_CR_REGS)
2017 && (GET_MODE_SIZE (to) < 8
2018 || GET_MODE_SIZE (from) < 8))
/* Nonzero when class C contains registers outside GENERAL_REGS.  */
2024 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
/* True if X (stripped of SUBREGs) is a hard general-purpose register.  */
2027 mep_general_reg (rtx x)
2029 while (GET_CODE (x) == SUBREG)
2031 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
/* True if X (stripped of SUBREGs) is a hard register that is NOT a
   general-purpose register (i.e. control or coprocessor).  */
2035 mep_nongeneral_reg (rtx x)
2037 while (GET_CODE (x) == SUBREG)
2039 return (GET_CODE (x) == REG
2040 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
/* True if X (stripped of SUBREGs) is a coprocessor register.  */
2044 mep_general_copro_reg (rtx x)
2046 while (GET_CODE (x) == SUBREG)
2048 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
/* True if X (stripped of SUBREGs) is not a hard register: either a
   non-REG rtx or a pseudo.  */
2052 mep_nonregister (rtx x)
2054 while (GET_CODE (x) == SUBREG)
2056 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2059 #define DEBUG_RELOAD 0
2061 /* Return the secondary reload class needed for moving value X to or
2062 from a register in coprocessor register class CLASS. */
2064 static enum reg_class
2065 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2067 if (mep_general_reg (x))
2068 /* We can do the move directly if mep_have_core_copro_moves_p,
2069 otherwise we need to go through memory. Either way, no secondary
2070 register is needed. */
2073 if (mep_general_copro_reg (x))
2075 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2076 if (mep_have_copro_copro_moves_p)
2079 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2080 if (mep_have_core_copro_moves_p)
2081 return GENERAL_REGS;
2083 /* Otherwise we need to do it through memory. No secondary
2084 register is needed. */
2088 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2089 && constraint_satisfied_p (x, CONSTRAINT_U))
2090 /* X is a memory value that we can access directly. */
2093 /* We have to move X into a GPR first and then copy it to
2094 the coprocessor register. The move from the GPR to the
2095 coprocessor might be done directly or through memory,
2096 depending on mep_have_core_copro_moves_p. */
2097 return GENERAL_REGS;
2100 /* Copying X to register in RCLASS. */
/* Secondary-input-reload hook: coprocessor classes defer to
   mep_secondary_copro_reload_class; other non-general classes need a
   GENERAL_REGS intermediary when X is not a general register.  */
2103 mep_secondary_input_reload_class (enum reg_class rclass,
2104 enum machine_mode mode ATTRIBUTE_UNUSED,
2110 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2114 if (reg_class_subset_p (rclass, CR_REGS))
2115 rv = mep_secondary_copro_reload_class (rclass, x);
2116 else if (MEP_NONGENERAL_CLASS (rclass)
2117 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2121 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2123 return (enum reg_class) rv;
2126 /* Copying register in RCLASS to X. */
/* Secondary-output-reload hook; mirror image of the input case above.  */
2129 mep_secondary_output_reload_class (enum reg_class rclass,
2130 enum machine_mode mode ATTRIBUTE_UNUSED,
2136 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2140 if (reg_class_subset_p (rclass, CR_REGS))
2141 rv = mep_secondary_copro_reload_class (rclass, x);
2142 else if (MEP_NONGENERAL_CLASS (rclass)
2143 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2147 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2150 return (enum reg_class) rv;
2153 /* Implement SECONDARY_MEMORY_NEEDED. */
/* A stack temporary is required for core<->copro moves when the
   target lacks direct core/copro moves, and for copro<->copro moves
   when it additionally lacks copro/copro moves.  */
2156 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2157 enum machine_mode mode ATTRIBUTE_UNUSED)
2159 if (!mep_have_core_copro_moves_p)
2161 if (reg_classes_intersect_p (rclass1, CR_REGS)
2162 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2164 if (reg_classes_intersect_p (rclass2, CR_REGS)
2165 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2167 if (!mep_have_copro_copro_moves_p
2168 && reg_classes_intersect_p (rclass1, CR_REGS)
2169 && reg_classes_intersect_p (rclass2, CR_REGS))
/* Expand a secondary reload.  OPERANDS[2] is the scratch register.
   `which' encodes the operand kinds as two base-10-looking digits
   (far-section = 2, copro reg = 1 per operand); note the case labels
   with a leading zero (00, 02, 01) are octal literals, which for
   these values equal their decimal reading.  */
2176 mep_expand_reload (rtx *operands, enum machine_mode mode)
2178 /* There are three cases for each direction:
2183 int s0 = mep_section_tag (operands[0]) == 'f';
2184 int s1 = mep_section_tag (operands[1]) == 'f';
2185 int c0 = mep_nongeneral_reg (operands[0]);
2186 int c1 = mep_nongeneral_reg (operands[1]);
2187 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2190 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2191 debug_rtx (operands[0]);
2192 debug_rtx (operands[1]);
2197 case 00: /* Don't know why this gets here. */
2198 case 02: /* general = far */
2199 emit_move_insn (operands[0], operands[1]);
2202 case 10: /* cr = mem */
2203 case 11: /* cr = cr */
2204 case 01: /* mem = cr */
2205 case 12: /* cr = far */
2206 emit_move_insn (operands[2], operands[1]);
2207 emit_move_insn (operands[0], operands[2]);
2210 case 20: /* far = general */
2211 emit_move_insn (operands[2], XEXP (operands[1], 0));
2212 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2215 case 21: /* far = cr */
2216 case 22: /* far = far */
2218 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2219 which, mode_name[mode]);
2220 debug_rtx (operands[0]);
2221 debug_rtx (operands[1]);
2226 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2227 can be moved directly into registers 0 to 7, but not into the rest.
2228 If so, and if the required class includes registers 0 to 7, restrict
2229 it to those registers. */
2232 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2234 switch (GET_CODE (x))
/* Mid-range constants whose low half is nonzero need the mov-to-
   low-register forms, so restrict to TPREL_REGS ($0..$7).  */
2237 if (INTVAL (x) >= 0x10000
2238 && INTVAL (x) < 0x01000000
2239 && (INTVAL (x) & 0xffff) != 0
2240 && reg_class_subset_p (TPREL_REGS, rclass))
2241 rclass = TPREL_REGS;
2247 if (mep_section_tag (x) != 'f'
2248 && reg_class_subset_p (TPREL_REGS, rclass))
2249 rclass = TPREL_REGS;
2258 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2259 moves, 4 for direct double-register moves, and 1000 for anything
2260 that requires a temporary register or temporary stack slot. */
2263 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
2265 if (mep_have_copro_copro_moves_p
2266 && reg_class_subset_p (from, CR_REGS)
2267 && reg_class_subset_p (to, CR_REGS))
2269 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2273 if (reg_class_subset_p (from, CR_REGS)
2274 && reg_class_subset_p (to, CR_REGS))
2276 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2280 if (reg_class_subset_p (from, CR_REGS)
2281 || reg_class_subset_p (to, CR_REGS))
2283 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2287 if (mep_secondary_memory_needed (from, to, mode))
2289 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2292 if (GET_MODE_SIZE (mode) > 4)
2299 /* Functions to save and restore machine-specific function data. */
/* init_machine_status hook: allocate a zeroed, GC-tracked
   machine_function for the current function.  */
2301 static struct machine_function *
2302 mep_init_machine_status (void)
2304 return ggc_alloc_cleared_machine_function ();
/* ALLOCATE_INITIAL_VALUE hook: suggest a stack slot (relative to the
   arg pointer) where REG's entry value can live, allocating a 4-byte
   save slot on first use.  Interrupt handlers get no slot for $tp/$gp
   since the prologue materializes those itself.  */
2308 mep_allocate_initial_value (rtx reg)
2312 if (GET_CODE (reg) != REG)
2315 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2318 /* In interrupt functions, the "initial" values of $gp and $tp are
2319 provided by the prologue. They are not necessarily the same as
2320 the values that the caller was using. */
2321 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2322 if (mep_interrupt_p ())
2325 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2327 cfun->machine->reg_save_size += 4;
2328 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2331 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2332 return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
/* __builtin_return_address support: the return address is the entry
   value of $lp.  The two trailing lines appear to be the bodies of
   the mep_tp_rtx/mep_gp_rtx helpers (entry values of $tp and $gp) --
   this excerpt elides their surrounding definitions.  */
2336 mep_return_addr_rtx (int count)
2341 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2347 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2353 return get_hard_reg_initial_val (Pmode, GP_REGNO);
/* True if the current function has the "interrupt" attribute.  The
   answer is cached in cfun->machine->interrupt_handler (0 = unknown,
   1 = no, 2 = yes).  */
2357 mep_interrupt_p (void)
2359 if (cfun->machine->interrupt_handler == 0)
2361 int interrupt_handler
2362 = (lookup_attribute ("interrupt",
2363 DECL_ATTRIBUTES (current_function_decl))
2365 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2367 return cfun->machine->interrupt_handler == 2;
/* True if the current function has the "disinterrupt" attribute;
   cached the same way as mep_interrupt_p.  */
2371 mep_disinterrupt_p (void)
2373 if (cfun->machine->disable_interrupts == 0)
2375 int disable_interrupts
2376 = (lookup_attribute ("disinterrupt",
2377 DECL_ATTRIBUTES (current_function_decl))
2379 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2381 return cfun->machine->disable_interrupts == 2;
2385 /* Frame/Epilog/Prolog Related. */
/* True if INSN modifies REG.  Self-copies (reg = same reg) are not
   counted as sets.  */
2388 mep_reg_set_p (rtx reg, rtx insn)
2390 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2393 if (FIND_REG_INC_NOTE (insn, reg))
2395 insn = PATTERN (insn);
2398 if (GET_CODE (insn) == SET
2399 && GET_CODE (XEXP (insn, 0)) == REG
2400 && GET_CODE (XEXP (insn, 1)) == REG
2401 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2404 return set_of (reg, insn) != NULL_RTX;
/* Tri-state cache values for cfun->machine->reg_saved[].  */
2408 #define MEP_SAVES_UNKNOWN 0
2409 #define MEP_SAVES_YES 1
2410 #define MEP_SAVES_MAYBE 2
2411 #define MEP_SAVES_NO 3
/* True if REGNO is written anywhere in the current function's insn
   stream (scanned via the topmost sequence).  Also true for any
   live register in an interrupt handler, and for $lp when profiling
   (the profiler clobbers it).  */
2414 mep_reg_set_in_function (int regno)
2418 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2421 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2424 push_topmost_sequence ();
2425 insn = get_insns ();
2426 pop_topmost_sequence ();
2431 reg = gen_rtx_REG (SImode, regno);
2433 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2434 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
/* True if the function contains a basic (operand-less) asm statement,
   which could touch any register.  Cached in
   cfun->machine->asms_without_operands (0 unknown, 1 no, 2 yes).  */
2440 mep_asm_without_operands_p (void)
2442 if (cfun->machine->asms_without_operands == 0)
2446 push_topmost_sequence ();
2447 insn = get_insns ();
2448 pop_topmost_sequence ();
2450 cfun->machine->asms_without_operands = 1;
2454 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2456 cfun->machine->asms_without_operands = 2;
2459 insn = NEXT_INSN (insn);
2463 return cfun->machine->asms_without_operands == 2;
2466 /* Interrupt functions save/restore every call-preserved register, and
2467 any call-used register it uses (or all if it calls any function,
2468 since they may get clobbered there too). Here we check to see
2469 which call-used registers need saving. */
/* IVC2 coprocessor control registers that must also be preserved.  */
2471 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2472 && (r == FIRST_CCR_REGNO + 1 \
2473 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2474 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
/* True if register R must be saved by an interrupt handler's
   prologue.  Non-interrupt functions always answer false (elided
   return after the first test).  */
2477 mep_interrupt_saved_reg (int r)
2479 if (!mep_interrupt_p ())
2481 if (r == REGSAVE_CONTROL_TEMP
2482 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2484 if (mep_asm_without_operands_p ()
2486 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2487 || IVC2_ISAVED_REG (r)))
2489 if (!current_function_is_leaf)
2490 /* Function calls mean we need to save $lp. */
2491 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2493 if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
2494 /* The interrupt handler might use these registers for repeat blocks,
2495 or it might call a function that does so. */
2496 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2498 if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2500 /* Functions we call might clobber these. */
2501 if (call_used_regs[r] && !fixed_regs[r])
2503 /* Additional registers that need to be saved for IVC2. */
2504 if (IVC2_ISAVED_REG (r))
/* True if register R needs a save slot in this function.  Computed
   once (while the frame layout is not yet locked) and cached in
   cfun->machine->reg_saved[r].  */
2511 mep_call_saves_register (int r)
2513 if (! cfun->machine->frame_locked)
2515 int rv = MEP_SAVES_NO;
2517 if (cfun->machine->reg_save_slot[r])
2519 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2521 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2523 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2525 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2526 /* We need these to have stack slots so that they can be set during
2529 else if (mep_interrupt_saved_reg (r))
2531 cfun->machine->reg_saved[r] = rv;
2533 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2536 /* Return true if epilogue uses register REGNO. */
2539 mep_epilogue_uses (int regno)
2541 /* Since $lp is a call-saved register, the generic code will normally
2542 mark it used in the epilogue if it needs to be saved and restored.
2543 However, when profiling is enabled, the profiling code will implicitly
2544 clobber $11. This case has to be handled specially both here and in
2545 mep_call_saves_register. */
2546 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2548 /* Interrupt functions save/restore pretty much everything. */
2549 return (reload_completed && mep_interrupt_saved_reg (regno));
/* Save-slot size in bytes for register REGNO: 8 for 64-bit
   coprocessor registers, otherwise 4 (elided return).  */
2553 mep_reg_size (int regno)
2555 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2560 /* Worker function for TARGET_CAN_ELIMINATE. */
/* AP->SP elimination is possible only without a frame pointer.  */
2563 mep_can_eliminate (const int from, const int to)
2565 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2566 ? ! frame_pointer_needed
/* Compute the offset between eliminable register pairs.  As a side
   effect, lays out the frame: totals the save-area size and records
   the 8-byte-alignment fillers in cfun->machine.  */
2571 mep_elimination_offset (int from, int to)
2575 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2578 if (!cfun->machine->frame_locked)
2579 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2581 /* We don't count arg_regs_to_save in the arg pointer offset, because
2582 gcc thinks the arg pointer has moved along with the saved regs.
2583 However, we do count it when we adjust $sp in the prologue. */
2585 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2586 if (mep_call_saves_register (i))
2587 reg_save_size += mep_reg_size (i);
2589 if (reg_save_size % 8)
2590 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2592 cfun->machine->regsave_filler = 0;
2594 /* This is what our total stack adjustment looks like. */
2595 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2598 cfun->machine->frame_filler = 8 - (total_size % 8);
2600 cfun->machine->frame_filler = 0;
2603 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2604 return reg_save_size + cfun->machine->regsave_filler;
2606 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2607 return cfun->machine->frame_filler + frame_size;
2609 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2610 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
/* Presumably the body of the F() helper: mark X frame-related and
   return it (definition lines elided in this excerpt) -- verify.  */
2618 RTX_FRAME_RELATED_P (x) = 1;
2622 /* Since the prologue/epilogue code is generated after optimization,
2623 we can't rely on gcc to split constants for us. So, this code
2624 captures all the ways to add a constant to a register in one logic
2625 chunk, including optimizing away insns we just don't need. This
2626 makes the prolog/epilog code easier to follow. */
/* Emit DEST = SRC + VALUE, choosing mov / add3 / hi-lo synthesis by
   constant size.  MARK_FRAME flags the insns RTX_FRAME_RELATED for
   DWARF CFI.  */
2628 add_constant (int dest, int src, int value, int mark_frame)
2633 if (src == dest && value == 0)
2638 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2639 gen_rtx_REG (SImode, src));
2641 RTX_FRAME_RELATED_P(insn) = 1;
2645 if (value >= -32768 && value <= 32767)
2647 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2648 gen_rtx_REG (SImode, src),
2651 RTX_FRAME_RELATED_P(insn) = 1;
2655 /* Big constant, need to use a temp register. We use
2656 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2657 area is always small enough to directly add to). */
2659 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2660 lo = value & 0xffff;
2662 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2667 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2668 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2672 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2673 gen_rtx_REG (SImode, src),
2674 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
/* Attach an explicit CFA note since the 3-insn synthesis is not
   directly parseable by the dwarf2 machinery.  */
2677 RTX_FRAME_RELATED_P(insn) = 1;
2678 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2679 gen_rtx_SET (SImode,
2680 gen_rtx_REG (SImode, dest),
2681 gen_rtx_PLUS (SImode,
2682 gen_rtx_REG (SImode, dest),
2687 /* Move SRC to DEST. Mark the move as being potentially dead if
/* MAYBE_DEAD_P: annotate with REG_MAYBE_DEAD so later passes may
   delete the move if the destination turns out unused.  */
2691 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2693 rtx insn = emit_move_insn (dest, src);
2696 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2701 /* Used for interrupt functions, which can't assume that $tp and $gp
2702 contain the correct pointers. */
/* Rematerialize REGNO from SYMBOL via the top/bot symbol move pair.
   Skipped in leaf functions that never touch the register.  */
2705 mep_reload_pointer (int regno, const char *symbol)
2709 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2712 reg = gen_rtx_REG (SImode, regno);
2713 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2714 emit_insn (gen_movsi_topsym_s (reg, sym));
2715 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2718 /* Assign save slots for any register not already saved. DImode
2719 registers go at the end of the reg save area; the rest go at the
2720 beginning. This is for alignment purposes. Returns true if a frame
2721 is really needed. */
2723 mep_assign_save_slots (int reg_save_size)
2725 bool really_need_stack_frame = false;
2729 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2730 if (mep_call_saves_register(i))
2732 int regsize = mep_reg_size (i);
/* $tp/$gp/$lp only force a real frame when actually modified.  */
2734 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2735 || mep_reg_set_in_function (i))
2736 really_need_stack_frame = true;
2738 if (cfun->machine->reg_save_slot[i])
2743 cfun->machine->reg_save_size += regsize;
2744 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2748 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
/* Freeze the layout; mep_call_saves_register now only reads cache.  */
2752 cfun->machine->frame_locked = 1;
2753 return really_need_stack_frame;
/* Expand the function prologue: disable interrupts if requested,
   adjust $sp, store each saved register (64-bit CRs via two SImode
   stores, other control regs via a GPR temp), set up $fp, and reload
   $gp/$tp in interrupt handlers.
   NOTE(review): excerpt is non-contiguous; comments cover only the
   visible code.  */
2757 mep_expand_prologue (void)
2759 int i, rss, sp_offset = 0;
2762 int really_need_stack_frame;
2764 /* We must not allow register renaming in interrupt functions,
2765 because that invalidates the correctness of the set of call-used
2766 registers we're going to save/restore. */
2767 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2769 if (mep_disinterrupt_p ())
2770 emit_insn (gen_mep_disable_int ());
2772 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2774 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2775 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2776 really_need_stack_frame = frame_size;
2778 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
/* Fold the frame adjustment into the save-area adjustment when the
   combined value still fits a short add.  */
2780 sp_offset = reg_save_size;
2781 if (sp_offset + frame_size < 128)
2782 sp_offset += frame_size ;
2784 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2786 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2787 if (mep_call_saves_register(i))
2791 enum machine_mode rmode;
2793 rss = cfun->machine->reg_save_slot[i];
2795 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2796 && (!mep_reg_set_in_function (i)
2797 && !mep_interrupt_p ()))
2800 if (mep_reg_size (i) == 8)
2805 /* If there is a pseudo associated with this register's initial value,
2806 reload might have already spilt it to the stack slot suggested by
2807 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2809 mem = gen_rtx_MEM (rmode,
2810 plus_constant (stack_pointer_rtx, sp_offset - rss));
2811 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2813 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2814 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2815 else if (rmode == DImode)
/* 64-bit CR: extract both halves into GPR temps and store them as
   two SImode words, endian-adjusted by `be'.  */
2818 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2820 mem = gen_rtx_MEM (SImode,
2821 plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2823 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2824 gen_rtx_REG (SImode, i),
2826 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2827 gen_rtx_ZERO_EXTRACT (SImode,
2828 gen_rtx_REG (DImode, i),
2832 insn = maybe_dead_move (mem,
2833 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2835 RTX_FRAME_RELATED_P (insn) = 1;
2837 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2838 gen_rtx_SET (VOIDmode,
2840 gen_rtx_REG (rmode, i)));
2841 mem = gen_rtx_MEM (SImode,
2842 plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2843 insn = maybe_dead_move (mem,
2844 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
/* Other control regs: route through a GPR temp.  */
2850 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2851 gen_rtx_REG (rmode, i),
2853 insn = maybe_dead_move (mem,
2854 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2856 RTX_FRAME_RELATED_P (insn) = 1;
2858 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2859 gen_rtx_SET (VOIDmode,
2861 gen_rtx_REG (rmode, i)));
2865 if (frame_pointer_needed)
2867 /* We've already adjusted down by sp_offset. Total $sp change
2868 is reg_save_size + frame_size. We want a net change here of
2869 just reg_save_size. */
2870 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2873 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2875 if (mep_interrupt_p ())
2877 mep_reload_pointer(GP_REGNO, "__sdabase");
2878 mep_reload_pointer(TP_REGNO, "__tpbase");
/* Emit the human-readable frame-layout commentary at the top of each
   function's assembly output (sizes of save area, locals, alignment
   fillers, outgoing args), and pick $fp vs $8 as the name for the
   frame-pointer register.
   NOTE(review): excerpt is non-contiguous; comments cover only the
   visible code.  */
2883 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2885 int local = hwi_local;
2886 int frame_size = local + crtl->outgoing_args_size;
2891 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2893 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2894 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2895 sp_offset = reg_save_size + frame_size;
2897 ffill = cfun->machine->frame_filler;
2899 if (cfun->machine->mep_frame_pointer_needed)
2900 reg_names[FP_REGNO] = "$fp";
2902 reg_names[FP_REGNO] = "$8";
/* With no debug info, emit only a one-line frame summary.  */
2907 if (debug_info_level == DINFO_LEVEL_NONE)
2909 fprintf (file, "\t# frame: %d", sp_offset);
2911 fprintf (file, " %d regs", reg_save_size);
2913 fprintf (file, " %d locals", local);
2914 if (crtl->outgoing_args_size)
2915 fprintf (file, " %d args", crtl->outgoing_args_size);
2916 fprintf (file, "\n");
2920 fprintf (file, "\t#\n");
2921 fprintf (file, "\t# Initial Frame Information:\n");
2922 if (sp_offset || !frame_pointer_needed)
2923 fprintf (file, "\t# Entry ---------- 0\n");
2925 /* Sort registers by save slots, so they're printed in the order
2926 they appear in memory, not the order they're saved in. */
2927 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2929 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2930 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2931 if (cfun->machine->reg_save_slot[slot_map[si]]
2932 > cfun->machine->reg_save_slot[slot_map[sj]])
2934 int t = slot_map[si];
2935 slot_map[si] = slot_map[sj];
2940 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2943 int r = slot_map[i];
2944 int rss = cfun->machine->reg_save_slot[r];
2946 if (!mep_call_saves_register (r))
2949 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2950 && (!mep_reg_set_in_function (r)
2951 && !mep_interrupt_p ()))
2954 rsize = mep_reg_size(r);
2955 skip = rss - (sp+rsize);
2957 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2958 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2959 rsize, reg_names[r], sp_offset - rss);
2963 skip = reg_save_size - sp;
2965 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2967 if (frame_pointer_needed)
2968 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
2970 fprintf (file, "\t# %3d bytes for local vars\n", local);
2972 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
2973 if (crtl->outgoing_args_size)
2974 fprintf (file, "\t# %3d bytes for outgoing args\n",
2975 crtl->outgoing_args_size);
2976 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
2977 fprintf (file, "\t#\n");
/* Mode flags consulted by mep_expand_epilogue; set temporarily by the
   eh-return and sibcall epilogue entry points below.  */
2981 static int mep_prevent_lp_restore = 0;
2982 static int mep_sibcall_epilogue = 0;
/* Emit RTL for the function epilogue: rebase $sp, restore saved
   registers, and emit the appropriate return insn.  Statement order
   is significant; this extract elides lines (braces/else arms).  */
2985 mep_expand_epilogue (void)
2987 int i, sp_offset = 0;
2988 int reg_save_size = 0;
2990 int lp_temp = LP_REGNO, lp_slot = -1;
2991 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
2992 int interrupt_handler = mep_interrupt_p ();
2994 if (profile_arc_flag == 2)
2995 emit_insn (gen_mep_bb_trace_ret ());
2997 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2998 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
3000 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
3002 if (frame_pointer_needed)
3004 /* If we have a frame pointer, we won't have a reliable stack
3005 pointer (alloca, you know), so rebase SP from FP */
3006 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
3007 gen_rtx_REG (SImode, FP_REGNO));
3008 sp_offset = reg_save_size;
3012 /* SP is right under our local variable space. Adjust it if
3014 sp_offset = reg_save_size + frame_size;
/* Large frames: pop the locals first so the register restores below
   can use short (small-offset) addressing modes.  */
3015 if (sp_offset >= 128)
3017 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
3018 sp_offset -= frame_size;
3022 /* This is backwards so that we restore the control and coprocessor
3023 registers before the temporary registers we use to restore
3025 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
3026 if (mep_call_saves_register (i))
3028 enum machine_mode rmode;
3029 int rss = cfun->machine->reg_save_slot[i];
/* 8-byte registers are restored in DImode (branch lines elided).  */
3031 if (mep_reg_size (i) == 8)
/* Skip TP/GP/LP when they were never saved (see prologue logic).  */
3036 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
3037 && !(mep_reg_set_in_function (i) || interrupt_handler))
3039 if (mep_prevent_lp_restore && i == LP_REGNO)
3041 if (!mep_prevent_lp_restore
3042 && !interrupt_handler
3043 && (i == 10 || i == 11))
3046 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
3047 emit_move_insn (gen_rtx_REG (rmode, i),
3049 plus_constant (stack_pointer_rtx,
3053 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3054 /* Defer this one so we can jump indirect rather than
3055 copying the RA to $lp and "ret". EH epilogues
3056 automatically skip this anyway. */
3057 lp_slot = sp_offset-rss;
/* Non-loadable registers take a bounce through a core temp.  */
3060 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3062 plus_constant (stack_pointer_rtx,
3064 emit_move_insn (gen_rtx_REG (rmode, i),
3065 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3071 /* Restore this one last so we know it will be in the temp
3072 register when we return by jumping indirectly via the temp. */
3073 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3074 gen_rtx_MEM (SImode,
3075 plus_constant (stack_pointer_rtx,
3077 lp_temp = REGSAVE_CONTROL_TEMP;
3081 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
/* For __builtin_eh_return, apply the extra stack adjustment recorded
   by mep_emit_eh_epilogue.  */
3083 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3084 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3085 gen_rtx_REG (SImode, SP_REGNO),
3086 cfun->machine->eh_stack_adjust));
/* Sibcall epilogues emit no return insn — the sibling call follows.  */
3088 if (mep_sibcall_epilogue)
3091 if (mep_disinterrupt_p ())
3092 emit_insn (gen_mep_enable_int ());
3094 if (mep_prevent_lp_restore)
3096 emit_jump_insn (gen_eh_return_internal ());
3099 else if (interrupt_handler)
3100 emit_jump_insn (gen_mep_reti ());
3102 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
/* Expander for eh_return: make sure the handler address ends up in
   $lp before emitting the EH epilogue.
   NOTE(review): the assignment of RA back into operands[0] appears to
   be elided from this extract — confirm against upstream.  */
3106 mep_expand_eh_return (rtx *operands)
3108 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3110 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3111 emit_move_insn (ra, operands[0]);
3115 emit_insn (gen_eh_epilogue (operands[0]));
3119 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3121 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3122 mep_prevent_lp_restore = 1;
3123 mep_expand_epilogue ();
3124 mep_prevent_lp_restore = 0;
3128 mep_expand_sibcall_epilogue (void)
3130 mep_sibcall_epilogue = 1;
3131 mep_expand_epilogue ();
3132 mep_sibcall_epilogue = 0;
/* TARGET_FUNCTION_OK_FOR_SIBCALL hook: reject sibcalls to far-section
   ('f'-tagged) functions and from interrupt/disinterrupt functions.
   NOTE(review): the return statements are elided from this extract.  */
3136 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3141 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3144 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3145 if (mep_interrupt_p () || mep_disinterrupt_p ())
3152 mep_return_stackadj_rtx (void)
3154 return gen_rtx_REG (SImode, 10);
3158 mep_return_handler_rtx (void)
3160 return gen_rtx_REG (SImode, LP_REGNO);
/* Emit the profiling sequence (FUNCTION_PROFILER) into FILE.  The
   sequence makes room for two words, saves $0 and $lp, calls
   __mep_mcount, then restores both registers and the stack pointer,
   so all caller-visible state is preserved.  */
void
mep_function_profiler (FILE *file)
{
  static const char *const mcount_seq[] = {
    "\t# mep function profiler\n",
    "\tadd\t$sp, -8\n",
    "\tsw\t$0, ($sp)\n",
    "\tldc\t$0, $lp\n",
    "\tsw\t$0, 4($sp)\n",
    "\tbsr\t__mep_mcount\n",
    "\tlw\t$0, 4($sp)\n",
    "\tstc\t$0, $lp\n",
    "\tlw\t$0, ($sp)\n",
    "\tadd\t$sp, 8\n\n"
  };
  size_t i;

  /* Always right at the beginning of the function.  */
  for (i = 0; i < sizeof mcount_seq / sizeof mcount_seq[0]; i++)
    fputs (mcount_seq[i], file);
}
/* Emit the basic-block-profiling return hook directly to the asm
   stream: save $0 and $lp, call __bb_trace_ret, restore.  Mirrors
   mep_function_profiler above but targets asm_out_file.  */
3180 mep_emit_bb_trace_ret (void)
3182 fprintf (asm_out_file, "\t# end of block profiling\n");
3183 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3184 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3185 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3186 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3187 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3188 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3189 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3190 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3191 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3198 /* Operand Printing. */
3201 mep_print_operand_address (FILE *stream, rtx address)
3203 if (GET_CODE (address) == MEM)
3204 address = XEXP (address, 0);
3206 /* cf: gcc.dg/asm-4.c. */
3207 gcc_assert (GET_CODE (address) == REG);
3209 mep_print_operand (stream, address, 0);
/* Operand-printing conversion table.  Each entry pairs an operand
   "pattern" string (a linearized encoding of the RTL shape, built
   elsewhere into patternr[]) with an output format; digits in the
   format index patternr[], '\\' escapes a literal character.
   NOTE(review): many table rows are elided from this extract.  */
3215 const char *pattern;
3218 const conversions[] =
3221 { 0, "m+ri", "3(2)" },
3225 { 0, "mLrs", "%lo(3)(2)" },
3226 { 0, "mLr+si", "%lo(4+5)(2)" },
3227 { 0, "m+ru2s", "%tpoff(5)(2)" },
3228 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3229 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3230 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3231 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3232 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3234 { 0, "m+si", "(2+3)" },
3235 { 0, "m+li", "(2+3)" },
3238 { 0, "+si", "1+2" },
3239 { 0, "+u2si", "%tpoff(3+4)" },
3240 { 0, "+u3si", "%sdaoff(3+4)" },
3246 { 'h', "Hs", "%hi(1)" },
3248 { 'I', "u2s", "%tpoff(2)" },
3249 { 'I', "u3s", "%sdaoff(2)" },
3250 { 'I', "+u2si", "%tpoff(3+4)" },
3251 { 'I', "+u3si", "%sdaoff(3+4)" },
3253 { 'P', "mr", "(1\\+),\\0" },
3259 unique_bit_in (HOST_WIDE_INT i)
3263 case 0x01: case 0xfe: return 0;
3264 case 0x02: case 0xfd: return 1;
3265 case 0x04: case 0xfb: return 2;
3266 case 0x08: case 0xf7: return 3;
3267 case 0x10: case 0x7f: return 4;
3268 case 0x20: case 0xbf: return 5;
3269 case 0x40: case 0xdf: return 6;
3270 case 0x80: case 0xef: return 7;
3277 bit_size_for_clip (HOST_WIDE_INT i)
3281 for (rv = 0; rv < 31; rv ++)
3282 if (((HOST_WIDE_INT) 1 << rv) > i)
3287 /* Print an operand to a assembler instruction. */
3290 mep_print_operand (FILE *file, rtx x, int code)
3293 const char *real_name;
3297 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3298 we're using, then skip over the "mep_" part of its name. */
3299 const struct cgen_insn *insn;
3301 if (mep_get_move_insn (mep_cmov, &insn))
3302 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3304 mep_intrinsic_unavailable (mep_cmov);
3309 switch (GET_CODE (x))
3312 fputs ("clr", file);
3315 fputs ("set", file);
3318 fputs ("not", file);
3321 output_operand_lossage ("invalid %%L code");
3326 /* Print the second operand of a CR <- CR move. If we're using
3327 a two-operand instruction (i.e., a real cmov), then just print
3328 the operand normally. If we're using a "reg, reg, immediate"
3329 instruction such as caddi3, print the operand followed by a
3330 zero field. If we're using a three-register instruction,
3331 print the operand twice. */
3332 const struct cgen_insn *insn;
3334 mep_print_operand (file, x, 0);
3335 if (mep_get_move_insn (mep_cmov, &insn)
3336 && insn_data[insn->icode].n_operands == 3)
3339 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3340 mep_print_operand (file, x, 0);
3342 mep_print_operand (file, const0_rtx, 0);
3348 for (i = 0; conversions[i].pattern; i++)
3349 if (conversions[i].code == code
3350 && strcmp(conversions[i].pattern, pattern) == 0)
3352 for (j = 0; conversions[i].format[j]; j++)
3353 if (conversions[i].format[j] == '\\')
3355 fputc (conversions[i].format[j+1], file);
3358 else if (ISDIGIT(conversions[i].format[j]))
3360 rtx r = patternr[conversions[i].format[j] - '0'];
3361 switch (GET_CODE (r))
3364 fprintf (file, "%s", reg_names [REGNO (r)]);
3370 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3373 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3376 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3379 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3382 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3385 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3386 && !(INTVAL (r) & 0xff))
3387 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3389 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3392 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3393 && conversions[i].format[j+1] == 0)
3395 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3396 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3399 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3402 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3407 fprintf(file, "[const_double 0x%lx]",
3408 (unsigned long) CONST_DOUBLE_HIGH(r));
3411 real_name = targetm.strip_name_encoding (XSTR (r, 0));
3412 assemble_name (file, real_name);
3415 output_asm_label (r);
3418 fprintf (stderr, "don't know how to print this operand:");
3425 if (conversions[i].format[j] == '+'
3426 && (!code || code == 'I')
3427 && ISDIGIT (conversions[i].format[j+1])
3428 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3429 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3431 fputc(conversions[i].format[j], file);
3435 if (!conversions[i].pattern)
3437 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3445 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3446 int noperands ATTRIBUTE_UNUSED)
3448 /* Despite the fact that MeP is perfectly capable of branching and
3449 doing something else in the same bundle, gcc does jump
3450 optimization *after* scheduling, so we cannot trust the bundling
3451 flags on jump instructions. */
3452 if (GET_MODE (insn) == BImode
3453 && get_attr_slots (insn) != SLOTS_CORE)
3454 fputc ('+', asm_out_file);
3457 /* Function args in registers. */
/* TARGET_SETUP_INCOMING_VARARGS hook: record how many of the four
   argument registers ($1..$4) remain unnamed and must be spilled for
   va_arg, and report the pretend-args size in bytes.
   NOTE(review): the guard between computing nsave and storing it is
   elided in this extract (presumably "if (nsave > 0)") — confirm.  */
3460 mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
3461 enum machine_mode mode ATTRIBUTE_UNUSED,
3462 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3463 int second_time ATTRIBUTE_UNUSED)
3465 int nsave = 4 - (cum->nregs + 1);
3468 cfun->machine->arg_regs_to_save = nsave;
3469 *pretend_size = nsave * 4;
3473 bytesize (const_tree type, enum machine_mode mode)
3475 if (mode == BLKmode)
3476 return int_size_in_bytes (type);
3477 return GET_MODE_SIZE (mode);
/* TARGET_EXPAND_BUILTIN_SAVEREGS: spill the unnamed argument
   registers into a stack buffer and return its address.  On IVC2 the
   buffer also holds the 64-bit coprocessor argument registers (c49+),
   stored ahead of the core registers.
   NOTE(review): branch structure is elided in this extract.  */
3481 mep_expand_builtin_saveregs (void)
3486 ns = cfun->machine->arg_regs_to_save;
/* IVC2 path: room for ns 8-byte cop regs (8-byte aligned) + core.  */
3489 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3490 regbuf = assign_stack_local (SImode, bufsize, 64);
3495 regbuf = assign_stack_local (SImode, bufsize, 32);
/* Core argument registers are $1..$4; the last ns of them spill.  */
3498 move_block_from_reg (5-ns, regbuf, ns);
3502 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3503 int ofs = 8 * ((ns+1)/2);
3505 for (i=0; i<ns; i++)
/* Coprocessor arg registers start at c49 (hard regno 49).  */
3507 int rn = (4-ns) + i + 49;
3510 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3511 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3515 return XEXP (regbuf, 0);
/* True iff tree type T is a vector type (IVC2 vector args/returns).  */
3518 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
/* TARGET_BUILD_BUILTIN_VA_LIST: build the four-pointer va_list record
   { next_gp, next_gp_limit, next_cop, next_stack } used by the
   va_start/va_arg expanders below.  */
3521 mep_build_builtin_va_list (void)
3523 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3527 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3529 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3530 get_identifier ("__va_next_gp"), ptr_type_node);
3531 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3532 get_identifier ("__va_next_gp_limit"),
3534 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3536 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3539 DECL_FIELD_CONTEXT (f_next_gp) = record;
3540 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3541 DECL_FIELD_CONTEXT (f_next_cop) = record;
3542 DECL_FIELD_CONTEXT (f_next_stack) = record;
/* Chain the fields in declaration order; va_start/va_arg below walk
   them with DECL_CHAIN in this same order.  */
3544 TYPE_FIELDS (record) = f_next_gp;
3545 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3546 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3547 DECL_CHAIN (f_next_cop) = f_next_stack;
3549 layout_type (record);
/* TARGET_EXPAND_BUILTIN_VA_START: initialize the four va_list fields.
   next_gp points at the register save buffer; next_gp_limit is
   next_gp + 4*ns; next_cop is the 8-byte-aligned area after it;
   next_stack is the first stack-passed argument (NEXTARG).  */
3555 mep_expand_va_start (tree valist, rtx nextarg)
3557 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3558 tree next_gp, next_gp_limit, next_cop, next_stack;
3562 ns = cfun->machine->arg_regs_to_save;
3564 f_next_gp = TYPE_FIELDS (va_list_type_node);
3565 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3566 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3567 f_next_stack = DECL_CHAIN (f_next_cop);
3569 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3571 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3572 valist, f_next_gp_limit, NULL_TREE);
3573 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3575 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3576 valist, f_next_stack, NULL_TREE);
3578 /* va_list.next_gp = expand_builtin_saveregs (); */
3579 u = make_tree (sizetype, expand_builtin_saveregs ());
3580 u = fold_convert (ptr_type_node, u);
3581 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3582 TREE_SIDE_EFFECTS (t) = 1;
3583 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3585 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3586 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3588 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3589 TREE_SIDE_EFFECTS (t) = 1;
3590 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3592 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3593 size_int (8 * ((ns+1)/2)));
3594 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3595 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3596 TREE_SIDE_EFFECTS (t) = 1;
3597 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3599 /* va_list.next_stack = nextarg; */
3600 u = make_tree (ptr_type_node, nextarg);
3601 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3602 TREE_SIDE_EFFECTS (t) = 1;
3603 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* TARGET_GIMPLIFY_VA_ARG_EXPR: emit gimple for va_arg(VALIST, TYPE).
   Values <= 4 bytes (8 for IVC2 vectors) come from the register save
   area; larger values are passed by reference.  The generated code
   branches between the register area and the stack overflow area.
   NOTE(review): several lines are elided from this extract.  */
3607 mep_gimplify_va_arg_expr (tree valist, tree type,
3609 gimple_seq *post_p ATTRIBUTE_UNUSED)
3611 HOST_WIDE_INT size, rsize;
3612 bool by_reference, ivc2_vec;
3613 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3614 tree next_gp, next_gp_limit, next_cop, next_stack;
3615 tree label_sover, label_selse;
3618 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3620 size = int_size_in_bytes (type);
3621 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
/* By-reference values are fetched as a pointer, then dereferenced.  */
3625 type = build_pointer_type (type);
3628 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3630 f_next_gp = TYPE_FIELDS (va_list_type_node);
3631 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3632 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3633 f_next_stack = DECL_CHAIN (f_next_cop);
3635 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3637 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3638 valist, f_next_gp_limit, NULL_TREE);
3639 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3641 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3642 valist, f_next_stack, NULL_TREE);
3644 /* if f_next_gp < f_next_gp_limit
3645 IF (VECTOR_P && IVC2)
3653 val = *f_next_stack;
3654 f_next_stack += rsize;
3658 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3659 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3660 res_addr = create_tmp_var (ptr_type_node, NULL);
/* if (next_gp >= next_gp_limit) goto selse;  (registers exhausted) */
3662 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3663 unshare_expr (next_gp_limit));
3664 tmp = build3 (COND_EXPR, void_type_node, tmp,
3665 build1 (GOTO_EXPR, void_type_node,
3666 unshare_expr (label_selse)),
3668 gimplify_and_add (tmp, pre_p);
/* Register path: vectors read from the cop area, scalars from gp.  */
3672 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3673 gimplify_and_add (tmp, pre_p);
3677 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3678 gimplify_and_add (tmp, pre_p);
/* Advance both cursors regardless of which was consumed.  */
3681 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3682 unshare_expr (next_gp), size_int (4));
3683 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3685 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3686 unshare_expr (next_cop), size_int (8));
3687 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3689 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3690 gimplify_and_add (tmp, pre_p);
/* Stack path (selse): take from next_stack, advance by rsize.  */
3694 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3695 gimplify_and_add (tmp, pre_p);
3697 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3698 gimplify_and_add (tmp, pre_p);
3700 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3701 unshare_expr (next_stack), size_int (rsize));
3702 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3706 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3707 gimplify_and_add (tmp, pre_p);
3709 res_addr = fold_convert (build_pointer_type (type), res_addr);
/* Extra dereference for the by-reference case.  */
3712 res_addr = build_va_arg_indirect_ref (res_addr);
3714 return build_va_arg_indirect_ref (res_addr);
/* INIT_CUMULATIVE_ARGS: reset the argument cursor; also record in the
   cumulative state whether the callee is a VLIW function (affects the
   jsr/jsrv choice in mep_function_arg).
   NOTE(review): the field assignments are elided in this extract.  */
3718 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3719 rtx libname ATTRIBUTE_UNUSED,
3720 tree fndecl ATTRIBUTE_UNUSED)
3724 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3730 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3731 larger than 4 bytes are passed indirectly. Return value in 0,
3732 unless bigger than 4 bytes, then the caller passes a pointer as the
3733 first arg. For varargs, we copy $1..$4 to the stack. */
/* TARGET_FUNCTION_ARG: return the register for the next argument, or
   NULL-equivalent when it goes on the stack (return elided here).
   Core args use $1..$4; IVC2 vector args use c49 onward.  */
3736 mep_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3737 const_tree type ATTRIBUTE_UNUSED,
3738 bool named ATTRIBUTE_UNUSED)
3740 /* VOIDmode is a signal for the backend to pass data to the call
3741 expander via the second operand to the call pattern. We use
3742 this to determine whether to use "jsr" or "jsrv". */
3743 if (mode == VOIDmode)
3744 return GEN_INT (cum->vliw);
3746 /* If we havn't run out of argument registers, return the next. */
3749 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3750 return gen_rtx_REG (mode, cum->nregs + 49);
3752 return gen_rtx_REG (mode, cum->nregs + 1);
3755 /* Otherwise the argument goes on the stack. */
/* TARGET_PASS_BY_REFERENCE: values outside 1..8 bytes are passed by
   reference; IVC2 vector args that still fit in registers are the
   exception.  NOTE(review): return statements elided here.  */
3760 mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3761 enum machine_mode mode,
3763 bool named ATTRIBUTE_UNUSED)
3765 int size = bytesize (type, mode);
3767 /* This is non-obvious, but yes, large values passed after we've run
3768 out of registers are *still* passed by reference - we put the
3769 address of the parameter on the stack, as well as putting the
3770 parameter itself elsewhere on the stack. */
3772 if (size <= 0 || size > 8)
3776 if (TARGET_IVC2 && cum->nregs < 4 && type != NULL_TREE && VECTOR_TYPE_P (type))
/* TARGET_FUNCTION_ARG_ADVANCE: step the cumulative-args cursor past
   one argument.  NOTE(review): the body is elided in this extract
   (presumably increments pcum->nregs) — confirm against upstream.  */
3782 mep_function_arg_advance (CUMULATIVE_ARGS *pcum,
3783 enum machine_mode mode ATTRIBUTE_UNUSED,
3784 const_tree type ATTRIBUTE_UNUSED,
3785 bool named ATTRIBUTE_UNUSED)
3791 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3793 int size = bytesize (type, BLKmode);
3794 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3795 return size > 0 && size <= 8 ? 0 : 1;
3796 return size > 0 && size <= 4 ? 0 : 1;
/* TARGET_NARROW_VOLATILE_BITFIELD hook (body elided in this extract).  */
3800 mep_narrow_volatile_bitfield (void)
3806 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3809 mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
3811 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3812 return gen_rtx_REG (TYPE_MODE (type), 48);
3813 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3816 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3819 mep_libcall_value (enum machine_mode mode)
3821 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3824 /* Handle pipeline hazards. */
/* Opcode classes that participate in the stc->fsft / stc->ret
   hazards, plus printable names for the diagnostic comment.  */
3826 typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
3827 static const char *opnames[] = { "", "stc", "fsft", "ret" };
/* Class of the previously emitted opcode (persists across calls).  */
3829 static int prev_opcode = 0;
3831 /* This isn't as optimal as it could be, because we don't know what
3832 control register the STC opcode is storing in. We only need to add
3833 the nop if it's the relevent register, but we add it for irrelevent
/* ASM_OUTPUT_OPCODE hook: classify the opcode at PTR and emit a nop
   (hazard text elided here) between hazardous opcode pairs.
   NOTE(review): lines are elided from this extract.  */
3837 mep_asm_output_opcode (FILE *file, const char *ptr)
3839 int this_opcode = op_none;
3840 const char *hazard = 0;
/* !ISGRAPH(next char) ensures we match whole mnemonics only.  */
3845 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3846 this_opcode = op_fsft;
3849 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3850 this_opcode = op_ret;
3853 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3854 this_opcode = op_stc;
3858 if (prev_opcode == op_stc && this_opcode == op_fsft)
3860 if (prev_opcode == op_stc && this_opcode == op_ret)
3864 fprintf(file, "%s\t# %s-%s hazard\n\t",
3865 hazard, opnames[prev_opcode], opnames[this_opcode]);
3867 prev_opcode = this_opcode;
3870 /* Handle attributes. */
/* Attribute handler for "based"/"tiny": only valid on variables (and
   pointer/typedef types); reject automatic storage and warn when the
   attribute lands on a pointed-to type.  NOTE(review): returns and
   *no_add assignments are elided in this extract.  */
3873 mep_validate_based_tiny (tree *node, tree name, tree args,
3874 int flags ATTRIBUTE_UNUSED, bool *no_add)
3876 if (TREE_CODE (*node) != VAR_DECL
3877 && TREE_CODE (*node) != POINTER_TYPE
3878 && TREE_CODE (*node) != TYPE_DECL)
3880 warning (0, "%qE attribute only applies to variables", name);
3883 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3885 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3887 warning (0, "address region attributes not allowed with auto storage class");
3890 /* Ignore storage attribute of pointed to variable: char __far * x; */
3891 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3893 warning (0, "address region attributes on pointed-to types ignored");
/* Count address-region attributes (based/tiny/near/far/io) in LIST;
   with CHECK_SECTION_ATTR set, count "section" attributes instead.
   Used to diagnose conflicting placement attributes.
   NOTE(review): the count_sections increment line is elided here.  */
3902 mep_multiple_address_regions (tree list, bool check_section_attr)
3905 int count_sections = 0;
3906 int section_attr_count = 0;
3908 for (a = list; a; a = TREE_CHAIN (a))
3910 if (is_attribute_p ("based", TREE_PURPOSE (a))
3911 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3912 || is_attribute_p ("near", TREE_PURPOSE (a))
3913 || is_attribute_p ("far", TREE_PURPOSE (a))
3914 || is_attribute_p ("io", TREE_PURPOSE (a)))
3916 if (check_section_attr)
3917 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3920 if (check_section_attr)
3921 return section_attr_count;
3923 return count_sections;
/* Fetch the relevant attribute list for DECL: type attributes for
   types, the decl's own attributes when present, otherwise the
   attributes of the decl's type.  */
3926 #define MEP_ATTRIBUTES(decl) \
3927 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3928 : DECL_ATTRIBUTES (decl) \
3929 ? (DECL_ATTRIBUTES (decl)) \
3930 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
/* Attribute handler for "near"/"far": valid on variables and
   functions; rejects auto storage, warns on pointed-to types, and
   drops conflicting address-region attributes.
   NOTE(review): lines (returns, *no_add) are elided in this extract.  */
3933 mep_validate_near_far (tree *node, tree name, tree args,
3934 int flags ATTRIBUTE_UNUSED, bool *no_add)
3936 if (TREE_CODE (*node) != VAR_DECL
3937 && TREE_CODE (*node) != FUNCTION_DECL
3938 && TREE_CODE (*node) != METHOD_TYPE
3939 && TREE_CODE (*node) != POINTER_TYPE
3940 && TREE_CODE (*node) != TYPE_DECL)
3942 warning (0, "%qE attribute only applies to variables and functions",
3946 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3948 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3950 warning (0, "address region attributes not allowed with auto storage class");
3953 /* Ignore storage attribute of pointed to variable: char __far * x; */
3954 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3956 warning (0, "address region attributes on pointed-to types ignored");
/* A second region attribute on the same decl is a conflict: warn and
   discard the whole attribute list.  */
3960 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3962 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3963 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3964 DECL_ATTRIBUTES (*node) = NULL_TREE;
/* Attribute handler for "disinterrupt": functions only.
   NOTE(review): *no_add / return lines elided in this extract.  */
3970 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3971 int flags ATTRIBUTE_UNUSED, bool *no_add)
3973 if (TREE_CODE (*node) != FUNCTION_DECL
3974 && TREE_CODE (*node) != METHOD_TYPE)
3976 warning (0, "%qE attribute only applies to functions", name);
/* Attribute handler for "interrupt": functions only; must not be
   inline, must return void, and must take no arguments.
   NOTE(review): lines are elided in this extract.  */
3983 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3984 int flags ATTRIBUTE_UNUSED, bool *no_add)
3988 if (TREE_CODE (*node) != FUNCTION_DECL)
3990 warning (0, "%qE attribute only applies to functions", name);
/* Interrupt handlers need their special prologue/epilogue, so they
   can never be inlined into a caller.  */
3995 if (DECL_DECLARED_INLINE_P (*node))
3996 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3997 DECL_UNINLINABLE (*node) = 1;
3999 function_type = TREE_TYPE (*node);
4001 if (TREE_TYPE (function_type) != void_type_node)
4002 error ("interrupt function must have return type of void");
4004 if (TYPE_ARG_TYPES (function_type)
4005 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
4006 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
4007 error ("interrupt function must have no arguments");
/* Attribute handler shared by "io" and "cb": variables only; the
   optional argument must be an integer constant (the fixed address).
   Accepted "io" variables become volatile unless -mio-no-volatile.
   NOTE(review): lines are elided in this extract.  */
4013 mep_validate_io_cb (tree *node, tree name, tree args,
4014 int flags ATTRIBUTE_UNUSED, bool *no_add)
4016 if (TREE_CODE (*node) != VAR_DECL)
4018 warning (0, "%qE attribute only applies to variables", name);
4022 if (args != NULL_TREE)
/* Strip a NON_LVALUE_EXPR wrapper the front end may add.  */
4024 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
4025 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
4026 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
4028 warning (0, "%qE attribute allows only an integer constant argument",
4034 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
4035 TREE_THIS_VOLATILE (*node) = 1;
/* Attribute handler for "vliw": function types/decls only.  Gives
   one-time hints for the common pointer/array misuses, and rejects
   the attribute outright when no VLIW configuration is available.
   NOTE(review): lines are elided in this extract.  */
4041 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4042 int flags ATTRIBUTE_UNUSED, bool *no_add)
4044 if (TREE_CODE (*node) != FUNCTION_TYPE
4045 && TREE_CODE (*node) != FUNCTION_DECL
4046 && TREE_CODE (*node) != METHOD_TYPE
4047 && TREE_CODE (*node) != FIELD_DECL
4048 && TREE_CODE (*node) != TYPE_DECL)
/* The notes below are emitted at most once per compilation.  */
4050 static int gave_pointer_note = 0;
4051 static int gave_array_note = 0;
4052 static const char * given_type = NULL;
4054 given_type = tree_code_name[TREE_CODE (*node)];
4055 if (TREE_CODE (*node) == POINTER_TYPE)
4056 given_type = "pointers";
4057 if (TREE_CODE (*node) == ARRAY_TYPE)
4058 given_type = "arrays";
4061 warning (0, "%qE attribute only applies to functions, not %s",
4064 warning (0, "%qE attribute only applies to functions",
4068 if (TREE_CODE (*node) == POINTER_TYPE
4069 && !gave_pointer_note)
4071 inform (input_location, "to describe a pointer to a VLIW function, use syntax like this:");
4072 inform (input_location, " typedef int (__vliw *vfuncptr) ();");
4073 gave_pointer_note = 1;
4076 if (TREE_CODE (*node) == ARRAY_TYPE
4077 && !gave_array_note)
4079 inform (input_location, "to describe an array of VLIW function pointers, use syntax like this:");
4080 inform (input_location, " typedef int (__vliw *vfuncptr[]) ();");
4081 gave_array_note = 1;
4085 error ("VLIW functions are not allowed without a VLIW configuration");
/* MeP attribute table, wired to the handlers above.
   NOTE(review): declared with 11 entries but only 10 rows are visible
   in this extract — one row appears to be elided.  */
4089 static const struct attribute_spec mep_attribute_table[11] =
4091 /* name min max decl type func handler */
4092 { "based", 0, 0, false, false, false, mep_validate_based_tiny },
4093 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny },
4094 { "near", 0, 0, false, false, false, mep_validate_near_far },
4095 { "far", 0, 0, false, false, false, mep_validate_near_far },
4096 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt },
4097 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt },
4098 { "io", 0, 1, false, false, false, mep_validate_io_cb },
4099 { "cb", 0, 1, false, false, false, mep_validate_io_cb },
4100 { "vliw", 0, 0, false, true, false, mep_validate_vliw },
4101 { NULL, 0, 0, false, false, false, NULL }
4105 mep_function_attribute_inlinable_p (const_tree callee)
4107 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4108 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4109 return (lookup_attribute ("disinterrupt", attrs) == 0
4110 && lookup_attribute ("interrupt", attrs) == 0);
/* TARGET_CAN_INLINE_P: forbid inlining a VLIW callee into a non-VLIW
   caller (the bundling conventions differ).
   NOTE(review): the return statements are elided in this extract.  */
4114 mep_can_inline_p (tree caller, tree callee)
4116 if (TREE_CODE (callee) == ADDR_EXPR)
4117 callee = TREE_OPERAND (callee, 0);
4119 if (!mep_vliw_function_p (caller)
4120 && mep_vliw_function_p (callee))
/* Flag bit recorded for "#pragma disinterrupt" (FUNC_CALL, elided
   from this extract, is presumably the companion bit for
   "#pragma call" — confirm).  */
4128 #define FUNC_DISINTERRUPT 2
/* One entry per function name mentioned in a MeP pragma.
   NOTE(review): the flag/used fields are elided in this extract.  */
4131 struct GTY(()) pragma_entry {
4134 const char *funcname;
4136 typedef struct pragma_entry pragma_entry;
4138 /* Hash table of farcall-tagged sections. */
4139 static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4142 pragma_entry_eq (const void *p1, const void *p2)
4144 const pragma_entry *old = (const pragma_entry *) p1;
4145 const char *new_name = (const char *) p2;
4147 return strcmp (old->funcname, new_name) == 0;
4151 pragma_entry_hash (const void *p)
4153 const pragma_entry *old = (const pragma_entry *) p;
4154 return htab_hash_string (old->funcname);
/* Record FLAG for FUNCNAME in pragma_htab, creating the table and/or
   the entry on first use.  NOTE(review): lines (guards, field init)
   are elided in this extract.  */
4158 mep_note_pragma_flag (const char *funcname, int flag)
4160 pragma_entry **slot;
4163 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4164 pragma_entry_eq, NULL);
4166 slot = (pragma_entry **)
4167 htab_find_slot_with_hash (pragma_htab, funcname,
4168 htab_hash_string (funcname), INSERT);
/* First mention of this function: allocate and fill a new entry.  */
4172 *slot = ggc_alloc_pragma_entry ();
4175 (*slot)->funcname = ggc_strdup (funcname);
4177 (*slot)->flag |= flag;
/* Return nonzero if FLAG was recorded for FUNCNAME; marks the entry
   as used so unused pragmas can be diagnosed at end of file.
   Encoded names ("@x.name") are first stripped to the bare name.
   NOTE(review): lines (guards, returns) are elided in this extract.  */
4181 mep_lookup_pragma_flag (const char *funcname, int flag)
4183 pragma_entry **slot;
4188 if (funcname[0] == '@' && funcname[2] == '.')
4191 slot = (pragma_entry **)
4192 htab_find_slot_with_hash (pragma_htab, funcname,
4193 htab_hash_string (funcname), NO_INSERT);
4194 if (slot && *slot && ((*slot)->flag & flag))
4196 (*slot)->used |= flag;
4203 mep_lookup_pragma_call (const char *funcname)
4205 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4209 mep_note_pragma_call (const char *funcname)
4211 mep_note_pragma_flag (funcname, FUNC_CALL);
4215 mep_lookup_pragma_disinterrupt (const char *funcname)
4217 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4221 mep_note_pragma_disinterrupt (const char *funcname)
4223 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
/* htab_traverse callback: warn about "#pragma disinterrupt" entries
   never matched by a function.  NOTE(review): the traversal return
   value line is elided in this extract.  */
4227 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4229 const pragma_entry *d = (const pragma_entry *)(*slot);
4231 if ((d->flag & FUNC_DISINTERRUPT)
4232 && !(d->used & FUNC_DISINTERRUPT))
4233 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
/* End-of-file hook: report unused "#pragma disinterrupt" entries.
   NOTE(review): the pragma_htab null guard is elided in this extract.  */
4238 mep_file_cleanups (void)
4241 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
4244 /* These three functions provide a bridge between the pramgas that
4245 affect register classes, and the functions that maintain them. We
4246 can't call those functions directly as pragma handling is part of
4247 the front end and doesn't have direct access to them. */
/* NOTE(review): the bodies of the two functions below are elided in
   this extract.  */
4250 mep_save_register_info (void)
4252 save_register_info ();
4256 mep_reinit_regs (void)
4262 mep_init_regs (void)
/* Map DECL's attribute LIST to the one-character section-tag encoding
   ('b' based, 't' tiny, 'n' near, 'f' far, io/cb variants — see the
   encoding table in the file header).  Conflicting region attributes
   are diagnosed and dropped.  NOTE(review): the returned encoding
   characters and several branches are elided in this extract.  */
4270 mep_attrlist_to_encoding (tree list, tree decl)
4272 if (mep_multiple_address_regions (list, false) > 1)
4274 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4275 TREE_PURPOSE (TREE_CHAIN (list)),
4277 DECL_SOURCE_LINE (decl));
/* Keep only the first region attribute; discard the rest.  */
4278 TREE_CHAIN (list) = NULL_TREE;
4283 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4285 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4287 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4289 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4291 if (is_attribute_p ("io", TREE_PURPOSE (list)))
/* "io" with an integer argument only qualifies when the fixed
   address fits in the 24-bit I/O range.  */
4293 if (TREE_VALUE (list)
4294 && TREE_VALUE (TREE_VALUE (list))
4295 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST
4297 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4299 && location <= 0x1000000)
4304 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4306 list = TREE_CHAIN (list);
4309 && TREE_CODE (decl) == FUNCTION_DECL
4310 && DECL_SECTION_NAME (decl) == 0)
/* TARGET_COMP_TYPE_ATTRIBUTES: two function types are compatible only
   if they agree on the "vliw" attribute (core vs. VLIW calling mode).
   The comparison/return lines are elided in this listing.  */
4316 mep_comp_type_attributes (const_tree t1, const_tree t2)
4320 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4321 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
/* TARGET_INSERT_ATTRIBUTES: add implicit attributes to DECL.
   Functions named in "#pragma disinterrupt" get the disinterrupt
   attribute; global/static variables with no explicit region get a
   default region (based/tiny/near/far) picked from size cutoffs and
   -m options, plus sanity warnings for duplicated __io addresses.
   NOTE(review): listing is non-contiguous; many early returns,
   braces and assignments to SECNAME are elided.  */
4330 mep_insert_attributes (tree decl, tree *attributes)
4333 const char *secname = 0;
4334 tree attrib, attrlist;
4337 if (TREE_CODE (decl) == FUNCTION_DECL)
4339 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
/* Honor "#pragma disinterrupt <name>" by attaching the attribute.  */
4341 if (mep_lookup_pragma_disinterrupt (funcname))
4343 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4344 *attributes = chainon (*attributes, attrib);
/* Only global/static/external variables get default regions.  */
4348 if (TREE_CODE (decl) != VAR_DECL
4349 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4352 if (TREE_READONLY (decl) && TARGET_DC)
4353 /* -mdc means that const variables default to the near section,
4354 regardless of the size cutoff. */
4357 /* User specified an attribute, so override the default.
4358 Ignore storage attribute of pointed to variable. char __far * x; */
4359 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4361 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4362 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4363 else if (DECL_ATTRIBUTES (decl) && *attributes)
4364 DECL_ATTRIBUTES (decl) = NULL_TREE;
/* Derive the region encoding from the decl's attributes, falling back
   to the attributes of its type.  */
4367 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4368 encoding = mep_attrlist_to_encoding (attrlist, decl);
4369 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4371 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4372 encoding = mep_attrlist_to_encoding (attrlist, decl);
4376 /* This means that the declaration has a specific section
4377 attribute, so we should not apply the default rules. */
/* __io variables: diagnose two decls sharing one explicit address.  */
4379 if (encoding == 'i' || encoding == 'I')
4381 tree attr = lookup_attribute ("io", attrlist);
4383 && TREE_VALUE (attr)
4384 && TREE_VALUE (TREE_VALUE(attr)))
4386 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4387 static tree previous_value = 0;
4388 static int previous_location = 0;
4389 static tree previous_name = 0;
4391 /* We take advantage of the fact that gcc will reuse the
4392 same tree pointer when applying an attribute to a
4393 list of decls, but produce a new tree for attributes
4394 on separate source lines, even when they're textually
4395 identical. This is the behavior we want. */
4396 if (TREE_VALUE (attr) == previous_value
4397 && location == previous_location)
4399 warning(0, "__io address 0x%x is the same for %qE and %qE",
4400 location, previous_name, DECL_NAME (decl));
4402 previous_name = DECL_NAME (decl);
4403 previous_location = location;
4404 previous_value = TREE_VALUE (attr);
4411 /* Declarations of arrays can change size. Don't trust them. */
4412 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4415 size = int_size_in_bytes (TREE_TYPE (decl));
/* -mrand-tpgp: small globals get a region chosen pseudo-randomly;
   the selection code itself is elided here.  */
4417 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4419 if (TREE_PUBLIC (decl)
4420 || DECL_EXTERNAL (decl)
4421 || TREE_STATIC (decl))
4423 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
/* Default region by size: based if <= -mbased-cutoff bytes, else tiny
   if <= -mtiny-cutoff bytes; the SECNAME assignments are elided.  */
4447 if (size <= mep_based_cutoff && size > 0)
4449 else if (size <= mep_tiny_cutoff && size > 0)
/* -mconst-section=tiny|near|far overrides the choice for const data.  */
4455 if (mep_const_section && TREE_READONLY (decl))
4457 if (strcmp (mep_const_section, "tiny") == 0)
4459 else if (strcmp (mep_const_section, "near") == 0)
4461 else if (strcmp (mep_const_section, "far") == 0)
/* Attach the chosen region attribute unless the user already supplied
   one (avoid duplicate-region warnings).  */
4468 if (!mep_multiple_address_regions (*attributes, true)
4469 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4471 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4473 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4474 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4475 and mep_validate_based_tiny. */
4476 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
/* TARGET_ENCODE_SECTION_INFO: rewrite DECL's SYMBOL_REF name to carry
   the "@<char>." region prefix (see file header), so later passes and
   the assembler output can recover the region from the name alone.
   Also warns when a variable exceeds its region's capacity.
   NOTE(review): listing is non-contiguous; early returns, the
   maxsize table for other encodings, and braces are elided.  */
4481 mep_encode_section_info (tree decl, rtx rtl, int first)
4484 const char *oldname;
4485 const char *secname;
4491 tree mep_attributes;
/* Only variables and functions get encoded names.  */
4496 if (TREE_CODE (decl) != VAR_DECL
4497 && TREE_CODE (decl) != FUNCTION_DECL)
4500 rtlname = XEXP (rtl, 0);
4501 if (GET_CODE (rtlname) == SYMBOL_REF)
4502 oldname = XSTR (rtlname, 0);
4503 else if (GET_CODE (rtlname) == MEM
4504 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4505 oldname = XSTR (XEXP (rtlname, 0), 0);
4509 type = TREE_TYPE (decl);
4510 if (type == error_mark_node)
4512 mep_attributes = MEP_ATTRIBUTES (decl);
4514 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
/* Build "@<c>.<oldname>" and install a fresh SYMBOL_REF, preserving
   weak-ness and the decl backpointer.  */
4518 newname = (char *) alloca (strlen (oldname) + 4);
4519 sprintf (newname, "@%c.%s", encoding, oldname);
4520 idp = get_identifier (newname);
4522 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4523 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4524 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
/* 16 MB limit -- presumably for the "far"/io-addressed case; the
   other cutoffs are in elided lines, TODO confirm.  */
4537 maxsize = 0x1000000;
4545 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4547 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4549 (long) int_size_in_bytes (TREE_TYPE (decl)),
/* TARGET_STRIP_NAME_ENCODING: return SYM with any "@<char>." region
   prefix (and, per the elided lines, presumably any leading '*')
   removed.  */
4557 mep_strip_name_encoding (const char *sym)
4563 else if (*sym == '@' && sym[2] == '.')
/* TARGET_ASM_SELECT_SECTION: pick the output section for DECL from
   the "@<char>." encoding in its symbol name -- text/ftext/vtext for
   functions, based/tiny/far/io/cb variants for data.
   NOTE(review): listing is non-contiguous; the switch cases that set
   ENCODING from name[1] and several returns are elided.  */
4571 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4572 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4577 switch (TREE_CODE (decl))
/* VAR_DECL case: writable or runtime-initialized data is not
   read-only.  */
4580 if (!TREE_READONLY (decl)
4581 || TREE_SIDE_EFFECTS (decl)
4582 || !DECL_INITIAL (decl)
4583 || (DECL_INITIAL (decl) != error_mark_node
4584 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4588 if (! TREE_CONSTANT (decl))
4596 if (TREE_CODE (decl) == FUNCTION_DECL)
4598 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4600 if (name[0] == '@' && name[2] == '.')
4605 if (flag_function_sections || DECL_ONE_ONLY (decl))
4606 mep_unique_section (decl, 0);
/* VLIW functions go to vtext/vftext; 'f' ("far") selects the
   f-prefixed variant.  */
4607 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4609 if (encoding == 'f')
4610 return vftext_section;
4612 return vtext_section;
4614 else if (encoding == 'f')
4615 return ftext_section;
4617 return text_section;
4620 if (TREE_CODE (decl) == VAR_DECL)
4622 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4624 if (name[0] == '@' && name[2] == '.')
/* Dispatch on encoding char (case labels elided): based, tiny
   (srodata/sdata/tinybss), far (frodata), io, cb.  */
4628 return based_section;
4632 return srodata_section;
4633 if (DECL_INITIAL (decl))
4634 return sdata_section;
4635 return tinybss_section;
4639 return frodata_section;
/* io/cb variables live at fixed addresses; an initializer is
   meaningless, so diagnose and fall back to .data.  */
4644 error_at (DECL_SOURCE_LOCATION (decl),
4645 "variable %D of type %<io%> must be uninitialized", decl);
4646 return data_section;
4649 error_at (DECL_SOURCE_LOCATION (decl),
4650 "variable %D of type %<cb%> must be uninitialized", decl);
4651 return data_section;
4656 return readonly_data_section;
4658 return data_section;
/* TARGET_ASM_UNIQUE_SECTION: build a per-decl section name of the
   form "<prefix><symbol>", where the prefix depends on the region
   encoding and the second column is used for ONE_ONLY (linkonce)
   decls.  (Braces and the encoding-dispatch case labels are elided
   in this listing.)  */
4662 mep_unique_section (tree decl, int reloc)
4664 static const char *prefixes[][2] =
4666 { ".text.", ".gnu.linkonce.t." },
4667 { ".rodata.", ".gnu.linkonce.r." },
4668 { ".data.", ".gnu.linkonce.d." },
4669 { ".based.", ".gnu.linkonce.based." },
4670 { ".sdata.", ".gnu.linkonce.s." },
4671 { ".far.", ".gnu.linkonce.far." },
4672 { ".ftext.", ".gnu.linkonce.ft." },
4673 { ".frodata.", ".gnu.linkonce.frd." },
4674 { ".srodata.", ".gnu.linkonce.srd." },
4675 { ".vtext.", ".gnu.linkonce.v." },
4676 { ".vftext.", ".gnu.linkonce.vf." }
4678 int sec = 2; /* .data */
4680 const char *name, *prefix;
/* Prefer the (possibly "@<c>."-encoded) RTL symbol name over the
   assembler name.  */
4683 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4684 if (DECL_RTL (decl))
4685 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4687 if (TREE_CODE (decl) == FUNCTION_DECL)
4689 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4690 sec = 9; /* .vtext */
4692 sec = 0; /* .text */
4694 else if (decl_readonly_section (decl, reloc))
4695 sec = 1; /* .rodata */
/* Refine SEC from the "@<c>." encoding; the case labels are elided.  */
4697 if (name[0] == '@' && name[2] == '.')
4702 sec = 3; /* .based */
4706 sec = 8; /* .srodata */
4708 sec = 4; /* .sdata */
4712 sec = 6; /* .ftext */
4714 sec = 10; /* .vftext */
4716 sec = 7; /* .frodata */
4718 sec = 5; /* .far. */
4724 prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
4725 len = strlen (name) + strlen (prefix);
4726 string = (char *) alloca (len + 1);
4728 sprintf (string, "%s%s", prefix, name);
4730 DECL_SECTION_NAME (decl) = build_string (len, string);
4733 /* Given a decl, a section name, and whether the decl initializer
4734 has relocs, choose attributes for the section. */
4736 #define SECTION_MEP_VLIW SECTION_MACH_DEP
/* TARGET_SECTION_TYPE_FLAGS: as the default, but tag sections holding
   VLIW functions with SECTION_MEP_VLIW so mep_asm_named_section can
   emit the .vliw directive.  (The return statement is elided.)  */
4739 mep_section_type_flags (tree decl, const char *name, int reloc)
4741 unsigned int flags = default_section_type_flags (decl, name, reloc);
4743 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4744 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4745 flags |= SECTION_MEP_VLIW;
4750 /* Switch to an arbitrary section NAME with attributes as specified
4751 by FLAGS. ALIGN specifies any known alignment requirements for
4752 the section; 0 if the default should be used.
4754 Differs from the standard ELF version only in support of VLIW mode. */
/* TARGET_ASM_NAMED_SECTION (see comment above): standard ELF .section
   output, plus a trailing .vliw/.core directive for code sections so
   the assembler knows which instruction set follows.
   (The per-flag character appends into FLAGCHARS are elided.)  */
4757 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4759 char flagchars[8], *f = flagchars;
4762 if (!(flags & SECTION_DEBUG))
4764 if (flags & SECTION_WRITE)
4766 if (flags & SECTION_CODE)
4768 if (flags & SECTION_SMALL)
4770 if (flags & SECTION_MEP_VLIW)
/* TYPE is "nobits" for BSS, presumably "progbits" otherwise -- the
   assignments are elided.  */
4774 if (flags & SECTION_BSS)
4779 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4780 name, flagchars, type);
4782 if (flags & SECTION_CODE)
4783 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
/* Emit an aligned common/BSS definition for NAME.  io/cb symbols are
   emitted as absolute addresses ("name = location"); based/tiny/far
   symbols go to their BSS-like sections; everything else falls back
   to .comm/.local.  NOTE(review): listing is non-contiguous; braces,
   some returns and the p2align computation are partly elided.  */
4788 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4789 int size, int align, int global)
4791 /* We intentionally don't use mep_section_tag() here. */
/* name[1] is the encoding char of an "@<c>." prefixed symbol.  */
4793 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4797 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4798 DECL_ATTRIBUTES (decl));
4800 && TREE_VALUE (attr)
4801 && TREE_VALUE (TREE_VALUE(attr)))
4802 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
/* Fixed-address symbol: define it with an assembler assignment.  */
4807 fprintf (stream, "\t.globl\t");
4808 assemble_name (stream, name);
4809 fprintf (stream, "\n");
4811 assemble_name (stream, name);
4812 fprintf (stream, " = %d\n", location);
4815 if (name[0] == '@' && name[2] == '.')
4817 const char *sec = 0;
4821 switch_to_section (based_section);
4825 switch_to_section (tinybss_section);
4829 switch_to_section (farbss_section);
/* Convert ALIGN (bits) to a power-of-two byte alignment.  */
4838 while (align > BITS_PER_UNIT)
4843 name2 = targetm.strip_name_encoding (name);
4845 fprintf (stream, "\t.globl\t%s\n", name2);
4846 fprintf (stream, "\t.p2align %d\n", p2align);
4847 fprintf (stream, "\t.type\t%s,@object\n", name2);
4848 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4849 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
/* Default path: .local for non-global, then .comm.  */
4856 fprintf (stream, "\t.local\t");
4857 assemble_name (stream, name);
4858 fprintf (stream, "\n");
4860 fprintf (stream, "\t.comm\t");
4861 assemble_name (stream, name);
4862 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
/* TARGET_TRAMPOLINE_INIT: rather than writing instructions inline,
   delegate trampoline construction to the runtime helper
   __mep_trampoline_helper(addr, fnaddr, static_chain).  */
4868 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4870 rtx addr = XEXP (m_tramp, 0);
4871 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4873 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4874 LCT_NORMAL, VOIDmode, 3,
4877 static_chain, Pmode);
4880 /* Experimental Reorg. */
/* Recursively test whether REG (or, when REG is NULL, any MEM) is
   mentioned anywhere inside rtx IN.  With MODES_TOO nonzero a REG hit
   also requires matching machine modes.  Used by the reorg passes
   below.  NOTE(review): listing is non-contiguous; several case
   labels and returns are elided.  */
4883 mep_mentioned_p (rtx in,
4884 rtx reg, /* NULL for mem */
4885 int modes_too) /* if nonzero, modes must match also. */
4893 if (reg && GET_CODE (reg) != REG)
4896 if (GET_CODE (in) == LABEL_REF)
4899 code = GET_CODE (in);
/* Strip a wrapper (presumably SIGN_EXTEND/ZERO_EXTEND or similar --
   the case label is elided) and recurse on the operand.  */
4905 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4911 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4913 return (REGNO (in) == REGNO (reg));
4926 /* Set's source should be read-only. */
4927 if (code == SET && !reg)
4928 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
/* Generic walk over the rtx format string.  */
4930 fmt = GET_RTX_FORMAT (code);
4932 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4937 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4938 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4941 else if (fmt[i] == 'e'
4942 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4948 #define EXPERIMENTAL_REGMOVE_REORG 1
4950 #if EXPERIMENTAL_REGMOVE_REORG
/* True if registers R1 and R2 live in the same class for the purpose
   of the regmove reorg: both general registers or both coprocessor
   registers.  (Return statements are elided in this listing.)  */
4953 mep_compatible_reg_class (int r1, int r2)
4955 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
4957 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
/* Superfluous-move elimination (experimental; see
   EXPERIMENTAL_REGMOVE_REORG).  Finds (set r2 r1) where r1 dies,
   locates the later insn that consumes r2 (where r2 dies), substitutes
   r1 there, and deletes the move if the result still recognizes.
   Requires accurate REG_DEAD notes.  NOTE(review): listing is
   non-contiguous; loop braces and the substitution plumbing are
   partly elided.  */
4963 mep_reorg_regmove (rtx insns)
4965 rtx insn, next, pat, follow, *where;
4966 int count = 0, done = 0, replace, before = 0;
/* First pass apparently just counts insns for the dump stats.  */
4969 for (insn = insns; insn; insn = NEXT_INSN (insn))
4970 if (GET_CODE (insn) == INSN)
4973 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4974 set that uses the r2 and r2 dies there. We replace r2 with r1
4975 and see if it's still a valid insn. If so, delete the first set.
4976 Copied from reorg.c. */
4981 for (insn = insns; insn; insn = next)
4983 next = NEXT_INSN (insn);
4984 if (GET_CODE (insn) != INSN)
4986 pat = PATTERN (insn);
/* Candidate move: reg = reg, source dies here, classes compatible.  */
4990 if (GET_CODE (pat) == SET
4991 && GET_CODE (SET_SRC (pat)) == REG
4992 && GET_CODE (SET_DEST (pat)) == REG
4993 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4994 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4996 follow = next_nonnote_insn (insn);
4998 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
/* Skip over insns that touch neither source nor destination.  */
5000 while (follow && GET_CODE (follow) == INSN
5001 && GET_CODE (PATTERN (follow)) == SET
5002 && !dead_or_set_p (follow, SET_SRC (pat))
5003 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
5004 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
5007 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
5008 follow = next_nonnote_insn (follow);
5012 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
/* The consumer must be a SET where the moved register dies.  */
5013 if (follow && GET_CODE (follow) == INSN
5014 && GET_CODE (PATTERN (follow)) == SET
5015 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
5017 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5019 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5022 where = & SET_SRC (PATTERN (follow));
5025 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5027 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5030 where = & PATTERN (follow);
5036 /* If so, follow is the corresponding insn */
5043 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5044 for (x = insn; x ;x = NEXT_INSN (x))
5046 print_rtl_single (dump_file, x);
5049 fprintf (dump_file, "\n");
/* Try the substitution; only on success is the move deleted.  */
5053 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5057 next = delete_insn (insn);
5060 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5061 print_rtl_single (dump_file, follow);
5071 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5072 fprintf (dump_file, "=====\n");
5078 /* Figure out where to put LABEL, which is the label for a repeat loop.
5079 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5080 the loop ends just before LAST_INSN. If SHARED, insns other than the
5081 "repeat" might use LABEL to jump to the loop's continuation point.
5083 Return the last instruction in the adjusted loop. */
/* See the comment above: place LABEL for a repeat loop so that the
   two "epilogue" opcode slots required by the hardware repeat
   instruction are legal, padding with nops where needed.
   NOTE(review): listing is non-contiguous; the loop structure around
   these lines is partly elided.  */
5086 mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
5090 int count = 0, code, icode;
5093 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5094 INSN_UID (last_insn));
5096 /* Set PREV to the last insn in the loop. */
5099 prev = PREV_INSN (prev);
5101 /* Set NEXT to the next insn after the repeat label. */
/* Walk backwards collecting up to two insns that may sit in the
   repeat epilogue; stop at anything that can't go there.  */
5106 code = GET_CODE (prev);
5107 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
5112 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
5113 prev = XVECEXP (PATTERN (prev), 0, 1);
5115 /* Other insns that should not be in the last two opcodes. */
5116 icode = recog_memoized (prev);
5118 || icode == CODE_FOR_repeat
5119 || icode == CODE_FOR_erepeat
5120 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5123 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5124 is the second instruction in a VLIW bundle. In that case,
5125 loop again: if the first instruction also satisfies the
5126 conditions above then we will reach here again and put
5127 both of them into the repeat epilogue. Otherwise both
5128 should remain outside. */
5129 if (GET_MODE (prev) != BImode)
5134 print_rtl_single (dump_file, next)
5139 prev = PREV_INSN (prev);
5142 /* See if we're adding the label immediately after the repeat insn.
5143 If so, we need to separate them with a nop. */
5144 prev = prev_real_insn (next);
5146 switch (recog_memoized (prev))
5148 case CODE_FOR_repeat:
5149 case CODE_FOR_erepeat:
5151 fprintf (dump_file, "Adding nop inside loop\n");
5152 emit_insn_before (gen_nop (), next);
5159 /* Insert the label. */
5160 emit_label_before (label, next);
5162 /* Insert the nops. */
5163 if (dump_file && count < 2)
5164 fprintf (dump_file, "Adding %d nop%s\n\n",
5165 2 - count, count == 1 ? "" : "s");
/* Pad the epilogue out to exactly two opcodes.  */
5167 for (; count < 2; count++)
5169 last_insn = emit_insn_after (gen_nop (), last_insn);
5171 emit_insn_before (gen_nop (), last_insn);
/* Expander helper for doloop_begin/doloop_end: emit the internal
   pattern carrying a per-loop tag so mep_reorg_repeat can pair begins
   with ends.  A fresh tag is allocated whenever none exists yet or
   the previous tag was claimed by the same kind of insn (IS_END).  */
5178 mep_emit_doloop (rtx *operands, int is_end)
5182 if (cfun->machine->doloop_tags == 0
5183 || cfun->machine->doloop_tag_from_end == is_end)
5185 cfun->machine->doloop_tags++;
5186 cfun->machine->doloop_tag_from_end = is_end;
5189 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5191 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5193 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5197 /* Code for converting doloop_begins and doloop_ends into valid
5198 MeP instructions. A doloop_begin is just a placeholder:
5200 $count = unspec ($count)
5202 where $count is initially the number of iterations - 1.
5203 doloop_end has the form:
5205 if ($count-- == 0) goto label
5207 The counter variable is private to the doloop insns, nothing else
5208 relies on its value.
5210 There are three cases, in decreasing order of preference:
5212 1. A loop has exactly one doloop_begin and one doloop_end.
5213 The doloop_end branches to the first instruction after
5216 In this case we can replace the doloop_begin with a repeat
5217 instruction and remove the doloop_end. I.e.:
5219 $count1 = unspec ($count1)
5224 if ($count2-- == 0) goto label
5228 repeat $count1,repeat_label
5236 2. As for (1), except there are several doloop_ends. One of them
5237 (call it X) falls through to a label L. All the others fall
5238 through to branches to L.
5240 In this case, we remove X and replace the other doloop_ends
5241 with branches to the repeat label. For example:
5243 $count1 = unspec ($count1)
5246 if ($count2-- == 0) goto label
5249 if ($count3-- == 0) goto label
5254 repeat $count1,repeat_label
5265 3. The fallback case. Replace doloop_begins with:
5269 Replace doloop_ends with the equivalent of:
5272 if ($count == 0) goto label
5274 Note that this might need a scratch register if $count
5275 is stored in memory. */
/* Bookkeeping records used by mep_reorg_repeat below; one mep_doloop
   per tag, each holding singly-linked lists of its begins and ends.
   (Some field declarations are elided in this listing.)  */
5277 /* A structure describing one doloop_begin. */
5278 struct mep_doloop_begin {
5279 /* The next doloop_begin with the same tag. */
5280 struct mep_doloop_begin *next;
5282 /* The instruction itself. */
5285 /* The initial counter value. This is known to be a general register. */
5289 /* A structure describing a doloop_end. */
5290 struct mep_doloop_end {
5291 /* The next doloop_end with the same loop tag. */
5292 struct mep_doloop_end *next;
5294 /* The instruction itself. */
5297 /* The first instruction after INSN when the branch isn't taken. */
5300 /* The location of the counter value. Since doloop_end_internal is a
5301 jump instruction, it has to allow the counter to be stored anywhere
5302 (any non-fixed register or memory location). */
5305 /* The target label (the place where the insn branches when the counter
5309 /* A scratch register. Only available when COUNTER isn't stored
5310 in a general register. */
5315 /* One do-while loop. */
5317 /* All the doloop_begins for this loop (in no particular order). */
5318 struct mep_doloop_begin *begin;
5320 /* All the doloop_ends. When there is more than one, arrange things
5321 so that the first one is the most likely to be X in case (2) above. */
5322 struct mep_doloop_end *end;
5326 /* Return true if LOOP can be converted into repeat/repeat_end form
5327 (that is, if it matches cases (1) or (2) above). */
/* Return true if LOOP matches cases (1) or (2) of the big comment
   above, i.e. it can become a hardware repeat/repeat_end pair.
   (Return statements are elided in this listing.)  */
5330 mep_repeat_loop_p (struct mep_doloop *loop)
5332 struct mep_doloop_end *end;
5335 /* There must be exactly one doloop_begin and at least one doloop_end. */
5336 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5339 /* The first doloop_end (X) must branch back to the insn after
5340 the doloop_begin. */
5341 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5344 /* All the other doloop_ends must branch to the same place as X.
5345 When the branch isn't taken, they must jump to the instruction
5347 fallthrough = loop->end->fallthrough;
5348 for (end = loop->end->next; end != 0; end = end->next)
5349 if (end->label != loop->end->label
5350 || !simplejump_p (end->fallthrough)
5351 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5358 /* The main repeat reorg function. See comment above for details. */
/* The main repeat reorg pass (see the large comment above for the
   three cases).  Groups doloop_begin/doloop_end insns by tag, then
   converts each loop either to a hardware repeat (cases 1/2) or to
   an explicit decrement-and-branch sequence (case 3).
   NOTE(review): listing is non-contiguous; some braces, `break`s and
   start_sequence/end_sequence plumbing are elided.  */
5361 mep_reorg_repeat (rtx insns)
5364 struct mep_doloop *loops, *loop;
5365 struct mep_doloop_begin *begin;
5366 struct mep_doloop_end *end;
5368 /* Quick exit if we haven't created any loops. */
5369 if (cfun->machine->doloop_tags == 0)
5372 /* Create an array of mep_doloop structures. */
5373 loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
5374 memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);
5376 /* Search the function for do-while insns and group them by loop tag. */
5377 for (insn = insns; insn; insn = NEXT_INSN (insn))
5379 switch (recog_memoized (insn))
5381 case CODE_FOR_doloop_begin_internal:
5382 insn_extract (insn);
5383 loop = &loops[INTVAL (recog_data.operand[2])];
5385 begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
5386 begin->next = loop->begin;
5388 begin->counter = recog_data.operand[0];
5390 loop->begin = begin;
5393 case CODE_FOR_doloop_end_internal:
5394 insn_extract (insn);
5395 loop = &loops[INTVAL (recog_data.operand[2])];
5397 end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
5399 end->fallthrough = next_real_insn (insn);
5400 end->counter = recog_data.operand[0];
5401 end->label = recog_data.operand[1];
5402 end->scratch = recog_data.operand[3];
5404 /* If this insn falls through to an unconditional jump,
5405 give it a lower priority than the others. */
5406 if (loop->end != 0 && simplejump_p (end->fallthrough))
5408 end->next = loop->end->next;
5409 loop->end->next = end;
5413 end->next = loop->end;
5419 /* Convert the insns for each loop in turn. */
5420 for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
5421 if (mep_repeat_loop_p (loop))
5423 /* Case (1) or (2). */
5424 rtx repeat_label, label_ref;
5426 /* Create a new label for the repeat insn. */
5427 repeat_label = gen_label_rtx ();
5429 /* Replace the doloop_begin with a repeat. */
5430 label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
5431 emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
5433 delete_insn (loop->begin->insn);
5435 /* Insert the repeat label before the first doloop_end.
5436 Fill the gap with nops if there are other doloop_ends. */
5437 mep_insert_repeat_label_last (loop->end->insn, repeat_label,
5438 false, loop->end->next != 0);
5440 /* Emit a repeat_end (to improve the readability of the output). */
5441 emit_insn_before (gen_repeat_end (), loop->end->insn);
5443 /* Delete the first doloop_end. */
5444 delete_insn (loop->end->insn);
5446 /* Replace the others with branches to REPEAT_LABEL. */
5447 for (end = loop->end->next; end != 0; end = end->next)
5449 emit_jump_insn_before (gen_jump (repeat_label), end->insn);
5450 delete_insn (end->insn);
5451 delete_insn (end->fallthrough);
5456 /* Case (3). First replace all the doloop_begins with increment
5458 for (begin = loop->begin; begin != 0; begin = begin->next)
5460 emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
5461 begin->counter, const1_rtx),
5463 delete_insn (begin->insn);
5466 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5467 for (end = loop->end; end != 0; end = end->next)
5473 /* Load the counter value into a general register. */
5475 if (!REG_P (reg) || REGNO (reg) > 15)
5478 emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
5481 /* Decrement the counter. */
5482 emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
5485 /* Copy it back to its original location. */
5486 if (reg != end->counter)
5487 emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));
5489 /* Jump back to the start label. */
5490 insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
5492 JUMP_LABEL (insn) = end->label;
5493 LABEL_NUSES (end->label)++;
5495 /* Emit the whole sequence before the doloop_end. */
5496 insn = get_insns ();
5498 emit_insn_before (insn, end->insn);
5500 /* Delete the doloop_end. */
5501 delete_insn (end->insn);
/* Return nonzero if conditional-branch INSN still recognizes after
   its condition code is inverted (EQ<->NE, LT<->GE).  Probes by
   temporarily flipping the code, re-recognizing, then restoring.
   (Switch labels and the final return are elided in this listing.)  */
5508 mep_invertable_branch_p (rtx insn)
5511 enum rtx_code old_code;
5514 set = PATTERN (insn);
5515 if (GET_CODE (set) != SET)
5517 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5519 cond = XEXP (XEXP (set, 1), 0);
5520 old_code = GET_CODE (cond);
/* Flip the comparison in place (cases elided: EQ->NE, NE->EQ,
   LT->GE, GE->LT, presumably).  */
5524 PUT_CODE (cond, NE);
5527 PUT_CODE (cond, EQ);
5530 PUT_CODE (cond, GE);
5533 PUT_CODE (cond, LT);
/* Force re-recognition with the flipped code, then undo.  */
5538 INSN_CODE (insn) = -1;
5539 i = recog_memoized (insn);
5540 PUT_CODE (cond, old_code);
5541 INSN_CODE (insn) = -1;
/* Destructively invert conditional branch INSN and retarget it to a
   new label placed after AFTER.  The old target label is deleted when
   INSN was its only user.  (Switch case labels and some braces are
   elided in this listing.)  */
5546 mep_invert_branch (rtx insn, rtx after)
5548 rtx cond, set, label;
5551 set = PATTERN (insn);
5553 gcc_assert (GET_CODE (set) == SET);
5554 gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);
5556 cond = XEXP (XEXP (set, 1), 0);
5557 switch (GET_CODE (cond))
5560 PUT_CODE (cond, NE);
5563 PUT_CODE (cond, EQ);
5566 PUT_CODE (cond, GE);
5569 PUT_CODE (cond, LT);
5574 label = gen_label_rtx ();
5575 emit_label_after (label, after);
/* Retarget whichever arm of the IF_THEN_ELSE is the LABEL_REF.  */
5576 for (i=1; i<=2; i++)
5577 if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
5579 rtx ref = XEXP (XEXP (set, 1), i);
5580 if (LABEL_NUSES (XEXP (ref, 0)) == 1)
5581 delete_insn (XEXP (ref, 0));
5582 XEXP (ref, 0) = label;
5583 LABEL_NUSES (label) ++;
5584 JUMP_LABEL (insn) = label;
/* The inverted insn must still recognize (caller checked with
   mep_invertable_branch_p).  */
5586 INSN_CODE (insn) = -1;
5587 i = recog_memoized (insn);
5588 gcc_assert (i >= 0);
/* Convert suitable backward-branch loops into erepeat form: find an
   invertable conditional branch whose target label precedes it, check
   that nothing between forbids a repeat block (no calls/barriers),
   then emit erepeat/erepeat_end around the body and invert or remove
   the branch.  NOTE(review): listing is non-contiguous; the insn-kind
   guard at the loop head, some braces and `continue`s are elided.  */
5592 mep_reorg_erepeat (rtx insns)
5594 rtx insn, prev, l, x;
5597 for (insn = insns; insn; insn = NEXT_INSN (insn))
5599 && ! JUMP_TABLE_DATA_P (insn)
5600 && mep_invertable_branch_p (insn))
5604 fprintf (dump_file, "\n------------------------------\n");
5605 fprintf (dump_file, "erepeat: considering this jump:\n");
5606 print_rtl_single (dump_file, insn);
5608 count = simplejump_p (insn) ? 0 : 1;
/* Scan backwards from the branch looking for its own target label,
   giving up at anything that can't sit inside a repeat block.  */
5609 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
5611 if (GET_CODE (prev) == CALL_INSN
5612 || BARRIER_P (prev))
5615 if (prev == JUMP_LABEL (insn))
5619 fprintf (dump_file, "found loop top, %d insns\n", count);
5621 if (LABEL_NUSES (prev) == 1)
5622 /* We're the only user, always safe */ ;
5623 else if (LABEL_NUSES (prev) == 2)
5625 /* See if there's a barrier before this label. If
5626 so, we know nobody inside the loop uses it.
5627 But we must be careful to put the erepeat
5628 *after* the label. */
5630 for (barrier = PREV_INSN (prev);
5631 barrier && GET_CODE (barrier) == NOTE;
5632 barrier = PREV_INSN (barrier))
5634 if (barrier && GET_CODE (barrier) != BARRIER)
5639 /* We don't know who else, within or without our loop, uses this */
5641 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5645 /* Generate a label to be used by the erepat insn. */
5646 l = gen_label_rtx ();
5648 /* Insert the erepeat after INSN's target label. */
5649 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5651 emit_insn_after (x, prev);
5653 /* Insert the erepeat label. */
5654 newlast = (mep_insert_repeat_label_last
5655 (insn, l, !simplejump_p (insn), false));
5656 if (simplejump_p (insn))
5658 emit_insn_before (gen_erepeat_end (), insn);
/* Conditional branch: invert it so it exits the loop, and close the
   repeat block after the (new) fall-through point.  */
5663 mep_invert_branch (insn, newlast);
5664 emit_insn_after (gen_erepeat_end (), newlast);
5671 /* A label is OK if there is exactly one user, and we
5672 can find that user before the next label. */
5675 if (LABEL_NUSES (prev) == 1)
5677 for (user = PREV_INSN (prev);
5678 user && (INSN_P (user) || GET_CODE (user) == NOTE);
5679 user = PREV_INSN (user))
5680 if (GET_CODE (user) == JUMP_INSN
5681 && JUMP_LABEL (user) == prev)
5683 safe = INSN_UID (user);
5690 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5691 safe, INSN_UID (prev));
5701 fprintf (dump_file, "\n==============================\n");
5704 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5705 always do this on its own. */
/* See the comment above: where an unconditional jump lands directly
   on a return insn, replace the jump with a copy of the return and
   drop the now-unused label.  (Some loop braces are elided.)  */
5708 mep_jmp_return_reorg (rtx insns)
5710 rtx insn, label, ret;
5713 for (insn = insns; insn; insn = NEXT_INSN (insn))
5714 if (simplejump_p (insn))
5716 /* Find the first real insn the jump jumps to. */
5717 label = ret = JUMP_LABEL (insn);
/* Skip notes, labels and USEs after the target label (loop header
   line is elided).  */
5719 && (GET_CODE (ret) == NOTE
5720 || GET_CODE (ret) == CODE_LABEL
5721 || GET_CODE (PATTERN (ret)) == USE))
5722 ret = NEXT_INSN (ret);
5726 /* Is it a return? */
5727 ret_code = recog_memoized (ret);
5728 if (ret_code == CODE_FOR_return_internal
5729 || ret_code == CODE_FOR_eh_return_internal)
5731 /* It is. Replace the jump with a return. */
5732 LABEL_NUSES (label) --;
5733 if (LABEL_NUSES (label) == 0)
5734 delete_insn (label);
5735 PATTERN (insn) = copy_rtx (PATTERN (ret));
5736 INSN_CODE (insn) = -1;
/* Peephole: merge two consecutive `r += const` additions to the same
   register into one, provided the combined constant stays within
   signed 16-bit range (the upper-bound check is elided alongside
   other lines; only `> -32768` is visible here).  */
5744 mep_reorg_addcombine (rtx insns)
5748 for (i = insns; i; i = NEXT_INSN (i))
5750 && INSN_CODE (i) == CODE_FOR_addsi3
5751 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5752 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5753 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5754 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
/* N is presumably the next insn (assignment elided); same shape
   required: reg += const with matching source/dest.  */
5758 && INSN_CODE (n) == CODE_FOR_addsi3
5759 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5760 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5761 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5762 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5764 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5765 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5766 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5768 && ic + nc > -32768)
/* Fold N's constant into I and unlink N from the insn chain.  */
5770 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5771 NEXT_INSN (i) = NEXT_INSN (n);
5773 PREV_INSN (NEXT_INSN (i)) = i;
5779 /* If this insn adjusts the stack, return the adjustment, else return
/* If INSN is `$sp = $sp + const`, return the constant adjustment;
   otherwise return 0 (the early-return values are elided, but each
   failed check clearly bails out).  */
5782 add_sp_insn_p (rtx insn)
5786 if (! single_set (insn))
5788 pat = PATTERN (insn);
5789 if (GET_CODE (SET_DEST (pat)) != REG)
5791 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5793 if (GET_CODE (SET_SRC (pat)) != PLUS)
5795 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5797 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5799 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5801 return INTVAL (XEXP (SET_SRC (pat), 1));
5804 /* Check for trivial functions that set up an unneeded stack
/* Delete the stack-frame setup/teardown pair of a trivial function:
   find an initial `$sp += N`, scan forward for the matching
   `$sp += -N`, and delete both provided nothing in between mentions
   $sp or makes a call.  NOTE(review): listing is non-contiguous;
   several early returns and the scan-loop header are elided.  */
5809 mep_reorg_noframe (rtx insns)
5811 rtx start_frame_insn;
5812 rtx end_frame_insn = 0;
5814 /* The first insn should be $sp = $sp + N */
5815 while (insns && ! INSN_P (insns))
5816 insns = NEXT_INSN (insns);
5820 sp_adjust = add_sp_insn_p (insns);
5824 start_frame_insn = insns;
5825 sp = SET_DEST (PATTERN (start_frame_insn));
5827 insns = next_real_insn (insns);
5831 rtx next = next_real_insn (insns);
5835 sp2 = add_sp_insn_p (insns);
/* Candidate teardown: must exactly undo the setup adjustment.  */
5840 end_frame_insn = insns;
5841 if (sp2 != -sp_adjust)
/* Any other use of $sp, or a call, means the frame is needed.  */
5844 else if (mep_mentioned_p (insns, sp, 0))
5846 else if (CALL_P (insns))
5854 delete_insn (start_frame_insn);
5855 delete_insn (end_frame_insn);
/* NOTE(review): the function header for this machine-dependent reorg
   pass (presumably "static void mep_reorg (void)" -- confirm against
   the full file) is not visible in this excerpt.  It drives the MeP
   post-scheduling transformations in a fixed order.  */
5862 rtx insns = get_insns ();
5864 /* We require accurate REG_DEAD notes. */
5865 compute_bb_for_insn ();
5866 df_note_add_problem ();
/* Fuse back-to-back constant additions to the same register.  */
5869 mep_reorg_addcombine (insns);
5870 #if EXPERIMENTAL_REGMOVE_REORG
5871 /* VLIW packing has been done already, so we can't just delete things. */
5872 if (!mep_vliw_function_p (cfun->decl))
5873 mep_reorg_regmove (insns);
5875 mep_jmp_return_reorg (insns);
5876 mep_bundle_insns (insns);
5877 mep_reorg_repeat (insns);
/* erepeat needs $rpb; only safe when not profiling and, in interrupt
   handlers, when the register is saved.  */
5880 && !profile_arc_flag
5881 && TARGET_OPT_REPEAT
5882 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
5883 mep_reorg_erepeat (insns);
5885 /* This may delete *insns so make sure it's last. */
5886 mep_reorg_noframe (insns);
5888 df_finish_pass (false);
5893 /*----------------------------------------------------------------------*/
/* Intrinsics support: global tables mapping cgen intrinsics to insns.  */
5895 /*----------------------------------------------------------------------*/
5897 /* Element X gives the index into cgen_insns[] of the most general
5898 implementation of intrinsic X. Unimplemented intrinsics are
5900 int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];
5902 /* Element X gives the index of another instruction that is mapped to
5903 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5906 Things are set up so that mep_intrinsic_chain[X] < X. */
5907 static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];
5909 /* The bitmask for the current ISA. The ISA masks are declared
5911 unsigned int mep_selected_isa;
/* Table mapping -mconfig= names to ISA masks; filled in from the
   generated COPROC_SELECTION_TABLE when available.  */
5914 const char *config_name;
5918 static struct mep_config mep_configs[] = {
5919 #ifdef COPROC_SELECTION_TABLE
5920 COPROC_SELECTION_TABLE,
5925 /* Initialize the global intrinsics variables above. */
/* NOTE(review): excerpt is incomplete (loop braces and some statements
   missing).  Selects the ISA from -mconfig=, then builds the
   mep_intrinsic_insn / mep_intrinsic_chain tables and the
   copro-move capability flags from cgen_insns[].  */
5928 mep_init_intrinsics (void)
5932 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5933 mep_selected_isa = mep_configs[0].isa;
5934 if (mep_config_string != 0)
5935 for (i = 0; mep_configs[i].config_name; i++)
5936 if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
5938 mep_selected_isa = mep_configs[i].isa;
5942 /* Assume all intrinsics are unavailable. */
5943 for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
5944 mep_intrinsic_insn[i] = -1;
5946 /* Build up the global intrinsic tables. */
5947 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5948 if ((cgen_insns[i].isas & mep_selected_isa) != 0)
/* Chain earlier implementations behind the newest one, keeping the
   invariant mep_intrinsic_chain[X] < X.  */
5950 mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
5951 mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
5953 /* See whether we can directly move values between one coprocessor
5954 register and another. */
5955 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
5956 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
5957 mep_have_copro_copro_moves_p = true;
5959 /* See whether we can directly move values between core and
5960 coprocessor registers. */
5961 mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
5962 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));
/* NOTE(review): an unconditional override follows; the guarding
   condition (if any) is on a line not visible here -- confirm.  */
5964 mep_have_core_copro_moves_p = 1;
5967 /* Declare all available intrinsic functions. Called once only. */
/* Cached tree nodes for the coprocessor builtin types, built by
   mep_init_builtins and consumed by mep_cgen_regnum_to_type.  */
5969 static tree cp_data_bus_int_type_node;
5970 static tree opaque_vector_type_node;
5971 static tree v8qi_type_node;
5972 static tree v4hi_type_node;
5973 static tree v2si_type_node;
5974 static tree v8uqi_type_node;
5975 static tree v4uhi_type_node;
5976 static tree v2usi_type_node;
/* Map a cgen regnum operand type onto the tree type used to declare
   the corresponding builtin argument/return.  NOTE(review): the
   "switch (cr)" line and enclosing braces are missing from this
   excerpt; void_type_node is the fall-through result.  */
5979 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5983 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5984 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5985 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5986 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5987 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5988 case cgen_regnum_operand_type_CHAR: return char_type_node;
5989 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5990 case cgen_regnum_operand_type_SI: return intSI_type_node;
5991 case cgen_regnum_operand_type_DI: return intDI_type_node;
5992 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5993 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5994 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5995 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5996 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5997 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5998 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5999 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
6001 return void_type_node;
/* TARGET_INIT_BUILTINS hook: build the coprocessor vector/integer type
   nodes, publish them under user-visible names (cp_v8qi etc.), and
   declare one builtin function per available unspec-form intrinsic.
   NOTE(review): excerpt is incomplete; several pushdecl argument lines
   and loop braces are missing.  */
6006 mep_init_builtins (void)
/* The width of the coprocessor data bus decides the C type behind
   cp_data_bus_int.  */
6010 if (TARGET_64BIT_CR_REGS)
6011 cp_data_bus_int_type_node = long_long_integer_type_node;
6013 cp_data_bus_int_type_node = long_integer_type_node;
6015 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
6016 v8qi_type_node = build_vector_type (intQI_type_node, 8);
6017 v4hi_type_node = build_vector_type (intHI_type_node, 4);
6018 v2si_type_node = build_vector_type (intSI_type_node, 2);
6019 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
6020 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
6021 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
/* Make each type visible to the user under a fixed typedef name.  */
6023 (*lang_hooks.decls.pushdecl)
6024 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
6025 cp_data_bus_int_type_node));
6027 (*lang_hooks.decls.pushdecl)
6028 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
6029 opaque_vector_type_node));
6031 (*lang_hooks.decls.pushdecl)
6032 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
6034 (*lang_hooks.decls.pushdecl)
6035 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
6037 (*lang_hooks.decls.pushdecl)
6038 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
6041 (*lang_hooks.decls.pushdecl)
6042 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
6044 (*lang_hooks.decls.pushdecl)
6045 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
6047 (*lang_hooks.decls.pushdecl)
6048 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
6051 /* Intrinsics like mep_cadd3 are implemented with two groups of
6052 instructions, one which uses UNSPECs and one which uses a specific
6053 rtl code such as PLUS. Instructions in the latter group belong
6054 to GROUP_KNOWN_CODE.
6056 In such cases, the intrinsic will have two entries in the global
6057 tables above. The unspec form is accessed using builtin functions
6058 while the specific form is accessed using the mep_* enum in
6061 The idea is that __cop arithmetic and builtin functions have
6062 different optimization requirements. If mep_cadd3() appears in
6063 the source code, the user will surely expect gcc to use cadd3
6064 rather than a work-alike such as add3. However, if the user
6065 just writes "a + b", where a or b are __cop variables, it is
6066 reasonable for gcc to choose a core instruction rather than
6067 cadd3 if it believes that is more optimal. */
6068 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6069 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
6070 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
6072 tree ret_type = void_type_node;
/* Skip duplicate entries for the same intrinsic.  */
6075 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
6078 if (cgen_insns[i].cret_p)
6079 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
6081 bi_type = build_function_type (ret_type, 0);
6082 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
6084 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
6088 /* Report the unavailablity of the given intrinsic. */
6092 mep_intrinsic_unavailable (int intrinsic)
/* Diagnose each unavailable intrinsic at most once per compilation.  */
6094 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6096 if (already_reported_p[intrinsic])
/* Distinguish "not in this configuration at all" from "wrong
   VLIW/non-VLIW mode for the current function".  */
6099 if (mep_intrinsic_insn[intrinsic] < 0)
6100 error ("coprocessor intrinsic %qs is not available in this configuration",
6101 cgen_intrinsics[intrinsic]);
6102 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6103 error ("%qs is not available in VLIW functions",
6104 cgen_intrinsics[intrinsic]);
6106 error ("%qs is not available in non-VLIW functions",
6107 cgen_intrinsics[intrinsic]);
6109 already_reported_p[intrinsic] = 1;
6114 /* See if any implementation of INTRINSIC is available to the
6115 current function. If so, store the most general implementation
6116 in *INSN_PTR and return true. Return false otherwise. */
6119 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
/* Walk the chain of implementations until one is enabled for the
   current mode (VLIW vs. core); -1 terminates the chain.  */
6123 i = mep_intrinsic_insn[intrinsic];
6124 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6125 i = mep_intrinsic_chain[i];
6129 *insn_ptr = &cgen_insns[i];
6136 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6137 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6138 try using a work-alike instead. In this case, the returned insn
6139 may have three operands rather than two. */
6142 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
/* For CR <- CR moves, accept the first work-alike that is available.  */
6146 if (intrinsic == mep_cmov)
6148 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6149 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
/* All other intrinsics resolve normally.  */
6153 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6157 /* If ARG is a register operand that is the same size as MODE, convert it
6158 to MODE using a subreg. Otherwise return ARG as-is. */
6161 mep_convert_arg (enum machine_mode mode, rtx arg)
/* Same-size register values can be mode-punned with a no-cost subreg;
   anything else is returned unchanged (fall-through not visible in
   this excerpt).  */
6163 if (GET_MODE (arg) != mode
6164 && register_operand (arg, VOIDmode)
6165 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6166 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6171 /* Apply regnum conversions to ARG using the description given by REGNUM.
6172 Return the new argument on success and null on failure. */
6175 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
/* count == 0 means this operand is not a register number.  */
6177 if (regnum->count == 0)
/* A register-number operand must be a constant in [0, count).  */
6180 if (GET_CODE (arg) != CONST_INT
6182 || INTVAL (arg) >= regnum->count)
/* Translate the number into the actual hard register.  */
6185 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6189 /* Try to make intrinsic argument ARG match the given operand.
6190 UNSIGNED_P is true if the argument has an unsigned type. */
/* NOTE(review): excerpt is incomplete -- the failure returns and
   several braces are missing.  On success the legitimized rtx is
   returned; on failure the caller reports via mep_incompatible_arg.  */
6193 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6196 if (GET_CODE (arg) == CONST_INT)
6198 /* CONST_INTs can only be bound to integer operands. */
6199 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6202 else if (GET_CODE (arg) == CONST_DOUBLE)
6203 /* These hold vector constants. */;
6204 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6206 /* If the argument is a different size from what's expected, we must
6207 have a value in the right mode class in order to convert it. */
6208 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6211 /* If the operand is an rvalue, promote or demote it to match the
6212 operand's size. This might not need extra instructions when
6213 ARG is a register value. */
6214 if (operand->constraint[0] != '=')
6215 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6218 /* If the operand is an lvalue, bind the operand to a new register.
6219 The caller will copy this value into ARG after the main
6220 instruction. By doing this always, we produce slightly more
6222 /* But not for control registers. */
6223 if (operand->constraint[0] == '='
6225 || ! (CONTROL_REGNO_P (REGNO (arg))
6226 || CCR_REGNO_P (REGNO (arg))
6227 || CR_REGNO_P (REGNO (arg)))
6229 return gen_reg_rtx (operand->mode);
6231 /* Try simple mode punning. */
6232 arg = mep_convert_arg (operand->mode, arg);
6233 if (operand->predicate (arg, operand->mode))
6236 /* See if forcing the argument into a register will make it match. */
6237 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6238 arg = force_reg (operand->mode, arg);
6240 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6241 if (operand->predicate (arg, operand->mode))
6248 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6249 function FNNAME. OPERAND describes the operand to which ARGNUM
/* NOTE(review): excerpt is incomplete -- the early return after a
   matched immediate predicate is not visible.  Produces the most
   specific diagnostic possible: range/alignment for known immediate
   predicates, otherwise a generic incompatible-type error.  */
6253 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6254 int argnum, tree fnname)
6258 if (GET_CODE (arg) == CONST_INT)
/* Look for the cgen immediate predicate so we can report its exact
   range and alignment requirements.  */
6259 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6260 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6262 const struct cgen_immediate_predicate *predicate;
6263 HOST_WIDE_INT argval;
6265 predicate = &cgen_immediate_predicates[i];
6266 argval = INTVAL (arg);
6267 if (argval < predicate->lower || argval >= predicate->upper)
6268 error ("argument %d of %qE must be in the range %d...%d",
6269 argnum, fnname, predicate->lower, predicate->upper - 1);
6271 error ("argument %d of %qE must be a multiple of %d",
6272 argnum, fnname, predicate->align);
/* Fallback diagnostic when no immediate predicate matched.  */
6276 error ("incompatible type for argument %d of %qE", argnum, fnname);
/* TARGET_EXPAND_BUILTIN hook: expand a call to a MeP coprocessor
   builtin into RTL.  Looks up the cgen insn for the intrinsic,
   legitimizes each argument, emits the instruction, and copies any
   lvalue operands back out.  NOTE(review): excerpt is incomplete --
   many returns, braces and some declarations are missing.  */
6280 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6281 rtx subtarget ATTRIBUTE_UNUSED,
6282 enum machine_mode mode ATTRIBUTE_UNUSED,
6283 int ignore ATTRIBUTE_UNUSED)
6285 rtx pat, op[10], arg[10];
6287 int opindex, unsigned_p[10];
6289 unsigned int n_args;
6291 const struct cgen_insn *cgen_insn;
6292 const struct insn_data_d *idata;
/* first_arg is 1 when the insn's operand 0 is the return value.  */
6293 unsigned int first_arg = 0;
6294 unsigned int builtin_n_args;
6296 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6297 fnname = DECL_NAME (fndecl);
6299 /* Find out which instruction we should emit. Note that some coprocessor
6300 intrinsics may only be available in VLIW mode, or only in normal mode. */
6301 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6303 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6306 idata = &insn_data[cgen_insn->icode];
6308 builtin_n_args = cgen_insn->num_args;
6310 if (cgen_insn->cret_p)
6312 if (cgen_insn->cret_p > 1)
6315 mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6319 /* Evaluate each argument. */
6320 n_args = call_expr_nargs (exp);
6322 if (n_args < builtin_n_args)
6324 error ("too few arguments to %qE", fnname);
6327 if (n_args > builtin_n_args)
6329 error ("too many arguments to %qE", fnname);
6333 for (a = first_arg; a < builtin_n_args + first_arg; a++)
6337 args = CALL_EXPR_ARG (exp, a - first_arg);
/* Reference (pointer) operands must be the address of an object so
   we can bind the pointed-to memory directly.  */
6342 if (cgen_insn->regnums[a].reference_p)
6344 if (TREE_CODE (value) != ADDR_EXPR)
6347 error ("argument %d of %qE must be an address", a+1, fnname);
6350 value = TREE_OPERAND (value, 0);
6354 /* If the argument has been promoted to int, get the unpromoted
6355 value. This is necessary when sub-int memory values are bound
6356 to reference parameters. */
6357 if (TREE_CODE (value) == NOP_EXPR
6358 && TREE_TYPE (value) == integer_type_node
6359 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6360 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6361 < TYPE_PRECISION (TREE_TYPE (value))))
6362 value = TREE_OPERAND (value, 0);
6364 /* If the argument has been promoted to double, get the unpromoted
6365 SFmode value. This is necessary for FMAX support, for example. */
6366 if (TREE_CODE (value) == NOP_EXPR
6367 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6368 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6369 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6370 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6371 value = TREE_OPERAND (value, 0);
6373 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6374 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
/* Translate register-number constants into hard-register rtxes.  */
6375 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6376 if (cgen_insn->regnums[a].reference_p)
6378 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6379 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6381 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6385 error ("argument %d of %qE must be in the range %d...%d",
6386 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
/* Allocate the result operand(s), reusing TARGET when it already has
   the right mode.  */
6391 for (a = 0; a < first_arg; a++)
6393 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6396 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6399 /* Convert the arguments into a form suitable for the intrinsic.
6400 Report an error if this isn't possible. */
6401 for (opindex = 0; opindex < idata->n_operands; opindex++)
6403 a = cgen_insn->op_mapping[opindex];
6404 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6405 arg[a], unsigned_p[a]);
6406 if (op[opindex] == 0)
6408 mep_incompatible_arg (&idata->operand[opindex],
6409 arg[a], a + 1 - first_arg, fnname);
6414 /* Emit the instruction. */
6415 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6416 op[5], op[6], op[7], op[8], op[9]);
/* Conditional-branch patterns need emit_jump_insn.  */
6418 if (GET_CODE (pat) == SET
6419 && GET_CODE (SET_DEST (pat)) == PC
6420 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6421 emit_jump_insn (pat);
6425 /* Copy lvalues back to their final locations. */
6426 for (opindex = 0; opindex < idata->n_operands; opindex++)
6427 if (idata->operand[opindex].constraint[0] == '=')
6429 a = cgen_insn->op_mapping[opindex];
/* Mode-class mismatch (e.g. int vs. float) needs a lowpart copy
   rather than a conversion.  */
6432 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6433 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6434 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6438 /* First convert the operand to the right mode, then copy it
6439 into the destination. Doing the conversion as a separate
6440 step (rather than using convert_move) means that we can
6441 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6442 refer to the same register. */
6443 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6444 op[opindex], unsigned_p[a]);
6445 if (!rtx_equal_p (arg[a], op[opindex]))
6446 emit_move_insn (arg[a], op[opindex]);
/* If the caller supplied a different target, copy the result there.  */
6451 if (first_arg > 0 && target && target != op[0])
6453 emit_move_insn (target, op[0]);
/* TARGET_VECTOR_MODE_SUPPORTED_P hook.  NOTE(review): the body is not
   visible in this excerpt -- confirm its result against the full file.  */
6460 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6465 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6466 a global register. */
/* NOTE(review): for_each_rtx callback; excerpt is incomplete (the
   X = *LOC binding, case labels and returns are partly missing).  */
6469 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6477 switch (GET_CODE (x))
/* A subreg of a hard register may still name a global register.  */
6480 if (REG_P (SUBREG_REG (x)))
6482 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6483 && global_regs[subreg_regno (x)])
/* Plain hard registers: consult the global_regs[] table.  */
6491 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6505 /* A non-constant call might use a global register. */
6515 /* Returns nonzero if X mentions a global register. */
6518 global_reg_mentioned_p (rtx x)
/* For const/pure calls only the FUNCTION_USAGE list can touch a
   global register; for other calls scan the whole insn.  */
6524 if (! RTL_CONST_OR_PURE_CALL_P (x))
6526 x = CALL_INSN_FUNCTION_USAGE (x);
6534 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6536 /* Scheduling hooks for VLIW mode.
6538 Conceptually this is very simple: we have a two-pack architecture
6539 that takes one core insn and one coprocessor insn to make up either
6540 a 32- or 64-bit instruction word (depending on the option bit set in
6541 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6542 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6543 and one 48-bit cop insn or two 32-bit core/cop insns.
6545 In practice, instruction selection will be a bear. Consider in
6546 VL64 mode the following insns
6551 these cannot pack, since the add is a 16-bit core insn and cmov
6552 is a 32-bit cop insn. However,
6557 packs just fine. For good VLIW code generation in VL64 mode, we
6558 will have to have 32-bit alternatives for many of the common core
6559 insns. Not implemented. */
/* TARGET_SCHED_ADJUST_COST hook.  NOTE(review): excerpt is incomplete;
   several returns and the declaration of cost_specified are missing.  */
6562 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
/* Non-zero REG_NOTE_KIND means an anti or output dependence.  */
6566 if (REG_NOTE_KIND (link) != 0)
6568 /* See whether INSN and DEP_INSN are intrinsics that set the same
6569 hard register. If so, it is more important to free up DEP_INSN
6570 than it is to free up INSN.
6572 Note that intrinsics like mep_mulr are handled differently from
6573 the equivalent mep.md patterns. In mep.md, if we don't care
6574 about the value of $lo and $hi, the pattern will just clobber
6575 the registers, not set them. Since clobbers don't count as
6576 output dependencies, it is often possible to reorder two mulrs,
6579 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6580 so any pair of mep_mulr()s will be inter-dependent. We should
6581 therefore give the first mep_mulr() a higher priority. */
6582 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6583 && global_reg_mentioned_p (PATTERN (insn))
6584 && global_reg_mentioned_p (PATTERN (dep_insn)))
6587 /* If the dependence is an anti or output dependence, assume it
6592 /* If we can't recognize the insns, we can't really do anything. */
6593 if (recog_memoized (dep_insn) < 0)
6596 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6597 attribute instead. */
6600 cost_specified = get_attr_latency (dep_insn);
6601 if (cost_specified != 0)
6602 return cost_specified;
6608 /* ??? We don't properly compute the length of a load/store insn,
6609 taking into account the addressing mode. */
/* TARGET_SCHED_ISSUE_RATE hook: IVC2 can issue 3 insns per cycle,
   other MeP variants 2.  */
6612 mep_issue_rate (void)
6614 return TARGET_IVC2 ? 3 : 2;
6617 /* Return true if function DECL was declared with the vliw attribute. */
6620 mep_vliw_function_p (tree decl)
/* The attribute lives on the function's type, not the decl.  */
6622 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
/* Search the scheduler's ready list (highest priority last) for an
   insn occupying SLOT with encoding length LENGTH bytes.  NOTE(review):
   the return statements are not visible in this excerpt.  */
6626 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
/* Scan from the back so the highest-priority match wins.  */
6630 for (i = nready - 1; i >= 0; --i)
6632 rtx insn = ready[i];
6633 if (recog_memoized (insn) >= 0
6634 && get_attr_slot (insn) == slot
6635 && get_attr_length (insn) == length)
/* Move INSN to the end (highest-priority position) of the ready list,
   shifting the intervening entries down.  NOTE(review): the final
   store of INSN and the not-found assertion are not visible here.  */
6643 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6647 for (i = 0; i < nready; ++i)
6648 if (ready[i] == insn)
6650 for (; i < nready - 1; ++i)
6651 ready[i] = ready[i + 1];
/* Debug helper: print one line describing INSN (slot class and insn
   name) to the scheduler DUMP file.  NOTE(review): excerpt is
   incomplete -- the buffer declaration, default case and fprintf
   arguments are partly missing.  */
6660 mep_print_sched_insn (FILE *dump, rtx insn)
6662 const char *slots = "none";
6663 const char *name = NULL;
/* Only SET/PARALLEL patterns have a meaningful slots attribute.  */
6667 if (GET_CODE (PATTERN (insn)) == SET
6668 || GET_CODE (PATTERN (insn)) == PARALLEL)
6670 switch (get_attr_slots (insn))
6672 case SLOTS_CORE: slots = "core"; break;
6673 case SLOTS_C3: slots = "c3"; break;
6674 case SLOTS_P0: slots = "p0"; break;
6675 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6676 case SLOTS_P0_P1: slots = "p0,p1"; break;
6677 case SLOTS_P0S: slots = "p0s"; break;
6678 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6679 case SLOTS_P1: slots = "p1"; break;
/* Unknown slot class: show the raw attribute value.  */
6681 sprintf(buf, "%d", get_attr_slots (insn));
6686 if (GET_CODE (PATTERN (insn)) == USE)
6689 code = INSN_CODE (insn);
6691 name = get_insn_name (code);
6696 "insn %4d %4d %8s %s\n",
/* TARGET_SCHED_REORDER hook: in VLIW mode, try to place a pairable
   core insn and coprocessor insn at the head of the ready list so
   they can be bundled into one instruction word.  NOTE(review):
   excerpt is incomplete -- several returns and braces are missing.  */
6704 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6705 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6706 int *pnready, int clock ATTRIBUTE_UNUSED)
6708 int nready = *pnready;
6709 rtx core_insn, cop_insn;
6712 if (dump && sched_verbose > 1)
6714 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6715 for (i=0; i<nready; i++)
6716 mep_print_sched_insn (dump, ready[i]);
6717 fprintf (dump, "\n");
/* Pairing only applies inside VLIW functions.  */
6720 if (!mep_vliw_function_p (cfun->decl))
6725 /* IVC2 uses a DFA to determine what's ready and what's not. */
6729 /* We can issue either a core or coprocessor instruction.
6730 Look for a matched pair of insns to reorder. If we don't
6731 find any, don't second-guess the scheduler's priorities. */
/* VL32: 16-bit core + 16-bit cop; VL64: 16-bit core + 48-bit cop.  */
6733 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6734 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6735 TARGET_OPT_VL64 ? 6 : 2)))
/* VL64 also allows a 32-bit core + 32-bit cop pairing.  */
6737 else if (TARGET_OPT_VL64
6738 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6739 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6742 /* We didn't find a pair. Issue the single insn at the head
6743 of the ready list. */
6746 /* Reorder the two insns first. */
6747 mep_move_ready_insn (ready, nready, core_insn);
6748 mep_move_ready_insn (ready, nready - 1, cop_insn);
6752 /* A for_each_rtx callback. Return true if *X is a register that is
6753 set by insn PREV. */
6756 mep_store_find_set (rtx *x, void *prev)
6758 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6761 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6762 not the containing insn. */
/* NOTE(review): excerpt is incomplete -- several returns, braces and
   the index declarations are missing.  Returns true when the store
   address in PAT has no true dependence on PREV.  */
6765 mep_store_data_bypass_1 (rtx prev, rtx pat)
6767 /* Cope with intrinsics like swcpa. */
6768 if (GET_CODE (pat) == PARALLEL)
/* Any constituent store that bypasses is enough.  */
6772 for (i = 0; i < XVECLEN (pat, 0); i++)
6773 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6779 /* Check for some sort of store. */
6780 if (GET_CODE (pat) != SET
6781 || GET_CODE (SET_DEST (pat)) != MEM)
6784 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6785 The first operand to the unspec is the store data and the other operands
6786 are used to calculate the address. */
6787 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6792 src = SET_SRC (pat);
/* Skip operand 0 (the store data); only address operands matter.  */
6793 for (i = 1; i < XVECLEN (src, 0); i++)
6794 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6800 /* Otherwise just check that PREV doesn't modify any register mentioned
6801 in the memory destination. */
6802 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6805 /* Return true if INSN is a store instruction and if the store address
6806 has no true dependence on PREV. */
6809 mep_store_data_bypass_p (rtx prev, rtx insn)
6811 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6814 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6815 is a register other than LO or HI and if PREV sets *X. */
/* NOTE(review): the function header line and the REG_P (*x) test are
   not visible in this excerpt.  */
6818 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6821 && REGNO (*x) != LO_REGNO
6822 && REGNO (*x) != HI_REGNO
6823 && reg_set_p (*x, (const_rtx) prev));
6826 /* Return true if, apart from HI/LO, there are no true dependencies
6827 between multiplication instructions PREV and INSN. */
6830 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6834 pat = PATTERN (insn);
/* Multiply patterns may be a PARALLEL; the SET comes first.  */
6835 if (GET_CODE (pat) == PARALLEL)
6836 pat = XVECEXP (pat, 0, 0);
6837 return (GET_CODE (pat) == SET
6838 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6841 /* Return true if INSN is an ldc instruction that issues to the
6842 MeP-h1 integer pipeline. This is true for instructions that
6843 read from PSW, LP, SAR, HI and LO. */
/* NOTE(review): excerpt is incomplete -- the REG_P test, case labels
   and returns of the switch are missing.  */
6846 mep_ipipe_ldc_p (rtx insn)
6850 pat = PATTERN (insn);
6852 /* Cope with instrinsics that set both a hard register and its shadow.
6853 The set of the hard register comes first. */
6854 if (GET_CODE (pat) == PARALLEL)
6855 pat = XVECEXP (pat, 0, 0);
6857 if (GET_CODE (pat) == SET)
6859 src = SET_SRC (pat);
6861 /* Cope with intrinsics. The first operand to the unspec is
6862 the source register. */
6863 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6864 src = XVECEXP (src, 0, 0);
/* Dispatch on which control register is being read.  */
6867 switch (REGNO (src))
6880 /* Create a VLIW bundle from core instruction CORE and coprocessor
6881 instruction COP. COP always satisfies INSN_P, but CORE can be
6882 either a new pattern or an existing instruction.
6884 Emit the bundle in place of COP and return it. */
/* NOTE(review): excerpt is incomplete -- the INSN_P test on CORE, the
   removal of an existing CORE insn and the final return are missing.  */
6887 mep_make_bundle (rtx core, rtx cop)
6891 /* If CORE is an existing instruction, remove it, otherwise put
6892 the new pattern in an INSN harness. */
6896 core = make_insn_raw (core);
6898 /* Generate the bundle sequence and replace COP with it. */
6899 insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6900 insn = emit_insn_after (insn, cop);
6903 /* Set up the links of the insns inside the SEQUENCE. */
6904 PREV_INSN (core) = PREV_INSN (insn);
6905 NEXT_INSN (core) = cop;
6906 PREV_INSN (cop) = core;
6907 NEXT_INSN (cop) = NEXT_INSN (insn);
6909 /* Set the VLIW flag for the coprocessor instruction. */
6910 PUT_MODE (core, VOIDmode);
6911 PUT_MODE (cop, BImode);
6913 /* Derive a location for the bundle. Individual instructions cannot
6914 have their own location because there can be no assembler labels
6915 between CORE and COP. */
6916 INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
6917 INSN_LOCATOR (core) = 0;
6918 INSN_LOCATOR (cop) = 0;
6923 /* A helper routine for mep_insn_dependent_p called through note_stores. */
/* Clears *DATA (the candidate insn pointer) when X, a location stored
   to, is mentioned by that insn -- i.e. a dependence was found.
   NOTE(review): the clearing assignment is on a line not visible in
   this excerpt.  */
6926 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6928 rtx * pinsn = (rtx *) data;
6930 if (*pinsn && reg_mentioned_p (x, *pinsn))
6934 /* Return true if anything in insn X is (anti,output,true) dependent on
6935 anything in insn Y. */
6938 mep_insn_dependent_p (rtx x, rtx y)
6942 gcc_assert (INSN_P (x));
6943 gcc_assert (INSN_P (y));
/* TMP is cleared by the callback when a dependence is found; check
   both directions.  */
6946 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6947 if (tmp == NULL_RTX)
6951 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6952 if (tmp == NULL_RTX)
/* Return true if INSN issues to the core slot (as opposed to a
   coprocessor slot).  USE patterns never count.  */
6959 core_insn_p (rtx insn)
6961 if (GET_CODE (PATTERN (insn)) == USE)
6963 if (get_attr_slot (insn) == SLOT_CORE)
6968 /* Mark coprocessor instructions that can be bundled together with
6969 the immediately preceding core instruction. This is later used
6970 to emit the "+" that tells the assembler to create a VLIW insn.
6972 For unbundled insns, the assembler will automatically add coprocessor
6973 nops, and 16-bit core nops. Due to an apparent oversight in the
6974 spec, the assembler will _not_ automatically add 32-bit core nops,
6975 so we have to emit those here.
6977 Called from mep_insn_reorg. */
/* NOTE(review): excerpt is incomplete -- many braces, returns and a
   few statements are missing throughout.  */
6980 mep_bundle_insns (rtx insns)
6982 rtx insn, last = NULL_RTX, first = NULL_RTX;
6983 int saw_scheduling = 0;
6985 /* Only do bundling if we're in vliw mode. */
6986 if (!mep_vliw_function_p (cfun->decl))
6989 /* The first insn in a bundle are TImode, the remainder are
6990 VOIDmode. After this function, the first has VOIDmode and the
6991 rest have BImode. */
6993 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6995 /* First, move any NOTEs that are within a bundle, to the beginning
6997 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6999 if (NOTE_P (insn) && first)
7000 /* Don't clear FIRST. */;
/* TImode marks the start of a new bundle.  */
7002 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
7005 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
7009 /* INSN is part of a bundle; FIRST is the first insn in that
7010 bundle. Move all intervening notes out of the bundle.
7011 In addition, since the debug pass may insert a label
7012 whenever the current line changes, set the location info
7013 for INSN to match FIRST. */
7015 INSN_LOCATOR (insn) = INSN_LOCATOR (first);
7017 note = PREV_INSN (insn);
7018 while (note && note != first)
7020 prev = PREV_INSN (note);
7024 /* Remove NOTE from here... */
7025 PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
7026 NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
7027 /* ...and put it in here. */
7028 NEXT_INSN (note) = first;
7029 PREV_INSN (note) = PREV_INSN (first);
7030 NEXT_INSN (PREV_INSN (note)) = note;
7031 PREV_INSN (NEXT_INSN (note)) = note;
7038 else if (!NONJUMP_INSN_P (insn))
7042 /* Now fix up the bundles. */
7043 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7048 if (!NONJUMP_INSN_P (insn))
7054 /* If we're not optimizing enough, there won't be scheduling
7055 info. We detect that here. */
7056 if (GET_MODE (insn) == TImode)
7058 if (!saw_scheduling)
7063 rtx core_insn = NULL_RTX;
7065 /* IVC2 slots are scheduled by DFA, so we just accept
7066 whatever the scheduler gives us. However, we must make
7067 sure the core insn (if any) is the first in the bundle.
7068 The IVC2 assembler can insert whatever NOPs are needed,
7069 and allows a COP insn to be first. */
7071 if (NONJUMP_INSN_P (insn)
7072 && GET_CODE (PATTERN (insn)) != USE
7073 && GET_MODE (insn) == TImode)
/* Scan the rest of the bundle looking for a core insn.  */
7077 && GET_MODE (NEXT_INSN (last)) == VOIDmode
7078 && NONJUMP_INSN_P (NEXT_INSN (last));
7079 last = NEXT_INSN (last))
7081 if (core_insn_p (last))
7084 if (core_insn_p (last))
7087 if (core_insn && core_insn != insn)
7089 /* Swap core insn to first in the bundle. */
7091 /* Remove core insn. */
7092 if (PREV_INSN (core_insn))
7093 NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7094 if (NEXT_INSN (core_insn))
7095 PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7097 /* Re-insert core insn. */
7098 PREV_INSN (core_insn) = PREV_INSN (insn);
7099 NEXT_INSN (core_insn) = insn;
7101 if (PREV_INSN (core_insn))
7102 NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7103 PREV_INSN (insn) = core_insn;
/* The moved core insn now heads the bundle.  */
7105 PUT_MODE (core_insn, TImode);
7106 PUT_MODE (insn, VOIDmode);
7110 /* The first insn has TImode, the rest have VOIDmode */
7111 if (GET_MODE (insn) == TImode)
7112 PUT_MODE (insn, VOIDmode);
7114 PUT_MODE (insn, BImode);
7118 PUT_MODE (insn, VOIDmode);
7119 if (recog_memoized (insn) >= 0
7120 && get_attr_slot (insn) == SLOT_COP)
/* A COP insn can only pair with a core insn of complementary
   length that it does not depend on.  Otherwise pad with the
   appropriately sized core nop.  */
7122 if (GET_CODE (insn) == JUMP_INSN
7124 || recog_memoized (last) < 0
7125 || get_attr_slot (last) != SLOT_CORE
7126 || (get_attr_length (insn)
7127 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7128 || mep_insn_dependent_p (insn, last))
7130 switch (get_attr_length (insn))
7135 insn = mep_make_bundle (gen_nop (), insn);
7138 if (TARGET_OPT_VL64)
7139 insn = mep_make_bundle (gen_nop32 (), insn);
7142 if (TARGET_OPT_VL64)
7143 error ("2 byte cop instructions are"
7144 " not allowed in 64-bit VLIW mode");
7146 insn = mep_make_bundle (gen_nop (), insn);
7149 error ("unexpected %d byte cop instruction",
7150 get_attr_length (insn));
/* Pairable: bundle the COP insn with the preceding core insn.  */
7155 insn = mep_make_bundle (last, insn);
7163 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7164 Return true on success. This function can fail if the intrinsic
7165 is unavailable or if the operands don't satisfy their predicates. */
7168 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7170 const struct cgen_insn *cgen_insn;
7171 const struct insn_data_d *idata;
/* Look up the insn implementing INTRINSIC; fail if the intrinsic has
   no insn in the current configuration.  */
7175 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7178 idata = &insn_data[cgen_insn->icode];
/* Convert each operand to the mode the insn pattern expects, then
   validate it against that operand's predicate.  */
7179 for (i = 0; i < idata->n_operands; i++)
7181 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7182 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
/* All operands accepted: emit the insn.  genfun takes a fixed-arity
   argument list, so slots past n_operands are passed regardless.  */
7186 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7187 newop[3], newop[4], newop[5],
7188 newop[6], newop[7], newop[8]));
7194 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7195 OPERANDS[0]. Report an error if the instruction could not
7196 be synthesized. OPERANDS[1] is a register_operand. For sign
7197 and zero extensions, it may be smaller than SImode. */
/* NOTE(review): the function body is elided from this view; both
   parameters carry ATTRIBUTE_UNUSED, which suggests the body does not
   reference them in the visible configuration -- confirm against the
   full file before relying on this.  */
7200 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7201 rtx * operands ATTRIBUTE_UNUSED)
7207 /* Likewise, but apply a binary operation to OPERANDS[1] and
7208 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7209 can be a general_operand.
7211 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7212 third operand. REG and REG3 take register operands only. */
/* NOTE(review): the function body is elided from this view; all
   parameters carry ATTRIBUTE_UNUSED, which suggests the body does not
   reference them in the visible configuration -- confirm against the
   full file before relying on this.  */
7215 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7216 int ATTRIBUTE_UNUSED immediate3,
7217 int ATTRIBUTE_UNUSED reg,
7218 int ATTRIBUTE_UNUSED reg3,
7219 rtx * operands ATTRIBUTE_UNUSED)
/* Implement TARGET_RTX_COSTS: estimate the cost of rtx X in
   COSTS_N_INSNS units, storing the result through TOTAL.  Only
   fragments of the switch over CODE are visible in this view.  */
7225 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
/* Integer constants: cheapest when they fit in a signed byte, mid-priced
   when they fit in 16 bits.  NOTE(review): the `< 127' bound excludes
   +127 while -128 is included -- looks like an off-by-one compared with
   the inclusive-style `< 65536' below; confirm against the MeP insn
   constraints before changing.  */
7230 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7232 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7239 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
/* For this (elided) binary code, cost depends on whether the second
   operand is a compile-time constant.  */
7243 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7245 : COSTS_N_INSNS (2));
/* Implement TARGET_ADDRESS_COST.  The body is elided from this view;
   both arguments are unused, so presumably the cost is a constant --
   confirm against the full file.  */
7252 mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
/* Implement TARGET_HANDLE_OPTION: process one MeP -m command-line
   option.  NOTE(review): the switch over CODE and its case labels are
   elided from this view; only the per-option actions remain.  */
7258 mep_handle_option (size_t code,
7259 const char *arg ATTRIBUTE_UNUSED,
7260 int value ATTRIBUTE_UNUSED)
/* Enable or disable the whole group of MeP option flags at once.  */
7267 target_flags |= MEP_ALL_OPTS;
7271 target_flags &= ~ MEP_ALL_OPTS;
/* A coprocessor option: also force 64-bit coprocessor registers.  */
7275 target_flags |= MASK_COP;
7276 target_flags |= MASK_64BIT_CR_REGS;
/* Remember that -mtiny= was given explicitly.  */
7280 option_mtiny_specified = 1;
/* IVC2 coprocessor: implies COP, 64-bit CRs, VLIW and 64-bit VLIW.  */
7283 target_flags |= MASK_COP;
7284 target_flags |= MASK_64BIT_CR_REGS;
7285 target_flags |= MASK_VLIW;
7286 target_flags |= MASK_OPT_VL64;
7287 target_flags |= MASK_IVC2;
/* Make registers 48..79 allocatable and (with an elided exception at
   the line below) call-clobbered.  Presumably these are the
   coprocessor registers -- confirm against the register layout in
   mep.h.  */
7289 for (i=0; i<32; i++)
7290 fixed_regs[i+48] = 0;
7291 for (i=0; i<32; i++)
7292 call_used_regs[i+48] = 1;
7294 call_used_regs[i+48] = 0;
/* Shorthand for renaming coprocessor control registers below.  */
7296 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
/* Implement TARGET_ASM_INIT_SECTIONS: create the MeP-specific output
   sections.  NOTE(review): the left-hand sides of these assignments
   (the section_* variables) are elided from this view; each call builds
   an unnamed section emitting the given .section directive.  */
7333 mep_asm_init_sections (void)
/* Writable ".based" data section.  */
7336 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7337 "\t.section .based,\"aw\"");
/* Small BSS and small data.  */
7340 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7341 "\t.section .sbss,\"aw\"");
7344 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7345 "\t.section .sdata,\"aw\",@progbits");
/* "Far" data and its BSS counterpart.  */
7348 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7349 "\t.section .far,\"aw\"");
7352 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7353 "\t.section .farbss,\"aw\"");
/* Read-only variants (no SECTION_WRITE flag).  */
7356 = get_unnamed_section (0, output_section_asm_op,
7357 "\t.section .frodata,\"a\"");
7360 = get_unnamed_section (0, output_section_asm_op,
7361 "\t.section .srodata,\"a\"");
/* VLIW code sections carry SECTION_MEP_VLIW and switch the assembler
   into .vliw mode; .ftext is core-mode code (.core directive).  */
7364 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7365 "\t.section .vtext,\"axv\"\n\t.vliw");
7368 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7369 "\t.section .vftext,\"axv\"\n\t.vliw");
7372 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7373 "\t.section .ftext,\"ax\"\n\t.core");
7377 /* Initialize the GCC target structure. */
/* Attribute and inlining hooks.  */
7379 #undef TARGET_ASM_FUNCTION_PROLOGUE
7380 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7381 #undef TARGET_ATTRIBUTE_TABLE
7382 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7383 #undef TARGET_COMP_TYPE_ATTRIBUTES
7384 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7385 #undef TARGET_INSERT_ATTRIBUTES
7386 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7387 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7388 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7389 #undef TARGET_CAN_INLINE_P
7390 #define TARGET_CAN_INLINE_P mep_can_inline_p
/* Section handling.  */
7391 #undef TARGET_SECTION_TYPE_FLAGS
7392 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7393 #undef TARGET_ASM_NAMED_SECTION
7394 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
/* Builtins (MeP intrinsics).  */
7395 #undef TARGET_INIT_BUILTINS
7396 #define TARGET_INIT_BUILTINS mep_init_builtins
7397 #undef TARGET_EXPAND_BUILTIN
7398 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
/* Scheduler hooks.  */
7399 #undef TARGET_SCHED_ADJUST_COST
7400 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7401 #undef TARGET_SCHED_ISSUE_RATE
7402 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7403 #undef TARGET_SCHED_REORDER
7404 #define TARGET_SCHED_REORDER mep_sched_reorder
/* Symbol encoding and section selection.  */
7405 #undef TARGET_STRIP_NAME_ENCODING
7406 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7407 #undef TARGET_ASM_SELECT_SECTION
7408 #define TARGET_ASM_SELECT_SECTION mep_select_section
7409 #undef TARGET_ASM_UNIQUE_SECTION
7410 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7411 #undef TARGET_ENCODE_SECTION_INFO
7412 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
/* Calls, costs and reorg.  */
7413 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
7414 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7415 #undef TARGET_RTX_COSTS
7416 #define TARGET_RTX_COSTS mep_rtx_cost
7417 #undef TARGET_ADDRESS_COST
7418 #define TARGET_ADDRESS_COST mep_address_cost
7419 #undef TARGET_MACHINE_DEPENDENT_REORG
7420 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
/* Argument passing.  */
7421 #undef TARGET_SETUP_INCOMING_VARARGS
7422 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7423 #undef TARGET_PASS_BY_REFERENCE
7424 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
7425 #undef TARGET_FUNCTION_ARG
7426 #define TARGET_FUNCTION_ARG mep_function_arg
7427 #undef TARGET_FUNCTION_ARG_ADVANCE
7428 #define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
7429 #undef TARGET_VECTOR_MODE_SUPPORTED_P
7430 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
/* Option handling.  */
7431 #undef TARGET_HANDLE_OPTION
7432 #define TARGET_HANDLE_OPTION mep_handle_option
7433 #undef TARGET_OPTION_OVERRIDE
7434 #define TARGET_OPTION_OVERRIDE mep_option_override
7435 #undef TARGET_OPTION_OPTIMIZATION_TABLE
7436 #define TARGET_OPTION_OPTIMIZATION_TABLE mep_option_optimization_table
7437 #undef TARGET_DEFAULT_TARGET_FLAGS
7438 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
/* Miscellaneous hooks: initial values, sections, return conventions.  */
7439 #undef TARGET_ALLOCATE_INITIAL_VALUE
7440 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7441 #undef TARGET_ASM_INIT_SECTIONS
7442 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7443 #undef TARGET_RETURN_IN_MEMORY
7444 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7445 #undef TARGET_NARROW_VOLATILE_BITFIELD
7446 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
/* Varargs support.  */
7447 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
7448 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7449 #undef TARGET_BUILD_BUILTIN_VA_LIST
7450 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7451 #undef TARGET_EXPAND_BUILTIN_VA_START
7452 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7453 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
7454 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
/* Frame elimination and trampolines.  */
7455 #undef TARGET_CAN_ELIMINATE
7456 #define TARGET_CAN_ELIMINATE mep_can_eliminate
7457 #undef TARGET_TRAMPOLINE_INIT
7458 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
/* Instantiate the target vector with the hooks defined above.  */
7460 struct gcc_target targetm = TARGET_INITIALIZER;