1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Red Hat, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
47 #include "diagnostic-core.h"
48 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
56 /* Structure of this file:
58 + Command Line Option Support
59 + Pattern support - constraints, predicates, expanders
62 + Functions to save and restore machine-specific function data.
63 + Frame/Epilog/Prolog Related
65 + Function args in registers
66 + Handle pipeline hazards
69 + Machine-dependent Reorg
74 Symbols are encoded as @ <char> . <name> where <char> is one of these:
82 c - cb (control bus) */
/* Per-function machine-specific state, reached through cfun->machine;
   presumably allocated by mep_init_machine_status — confirm against
   init_machine_status below.  NOTE(review): several lines of this
   structure are not visible in this view of the file.  */
84 struct GTY(()) machine_function
86 int mep_frame_pointer_needed;
94 /* Records __builtin_return address. */
/* Per-hard-register save slot and saved flag; presumably filled in by
   the prologue/epilogue code — TODO confirm against the frame code.  */
98 int reg_save_slot[FIRST_PSEUDO_REGISTER];
99 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
101 /* 2 if the current function has an interrupt attribute, 1 if not, 0
102 if unknown. This is here because resource.c uses EPILOGUE_USES
104 int interrupt_handler;
106 /* Likewise, for disinterrupt attribute. */
107 int disable_interrupts;
109 /* Number of doloop tags used so far. */
112 /* True if the last tag was allocated to a doloop_end. */
113 bool doloop_tag_from_end;
115 /* True if reload changes $TP. */
116 bool reload_changes_tp;
118 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
119 We only set this if the function is an interrupt handler. */
120 int asms_without_operands;
123 #define MEP_CONTROL_REG(x) \
124 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
126 static GTY(()) section * based_section;
127 static GTY(()) section * tinybss_section;
128 static GTY(()) section * far_section;
129 static GTY(()) section * farbss_section;
130 static GTY(()) section * frodata_section;
131 static GTY(()) section * srodata_section;
133 static GTY(()) section * vtext_section;
134 static GTY(()) section * vftext_section;
135 static GTY(()) section * ftext_section;
137 static void mep_set_leaf_registers (int);
138 static bool symbol_p (rtx);
139 static bool symbolref_p (rtx);
140 static void encode_pattern_1 (rtx);
141 static void encode_pattern (rtx);
142 static bool const_in_range (rtx, int, int);
143 static void mep_rewrite_mult (rtx, rtx);
144 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
145 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
146 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
147 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
148 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
149 static bool mep_nongeneral_reg (rtx);
150 static bool mep_general_copro_reg (rtx);
151 static bool mep_nonregister (rtx);
152 static struct machine_function* mep_init_machine_status (void);
153 static rtx mep_tp_rtx (void);
154 static rtx mep_gp_rtx (void);
155 static bool mep_interrupt_p (void);
156 static bool mep_disinterrupt_p (void);
157 static bool mep_reg_set_p (rtx, rtx);
158 static bool mep_reg_set_in_function (int);
159 static bool mep_interrupt_saved_reg (int);
160 static bool mep_call_saves_register (int);
162 static void add_constant (int, int, int, int);
163 static rtx maybe_dead_move (rtx, rtx, bool);
164 static void mep_reload_pointer (int, const char *);
165 static void mep_start_function (FILE *, HOST_WIDE_INT);
166 static bool mep_function_ok_for_sibcall (tree, tree);
167 static int unique_bit_in (HOST_WIDE_INT);
168 static int bit_size_for_clip (HOST_WIDE_INT);
169 static int bytesize (const_tree, enum machine_mode);
170 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
171 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
172 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
173 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
174 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
175 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
176 static bool mep_function_attribute_inlinable_p (const_tree);
177 static bool mep_can_inline_p (tree, tree);
178 static bool mep_lookup_pragma_disinterrupt (const char *);
179 static int mep_multiple_address_regions (tree, bool);
180 static int mep_attrlist_to_encoding (tree, tree);
181 static void mep_insert_attributes (tree, tree *);
182 static void mep_encode_section_info (tree, rtx, int);
183 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
184 static void mep_unique_section (tree, int);
185 static unsigned int mep_section_type_flags (tree, const char *, int);
186 static void mep_asm_named_section (const char *, unsigned int, tree);
187 static bool mep_mentioned_p (rtx, rtx, int);
188 static void mep_reorg_regmove (rtx);
189 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
190 static void mep_reorg_repeat (rtx);
191 static bool mep_invertable_branch_p (rtx);
192 static void mep_invert_branch (rtx, rtx);
193 static void mep_reorg_erepeat (rtx);
194 static void mep_jmp_return_reorg (rtx);
195 static void mep_reorg_addcombine (rtx);
196 static void mep_reorg (void);
197 static void mep_init_intrinsics (void);
198 static void mep_init_builtins (void);
199 static void mep_intrinsic_unavailable (int);
200 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
201 static bool mep_get_move_insn (int, const struct cgen_insn **);
202 static rtx mep_convert_arg (enum machine_mode, rtx);
203 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
204 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
205 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
206 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
207 static int mep_adjust_cost (rtx, rtx, rtx, int);
208 static int mep_issue_rate (void);
209 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
210 static void mep_move_ready_insn (rtx *, int, rtx);
211 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
212 static rtx mep_make_bundle (rtx, rtx);
213 static void mep_bundle_insns (rtx);
214 static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
215 static int mep_address_cost (rtx, bool);
216 static void mep_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
218 static bool mep_pass_by_reference (cumulative_args_t cum, enum machine_mode,
220 static rtx mep_function_arg (cumulative_args_t, enum machine_mode,
222 static void mep_function_arg_advance (cumulative_args_t, enum machine_mode,
224 static bool mep_vector_mode_supported_p (enum machine_mode);
225 static rtx mep_allocate_initial_value (rtx);
226 static void mep_asm_init_sections (void);
227 static int mep_comp_type_attributes (const_tree, const_tree);
228 static bool mep_narrow_volatile_bitfield (void);
229 static rtx mep_expand_builtin_saveregs (void);
230 static tree mep_build_builtin_va_list (void);
231 static void mep_expand_va_start (tree, rtx);
232 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
233 static bool mep_can_eliminate (const int, const int);
234 static void mep_conditional_register_usage (void);
235 static void mep_trampoline_init (rtx, tree, rtx);
237 #define WANT_GCC_DEFINITIONS
238 #include "mep-intrin.h"
239 #undef WANT_GCC_DEFINITIONS
242 /* Command Line Option Support. */
244 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
246 /* True if we can use cmov instructions to move values back and forth
247 between core and coprocessor registers. */
248 bool mep_have_core_copro_moves_p;
250 /* True if we can use cmov instructions (or a work-alike) to move
251 values between coprocessor registers. */
252 bool mep_have_copro_copro_moves_p;
254 /* A table of all coprocessor instructions that can act like
255 a coprocessor-to-coprocessor cmov. */
256 static const int mep_cmov_insns[] = {
271 mep_set_leaf_registers (int enable)
275 if (mep_leaf_registers[0] != enable)
276 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
277 mep_leaf_registers[i] = enable;
281 mep_conditional_register_usage (void)
285 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
287 fixed_regs[HI_REGNO] = 1;
288 fixed_regs[LO_REGNO] = 1;
289 call_used_regs[HI_REGNO] = 1;
290 call_used_regs[LO_REGNO] = 1;
293 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
298 mep_option_override (void)
302 cl_deferred_option *opt;
303 VEC(cl_deferred_option,heap) *vec
304 = (VEC(cl_deferred_option,heap) *) mep_deferred_options;
306 FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
308 switch (opt->opt_index)
311 for (j = 0; j < 32; j++)
312 fixed_regs[j + 48] = 0;
313 for (j = 0; j < 32; j++)
314 call_used_regs[j + 48] = 1;
315 for (j = 6; j < 8; j++)
316 call_used_regs[j + 48] = 0;
318 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
353 warning (OPT_fpic, "-fpic is not supported");
355 warning (OPT_fPIC, "-fPIC is not supported");
356 if (TARGET_S && TARGET_M)
357 error ("only one of -ms and -mm may be given");
358 if (TARGET_S && TARGET_L)
359 error ("only one of -ms and -ml may be given");
360 if (TARGET_M && TARGET_L)
361 error ("only one of -mm and -ml may be given");
362 if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
363 error ("only one of -ms and -mtiny= may be given");
364 if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
365 error ("only one of -mm and -mtiny= may be given");
366 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
367 warning (0, "-mclip currently has no effect without -mminmax");
369 if (mep_const_section)
371 if (strcmp (mep_const_section, "tiny") != 0
372 && strcmp (mep_const_section, "near") != 0
373 && strcmp (mep_const_section, "far") != 0)
374 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
378 mep_tiny_cutoff = 65536;
381 if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
384 if (TARGET_64BIT_CR_REGS)
385 flag_split_wide_types = 0;
387 init_machine_status = mep_init_machine_status;
388 mep_init_intrinsics ();
391 /* Pattern Support - constraints, predicates, expanders. */
393 /* MEP has very few instructions that can refer to the span of
394 addresses used by symbols, so it's common to check for them. */
399 int c = GET_CODE (x);
401 return (c == CONST_INT
411 if (GET_CODE (x) != MEM)
414 c = GET_CODE (XEXP (x, 0));
415 return (c == CONST_INT
420 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
422 #define GEN_REG(R, STRICT) \
425 && ((R) == ARG_POINTER_REGNUM \
426 || (R) >= FIRST_PSEUDO_REGISTER)))
428 static char pattern[12], *patternp;
429 static GTY(()) rtx patternr[12];
430 #define RTX_IS(x) (strcmp (pattern, x) == 0)
433 encode_pattern_1 (rtx x)
437 if (patternp == pattern + sizeof (pattern) - 2)
443 patternr[patternp-pattern] = x;
445 switch (GET_CODE (x))
453 encode_pattern_1 (XEXP(x, 0));
457 encode_pattern_1 (XEXP(x, 0));
458 encode_pattern_1 (XEXP(x, 1));
462 encode_pattern_1 (XEXP(x, 0));
463 encode_pattern_1 (XEXP(x, 1));
467 encode_pattern_1 (XEXP(x, 0));
481 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
482 for (i=0; i<XVECLEN (x, 0); i++)
483 encode_pattern_1 (XVECEXP (x, 0, i));
491 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
500 encode_pattern (rtx x)
503 encode_pattern_1 (x);
508 mep_section_tag (rtx x)
514 switch (GET_CODE (x))
521 x = XVECEXP (x, 0, 0);
524 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
533 if (GET_CODE (x) != SYMBOL_REF)
536 if (name[0] == '@' && name[2] == '.')
538 if (name[1] == 'i' || name[1] == 'I')
541 return 'f'; /* near */
542 return 'n'; /* far */
550 mep_regno_reg_class (int regno)
554 case SP_REGNO: return SP_REGS;
555 case TP_REGNO: return TP_REGS;
556 case GP_REGNO: return GP_REGS;
557 case 0: return R0_REGS;
558 case HI_REGNO: return HI_REGS;
559 case LO_REGNO: return LO_REGS;
560 case ARG_POINTER_REGNUM: return GENERAL_REGS;
563 if (GR_REGNO_P (regno))
564 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
565 if (CONTROL_REGNO_P (regno))
568 if (CR_REGNO_P (regno))
572 /* Search for the register amongst user-defined subclasses of
573 the coprocessor registers. */
574 for (i = USER0_REGS; i <= USER3_REGS; ++i)
576 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
578 for (j = 0; j < N_REG_CLASSES; ++j)
580 enum reg_class sub = reg_class_subclasses[i][j];
582 if (sub == LIM_REG_CLASSES)
584 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
589 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
592 if (CCR_REGNO_P (regno))
595 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
601 mep_reg_class_from_constraint (int c, const char *str)
618 return LOADABLE_CR_REGS;
620 return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
622 return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
649 enum reg_class which = c - 'A' + USER0_REGS;
650 return (reg_class_size[which] > 0 ? which : NO_REGS);
/* Return true if VALUE satisfies the integer constraint letter C.
   NOTE(review): the switch framing lines are not visible in this view.  */
659 mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
/* I: signed 16-bit.  J: unsigned 16-bit.  K: unsigned 24-bit.
   L: signed 6-bit.  M: unsigned 5-bit.  N: unsigned 4-bit.  */
663 case 'I': return value >= -32768 && value < 32768;
664 case 'J': return value >= 0 && value < 65536;
665 case 'K': return value >= 0 && value < 0x01000000;
666 case 'L': return value >= -32 && value < 32;
667 case 'M': return value >= 0 && value < 32;
668 case 'N': return value >= 0 && value < 16;
/* Anything representable as a signed 32-bit value; spelled
   "-2147483647-1" because the literal 2147483648 would overflow.  */
672 return value >= -2147483647-1 && value <= 2147483647;
679 mep_extra_constraint (rtx value, int c)
681 encode_pattern (value);
686 /* For near symbols, like what call uses. */
687 if (GET_CODE (value) == REG)
689 return mep_call_address_operand (value, GET_MODE (value));
692 /* For signed 8-bit immediates. */
693 return (GET_CODE (value) == CONST_INT
694 && INTVAL (value) >= -128
695 && INTVAL (value) <= 127);
698 /* For tp/gp relative symbol values. */
699 return (RTX_IS ("u3s") || RTX_IS ("u2s")
700 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
703 /* Non-absolute memories. */
704 return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
708 return RTX_IS ("Hs");
711 /* Register indirect. */
712 return RTX_IS ("mr");
715 return mep_section_tag (value) == 'c' && RTX_IS ("ms");
/* Return true if X is a CONST_INT in the inclusive range [MINV, MAXV].  */
726 const_in_range (rtx x, int minv, int maxv)
728 return (GET_CODE (x) == CONST_INT
729 && INTVAL (x) >= minv
730 && INTVAL (x) <= maxv);
733 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
734 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
735 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
736 at the end of the insn stream. */
739 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
/* If DEST already holds one factor, the other factor is the "mulr"
   source and no move is needed.  */
741 if (rtx_equal_p (dest, src1))
743 else if (rtx_equal_p (dest, src2))
/* Otherwise copy SRC1 into DEST first: before INSN when INSN is
   nonnull, else at the end of the insn stream (per the comment above).  */
748 emit_insn (gen_movsi (copy_rtx (dest), src1));
750 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
755 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
756 Change the last element of PATTERN from (clobber (scratch:SI))
757 to (clobber (reg:SI HI_REGNO)). */
760 mep_rewrite_mult (rtx insn, rtx pattern)
/* The (clobber (scratch:SI)) is the last element of the PARALLEL;
   retarget it at the hard $hi register (see the comment above).  */
764 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
765 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
766 PATTERN (insn) = pattern;
/* Force the rewritten insn to be re-recognized.  */
767 INSN_CODE (insn) = -1;
770 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
771 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
772 store the result in DEST if nonnull. */
775 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
779 lo = gen_rtx_REG (SImode, LO_REGNO);
/* With a DEST, use the mulr form so the result also lands in a general
   register; otherwise only $lo is written.  */
781 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
782 mep_mulr_source (insn, dest, src1, src2));
784 pattern = gen_mulsi3_lo (lo, src1, src2);
785 mep_rewrite_mult (insn, pattern);
788 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
789 SRC3 into $lo, then use either madd or maddr. The move into $lo will
790 be deleted by a peephole2 if SRC3 is already in $lo. */
793 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
797 lo = gen_rtx_REG (SImode, LO_REGNO);
/* Load the addend into $lo first; a peephole2 deletes this move when
   SRC3 is already there (see the comment above).  */
798 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn)
800 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
801 mep_mulr_source (insn, dest, src1, src2),
804 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
805 mep_rewrite_mult (insn, pattern);
808 /* Return true if $lo has the same value as integer register GPR when
809 instruction INSN is reached. If necessary, rewrite the instruction
810 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
811 rtx for (reg:SI LO_REGNO).
813 This function is intended to be used by the peephole2 pass. Since
814 that pass goes from the end of a basic block to the beginning, and
815 propagates liveness information on the way, there is no need to
816 update register notes here.
818 If GPR_DEAD_P is true on entry, and this function returns true,
819 then the caller will replace _every_ use of GPR in and after INSN
820 with LO. This means that if the instruction that sets $lo is a
821 mulr- or maddr-type instruction, we can rewrite it to use mul or
822 madd instead. In combination with the copy progagation pass,
823 this allows us to replace sequences like:
832 if GPR is no longer used. */
835 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
/* Walk backwards from INSN looking for the instruction that set $lo.
   NOTE(review): the loop framing lines are not visible in this view.  */
839 insn = PREV_INSN (insn);
841 switch (recog_memoized (insn))
843 case CODE_FOR_mulsi3_1:
845 if (rtx_equal_p (recog_data.operand[0], gpr))
/* Rewrite the multiply so it stores into $lo; drop the GPR
   destination when the caller will replace every later use of GPR.  */
847 mep_rewrite_mulsi3 (insn,
848 gpr_dead_p ? NULL : recog_data.operand[0],
849 recog_data.operand[1],
850 recog_data.operand[2]);
855 case CODE_FOR_maddsi3:
857 if (rtx_equal_p (recog_data.operand[0], gpr))
859 mep_rewrite_maddsi3 (insn,
860 gpr_dead_p ? NULL : recog_data.operand[0],
861 recog_data.operand[1],
862 recog_data.operand[2],
863 recog_data.operand[3]);
868 case CODE_FOR_mulsi3r:
869 case CODE_FOR_maddsi3r:
/* $lo already holds the product; it matches GPR iff operand 1 is GPR.  */
871 return rtx_equal_p (recog_data.operand[1], gpr);
/* Give up if an intervening insn could change $lo or GPR, or is
   volatile.  */
874 if (reg_set_p (lo, insn)
875 || reg_set_p (gpr, insn)
876 || volatile_insn_p (PATTERN (insn)))
/* A mere use of GPR defeats the "replace every use" promise made to
   the caller when GPR_DEAD_P is set.  */
879 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
/* Do not scan past the start of the basic block.  */
884 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
888 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
891 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
/* mep_reuse_lo_p_1 may clobber recog_data; presumably INSN is
   re-extracted before returning — those lines are not visible in this
   view (compare mep_use_post_modify_p below).  */
893 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
898 /* Return true if SET can be turned into a post-modify load or store
899 that adds OFFSET to GPR. In other words, return true if SET can be
902 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
904 It's OK to change SET to an equivalent operation in order to
/* NOTE(review): several framing lines (braces, early returns) of this
   function are not visible in this view; the existing comments below
   describe each visible step.  */
908 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
911 unsigned int reg_bytes, mem_bytes;
912 enum machine_mode reg_mode, mem_mode;
914 /* Only simple SETs can be converted. */
915 if (GET_CODE (set) != SET)
918 /* Point REG to what we hope will be the register side of the set and
919 MEM to what we hope will be the memory side. */
920 if (GET_CODE (SET_DEST (set)) == MEM)
922 mem = &SET_DEST (set);
923 reg = &SET_SRC (set);
927 reg = &SET_DEST (set);
928 mem = &SET_SRC (set);
/* A sign-extending load still qualifies; look through the extension.  */
929 if (GET_CODE (*mem) == SIGN_EXTEND)
930 mem = &XEXP (*mem, 0);
933 /* Check that *REG is a suitable coprocessor register. */
934 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
937 /* Check that *MEM is a suitable memory reference. */
938 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
941 /* Get the number of bytes in each operand. */
942 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
943 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
945 /* Check that OFFSET is suitably aligned. */
946 if (INTVAL (offset) & (mem_bytes - 1))
949 /* Convert *MEM to a normal integer mode. */
950 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
951 *mem = change_address (*mem, mem_mode, NULL);
953 /* Adjust *REG as well. */
954 *reg = shallow_copy_rtx (*reg);
955 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
957 /* SET is a subword load. Convert it to an explicit extension. */
958 PUT_MODE (*reg, SImode);
959 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
963 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
964 PUT_MODE (*reg, reg_mode);
969 /* Return the effect of frame-related instruction INSN. */
972 mep_frame_expr (rtx insn)
/* Prefer an explicit REG_FRAME_RELATED_EXPR note; otherwise the effect
   is the insn's own pattern (copied so it can be marked and reused).  */
976 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
977 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
978 RTX_FRAME_RELATED_P (expr) = 1;
982 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
983 new pattern in INSN1; INSN2 will be deleted by the caller. */
986 mep_make_parallel (rtx insn1, rtx insn2)
/* If INSN2 affects the frame, migrate its frame-related expression
   onto INSN1, combining both expressions in a SEQUENCE when INSN1 is
   itself frame-related.  */
990 if (RTX_FRAME_RELATED_P (insn2))
992 expr = mep_frame_expr (insn2);
993 if (RTX_FRAME_RELATED_P (insn1))
994 expr = gen_rtx_SEQUENCE (VOIDmode,
995 gen_rtvec (2, mep_frame_expr (insn1), expr));
996 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
997 RTX_FRAME_RELATED_P (insn1) = 1;
/* Join the two patterns into one PARALLEL and force re-recognition.  */
1000 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
1001 gen_rtvec (2, PATTERN (insn1),
1003 INSN_CODE (insn1) = -1;
1006 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
1007 the basic block to see if any previous load or store instruction can
1008 be persuaded to do SET_INSN as a side-effect. Return true if so. */
1011 mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
/* Scan backwards for a load/store that can absorb the addition.
   NOTE(review): the loop framing lines are not visible in this view.  */
1018 insn = PREV_INSN (insn);
1021 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
/* Fold SET_INSN into the memory access as its post-modify side
   effect.  */
1023 mep_make_parallel (insn, set_insn);
/* Any intervening set or use of REG, or a volatile insn, blocks the
   transformation.  */
1027 if (reg_set_p (reg, insn)
1028 || reg_referenced_p (reg, PATTERN (insn))
1029 || volatile_insn_p (PATTERN (insn)))
/* Do not scan past the start of the basic block.  */
1033 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
1037 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1040 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1042 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1043 extract_insn (insn);
/* Decide whether a clip instruction can implement a comparison against
   the constant bounds UX (upper) and LX (lower); S presumably selects
   the signed form — TODO confirm against the callers.  NOTE(review):
   several lines of this function are not visible in this view.  */
1048 mep_allow_clip (rtx ux, rtx lx, int s)
1050 HOST_WIDE_INT u = INTVAL (ux);
1051 HOST_WIDE_INT l = INTVAL (lx);
/* clip is only available when -mclip is in effect (see the -mclip /
   -mminmax warning in mep_option_override).  */
1054 if (!TARGET_OPT_CLIP)
/* Symmetric ranges of the form [-2^i, 2^i - 1].  */
1059 for (i = 0; i < 30; i ++)
1060 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1061 && (l == - ((HOST_WIDE_INT) 1 << i)))
/* Upper bounds of the form 2^i - 1 alone.  */
1069 for (i = 0; i < 30; i ++)
1070 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
/* Return whether X is a CONST_INT whose low byte has exactly one bit
   set, or exactly one bit clear, matched against LOOKING_FOR.
   NOTE(review): the single-bit-set cases presumably return LOOKING_FOR;
   that return and the default case are not visible in this view.  */
1077 mep_bit_position_p (rtx x, bool looking_for)
1079 if (GET_CODE (x) != CONST_INT)
/* Only the low byte is significant.  */
1081 switch ((int) INTVAL(x) & 0xff)
/* Exactly one bit set.  */
1083 case 0x01: case 0x02: case 0x04: case 0x08:
1084 case 0x10: case 0x20: case 0x40: case 0x80:
/* Exactly one bit clear.  */
1086 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1087 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1088 return !looking_for;
/* Return whether a DEST = SRC move must be split into multiple insns
   (see mep_split_mov below).  NOTE(review): parts of the condition are
   not visible in this view.  */
1094 move_needs_splitting (rtx dest, rtx src,
1095 enum machine_mode mode ATTRIBUTE_UNUSED)
1097 int s = mep_section_tag (src);
/* Look through CONST/MEM wrappers to the underlying address.  */
1101 if (GET_CODE (src) == CONST
1102 || GET_CODE (src) == MEM)
1103 src = XEXP (src, 0);
1104 else if (GET_CODE (src) == SYMBOL_REF
1105 || GET_CODE (src) == LABEL_REF
1106 || GET_CODE (src) == PLUS)
/* Symbol plus an offset outside [-65536, 0xffffff], or a destination
   register above r7, forces a split.  */
1112 || (GET_CODE (src) == PLUS
1113 && GET_CODE (XEXP (src, 1)) == CONST_INT
1114 && (INTVAL (XEXP (src, 1)) < -65536
1115 || INTVAL (XEXP (src, 1)) > 0xffffff))
1116 || (GET_CODE (dest) == REG
1117 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1123 mep_split_mov (rtx *operands, int symbolic)
1127 if (move_needs_splitting (operands[0], operands[1], SImode))
1132 if (GET_CODE (operands[1]) != CONST_INT)
1135 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1136 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1137 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1140 if (((!reload_completed && !reload_in_progress)
1141 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1142 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1148 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1149 it to one specific value. So the insn chosen depends on whether
1150 the source and destination modes match. */
1153 mep_vliw_mode_match (rtx tgt)
/* TGT is a CONST_INT flag saying whether the call target is VLIW;
   compare it with the VLIW-ness of the current function (see the
   jsrv/PSW.OM comment above).  */
1155 bool src_vliw = mep_vliw_function_p (cfun->decl);
1156 bool tgt_vliw = INTVAL (tgt);
1158 return src_vliw == tgt_vliw;
1161 /* Like the above, but also test for near/far mismatches. */
1164 mep_vliw_jmp_match (rtx tgt)
1166 bool src_vliw = mep_vliw_function_p (cfun->decl);
1167 bool tgt_vliw = INTVAL (tgt);
/* Per the comment above, this also tests near/far: tag 'f' marks a far
   function.  NOTE(review): the return taken in that case is not
   visible in this view.  */
1169 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1172 return src_vliw == tgt_vliw;
/* Return true if insn X has the MULTI slot attribute (per the
   machine-description slot attribute).  */
1176 mep_multi_slot (rtx x)
1178 return get_attr_slot (x) == SLOT_MULTI;
1181 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1184 mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1186 /* We can't convert symbol values to gp- or tp-rel values after
1187 reload, as reload might have used $gp or $tp for other
1189 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1191 char e = mep_section_tag (x);
/* Reject tags 't' and 'b' — presumably the tiny ($gp-relative) and
   based ($tp-relative) sections per the symbol-encoding table near the
   top of the file; confirm against mep_attrlist_to_encoding.  */
1192 return (e != 't' && e != 'b');
1197 /* Be careful not to use macros that need to be compiled one way for
1198 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1201 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1205 #define DEBUG_LEGIT 0
1207 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1211 if (GET_CODE (x) == LO_SUM
1212 && GET_CODE (XEXP (x, 0)) == REG
1213 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1214 && CONSTANT_P (XEXP (x, 1)))
1216 if (GET_MODE_SIZE (mode) > 4)
1218 /* We will end up splitting this, and lo_sums are not
1219 offsettable for us. */
1221 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1226 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1231 if (GET_CODE (x) == REG
1232 && GEN_REG (REGNO (x), strict))
1235 fprintf (stderr, " - yup, [reg]\n");
1240 if (GET_CODE (x) == PLUS
1241 && GET_CODE (XEXP (x, 0)) == REG
1242 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1243 && const_in_range (XEXP (x, 1), -32768, 32767))
1246 fprintf (stderr, " - yup, [reg+const]\n");
1251 if (GET_CODE (x) == PLUS
1252 && GET_CODE (XEXP (x, 0)) == REG
1253 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1254 && GET_CODE (XEXP (x, 1)) == CONST
1255 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1256 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1257 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1258 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1261 fprintf (stderr, " - yup, [reg+unspec]\n");
1266 the_tag = mep_section_tag (x);
1271 fprintf (stderr, " - nope, [far]\n");
1276 if (mode == VOIDmode
1277 && GET_CODE (x) == SYMBOL_REF)
1280 fprintf (stderr, " - yup, call [symbol]\n");
1285 if ((mode == SImode || mode == SFmode)
1287 && mep_legitimate_constant_p (mode, x)
1288 && the_tag != 't' && the_tag != 'b')
1290 if (GET_CODE (x) != CONST_INT
1291 || (INTVAL (x) <= 0xfffff
1293 && (INTVAL (x) % 4) == 0))
1296 fprintf (stderr, " - yup, [const]\n");
1303 fprintf (stderr, " - nope.\n");
1309 mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
1311 int ind_levels ATTRIBUTE_UNUSED)
1313 enum reload_type type = (enum reload_type) type_i;
1315 if (GET_CODE (*x) == PLUS
1316 && GET_CODE (XEXP (*x, 0)) == MEM
1317 && GET_CODE (XEXP (*x, 1)) == REG)
1319 /* GCC will by default copy the MEM into a REG, which results in
1320 an invalid address. For us, the best thing to do is move the
1321 whole expression to a REG. */
1322 push_reload (*x, NULL_RTX, x, NULL,
1323 GENERAL_REGS, mode, VOIDmode,
1328 if (GET_CODE (*x) == PLUS
1329 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1330 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1332 char e = mep_section_tag (XEXP (*x, 0));
1334 if (e != 't' && e != 'b')
1336 /* GCC thinks that (sym+const) is a valid address. Well,
1337 sometimes it is, this time it isn't. The best thing to
1338 do is reload the symbol to a register, since reg+int
1339 tends to work, and we can't just add the symbol and
1341 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1342 GENERAL_REGS, mode, VOIDmode,
1351 mep_core_address_length (rtx insn, int opn)
1353 rtx set = single_set (insn);
1354 rtx mem = XEXP (set, opn);
1355 rtx other = XEXP (set, 1-opn);
1356 rtx addr = XEXP (mem, 0);
1358 if (register_operand (addr, Pmode))
1360 if (GET_CODE (addr) == PLUS)
1362 rtx addend = XEXP (addr, 1);
1364 gcc_assert (REG_P (XEXP (addr, 0)));
1366 switch (REGNO (XEXP (addr, 0)))
1368 case STACK_POINTER_REGNUM:
1369 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1370 && mep_imm7a4_operand (addend, VOIDmode))
1375 gcc_assert (REG_P (other));
1377 if (REGNO (other) >= 8)
1380 if (GET_CODE (addend) == CONST
1381 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1382 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1385 if (GET_CODE (addend) == CONST_INT
1386 && INTVAL (addend) >= 0
1387 && INTVAL (addend) <= 127
1388 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1398 mep_cop_address_length (rtx insn, int opn)
1400 rtx set = single_set (insn);
1401 rtx mem = XEXP (set, opn);
1402 rtx addr = XEXP (mem, 0);
1404 if (GET_CODE (mem) != MEM)
1406 if (register_operand (addr, Pmode))
1408 if (GET_CODE (addr) == POST_INC)
1414 #define DEBUG_EXPAND_MOV 0
1416 mep_expand_mov (rtx *operands, enum machine_mode mode)
1421 int post_reload = 0;
1423 tag[0] = mep_section_tag (operands[0]);
1424 tag[1] = mep_section_tag (operands[1]);
1426 if (!reload_in_progress
1427 && !reload_completed
1428 && GET_CODE (operands[0]) != REG
1429 && GET_CODE (operands[0]) != SUBREG
1430 && GET_CODE (operands[1]) != REG
1431 && GET_CODE (operands[1]) != SUBREG)
1432 operands[1] = copy_to_mode_reg (mode, operands[1]);
1434 #if DEBUG_EXPAND_MOV
1435 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1436 reload_in_progress || reload_completed);
1437 debug_rtx (operands[0]);
1438 debug_rtx (operands[1]);
1441 if (mode == DImode || mode == DFmode)
1444 if (reload_in_progress || reload_completed)
1448 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1449 cfun->machine->reload_changes_tp = true;
1451 if (tag[0] == 't' || tag[1] == 't')
1453 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1454 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1457 if (tag[0] == 'b' || tag[1] == 'b')
1459 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1460 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1463 if (cfun->machine->reload_changes_tp == true)
1470 if (symbol_p (operands[1]))
1472 t = mep_section_tag (operands[1]);
1473 if (t == 'b' || t == 't')
1476 if (GET_CODE (operands[1]) == SYMBOL_REF)
1478 tpsym = operands[1];
1479 n = gen_rtx_UNSPEC (mode,
1480 gen_rtvec (1, operands[1]),
1481 t == 'b' ? UNS_TPREL : UNS_GPREL);
1482 n = gen_rtx_CONST (mode, n);
1484 else if (GET_CODE (operands[1]) == CONST
1485 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1486 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1487 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1489 tpsym = XEXP (XEXP (operands[1], 0), 0);
1490 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1491 n = gen_rtx_UNSPEC (mode,
1492 gen_rtvec (1, tpsym),
1493 t == 'b' ? UNS_TPREL : UNS_GPREL);
1494 n = gen_rtx_PLUS (mode, n, tpoffs);
1495 n = gen_rtx_CONST (mode, n);
1497 else if (GET_CODE (operands[1]) == CONST
1498 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1502 error ("unusual TP-relative address");
1506 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1507 : mep_gp_rtx ()), n);
1508 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1509 #if DEBUG_EXPAND_MOV
1510 fprintf(stderr, "mep_expand_mov emitting ");
1517 for (i=0; i < 2; i++)
1519 t = mep_section_tag (operands[i]);
1520 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1525 sym = XEXP (operands[i], 0);
1526 if (GET_CODE (sym) == CONST
1527 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1528 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1541 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1542 n = gen_rtx_CONST (Pmode, n);
1543 n = gen_rtx_PLUS (Pmode, r, n);
1544 operands[i] = replace_equiv_address (operands[i], n);
1549 if ((GET_CODE (operands[1]) != REG
1550 && MEP_CONTROL_REG (operands[0]))
1551 || (GET_CODE (operands[0]) != REG
1552 && MEP_CONTROL_REG (operands[1])))
1555 #if DEBUG_EXPAND_MOV
1556 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1558 temp = gen_reg_rtx (mode);
1559 emit_move_insn (temp, operands[1]);
1563 if (symbolref_p (operands[0])
1564 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1565 || (GET_MODE_SIZE (mode) != 4)))
1569 gcc_assert (!reload_in_progress && !reload_completed);
1571 temp = force_reg (Pmode, XEXP (operands[0], 0));
1572 operands[0] = replace_equiv_address (operands[0], temp);
1573 emit_move_insn (operands[0], operands[1]);
1577 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
1580 if (symbol_p (operands[1])
1581 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1583 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1584 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1588 if (symbolref_p (operands[1])
1589 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1593 if (reload_in_progress || reload_completed)
1596 temp = gen_reg_rtx (Pmode);
1598 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1599 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1600 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1607 /* Cases where the pattern can't be made to use at all. */
/* Predicate used by the movM patterns: nonzero iff this operand pair is
   acceptable as a single move.  Rejects HIGH sources (movh patterns),
   far-section ('f') operands, and pre-reload based-section ('b'/'t')
   symbols; otherwise requires at least one general register (SUBREGs of
   registers are looked through).  */
1610 mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1614 #define DEBUG_MOV_OK 0
1616 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1617 mep_section_tag (operands[1]));
1618 debug_rtx (operands[0]);
1619 debug_rtx (operands[1]);
1622 /* We want the movh patterns to get these. */
1623 if (GET_CODE (operands[1]) == HIGH)
1626 /* We can't store a register to a far variable without using a
1627 scratch register to hold the address. Using far variables should
1628 be split by mep_emit_mov anyway. */
1629 if (mep_section_tag (operands[0]) == 'f'
1630 || mep_section_tag (operands[1]) == 'f')
1633 fprintf (stderr, " - no, f\n");
1637 i = mep_section_tag (operands[1]);
1638 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1639 /* These are supposed to be generated with adds of the appropriate
1640 register. During and after reload, however, we allow them to
1641 be accessed as normal symbols because adding a dependency on
1642 the base register now might cause problems. */
1645 fprintf (stderr, " - no, bt\n");
1650 /* The only moves we can allow involve at least one general
1651 register, so require it. */
1652 for (i = 0; i < 2; i ++)
1654 /* Allow subregs too, before reload. */
1655 rtx x = operands[i];
1657 if (GET_CODE (x) == SUBREG)
1659 if (GET_CODE (x) == REG
1660 && ! MEP_CONTROL_REG (x))
1663 fprintf (stderr, " - ok\n");
1669 fprintf (stderr, " - no, no gen reg\n");
1674 #define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a DImode/DFmode move into two SImode halves.  On input,
   operands[0]/operands[1] are the wide destination/source; on output,
   operands[2..5] hold the four word-sized pieces.  The high parts are
   stored before the low parts (operands[2]/[3] = hi, [4]/[5] = lo)
   because the high half of a CR <- GPR move must be emitted after the
   low half; the halves are swapped afterwards if the destination's low
   word overlaps the source's high word.  */
1676 mep_split_wide_move (rtx *operands, enum machine_mode mode)
1680 #if DEBUG_SPLIT_WIDE_MOVE
1681 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1682 debug_rtx (operands[0]);
1683 debug_rtx (operands[1]);
1686 for (i = 0; i <= 1; i++)
1688 rtx op = operands[i], hi, lo;
1690 switch (GET_CODE (op))
1694 unsigned int regno = REGNO (op);
1696 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
/* 64-bit coprocessor register: low word is the register itself,
   high word is extracted from the DImode view.  */
1700 lo = gen_rtx_REG (SImode, regno);
1702 hi = gen_rtx_ZERO_EXTRACT (SImode,
1703 gen_rtx_REG (DImode, regno),
/* Register pair: endianness selects which regno holds which half.  */
1708 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1709 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1717 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1718 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1725 /* The high part of CR <- GPR moves must be done after the low part. */
1726 operands [i + 4] = lo;
1727 operands [i + 2] = hi;
1730 if (reg_mentioned_p (operands[2], operands[5])
1731 || GET_CODE (operands[2]) == ZERO_EXTRACT
1732 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1736 /* Overlapping register pairs -- make sure we don't
1737 early-clobber ourselves. */
1739 operands[2] = operands[4];
1742 operands[3] = operands[5];
1746 #if DEBUG_SPLIT_WIDE_MOVE
1747 fprintf(stderr, "\033[34m");
1748 debug_rtx (operands[2]);
1749 debug_rtx (operands[3]);
1750 debug_rtx (operands[4]);
1751 debug_rtx (operands[5]);
1752 fprintf(stderr, "\033[0m");
1756 /* Emit a setcc instruction in its entirity. */
/* Expand DEST = (OP1 CODE OP2) for SImode comparisons.  Conditions the
   hardware supports directly are emitted as a SET; the rest are reduced
   to supported ones, either by swapping operands, by rewriting into an
   LTU against a small constant (the sltu3 idiom), or by recursively
   adjusting a constant OP2 by one (LE -> LT, GE -> GT, etc.).  Returns
   whether the expansion succeeded (failure paths break out of the
   switch; not all are visible in this view).  */
1759 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1767 tmp = op1, op1 = op2, op2 = tmp;
1768 code = swap_condition (code);
1773 op1 = force_reg (SImode, op1);
1774 emit_insn (gen_rtx_SET (VOIDmode, dest,
1775 gen_rtx_fmt_ee (code, SImode, op1, op2)));
/* EQ: subtract and test the result against 1 with sltu.  */
1779 if (op2 != const0_rtx)
1780 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1781 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1785 /* Branchful sequence:
1787 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1790 Branchless sequence:
1791 add3 tmp, op1, -op2 32-bit (or mov + sub)
1792 sltu3 tmp, tmp, 1 16-bit
1793 xor3 dest, tmp, 1 32-bit
1795 if (optimize_size && op2 != const0_rtx)
1798 if (op2 != const0_rtx)
1799 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1801 op2 = gen_reg_rtx (SImode);
1802 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1804 emit_insn (gen_rtx_SET (VOIDmode, dest,
1805 gen_rtx_XOR (SImode, op2, const1_rtx)));
/* LE: rewrite a <= c as a < c+1, guarding against constant overflow.
   NOTE(review): 0x7ffffff has only seven 'f's; INT_MAX is 0x7fffffff.
   As written the overflow guard fires at the wrong value -- confirm
   whether this is a latent typo.  */
1809 if (GET_CODE (op2) != CONST_INT
1810 || INTVAL (op2) == 0x7ffffff)
1812 op2 = GEN_INT (INTVAL (op2) + 1);
1813 return mep_expand_setcc_1 (LT, dest, op1, op2);
/* LEU: a <= c becomes a < c+1 (unsigned), avoiding wrap at all-ones.  */
1816 if (GET_CODE (op2) != CONST_INT
1817 || INTVAL (op2) == -1)
1819 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1820 return mep_expand_setcc_1 (LTU, dest, op1, op2);
/* GE: a >= c becomes a > c-1, avoiding wrap at INT_MIN.  */
1823 if (GET_CODE (op2) != CONST_INT
1824 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1826 op2 = GEN_INT (INTVAL (op2) - 1);
1827 return mep_expand_setcc_1 (GT, dest, op1, op2);
/* GEU: a >= c becomes a > c-1 (unsigned), avoiding wrap at zero.  */
1830 if (GET_CODE (op2) != CONST_INT
1831 || op2 == const0_rtx)
1833 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1834 return mep_expand_setcc_1 (GTU, dest, op1, op2);
/* Expander entry point for the cstoresi4-style pattern: unpack the
   operands array (dest, comparison rtx, two compare operands) and
   delegate to mep_expand_setcc_1.  */
1842 mep_expand_setcc (rtx *operands)
1844 rtx dest = operands[0];
1845 enum rtx_code code = GET_CODE (operands[1]);
1846 rtx op0 = operands[2];
1847 rtx op1 = operands[3];
1849 return mep_expand_setcc_1 (code, dest, op0, op1);
/* Expand a conditional branch comparison.  The MeP core only branches
   on (in)equality, so ordered comparisons are lowered to a setcc into a
   temporary followed by a bnez/beqz-style test; small immediates that
   fit the 4-bit compare forms are kept inline.  Returns the comparison
   rtx to put in the branch pattern.  */
1853 mep_expand_cbranch (rtx *operands)
1855 enum rtx_code code = GET_CODE (operands[0]);
1856 rtx op0 = operands[1];
1857 rtx op1 = operands[2];
/* LT/GE against a 4-bit immediate can use the compare directly;
   otherwise materialize the setcc result and branch on it.  */
1864 if (mep_imm4_operand (op1, SImode))
1867 tmp = gen_reg_rtx (SImode);
1868 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1875 if (mep_imm4_operand (op1, SImode))
1878 tmp = gen_reg_rtx (SImode);
1879 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1888 if (! mep_reg_or_imm4_operand (op1, SImode))
1889 op1 = force_reg (SImode, op1);
/* LE/GT: bump a constant operand so LT/GE can be used instead.  */
1894 if (GET_CODE (op1) == CONST_INT
1895 && INTVAL (op1) != 0x7fffffff)
1897 op1 = GEN_INT (INTVAL (op1) + 1);
1898 code = (code == LE ? LT : GE);
1902 tmp = gen_reg_rtx (SImode);
1903 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1905 code = (code == LE ? EQ : NE);
/* Unsigned comparisons: reduce to LTU/GTU/LEU/GEU setcc forms,
   trying each formulation the target can expand.  */
1911 if (op1 == const1_rtx)
1918 tmp = gen_reg_rtx (SImode);
1919 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1926 tmp = gen_reg_rtx (SImode);
1927 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1929 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1938 tmp = gen_reg_rtx (SImode);
1939 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1940 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1947 tmp = gen_reg_rtx (SImode);
1948 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1950 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1962 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
/* Return the assembler template for a conditional branch insn:
   register compare -> bne/beq, zero immediate -> bnez/beqz (not in
   VLIW functions), other immediates -> bnei/beqi.  NE selects the
   not-equal variant.
   NOTE(review): INTVAL is applied to operands[1] without a visible
   CONST_INT check -- presumably the insn predicate guarantees it is a
   register or constant; confirm against the pattern's predicate.  */
1966 mep_emit_cbranch (rtx *operands, int ne)
1968 if (GET_CODE (operands[1]) == REG)
1969 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1970 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1971 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1973 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
/* Expand a call or call_value pattern.  RETURNS_VALUE is the operand
   offset (1 for call_value, 0 for call).  Forces a non-direct call
   address into a register and emits the internal call insn, passing
   $tp and $gp so the compiler knows they are live across the call.  */
1977 mep_expand_call (rtx *operands, int returns_value)
1979 rtx addr = operands[returns_value];
1980 rtx tp = mep_tp_rtx ();
1981 rtx gp = mep_gp_rtx ();
1983 gcc_assert (GET_CODE (addr) == MEM);
1985 addr = XEXP (addr, 0);
1987 if (! mep_call_address_operand (addr, VOIDmode))
1988 addr = force_reg (SImode, addr);
1990 if (! operands[returns_value+2])
1991 operands[returns_value+2] = const0_rtx;
1994 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
1995 operands[3], tp, gp));
1997 emit_call_insn (gen_call_internal (addr, operands[1],
1998 operands[2], tp, gp));
2001 /* Aliasing Support. */
2003 /* If X is a machine specific address (i.e. a symbol or label being
2004 referenced as a displacement from the GOT implemented using an
2005 UNSPEC), then return the base term. Otherwise return X. */
/* Used by alias analysis: peel ($tp + CONST(UNSPEC(sym))) or
   ($gp + CONST(UNSPEC(sym))) down to the underlying symbol so two
   TP-/GP-relative references to the same symbol alias correctly.  */
2008 mep_find_base_term (rtx x)
2013 if (GET_CODE (x) != PLUS)
2018 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
2019 && base == mep_tp_rtx ())
2021 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
2022 && base == mep_gp_rtx ())
2027 if (GET_CODE (term) != CONST)
2029 term = XEXP (term, 0);
2031 if (GET_CODE (term) != UNSPEC
2032 || XINT (term, 1) != unspec)
2035 return XVECEXP (term, 0, 0);
2038 /* Reload Support. */
2040 /* Return true if the registers in CLASS cannot represent the change from
2041 modes FROM to TO. */
2044 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2045 enum reg_class regclass)
2050 /* 64-bit COP regs must remain 64-bit COP regs. */
/* Reject any mode change in/out of a sub-64-bit mode on CR_REGS when
   the coprocessor registers are 64 bits wide.  */
2051 if (TARGET_64BIT_CR_REGS
2052 && (regclass == CR_REGS
2053 || regclass == LOADABLE_CR_REGS)
2054 && (GET_MODE_SIZE (to) < 8
2055 || GET_MODE_SIZE (from) < 8))
2061 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
/* True iff X (looking through SUBREGs) is a general (core) register.  */
2064 mep_general_reg (rtx x)
2066 while (GET_CODE (x) == SUBREG)
2068 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
/* True iff X (looking through SUBREGs) is a hard register that is NOT
   a general register -- i.e. a control or coprocessor register.  */
2072 mep_nongeneral_reg (rtx x)
2074 while (GET_CODE (x) == SUBREG)
2076 return (GET_CODE (x) == REG
2077 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
/* True iff X (looking through SUBREGs) is a coprocessor register.  */
2081 mep_general_copro_reg (rtx x)
2083 while (GET_CODE (x) == SUBREG)
2085 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
/* True iff X (looking through SUBREGs) is not a hard register --
   either a non-REG rtx or a pseudo.  */
2089 mep_nonregister (rtx x)
2091 while (GET_CODE (x) == SUBREG)
2093 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2096 #define DEBUG_RELOAD 0
2098 /* Return the secondary reload class needed for moving value X to or
2099 from a register in coprocessor register class CLASS. */
2101 static enum reg_class
2102 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2104 if (mep_general_reg (x))
2105 /* We can do the move directly if mep_have_core_copro_moves_p,
2106 otherwise we need to go through memory. Either way, no secondary
2107 register is needed. */
2110 if (mep_general_copro_reg (x))
2112 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2113 if (mep_have_copro_copro_moves_p)
2116 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2117 if (mep_have_core_copro_moves_p)
2118 return GENERAL_REGS;
2120 /* Otherwise we need to do it through memory. No secondary
2121 register is needed. */
/* Memory operand: directly loadable only into LOADABLE_CR_REGS and
   only when the 'U' constraint (a valid coprocessor memory address)
   is satisfied.  */
2125 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2126 && constraint_satisfied_p (x, CONSTRAINT_U))
2127 /* X is a memory value that we can access directly. */
2130 /* We have to move X into a GPR first and then copy it to
2131 the coprocessor register. The move from the GPR to the
2132 coprocessor might be done directly or through memory,
2133 depending on mep_have_core_copro_moves_p. */
2134 return GENERAL_REGS;
2137 /* Copying X to register in RCLASS. */
/* SECONDARY_INPUT_RELOAD_CLASS: a GPR is needed as intermediary when
   loading into a coprocessor class (delegated to the copro helper) or
   when a non-general class receives a non-register/non-general
   operand.  */
2140 mep_secondary_input_reload_class (enum reg_class rclass,
2141 enum machine_mode mode ATTRIBUTE_UNUSED,
2147 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2151 if (reg_class_subset_p (rclass, CR_REGS))
2152 rv = mep_secondary_copro_reload_class (rclass, x);
2153 else if (MEP_NONGENERAL_CLASS (rclass)
2154 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2158 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2160 return (enum reg_class) rv;
2163 /* Copying register in RCLASS to X. */
/* SECONDARY_OUTPUT_RELOAD_CLASS: mirror image of the input case; the
   same rules apply when storing from RCLASS out to X.  */
2166 mep_secondary_output_reload_class (enum reg_class rclass,
2167 enum machine_mode mode ATTRIBUTE_UNUSED,
2173 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2177 if (reg_class_subset_p (rclass, CR_REGS))
2178 rv = mep_secondary_copro_reload_class (rclass, x);
2179 else if (MEP_NONGENERAL_CLASS (rclass)
2180 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2184 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2187 return (enum reg_class) rv;
2190 /* Implement SECONDARY_MEMORY_NEEDED. */
/* A stack temporary is required for CR<->GPR moves when the core
   cannot move to/from the coprocessor directly, and for CR<->CR moves
   when copro-to-copro moves are unavailable.  */
2193 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2194 enum machine_mode mode ATTRIBUTE_UNUSED)
2196 if (!mep_have_core_copro_moves_p)
2198 if (reg_classes_intersect_p (rclass1, CR_REGS)
2199 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2201 if (reg_classes_intersect_p (rclass2, CR_REGS)
2202 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2204 if (!mep_have_copro_copro_moves_p
2205 && reg_classes_intersect_p (rclass1, CR_REGS)
2206 && reg_classes_intersect_p (rclass2, CR_REGS))
/* Expand a secondary reload.  Classifies each operand as far-section
   ('f') and/or non-general register and dispatches on a two-digit code
   WHICH (tens digit = operand 0, ones digit = operand 1; 2 = far,
   1 = control/copro reg, 0 = plain).  operands[2] is the scratch.
   NOTE(review): the `case 00:`/`case 01:`/`case 02:` labels are octal
   literals -- harmless here because octal 0/1/2 equal decimal, but the
   mixed bases are easy to misread.  */
2213 mep_expand_reload (rtx *operands, enum machine_mode mode)
2215 /* There are three cases for each direction:
2220 int s0 = mep_section_tag (operands[0]) == 'f';
2221 int s1 = mep_section_tag (operands[1]) == 'f';
2222 int c0 = mep_nongeneral_reg (operands[0]);
2223 int c1 = mep_nongeneral_reg (operands[1]);
2224 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2227 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2228 debug_rtx (operands[0]);
2229 debug_rtx (operands[1]);
2234 case 00: /* Don't know why this gets here. */
2235 case 02: /* general = far */
2236 emit_move_insn (operands[0], operands[1]);
2239 case 10: /* cr = mem */
2240 case 11: /* cr = cr */
2241 case 01: /* mem = cr */
2242 case 12: /* cr = far */
2243 emit_move_insn (operands[2], operands[1]);
2244 emit_move_insn (operands[0], operands[2]);
2247 case 20: /* far = general */
2248 emit_move_insn (operands[2], XEXP (operands[1], 0));
2249 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2252 case 21: /* far = cr */
2253 case 22: /* far = far */
2255 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2256 which, mode_name[mode]);
2257 debug_rtx (operands[0]);
2258 debug_rtx (operands[1]);
2263 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2264 can be moved directly into registers 0 to 7, but not into the rest.
2265 If so, and if the required class includes registers 0 to 7, restrict
2266 it to those registers. */
2269 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2271 switch (GET_CODE (x))
/* Constants between 0x10000 and 0x01000000 whose low 16 bits are
   nonzero need the short-register forms; likewise non-far symbols.  */
2274 if (INTVAL (x) >= 0x10000
2275 && INTVAL (x) < 0x01000000
2276 && (INTVAL (x) & 0xffff) != 0
2277 && reg_class_subset_p (TPREL_REGS, rclass))
2278 rclass = TPREL_REGS;
2284 if (mep_section_tag (x) != 'f'
2285 && reg_class_subset_p (TPREL_REGS, rclass))
2286 rclass = TPREL_REGS;
2295 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2296 moves, 4 for direct double-register moves, and 1000 for anything
2297 that requires a temporary register or temporary stack slot. */
2300 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
2302 if (mep_have_copro_copro_moves_p
2303 && reg_class_subset_p (from, CR_REGS)
2304 && reg_class_subset_p (to, CR_REGS))
2306 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2310 if (reg_class_subset_p (from, CR_REGS)
2311 && reg_class_subset_p (to, CR_REGS))
2313 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2317 if (reg_class_subset_p (from, CR_REGS)
2318 || reg_class_subset_p (to, CR_REGS))
2320 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2324 if (mep_secondary_memory_needed (from, to, mode))
2326 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2329 if (GET_MODE_SIZE (mode) > 4)
2336 /* Functions to save and restore machine-specific function data. */
/* init_machine_status hook: allocate a zeroed, GC-tracked
   machine_function for the current function.  */
2338 static struct machine_function *
2339 mep_init_machine_status (void)
2341 return ggc_alloc_cleared_machine_function ()
/* ALLOCATE_INITIAL_VALUE hook: suggest a stack save slot (relative to
   the arg pointer) for a hard register's entry value, allocating a new
   4-byte slot on first request.  $tp/$gp are excluded in interrupt
   functions because the prologue reloads them rather than preserving
   the caller's values.  */
2345 mep_allocate_initial_value (rtx reg)
2349 if (GET_CODE (reg) != REG)
2352 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2355 /* In interrupt functions, the "initial" values of $gp and $tp are
2356 provided by the prologue. They are not necessarily the same as
2357 the values that the caller was using. */
2358 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2359 if (mep_interrupt_p ())
2362 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2364 cfun->machine->reg_save_size += 4;
2365 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2368 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2369 return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
/* Return the return address for COUNT frames back; only the current
   frame (via the entry value of $lp) is supported.  The two following
   get_hard_reg_initial_val calls belong to the adjacent mep_tp_rtx /
   mep_gp_rtx helpers, which return the entry values of $tp and $gp.  */
2373 mep_return_addr_rtx (int count)
2378 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2384 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2390 return get_hard_reg_initial_val (Pmode, GP_REGNO);
/* True iff the current function has the "interrupt" attribute.  The
   result is cached in cfun->machine->interrupt_handler (0 = unknown,
   1 = no, 2 = yes) because resource.c queries EPILOGUE_USES before
   cfun is fully set up.  */
2394 mep_interrupt_p (void)
2396 if (cfun->machine->interrupt_handler == 0)
2398 int interrupt_handler
2399 = (lookup_attribute ("interrupt",
2400 DECL_ATTRIBUTES (current_function_decl))
2402 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2404 return cfun->machine->interrupt_handler == 2;
/* True iff the current function has the "disinterrupt" attribute,
   cached the same way as mep_interrupt_p (0/1/2 tri-state).  */
2408 mep_disinterrupt_p (void)
2410 if (cfun->machine->disable_interrupts == 0)
2412 int disable_interrupts
2413 = (lookup_attribute ("disinterrupt",
2414 DECL_ATTRIBUTES (current_function_decl))
2416 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2418 return cfun->machine->disable_interrupts == 2;
2422 /* Frame/Epilog/Prolog Related. */
/* Like reg_set_p, but ignores CALL_INSNs and treats a no-op
   self-assignment (reg = same reg) as not a set.  */
2425 mep_reg_set_p (rtx reg, rtx insn)
2427 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2430 if (FIND_REG_INC_NOTE (insn, reg))
2432 insn = PATTERN (insn);
2435 if (GET_CODE (insn) == SET
2436 && GET_CODE (XEXP (insn, 0)) == REG
2437 && GET_CODE (XEXP (insn, 1)) == REG
2438 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2441 return set_of (reg, insn) != NULL_RTX;
2445 #define MEP_SAVES_UNKNOWN 0
2446 #define MEP_SAVES_YES 1
2447 #define MEP_SAVES_MAYBE 2
2448 #define MEP_SAVES_NO 3
/* True iff hard register REGNO is written anywhere in the current
   function's insn stream.  In interrupt handlers, any live register
   counts; $lp also counts when profiling implicitly clobbers it.  */
2451 mep_reg_set_in_function (int regno)
2455 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2458 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2461 push_topmost_sequence ();
2462 insn = get_insns ();
2463 pop_topmost_sequence ();
2468 reg = gen_rtx_REG (SImode, regno);
2470 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2471 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
/* True iff the function contains a basic (operand-less) asm statement,
   which could clobber anything.  Cached in
   cfun->machine->asms_without_operands (0 = unknown, 1 = no, 2 = yes).  */
2477 mep_asm_without_operands_p (void)
2479 if (cfun->machine->asms_without_operands == 0)
2483 push_topmost_sequence ();
2484 insn = get_insns ();
2485 pop_topmost_sequence ();
2487 cfun->machine->asms_without_operands = 1;
2491 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2493 cfun->machine->asms_without_operands = 2;
2496 insn = NEXT_INSN (insn);
2500 return cfun->machine->asms_without_operands == 2;
2503 /* Interrupt functions save/restore every call-preserved register, and
2504 any call-used register it uses (or all if it calls any function,
2505 since they may get clobbered there too). Here we check to see
2506 which call-used registers need saving. */
2508 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2509 && (r == FIRST_CCR_REGNO + 1 \
2510 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2511 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
/* True iff register R must be saved by an interrupt handler's
   prologue.  Non-interrupt functions never save here; the scratch
   REGSAVE_CONTROL_TEMP register(s) are always excluded.  */
2514 mep_interrupt_saved_reg (int r)
2516 if (!mep_interrupt_p ())
2518 if (r == REGSAVE_CONTROL_TEMP
2519 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2521 if (mep_asm_without_operands_p ()
2523 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2524 || IVC2_ISAVED_REG (r)))
2526 if (!current_function_is_leaf)
2527 /* Function calls mean we need to save $lp. */
2528 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2530 if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
2531 /* The interrupt handler might use these registers for repeat blocks,
2532 or it might call a function that does so. */
2533 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2535 if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2537 /* Functions we call might clobber these. */
2538 if (call_used_regs[r] && !fixed_regs[r])
2540 /* Additional registers that need to be saved for IVC2. */
2541 if (IVC2_ISAVED_REG (r))
/* True iff register R gets a save slot in this function's frame.  The
   decision is cached in cfun->machine->reg_saved[] once the frame
   layout is locked; reasons include an already-assigned slot, $lp
   under profiling, the frame pointer, live call-saved registers,
   EH-return data registers (10/11), and interrupt-handler saves.  */
2548 mep_call_saves_register (int r)
2550 if (! cfun->machine->frame_locked)
2552 int rv = MEP_SAVES_NO;
2554 if (cfun->machine->reg_save_slot[r])
2556 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2558 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2560 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2562 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2563 /* We need these to have stack slots so that they can be set during
2566 else if (mep_interrupt_saved_reg (r))
2568 cfun->machine->reg_saved[r] = rv;
2570 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2573 /* Return true if epilogue uses register REGNO. */
2576 mep_epilogue_uses (int regno)
2578 /* Since $lp is a call-saved register, the generic code will normally
2579 mark it used in the epilogue if it needs to be saved and restored.
2580 However, when profiling is enabled, the profiling code will implicitly
2581 clobber $11. This case has to be handled specially both here and in
2582 mep_call_saves_register. */
2583 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2585 /* Interrupt functions save/restore pretty much everything. */
2586 return (reload_completed && mep_interrupt_saved_reg (regno));
/* Size in bytes of a save slot for register REGNO: 8 for 64-bit
   coprocessor registers, otherwise 4.  */
2590 mep_reg_size (int regno)
2592 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2597 /* Worker function for TARGET_CAN_ELIMINATE. */
/* AP->SP elimination is only possible when no frame pointer is
   needed; other eliminations are allowed unconditionally.  */
2600 mep_can_eliminate (const int from, const int to)
2602 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2603 ? ! frame_pointer_needed
/* Compute the offset between two eliminable registers (AP, FP, SP).
   As a side effect this lays out the frame: it sums the save-area
   size over all saved registers and records the 8-byte alignment
   filler amounts in cfun->machine->{regsave,frame}_filler.  */
2608 mep_elimination_offset (int from, int to)
2612 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2615 if (!cfun->machine->frame_locked)
2616 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2618 /* We don't count arg_regs_to_save in the arg pointer offset, because
2619 gcc thinks the arg pointer has moved along with the saved regs.
2620 However, we do count it when we adjust $sp in the prologue. */
2622 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2623 if (mep_call_saves_register (i))
2624 reg_save_size += mep_reg_size (i);
2626 if (reg_save_size % 8)
2627 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2629 cfun->machine->regsave_filler = 0;
2631 /* This is what our total stack adjustment looks like. */
2632 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2635 cfun->machine->frame_filler = 8 - (total_size % 8);
2637 cfun->machine->frame_filler = 0;
2640 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2641 return reg_save_size + cfun->machine->regsave_filler;
2643 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2644 return cfun->machine->frame_filler + frame_size;
2646 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2647 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
/* (Fragment of the F() helper macro: marks an insn frame-related.)  */
2655 RTX_FRAME_RELATED_P (x) = 1;
2659 /* Since the prologue/epilogue code is generated after optimization,
2660 we can't rely on gcc to split constants for us. So, this code
2661 captures all the ways to add a constant to a register in one logic
2662 chunk, including optimizing away insns we just don't need. This
2663 makes the prolog/epilog code easier to follow. */
/* Emit DEST = SRC + VALUE, choosing the cheapest form: nothing for a
   no-op, a plain move for VALUE == 0, add3 for 16-bit immediates, or
   a mov-high/or-low into REGSAVE_CONTROL_TEMP plus an add for large
   constants.  MARK_FRAME tags the insns frame-related for CFI, with
   an explicit REG_FRAME_RELATED_EXPR note in the temp-register case
   since the multi-insn sequence isn't self-describing.  */
2665 add_constant (int dest, int src, int value, int mark_frame)
2670 if (src == dest && value == 0)
2675 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2676 gen_rtx_REG (SImode, src));
2678 RTX_FRAME_RELATED_P(insn) = 1;
2682 if (value >= -32768 && value <= 32767)
2684 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2685 gen_rtx_REG (SImode, src),
2688 RTX_FRAME_RELATED_P(insn) = 1;
2692 /* Big constant, need to use a temp register. We use
2693 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2694 area is always small enough to directly add to). */
2696 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2697 lo = value & 0xffff;
2699 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2704 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2705 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2709 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2710 gen_rtx_REG (SImode, src),
2711 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)))
2714 RTX_FRAME_RELATED_P(insn) = 1;
2715 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2716 gen_rtx_SET (SImode,
2717 gen_rtx_REG (SImode, dest),
2718 gen_rtx_PLUS (SImode,
2719 gen_rtx_REG (SImode, dest),
2724 /* Move SRC to DEST. Mark the move as being potentially dead if
/* ... MAYBE_DEAD_P is set; the REG_MAYBE_DEAD note lets later passes
   delete the save if the register's initial value is never needed.
   Returns the emitted insn.  */
2728 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2730 rtx insn = emit_move_insn (dest, src);
2733 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2738 /* Used for interrupt functions, which can't assume that $tp and $gp
2739 contain the correct pointers. */
/* Reload hard register REGNO with the address of SYMBOL using the
   movh/add (top/bottom) symbol pair; skipped when the register is
   dead in a leaf function.  */
2742 mep_reload_pointer (int regno, const char *symbol)
2746 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2749 reg = gen_rtx_REG (SImode, regno);
2750 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2751 emit_insn (gen_movsi_topsym_s (reg, sym));
2752 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2755 /* Assign save slots for any register not already saved. DImode
2756 registers go at the end of the reg save area; the rest go at the
2757 beginning. This is for alignment purposes. Returns true if a frame
2758 is really needed. */
2760 mep_assign_save_slots (int reg_save_size)
2762 bool really_need_stack_frame = false;
2766 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2767 if (mep_call_saves_register(i))
2769 int regsize = mep_reg_size (i);
/* $tp/$gp/$lp only force a frame when actually modified; anything
   else that needs saving does.  */
2771 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2772 || mep_reg_set_in_function (i))
2773 really_need_stack_frame = true;
2775 if (cfun->machine->reg_save_slot[i])
2780 cfun->machine->reg_save_size += regsize;
2781 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2785 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
/* Layout is final from here on; mep_call_saves_register now reads
   the cache instead of recomputing.  */
2789 cfun->machine->frame_locked = 1;
2790 return really_need_stack_frame;
/* Emit the function prologue: disable interrupts if requested, lay out
   the frame, adjust $sp, store every register that needs saving into
   its slot (with CFI notes), set up the frame pointer, and in
   interrupt handlers reload $gp/$tp from their well-known symbols.  */
2794 mep_expand_prologue (void)
2796 int i, rss, sp_offset = 0;
2799 int really_need_stack_frame;
2801 /* We must not allow register renaming in interrupt functions,
2802 because that invalidates the correctness of the set of call-used
2803 registers we're going to save/restore. */
2804 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2806 if (mep_disinterrupt_p ())
2807 emit_insn (gen_mep_disable_int ());
2809 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2811 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2812 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2813 really_need_stack_frame = frame_size;
2815 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
/* Fold the local-frame adjustment into the initial $sp decrement when
   the combined offset still fits the short (128-byte) form.  */
2817 sp_offset = reg_save_size;
2818 if (sp_offset + frame_size < 128)
2819 sp_offset += frame_size ;
2821 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2823 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2824 if (mep_call_saves_register(i))
2828 enum machine_mode rmode;
2830 rss = cfun->machine->reg_save_slot[i];
/* $tp/$gp/$lp that are never written (outside interrupts) keep their
   entry values; no store needed.  */
2832 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2833 && (!mep_reg_set_in_function (i)
2834 && !mep_interrupt_p ()))
2837 if (mep_reg_size (i) == 8)
2842 /* If there is a pseudo associated with this register's initial value,
2843 reload might have already spilt it to the stack slot suggested by
2844 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2846 mem = gen_rtx_MEM (rmode,
2847 plus_constant (stack_pointer_rtx, sp_offset - rss));
2848 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2850 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2851 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2852 else if (rmode == DImode)
/* 64-bit coprocessor register: move both 32-bit halves through the
   REGSAVE_CONTROL_TEMP pair and store them word by word.  */
2855 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2857 mem = gen_rtx_MEM (SImode,
2858 plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2860 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2861 gen_rtx_REG (SImode, i),
2863 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2864 gen_rtx_ZERO_EXTRACT (SImode,
2865 gen_rtx_REG (DImode, i),
2869 insn = maybe_dead_move (mem,
2870 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2872 RTX_FRAME_RELATED_P (insn) = 1;
2874 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2875 gen_rtx_SET (VOIDmode,
2877 gen_rtx_REG (rmode, i)));
2878 mem = gen_rtx_MEM (SImode,
2879 plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2880 insn = maybe_dead_move (mem,
2881 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
/* Other control registers: copy through REGSAVE_CONTROL_TEMP since
   they cannot be stored to memory directly.  */
2887 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2888 gen_rtx_REG (rmode, i),
2890 insn = maybe_dead_move (mem,
2891 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2893 RTX_FRAME_RELATED_P (insn) = 1;
2895 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2896 gen_rtx_SET (VOIDmode,
2898 gen_rtx_REG (rmode, i)));
2902 if (frame_pointer_needed)
2904 /* We've already adjusted down by sp_offset. Total $sp change
2905 is reg_save_size + frame_size. We want a net change here of
2906 just reg_save_size. */
2907 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2910 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2912 if (mep_interrupt_p ())
2914 mep_reload_pointer(GP_REGNO, "__sdabase");
2915 mep_reload_pointer(TP_REGNO, "__tpbase");
/* ASM_DECLARE_FUNCTION worker: pick $fp vs $8 as the name of the frame
   pointer register, and (unless debug info is being emitted) print a
   human-readable frame-layout map as assembler comments -- save slots
   sorted by stack position, alignment fillers, locals and outgoing
   argument space.  Emits no insns, only text.  */
2920 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2922 int local = hwi_local;
2923 int frame_size = local + crtl->outgoing_args_size;
2928 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2930 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2931 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2932 sp_offset = reg_save_size + frame_size;
2934 ffill = cfun->machine->frame_filler;
2936 if (cfun->machine->mep_frame_pointer_needed)
2937 reg_names[FP_REGNO] = "$fp";
2939 reg_names[FP_REGNO] = "$8";
/* With debug info disabled, emit only a one-line frame summary.  */
2944 if (debug_info_level == DINFO_LEVEL_NONE)
2946 fprintf (file, "\t# frame: %d", sp_offset);
2948 fprintf (file, " %d regs", reg_save_size);
2950 fprintf (file, " %d locals", local);
2951 if (crtl->outgoing_args_size)
2952 fprintf (file, " %d args", crtl->outgoing_args_size);
2953 fprintf (file, "\n");
2957 fprintf (file, "\t#\n");
2958 fprintf (file, "\t# Initial Frame Information:\n");
2959 if (sp_offset || !frame_pointer_needed)
2960 fprintf (file, "\t# Entry ---------- 0\n");
2962 /* Sort registers by save slots, so they're printed in the order
2963 they appear in memory, not the order they're saved in. */
2964 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2966 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2967 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2968 if (cfun->machine->reg_save_slot[slot_map[si]]
2969 > cfun->machine->reg_save_slot[slot_map[sj]])
2971 int t = slot_map[si];
2972 slot_map[si] = slot_map[sj];
2977 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2980 int r = slot_map[i];
2981 int rss = cfun->machine->reg_save_slot[r];
2983 if (!mep_call_saves_register (r))
/* Mirror the prologue's skip of unmodified $tp/$gp/$lp.  */
2986 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2987 && (!mep_reg_set_in_function (r)
2988 && !mep_interrupt_p ()))
2991 rsize = mep_reg_size(r);
2992 skip = rss - (sp+rsize);
2994 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2995 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2996 rsize, reg_names[r], sp_offset - rss);
3000 skip = reg_save_size - sp;
3002 fprintf (file, "\t# %3d bytes for alignment\n", skip);
3004 if (frame_pointer_needed)
3005 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
3007 fprintf (file, "\t# %3d bytes for local vars\n", local);
3009 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
3010 if (crtl->outgoing_args_size)
3011 fprintf (file, "\t# %3d bytes for outgoing args\n",
3012 crtl->outgoing_args_size);
3013 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
3014 fprintf (file, "\t#\n");
/* Mode flags for mep_expand_epilogue, set temporarily by the eh-return
   and sibcall entry points below: mep_prevent_lp_restore makes the
   epilogue leave $lp alone; mep_sibcall_epilogue suppresses the final
   return jump.  */
3018 static int mep_prevent_lp_restore = 0;
3019 static int mep_sibcall_epilogue = 0;
/* Emit RTL for the function epilogue: restore call-saved registers,
   deallocate the frame, and emit the appropriate return (eh-return
   sequence, "reti" for interrupt handlers, or an indirect jump
   through the restored return address).
   NOTE(review): lines are missing from this listing (braces, else
   branches); comments describe only what is visible.  */
3022 mep_expand_epilogue (void)
3024 int i, sp_offset = 0;
3025 int reg_save_size = 0;
3027 int lp_temp = LP_REGNO, lp_slot = -1;
3028 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
3029 int interrupt_handler = mep_interrupt_p ();
3031 if (profile_arc_flag == 2)
3032 emit_insn (gen_mep_bb_trace_ret ());
3034 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
3035 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
3037 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
3039 if (frame_pointer_needed)
3041 /* If we have a frame pointer, we won't have a reliable stack
3042 pointer (alloca, you know), so rebase SP from FP */
3043 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
3044 gen_rtx_REG (SImode, FP_REGNO));
3045 sp_offset = reg_save_size;
3049 /* SP is right under our local variable space. Adjust it if
3051 sp_offset = reg_save_size + frame_size;
/* Large offsets can't reach the save slots directly; pop the locals
   first so the remaining displacement fits.  */
3052 if (sp_offset >= 128)
3054 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
3055 sp_offset -= frame_size;
3059 /* This is backwards so that we restore the control and coprocessor
3060 registers before the temporary registers we use to restore
3062 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
3063 if (mep_call_saves_register (i))
3065 enum machine_mode rmode;
3066 int rss = cfun->machine->reg_save_slot[i];
3068 if (mep_reg_size (i) == 8)
/* Same skip conditions as the prologue's save loop: unmodified
   $tp/$gp/$lp in non-interrupt functions were never saved.  */
3073 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
3074 && !(mep_reg_set_in_function (i) || interrupt_handler))
3076 if (mep_prevent_lp_restore && i == LP_REGNO)
3078 if (!mep_prevent_lp_restore
3079 && !interrupt_handler
3080 && (i == 10 || i == 11))
/* Core and loadable control registers can be loaded directly;
   others go through REGSAVE_CONTROL_TEMP below.  */
3083 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
3084 emit_move_insn (gen_rtx_REG (rmode, i),
3086 plus_constant (stack_pointer_rtx,
3090 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3091 /* Defer this one so we can jump indirect rather than
3092 copying the RA to $lp and "ret". EH epilogues
3093 automatically skip this anyway. */
3094 lp_slot = sp_offset-rss;
3097 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3099 plus_constant (stack_pointer_rtx,
3101 emit_move_insn (gen_rtx_REG (rmode, i),
3102 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3108 /* Restore this one last so we know it will be in the temp
3109 register when we return by jumping indirectly via the temp. */
3110 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3111 gen_rtx_MEM (SImode,
3112 plus_constant (stack_pointer_rtx,
3114 lp_temp = REGSAVE_CONTROL_TEMP;
/* Pop the rest of the frame.  */
3118 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
/* eh_return: additionally apply the EH stack adjustment recorded by
   mep_emit_eh_epilogue.  */
3120 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3121 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3122 gen_rtx_REG (SImode, SP_REGNO),
3123 cfun->machine->eh_stack_adjust));
/* Sibcall epilogues stop here; the sibling call itself returns.  */
3125 if (mep_sibcall_epilogue)
/* Re-enable interrupts before leaving a disinterrupt function.  */
3128 if (mep_disinterrupt_p ())
3129 emit_insn (gen_mep_enable_int ());
3131 if (mep_prevent_lp_restore)
3133 emit_jump_insn (gen_eh_return_internal ())
3136 else if (interrupt_handler)
3137 emit_jump_insn (gen_mep_reti ());
3139 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
/* Expand the eh_return pattern: force the handler address into $lp if
   operands[0] is not already that register, then emit the EH epilogue.  */
3143 mep_expand_eh_return (rtx *operands)
3145 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3147 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3148 emit_move_insn (ra, operands[0]);
3152 emit_insn (gen_eh_epilogue (operands[0]));
/* Emit the epilogue for an EH return: record the stack-adjustment
   register ($0) for the epilogue to apply, and run the normal epilogue
   with the $lp restore suppressed.  */
3156 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3158 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3159 mep_prevent_lp_restore = 1;
3160 mep_expand_epilogue ();
3161 mep_prevent_lp_restore = 0;
/* Epilogue for sibling calls: identical to the normal epilogue except
   the final return jump is suppressed.  */
3165 mep_expand_sibcall_epilogue (void)
3167 mep_sibcall_epilogue = 1;
3168 mep_expand_epilogue ();
3169 mep_sibcall_epilogue = 0;
/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  Rejects callees in a
   far-call ('f') section and any call made from an interrupt or
   disinterrupt function.  NOTE(review): the return statements were
   dropped from this listing.  */
3173 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3178 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3181 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3182 if (mep_interrupt_p () || mep_disinterrupt_p ())
/* The EH stack-adjustment value lives in $10 -- presumably backing
   EH_RETURN_STACKADJ_RTX; confirm against mep.h.  */
3189 mep_return_stackadj_rtx (void)
3191 return gen_rtx_REG (SImode, 10);
/* The EH handler address is passed in $lp.  */
3195 mep_return_handler_rtx (void)
3197 return gen_rtx_REG (SImode, LP_REGNO);
/* Emit the profiling sequence as literal assembly text: save $0 and
   $lp on the stack, call __mep_mcount, then restore both and pop.  */
3201 mep_function_profiler (FILE *file)
3203 /* Always right at the beginning of the function. */
3204 fprintf (file, "\t# mep function profiler\n");
3205 fprintf (file, "\tadd\t$sp, -8\n");
3206 fprintf (file, "\tsw\t$0, ($sp)\n");
3207 fprintf (file, "\tldc\t$0, $lp\n");
3208 fprintf (file, "\tsw\t$0, 4($sp)\n");
3209 fprintf (file, "\tbsr\t__mep_mcount\n");
3210 fprintf (file, "\tlw\t$0, 4($sp)\n");
3211 fprintf (file, "\tstc\t$0, $lp\n");
3212 fprintf (file, "\tlw\t$0, ($sp)\n");
3213 fprintf (file, "\tadd\t$sp, 8\n\n");
/* Same save/call/restore pattern for basic-block profiling: calls
   __bb_trace_ret at the end of the function.  */
3217 mep_emit_bb_trace_ret (void)
3219 fprintf (asm_out_file, "\t# end of block profiling\n");
3220 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3221 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3222 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3223 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3224 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3225 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3226 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3227 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3228 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3235 /* Operand Printing. */
/* Implement the address-printing hook: strip one level of MEM and
   print the remaining register.  Only plain register addresses are
   expected here; anything else trips the assert.  */
3238 mep_print_operand_address (FILE *stream, rtx address)
3240 if (GET_CODE (address) == MEM)
3241 address = XEXP (address, 0);
3243 /* cf: gcc.dg/asm-4.c. */
3244 gcc_assert (GET_CODE (address) == REG);
3246 mep_print_operand (stream, address, 0);
/* Operand-pattern conversion table driven by mep_print_operand below.
   Each entry is { code, pattern, format }: "code" is the %-letter (0
   for a plain operand), "pattern" encodes the operand's RTL shape,
   and "format" is the output template, where digits index the
   sub-expressions captured in patternr[] and "\\" escapes a literal
   character.  NOTE(review): many table rows are missing from this
   listing.  */
3252 const char *pattern;
3255 const conversions[] =
3258 { 0, "m+ri", "3(2)" },
3262 { 0, "mLrs", "%lo(3)(2)" },
3263 { 0, "mLr+si", "%lo(4+5)(2)" },
3264 { 0, "m+ru2s", "%tpoff(5)(2)" },
3265 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3266 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3267 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3268 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3269 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3271 { 0, "m+si", "(2+3)" },
3272 { 0, "m+li", "(2+3)" },
3275 { 0, "+si", "1+2" },
3276 { 0, "+u2si", "%tpoff(3+4)" },
3277 { 0, "+u3si", "%sdaoff(3+4)" },
3283 { 'h', "Hs", "%hi(1)" },
3285 { 'I', "u2s", "%tpoff(2)" },
3286 { 'I', "u3s", "%sdaoff(2)" },
3287 { 'I', "+u2si", "%tpoff(3+4)" },
3288 { 'I', "+u3si", "%sdaoff(3+4)" },
3290 { 'P', "mr", "(1\\+),\\0" },
3296 unique_bit_in (HOST_WIDE_INT i)
3300 case 0x01: case 0xfe: return 0;
3301 case 0x02: case 0xfd: return 1;
3302 case 0x04: case 0xfb: return 2;
3303 case 0x08: case 0xf7: return 3;
3304 case 0x10: case 0x7f: return 4;
3305 case 0x20: case 0xbf: return 5;
3306 case 0x40: case 0xdf: return 6;
3307 case 0x80: case 0xef: return 7;
3314 bit_size_for_clip (HOST_WIDE_INT i)
3318 for (rv = 0; rv < 31; rv ++)
3319 if (((HOST_WIDE_INT) 1 << rv) > i)
3324 /* Print an operand to a assembler instruction. */
/* Implement the operand-printing hook: write the assembly text for
   operand X, modified by letter CODE, to FILE.  Codes handled inline
   include the cmov mnemonic lookup, 'L' (clr/set/not bit ops) and the
   cmov second-operand case; all remaining codes go through the
   conversions[] pattern table above.
   NOTE(review): case labels and braces are missing from this listing;
   consult the full source for exact control flow.  */
3327 mep_print_operand (FILE *file, rtx x, int code)
3330 const char *real_name;
3334 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3335 we're using, then skip over the "mep_" part of its name. */
3336 const struct cgen_insn *insn;
3338 if (mep_get_move_insn (mep_cmov, &insn))
3339 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3341 mep_intrinsic_unavailable (mep_cmov);
/* 'L' code: print the bit-manipulation mnemonic implied by the RTL
   operation in X.  */
3346 switch (GET_CODE (x))
3349 fputs ("clr", file);
3352 fputs ("set", file);
3355 fputs ("not", file);
3358 output_operand_lossage ("invalid %%L code");
3363 /* Print the second operand of a CR <- CR move. If we're using
3364 a two-operand instruction (i.e., a real cmov), then just print
3365 the operand normally. If we're using a "reg, reg, immediate"
3366 instruction such as caddi3, print the operand followed by a
3367 zero field. If we're using a three-register instruction,
3368 print the operand twice. */
3369 const struct cgen_insn *insn;
3371 mep_print_operand (file, x, 0);
3372 if (mep_get_move_insn (mep_cmov, &insn)
3373 && insn_data[insn->icode].n_operands == 3)
3376 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3377 mep_print_operand (file, x, 0);
3379 mep_print_operand (file, const0_rtx, 0);
/* Generic path: find the conversions[] row matching this code and
   pattern, then interpret its format string.  */
3385 for (i = 0; conversions[i].pattern; i++)
3386 if (conversions[i].code == code
3387 && strcmp(conversions[i].pattern, pattern) == 0)
3389 for (j = 0; conversions[i].format[j]; j++)
/* "\\" escapes: emit the next format character literally.  */
3390 if (conversions[i].format[j] == '\\')
3392 fputc (conversions[i].format[j+1], file);
/* Digits: emit the captured sub-expression patternr[digit],
   formatted according to its RTL code.  */
3395 else if (ISDIGIT(conversions[i].format[j]))
3397 rtx r = patternr[conversions[i].format[j] - '0'];
3398 switch (GET_CODE (r))
3401 fprintf (file, "%s", reg_names [REGNO (r)]);
3407 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3410 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3413 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3416 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3419 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
/* Large byte-aligned constants read better in hex.  */
3422 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3423 && !(INTVAL (r) & 0xff))
3424 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3426 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3429 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3430 && conversions[i].format[j+1] == 0)
3432 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3433 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3436 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3439 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3444 fprintf(file, "[const_double 0x%lx]",
3445 (unsigned long) CONST_DOUBLE_HIGH(r));
3448 real_name = targetm.strip_name_encoding (XSTR (r, 0));
3449 assemble_name (file, real_name);
3452 output_asm_label (r);
3455 fprintf (stderr, "don't know how to print this operand:");
/* '+' in the format: only emit it when the following operand is a
   non-negative constant (a negative value prints its own sign).  */
3462 if (conversions[i].format[j] == '+'
3463 && (!code || code == 'I')
3464 && ISDIGIT (conversions[i].format[j+1])
3465 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3466 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3468 fputc(conversions[i].format[j], file);
3472 if (!conversions[i].pattern)
3474 error ("unconvertible operand %c %qs", code?code:'-', pattern);
/* Called by final before each insn's text is emitted.  Prefixes a '+'
   bundle marker for jumps (BImode insns) whose slot attribute is not
   SLOTS_CORE -- see the comment below for why the scheduler's
   bundling flags on jumps can't be trusted.  */
3482 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3483 int noperands ATTRIBUTE_UNUSED)
3485 /* Despite the fact that MeP is perfectly capable of branching and
3486 doing something else in the same bundle, gcc does jump
3487 optimization *after* scheduling, so we cannot trust the bundling
3488 flags on jump instructions. */
3489 if (GET_MODE (insn) == BImode
3490 && get_attr_slots (insn) != SLOTS_CORE)
3491 fputc ('+', asm_out_file);
3494 /* Function args in registers. */
/* Implement TARGET_SETUP_INCOMING_VARARGS: record how many of the
   four argument registers remain unnamed (and so must be spilled by
   __builtin_saveregs), and report the pretend-args size at 4 bytes
   per register.  */
3497 mep_setup_incoming_varargs (cumulative_args_t cum,
3498 enum machine_mode mode ATTRIBUTE_UNUSED,
3499 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3500 int second_time ATTRIBUTE_UNUSED)
3502 int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
3505 cfun->machine->arg_regs_to_save = nsave;
3506 *pretend_size = nsave * 4;
3510 bytesize (const_tree type, enum machine_mode mode)
3512 if (mode == BLKmode)
3513 return int_size_in_bytes (type);
3514 return GET_MODE_SIZE (mode);
/* Expand __builtin_saveregs: allocate a stack buffer and copy the
   remaining unnamed core argument registers into it; for IVC2, the
   64-bit coprocessor argument registers (starting at 49) follow at
   offset 8*((ns+1)/2).  Returns the buffer's address.
   NOTE(review): lines are missing from this listing -- the two
   assign_stack_local calls are presumably the IVC2/non-IVC2 sizing
   branches; verify against the full source.  */
3518 mep_expand_builtin_saveregs (void)
3523 ns = cfun->machine->arg_regs_to_save;
3526 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3527 regbuf = assign_stack_local (SImode, bufsize, 64);
3532 regbuf = assign_stack_local (SImode, bufsize, 32);
3535 move_block_from_reg (5-ns, regbuf, ns);
3539 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3540 int ofs = 8 * ((ns+1)/2);
3542 for (i=0; i<ns; i++)
3544 int rn = (4-ns) + i + 49;
3547 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3548 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3552 return XEXP (regbuf, 0);
/* True for vector types; used to steer IVC2 coprocessor argument and
   return-value handling.  */
3555 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
/* Implement TARGET_BUILD_BUILTIN_VA_LIST: va_list is a record with
   four pointer fields -- __va_next_gp, __va_next_gp_limit,
   __va_next_cop and __va_next_stack -- consumed in that order by
   va_start/va_arg below.  */
3558 mep_build_builtin_va_list (void)
3560 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3564 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3566 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3567 get_identifier ("__va_next_gp"), ptr_type_node);
3568 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3569 get_identifier ("__va_next_gp_limit"),
3571 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3573 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3576 DECL_FIELD_CONTEXT (f_next_gp) = record;
3577 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3578 DECL_FIELD_CONTEXT (f_next_cop) = record;
3579 DECL_FIELD_CONTEXT (f_next_stack) = record;
/* Chain the fields onto the record and lay it out.  */
3581 TYPE_FIELDS (record) = f_next_gp;
3582 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3583 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3584 DECL_CHAIN (f_next_cop) = f_next_stack;
3586 layout_type (record);
/* Implement TARGET_EXPAND_BUILTIN_VA_START: initialize all four
   va_list fields.  next_gp points at the register-save buffer made by
   expand_builtin_saveregs; next_gp_limit is 4*ns bytes past it;
   next_cop follows the GP area (8-byte coprocessor slots); next_stack
   is the first overflow argument on the stack.  */
3592 mep_expand_va_start (tree valist, rtx nextarg)
3594 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3595 tree next_gp, next_gp_limit, next_cop, next_stack;
3599 ns = cfun->machine->arg_regs_to_save;
3601 f_next_gp = TYPE_FIELDS (va_list_type_node);
3602 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3603 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3604 f_next_stack = DECL_CHAIN (f_next_cop);
3606 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3608 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3609 valist, f_next_gp_limit, NULL_TREE);
3610 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3612 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3613 valist, f_next_stack, NULL_TREE);
3615 /* va_list.next_gp = expand_builtin_saveregs (); */
3616 u = make_tree (sizetype, expand_builtin_saveregs ());
3617 u = fold_convert (ptr_type_node, u);
3618 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3619 TREE_SIDE_EFFECTS (t) = 1;
3620 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3622 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3623 u = fold_build_pointer_plus_hwi (u, 4 * ns);
3624 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3625 TREE_SIDE_EFFECTS (t) = 1;
3626 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3628 u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
3629 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3630 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3631 TREE_SIDE_EFFECTS (t) = 1;
3632 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3634 /* va_list.next_stack = nextarg; */
3635 u = make_tree (ptr_type_node, nextarg);
3636 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3637 TREE_SIDE_EFFECTS (t) = 1;
3638 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Values larger than a word
   (or larger than 8 bytes for IVC2 vectors) are fetched by reference.
   Generates the three-way sequence sketched in the comment below:
   take the value from the GP save area, from the COP save area (IVC2
   vectors), or from the stack overflow area.  */
3642 mep_gimplify_va_arg_expr (tree valist, tree type,
3644 gimple_seq *post_p ATTRIBUTE_UNUSED)
3646 HOST_WIDE_INT size, rsize;
3647 bool by_reference, ivc2_vec;
3648 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3649 tree next_gp, next_gp_limit, next_cop, next_stack;
3650 tree label_sover, label_selse;
3653 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3655 size = int_size_in_bytes (type);
3656 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
/* By-reference arguments: the slot holds a pointer to the value.  */
3660 type = build_pointer_type (type);
/* Round the stack footprint up to a whole word.  */
3663 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3665 f_next_gp = TYPE_FIELDS (va_list_type_node);
3666 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3667 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3668 f_next_stack = DECL_CHAIN (f_next_cop);
3670 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3672 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3673 valist, f_next_gp_limit, NULL_TREE);
3674 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3676 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3677 valist, f_next_stack, NULL_TREE);
3679 /* if f_next_gp < f_next_gp_limit
3680 IF (VECTOR_P && IVC2)
3688 val = *f_next_stack;
3689 f_next_stack += rsize;
3693 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3694 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3695 res_addr = create_tmp_var (ptr_type_node, NULL);
/* Register area exhausted?  Jump to the stack ("selse") branch.  */
3697 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3698 unshare_expr (next_gp_limit));
3699 tmp = build3 (COND_EXPR, void_type_node, tmp,
3700 build1 (GOTO_EXPR, void_type_node,
3701 unshare_expr (label_selse)),
3703 gimplify_and_add (tmp, pre_p);
/* IVC2 vectors come from the coprocessor save area ...  */
3707 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3708 gimplify_and_add (tmp, pre_p);
/* ... everything else from the GP save area.  */
3712 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3713 gimplify_and_add (tmp, pre_p);
/* Both pointers advance in lock-step: 4 bytes of GP area, 8 of COP.  */
3716 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
3717 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3719 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
3720 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3722 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3723 gimplify_and_add (tmp, pre_p);
/* Stack branch: take the value from the overflow area and bump it.  */
3727 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3728 gimplify_and_add (tmp, pre_p);
3730 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3731 gimplify_and_add (tmp, pre_p);
3733 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
3734 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3738 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3739 gimplify_and_add (tmp, pre_p);
3741 res_addr = fold_convert (build_pointer_type (type), res_addr);
/* By-reference: one extra dereference to reach the actual value.  */
3744 res_addr = build_va_arg_indirect_ref (res_addr);
3746 return build_va_arg_indirect_ref (res_addr);
/* Implement INIT_CUMULATIVE_ARGS: reset the argument-register count
   and record whether the called function type carries the "vliw"
   attribute (consulted by mep_function_arg's VOIDmode query below).  */
3750 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3751 rtx libname ATTRIBUTE_UNUSED,
3752 tree fndecl ATTRIBUTE_UNUSED)
3756 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3762 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3763 larger than 4 bytes are passed indirectly. Return value in 0,
3764 unless bigger than 4 bytes, then the caller passes a pointer as the
3765 first arg. For varargs, we copy $1..$4 to the stack. */
/* Implement TARGET_FUNCTION_ARG per the ABI comment above.  IVC2
   vector arguments go in the coprocessor registers starting at 49.  */
3768 mep_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
3769 const_tree type ATTRIBUTE_UNUSED,
3770 bool named ATTRIBUTE_UNUSED)
3772 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3774 /* VOIDmode is a signal for the backend to pass data to the call
3775 expander via the second operand to the call pattern. We use
3776 this to determine whether to use "jsr" or "jsrv". */
3777 if (mode == VOIDmode)
3778 return GEN_INT (cum->vliw);
3780 /* If we haven't run out of argument registers, return the next. */
3783 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3784 return gen_rtx_REG (mode, cum->nregs + 49);
3786 return gen_rtx_REG (mode, cum->nregs + 1);
3789 /* Otherwise the argument goes on the stack. */
/* Implement TARGET_PASS_BY_REFERENCE: anything outside 1..8 bytes is
   passed by reference, except that IVC2 vector arguments still in
   registers are passed directly.  NOTE(review): return statements are
   missing from this listing.  */
3794 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3795 enum machine_mode mode,
3797 bool named ATTRIBUTE_UNUSED)
3799 int size = bytesize (type, mode);
3801 /* This is non-obvious, but yes, large values passed after we've run
3802 out of registers are *still* passed by reference - we put the
3803 address of the parameter on the stack, as well as putting the
3804 parameter itself elsewhere on the stack. */
3806 if (size <= 0 || size > 8)
3810 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3811 && type != NULL_TREE && VECTOR_TYPE_P (type))
/* Implement TARGET_FUNCTION_ARG_ADVANCE: every argument consumes
   exactly one register slot.  */
3817 mep_function_arg_advance (cumulative_args_t pcum,
3818 enum machine_mode mode ATTRIBUTE_UNUSED,
3819 const_tree type ATTRIBUTE_UNUSED,
3820 bool named ATTRIBUTE_UNUSED)
3822 get_cumulative_args (pcum)->nregs += 1;
/* Implement TARGET_RETURN_IN_MEMORY: values over 4 bytes (8 for IVC2
   vectors) are returned via a caller-supplied pointer.  */
3826 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3828 int size = bytesize (type, BLKmode);
3829 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3830 return size > 0 && size <= 8 ? 0 : 1;
3831 return size > 0 && size <= 4 ? 0 : 1;
/* TARGET_NARROW_VOLATILE_BITFIELD hook.  NOTE(review): the body was
   dropped from this listing; presumably it just returns false.  */
3835 mep_narrow_volatile_bitfield (void)
3841 /* Implement FUNCTION_VALUE. All values are returned in $0. */
/* ... except IVC2 vector results, which come back in coprocessor
   register 48.  */
3844 mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
3846 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3847 return gen_rtx_REG (TYPE_MODE (type), 48);
3848 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3851 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3854 mep_libcall_value (enum machine_mode mode)
3856 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3859 /* Handle pipeline hazards. */
/* Opcode classes we track for hazard detection, and their printable
   names (indexed by op_num).  */
3861 typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
3862 static const char *opnames[] = { "", "stc", "fsft", "ret" };
/* Class of the previously emitted opcode; persists across calls so a
   hazard between adjacent instructions can be detected.  */
3864 static int prev_opcode = 0;
3866 /* This isn't as optimal as it could be, because we don't know what
3867 control register the STC opcode is storing in. We only need to add
3868 the nop if it's the relevant register, but we add it for irrelevant
/* ASM_OUTPUT_OPCODE hook: classify the opcode at PTR and, when it
   follows a hazardous predecessor (stc before fsft or ret), emit the
   workaround text (presumably a nop) ahead of it.  */
3872 mep_asm_output_opcode (FILE *file, const char *ptr)
3874 int this_opcode = op_none;
3875 const char *hazard = 0;
/* The !ISGRAPH check ensures we match whole mnemonics only.  */
3880 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3881 this_opcode = op_fsft;
3884 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3885 this_opcode = op_ret;
3888 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3889 this_opcode = op_stc;
3893 if (prev_opcode == op_stc && this_opcode == op_fsft)
3895 if (prev_opcode == op_stc && this_opcode == op_ret)
3899 fprintf(file, "%s\t# %s-%s hazard\n\t",
3900 hazard, opnames[prev_opcode], opnames[this_opcode]);
3902 prev_opcode = this_opcode;
3905 /* Handle attributes. */
/* Attribute handler for "based" and "tiny": only variables (and
   pointer/typedef types) may carry them, they are meaningless on
   auto storage, and they are ignored on pointed-to types.  */
3908 mep_validate_based_tiny (tree *node, tree name, tree args,
3909 int flags ATTRIBUTE_UNUSED, bool *no_add)
3911 if (TREE_CODE (*node) != VAR_DECL
3912 && TREE_CODE (*node) != POINTER_TYPE
3913 && TREE_CODE (*node) != TYPE_DECL)
3915 warning (0, "%qE attribute only applies to variables", name);
3918 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3920 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3922 warning (0, "address region attributes not allowed with auto storage class");
3925 /* Ignore storage attribute of pointed to variable: char __far * x; */
3926 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3928 warning (0, "address region attributes on pointed-to types ignored");
/* Scan attribute LIST for address-region attributes ("based", "tiny",
   "near", "far", "io").  Normally returns how many were found; with
   CHECK_SECTION_ATTR, instead returns the count of "section"
   attributes (used to detect section/region conflicts).  */
3937 mep_multiple_address_regions (tree list, bool check_section_attr)
3940 int count_sections = 0;
3941 int section_attr_count = 0;
3943 for (a = list; a; a = TREE_CHAIN (a))
3945 if (is_attribute_p ("based", TREE_PURPOSE (a))
3946 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3947 || is_attribute_p ("near", TREE_PURPOSE (a))
3948 || is_attribute_p ("far", TREE_PURPOSE (a))
3949 || is_attribute_p ("io", TREE_PURPOSE (a)))
3951 if (check_section_attr)
3952 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3955 if (check_section_attr)
3956 return section_attr_count;
3958 return count_sections;
/* Fetch the relevant attribute list for DECL: type attributes for
   types, otherwise the decl's own attributes, falling back to those
   of its type.  */
3961 #define MEP_ATTRIBUTES(decl) \
3962 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3963 : DECL_ATTRIBUTES (decl) \
3964 ? (DECL_ATTRIBUTES (decl)) \
3965 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
/* Attribute handler for "near" and "far": allowed on variables and
   functions, rejected on auto storage, ignored on pointed-to types,
   and only one address-region attribute may apply per declaration.  */
3968 mep_validate_near_far (tree *node, tree name, tree args,
3969 int flags ATTRIBUTE_UNUSED, bool *no_add)
3971 if (TREE_CODE (*node) != VAR_DECL
3972 && TREE_CODE (*node) != FUNCTION_DECL
3973 && TREE_CODE (*node) != METHOD_TYPE
3974 && TREE_CODE (*node) != POINTER_TYPE
3975 && TREE_CODE (*node) != TYPE_DECL)
3977 warning (0, "%qE attribute only applies to variables and functions",
3981 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3983 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3985 warning (0, "address region attributes not allowed with auto storage class");
3988 /* Ignore storage attribute of pointed to variable: char __far * x; */
3989 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3991 warning (0, "address region attributes on pointed-to types ignored");
/* Duplicate region attribute: warn and discard all of them.  */
3995 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3997 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3998 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3999 DECL_ATTRIBUTES (*node) = NULL_TREE;
/* Attribute handler for "disinterrupt": functions only.  */
4005 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4006 int flags ATTRIBUTE_UNUSED, bool *no_add)
4008 if (TREE_CODE (*node) != FUNCTION_DECL
4009 && TREE_CODE (*node) != METHOD_TYPE)
4011 warning (0, "%qE attribute only applies to functions", name);
/* Attribute handler for "interrupt": functions only; an interrupt
   handler cannot be inlined, must return void, and must take no
   arguments.  */
4018 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4019 int flags ATTRIBUTE_UNUSED, bool *no_add)
4023 if (TREE_CODE (*node) != FUNCTION_DECL)
4025 warning (0, "%qE attribute only applies to functions", name);
4030 if (DECL_DECLARED_INLINE_P (*node))
4031 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
4032 DECL_UNINLINABLE (*node) = 1;
4034 function_type = TREE_TYPE (*node);
4036 if (TREE_TYPE (function_type) != void_type_node)
4037 error ("interrupt function must have return type of void");
4039 if (prototype_p (function_type)
4040 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
4041 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
4042 error ("interrupt function must have no arguments");
/* Attribute handler for "io" and "cb": variables only; the optional
   argument (a fixed bus address) must be an integer constant.  Unless
   TARGET_IO_NO_VOLATILE is set, an accepted attribute also makes the
   variable volatile.  */
4048 mep_validate_io_cb (tree *node, tree name, tree args,
4049 int flags ATTRIBUTE_UNUSED, bool *no_add)
4051 if (TREE_CODE (*node) != VAR_DECL)
4053 warning (0, "%qE attribute only applies to variables", name);
4057 if (args != NULL_TREE)
/* Strip a NON_LVALUE_EXPR wrapper before checking for a constant.  */
4059 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
4060 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
4061 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
4063 warning (0, "%qE attribute allows only an integer constant argument",
4069 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
4070 TREE_THIS_VOLATILE (*node) = 1;
/* Attribute handler for "vliw": function types/decls only, and only
   in a VLIW-capable configuration.  Emits one-time hints for the two
   common misuses (applying it to a pointer or an array instead of to
   the function type).  */
4076 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4077 int flags ATTRIBUTE_UNUSED, bool *no_add)
4079 if (TREE_CODE (*node) != FUNCTION_TYPE
4080 && TREE_CODE (*node) != FUNCTION_DECL
4081 && TREE_CODE (*node) != METHOD_TYPE
4082 && TREE_CODE (*node) != FIELD_DECL
4083 && TREE_CODE (*node) != TYPE_DECL)
4085 static int gave_pointer_note = 0;
4086 static int gave_array_note = 0;
4087 static const char * given_type = NULL;
4089 given_type = tree_code_name[TREE_CODE (*node)];
4090 if (TREE_CODE (*node) == POINTER_TYPE)
4091 given_type = "pointers";
4092 if (TREE_CODE (*node) == ARRAY_TYPE)
4093 given_type = "arrays";
4096 warning (0, "%qE attribute only applies to functions, not %s",
4099 warning (0, "%qE attribute only applies to functions",
/* Show each usage hint at most once per compilation.  */
4103 if (TREE_CODE (*node) == POINTER_TYPE
4104 && !gave_pointer_note)
4106 inform (input_location,
4107 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
4108 " typedef int (__vliw *vfuncptr) ();");
4109 gave_pointer_note = 1;
4112 if (TREE_CODE (*node) == ARRAY_TYPE
4113 && !gave_array_note)
4115 inform (input_location,
4116 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
4117 " typedef int (__vliw *vfuncptr[]) ();");
4118 gave_array_note = 1;
4122 error ("VLIW functions are not allowed without a VLIW configuration");
/* Table for TARGET_ATTRIBUTE_TABLE, terminated by the NULL entry.
   NOTE(review): one line of the "disinterrupt" row was dropped from
   this listing.  */
4126 static const struct attribute_spec mep_attribute_table[11] =
4128 /* name min max decl type func handler
4129 affects_type_identity */
4130 { "based", 0, 0, false, false, false, mep_validate_based_tiny, false },
4131 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny, false },
4132 { "near", 0, 0, false, false, false, mep_validate_near_far, false },
4133 { "far", 0, 0, false, false, false, mep_validate_near_far, false },
4134 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
4136 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt, false },
4137 { "io", 0, 1, false, false, false, mep_validate_io_cb, false },
4138 { "cb", 0, 1, false, false, false, mep_validate_io_cb, false },
4139 { "vliw", 0, 0, false, true, false, mep_validate_vliw, false },
4140 { NULL, 0, 0, false, false, false, NULL, false }
/* Implement TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P: interrupt and
   disinterrupt functions must never be inlined.  */
4144 mep_function_attribute_inlinable_p (const_tree callee)
4146 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4147 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4148 return (lookup_attribute ("disinterrupt", attrs) == 0
4149 && lookup_attribute ("interrupt", attrs) == 0);
/* Implement TARGET_CAN_INLINE_P: a VLIW callee must not be inlined
   into a core (non-VLIW) caller.  NOTE(review): the return statements
   were dropped from this listing.  */
4153 mep_can_inline_p (tree caller, tree callee)
4155 if (TREE_CODE (callee) == ADDR_EXPR)
4156 callee = TREE_OPERAND (callee, 0);
4158 if (!mep_vliw_function_p (caller)
4159 && mep_vliw_function_p (callee))
4167 #define FUNC_DISINTERRUPT 2
/* GC-tracked record of #pragma flags noted for one function name;
   "flag"/"used" bitfields hold FUNC_CALL / FUNC_DISINTERRUPT bits
   (declaration lines dropped from this listing).  */
4170 struct GTY(()) pragma_entry {
4173 const char *funcname;
4175 typedef struct pragma_entry pragma_entry;
4177 /* Hash table mapping function names to their noted #pragma flags. */
4178 static GTY((param_is (pragma_entry))) htab_t pragma_htab;
/* htab equality callback: compare a stored entry against a bare
   function-name key.  */
4181 pragma_entry_eq (const void *p1, const void *p2)
4183 const pragma_entry *old = (const pragma_entry *) p1;
4184 const char *new_name = (const char *) p2;
4186 return strcmp (old->funcname, new_name) == 0;
/* htab hash callback: hash the entry's function name.  */
4190 pragma_entry_hash (const void *p)
4192 const pragma_entry *old = (const pragma_entry *) p;
4193 return htab_hash_string (old->funcname);
/* Record FLAG (FUNC_CALL or FUNC_DISINTERRUPT) for FUNCNAME, creating
   the hash table and/or the entry on first use.  */
4197 mep_note_pragma_flag (const char *funcname, int flag)
4199 pragma_entry **slot;
4202 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4203 pragma_entry_eq, NULL);
4205 slot = (pragma_entry **)
4206 htab_find_slot_with_hash (pragma_htab, funcname,
4207 htab_hash_string (funcname), INSERT);
4211 *slot = ggc_alloc_pragma_entry ();
4214 (*slot)->funcname = ggc_strdup (funcname);
4216 (*slot)->flag |= flag;
/* Return nonzero if FUNCNAME was tagged with FLAG by a #pragma; a hit
   also marks the flag as used (for the unused-pragma warning).  */
4220 mep_lookup_pragma_flag (const char *funcname, int flag)
4222 pragma_entry **slot;
/* Skip a leading "@<tag>." section-encoding prefix before lookup
   (the advance itself was dropped from this listing).  */
4227 if (funcname[0] == '@' && funcname[2] == '.')
4230 slot = (pragma_entry **)
4231 htab_find_slot_with_hash (pragma_htab, funcname,
4232 htab_hash_string (funcname), NO_INSERT);
4233 if (slot && *slot && ((*slot)->flag & flag))
4235 (*slot)->used |= flag;
/* Thin wrappers around the generic note/lookup helpers for the two
   pragma kinds.  */
4242 mep_lookup_pragma_call (const char *funcname)
4244 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4248 mep_note_pragma_call (const char *funcname)
4250 mep_note_pragma_flag (funcname, FUNC_CALL);
4254 mep_lookup_pragma_disinterrupt (const char *funcname)
4256 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4260 mep_note_pragma_disinterrupt (const char *funcname)
4262 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
/* htab_traverse callback: warn about any "#pragma disinterrupt" that
   never matched a function definition.  */
4266 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4268 const pragma_entry *d = (const pragma_entry *)(*slot);
4270 if ((d->flag & FUNC_DISINTERRUPT)
4271 && !(d->used & FUNC_DISINTERRUPT))
4272 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
/* End-of-compilation checks: scan the pragma table for unused
   disinterrupt pragmas.  */
4277 mep_file_cleanups (void)
4280 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
4283 /* These three functions provide a bridge between the pragmas that
4284 affect register classes, and the functions that maintain them. We
4285 can't call those functions directly as pragma handling is part of
4286 the front end and doesn't have direct access to them. */
4289 mep_save_register_info (void)
4291 save_register_info ();
/* NOTE(review): the bodies of the two functions below were dropped
   from this listing.  */
4295 mep_reinit_regs (void)
4301 mep_init_regs (void)
/* Map an address-region attribute list to its single-character
   section-tag encoding (see the legend near the top of the file:
   b=based, t=tiny, n=near, f=far, i=io, c=cb).  Warns about and
   discards duplicate region attributes first.
   NOTE(review): the return statements carrying the actual tag
   characters were dropped from this listing.  */
4309 mep_attrlist_to_encoding (tree list, tree decl)
4311 if (mep_multiple_address_regions (list, false) > 1)
4313 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4314 TREE_PURPOSE (TREE_CHAIN (list)),
4316 DECL_SOURCE_LINE (decl));
4317 TREE_CHAIN (list) = NULL_TREE;
4322 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4324 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4326 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4328 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4330 if (is_attribute_p ("io", TREE_PURPOSE (list)))
/* "io" with an address argument: only small fixed addresses keep the
   plain encoding.  */
4332 if (TREE_VALUE (list)
4333 && TREE_VALUE (TREE_VALUE (list))
4334 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4336 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4338 && location <= 0x1000000)
4343 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4345 list = TREE_CHAIN (list);
4348 && TREE_CODE (decl) == FUNCTION_DECL
4349 && DECL_SECTION_NAME (decl) == 0)
4355 mep_comp_type_attributes (const_tree t1, const_tree t2)
4359 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4360 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4369 mep_insert_attributes (tree decl, tree *attributes)
4372 const char *secname = 0;
4373 tree attrib, attrlist;
4376 if (TREE_CODE (decl) == FUNCTION_DECL)
4378 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4380 if (mep_lookup_pragma_disinterrupt (funcname))
4382 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4383 *attributes = chainon (*attributes, attrib);
4387 if (TREE_CODE (decl) != VAR_DECL
4388 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4391 if (TREE_READONLY (decl) && TARGET_DC)
4392 /* -mdc means that const variables default to the near section,
4393 regardless of the size cutoff. */
4396 /* User specified an attribute, so override the default.
4397 Ignore storage attribute of pointed to variable. char __far * x; */
4398 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4400 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4401 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4402 else if (DECL_ATTRIBUTES (decl) && *attributes)
4403 DECL_ATTRIBUTES (decl) = NULL_TREE;
4406 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4407 encoding = mep_attrlist_to_encoding (attrlist, decl);
4408 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4410 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4411 encoding = mep_attrlist_to_encoding (attrlist, decl);
4415 /* This means that the declaration has a specific section
4416 attribute, so we should not apply the default rules. */
4418 if (encoding == 'i' || encoding == 'I')
4420 tree attr = lookup_attribute ("io", attrlist);
4422 && TREE_VALUE (attr)
4423 && TREE_VALUE (TREE_VALUE(attr)))
4425 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4426 static tree previous_value = 0;
4427 static int previous_location = 0;
4428 static tree previous_name = 0;
4430 /* We take advantage of the fact that gcc will reuse the
4431 same tree pointer when applying an attribute to a
4432 list of decls, but produce a new tree for attributes
4433 on separate source lines, even when they're textually
4434 identical. This is the behavior we want. */
4435 if (TREE_VALUE (attr) == previous_value
4436 && location == previous_location)
4438 warning(0, "__io address 0x%x is the same for %qE and %qE",
4439 location, previous_name, DECL_NAME (decl));
4441 previous_name = DECL_NAME (decl);
4442 previous_location = location;
4443 previous_value = TREE_VALUE (attr);
4450 /* Declarations of arrays can change size. Don't trust them. */
4451 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4454 size = int_size_in_bytes (TREE_TYPE (decl));
4456 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4458 if (TREE_PUBLIC (decl)
4459 || DECL_EXTERNAL (decl)
4460 || TREE_STATIC (decl))
4462 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4486 if (size <= mep_based_cutoff && size > 0)
4488 else if (size <= mep_tiny_cutoff && size > 0)
4494 if (mep_const_section && TREE_READONLY (decl))
4496 if (strcmp (mep_const_section, "tiny") == 0)
4498 else if (strcmp (mep_const_section, "near") == 0)
4500 else if (strcmp (mep_const_section, "far") == 0)
4507 if (!mep_multiple_address_regions (*attributes, true)
4508 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4510 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4512 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4513 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4514 and mep_validate_based_tiny. */
4515 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4520 mep_encode_section_info (tree decl, rtx rtl, int first)
4523 const char *oldname;
4524 const char *secname;
4530 tree mep_attributes;
4535 if (TREE_CODE (decl) != VAR_DECL
4536 && TREE_CODE (decl) != FUNCTION_DECL)
4539 rtlname = XEXP (rtl, 0);
4540 if (GET_CODE (rtlname) == SYMBOL_REF)
4541 oldname = XSTR (rtlname, 0);
4542 else if (GET_CODE (rtlname) == MEM
4543 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4544 oldname = XSTR (XEXP (rtlname, 0), 0);
4548 type = TREE_TYPE (decl);
4549 if (type == error_mark_node)
4551 mep_attributes = MEP_ATTRIBUTES (decl);
4553 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
4557 newname = (char *) alloca (strlen (oldname) + 4);
4558 sprintf (newname, "@%c.%s", encoding, oldname);
4559 idp = get_identifier (newname);
4561 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4562 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4563 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4576 maxsize = 0x1000000;
4584 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4586 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4588 (long) int_size_in_bytes (TREE_TYPE (decl)),
4596 mep_strip_name_encoding (const char *sym)
4602 else if (*sym == '@' && sym[2] == '.')
4610 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4611 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4616 switch (TREE_CODE (decl))
4619 if (!TREE_READONLY (decl)
4620 || TREE_SIDE_EFFECTS (decl)
4621 || !DECL_INITIAL (decl)
4622 || (DECL_INITIAL (decl) != error_mark_node
4623 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4627 if (! TREE_CONSTANT (decl))
4635 if (TREE_CODE (decl) == FUNCTION_DECL)
4637 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4639 if (name[0] == '@' && name[2] == '.')
4644 if (flag_function_sections || DECL_ONE_ONLY (decl))
4645 mep_unique_section (decl, 0);
4646 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4648 if (encoding == 'f')
4649 return vftext_section;
4651 return vtext_section;
4653 else if (encoding == 'f')
4654 return ftext_section;
4656 return text_section;
4659 if (TREE_CODE (decl) == VAR_DECL)
4661 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4663 if (name[0] == '@' && name[2] == '.')
4667 return based_section;
4671 return srodata_section;
4672 if (DECL_INITIAL (decl))
4673 return sdata_section;
4674 return tinybss_section;
4678 return frodata_section;
4683 error_at (DECL_SOURCE_LOCATION (decl),
4684 "variable %D of type %<io%> must be uninitialized", decl);
4685 return data_section;
4688 error_at (DECL_SOURCE_LOCATION (decl),
4689 "variable %D of type %<cb%> must be uninitialized", decl);
4690 return data_section;
4695 return readonly_data_section;
4697 return data_section;
4701 mep_unique_section (tree decl, int reloc)
4703 static const char *prefixes[][2] =
4705 { ".text.", ".gnu.linkonce.t." },
4706 { ".rodata.", ".gnu.linkonce.r." },
4707 { ".data.", ".gnu.linkonce.d." },
4708 { ".based.", ".gnu.linkonce.based." },
4709 { ".sdata.", ".gnu.linkonce.s." },
4710 { ".far.", ".gnu.linkonce.far." },
4711 { ".ftext.", ".gnu.linkonce.ft." },
4712 { ".frodata.", ".gnu.linkonce.frd." },
4713 { ".srodata.", ".gnu.linkonce.srd." },
4714 { ".vtext.", ".gnu.linkonce.v." },
4715 { ".vftext.", ".gnu.linkonce.vf." }
4717 int sec = 2; /* .data */
4719 const char *name, *prefix;
4722 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4723 if (DECL_RTL (decl))
4724 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4726 if (TREE_CODE (decl) == FUNCTION_DECL)
4728 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4729 sec = 9; /* .vtext */
4731 sec = 0; /* .text */
4733 else if (decl_readonly_section (decl, reloc))
4734 sec = 1; /* .rodata */
4736 if (name[0] == '@' && name[2] == '.')
4741 sec = 3; /* .based */
4745 sec = 8; /* .srodata */
4747 sec = 4; /* .sdata */
4751 sec = 6; /* .ftext */
4753 sec = 10; /* .vftext */
4755 sec = 7; /* .frodata */
4757 sec = 5; /* .far. */
4763 prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
4764 len = strlen (name) + strlen (prefix);
4765 string = (char *) alloca (len + 1);
4767 sprintf (string, "%s%s", prefix, name);
4769 DECL_SECTION_NAME (decl) = build_string (len, string);
4772 /* Given a decl, a section name, and whether the decl initializer
4773 has relocs, choose attributes for the section. */
4775 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4778 mep_section_type_flags (tree decl, const char *name, int reloc)
4780 unsigned int flags = default_section_type_flags (decl, name, reloc);
4782 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4783 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4784 flags |= SECTION_MEP_VLIW;
4789 /* Switch to an arbitrary section NAME with attributes as specified
4790 by FLAGS. ALIGN specifies any known alignment requirements for
4791 the section; 0 if the default should be used.
4793 Differs from the standard ELF version only in support of VLIW mode. */
4796 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4798 char flagchars[8], *f = flagchars;
4801 if (!(flags & SECTION_DEBUG))
4803 if (flags & SECTION_WRITE)
4805 if (flags & SECTION_CODE)
4807 if (flags & SECTION_SMALL)
4809 if (flags & SECTION_MEP_VLIW)
4813 if (flags & SECTION_BSS)
4818 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4819 name, flagchars, type);
4821 if (flags & SECTION_CODE)
4822 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4827 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4828 int size, int align, int global)
4830 /* We intentionally don't use mep_section_tag() here. */
4832 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4836 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4837 DECL_ATTRIBUTES (decl));
4839 && TREE_VALUE (attr)
4840 && TREE_VALUE (TREE_VALUE(attr)))
4841 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4846 fprintf (stream, "\t.globl\t");
4847 assemble_name (stream, name);
4848 fprintf (stream, "\n");
4850 assemble_name (stream, name);
4851 fprintf (stream, " = %d\n", location);
4854 if (name[0] == '@' && name[2] == '.')
4856 const char *sec = 0;
4860 switch_to_section (based_section);
4864 switch_to_section (tinybss_section);
4868 switch_to_section (farbss_section);
4877 while (align > BITS_PER_UNIT)
4882 name2 = targetm.strip_name_encoding (name);
4884 fprintf (stream, "\t.globl\t%s\n", name2);
4885 fprintf (stream, "\t.p2align %d\n", p2align);
4886 fprintf (stream, "\t.type\t%s,@object\n", name2);
4887 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4888 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4895 fprintf (stream, "\t.local\t");
4896 assemble_name (stream, name);
4897 fprintf (stream, "\n");
4899 fprintf (stream, "\t.comm\t");
4900 assemble_name (stream, name);
4901 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4907 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4909 rtx addr = XEXP (m_tramp, 0);
4910 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4912 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4913 LCT_NORMAL, VOIDmode, 3,
4916 static_chain, Pmode);
4919 /* Experimental Reorg. */
4922 mep_mentioned_p (rtx in,
4923 rtx reg, /* NULL for mem */
4924 int modes_too) /* if nonzero, modes must match also. */
4932 if (reg && GET_CODE (reg) != REG)
4935 if (GET_CODE (in) == LABEL_REF)
4938 code = GET_CODE (in);
4944 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4950 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4952 return (REGNO (in) == REGNO (reg));
4965 /* Set's source should be read-only. */
4966 if (code == SET && !reg)
4967 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
4969 fmt = GET_RTX_FORMAT (code);
4971 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4976 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4977 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4980 else if (fmt[i] == 'e'
4981 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4987 #define EXPERIMENTAL_REGMOVE_REORG 1
4989 #if EXPERIMENTAL_REGMOVE_REORG
4992 mep_compatible_reg_class (int r1, int r2)
4994 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
4996 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
5002 mep_reorg_regmove (rtx insns)
5004 rtx insn, next, pat, follow, *where;
5005 int count = 0, done = 0, replace, before = 0;
5008 for (insn = insns; insn; insn = NEXT_INSN (insn))
5009 if (GET_CODE (insn) == INSN)
5012 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
5013 set that uses the r2 and r2 dies there. We replace r2 with r1
5014 and see if it's still a valid insn. If so, delete the first set.
5015 Copied from reorg.c. */
5020 for (insn = insns; insn; insn = next)
5022 next = NEXT_INSN (insn);
5023 if (GET_CODE (insn) != INSN)
5025 pat = PATTERN (insn);
5029 if (GET_CODE (pat) == SET
5030 && GET_CODE (SET_SRC (pat)) == REG
5031 && GET_CODE (SET_DEST (pat)) == REG
5032 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
5033 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
5035 follow = next_nonnote_insn (insn);
5037 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
5039 while (follow && GET_CODE (follow) == INSN
5040 && GET_CODE (PATTERN (follow)) == SET
5041 && !dead_or_set_p (follow, SET_SRC (pat))
5042 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
5043 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
5046 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
5047 follow = next_nonnote_insn (follow);
5051 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
5052 if (follow && GET_CODE (follow) == INSN
5053 && GET_CODE (PATTERN (follow)) == SET
5054 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
5056 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5058 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5061 where = & SET_SRC (PATTERN (follow));
5064 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5066 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5069 where = & PATTERN (follow);
5075 /* If so, follow is the corresponding insn */
5082 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5083 for (x = insn; x ;x = NEXT_INSN (x))
5085 print_rtl_single (dump_file, x);
5088 fprintf (dump_file, "\n");
5092 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5096 next = delete_insn (insn);
5099 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5100 print_rtl_single (dump_file, follow);
5110 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5111 fprintf (dump_file, "=====\n");
5117 /* Figure out where to put LABEL, which is the label for a repeat loop.
5118 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5119 the loop ends just before LAST_INSN. If SHARED, insns other than the
5120 "repeat" might use LABEL to jump to the loop's continuation point.
5122 Return the last instruction in the adjusted loop. */
5125 mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
5129 int count = 0, code, icode;
5132 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5133 INSN_UID (last_insn));
5135 /* Set PREV to the last insn in the loop. */
5138 prev = PREV_INSN (prev);
5140 /* Set NEXT to the next insn after the repeat label. */
5145 code = GET_CODE (prev);
5146 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
5151 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
5152 prev = XVECEXP (PATTERN (prev), 0, 1);
5154 /* Other insns that should not be in the last two opcodes. */
5155 icode = recog_memoized (prev);
5157 || icode == CODE_FOR_repeat
5158 || icode == CODE_FOR_erepeat
5159 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5162 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5163 is the second instruction in a VLIW bundle. In that case,
5164 loop again: if the first instruction also satisfies the
5165 conditions above then we will reach here again and put
5166 both of them into the repeat epilogue. Otherwise both
5167 should remain outside. */
5168 if (GET_MODE (prev) != BImode)
5173 print_rtl_single (dump_file, next);
5178 prev = PREV_INSN (prev);
5181 /* See if we're adding the label immediately after the repeat insn.
5182 If so, we need to separate them with a nop. */
5183 prev = prev_real_insn (next);
5185 switch (recog_memoized (prev))
5187 case CODE_FOR_repeat:
5188 case CODE_FOR_erepeat:
5190 fprintf (dump_file, "Adding nop inside loop\n");
5191 emit_insn_before (gen_nop (), next);
5198 /* Insert the label. */
5199 emit_label_before (label, next);
5201 /* Insert the nops. */
5202 if (dump_file && count < 2)
5203 fprintf (dump_file, "Adding %d nop%s\n\n",
5204 2 - count, count == 1 ? "" : "s");
5206 for (; count < 2; count++)
5208 last_insn = emit_insn_after (gen_nop (), last_insn);
5210 emit_insn_before (gen_nop (), last_insn);
5217 mep_emit_doloop (rtx *operands, int is_end)
5221 if (cfun->machine->doloop_tags == 0
5222 || cfun->machine->doloop_tag_from_end == is_end)
5224 cfun->machine->doloop_tags++;
5225 cfun->machine->doloop_tag_from_end = is_end;
5228 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5230 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5232 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5236 /* Code for converting doloop_begins and doloop_ends into valid
5237 MeP instructions. A doloop_begin is just a placeholder:
5239 $count = unspec ($count)
5241 where $count is initially the number of iterations - 1.
5242 doloop_end has the form:
5244 if ($count-- == 0) goto label
5246 The counter variable is private to the doloop insns, nothing else
5247 relies on its value.
5249 There are three cases, in decreasing order of preference:
5251 1. A loop has exactly one doloop_begin and one doloop_end.
5252 The doloop_end branches to the first instruction after
5255 In this case we can replace the doloop_begin with a repeat
5256 instruction and remove the doloop_end. I.e.:
5258 $count1 = unspec ($count1)
5263 if ($count2-- == 0) goto label
5267 repeat $count1,repeat_label
5275 2. As for (1), except there are several doloop_ends. One of them
5276 (call it X) falls through to a label L. All the others fall
5277 through to branches to L.
5279 In this case, we remove X and replace the other doloop_ends
5280 with branches to the repeat label. For example:
5282 $count1 = unspec ($count1)
5285 if ($count2-- == 0) goto label
5288 if ($count3-- == 0) goto label
5293 repeat $count1,repeat_label
5304 3. The fallback case. Replace doloop_begins with:
5308 Replace doloop_ends with the equivalent of:
5311 if ($count == 0) goto label
5313 Note that this might need a scratch register if $count
5314 is stored in memory. */
5316 /* A structure describing one doloop_begin. */
5317 struct mep_doloop_begin {
5318 /* The next doloop_begin with the same tag. */
5319 struct mep_doloop_begin *next;
5321 /* The instruction itself. */
5324 /* The initial counter value. This is known to be a general register. */
5328 /* A structure describing a doloop_end. */
5329 struct mep_doloop_end {
5330 /* The next doloop_end with the same loop tag. */
5331 struct mep_doloop_end *next;
5333 /* The instruction itself. */
5336 /* The first instruction after INSN when the branch isn't taken. */
5339 /* The location of the counter value. Since doloop_end_internal is a
5340 jump instruction, it has to allow the counter to be stored anywhere
5341 (any non-fixed register or memory location). */
5344 /* The target label (the place where the insn branches when the counter
5348 /* A scratch register. Only available when COUNTER isn't stored
5349 in a general register. */
5354 /* One do-while loop. */
5356 /* All the doloop_begins for this loop (in no particular order). */
5357 struct mep_doloop_begin *begin;
5359 /* All the doloop_ends. When there is more than one, arrange things
5360 so that the first one is the most likely to be X in case (2) above. */
5361 struct mep_doloop_end *end;
5365 /* Return true if LOOP can be converted into repeat/repeat_end form
5366 (that is, if it matches cases (1) or (2) above). */
5369 mep_repeat_loop_p (struct mep_doloop *loop)
5371 struct mep_doloop_end *end;
5374 /* There must be exactly one doloop_begin and at least one doloop_end. */
5375 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5378 /* The first doloop_end (X) must branch back to the insn after
5379 the doloop_begin. */
5380 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5383 /* All the other doloop_ends must branch to the same place as X.
5384 When the branch isn't taken, they must jump to the instruction
5386 fallthrough = loop->end->fallthrough;
5387 for (end = loop->end->next; end != 0; end = end->next)
5388 if (end->label != loop->end->label
5389 || !simplejump_p (end->fallthrough)
5390 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5397 /* The main repeat reorg function. See comment above for details. */
5400 mep_reorg_repeat (rtx insns)
5403 struct mep_doloop *loops, *loop;
5404 struct mep_doloop_begin *begin;
5405 struct mep_doloop_end *end;
5407 /* Quick exit if we haven't created any loops. */
5408 if (cfun->machine->doloop_tags == 0)
5411 /* Create an array of mep_doloop structures. */
5412 loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
5413 memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);
5415 /* Search the function for do-while insns and group them by loop tag. */
5416 for (insn = insns; insn; insn = NEXT_INSN (insn))
5418 switch (recog_memoized (insn))
5420 case CODE_FOR_doloop_begin_internal:
5421 insn_extract (insn);
5422 loop = &loops[INTVAL (recog_data.operand[2])];
5424 begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
5425 begin->next = loop->begin;
5427 begin->counter = recog_data.operand[0];
5429 loop->begin = begin;
5432 case CODE_FOR_doloop_end_internal:
5433 insn_extract (insn);
5434 loop = &loops[INTVAL (recog_data.operand[2])];
5436 end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
5438 end->fallthrough = next_real_insn (insn);
5439 end->counter = recog_data.operand[0];
5440 end->label = recog_data.operand[1];
5441 end->scratch = recog_data.operand[3];
5443 /* If this insn falls through to an unconditional jump,
5444 give it a lower priority than the others. */
5445 if (loop->end != 0 && simplejump_p (end->fallthrough))
5447 end->next = loop->end->next;
5448 loop->end->next = end;
5452 end->next = loop->end;
5458 /* Convert the insns for each loop in turn. */
5459 for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
5460 if (mep_repeat_loop_p (loop))
5462 /* Case (1) or (2). */
5463 rtx repeat_label, label_ref;
5465 /* Create a new label for the repeat insn. */
5466 repeat_label = gen_label_rtx ();
5468 /* Replace the doloop_begin with a repeat. */
5469 label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
5470 emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
5472 delete_insn (loop->begin->insn);
5474 /* Insert the repeat label before the first doloop_end.
5475 Fill the gap with nops if there are other doloop_ends. */
5476 mep_insert_repeat_label_last (loop->end->insn, repeat_label,
5477 false, loop->end->next != 0);
5479 /* Emit a repeat_end (to improve the readability of the output). */
5480 emit_insn_before (gen_repeat_end (), loop->end->insn);
5482 /* Delete the first doloop_end. */
5483 delete_insn (loop->end->insn);
5485 /* Replace the others with branches to REPEAT_LABEL. */
5486 for (end = loop->end->next; end != 0; end = end->next)
5488 emit_jump_insn_before (gen_jump (repeat_label), end->insn);
5489 delete_insn (end->insn);
5490 delete_insn (end->fallthrough);
5495 /* Case (3). First replace all the doloop_begins with increment
5497 for (begin = loop->begin; begin != 0; begin = begin->next)
5499 emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
5500 begin->counter, const1_rtx),
5502 delete_insn (begin->insn);
5505 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5506 for (end = loop->end; end != 0; end = end->next)
5512 /* Load the counter value into a general register. */
5514 if (!REG_P (reg) || REGNO (reg) > 15)
5517 emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
5520 /* Decrement the counter. */
5521 emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
5524 /* Copy it back to its original location. */
5525 if (reg != end->counter)
5526 emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));
5528 /* Jump back to the start label. */
5529 insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
5531 JUMP_LABEL (insn) = end->label;
5532 LABEL_NUSES (end->label)++;
5534 /* Emit the whole sequence before the doloop_end. */
5535 insn = get_insns ();
5537 emit_insn_before (insn, end->insn);
5539 /* Delete the doloop_end. */
5540 delete_insn (end->insn);
5547 mep_invertable_branch_p (rtx insn)
5550 enum rtx_code old_code;
5553 set = PATTERN (insn);
5554 if (GET_CODE (set) != SET)
5556 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5558 cond = XEXP (XEXP (set, 1), 0);
5559 old_code = GET_CODE (cond);
5563 PUT_CODE (cond, NE);
5566 PUT_CODE (cond, EQ);
5569 PUT_CODE (cond, GE);
5572 PUT_CODE (cond, LT);
5577 INSN_CODE (insn) = -1;
5578 i = recog_memoized (insn);
5579 PUT_CODE (cond, old_code);
5580 INSN_CODE (insn) = -1;
5585 mep_invert_branch (rtx insn, rtx after)
5587 rtx cond, set, label;
5590 set = PATTERN (insn);
5592 gcc_assert (GET_CODE (set) == SET);
5593 gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);
5595 cond = XEXP (XEXP (set, 1), 0);
5596 switch (GET_CODE (cond))
5599 PUT_CODE (cond, NE);
5602 PUT_CODE (cond, EQ);
5605 PUT_CODE (cond, GE);
5608 PUT_CODE (cond, LT);
5613 label = gen_label_rtx ();
5614 emit_label_after (label, after);
5615 for (i=1; i<=2; i++)
5616 if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
5618 rtx ref = XEXP (XEXP (set, 1), i);
5619 if (LABEL_NUSES (XEXP (ref, 0)) == 1)
5620 delete_insn (XEXP (ref, 0));
5621 XEXP (ref, 0) = label;
5622 LABEL_NUSES (label) ++;
5623 JUMP_LABEL (insn) = label;
5625 INSN_CODE (insn) = -1;
5626 i = recog_memoized (insn);
5627 gcc_assert (i >= 0);
5631 mep_reorg_erepeat (rtx insns)
5633 rtx insn, prev, l, x;
5636 for (insn = insns; insn; insn = NEXT_INSN (insn))
5638 && ! JUMP_TABLE_DATA_P (insn)
5639 && mep_invertable_branch_p (insn))
5643 fprintf (dump_file, "\n------------------------------\n");
5644 fprintf (dump_file, "erepeat: considering this jump:\n");
5645 print_rtl_single (dump_file, insn);
5647 count = simplejump_p (insn) ? 0 : 1;
5648 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
5650 if (GET_CODE (prev) == CALL_INSN
5651 || BARRIER_P (prev))
5654 if (prev == JUMP_LABEL (insn))
5658 fprintf (dump_file, "found loop top, %d insns\n", count);
5660 if (LABEL_NUSES (prev) == 1)
5661 /* We're the only user, always safe */ ;
5662 else if (LABEL_NUSES (prev) == 2)
5664 /* See if there's a barrier before this label. If
5665 so, we know nobody inside the loop uses it.
5666 But we must be careful to put the erepeat
5667 *after* the label. */
5669 for (barrier = PREV_INSN (prev);
5670 barrier && GET_CODE (barrier) == NOTE;
5671 barrier = PREV_INSN (barrier))
5673 if (barrier && GET_CODE (barrier) != BARRIER)
5678 /* We don't know who else, within or without our loop, uses this */
5680 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5684 /* Generate a label to be used by the erepeat insn. */
5685 l = gen_label_rtx ();
5687 /* Insert the erepeat after INSN's target label. */
5688 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5690 emit_insn_after (x, prev);
5692 /* Insert the erepeat label. */
5693 newlast = (mep_insert_repeat_label_last
5694 (insn, l, !simplejump_p (insn), false));
5695 if (simplejump_p (insn))
5697 emit_insn_before (gen_erepeat_end (), insn);
5702 mep_invert_branch (insn, newlast);
5703 emit_insn_after (gen_erepeat_end (), newlast);
5710 /* A label is OK if there is exactly one user, and we
5711 can find that user before the next label. */
5714 if (LABEL_NUSES (prev) == 1)
5716 for (user = PREV_INSN (prev);
5717 user && (INSN_P (user) || GET_CODE (user) == NOTE);
5718 user = PREV_INSN (user))
5719 if (GET_CODE (user) == JUMP_INSN
5720 && JUMP_LABEL (user) == prev)
5722 safe = INSN_UID (user);
5729 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5730 safe, INSN_UID (prev));
5740 fprintf (dump_file, "\n==============================\n");
5743 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5744 always do this on its own. */
5747 mep_jmp_return_reorg (rtx insns)
5749 rtx insn, label, ret;
5752 for (insn = insns; insn; insn = NEXT_INSN (insn))
5753 if (simplejump_p (insn))
5755 /* Find the first real insn the jump jumps to. */
5756 label = ret = JUMP_LABEL (insn);
5758 && (GET_CODE (ret) == NOTE
5759 || GET_CODE (ret) == CODE_LABEL
5760 || GET_CODE (PATTERN (ret)) == USE))
5761 ret = NEXT_INSN (ret);
5765 /* Is it a return? */
5766 ret_code = recog_memoized (ret);
5767 if (ret_code == CODE_FOR_return_internal
5768 || ret_code == CODE_FOR_eh_return_internal)
5770 /* It is. Replace the jump with a return. */
5771 LABEL_NUSES (label) --;
5772 if (LABEL_NUSES (label) == 0)
5773 delete_insn (label);
5774 PATTERN (insn) = copy_rtx (PATTERN (ret));
5775 INSN_CODE (insn) = -1;
5783 mep_reorg_addcombine (rtx insns)
5787 for (i = insns; i; i = NEXT_INSN (i))
5789 && INSN_CODE (i) == CODE_FOR_addsi3
5790 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5791 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5792 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5793 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5797 && INSN_CODE (n) == CODE_FOR_addsi3
5798 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5799 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5800 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5801 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5803 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5804 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5805 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5807 && ic + nc > -32768)
5809 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5810 NEXT_INSN (i) = NEXT_INSN (n);
5812 PREV_INSN (NEXT_INSN (i)) = i;
5818 /* If this insn adjusts the stack, return the adjustment, else return
5821 add_sp_insn_p (rtx insn)
5825 if (! single_set (insn))
5827 pat = PATTERN (insn);
5828 if (GET_CODE (SET_DEST (pat)) != REG)
5830 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5832 if (GET_CODE (SET_SRC (pat)) != PLUS)
5834 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5836 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5838 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5840 return INTVAL (XEXP (SET_SRC (pat), 1));
5843 /* Check for trivial functions that set up an unneeded stack
/* NOTE(review): comment tail elided; this pass finds a frame-allocate
   insn "$sp = $sp - N" at function entry and a matching deallocate
   "$sp = $sp + N" with no intervening use of $sp or calls, and deletes
   both.  Several control-flow lines are elided in this excerpt.  */
5846 mep_reorg_noframe (rtx insns)
5848   rtx start_frame_insn;
5849   rtx end_frame_insn = 0;
5853   /* The first insn should be $sp = $sp + N */
5854   while (insns && ! INSN_P (insns))
5855     insns = NEXT_INSN (insns);
5859   sp_adjust = add_sp_insn_p (insns);
5863   start_frame_insn = insns;
/* Remember the $sp REG rtx so later insns can be scanned for it.  */
5864   sp = SET_DEST (PATTERN (start_frame_insn));
5866   insns = next_real_insn (insns);
5870       rtx next = next_real_insn (insns);
5874       sp2 = add_sp_insn_p (insns);
5879 	  end_frame_insn = insns;
/* The epilogue adjustment must exactly undo the prologue one.  */
5880 	  if (sp2 != -sp_adjust)
/* Any other mention of $sp, or any call, means the frame is live —
   give up (the bail-out statements are on elided lines).  */
5883       else if (mep_mentioned_p (insns, sp, 0))
5885       else if (CALL_P (insns))
5893       delete_insn (start_frame_insn);
5894       delete_insn (end_frame_insn);
/* NOTE(review): the function header is on an elided line; this is the
   body of the machine-dependent reorg pass (TARGET_MACHINE_DEPENDENT_REORG)
   — it runs each MeP-specific reorg subpass in a fixed order.  */
5901   rtx insns = get_insns ();
5903   /* We require accurate REG_DEAD notes.  */
5904   compute_bb_for_insn ();
5905   df_note_add_problem ();
5908   mep_reorg_addcombine (insns);
5909 #if EXPERIMENTAL_REGMOVE_REORG
5910   /* VLIW packing has been done already, so we can't just delete things.  */
5911   if (!mep_vliw_function_p (cfun->decl))
5912     mep_reorg_regmove (insns);
5914   mep_jmp_return_reorg (insns);
5915   mep_bundle_insns (insns);
5916   mep_reorg_repeat (insns);
/* erepeat needs the RPB register; skip it when profiling or when an
   interrupt handler does not save RPB (condition partially elided).  */
5919       && !profile_arc_flag
5920       && TARGET_OPT_REPEAT
5921       && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
5922     mep_reorg_erepeat (insns);
5924   /* This may delete *insns so make sure it's last.  */
5925   mep_reorg_noframe (insns);
5927   df_finish_pass (false);
5932 /*----------------------------------------------------------------------*/
/* Intrinsic-support global state.  */
5934 /*----------------------------------------------------------------------*/
5936 /* Element X gives the index into cgen_insns[] of the most general
5937    implementation of intrinsic X.  Unimplemented intrinsics are
5939 int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];
5941 /* Element X gives the index of another instruction that is mapped to
5942    the same intrinsic as cgen_insns[X].  It is -1 when there is no other
5945    Things are set up so that mep_intrinsic_chain[X] < X.  */
5946 static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];
5948 /* The bitmask for the current ISA.  The ISA masks are declared
5950 unsigned int mep_selected_isa;
/* One entry per supported coprocessor configuration; maps a -mconfig=
   name to its ISA bitmask.  (struct declaration partially elided.)  */
5953   const char *config_name;
5957 static struct mep_config mep_configs[] = {
5958 #ifdef COPROC_SELECTION_TABLE
5959   COPROC_SELECTION_TABLE,
5964 /* Initialize the global intrinsics variables above.  */
5967 mep_init_intrinsics (void)
5971   /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
/* Default to the first table entry, then override it if -mconfig=
   names a known configuration.  */
5972   mep_selected_isa = mep_configs[0].isa;
5973   if (mep_config_string != 0)
5974     for (i = 0; mep_configs[i].config_name; i++)
5975       if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
5977 	  mep_selected_isa = mep_configs[i].isa;
5981   /* Assume all intrinsics are unavailable.  */
5982   for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
5983     mep_intrinsic_insn[i] = -1;
5985   /* Build up the global intrinsic tables.  */
5986   for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5987     if ((cgen_insns[i].isas & mep_selected_isa) != 0)
/* Chain earlier implementations of the same intrinsic behind the
   latest one; this maintains mep_intrinsic_chain[X] < X.  */
5989 	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
5990 	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
5992   /* See whether we can directly move values between one coprocessor
5993      register and another.  */
5994   for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
5995     if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
5996       mep_have_copro_copro_moves_p = true;
5998   /* See whether we can directly move values between core and
5999      coprocessor registers.  */
6000   mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
6001 				 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));
/* NOTE(review): an unconditional override follows — its guard (if any)
   is on an elided line.  */
6003   mep_have_core_copro_moves_p = 1;
6006 /* Declare all available intrinsic functions.  Called once only.  */
/* Cached tree type nodes used when building the builtin function
   signatures below; initialized by mep_init_builtins.  */
6008 static tree cp_data_bus_int_type_node;
6009 static tree opaque_vector_type_node;
6010 static tree v8qi_type_node;
6011 static tree v4hi_type_node;
6012 static tree v2si_type_node;
6013 static tree v8uqi_type_node;
6014 static tree v4uhi_type_node;
6015 static tree v2usi_type_node;
/* Map a cgen regnum-operand type to the tree type node used for the
   corresponding builtin argument/return value.  Falls through to
   void_type_node for unknown types.  (switch header elided.)  */
6018 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
6022     case cgen_regnum_operand_type_POINTER:	return ptr_type_node;
6023     case cgen_regnum_operand_type_LONG:		return long_integer_type_node;
6024     case cgen_regnum_operand_type_ULONG:	return long_unsigned_type_node;
6025     case cgen_regnum_operand_type_SHORT:	return short_integer_type_node;
6026     case cgen_regnum_operand_type_USHORT:	return short_unsigned_type_node;
6027     case cgen_regnum_operand_type_CHAR:		return char_type_node;
6028     case cgen_regnum_operand_type_UCHAR:	return unsigned_char_type_node;
6029     case cgen_regnum_operand_type_SI:		return intSI_type_node;
6030     case cgen_regnum_operand_type_DI:		return intDI_type_node;
6031     case cgen_regnum_operand_type_VECTOR:	return opaque_vector_type_node;
6032     case cgen_regnum_operand_type_V8QI:		return v8qi_type_node;
6033     case cgen_regnum_operand_type_V4HI:		return v4hi_type_node;
6034     case cgen_regnum_operand_type_V2SI:		return v2si_type_node;
6035     case cgen_regnum_operand_type_V8UQI:	return v8uqi_type_node;
6036     case cgen_regnum_operand_type_V4UHI:	return v4uhi_type_node;
6037     case cgen_regnum_operand_type_V2USI:	return v2usi_type_node;
6038     case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
6040   return void_type_node;
/* TARGET_INIT_BUILTINS hook: build the vector/coprocessor type nodes,
   push typedef-style declarations for them into the language scope,
   and register one builtin function per available non-GROUP_KNOWN_CODE
   intrinsic.  */
6045 mep_init_builtins (void)
/* cp_data_bus_int matches the width of the coprocessor data bus.  */
6049   if (TARGET_64BIT_CR_REGS)
6050     cp_data_bus_int_type_node = long_long_integer_type_node;
6052     cp_data_bus_int_type_node = long_integer_type_node;
6054   opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
6055   v8qi_type_node = build_vector_type (intQI_type_node, 8);
6056   v4hi_type_node = build_vector_type (intHI_type_node, 4);
6057   v2si_type_node = build_vector_type (intSI_type_node, 2);
6058   v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
6059   v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
6060   v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
/* Expose the types to source code under "cp_*" names.  */
6062   (*lang_hooks.decls.pushdecl)
6063     (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
6064 		 cp_data_bus_int_type_node));
6066   (*lang_hooks.decls.pushdecl)
6067     (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
6068 		 opaque_vector_type_node));
6070   (*lang_hooks.decls.pushdecl)
6071     (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
6073   (*lang_hooks.decls.pushdecl)
6074     (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
6076   (*lang_hooks.decls.pushdecl)
6077     (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
6080   (*lang_hooks.decls.pushdecl)
6081     (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
6083   (*lang_hooks.decls.pushdecl)
6084     (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
6086   (*lang_hooks.decls.pushdecl)
6087     (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
6090   /* Intrinsics like mep_cadd3 are implemented with two groups of
6091      instructions, one which uses UNSPECs and one which uses a specific
6092      rtl code such as PLUS.  Instructions in the latter group belong
6093      to GROUP_KNOWN_CODE.
6095      In such cases, the intrinsic will have two entries in the global
6096      tables above.  The unspec form is accessed using builtin functions
6097      while the specific form is accessed using the mep_* enum in
6100      The idea is that __cop arithmetic and builtin functions have
6101      different optimization requirements.  If mep_cadd3() appears in
6102      the source code, the user will surely expect gcc to use cadd3
6103      rather than a work-alike such as add3.  However, if the user
6104      just writes "a + b", where a or b are __cop variables, it is
6105      reasonable for gcc to choose a core instruction rather than
6106      cadd3 if it believes that is more optimal.  */
6107   for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6108     if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
6109 	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
6111 	tree ret_type = void_type_node;
/* Skip duplicates: only register each intrinsic once.  */
6114 	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
6117 	if (cgen_insns[i].cret_p)
6118 	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
/* NOTE(review): argument types are on elided lines; only the return
   type is visible here.  */
6120 	bi_type = build_function_type_list (ret_type, NULL_TREE);
6121 	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
6123 			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
6127 /* Report the unavailability of the given intrinsic.  */
/* Emits at most one error per intrinsic per compilation (tracked in
   ALREADY_REPORTED_P), choosing the message by *why* it is
   unavailable: not in this configuration, VLIW-only, or
   non-VLIW-only.  */
6131 mep_intrinsic_unavailable (int intrinsic)
6133   static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6135   if (already_reported_p[intrinsic])
6138   if (mep_intrinsic_insn[intrinsic] < 0)
6139     error ("coprocessor intrinsic %qs is not available in this configuration",
6140 	   cgen_intrinsics[intrinsic]);
6141   else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6142     error ("%qs is not available in VLIW functions",
6143 	   cgen_intrinsics[intrinsic]);
6145     error ("%qs is not available in non-VLIW functions",
6146 	   cgen_intrinsics[intrinsic]);
6148   already_reported_p[intrinsic] = 1;
6153 /* See if any implementation of INTRINSIC is available to the
6154    current function.  If so, store the most general implementation
6155    in *INSN_PTR and return true.  Return false otherwise.  */
/* NOTE(review): the parameters are marked ATTRIBUTE_UNUSED yet used
   below — presumably the body is conditionally compiled (the #ifdef
   lines are elided), so the attribute covers the empty variant.  */
6158 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
/* Walk the chain of implementations until one is enabled for the
   current group (VLIW / non-VLIW).  */
6162   i = mep_intrinsic_insn[intrinsic];
6163   while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6164     i = mep_intrinsic_chain[i];
6168   *insn_ptr = &cgen_insns[i];
6175 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6176    If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6177    try using a work-alike instead.  In this case, the returned insn
6178    may have three operands rather than two.  */
6181 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6185   if (intrinsic == mep_cmov)
/* Try each CR<-CR move work-alike in turn; the successful-return
   statement inside the loop is on an elided line.  */
6187       for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6188 	if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6192   return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6196 /* If ARG is a register operand that is the same size as MODE, convert it
6197    to MODE using a subreg.  Otherwise return ARG as-is.  */
6200 mep_convert_arg (enum machine_mode mode, rtx arg)
6202   if (GET_MODE (arg) != mode
6203       && register_operand (arg, VOIDmode)
6204       && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6205     return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
/* The fall-through "return arg;" is on an elided line.  */
6210 /* Apply regnum conversions to ARG using the description given by REGNUM.
6211    Return the new argument on success and null on failure.  */
6214 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
/* count == 0 means no conversion applies; the plain-return statement
   is on an elided line.  */
6216   if (regnum->count == 0)
/* A register-number operand must be a literal in [0, count).  */
6219   if (GET_CODE (arg) != CONST_INT
6221       || INTVAL (arg) >= regnum->count)
/* Map the literal index onto the hard register BASE + index.  */
6224   return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6228 /* Try to make intrinsic argument ARG match the given operand.
6229    UNSIGNED_P is true if the argument has an unsigned type.  */
/* Returns the legitimized rtx, or (on elided failure paths) null when
   ARG cannot be made to satisfy the operand's predicate.  */
6232 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6235   if (GET_CODE (arg) == CONST_INT)
6237       /* CONST_INTs can only be bound to integer operands.  */
6238       if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6241   else if (GET_CODE (arg) == CONST_DOUBLE)
6242     /* These hold vector constants.  */;
6243   else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6245       /* If the argument is a different size from what's expected, we must
6246 	 have a value in the right mode class in order to convert it.  */
6247       if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6250       /* If the operand is an rvalue, promote or demote it to match the
6251 	 operand's size.  This might not need extra instructions when
6252 	 ARG is a register value.  */
6253       if (operand->constraint[0] != '=')
6254 	arg = convert_to_mode (operand->mode, arg, unsigned_p);
6257   /* If the operand is an lvalue, bind the operand to a new register.
6258      The caller will copy this value into ARG after the main
6259      instruction.  By doing this always, we produce slightly more
6261   /* But not for control registers.  */
6262   if (operand->constraint[0] == '='
6264 	  || ! (CONTROL_REGNO_P (REGNO (arg))
6265 		|| CCR_REGNO_P (REGNO (arg))
6266 		|| CR_REGNO_P (REGNO (arg)))
6268     return gen_reg_rtx (operand->mode);
6270   /* Try simple mode punning.  */
6271   arg = mep_convert_arg (operand->mode, arg);
6272   if (operand->predicate (arg, operand->mode))
6275   /* See if forcing the argument into a register will make it match.  */
6276   if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6277     arg = force_reg (operand->mode, arg);
6279     arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6280   if (operand->predicate (arg, operand->mode))
6287 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6288    function FNNAME.  OPERAND describes the operand to which ARGNUM
/* NOTE(review): comment tail elided.  Produces a range or alignment
   diagnostic for out-of-range CONST_INT arguments, otherwise a
   generic incompatible-type error.  */
6292 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6293 		      int argnum, tree fnname)
6297   if (GET_CODE (arg) == CONST_INT)
/* Match the failed predicate against the table of cgen immediate
   predicates so we can say exactly why the constant was rejected.  */
6298     for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6299       if (operand->predicate == cgen_immediate_predicates[i].predicate)
6301 	  const struct cgen_immediate_predicate *predicate;
6302 	  HOST_WIDE_INT argval;
6304 	  predicate = &cgen_immediate_predicates[i];
6305 	  argval = INTVAL (arg);
6306 	  if (argval < predicate->lower || argval >= predicate->upper)
6307 	    error ("argument %d of %qE must be in the range %d...%d",
6308 		   argnum, fnname, predicate->lower, predicate->upper - 1);
/* In range but misaligned.  */
6310 	    error ("argument %d of %qE must be a multiple of %d",
6311 		   argnum, fnname, predicate->align);
6315   error ("incompatible type for argument %d of %qE", argnum, fnname);
/* TARGET_EXPAND_BUILTIN hook: expand a call to a MeP intrinsic builtin
   into RTL.  Looks up the cgen instruction for the builtin, evaluates
   and legitimizes the arguments, emits the insn, and copies lvalue
   operands back to their destinations.  Many early-return/error-return
   statements are on elided lines.  */
6319 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6320 		    rtx subtarget ATTRIBUTE_UNUSED,
6321 		    enum machine_mode mode ATTRIBUTE_UNUSED,
6322 		    int ignore ATTRIBUTE_UNUSED)
6324   rtx pat, op[10], arg[10];
6326   int opindex, unsigned_p[10];
6328   unsigned int n_args;
6330   const struct cgen_insn *cgen_insn;
6331   const struct insn_data_d *idata;
6332   unsigned int first_arg = 0;
6333   unsigned int builtin_n_args;
6335   fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6336   fnname = DECL_NAME (fndecl);
6338   /* Find out which instruction we should emit.  Note that some coprocessor
6339      intrinsics may only be available in VLIW mode, or only in normal mode.  */
6340   if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6342       mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6345   idata = &insn_data[cgen_insn->icode];
6347   builtin_n_args = cgen_insn->num_args;
/* cret_p: the insn writes a result operand; that operand comes before
   the user-visible arguments (FIRST_ARG bookkeeping is elided).  */
6349   if (cgen_insn->cret_p)
6351       if (cgen_insn->cret_p > 1)
6354 	mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6358   /* Evaluate each argument.  */
6359   n_args = call_expr_nargs (exp);
6361   if (n_args < builtin_n_args)
6363       error ("too few arguments to %qE", fnname);
6366   if (n_args > builtin_n_args)
6368       error ("too many arguments to %qE", fnname);
6372   for (a = first_arg; a < builtin_n_args + first_arg; a++)
6376       args = CALL_EXPR_ARG (exp, a - first_arg);
/* Reference (pointer) operands must be literal &object addresses so
   the pointed-to object can be bound directly to the insn.  */
6381       if (cgen_insn->regnums[a].reference_p)
6383 	  if (TREE_CODE (value) != ADDR_EXPR)
6386 	      error ("argument %d of %qE must be an address", a+1, fnname);
6389 	  value = TREE_OPERAND (value, 0);
6393       /* If the argument has been promoted to int, get the unpromoted
6394 	 value.  This is necessary when sub-int memory values are bound
6395 	 to reference parameters.  */
6396       if (TREE_CODE (value) == NOP_EXPR
6397 	  && TREE_TYPE (value) == integer_type_node
6398 	  && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6399 	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6400 	      < TYPE_PRECISION (TREE_TYPE (value))))
6401 	value = TREE_OPERAND (value, 0);
6403       /* If the argument has been promoted to double, get the unpromoted
6404 	 SFmode value.  This is necessary for FMAX support, for example.  */
6405       if (TREE_CODE (value) == NOP_EXPR
6406 	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6407 	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6408 	  && TYPE_MODE (TREE_TYPE (value)) == DFmode
6409 	  && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6410 	value = TREE_OPERAND (value, 0);
6412       unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6413       arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6414       arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6415       if (cgen_insn->regnums[a].reference_p)
6417 	  tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6418 	  enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6420 	  arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
/* mep_convert_regnum returned null: the register-number literal was
   out of range.  */
6424 	  error ("argument %d of %qE must be in the range %d...%d",
6425 		 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
/* Allocate result registers for the hidden return operand(s); reuse
   TARGET when it already has the right mode.  */
6430   for (a = 0; a < first_arg; a++)
6432       if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6435 	arg[a] = gen_reg_rtx (idata->operand[0].mode);
6438   /* Convert the arguments into a form suitable for the intrinsic.
6439      Report an error if this isn't possible.  */
6440   for (opindex = 0; opindex < idata->n_operands; opindex++)
6442       a = cgen_insn->op_mapping[opindex];
6443       op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6444 					arg[a], unsigned_p[a]);
6445       if (op[opindex] == 0)
6447 	  mep_incompatible_arg (&idata->operand[opindex],
6448 				arg[a], a + 1 - first_arg, fnname);
6453   /* Emit the instruction.  */
6454   pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6455 		       op[5], op[6], op[7], op[8], op[9]);
6457   if (GET_CODE (pat) == SET
6458       && GET_CODE (SET_DEST (pat)) == PC
6459       && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6460     emit_jump_insn (pat);
6464   /* Copy lvalues back to their final locations.  */
6465   for (opindex = 0; opindex < idata->n_operands; opindex++)
6466     if (idata->operand[opindex].constraint[0] == '=')
6468 	a = cgen_insn->op_mapping[opindex];
6471 	if (GET_MODE_CLASS (GET_MODE (arg[a]))
6472 	    != GET_MODE_CLASS (GET_MODE (op[opindex])))
6473 	  emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6477 	    /* First convert the operand to the right mode, then copy it
6478 	       into the destination.  Doing the conversion as a separate
6479 	       step (rather than using convert_move) means that we can
6480 	       avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6481 	       refer to the same register.  */
6482 	    op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6483 					   op[opindex], unsigned_p[a]);
6484 	    if (!rtx_equal_p (arg[a], op[opindex]))
6485 	      emit_move_insn (arg[a], op[opindex]);
6490   if (first_arg > 0 && target && target != op[0])
6492       emit_move_insn (target, op[0]);
/* TARGET_VECTOR_MODE_SUPPORTED_P hook; the body (a constant return,
   on elided lines) ignores MODE entirely.  */
6499 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6504 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6505    a global register.  */
/* for_each_rtx callback; several case labels and return statements of
   the switch are on elided lines.  */
6508 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6516   switch (GET_CODE (x))
/* SUBREG of a hard register: test the underlying hard regno.  */
6519       if (REG_P (SUBREG_REG (x)))
6521 	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6522 	      && global_regs[subreg_regno (x)])
6530       if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6544       /* A non-constant call might use a global register.  */
6554 /* Returns nonzero if X mentions a global register.  */
6557 global_reg_mentioned_p (rtx x)
/* For non-const/pure calls, also scan CALL_INSN_FUNCTION_USAGE; the
   surrounding INSN_P handling is on elided lines.  */
6563 	  if (! RTL_CONST_OR_PURE_CALL_P (x))
6565 	  x = CALL_INSN_FUNCTION_USAGE (x);
6573   return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6575 /* Scheduling hooks for VLIW mode.
6577 Conceptually this is very simple: we have a two-pack architecture
6578 that takes one core insn and one coprocessor insn to make up either
6579 a 32- or 64-bit instruction word (depending on the option bit set in
6580 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6581 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6582 and one 48-bit cop insn or two 32-bit core/cop insns.
6584 In practice, instruction selection will be a bear. Consider in
6585 VL64 mode the following insns
6590 these cannot pack, since the add is a 16-bit core insn and cmov
6591 is a 32-bit cop insn. However,
6596 packs just fine. For good VLIW code generation in VL64 mode, we
6597 will have to have 32-bit alternatives for many of the common core
6598 insns. Not implemented. */
/* TARGET_SCHED_ADJUST_COST hook: tweak the scheduler's dependence cost
   between DEP_INSN and INSN.  Non-true dependencies and unrecognized
   insns get special handling; the actual returned adjustments are
   partly on elided lines.  */
6601 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6605   if (REG_NOTE_KIND (link) != 0)
6607       /* See whether INSN and DEP_INSN are intrinsics that set the same
6608 	 hard register.  If so, it is more important to free up DEP_INSN
6609 	 than it is to free up INSN.
6611 	 Note that intrinsics like mep_mulr are handled differently from
6612 	 the equivalent mep.md patterns.  In mep.md, if we don't care
6613 	 about the value of $lo and $hi, the pattern will just clobber
6614 	 the registers, not set them.  Since clobbers don't count as
6615 	 output dependencies, it is often possible to reorder two mulrs,
6618 	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6619 	 so any pair of mep_mulr()s will be inter-dependent.  We should
6620 	 therefore give the first mep_mulr() a higher priority.  */
6621       if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6622 	  && global_reg_mentioned_p (PATTERN (insn))
6623 	  && global_reg_mentioned_p (PATTERN (dep_insn)))
6626       /* If the dependence is an anti or output dependence, assume it
6631   /* If we can't recognize the insns, we can't really do anything.  */
6632   if (recog_memoized (dep_insn) < 0)
6635   /* The latency attribute doesn't apply to MeP-h1: we use the stall
6636      attribute instead.  */
6639       cost_specified = get_attr_latency (dep_insn);
6640       if (cost_specified != 0)
6641 	return cost_specified;
6647 /* ??? We don't properly compute the length of a load/store insn,
6648    taking into account the addressing mode.  */
/* TARGET_SCHED_ISSUE_RATE hook: IVC2 can issue 3 insns per cycle,
   other MeP configurations 2.  */
6651 mep_issue_rate (void)
6653   return TARGET_IVC2 ? 3 : 2;
6656 /* Return true if function DECL was declared with the vliw attribute.  */
6659 mep_vliw_function_p (tree decl)
6661   return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
/* Scan the scheduler's ready list (highest priority last) for an insn
   that issues to SLOT with byte length LENGTH; the found-return and
   not-found-return statements are on elided lines.  */
6665 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6669   for (i = nready - 1; i >= 0; --i)
6671       rtx insn = ready[i];
6672       if (recog_memoized (insn) >= 0
6673 	  && get_attr_slot (insn) == slot
6674 	  && get_attr_length (insn) == length)
/* Move INSN to the end (highest-priority position) of the ready list
   by shifting the following entries down over its old slot.  The
   final store of INSN and the not-found assertion are on elided
   lines.  */
6682 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6686   for (i = 0; i < nready; ++i)
6687     if (ready[i] == insn)
6689 	for (; i < nready - 1; ++i)
6690 	  ready[i] = ready[i + 1];
/* Debug helper: print one line describing INSN (uid, slot attribute,
   pattern name) to the scheduler DUMP file.  The fprintf's argument
   list continues on elided lines.  */
6699 mep_print_sched_insn (FILE *dump, rtx insn)
6701   const char *slots = "none";
6702   const char *name = NULL;
6706   if (GET_CODE (PATTERN (insn)) == SET
6707       || GET_CODE (PATTERN (insn)) == PARALLEL)
6709       switch (get_attr_slots (insn))
6711 	case SLOTS_CORE: slots = "core"; break;
6712 	case SLOTS_C3: slots = "c3"; break;
6713 	case SLOTS_P0: slots = "p0"; break;
6714 	case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6715 	case SLOTS_P0_P1: slots = "p0,p1"; break;
6716 	case SLOTS_P0S: slots = "p0s"; break;
6717 	case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6718 	case SLOTS_P1: slots = "p1"; break;
/* Unknown slots attribute: print its raw numeric value instead.  */
6720 	  sprintf(buf, "%d", get_attr_slots (insn));
6725   if (GET_CODE (PATTERN (insn)) == USE)
6728   code = INSN_CODE (insn);
6730     name = get_insn_name (code);
6735 	   "insn %4d %4d %8s %s\n",
/* TARGET_SCHED_REORDER hook: for VLIW (non-IVC2) functions, try to
   move a core/cop pair whose combined length fills the bundle to the
   front of the ready list so they are issued together.  Several
   return statements are on elided lines.  */
6743 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6744 		   int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6745 		   int *pnready, int clock ATTRIBUTE_UNUSED)
6747   int nready = *pnready;
6748   rtx core_insn, cop_insn;
6751   if (dump && sched_verbose > 1)
6753       fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6754       for (i=0; i<nready; i++)
6755 	mep_print_sched_insn (dump, ready[i]);
6756       fprintf (dump, "\n");
6759   if (!mep_vliw_function_p (cfun->decl))
6764   /* IVC2 uses a DFA to determine what's ready and what's not. */
6768   /* We can issue either a core or coprocessor instruction.
6769      Look for a matched pair of insns to reorder.  If we don't
6770      find any, don't second-guess the scheduler's priorities.  */
/* VL32: 2-byte core + 2-byte cop; VL64: 2-byte core + 6-byte cop,
   or 4 + 4 (tried next).  */
6772   if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6773       && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6774 					  TARGET_OPT_VL64 ? 6 : 2)))
6776   else if (TARGET_OPT_VL64
6777 	   && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6778 	   && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6781     /* We didn't find a pair.  Issue the single insn at the head
6782        of the ready list.  */
6785   /* Reorder the two insns first.  */
6786   mep_move_ready_insn (ready, nready, core_insn);
6787   mep_move_ready_insn (ready, nready - 1, cop_insn);
6791 /* A for_each_rtx callback.  Return true if *X is a register that is
6792    set by insn PREV.  */
6795 mep_store_find_set (rtx *x, void *prev)
6797   return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6800 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6801    not the containing insn.  */
6804 mep_store_data_bypass_1 (rtx prev, rtx pat)
6806   /* Cope with intrinsics like swcpa.  */
6807   if (GET_CODE (pat) == PARALLEL)
/* A PARALLEL bypasses if any of its elements does (the returns are on
   elided lines).  */
6811       for (i = 0; i < XVECLEN (pat, 0); i++)
6812 	if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6818   /* Check for some sort of store.  */
6819   if (GET_CODE (pat) != SET
6820       || GET_CODE (SET_DEST (pat)) != MEM)
6823   /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6824      The first operand to the unspec is the store data and the other operands
6825      are used to calculate the address.  */
6826   if (GET_CODE (SET_SRC (pat)) == UNSPEC)
/* Scan only the address operands (index 1 onward): a true dependence
   on the address defeats the bypass; the store data (index 0) is
   exempt.  */
6831       src = SET_SRC (pat);
6832       for (i = 1; i < XVECLEN (src, 0); i++)
6833 	if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6839   /* Otherwise just check that PREV doesn't modify any register mentioned
6840      in the memory destination.  */
6841   return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6844 /* Return true if INSN is a store instruction and if the store address
6845    has no true dependence on PREV.  */
6848 mep_store_data_bypass_p (rtx prev, rtx insn)
6850   return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6853 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p.  Return 1 if *X
6854    is a register other than LO or HI and if PREV sets *X.  */
6857 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
/* The REG_P (*x) test is on an elided line.  */
6860 	  && REGNO (*x) != LO_REGNO
6861 	  && REGNO (*x) != HI_REGNO
6862 	  && reg_set_p (*x, (const_rtx) prev));
6865 /* Return true if, apart from HI/LO, there are no true dependencies
6866    between multiplication instructions PREV and INSN.  */
6869 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6873   pat = PATTERN (insn);
/* For a PARALLEL, only the first SET (the multiply itself) matters.  */
6874   if (GET_CODE (pat) == PARALLEL)
6875     pat = XVECEXP (pat, 0, 0);
6876   return (GET_CODE (pat) == SET
6877 	  && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6880 /* Return true if INSN is an ldc instruction that issues to the
6881    MeP-h1 integer pipeline.  This is true for instructions that
6882    read from PSW, LP, SAR, HI and LO.  */
6885 mep_ipipe_ldc_p (rtx insn)
6889   pat = PATTERN (insn);
6891   /* Cope with intrinsics that set both a hard register and its shadow.
6892      The set of the hard register comes first.  */
6893   if (GET_CODE (pat) == PARALLEL)
6894     pat = XVECEXP (pat, 0, 0);
6896   if (GET_CODE (pat) == SET)
6898       src = SET_SRC (pat);
6900       /* Cope with intrinsics.  The first operand to the unspec is
6901 	 the source register.  */
6902       if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6903 	src = XVECEXP (src, 0, 0);
/* The REG_P test on SRC and the switch's case labels / return values
   are on elided lines.  */
6906 	  switch (REGNO (src))
6919 /* Create a VLIW bundle from core instruction CORE and coprocessor
6920    instruction COP.  COP always satisfies INSN_P, but CORE can be
6921    either a new pattern or an existing instruction.
6923    Emit the bundle in place of COP and return it.  */
6926 mep_make_bundle (rtx core, rtx cop)
6930   /* If CORE is an existing instruction, remove it, otherwise put
6931      the new pattern in an INSN harness.  */
6935     core = make_insn_raw (core);
6937   /* Generate the bundle sequence and replace COP with it.  */
6938   insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6939   insn = emit_insn_after (insn, cop);
6942   /* Set up the links of the insns inside the SEQUENCE.  */
6943   PREV_INSN (core) = PREV_INSN (insn);
6944   NEXT_INSN (core) = cop;
6945   PREV_INSN (cop) = core;
6946   NEXT_INSN (cop) = NEXT_INSN (insn);
6948   /* Set the VLIW flag for the coprocessor instruction.  */
/* BImode on an insn is (ab)used here as the "second slot of a VLIW
   bundle" marker — see mep_bundle_insns.  */
6949   PUT_MODE (core, VOIDmode);
6950   PUT_MODE (cop, BImode);
6952   /* Derive a location for the bundle.  Individual instructions cannot
6953      have their own location because there can be no assembler labels
6954      between CORE and COP.  */
6955   INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
6956   INSN_LOCATOR (core) = 0;
6957   INSN_LOCATOR (cop) = 0;
6962 /* A helper routine for mep_insn_dependent_p called through note_stores.  */
/* DATA points at the rtx being tested; the store-clearing logic (set
   *PINSN to null on a match) is partly on elided lines.  */
6965 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6967   rtx * pinsn = (rtx *) data;
6969   if (*pinsn && reg_mentioned_p (x, *pinsn))
6973 /* Return true if anything in insn X is (anti,output,true) dependent on
6974    anything in insn Y.  */
/* TMP is set to the other insn's pattern and cleared by the
   note_stores callback on a hit; a cleared TMP means "dependent"
   (the return statements are on elided lines).  */
6977 mep_insn_dependent_p (rtx x, rtx y)
6981   gcc_assert (INSN_P (x));
6982   gcc_assert (INSN_P (y));
6985   note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6986   if (tmp == NULL_RTX)
6990   note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6991   if (tmp == NULL_RTX)
/* Return nonzero if INSN issues to the core (not coprocessor) slot.
   USE patterns do not count; the return statements are on elided
   lines.  */
6998 core_insn_p (rtx insn)
7000   if (GET_CODE (PATTERN (insn)) == USE)
7002   if (get_attr_slot (insn) == SLOT_CORE)
7007 /* Mark coprocessor instructions that can be bundled together with
7008    the immediately preceding core instruction.  This is later used
7009    to emit the "+" that tells the assembler to create a VLIW insn.
7011    For unbundled insns, the assembler will automatically add coprocessor
7012    nops, and 16-bit core nops.  Due to an apparent oversight in the
7013    spec, the assembler will _not_ automatically add 32-bit core nops,
7014    so we have to emit those here.
7016    Called from mep_insn_reorg.  */
7019 mep_bundle_insns (rtx insns)
7021   rtx insn, last = NULL_RTX, first = NULL_RTX;
7022   int saw_scheduling = 0;
7024   /* Only do bundling if we're in vliw mode.  */
7025   if (!mep_vliw_function_p (cfun->decl))
7028   /* The first insn in a bundle are TImode, the remainder are
7029      VOIDmode.  After this function, the first has VOIDmode and the
7030      rest have BImode.  */
7032   /* Note: this doesn't appear to be true for JUMP_INSNs.  */
7034   /* First, move any NOTEs that are within a bundle, to the beginning
7036   for (insn = insns; insn ; insn = NEXT_INSN (insn))
7038       if (NOTE_P (insn) && first)
7039 	/* Don't clear FIRST.  */;
7041       else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
7044       else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
7048 	  /* INSN is part of a bundle; FIRST is the first insn in that
7049 	     bundle.  Move all intervening notes out of the bundle.
7050 	     In addition, since the debug pass may insert a label
7051 	     whenever the current line changes, set the location info
7052 	     for INSN to match FIRST.  */
7054 	  INSN_LOCATOR (insn) = INSN_LOCATOR (first);
7056 	  note = PREV_INSN (insn);
7057 	  while (note && note != first)
7059 	      prev = PREV_INSN (note);
/* Manual doubly-linked-list splice: unlink NOTE and reinsert it just
   before FIRST.  */
7063 	      /* Remove NOTE from here... */
7064 	      PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
7065 	      NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
7066 	      /* ...and put it in here.  */
7067 	      NEXT_INSN (note) = first;
7068 	      PREV_INSN (note) = PREV_INSN (first);
7069 	      NEXT_INSN (PREV_INSN (note)) = note;
7070 	      PREV_INSN (NEXT_INSN (note)) = note;
7077       else if (!NONJUMP_INSN_P (insn))
7081   /* Now fix up the bundles.  */
7082   for (insn = insns; insn ; insn = NEXT_INSN (insn))
7087       if (!NONJUMP_INSN_P (insn))
7093       /* If we're not optimizing enough, there won't be scheduling
7094 	 info.  We detect that here.  */
7095       if (GET_MODE (insn) == TImode)
7097       if (!saw_scheduling)
7102 	  rtx core_insn = NULL_RTX;
7104 	  /* IVC2 slots are scheduled by DFA, so we just accept
7105 	     whatever the scheduler gives us.  However, we must make
7106 	     sure the core insn (if any) is the first in the bundle.
7107 	     The IVC2 assembler can insert whatever NOPs are needed,
7108 	     and allows a COP insn to be first.  */
7110 	  if (NONJUMP_INSN_P (insn)
7111 	      && GET_CODE (PATTERN (insn)) != USE
7112 	      && GET_MODE (insn) == TImode)
/* Walk the VOIDmode continuation insns of this bundle looking for the
   core insn (loop header partially elided).  */
7116 		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
7117 		     && NONJUMP_INSN_P (NEXT_INSN (last));
7118 		   last = NEXT_INSN (last))
7120 		  if (core_insn_p (last))
7123 	      if (core_insn_p (last))
7126 	      if (core_insn && core_insn != insn)
7128 		  /* Swap core insn to first in the bundle.  */
7130 		  /* Remove core insn.  */
7131 		  if (PREV_INSN (core_insn))
7132 		    NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7133 		  if (NEXT_INSN (core_insn))
7134 		    PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7136 		  /* Re-insert core insn.  */
7137 		  PREV_INSN (core_insn) = PREV_INSN (insn);
7138 		  NEXT_INSN (core_insn) = insn;
7140 		  if (PREV_INSN (core_insn))
7141 		    NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7142 		  PREV_INSN (insn) = core_insn;
/* TImode marks a bundle head, VOIDmode a continuation (see comment at
   the top of the function).  */
7144 		  PUT_MODE (core_insn, TImode);
7145 		  PUT_MODE (insn, VOIDmode);
7149 	  /* The first insn has TImode, the rest have VOIDmode */
7150 	  if (GET_MODE (insn) == TImode)
7151 	    PUT_MODE (insn, VOIDmode);
7153 	    PUT_MODE (insn, BImode);
/* Non-IVC2 path: pair each COP insn with the preceding core insn, or
   pad with an appropriately-sized core nop.  */
7157 	  PUT_MODE (insn, VOIDmode);
7158 	  if (recog_memoized (insn) >= 0
7159 	      && get_attr_slot (insn) == SLOT_COP)
/* Cannot pair when the previous insn is absent/unrecognized, is not a
   core insn, does not complete the bundle length, or conflicts.  */
7161 	      if (GET_CODE (insn) == JUMP_INSN
7163 		  || recog_memoized (last) < 0
7164 		  || get_attr_slot (last) != SLOT_CORE
7165 		  || (get_attr_length (insn)
7166 		      != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7167 		  || mep_insn_dependent_p (insn, last))
7169 		  switch (get_attr_length (insn))
7174 		      insn = mep_make_bundle (gen_nop (), insn);
7177 		      if (TARGET_OPT_VL64)
7178 			insn = mep_make_bundle (gen_nop32 (), insn);
7181 		      if (TARGET_OPT_VL64)
7182 			error ("2 byte cop instructions are"
7183 			       " not allowed in 64-bit VLIW mode");
7185 			insn = mep_make_bundle (gen_nop (), insn);
7188 		      error ("unexpected %d byte cop instruction",
7189 			     get_attr_length (insn));
7194 		insn = mep_make_bundle (last, insn);
7202 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7203 Return true on success. This function can fail if the intrinsic
7204 is unavailable or if the operands don't satisfy their predicates. */
/* NOTE(review): several interior lines of this function are elided in this
   dump (return type, local declarations of NEWOP and I, the early-return
   paths, and the closing braces) — confirm against the full file.  */
7207 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7209 const struct cgen_insn *cgen_insn;
7210 const struct insn_data_d *idata;
/* Map the intrinsic number to its cgen insn record; bail out (presumably
   returning false) when the intrinsic is unavailable.  */
7214 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
/* Fetch the generated insn_data entry for the selected icode.  */
7217 idata = &insn_data[cgen_insn->icode];
/* Convert each operand to the mode the pattern expects, then check it
   against the pattern's predicate; a predicate failure aborts emission.  */
7218 for (i = 0; i < idata->n_operands; i++)
7220 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7221 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
/* All predicates passed: emit the insn.  genfun is called with a fixed
   nine-operand arity; unused trailing slots are ignored by the generator.  */
7225 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7226 newop[3], newop[4], newop[5],
7227 newop[6], newop[7], newop[8]));
7233 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7234 OPERANDS[0]. Report an error if the instruction could not
7235 be synthesized. OPERANDS[1] is a register_operand. For sign
7236 and zero extensions, it may be smaller than SImode. */
/* NOTE(review): the body of this function is elided in this dump; both
   parameters are marked ATTRIBUTE_UNUSED, which suggests the body may be
   conditionally compiled or stubbed — confirm against the full file.  */
7239 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7240 rtx * operands ATTRIBUTE_UNUSED)
7246 /* Likewise, but apply a binary operation to OPERANDS[1] and
7247 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7248 can be a general_operand.
7250 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7251 third operand. REG and REG3 take register operands only. */
/* NOTE(review): the body of this function is elided in this dump; all
   parameters are marked ATTRIBUTE_UNUSED, suggesting a conditionally
   compiled or stubbed body — confirm against the full file.  */
7254 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7255 int ATTRIBUTE_UNUSED immediate3,
7256 int ATTRIBUTE_UNUSED reg,
7257 int ATTRIBUTE_UNUSED reg3,
7258 rtx * operands ATTRIBUTE_UNUSED)
/* Implement TARGET_RTX_COSTS: estimate the cost of rtx X, writing the
   result to *TOTAL.  NOTE(review): the enclosing switch/case scaffolding
   and return statements are elided in this dump — confirm which rtx codes
   each assignment below belongs to against the full file.  */
7264 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
7265 int opno ATTRIBUTE_UNUSED, int *total,
7266 bool ATTRIBUTE_UNUSED speed_t)
/* Constant-cost tiers by magnitude: small signed-8-bit-ish immediates are
   cheapest, 16-bit-representable values next.  NOTE(review): the bounds are
   asymmetric ('< 127' rather than '<= 127', and -32768..65535 mixes signed
   and unsigned 16-bit ranges); this matches the MeP immediate encodings
   only approximately — it is a cost heuristic, not a legality check.  */
7271 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7273 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
/* When optimizing for size, treat (presumably symbolic/const) operands as
   free to encourage their use; otherwise cost one insn.  */
7280 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
/* Binary ops with a constant second operand are cheaper than the fully
   register-register form (the elided middle line presumably selects
   COSTS_N_INSNS (1) for the CONST_INT case).  */
7284 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7286 : COSTS_N_INSNS (2));
/* Implement TARGET_ADDRESS_COST.  NOTE(review): the body is elided in this
   dump; both parameters are ignored, so the cost is presumably a constant
   independent of the address form — confirm against the full file.  */
7293 mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
/* Implement TARGET_ASM_INIT_SECTIONS: create the MeP-specific output
   sections.  NOTE(review): the left-hand-side section variables for each
   assignment are elided in this dump; the target section is identified
   below from the assembler directive string only.  */
7299 mep_asm_init_sections (void)
/* .based: small writable data addressed via the tp/base pointer.  */
7302 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7303 "\t.section .based,\"aw\"");
/* .sbss: small zero-initialized (BSS) data.  */
7306 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7307 "\t.section .sbss,\"aw\"");
/* .sdata: small initialized data.  */
7310 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7311 "\t.section .sdata,\"aw\",@progbits");
/* .far: writable data outside the small-data reach.  */
7314 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7315 "\t.section .far,\"aw\"");
/* .farbss: far zero-initialized data.  */
7318 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7319 "\t.section .farbss,\"aw\"");
/* .frodata: far read-only data (no SECTION_WRITE flag).  */
7322 = get_unnamed_section (0, output_section_asm_op,
7323 "\t.section .frodata,\"a\"");
/* .srodata: small read-only data.  */
7326 = get_unnamed_section (0, output_section_asm_op,
7327 "\t.section .srodata,\"a\"");
/* .vtext: VLIW code; the 'v' section flag plus a .vliw directive switch
   the assembler into VLIW mode.  */
7330 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7331 "\t.section .vtext,\"axv\"\n\t.vliw");
/* .vftext: far VLIW code, same assembler mode switch.  */
7334 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7335 "\t.section .vftext,\"axv\"\n\t.vliw");
/* .ftext: far core-mode code; the .core directive switches the assembler
   back out of VLIW mode.  */
7338 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7339 "\t.section .ftext,\"ax\"\n\t.core");
7343 /* Initialize the GCC target structure. */
/* Each #undef/#define pair below overrides one hook in the default
   TARGET_INITIALIZER with the MeP implementation defined earlier in this
   file.  The pairs are grouped by theme below; the order of definition
   does not matter, only that each hook is defined before
   TARGET_INITIALIZER is expanded.  */
/* --- Attributes and inlining ------------------------------------- */
7345 #undef TARGET_ASM_FUNCTION_PROLOGUE
7346 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7347 #undef TARGET_ATTRIBUTE_TABLE
7348 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7349 #undef TARGET_COMP_TYPE_ATTRIBUTES
7350 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7351 #undef TARGET_INSERT_ATTRIBUTES
7352 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7353 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7354 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7355 #undef TARGET_CAN_INLINE_P
7356 #define TARGET_CAN_INLINE_P mep_can_inline_p
/* --- Sections and symbol encoding -------------------------------- */
7357 #undef TARGET_SECTION_TYPE_FLAGS
7358 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7359 #undef TARGET_ASM_NAMED_SECTION
7360 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
/* --- Builtins ----------------------------------------------------- */
7361 #undef TARGET_INIT_BUILTINS
7362 #define TARGET_INIT_BUILTINS mep_init_builtins
7363 #undef TARGET_EXPAND_BUILTIN
7364 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
/* --- Scheduling (VLIW bundling support) --------------------------- */
7365 #undef TARGET_SCHED_ADJUST_COST
7366 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7367 #undef TARGET_SCHED_ISSUE_RATE
7368 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7369 #undef TARGET_SCHED_REORDER
7370 #define TARGET_SCHED_REORDER mep_sched_reorder
/* --- Section selection / name encoding ---------------------------- */
7371 #undef TARGET_STRIP_NAME_ENCODING
7372 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7373 #undef TARGET_ASM_SELECT_SECTION
7374 #define TARGET_ASM_SELECT_SECTION mep_select_section
7375 #undef TARGET_ASM_UNIQUE_SECTION
7376 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7377 #undef TARGET_ENCODE_SECTION_INFO
7378 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
/* --- Calls, costs, and reorg -------------------------------------- */
7379 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
7380 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7381 #undef TARGET_RTX_COSTS
7382 #define TARGET_RTX_COSTS mep_rtx_cost
7383 #undef TARGET_ADDRESS_COST
7384 #define TARGET_ADDRESS_COST mep_address_cost
7385 #undef TARGET_MACHINE_DEPENDENT_REORG
7386 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
/* --- Argument passing and varargs --------------------------------- */
7387 #undef TARGET_SETUP_INCOMING_VARARGS
7388 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7389 #undef TARGET_PASS_BY_REFERENCE
7390 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
7391 #undef TARGET_FUNCTION_ARG
7392 #define TARGET_FUNCTION_ARG mep_function_arg
7393 #undef TARGET_FUNCTION_ARG_ADVANCE
7394 #define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
/* --- Miscellaneous target queries --------------------------------- */
7395 #undef TARGET_VECTOR_MODE_SUPPORTED_P
7396 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
7397 #undef TARGET_OPTION_OVERRIDE
7398 #define TARGET_OPTION_OVERRIDE mep_option_override
7399 #undef TARGET_ALLOCATE_INITIAL_VALUE
7400 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7401 #undef TARGET_ASM_INIT_SECTIONS
7402 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7403 #undef TARGET_RETURN_IN_MEMORY
7404 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7405 #undef TARGET_NARROW_VOLATILE_BITFIELD
7406 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
/* --- va_list / stdarg support ------------------------------------- */
7407 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
7408 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7409 #undef TARGET_BUILD_BUILTIN_VA_LIST
7410 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7411 #undef TARGET_EXPAND_BUILTIN_VA_START
7412 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7413 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
7414 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
/* --- Frame elimination, registers, trampolines, constants --------- */
7415 #undef TARGET_CAN_ELIMINATE
7416 #define TARGET_CAN_ELIMINATE mep_can_eliminate
7417 #undef TARGET_CONDITIONAL_REGISTER_USAGE
7418 #define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
7419 #undef TARGET_TRAMPOLINE_INIT
7420 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
7421 #undef TARGET_LEGITIMATE_CONSTANT_P
7422 #define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
/* Instantiate the target hook vector using the overrides defined above;
   this is the single definition of targetm for this back end.  */
7424 struct gcc_target targetm = TARGET_INITIALIZER;