1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Red Hat, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
47 #include "diagnostic-core.h"
48 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
56 /* Structure of this file:
58 + Command Line Option Support
59 + Pattern support - constraints, predicates, expanders
62 + Functions to save and restore machine-specific function data.
63 + Frame/Epilog/Prolog Related
65 + Function args in registers
66 + Handle pipeline hazards
69 + Machine-dependent Reorg
74 Symbols are encoded as @ <char> . <name> where <char> is one of these:
82 c - cb (control bus) */
/* Per-function, garbage-collected (GTY) machine-specific data for the
   MeP backend, created via init_machine_status / mep_init_machine_status.
   NOTE(review): this numbered listing elides interior lines (the embedded
   line numbers jump), so the struct body shown here is only partial.  */
84 struct GTY(()) machine_function
86 int mep_frame_pointer_needed;
94 /* Records __builtin_return address. */
98 int reg_save_slot[FIRST_PSEUDO_REGISTER];
99 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
101 /* 2 if the current function has an interrupt attribute, 1 if not, 0
102 if unknown. This is here because resource.c uses EPILOGUE_USES
104 int interrupt_handler;
106 /* Likewise, for disinterrupt attribute. */
107 int disable_interrupts;
109 /* Number of doloop tags used so far. */
112 /* True if the last tag was allocated to a doloop_end. */
113 bool doloop_tag_from_end;
115 /* True if reload changes $TP. */
116 bool reload_changes_tp;
118 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
119 We only set this if the function is an interrupt handler. */
120 int asms_without_operands;
/* True iff rtx X is a hard register in the MeP control-register range.  */
123 #define MEP_CONTROL_REG(x) \
124 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
/* Cached output sections for the MeP section attributes (based/tiny,
   far, vliw text, etc.).  Presumably initialized by the (elided)
   mep_asm_init_sections declared below -- TODO confirm.  */
126 static GTY(()) section * based_section;
127 static GTY(()) section * tinybss_section;
128 static GTY(()) section * far_section;
129 static GTY(()) section * farbss_section;
130 static GTY(()) section * frodata_section;
131 static GTY(()) section * srodata_section;
133 static GTY(()) section * vtext_section;
134 static GTY(()) section * vftext_section;
135 static GTY(()) section * ftext_section;
137 static void mep_set_leaf_registers (int);
138 static bool symbol_p (rtx);
139 static bool symbolref_p (rtx);
140 static void encode_pattern_1 (rtx);
141 static void encode_pattern (rtx);
142 static bool const_in_range (rtx, int, int);
143 static void mep_rewrite_mult (rtx, rtx);
144 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
145 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
146 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
147 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
148 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
149 static bool mep_nongeneral_reg (rtx);
150 static bool mep_general_copro_reg (rtx);
151 static bool mep_nonregister (rtx);
152 static struct machine_function* mep_init_machine_status (void);
153 static rtx mep_tp_rtx (void);
154 static rtx mep_gp_rtx (void);
155 static bool mep_interrupt_p (void);
156 static bool mep_disinterrupt_p (void);
157 static bool mep_reg_set_p (rtx, rtx);
158 static bool mep_reg_set_in_function (int);
159 static bool mep_interrupt_saved_reg (int);
160 static bool mep_call_saves_register (int);
162 static void add_constant (int, int, int, int);
163 static rtx maybe_dead_move (rtx, rtx, bool);
164 static void mep_reload_pointer (int, const char *);
165 static void mep_start_function (FILE *, HOST_WIDE_INT);
166 static bool mep_function_ok_for_sibcall (tree, tree);
167 static int unique_bit_in (HOST_WIDE_INT);
168 static int bit_size_for_clip (HOST_WIDE_INT);
169 static int bytesize (const_tree, enum machine_mode);
170 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
171 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
172 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
173 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
174 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
175 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
176 static bool mep_function_attribute_inlinable_p (const_tree);
177 static bool mep_can_inline_p (tree, tree);
178 static bool mep_lookup_pragma_disinterrupt (const char *);
179 static int mep_multiple_address_regions (tree, bool);
180 static int mep_attrlist_to_encoding (tree, tree);
181 static void mep_insert_attributes (tree, tree *);
182 static void mep_encode_section_info (tree, rtx, int);
183 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
184 static void mep_unique_section (tree, int);
185 static unsigned int mep_section_type_flags (tree, const char *, int);
186 static void mep_asm_named_section (const char *, unsigned int, tree);
187 static bool mep_mentioned_p (rtx, rtx, int);
188 static void mep_reorg_regmove (rtx);
189 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
190 static void mep_reorg_repeat (rtx);
191 static bool mep_invertable_branch_p (rtx);
192 static void mep_invert_branch (rtx, rtx);
193 static void mep_reorg_erepeat (rtx);
194 static void mep_jmp_return_reorg (rtx);
195 static void mep_reorg_addcombine (rtx);
196 static void mep_reorg (void);
197 static void mep_init_intrinsics (void);
198 static void mep_init_builtins (void);
199 static void mep_intrinsic_unavailable (int);
200 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
201 static bool mep_get_move_insn (int, const struct cgen_insn **);
202 static rtx mep_convert_arg (enum machine_mode, rtx);
203 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
204 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
205 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
206 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
207 static int mep_adjust_cost (rtx, rtx, rtx, int);
208 static int mep_issue_rate (void);
209 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
210 static void mep_move_ready_insn (rtx *, int, rtx);
211 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
212 static rtx mep_make_bundle (rtx, rtx);
213 static void mep_bundle_insns (rtx);
214 static bool mep_rtx_cost (rtx, int, int, int *, bool);
215 static int mep_address_cost (rtx, bool);
216 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
218 static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
220 static rtx mep_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
222 static void mep_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
224 static bool mep_vector_mode_supported_p (enum machine_mode);
225 static bool mep_handle_option (struct gcc_options *, struct gcc_options *,
226 const struct cl_decoded_option *, location_t);
227 static rtx mep_allocate_initial_value (rtx);
228 static void mep_asm_init_sections (void);
229 static int mep_comp_type_attributes (const_tree, const_tree);
230 static bool mep_narrow_volatile_bitfield (void);
231 static rtx mep_expand_builtin_saveregs (void);
232 static tree mep_build_builtin_va_list (void);
233 static void mep_expand_va_start (tree, rtx);
234 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
235 static bool mep_can_eliminate (const int, const int);
236 static void mep_conditional_register_usage (void);
237 static void mep_trampoline_init (rtx, tree, rtx);
239 #define WANT_GCC_DEFINITIONS
240 #include "mep-intrin.h"
241 #undef WANT_GCC_DEFINITIONS
244 /* Command Line Option Support. */
/* Nonzero entries mark registers usable as leaf registers; set en masse
   by mep_set_leaf_registers below.  */
246 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
248 /* True if we can use cmov instructions to move values back and forth
249 between core and coprocessor registers. */
250 bool mep_have_core_copro_moves_p;
252 /* True if we can use cmov instructions (or a work-alike) to move
253 values between coprocessor registers. */
254 bool mep_have_copro_copro_moves_p;
256 /* A table of all coprocessor instructions that can act like
257 a coprocessor-to-coprocessor cmov. */
/* NOTE(review): the table's initializer is elided from this listing.  */
258 static const int mep_cmov_insns[] = {
/* Set every entry of mep_leaf_registers to ENABLE.  The first entry
   doubles as an "already done" check so the array is rewritten only
   when the value actually changes.  (Return type and locals elided
   from this listing.)  */
273 mep_set_leaf_registers (int enable)
277 if (mep_leaf_registers[0] != enable)
278 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
279 mep_leaf_registers[i] = enable;
/* Conditional-register-usage hook: when neither hardware multiply nor
   divide is enabled, make $hi and $lo fixed and call-used; the shadow
   registers are then processed in the loop below (its body is elided
   from this listing).  */
283 mep_conditional_register_usage (void)
287 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
289 fixed_regs[HI_REGNO] = 1;
290 fixed_regs[LO_REGNO] = 1;
291 call_used_regs[HI_REGNO] = 1;
292 call_used_regs[LO_REGNO] = 1;
295 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
/* Per-optimization-level defaults: disable the first scheduling pass
   and enable -fomit-frame-pointer at all levels.  */
300 static const struct default_options mep_option_optimization_table[] =
302 /* The first scheduling pass often increases register pressure and
303 tends to result in more spill code. Only run it when
304 specifically asked. */
305 { OPT_LEVELS_ALL, OPT_fschedule_insns, NULL, 0 },
307 /* Using $fp doesn't gain us much, even when debugging is
309 { OPT_LEVELS_ALL, OPT_fomit_frame_pointer, NULL, 1 },
311 { OPT_LEVELS_NONE, 0, NULL, 0 }
/* Option-override hook: walk the deferred command-line options,
   diagnose mutually exclusive -ms/-mm/-ml/-mtiny= combinations, and
   finish target initialization.  (Heavily elided in this listing.)  */
315 mep_option_override (void)
319 cl_deferred_option *opt;
320 VEC(cl_deferred_option,heap) *vec
321 = (VEC(cl_deferred_option,heap) *) mep_deferred_options;
323 FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
325 switch (opt->opt_index)
/* Enable registers 48..79 (presumably the coprocessor bank -- confirm):
   unfix all 32, mark them call-used, then keep entries 6 and 7
   call-saved.  */
328 for (j = 0; j < 32; j++)
329 fixed_regs[j + 48] = 0;
330 for (j = 0; j < 32; j++)
331 call_used_regs[j + 48] = 1;
332 for (j = 6; j < 8; j++)
333 call_used_regs[j + 48] = 0;
335 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
370 warning (OPT_fpic, "-fpic is not supported");
372 warning (OPT_fPIC, "-fPIC is not supported");
373 if (TARGET_S && TARGET_M)
374 error ("only one of -ms and -mm may be given");
375 if (TARGET_S && TARGET_L)
376 error ("only one of -ms and -ml may be given");
377 if (TARGET_M && TARGET_L)
378 error ("only one of -mm and -ml may be given");
379 if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
380 error ("only one of -ms and -mtiny= may be given");
381 if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
382 error ("only one of -mm and -mtiny= may be given");
383 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
384 warning (0, "-mclip currently has no effect without -mminmax");
386 if (mep_const_section)
388 if (strcmp (mep_const_section, "tiny") != 0
389 && strcmp (mep_const_section, "near") != 0
390 && strcmp (mep_const_section, "far") != 0)
391 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
395 mep_tiny_cutoff = 65536;
398 if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
/* 64-bit coprocessor registers: keep wide types whole so they can
   live in a single CR.  */
401 if (TARGET_64BIT_CR_REGS)
402 flag_split_wide_types = 0;
404 init_machine_status = mep_init_machine_status;
405 mep_init_intrinsics ();
408 /* Pattern Support - constraints, predicates, expanders. */
410 /* MEP has very few instructions that can refer to the span of
411 addresses used by symbols, so it's common to check for them. */
/* symbol_p / symbolref_p predicates; their bodies are partially elided
   from this listing.  */
416 int c = GET_CODE (x);
418 return (c == CONST_INT
428 if (GET_CODE (x) != MEM)
431 c = GET_CODE (XEXP (x, 0));
432 return (c == CONST_INT
437 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
/* GEN_REG (R, STRICT): true if hard regno R is usable as a base
   register; the non-strict form also accepts the arg pointer and
   pseudo registers.  (Leading lines of the macro elided.)  */
439 #define GEN_REG(R, STRICT) \
442 && ((R) == ARG_POINTER_REGNUM \
443 || (R) >= FIRST_PSEUDO_REGISTER)))
/* Scratch state for encode_pattern: a short string describing the
   shape of an rtx, plus the rtx nodes visited; compare with RTX_IS.  */
445 static char pattern[12], *patternp;
446 static GTY(()) rtx patternr[12];
447 #define RTX_IS(x) (strcmp (pattern, x) == 0)
/* Recursively append a one-character-per-node encoding of X to
   "pattern", recording each visited node in patternr[].  Stops just
   before overflowing the fixed-size buffer.  */
450 encode_pattern_1 (rtx x)
454 if (patternp == pattern + sizeof (pattern) - 2)
460 patternr[patternp-pattern] = x;
462 switch (GET_CODE (x))
470 encode_pattern_1 (XEXP(x, 0));
474 encode_pattern_1 (XEXP(x, 0));
475 encode_pattern_1 (XEXP(x, 1));
479 encode_pattern_1 (XEXP(x, 0));
480 encode_pattern_1 (XEXP(x, 1));
484 encode_pattern_1 (XEXP(x, 0));
/* UNSPECs encode their unspec number as a digit, then recurse into
   each vector element.  */
498 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
499 for (i=0; i<XVECLEN (x, 0); i++)
500 encode_pattern_1 (XVECEXP (x, 0, i));
508 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
/* Reset the shared buffer and encode X from scratch.  */
517 encode_pattern (rtx x)
520 encode_pattern_1 (x);
/* Return the section-tag character encoded in a symbol's name as
   "@<char>.<name>" (see the table in the file header: b/t/n/f/c/...),
   after unwrapping MEMs/CONSTs/UNSPECs around the SYMBOL_REF.
   The default-return path is elided from this listing.  */
525 mep_section_tag (rtx x)
531 switch (GET_CODE (x))
538 x = XVECEXP (x, 0, 0);
541 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
550 if (GET_CODE (x) != SYMBOL_REF)
553 if (name[0] == '@' && name[2] == '.')
555 if (name[1] == 'i' || name[1] == 'I')
558 return 'f'; /* near */
559 return 'n'; /* far */
/* Map hard register REGNO to its smallest containing register class.
   Special registers get their own singleton classes; the first eight
   general registers form TPREL_REGS; coprocessor registers prefer any
   user-defined subclass ($c0 attributes) before the generic classes.  */
567 mep_regno_reg_class (int regno)
571 case SP_REGNO: return SP_REGS;
572 case TP_REGNO: return TP_REGS;
573 case GP_REGNO: return GP_REGS;
574 case 0: return R0_REGS;
575 case HI_REGNO: return HI_REGS;
576 case LO_REGNO: return LO_REGS;
577 case ARG_POINTER_REGNUM: return GENERAL_REGS;
580 if (GR_REGNO_P (regno))
581 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
582 if (CONTROL_REGNO_P (regno))
585 if (CR_REGNO_P (regno))
589 /* Search for the register amongst user-defined subclasses of
590 the coprocessor registers. */
591 for (i = USER0_REGS; i <= USER3_REGS; ++i)
593 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
595 for (j = 0; j < N_REG_CLASSES; ++j)
597 enum reg_class sub = reg_class_subclasses[i][j];
/* reg_class_subclasses lists are LIM_REG_CLASSES-terminated.  */
599 if (sub == LIM_REG_CLASSES)
601 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
606 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
609 if (CCR_REGNO_P (regno))
612 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
/* Map a register-constraint letter C (with lookahead STR) to a register
   class.  Copro-move classes degrade to NO_REGS when the corresponding
   cmov capability is absent; letters starting at 'A' (presumably
   'A'..'D' -- confirm against the elided cases) select the user-defined
   coprocessor subclasses when non-empty.  */
618 mep_reg_class_from_constraint (int c, const char *str)
635 return LOADABLE_CR_REGS;
637 return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
639 return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
666 enum reg_class which = c - 'A' + USER0_REGS;
667 return (reg_class_size[which] > 0 ? which : NO_REGS);
/* Integer-constant constraint letters:
     I: signed 16-bit    J: unsigned 16-bit   K: unsigned 24-bit
     L: signed 6-bit     M: unsigned 5-bit    N: unsigned 4-bit
   The final visible case accepts any value representable in 32 bits.  */
676 mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
680 case 'I': return value >= -32768 && value < 32768;
681 case 'J': return value >= 0 && value < 65536;
682 case 'K': return value >= 0 && value < 0x01000000;
683 case 'L': return value >= -32 && value < 32;
684 case 'M': return value >= 0 && value < 32;
685 case 'N': return value >= 0 && value < 16;
689 return value >= -2147483647-1 && value <= 2147483647;
/* Extra (memory/symbol) constraint checks, keyed off the
   encode_pattern shape of VALUE.  The switch-on-C framing is elided
   from this listing.  */
696 mep_extra_constraint (rtx value, int c)
698 encode_pattern (value);
703 /* For near symbols, like what call uses. */
704 if (GET_CODE (value) == REG)
706 return mep_call_address_operand (value, GET_MODE (value));
709 /* For signed 8-bit immediates. */
710 return (GET_CODE (value) == CONST_INT
711 && INTVAL (value) >= -128
712 && INTVAL (value) <= 127);
715 /* For tp/gp relative symbol values. */
716 return (RTX_IS ("u3s") || RTX_IS ("u2s")
717 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
720 /* Non-absolute memories. */
721 return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
725 return RTX_IS ("Hs");
728 /* Register indirect. */
729 return RTX_IS ("mr");
/* Control-bus symbol ('c' tag) accessed as a memory operand.  */
732 return mep_section_tag (value) == 'c' && RTX_IS ("ms");
/* True iff X is a CONST_INT whose value lies in [MINV, MAXV].  */
743 const_in_range (rtx x, int minv, int maxv)
745 return (GET_CODE (x) == CONST_INT
746 && INTVAL (x) >= minv
747 && INTVAL (x) <= maxv);
750 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
751 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
752 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
753 at the end of the insn stream. */
756 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
758 if (rtx_equal_p (dest, src1))
760 else if (rtx_equal_p (dest, src2))
/* Neither source is DEST: copy SRC1 into DEST first (placement
   depends on whether INSN is nonnull, per the comment above).  */
765 emit_insn (gen_movsi (copy_rtx (dest), src1));
767 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
772 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
773 Change the last element of PATTERN from (clobber (scratch:SI))
774 to (clobber (reg:SI HI_REGNO)). */
777 mep_rewrite_mult (rtx insn, rtx pattern)
781 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
782 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
783 PATTERN (insn) = pattern;
/* Force the rewritten insn to be re-recognized.  */
784 INSN_CODE (insn) = -1;
787 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
788 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
789 store the result in DEST if nonnull. */
792 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
796 lo = gen_rtx_REG (SImode, LO_REGNO);
/* DEST nonnull: use the "r" form that also writes DEST; otherwise
   only $lo is written.  */
798 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
799 mep_mulr_source (insn, dest, src1, src2));
801 pattern = gen_mulsi3_lo (lo, src1, src2);
802 mep_rewrite_mult (insn, pattern);
805 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
806 SRC3 into $lo, then use either madd or maddr. The move into $lo will
807 be deleted by a peephole2 if SRC3 is already in $lo. */
810 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
814 lo = gen_rtx_REG (SImode, LO_REGNO);
815 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
817 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
818 mep_mulr_source (insn, dest, src1, src2),
821 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
822 mep_rewrite_mult (insn, pattern);
825 /* Return true if $lo has the same value as integer register GPR when
826 instruction INSN is reached. If necessary, rewrite the instruction
827 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
828 rtx for (reg:SI LO_REGNO).
830 This function is intended to be used by the peephole2 pass. Since
831 that pass goes from the end of a basic block to the beginning, and
832 propagates liveness information on the way, there is no need to
833 update register notes here.
835 If GPR_DEAD_P is true on entry, and this function returns true,
836 then the caller will replace _every_ use of GPR in and after INSN
837 with LO. This means that if the instruction that sets $lo is a
838 mulr- or maddr-type instruction, we can rewrite it to use mul or
839 madd instead. In combination with the copy progagation pass,
840 this allows us to replace sequences like:
849 if GPR is no longer used. */
/* Walk backwards from INSN to the start of the basic block, looking
   for the instruction that defines GPR.  */
852 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
856 insn = PREV_INSN (insn);
858 switch (recog_memoized (insn))
860 case CODE_FOR_mulsi3_1:
862 if (rtx_equal_p (recog_data.operand[0], gpr))
864 mep_rewrite_mulsi3 (insn,
865 gpr_dead_p ? NULL : recog_data.operand[0],
866 recog_data.operand[1],
867 recog_data.operand[2]);
872 case CODE_FOR_maddsi3:
874 if (rtx_equal_p (recog_data.operand[0], gpr))
876 mep_rewrite_maddsi3 (insn,
877 gpr_dead_p ? NULL : recog_data.operand[0],
878 recog_data.operand[1],
879 recog_data.operand[2],
880 recog_data.operand[3]);
885 case CODE_FOR_mulsi3r:
886 case CODE_FOR_maddsi3r:
/* Already writes $lo; reusable iff its GPR output is ours.  */
888 return rtx_equal_p (recog_data.operand[1], gpr);
/* Any intervening write to $lo or GPR, or a volatile insn,
   invalidates the reuse.  */
891 if (reg_set_p (lo, insn)
892 || reg_set_p (gpr, insn)
893 || volatile_insn_p (PATTERN (insn)))
896 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
901 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
905 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
908 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
910 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
915 /* Return true if SET can be turned into a post-modify load or store
916 that adds OFFSET to GPR. In other words, return true if SET can be
919 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
921 It's OK to change SET to an equivalent operation in order to
925 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
928 unsigned int reg_bytes, mem_bytes;
929 enum machine_mode reg_mode, mem_mode;
931 /* Only simple SETs can be converted. */
932 if (GET_CODE (set) != SET)
935 /* Point REG to what we hope will be the register side of the set and
936 MEM to what we hope will be the memory side. */
937 if (GET_CODE (SET_DEST (set)) == MEM)
939 mem = &SET_DEST (set);
940 reg = &SET_SRC (set);
944 reg = &SET_DEST (set);
945 mem = &SET_SRC (set);
/* Look through a sign-extending load.  */
946 if (GET_CODE (*mem) == SIGN_EXTEND)
947 mem = &XEXP (*mem, 0);
950 /* Check that *REG is a suitable coprocessor register. */
951 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
954 /* Check that *MEM is a suitable memory reference. */
955 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
958 /* Get the number of bytes in each operand. */
959 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
960 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
962 /* Check that OFFSET is suitably aligned. */
963 if (INTVAL (offset) & (mem_bytes - 1))
966 /* Convert *MEM to a normal integer mode. */
967 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
968 *mem = change_address (*mem, mem_mode, NULL);
970 /* Adjust *REG as well. */
971 *reg = shallow_copy_rtx (*reg);
972 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
974 /* SET is a subword load. Convert it to an explicit extension. */
975 PUT_MODE (*reg, SImode);
976 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
980 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
981 PUT_MODE (*reg, reg_mode);
986 /* Return the effect of frame-related instruction INSN. */
/* Prefer an attached REG_FRAME_RELATED_EXPR note; otherwise use a copy
   of the pattern itself, marked frame-related.  */
989 mep_frame_expr (rtx insn)
993 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
994 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
995 RTX_FRAME_RELATED_P (expr) = 1;
999 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
1000 new pattern in INSN1; INSN2 will be deleted by the caller. */
1003 mep_make_parallel (rtx insn1, rtx insn2)
1007 if (RTX_FRAME_RELATED_P (insn2))
1009 expr = mep_frame_expr (insn2);
/* Both frame-related: combine the two frame effects in order.  */
1010 if (RTX_FRAME_RELATED_P (insn1))
1011 expr = gen_rtx_SEQUENCE (VOIDmode,
1012 gen_rtvec (2, mep_frame_expr (insn1), expr));
1013 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
1014 RTX_FRAME_RELATED_P (insn1) = 1;
1017 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
1018 gen_rtvec (2, PATTERN (insn1),
/* Invalidate the cached insn code after rewriting the pattern.  */
1020 INSN_CODE (insn1) = -1;
1023 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
1024 the basic block to see if any previous load or store instruction can
1025 be persuaded to do SET_INSN as a side-effect. Return true if so. */
1028 mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
1035 insn = PREV_INSN (insn);
1038 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
1040 mep_make_parallel (insn, set_insn);
/* Stop at any intervening use/set of REG or a volatile insn --
   folding past it would change semantics.  */
1044 if (reg_set_p (reg, insn)
1045 || reg_referenced_p (reg, PATTERN (insn))
1046 || volatile_insn_p (PATTERN (insn)))
1050 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
1054 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1057 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1059 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
/* Re-extract INSN so recog_data reflects it again for the caller.  */
1060 extract_insn (insn);
/* Decide whether a clip instruction can implement the bounds UX/LX
   (requires -mclip): signed ranges of the form [-2^i, 2^i - 1], or
   unsigned ranges [0, 2^i - 1] in the second (elided-condition) loop.  */
1065 mep_allow_clip (rtx ux, rtx lx, int s)
1067 HOST_WIDE_INT u = INTVAL (ux);
1068 HOST_WIDE_INT l = INTVAL (lx);
1071 if (!TARGET_OPT_CLIP)
1076 for (i = 0; i < 30; i ++)
1077 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1078 && (l == - ((HOST_WIDE_INT) 1 << i)))
1086 for (i = 0; i < 30; i ++)
1087 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
/* True iff the low byte of X has exactly one bit set (LOOKING_FOR
   true) or exactly one bit clear (LOOKING_FOR false).  */
1094 mep_bit_position_p (rtx x, bool looking_for)
1096 if (GET_CODE (x) != CONST_INT)
1098 switch ((int) INTVAL(x) & 0xff)
1100 case 0x01: case 0x02: case 0x04: case 0x08:
1101 case 0x10: case 0x20: case 0x40: case 0x80:
1103 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1104 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1105 return !looking_for;
/* Return true if moving SRC into DEST cannot be done with a single
   instruction and must be split (symbolic sources, out-of-range
   constants, or high destination registers).  */
1111 move_needs_splitting (rtx dest, rtx src,
1112 enum machine_mode mode ATTRIBUTE_UNUSED)
1114 int s = mep_section_tag (src);
1118 if (GET_CODE (src) == CONST
1119 || GET_CODE (src) == MEM)
1120 src = XEXP (src, 0);
1121 else if (GET_CODE (src) == SYMBOL_REF
1122 || GET_CODE (src) == LABEL_REF
1123 || GET_CODE (src) == PLUS)
1129 || (GET_CODE (src) == PLUS
1130 && GET_CODE (XEXP (src, 1)) == CONST_INT
1131 && (INTVAL (XEXP (src, 1)) < -65536
1132 || INTVAL (XEXP (src, 1)) > 0xffffff))
1133 || (GET_CODE (dest) == REG
1134 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
/* Decide whether a movsi should be split; used by the move expanders.
   (Return statements between the visible conditions are elided.)  */
1140 mep_split_mov (rtx *operands, int symbolic)
1144 if (move_needs_splitting (operands[0], operands[1], SImode))
1149 if (GET_CODE (operands[1]) != CONST_INT)
/* Constants satisfying I/J/O fit a single instruction.  */
1152 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1153 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1154 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1157 if (((!reload_completed && !reload_in_progress)
1158 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1159 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1165 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1166 it to one specific value. So the insn chosen depends on whether
1167 the source and destination modes match. */
1170 mep_vliw_mode_match (rtx tgt)
1172 bool src_vliw = mep_vliw_function_p (cfun->decl);
1173 bool tgt_vliw = INTVAL (tgt);
1175 return src_vliw == tgt_vliw;
1178 /* Like the above, but also test for near/far mismatches. */
1181 mep_vliw_jmp_match (rtx tgt)
1183 bool src_vliw = mep_vliw_function_p (cfun->decl);
1184 bool tgt_vliw = INTVAL (tgt);
/* Far ('f'-tagged) functions never match a plain jmp.  */
1186 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1189 return src_vliw == tgt_vliw;
/* True iff insn X occupies the MULTI (full-width) VLIW slot.  */
1193 mep_multi_slot (rtx x)
1195 return get_attr_slot (x) == SLOT_MULTI;
/* Legitimate-constant hook.  */
1200 mep_legitimate_constant_p (rtx x)
1202 /* We can't convert symbol values to gp- or tp-rel values after
1203 reload, as reload might have used $gp or $tp for other
1205 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1207 char e = mep_section_tag (x);
/* Reject tiny ('t') and based ('b') symbols after reload.  */
1208 return (e != 't' && e != 'b');
1213 /* Be careful not to use macros that need to be compiled one way for
1214 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
/* Legitimate-address check: accepts lo_sum(reg, const) for word-size
   or smaller accesses, plain [reg], [reg + 16-bit const],
   [reg + tp/gp-relative unspec], call [symbol], and certain constants.
   DEBUG_LEGIT gates the trace output.  */
1217 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1221 #define DEBUG_LEGIT 0
1223 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1227 if (GET_CODE (x) == LO_SUM
1228 && GET_CODE (XEXP (x, 0)) == REG
1229 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1230 && CONSTANT_P (XEXP (x, 1)))
1232 if (GET_MODE_SIZE (mode) > 4)
1234 /* We will end up splitting this, and lo_sums are not
1235 offsettable for us. */
1237 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1242 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1247 if (GET_CODE (x) == REG
1248 && GEN_REG (REGNO (x), strict))
1251 fprintf (stderr, " - yup, [reg]\n");
1256 if (GET_CODE (x) == PLUS
1257 && GET_CODE (XEXP (x, 0)) == REG
1258 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1259 && const_in_range (XEXP (x, 1), -32768, 32767))
1262 fprintf (stderr, " - yup, [reg+const]\n")
1267 if (GET_CODE (x) == PLUS
1268 && GET_CODE (XEXP (x, 0)) == REG
1269 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1270 && GET_CODE (XEXP (x, 1)) == CONST
1271 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1272 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1273 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1274 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1277 fprintf (stderr, " - yup, [reg+unspec]\n");
1282 the_tag = mep_section_tag (x);
1287 fprintf (stderr, " - nope, [far]\n");
/* VOIDmode symbol: a call target.  */
1292 if (mode == VOIDmode
1293 && GET_CODE (x) == SYMBOL_REF)
1296 fprintf (stderr, " - yup, call [symbol]\n");
1301 if ((mode == SImode || mode == SFmode)
1303 && LEGITIMATE_CONSTANT_P (x)
1304 && the_tag != 't' && the_tag != 'b')
1306 if (GET_CODE (x) != CONST_INT
1307 || (INTVAL (x) <= 0xfffff
1309 && (INTVAL (x) % 4) == 0))
1312 fprintf (stderr, " - yup, [const]\n");
1319 fprintf (stderr, " - nope.\n");
/* LEGITIMIZE_RELOAD_ADDRESS worker: repair two address shapes GCC
   produces that this target cannot use, by pushing reloads.  */
1325 mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
1327 int ind_levels ATTRIBUTE_UNUSED)
1329 enum reload_type type = (enum reload_type) type_i;
1331 if (GET_CODE (*x) == PLUS
1332 && GET_CODE (XEXP (*x, 0)) == MEM
1333 && GET_CODE (XEXP (*x, 1)) == REG)
1335 /* GCC will by default copy the MEM into a REG, which results in
1336 an invalid address. For us, the best thing to do is move the
1337 whole expression to a REG. */
1338 push_reload (*x, NULL_RTX, x, NULL,
1339 GENERAL_REGS, mode, VOIDmode,
1344 if (GET_CODE (*x) == PLUS
1345 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1346 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1348 char e = mep_section_tag (XEXP (*x, 0));
/* Skip tiny/based symbols; those are handled elsewhere.  */
1350 if (e != 't' && e != 'b')
1352 /* GCC thinks that (sym+const) is a valid address. Well,
1353 sometimes it is, this time it isn't. The best thing to
1354 do is reload the symbol to a register, since reg+int
1355 tends to work, and we can't just add the symbol and
1357 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1358 GENERAL_REGS, mode, VOIDmode,
/* Return the encoded length contribution of the address in operand
   OPN of INSN, for a core-register load/store.  Special short forms
   exist for SP-relative word accesses, TP-relative addends, and small
   unsigned offsets with low destination registers.  (Return values
   between the visible conditions are elided from this listing.)  */
1367 mep_core_address_length (rtx insn, int opn)
1369 rtx set = single_set (insn);
1370 rtx mem = XEXP (set, opn);
1371 rtx other = XEXP (set, 1-opn);
1372 rtx addr = XEXP (mem, 0);
1374 if (register_operand (addr, Pmode))
1376 if (GET_CODE (addr) == PLUS)
1378 rtx addend = XEXP (addr, 1);
1380 gcc_assert (REG_P (XEXP (addr, 0)));
1382 switch (REGNO (XEXP (addr, 0)))
1384 case STACK_POINTER_REGNUM:
1385 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1386 && mep_imm7a4_operand (addend, VOIDmode))
1391 gcc_assert (REG_P (other));
1393 if (REGNO (other) >= 8)
1396 if (GET_CODE (addend) == CONST
1397 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1398 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1401 if (GET_CODE (addend) == CONST_INT
1402 && INTVAL (addend) >= 0
1403 && INTVAL (addend) <= 127
1404 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
/* Likewise for coprocessor loads/stores: plain [reg] and
   post-increment forms are handled here.  */
1414 mep_cop_address_length (rtx insn, int opn)
1416 rtx set = single_set (insn);
1417 rtx mem = XEXP (set, opn);
1418 rtx addr = XEXP (mem, 0);
1420 if (GET_CODE (mem) != MEM)
1422 if (register_operand (addr, Pmode))
1424 if (GET_CODE (addr) == POST_INC)
1430 #define DEBUG_EXPAND_MOV 0
/* Expand a move of MODE between OPERANDS[0] and OPERANDS[1].  MeP symbols
   carry a section tag ('t' = tiny/GP-relative, 'b' = based/TP-relative,
   'f' = far); this routine rewrites symbolic operands into UNSPEC-wrapped
   $gp/$tp-relative addresses and forces awkward cases through registers.
   NOTE(review): this excerpt elides intervening lines, so comments below
   describe only the visible statements.  */
1432 mep_expand_mov (rtx *operands, enum machine_mode mode)
1437 int post_reload = 0;
/* Classify both operands by the linker section of any symbol they name.  */
1439 tag[0] = mep_section_tag (operands[0]);
1440 tag[1] = mep_section_tag (operands[1]);
/* Before reload, a memory-to-memory move needs the source copied into a
   scratch register first.  */
1442 if (!reload_in_progress
1443 && !reload_completed
1444 && GET_CODE (operands[0]) != REG
1445 && GET_CODE (operands[0]) != SUBREG
1446 && GET_CODE (operands[1]) != REG
1447 && GET_CODE (operands[1]) != SUBREG)
1448 operands[1] = copy_to_mode_reg (mode, operands[1]);
1450 #if DEBUG_EXPAND_MOV
1451 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1452 reload_in_progress || reload_completed);
1453 debug_rtx (operands[0]);
1454 debug_rtx (operands[1]);
1457 if (mode == DImode || mode == DFmode)
1460 if (reload_in_progress || reload_completed)
/* Writing $tp during reload invalidates TP-relative addressing below;
   remember that it happened.  */
1464 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1465 cfun->machine->reload_changes_tp = true;
/* Tiny ('t') symbols need the incoming $gp value; based ('b') symbols need
   the incoming $tp value.  Check that the hard-reg initial value is usable.  */
1467 if (tag[0] == 't' || tag[1] == 't')
1469 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1470 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1473 if (tag[0] == 'b' || tag[1] == 'b')
1475 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1476 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1479 if (cfun->machine->reload_changes_tp == true)
/* Rewrite a bare tiny/based symbol source as base-register + UNSPEC_TPREL
   or UNSPEC_GPREL constant.  */
1486 if (symbol_p (operands[1]))
1488 t = mep_section_tag (operands[1]);
1489 if (t == 'b' || t == 't')
1492 if (GET_CODE (operands[1]) == SYMBOL_REF)
1494 tpsym = operands[1];
1495 n = gen_rtx_UNSPEC (mode,
1496 gen_rtvec (1, operands[1]),
1497 t == 'b' ? UNS_TPREL : UNS_GPREL);
1498 n = gen_rtx_CONST (mode, n);
/* Symbol plus constant offset: wrap just the symbol, re-add the offset.  */
1500 else if (GET_CODE (operands[1]) == CONST
1501 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1502 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1503 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1505 tpsym = XEXP (XEXP (operands[1], 0), 0);
1506 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1507 n = gen_rtx_UNSPEC (mode,
1508 gen_rtvec (1, tpsym),
1509 t == 'b' ? UNS_TPREL : UNS_GPREL);
1510 n = gen_rtx_PLUS (mode, n, tpoffs);
1511 n = gen_rtx_CONST (mode, n);
/* Already an UNSPEC-wrapped constant: nothing more we can decompose.  */
1513 else if (GET_CODE (operands[1]) == CONST
1514 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1518 error ("unusual TP-relative address");
/* Add the proper base register ($tp for 'b', $gp for 't') and emit.  */
1522 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1523 : mep_gp_rtx ()), n);
1524 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1525 #if DEBUG_EXPAND_MOV
1526 fprintf(stderr, "mep_expand_mov emitting ");
/* Rewrite tiny/based MEM operands to address via the base register.  */
1533 for (i=0; i < 2; i++)
1535 t = mep_section_tag (operands[i]);
1536 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1541 sym = XEXP (operands[i], 0);
1542 if (GET_CODE (sym) == CONST
1543 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1544 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1557 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1558 n = gen_rtx_CONST (Pmode, n);
1559 n = gen_rtx_PLUS (Pmode, r, n);
1560 operands[i] = replace_equiv_address (operands[i], n);
/* Moves between a control register and memory must be staged through a
   general register.  */
1565 if ((GET_CODE (operands[1]) != REG
1566 && MEP_CONTROL_REG (operands[0]))
1567 || (GET_CODE (operands[0]) != REG
1568 && MEP_CONTROL_REG (operands[1])))
1571 #if DEBUG_EXPAND_MOV
1572 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1574 temp = gen_reg_rtx (mode);
1575 emit_move_insn (temp, operands[1]);
/* Stores to far symbols (or non-word-size accesses) need the address in a
   register; only legal before reload.  */
1579 if (symbolref_p (operands[0])
1580 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1581 || (GET_MODE_SIZE (mode) != 4)))
1585 gcc_assert (!reload_in_progress && !reload_completed);
1587 temp = force_reg (Pmode, XEXP (operands[0], 0));
1588 operands[0] = replace_equiv_address (operands[0], temp);
1589 emit_move_insn (operands[0], operands[1]);
1593 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
/* Far/tiny/based symbolic sources are loaded with a movh (top half) plus
   an add of the bottom half.  */
1596 if (symbol_p (operands[1])
1597 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1599 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1600 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1604 if (symbolref_p (operands[1])
1605 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1609 if (reload_in_progress || reload_completed)
1612 temp = gen_reg_rtx (Pmode);
1614 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1615 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1616 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1623 /* Cases where the pattern can't be made to use at all. */
/* Return nonzero if a move between OPERANDS[0] and OPERANDS[1] can be
   handled by the plain mov patterns; reject HIGH sources, far-section
   symbols, and (before reload) tiny/based symbols, and require at least
   one general register.  */
1626 mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1630 #define DEBUG_MOV_OK 0
1632 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1633 mep_section_tag (operands[1]));
1634 debug_rtx (operands[0]);
1635 debug_rtx (operands[1]);
1638 /* We want the movh patterns to get these. */
1639 if (GET_CODE (operands[1]) == HIGH)
1642 /* We can't store a register to a far variable without using a
1643 scratch register to hold the address. Using far variables should
1644 be split by mep_emit_mov anyway. */
1645 if (mep_section_tag (operands[0]) == 'f'
1646 || mep_section_tag (operands[1]) == 'f')
1649 fprintf (stderr, " - no, f\n");
1653 i = mep_section_tag (operands[1]);
1654 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1655 /* These are supposed to be generated with adds of the appropriate
1656 register. During and after reload, however, we allow them to
1657 be accessed as normal symbols because adding a dependency on
1658 the base register now might cause problems. */
1661 fprintf (stderr, " - no, bt\n");
1666 /* The only moves we can allow involve at least one general
1667 register, so require it. */
1668 for (i = 0; i < 2; i ++)
1670 /* Allow subregs too, before reload. */
1671 rtx x = operands[i];
1673 if (GET_CODE (x) == SUBREG)
1675 if (GET_CODE (x) == REG
1676 && ! MEP_CONTROL_REG (x))
1679 fprintf (stderr, " - ok\n");
1685 fprintf (stderr, " - no, no gen reg\n");
1690 #define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a doubleword (DImode/DFmode) move in OPERANDS[0..1] into two
   SImode halves, storing them in OPERANDS[2..5].  OPERANDS[2]/[3] carry
   the half to move first, OPERANDS[4]/[5] the second; endianness decides
   which subword is high/low.  */
1692 mep_split_wide_move (rtx *operands, enum machine_mode mode)
1696 #if DEBUG_SPLIT_WIDE_MOVE
1697 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1698 debug_rtx (operands[0]);
1699 debug_rtx (operands[1]);
1702 for (i = 0; i <= 1; i++)
1704 rtx op = operands[i], hi, lo;
1706 switch (GET_CODE (op))
1710 unsigned int regno = REGNO (op);
/* A 64-bit coprocessor register holds the whole value: the low word is
   the SImode view, the high word a ZERO_EXTRACT of the DImode reg.  */
1712 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1716 lo = gen_rtx_REG (SImode, regno);
1718 hi = gen_rtx_ZERO_EXTRACT (SImode,
1719 gen_rtx_REG (DImode, regno),
/* Otherwise a register pair; endianness picks which regno is high.  */
1724 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1725 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
/* Memory (or other) operands: take the two subwords.  */
1733 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1734 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1741 /* The high part of CR <- GPR moves must be done after the low part. */
1742 operands [i + 4] = lo;
1743 operands [i + 2] = hi;
/* If the first destination overlaps the second source (or a ZERO_EXTRACT
   is involved), swap the two sub-moves to avoid clobbering.  */
1746 if (reg_mentioned_p (operands[2], operands[5])
1747 || GET_CODE (operands[2]) == ZERO_EXTRACT
1748 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1752 /* Overlapping register pairs -- make sure we don't
1753 early-clobber ourselves. */
1755 operands[2] = operands[4];
1758 operands[3] = operands[5];
1762 #if DEBUG_SPLIT_WIDE_MOVE
1763 fprintf(stderr, "\033[34m");
1764 debug_rtx (operands[2]);
1765 debug_rtx (operands[3]);
1766 debug_rtx (operands[4]);
1767 debug_rtx (operands[5]);
1768 fprintf(stderr, "\033[0m");
1772 /* Emit a setcc instruction in its entirity. */
/* Emit RTL computing (OP1 CODE OP2) into DEST.  Handles the comparisons
   the hardware lacks by swapping operands, rewriting EQ as LTU of a
   subtraction, NE as its XOR-inverted form, and adjusting constant
   operands to turn LE/LEU/GE/GEU into LT/LTU/GT/GTU.  Returns whether
   the expansion succeeded (return statements are in elided lines).  */
1775 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
/* Canonicalize: put any constant second by swapping and inverting CODE.  */
1783 tmp = op1, op1 = op2, op2 = tmp;
1784 code = swap_condition (code);
/* Directly representable comparison: emit a single SET.  */
1789 op1 = force_reg (SImode, op1);
1790 emit_insn (gen_rtx_SET (VOIDmode, dest,
1791 gen_rtx_fmt_ee (code, SImode, op1, op2)));
/* EQ: (op1 - op2) < 1 unsigned, i.e. sltu3 against 1.  */
1795 if (op2 != const0_rtx)
1796 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1797 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1801 /* Branchful sequence:
1803 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1806 Branchless sequence:
1807 add3 tmp, op1, -op2 32-bit (or mov + sub)
1808 sltu3 tmp, tmp, 1 16-bit
1809 xor3 dest, tmp, 1 32-bit
/* NE: compute EQ into a temp, then invert with XOR 1 (branchless form,
   skipped when optimizing for size with a nonzero op2).  */
1811 if (optimize_size && op2 != const0_rtx)
1814 if (op2 != const0_rtx)
1815 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1817 op2 = gen_reg_rtx (SImode);
1818 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1820 emit_insn (gen_rtx_SET (VOIDmode, dest,
1821 gen_rtx_XOR (SImode, op2, const1_rtx)));
/* LE n  ->  LT n+1, guarding against overflow of the constant.
   NOTE(review): 0x7ffffff here looks like a typo for 0x7fffffff in the
   original source -- confirm against upstream before relying on it.  */
1825 if (GET_CODE (op2) != CONST_INT
1826 || INTVAL (op2) == 0x7ffffff)
1828 op2 = GEN_INT (INTVAL (op2) + 1);
1829 return mep_expand_setcc_1 (LT, dest, op1, op2);
/* LEU n  ->  LTU n+1 (mod 2^32), unless n == -1 (always true case).  */
1832 if (GET_CODE (op2) != CONST_INT
1833 || INTVAL (op2) == -1)
1835 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1836 return mep_expand_setcc_1 (LTU, dest, op1, op2);
/* GE n  ->  GT n-1, guarding against underflow past INT_MIN.  */
1839 if (GET_CODE (op2) != CONST_INT
1840 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1842 op2 = GEN_INT (INTVAL (op2) - 1);
1843 return mep_expand_setcc_1 (GT, dest, op1, op2);
/* GEU n  ->  GTU n-1, unless n == 0 (always true case).  */
1846 if (GET_CODE (op2) != CONST_INT
1847 || op2 == const0_rtx)
1849 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1850 return mep_expand_setcc_1 (GTU, dest, op1, op2);
/* Expander entry point for the setcc patterns: unpack OPERANDS (dest,
   comparison, two sources) and delegate to mep_expand_setcc_1.  */
1858 mep_expand_setcc (rtx *operands)
1860 rtx dest = operands[0];
1861 enum rtx_code code = GET_CODE (operands[1]);
1862 rtx op0 = operands[2];
1863 rtx op1 = operands[3];
1865 return mep_expand_setcc_1 (code, dest, op0, op1);
/* Expand a conditional branch comparison from OPERANDS (code, op0, op1)
   into a comparison rtx the branch patterns accept.  The hardware only
   branches on eq/ne, so ordered comparisons are materialized into a
   temporary with mep_expand_setcc_1 and the branch rewritten as a
   comparison of that temporary against zero/one.  */
1869 mep_expand_cbranch (rtx *operands)
1871 enum rtx_code code = GET_CODE (operands[0]);
1872 rtx op0 = operands[1];
1873 rtx op1 = operands[2];
/* LT with a small immediate: compute the setcc into a temp.  */
1880 if (mep_imm4_operand (op1, SImode))
1883 tmp = gen_reg_rtx (SImode);
1884 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
/* GE with a small immediate: same computation, branch sense flipped
   (in elided lines).  */
1891 if (mep_imm4_operand (op1, SImode))
1894 tmp = gen_reg_rtx (SImode);
1895 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1904 if (! mep_reg_or_imm4_operand (op1, SImode))
1905 op1 = force_reg (SImode, op1);
/* LE/GT n -> LT/GE n+1 when the constant allows it.  */
1910 if (GET_CODE (op1) == CONST_INT
1911 && INTVAL (op1) != 0x7fffffff)
1913 op1 = GEN_INT (INTVAL (op1) + 1);
1914 code = (code == LE ? LT : GE);
/* Otherwise compute op1 < op0 and branch on the (inverted) result.  */
1918 tmp = gen_reg_rtx (SImode);
1919 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1921 code = (code == LE ? EQ : NE);
/* Unsigned cases: LTU against 1 is just an equality test with zero.  */
1927 if (op1 == const1_rtx)
1934 tmp = gen_reg_rtx (SImode);
1935 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
/* LEU: try the direct form, fall back to the swapped strict form.  */
1942 tmp = gen_reg_rtx (SImode);
1943 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1945 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1954 tmp = gen_reg_rtx (SImode);
1955 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1956 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
/* GEU: likewise, direct or swapped.  */
1963 tmp = gen_reg_rtx (SImode);
1964 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1966 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1978 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
/* Return the assembler template for a conditional branch comparing
   OPERANDS[0] with OPERANDS[1], branching to OPERANDS[2].  NE selects
   the not-equal variants.  The beqz/bnez short forms are avoided in
   VLIW functions.  */
1982 mep_emit_cbranch (rtx *operands, int ne)
1984 if (GET_CODE (operands[1]) == REG)
1985 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1986 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1987 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1989 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
/* Expand a call.  RETURNS_VALUE is nonzero for call_value, which shifts
   the operand layout by one (operand 0 is then the result).  $tp and $gp
   are passed to the call patterns so their uses are visible to dataflow.  */
1993 mep_expand_call (rtx *operands, int returns_value)
1995 rtx addr = operands[returns_value];
1996 rtx tp = mep_tp_rtx ();
1997 rtx gp = mep_gp_rtx ();
1999 gcc_assert (GET_CODE (addr) == MEM);
2001 addr = XEXP (addr, 0);
/* Force an unsupported call target into a register.  */
2003 if (! mep_call_address_operand (addr, VOIDmode))
2004 addr = force_reg (SImode, addr);
2006 if (! operands[returns_value+2])
2007 operands[returns_value+2] = const0_rtx;
2010 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
2011 operands[3], tp, gp));
2013 emit_call_insn (gen_call_internal (addr, operands[1],
2014 operands[2], tp, gp));
2017 /* Aliasing Support. */
2019 /* If X is a machine specific address (i.e. a symbol or label being
2020 referenced as a displacement from the GOT implemented using an
2021 UNSPEC), then return the base term. Otherwise return X. */
2024 mep_find_base_term (rtx x)
/* Only $tp/$gp-plus-UNSPEC forms are rewritten; anything else is
   returned unchanged (via elided return paths).  */
2029 if (GET_CODE (x) != PLUS)
/* Identify which base register ($tp or $gp) anchors the address.  */
2034 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
2035 && base == mep_tp_rtx ())
2037 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
2038 && base == mep_gp_rtx ())
2043 if (GET_CODE (term) != CONST)
2045 term = XEXP (term, 0);
/* The displacement must be the matching TPREL/GPREL unspec; its first
   vector element is the underlying symbol.  */
2047 if (GET_CODE (term) != UNSPEC
2048 || XINT (term, 1) != unspec)
2051 return XVECEXP (term, 0, 0);
2054 /* Reload Support. */
2056 /* Return true if the registers in CLASS cannot represent the change from
2057 modes FROM to TO. */
2060 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2061 enum reg_class regclass)
2066 /* 64-bit COP regs must remain 64-bit COP regs. */
/* With 64-bit coprocessor registers, disallow narrowing either side of
   the mode change below 8 bytes for the CR classes.  */
2067 if (TARGET_64BIT_CR_REGS
2068 && (regclass == CR_REGS
2069 || regclass == LOADABLE_CR_REGS)
2070 && (GET_MODE_SIZE (to) < 8
2071 || GET_MODE_SIZE (from) < 8))
/* True if class C contains any register outside GENERAL_REGS.  */
2077 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
/* True if X (looking through subregs) is a general-purpose hard or
   pseudo register in the GR range.  */
2080 mep_general_reg (rtx x)
2082 while (GET_CODE (x) == SUBREG)
2084 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
/* True if X is a non-general *hard* register (control/coprocessor).  */
2088 mep_nongeneral_reg (rtx x)
2090 while (GET_CODE (x) == SUBREG)
2092 return (GET_CODE (x) == REG
2093 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
/* True if X is a coprocessor general (CR) register.  */
2097 mep_general_copro_reg (rtx x)
2099 while (GET_CODE (x) == SUBREG)
2101 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
/* True if X is not a hard register at all (memory, constant, pseudo).  */
2105 mep_nonregister (rtx x)
2107 while (GET_CODE (x) == SUBREG)
2109 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2112 #define DEBUG_RELOAD 0
2114 /* Return the secondary reload class needed for moving value X to or
2115 from a register in coprocessor register class CLASS. */
2117 static enum reg_class
2118 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2120 if (mep_general_reg (x))
2121 /* We can do the move directly if mep_have_core_copro_moves_p,
2122 otherwise we need to go through memory. Either way, no secondary
2123 register is needed. */
2126 if (mep_general_copro_reg (x))
2128 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2129 if (mep_have_copro_copro_moves_p)
2132 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2133 if (mep_have_core_copro_moves_p)
2134 return GENERAL_REGS;
2136 /* Otherwise we need to do it through memory. No secondary
2137 register is needed. */
/* Memory satisfying constraint 'U' can be loaded into a loadable CR
   class directly.  */
2141 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2142 && constraint_satisfied_p (x, CONSTRAINT_U))
2143 /* X is a memory value that we can access directly. */
2146 /* We have to move X into a GPR first and then copy it to
2147 the coprocessor register. The move from the GPR to the
2148 coprocessor might be done directly or through memory,
2149 depending on mep_have_core_copro_moves_p. */
2150 return GENERAL_REGS;
2153 /* Copying X to register in RCLASS. */
/* Implement the input half of the secondary-reload hooks: coprocessor
   classes defer to mep_secondary_copro_reload_class; any other
   non-general class needs a GENERAL_REGS intermediary when X is not a
   general register.  */
2156 mep_secondary_input_reload_class (enum reg_class rclass,
2157 enum machine_mode mode ATTRIBUTE_UNUSED,
2163 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2167 if (reg_class_subset_p (rclass, CR_REGS))
2168 rv = mep_secondary_copro_reload_class (rclass, x);
2169 else if (MEP_NONGENERAL_CLASS (rclass)
2170 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2174 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2176 return (enum reg_class) rv;
2179 /* Copying register in RCLASS to X. */
/* Output direction: identical logic, mirrored for stores from RCLASS.  */
2182 mep_secondary_output_reload_class (enum reg_class rclass,
2183 enum machine_mode mode ATTRIBUTE_UNUSED,
2189 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2193 if (reg_class_subset_p (rclass, CR_REGS))
2194 rv = mep_secondary_copro_reload_class (rclass, x);
2195 else if (MEP_NONGENERAL_CLASS (rclass)
2196 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2200 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2203 return (enum reg_class) rv;
2206 /* Implement SECONDARY_MEMORY_NEEDED. */
/* A stack slot is needed for core<->copro moves when direct moves are
   unavailable, and for copro<->copro moves when those are unavailable.  */
2209 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2210 enum machine_mode mode ATTRIBUTE_UNUSED)
2212 if (!mep_have_core_copro_moves_p)
2214 if (reg_classes_intersect_p (rclass1, CR_REGS)
2215 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2217 if (reg_classes_intersect_p (rclass2, CR_REGS)
2218 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2220 if (!mep_have_copro_copro_moves_p
2221 && reg_classes_intersect_p (rclass1, CR_REGS)
2222 && reg_classes_intersect_p (rclass2, CR_REGS))
/* Expand a reload pattern: move OPERANDS[1] to OPERANDS[0] using scratch
   OPERANDS[2], dispatching on a two-digit code built from whether each
   side is a far ('f'-section) reference or a non-general register.
   NOTE(review): the case labels 00/01/02/10... are written as decimal
   digit pairs but C parses a leading 0 as octal -- harmless only because
   all digits used are < 8; confirm against upstream.  */
2229 mep_expand_reload (rtx *operands, enum machine_mode mode)
2231 /* There are three cases for each direction:
2236 int s0 = mep_section_tag (operands[0]) == 'f';
2237 int s1 = mep_section_tag (operands[1]) == 'f';
2238 int c0 = mep_nongeneral_reg (operands[0]);
2239 int c1 = mep_nongeneral_reg (operands[1]);
2240 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2243 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2244 debug_rtx (operands[0]);
2245 debug_rtx (operands[1]);
2250 case 00: /* Don't know why this gets here. */
2251 case 02: /* general = far */
2252 emit_move_insn (operands[0], operands[1]);
2255 case 10: /* cr = mem */
2256 case 11: /* cr = cr */
2257 case 01: /* mem = cr */
2258 case 12: /* cr = far */
/* Stage the value through the scratch general register.  */
2259 emit_move_insn (operands[2], operands[1]);
2260 emit_move_insn (operands[0], operands[2]);
2263 case 20: /* far = general */
/* Load the far address into the scratch, then store through it.  */
2264 emit_move_insn (operands[2], XEXP (operands[1], 0));
2265 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2268 case 21: /* far = cr */
2269 case 22: /* far = far */
2271 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2272 which, mode_name[mode]);
2273 debug_rtx (operands[0]);
2274 debug_rtx (operands[1]);
2279 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2280 can be moved directly into registers 0 to 7, but not into the rest.
2281 If so, and if the required class includes registers 0 to 7, restrict
2282 it to those registers. */
2285 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2287 switch (GET_CODE (x))
/* Integer constants in (0x10000, 0x1000000) with a nonzero low half need
   the movu instruction, available only on r0-r7 (TPREL_REGS).  */
2290 if (INTVAL (x) >= 0x10000
2291 && INTVAL (x) < 0x01000000
2292 && (INTVAL (x) & 0xffff) != 0
2293 && reg_class_subset_p (TPREL_REGS, rclass))
2294 rclass = TPREL_REGS;
/* Non-far symbolic constants likewise prefer r0-r7.  */
2300 if (mep_section_tag (x) != 'f'
2301 && reg_class_subset_p (TPREL_REGS, rclass))
2302 rclass = TPREL_REGS;
2311 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2312 moves, 4 for direct double-register moves, and 1000 for anything
2313 that requires a temporary register or temporary stack slot. */
2316 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
/* copro <-> copro, when the hardware supports it directly.  */
2318 if (mep_have_copro_copro_moves_p
2319 && reg_class_subset_p (from, CR_REGS)
2320 && reg_class_subset_p (to, CR_REGS))
2322 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2326 if (reg_class_subset_p (from, CR_REGS)
2327 && reg_class_subset_p (to, CR_REGS))
2329 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
/* One side coprocessor: wider-than-word moves cost more.  */
2333 if (reg_class_subset_p (from, CR_REGS)
2334 || reg_class_subset_p (to, CR_REGS))
2336 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
/* Anything forced through memory or between two non-general classes is
   effectively prohibitive.  */
2340 if (mep_secondary_memory_needed (from, to, mode))
2342 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2345 if (GET_MODE_SIZE (mode) > 4)
2352 /* Functions to save and restore machine-specific function data. */
2354 static struct machine_function *
2355 mep_init_machine_status (void)
2357 return ggc_alloc_cleared_machine_function ();
/* Implement ALLOCATE_INITIAL_VALUE: hand out a stack save slot for the
   entry value of hard register REG, allocating a new 4-byte slot on
   first request.  Returns NULL (in elided paths) for registers whose
   entry value cannot be given a slot.  */
2361 mep_allocate_initial_value (rtx reg)
2365 if (GET_CODE (reg) != REG)
2368 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2371 /* In interrupt functions, the "initial" values of $gp and $tp are
2372 provided by the prologue. They are not necessarily the same as
2373 the values that the caller was using. */
2374 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2375 if (mep_interrupt_p ())
/* Lazily allocate a slot in the register-save area.  */
2378 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2380 cfun->machine->reg_save_size += 4;
2381 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
/* Slots are addressed as negative offsets from the arg pointer.  */
2384 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2385 return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
/* Return the rtx for __builtin_return_address (COUNT): the entry value
   of $lp (only COUNT == 0 is supported; other paths elided).  */
2389 mep_return_addr_rtx (int count)
2394 return get_hard_reg_initial_val (Pmode, LP_REGNO);
/* Entry values of $tp and $gp for based/tiny addressing.  */
2400 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2406 return get_hard_reg_initial_val (Pmode, GP_REGNO);
/* True if the current function has the "interrupt" attribute.  The
   answer is cached in cfun->machine (0 = unknown, 1 = no, 2 = yes)
   because resource.c queries it via EPILOGUE_USES.  */
2410 mep_interrupt_p (void)
2412 if (cfun->machine->interrupt_handler == 0)
2414 int interrupt_handler
2415 = (lookup_attribute ("interrupt",
2416 DECL_ATTRIBUTES (current_function_decl))
2418 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2420 return cfun->machine->interrupt_handler == 2;
/* Likewise for the "disinterrupt" attribute, same 0/1/2 caching.  */
2424 mep_disinterrupt_p (void)
2426 if (cfun->machine->disable_interrupts == 0)
2428 int disable_interrupts
2429 = (lookup_attribute ("disinterrupt",
2430 DECL_ATTRIBUTES (current_function_decl))
2432 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2434 return cfun->machine->disable_interrupts == 2;
2438 /* Frame/Epilog/Prolog Related. */
/* True if INSN modifies REG.  Unlike rtlanal's reg_set_p this ignores
   calls, and treats a register self-copy as not a set.  */
2441 mep_reg_set_p (rtx reg, rtx insn)
2443 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2446 if (FIND_REG_INC_NOTE (insn, reg))
2448 insn = PATTERN (insn);
/* A no-op move (rN = rN) does not count as setting the register.  */
2451 if (GET_CODE (insn) == SET
2452 && GET_CODE (XEXP (insn, 0)) == REG
2453 && GET_CODE (XEXP (insn, 1)) == REG
2454 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2457 return set_of (reg, insn) != NULL_RTX;
/* Three-state cache values for per-register save decisions.  */
2461 #define MEP_SAVES_UNKNOWN 0
2462 #define MEP_SAVES_YES 1
2463 #define MEP_SAVES_MAYBE 2
2464 #define MEP_SAVES_NO 3
/* True if hard register REGNO is written anywhere in the current
   function's insn stream.  Interrupt functions treat any live register
   as set; $lp counts as set when profiling (the profiler clobbers it).  */
2467 mep_reg_set_in_function (int regno)
2471 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2474 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
/* Scan the complete insn list, including queued sequences.  */
2477 push_topmost_sequence ();
2478 insn = get_insns ();
2479 pop_topmost_sequence ();
2484 reg = gen_rtx_REG (SImode, regno);
2486 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2487 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
/* True if the function contains an asm with no operands (which might
   touch any register).  Cached in cfun->machine (1 = no, 2 = yes).  */
2493 mep_asm_without_operands_p (void)
2495 if (cfun->machine->asms_without_operands == 0)
2499 push_topmost_sequence ();
2500 insn = get_insns ();
2501 pop_topmost_sequence ();
2503 cfun->machine->asms_without_operands = 1;
2507 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2509 cfun->machine->asms_without_operands = 2;
2512 insn = NEXT_INSN (insn);
2516 return cfun->machine->asms_without_operands == 2;
2519 /* Interrupt functions save/restore every call-preserved register, and
2520 any call-used register it uses (or all if it calls any function,
2521 since they may get clobbered there too). Here we check to see
2522 which call-used registers need saving. */
/* Extra IVC2 coprocessor control registers that must be preserved.  */
2524 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2525 && (r == FIRST_CCR_REGNO + 1 \
2526 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2527 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
/* True if interrupt-handler entry/exit must save register R.  */
2530 mep_interrupt_saved_reg (int r)
2532 if (!mep_interrupt_p ())
/* The control-save temporaries are always clobbered by the save code.  */
2534 if (r == REGSAVE_CONTROL_TEMP
2535 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
/* An operand-less asm may touch anything, so save conservatively.  */
2537 if (mep_asm_without_operands_p ()
2539 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2540 || IVC2_ISAVED_REG (r)))
2542 if (!current_function_is_leaf)
2543 /* Function calls mean we need to save $lp. */
2544 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2546 if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
2547 /* The interrupt handler might use these registers for repeat blocks,
2548 or it might call a function that does so. */
2549 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
/* In a leaf handler, untouched call-used registers need no saving.  */
2551 if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2553 /* Functions we call might clobber these. */
2554 if (call_used_regs[r] && !fixed_regs[r])
2556 /* Additional registers that need to be saved for IVC2. */
2557 if (IVC2_ISAVED_REG (r))
/* True if the current function must save register R in its prologue.
   The per-register answer is cached in reg_saved[] once the frame
   layout is locked.  */
2564 mep_call_saves_register (int r)
2566 if (! cfun->machine->frame_locked)
2568 int rv = MEP_SAVES_NO;
/* Already assigned a save slot (e.g. by ALLOCATE_INITIAL_VALUE).  */
2570 if (cfun->machine->reg_save_slot[r])
/* Profiling clobbers $lp implicitly; see mep_epilogue_uses.  */
2572 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2574 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2576 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2578 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2579 /* We need these to have stack slots so that they can be set during
2582 else if (mep_interrupt_saved_reg (r))
2584 cfun->machine->reg_saved[r] = rv;
2586 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2589 /* Return true if epilogue uses register REGNO. */
2592 mep_epilogue_uses (int regno)
2594 /* Since $lp is a call-saved register, the generic code will normally
2595 mark it used in the epilogue if it needs to be saved and restored.
2596 However, when profiling is enabled, the profiling code will implicitly
2597 clobber $11. This case has to be handled specially both here and in
2598 mep_call_saves_register. */
2599 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2601 /* Interrupt functions save/restore pretty much everything. */
2602 return (reload_completed && mep_interrupt_saved_reg (regno));
/* Save-slot size in bytes for register REGNO (8 for 64-bit CR regs,
   otherwise the default on the elided path).  */
2606 mep_reg_size (int regno)
2608 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2613 /* Worker function for TARGET_CAN_ELIMINATE. */
2616 mep_can_eliminate (const int from, const int to)
/* AP -> SP elimination is only possible when no frame pointer is needed.  */
2618 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2619 ? ! frame_pointer_needed
/* Compute the offset between eliminable registers FROM and TO.  As a
   side effect this lays out the frame: register-save size, 8-byte
   alignment fillers, and total stack adjustment are recorded in
   cfun->machine.  */
2624 mep_elimination_offset (int from, int to)
2628 int frame_size = get_frame_size () + crtl->outgoing_args_size;
/* Until the frame is locked, recompute the save decisions from scratch.  */
2631 if (!cfun->machine->frame_locked)
2632 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2634 /* We don't count arg_regs_to_save in the arg pointer offset, because
2635 gcc thinks the arg pointer has moved along with the saved regs.
2636 However, we do count it when we adjust $sp in the prologue. */
2638 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2639 if (mep_call_saves_register (i))
2640 reg_save_size += mep_reg_size (i);
/* Pad the register-save area to an 8-byte multiple.  */
2642 if (reg_save_size % 8)
2643 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2645 cfun->machine->regsave_filler = 0;
2647 /* This is what our total stack adjustment looks like. */
2648 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
/* And pad the whole frame to 8 bytes as well.  */
2651 cfun->machine->frame_filler = 8 - (total_size % 8);
2653 cfun->machine->frame_filler = 0;
2656 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2657 return reg_save_size + cfun->machine->regsave_filler;
2659 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2660 return cfun->machine->frame_filler + frame_size;
2662 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2663 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
/* Fragment of a helper (presumably the F() frame-marking wrapper used by
   the prologue code below -- definition header elided).  */
2671 RTX_FRAME_RELATED_P (x) = 1;
2675 /* Since the prologue/epilogue code is generated after optimization,
2676 we can't rely on gcc to split constants for us. So, this code
2677 captures all the ways to add a constant to a register in one logic
2678 chunk, including optimizing away insns we just don't need. This
2679 makes the prolog/epilog code easier to follow. */
/* Emit DEST = SRC + VALUE, choosing mov / addsi3 / split-constant forms.
   MARK_FRAME nonzero marks the emitted insns RTX_FRAME_RELATED_P for
   unwind info.  */
2681 add_constant (int dest, int src, int value, int mark_frame)
/* No-op add to self: emit nothing.  */
2686 if (src == dest && value == 0)
/* Zero add: plain register copy.  */
2691 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2692 gen_rtx_REG (SImode, src));
2694 RTX_FRAME_RELATED_P(insn) = 1;
/* Value fits in the signed 16-bit immediate of add3.  */
2698 if (value >= -32768 && value <= 32767)
2700 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2701 gen_rtx_REG (SImode, src),
2704 RTX_FRAME_RELATED_P(insn) = 1;
2708 /* Big constant, need to use a temp register. We use
2709 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2710 area is always small enough to directly add to). */
2712 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2713 lo = value & 0xffff;
/* Build the constant in the temp: load high half, OR in low half.  */
2715 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2720 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2721 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2725 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2726 gen_rtx_REG (SImode, src),
2727 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
/* For unwind info, describe the net effect as a single dest += value.  */
2730 RTX_FRAME_RELATED_P(insn) = 1;
2731 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2732 gen_rtx_SET (SImode,
2733 gen_rtx_REG (SImode, dest),
2734 gen_rtx_PLUS (SImode,
2735 gen_rtx_REG (SImode, dest),
2740 /* Move SRC to DEST. Mark the move as being potentially dead if
/* MAYBE_DEAD_P is set; the REG_MAYBE_DEAD note lets later passes delete
   the save if the register turns out not to need it.  */
2744 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2746 rtx insn = emit_move_insn (dest, src);
2749 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2754 /* Used for interrupt functions, which can't assume that $tp and $gp
2755 contain the correct pointers. */
/* Reload hard register REGNO with the address of SYMBOL via the
   movh/add top/bottom-half pair.  Skipped when the register is unused
   in a leaf function.  */
2758 mep_reload_pointer (int regno, const char *symbol)
2762 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2765 reg = gen_rtx_REG (SImode, regno);
2766 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2767 emit_insn (gen_movsi_topsym_s (reg, sym));
2768 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2771 /* Assign save slots for any register not already saved. DImode
2772 registers go at the end of the reg save area; the rest go at the
2773 beginning. This is for alignment purposes. Returns true if a frame
2774 is really needed. */
2776 mep_assign_save_slots (int reg_save_size)
2778 bool really_need_stack_frame = false;
2782 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2783 if (mep_call_saves_register(i))
2785 int regsize = mep_reg_size (i);
/* Saving anything other than an untouched $tp/$gp/$lp forces a frame.  */
2787 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2788 || mep_reg_set_in_function (i))
2789 really_need_stack_frame = true;
/* Slot may already exist (pre-assigned by ALLOCATE_INITIAL_VALUE).  */
2791 if (cfun->machine->reg_save_slot[i])
2796 cfun->machine->reg_save_size += regsize;
2797 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
/* 8-byte registers are placed from the end of the area (via di_ofs).  */
2801 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
/* From here on the layout must not change.  */
2805 cfun->machine->frame_locked = 1;
2806 return really_need_stack_frame;
/* Emit the function prologue: disable interrupts if requested, adjust
   $sp, save all registers selected by mep_call_saves_register, set up
   the frame pointer, and (for interrupt handlers) reload $gp/$tp.
   NOTE(review): excerpt has elided lines; comments cover visible code.  */
2810 mep_expand_prologue (void)
2812 int i, rss, sp_offset = 0;
2815 int really_need_stack_frame;
2817 /* We must not allow register renaming in interrupt functions,
2818 because that invalidates the correctness of the set of call-used
2819 registers we're going to save/restore. */
2820 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2822 if (mep_disinterrupt_p ())
2823 emit_insn (gen_mep_disable_int ());
2825 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
/* Lock in the frame layout computed by mep_elimination_offset.  */
2827 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2828 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2829 really_need_stack_frame = frame_size;
2831 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
/* Fold a small frame into the initial $sp adjustment so the saves use
   short offsets.  */
2833 sp_offset = reg_save_size;
2834 if (sp_offset + frame_size < 128)
2835 sp_offset += frame_size ;
2837 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
/* Save each register to its assigned slot.  */
2839 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2840 if (mep_call_saves_register(i))
2844 enum machine_mode rmode;
2846 rss = cfun->machine->reg_save_slot[i];
/* $tp/$gp/$lp kept purely for their entry values need no store here.  */
2848 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2849 && (!mep_reg_set_in_function (i)
2850 && !mep_interrupt_p ()))
2853 if (mep_reg_size (i) == 8)
2858 /* If there is a pseudo associated with this register's initial value,
2859 reload might have already spilt it to the stack slot suggested by
2860 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2862 mem = gen_rtx_MEM (rmode,
2863 plus_constant (stack_pointer_rtx, sp_offset - rss));
2864 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
/* General and loadable-CR registers store directly.  */
2866 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2867 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
/* 64-bit registers that can't be stored directly go via two SImode
   temps, honoring endianness for the subword order.  */
2868 else if (rmode == DImode)
2871 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2873 mem = gen_rtx_MEM (SImode,
2874 plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2876 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2877 gen_rtx_REG (SImode, i),
2879 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2880 gen_rtx_ZERO_EXTRACT (SImode,
2881 gen_rtx_REG (DImode, i),
2885 insn = maybe_dead_move (mem,
2886 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2888 RTX_FRAME_RELATED_P (insn) = 1;
2890 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2891 gen_rtx_SET (VOIDmode,
2893 gen_rtx_REG (rmode, i)));
2894 mem = gen_rtx_MEM (SImode,
2895 plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2896 insn = maybe_dead_move (mem,
2897 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
/* Other control registers are staged through a single temp.  */
2903 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2904 gen_rtx_REG (rmode, i),
2906 insn = maybe_dead_move (mem,
2907 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2909 RTX_FRAME_RELATED_P (insn) = 1;
2911 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2912 gen_rtx_SET (VOIDmode,
2914 gen_rtx_REG (rmode, i)));
2918 if (frame_pointer_needed)
2920 /* We've already adjusted down by sp_offset. Total $sp change
2921 is reg_save_size + frame_size. We want a net change here of
2922 just reg_save_size. */
2923 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
/* No frame pointer: finish the remaining $sp adjustment (if any).  */
2926 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
/* Interrupt handlers can't trust the incoming $gp/$tp.  */
2928 if (mep_interrupt_p ())
2930 mep_reload_pointer(GP_REGNO, "__sdabase");
2931 mep_reload_pointer(TP_REGNO, "__tpbase");
/* Assembly-output hook run at the start of each function's output:
   prints a one-line "# frame:" summary and a full initial-frame-layout
   diagram as assembler comments, and chooses the printed name of the
   frame-pointer register.  NOTE(review): this chunk is an elided
   listing -- the leading numbers are the original file's line numbers
   and rows are missing between them, so absent braces/statements here
   exist in the real file.  */
2936 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2938 int local = hwi_local;
2939 int frame_size = local + crtl->outgoing_args_size;
2944 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2946 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2947 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2948 sp_offset = reg_save_size + frame_size;
2950 ffill = cfun->machine->frame_filler;
/* Print $8 as "$fp" only when it really serves as a frame pointer.  */
2952 if (cfun->machine->mep_frame_pointer_needed)
2953 reg_names[FP_REGNO] = "$fp";
2955 reg_names[FP_REGNO] = "$8";
2960 if (debug_info_level == DINFO_LEVEL_NONE)
2962 fprintf (file, "\t# frame: %d", sp_offset);
2964 fprintf (file, " %d regs", reg_save_size);
2966 fprintf (file, " %d locals", local);
2967 if (crtl->outgoing_args_size)
2968 fprintf (file, " %d args", crtl->outgoing_args_size);
2969 fprintf (file, "\n");
2973 fprintf (file, "\t#\n");
2974 fprintf (file, "\t# Initial Frame Information:\n");
2975 if (sp_offset || !frame_pointer_needed)
2976 fprintf (file, "\t# Entry ---------- 0\n");
2978 /* Sort registers by save slots, so they're printed in the order
2979 they appear in memory, not the order they're saved in. */
/* Simple O(n^2) selection-style swap over slot_map; FIRST_PSEUDO_REGISTER
   is small and this only runs when emitting the commentary.  */
2980 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2982 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2983 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2984 if (cfun->machine->reg_save_slot[slot_map[si]]
2985 > cfun->machine->reg_save_slot[slot_map[sj]])
2987 int t = slot_map[si];
2988 slot_map[si] = slot_map[sj];
2993 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2996 int r = slot_map[i];
2997 int rss = cfun->machine->reg_save_slot[r];
2999 if (!mep_call_saves_register (r))
/* $tp/$gp/$lp are skipped when unmodified outside interrupt handlers,
   mirroring the save logic in the prologue.  */
3002 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
3003 && (!mep_reg_set_in_function (r)
3004 && !mep_interrupt_p ()))
3007 rsize = mep_reg_size(r);
3008 skip = rss - (sp+rsize);
3010 fprintf (file, "\t# %3d bytes for alignment\n", skip);
3011 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
3012 rsize, reg_names[r], sp_offset - rss);
3016 skip = reg_save_size - sp;
3018 fprintf (file, "\t# %3d bytes for alignment\n", skip);
3020 if (frame_pointer_needed)
3021 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
3023 fprintf (file, "\t# %3d bytes for local vars\n", local);
3025 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
3026 if (crtl->outgoing_args_size)
3027 fprintf (file, "\t# %3d bytes for outgoing args\n",
3028 crtl->outgoing_args_size);
3029 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
3030 fprintf (file, "\t#\n");
/* Mode flags consulted by mep_expand_epilogue: set around the EH and
   sibcall epilogue expanders to suppress the $lp restore / adjust the
   return sequence, then cleared again.  Not reentrant, but epilogue
   expansion is single-threaded within the compiler.  */
3034 static int mep_prevent_lp_restore = 0;
3035 static int mep_sibcall_epilogue = 0;
/* Expand the function epilogue as RTL: rebase $sp (from $fp if one is
   in use), restore call-saved registers from their stack slots, pop
   the frame, and emit the appropriate return (eh_return / reti /
   indirect jump through a temp holding the saved $lp).  Behavior is
   modulated by mep_prevent_lp_restore and mep_sibcall_epilogue.
   NOTE(review): elided listing -- rows are missing between the
   numbered lines.  */
3038 mep_expand_epilogue (void)
3040 int i, sp_offset = 0;
3041 int reg_save_size = 0;
3043 int lp_temp = LP_REGNO, lp_slot = -1;
3044 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
3045 int interrupt_handler = mep_interrupt_p ();
3047 if (profile_arc_flag == 2)
3048 emit_insn (gen_mep_bb_trace_ret ());
3050 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
3051 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
3053 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
3055 if (frame_pointer_needed)
3057 /* If we have a frame pointer, we won't have a reliable stack
3058 pointer (alloca, you know), so rebase SP from FP */
3059 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
3060 gen_rtx_REG (SImode, FP_REGNO));
3061 sp_offset = reg_save_size;
3065 /* SP is right under our local variable space. Adjust it if
3067 sp_offset = reg_save_size + frame_size;
/* Pre-pop the locals when the total offset would not fit in the short
   (<128) displacement form of the restore loads below.  */
3068 if (sp_offset >= 128)
3070 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
3071 sp_offset -= frame_size;
3075 /* This is backwards so that we restore the control and coprocessor
3076 registers before the temporary registers we use to restore
3078 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
3079 if (mep_call_saves_register (i))
3081 enum machine_mode rmode;
3082 int rss = cfun->machine->reg_save_slot[i];
3084 if (mep_reg_size (i) == 8)
/* Mirror of the prologue's skip conditions: untouched $tp/$gp/$lp
   outside interrupt handlers, $lp in EH mode, and (presumably the
   EH data registers) $10/$11 in normal returns -- confirm against
   the elided lines in the full source.  */
3089 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
3090 && !(mep_reg_set_in_function (i) || interrupt_handler))
3092 if (mep_prevent_lp_restore && i == LP_REGNO)
3094 if (!mep_prevent_lp_restore
3095 && !interrupt_handler
3096 && (i == 10 || i == 11))
3099 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
3100 emit_move_insn (gen_rtx_REG (rmode, i),
3102 plus_constant (stack_pointer_rtx,
3106 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3107 /* Defer this one so we can jump indirect rather than
3108 copying the RA to $lp and "ret". EH epilogues
3109 automatically skip this anyway. */
3110 lp_slot = sp_offset-rss;
/* Registers that cannot be loaded directly go through the core
   temporary REGSAVE_CONTROL_TEMP.  */
3113 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3115 plus_constant (stack_pointer_rtx,
3117 emit_move_insn (gen_rtx_REG (rmode, i),
3118 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3124 /* Restore this one last so we know it will be in the temp
3125 register when we return by jumping indirectly via the temp. */
3126 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3127 gen_rtx_MEM (SImode,
3128 plus_constant (stack_pointer_rtx,
3130 lp_temp = REGSAVE_CONTROL_TEMP;
3134 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
/* eh_return supplies an extra stack adjustment (EH_RETURN_STACKADJ).  */
3136 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3137 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3138 gen_rtx_REG (SImode, SP_REGNO),
3139 cfun->machine->eh_stack_adjust));
/* Sibcall epilogues stop here; the sibling call itself transfers
   control.  */
3141 if (mep_sibcall_epilogue)
3144 if (mep_disinterrupt_p ())
3145 emit_insn (gen_mep_enable_int ());
3147 if (mep_prevent_lp_restore)
3149 emit_jump_insn (gen_eh_return_internal ());
3152 else if (interrupt_handler)
3153 emit_jump_insn (gen_mep_reti ());
3155 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
/* Expander for the eh_return pattern: force the handler address into
   $lp if it is not already there, then emit the EH epilogue.
   (Elided listing; intervening rows missing.)  */
3159 mep_expand_eh_return (rtx *operands)
3161 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3163 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3164 emit_move_insn (ra, operands[0]);
3168 emit_insn (gen_eh_epilogue (operands[0]));
/* Splitter helper for eh_epilogue: record the stack adjustment
   register ($0) and expand a normal epilogue with the $lp restore
   suppressed (the handler address is already in $lp).  */
3172 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3174 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3175 mep_prevent_lp_restore = 1;
3176 mep_expand_epilogue ();
3177 mep_prevent_lp_restore = 0;
/* Expand the epilogue that precedes a sibling call: same restores,
   but no return instruction is emitted.  */
3181 mep_expand_sibcall_epilogue (void)
3183 mep_sibcall_epilogue = 1;
3184 mep_expand_epilogue ();
3185 mep_sibcall_epilogue = 0;
/* TARGET_FUNCTION_OK_FOR_SIBCALL: reject sibcalls to 'f'
   (far)-tagged targets and from interrupt/disinterrupt functions.
   (Elided listing; return statements are among the missing rows.)  */
3189 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3194 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3197 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3198 if (mep_interrupt_p () || mep_disinterrupt_p ())
/* EH_RETURN_STACKADJ_RTX: the register ($10) carrying the EH stack
   adjustment.  */
3205 mep_return_stackadj_rtx (void)
3207 return gen_rtx_REG (SImode, 10);
/* EH handler-address register: $lp.  */
3211 mep_return_handler_rtx (void)
3213 return gen_rtx_REG (SImode, LP_REGNO);
/* FUNCTION_PROFILER: emit the -p entry hook.  Saves $0 and $lp in a
   temporary 8-byte stack slot around the call to __mep_mcount, since
   bsr clobbers $lp and the helper may clobber $0.  */
3217 mep_function_profiler (FILE *file)
3219 /* Always right at the beginning of the function. */
3220 fprintf (file, "\t# mep function profiler\n");
3221 fprintf (file, "\tadd\t$sp, -8\n");
3222 fprintf (file, "\tsw\t$0, ($sp)\n");
3223 fprintf (file, "\tldc\t$0, $lp\n");
3224 fprintf (file, "\tsw\t$0, 4($sp)\n");
3225 fprintf (file, "\tbsr\t__mep_mcount\n");
3226 fprintf (file, "\tlw\t$0, 4($sp)\n");
3227 fprintf (file, "\tstc\t$0, $lp\n");
3228 fprintf (file, "\tlw\t$0, ($sp)\n");
3229 fprintf (file, "\tadd\t$sp, 8\n\n");
/* Same save/call/restore shape as the profiler hook, but calling
   __bb_trace_ret for basic-block profiling at function exit.  */
3233 mep_emit_bb_trace_ret (void)
3235 fprintf (asm_out_file, "\t# end of block profiling\n");
3236 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3237 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3238 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3239 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3240 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3241 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3242 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3243 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3244 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3251 /* Operand Printing. */
/* PRINT_OPERAND_ADDRESS: after prior passes, only bare register
   addresses should survive to here; strip a stray MEM wrapper and
   print the register.  */
3254 mep_print_operand_address (FILE *stream, rtx address)
3256 if (GET_CODE (address) == MEM)
3257 address = XEXP (address, 0);
3259 /* cf: gcc.dg/asm-4.c. */
3260 gcc_assert (GET_CODE (address) == REG);
3262 mep_print_operand (stream, address, 0);
/* Table driving mep_print_operand: each entry maps an operand-letter
   CODE plus a linearized RTX "pattern" string to an output format.
   Digits in the format index into the flattened pattern (patternr),
   letters in the pattern encode RTX kinds (m = MEM, r = REG, s/i =
   symbol/int, u2/u3 = unspecs, L/H = lo/hi parts, etc. -- exact
   legend is in the elided rows).  */
3268 const char *pattern;
3271 const conversions[] =
3274 { 0, "m+ri", "3(2)" },
3278 { 0, "mLrs", "%lo(3)(2)" },
3279 { 0, "mLr+si", "%lo(4+5)(2)" },
3280 { 0, "m+ru2s", "%tpoff(5)(2)" },
3281 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3282 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3283 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3284 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3285 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3287 { 0, "m+si", "(2+3)" },
3288 { 0, "m+li", "(2+3)" },
3291 { 0, "+si", "1+2" },
3292 { 0, "+u2si", "%tpoff(3+4)" },
3293 { 0, "+u3si", "%sdaoff(3+4)" },
3299 { 'h', "Hs", "%hi(1)" },
3301 { 'I', "u2s", "%tpoff(2)" },
3302 { 'I', "u3s", "%sdaoff(2)" },
3303 { 'I', "+u2si", "%tpoff(3+4)" },
3304 { 'I', "+u3si", "%sdaoff(3+4)" },
3306 { 'P', "mr", "(1\\+),\\0" },
/* Return the bit position (0..7) of the single set bit -- or single
   clear bit, for the 0xfe..0xef forms -- in the 8-bit constant I.
   Used when printing bset/bclr/btst-style operands.  */
3312 unique_bit_in (HOST_WIDE_INT i)
3316 case 0x01: case 0xfe: return 0;
3317 case 0x02: case 0xfd: return 1;
3318 case 0x04: case 0xfb: return 2;
3319 case 0x08: case 0xf7: return 3;
3320 case 0x10: case 0x7f: return 4;
3321 case 0x20: case 0xbf: return 5;
3322 case 0x40: case 0xdf: return 6;
3323 case 0x80: case 0xef: return 7;
/* Smallest bit-width rv such that (1 << rv) > I, i.e. the field size
   needed by a "clip" instruction to hold values up to I.  */
3330 bit_size_for_clip (HOST_WIDE_INT i)
3334 for (rv = 0; rv < 31; rv ++)
3335 if (((HOST_WIDE_INT) 1 << rv) > i)
3340 /* Print an operand to a assembler instruction. */
/* PRINT_OPERAND: handles the special codes ('M' cmov mnemonic, 'L'
   bit-op mnemonic, 'N' second cmov operand, 'h'/'I'/'P' etc.), then
   falls back to the conversions[] table: the operand RTX is
   linearized into `pattern`/`patternr` (in elided rows) and matched
   against table entries, whose format strings drive the output.
   NOTE(review): elided listing -- case labels and several statements
   are among the missing rows.  */
3343 mep_print_operand (FILE *file, rtx x, int code)
3346 const char *real_name;
3350 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3351 we're using, then skip over the "mep_" part of its name. */
3352 const struct cgen_insn *insn;
3354 if (mep_get_move_insn (mep_cmov, &insn))
3355 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3357 mep_intrinsic_unavailable (mep_cmov);
3362 switch (GET_CODE (x))
3365 fputs ("clr", file);
3368 fputs ("set", file);
3371 fputs ("not", file);
3374 output_operand_lossage ("invalid %%L code");
3379 /* Print the second operand of a CR <- CR move. If we're using
3380 a two-operand instruction (i.e., a real cmov), then just print
3381 the operand normally. If we're using a "reg, reg, immediate"
3382 instruction such as caddi3, print the operand followed by a
3383 zero field. If we're using a three-register instruction,
3384 print the operand twice. */
3385 const struct cgen_insn *insn;
3387 mep_print_operand (file, x, 0);
3388 if (mep_get_move_insn (mep_cmov, &insn)
3389 && insn_data[insn->icode].n_operands == 3)
3392 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3393 mep_print_operand (file, x, 0);
3395 mep_print_operand (file, const0_rtx, 0);
/* Table-driven path: find the entry matching (code, pattern) and
   interpret its format string character by character.  */
3401 for (i = 0; conversions[i].pattern; i++)
3402 if (conversions[i].code == code
3403 && strcmp(conversions[i].pattern, pattern) == 0)
3405 for (j = 0; conversions[i].format[j]; j++)
/* '\\' escapes the next format character verbatim.  */
3406 if (conversions[i].format[j] == '\\')
3408 fputc (conversions[i].format[j+1], file);
/* A digit selects a sub-rtx of the matched pattern to print.  */
3411 else if (ISDIGIT(conversions[i].format[j]))
3413 rtx r = patternr[conversions[i].format[j] - '0'];
3414 switch (GET_CODE (r))
3417 fprintf (file, "%s", reg_names [REGNO (r)]);
3423 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3426 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3429 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3432 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3435 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3438 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3439 && !(INTVAL (r) & 0xff))
3440 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3442 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
/* Large constants also get a hex rendering in a trailing comment
   to aid reading the generated assembly.  */
3445 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3446 && conversions[i].format[j+1] == 0)
3448 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3449 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3452 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3455 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3460 fprintf(file, "[const_double 0x%lx]",
3461 (unsigned long) CONST_DOUBLE_HIGH(r));
3464 real_name = targetm.strip_name_encoding (XSTR (r, 0));
3465 assemble_name (file, real_name);
3468 output_asm_label (r);
3471 fprintf (stderr, "don't know how to print this operand:");
/* A '+' in the format is only printed when the following
   operand is a non-negative constant (otherwise the constant's
   own minus sign serves as the separator).  */
3478 if (conversions[i].format[j] == '+'
3479 && (!code || code == 'I')
3480 && ISDIGIT (conversions[i].format[j+1])
3481 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3482 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3484 fputc(conversions[i].format[j], file);
3488 if (!conversions[i].pattern)
3490 error ("unconvertible operand %c %qs", code?code:'-', pattern);
/* FINAL_PRESCAN_INSN: re-emit the VLIW bundling marker '+' for
   non-core-slot insns, since the bundle flag on jumps cannot be
   trusted after post-scheduling jump optimization.  */
3498 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3499 int noperands ATTRIBUTE_UNUSED)
3501 /* Despite the fact that MeP is perfectly capable of branching and
3502 doing something else in the same bundle, gcc does jump
3503 optimization *after* scheduling, so we cannot trust the bundling
3504 flags on jump instructions. */
3505 if (GET_MODE (insn) == BImode
3506 && get_attr_slots (insn) != SLOTS_CORE)
3507 fputc ('+', asm_out_file);
3510 /* Function args in registers. */
/* TARGET_SETUP_INCOMING_VARARGS: record how many of the four argument
   registers ($1..$4) remain unnamed and must be spilled, and report
   the pretend-args size (4 bytes each).  */
3513 mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
3514 enum machine_mode mode ATTRIBUTE_UNUSED,
3515 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3516 int second_time ATTRIBUTE_UNUSED)
3518 int nsave = 4 - (cum->nregs + 1);
3521 cfun->machine->arg_regs_to_save = nsave;
3522 *pretend_size = nsave * 4;
/* Size in bytes of TYPE/MODE; falls back to the tree size for
   BLKmode.  */
3526 bytesize (const_tree type, enum machine_mode mode)
3528 if (mode == BLKmode)
3529 return int_size_in_bytes (type);
3530 return GET_MODE_SIZE (mode);
/* TARGET_EXPAND_BUILTIN_SAVEREGS: dump the unnamed core argument
   registers -- and, on IVC2, the corresponding 8-byte coprocessor
   argument registers (regno 49+) -- into a stack buffer, returning
   its address for va_start.  (Elided listing; the IVC2 conditional
   is among the missing rows.)  */
3534 mep_expand_builtin_saveregs (void)
3539 ns = cfun->machine->arg_regs_to_save;
3542 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3543 regbuf = assign_stack_local (SImode, bufsize, 64);
3548 regbuf = assign_stack_local (SImode, bufsize, 32);
3551 move_block_from_reg (5-ns, regbuf, ns);
3555 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3556 int ofs = 8 * ((ns+1)/2);
3558 for (i=0; i<ns; i++)
3560 int rn = (4-ns) + i + 49;
3563 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3564 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3568 return XEXP (regbuf, 0);
/* True iff T is a vector type (used to route IVC2 vector arguments
   through coprocessor registers).  */
3571 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
/* TARGET_BUILD_BUILTIN_VA_LIST: va_list is a four-pointer record:
   next_gp / next_gp_limit bound the spilled GPR area, next_cop walks
   the coprocessor-register area, next_stack walks overflow args.  */
3574 mep_build_builtin_va_list (void)
3576 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3580 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3582 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3583 get_identifier ("__va_next_gp"), ptr_type_node);
3584 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3585 get_identifier ("__va_next_gp_limit"),
3587 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3589 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3592 DECL_FIELD_CONTEXT (f_next_gp) = record;
3593 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3594 DECL_FIELD_CONTEXT (f_next_cop) = record;
3595 DECL_FIELD_CONTEXT (f_next_stack) = record;
3597 TYPE_FIELDS (record) = f_next_gp;
3598 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3599 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3600 DECL_CHAIN (f_next_cop) = f_next_stack;
3602 layout_type (record);
/* TARGET_EXPAND_BUILTIN_VA_START: initialize the four va_list fields.
   next_gp points at the saveregs buffer; next_gp_limit is 4*ns bytes
   beyond it; next_cop starts at the (8-byte aligned) coprocessor save
   area that follows; next_stack is the first overflow argument.  */
3608 mep_expand_va_start (tree valist, rtx nextarg)
3610 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3611 tree next_gp, next_gp_limit, next_cop, next_stack;
3615 ns = cfun->machine->arg_regs_to_save;
3617 f_next_gp = TYPE_FIELDS (va_list_type_node);
3618 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3619 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3620 f_next_stack = DECL_CHAIN (f_next_cop);
3622 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3624 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3625 valist, f_next_gp_limit, NULL_TREE);
3626 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3628 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3629 valist, f_next_stack, NULL_TREE);
3631 /* va_list.next_gp = expand_builtin_saveregs (); */
3632 u = make_tree (sizetype, expand_builtin_saveregs ())
/* NOTE(review): line above reproduced from the elided listing; the
   original statement's semicolon lies on this row in the real file. */;
3633 u = fold_convert (ptr_type_node, u);
3634 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3635 TREE_SIDE_EFFECTS (t) = 1;
3636 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3638 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3639 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3641 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3642 TREE_SIDE_EFFECTS (t) = 1;
3643 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3645 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3646 size_int (8 * ((ns+1)/2)));
3647 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3648 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3649 TREE_SIDE_EFFECTS (t) = 1;
3650 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3652 /* va_list.next_stack = nextarg; */
3653 u = make_tree (ptr_type_node, nextarg);
3654 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3655 TREE_SIDE_EFFECTS (t) = 1;
3656 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* TARGET_GIMPLIFY_VA_ARG_EXPR: emit GIMPLE that picks the next
   argument from the GPR save area (or, for IVC2 vectors, the
   coprocessor area) while next_gp < next_gp_limit, otherwise from the
   stack; values wider than a register (4, or 8 for IVC2 vectors) are
   passed by reference.  */
3660 mep_gimplify_va_arg_expr (tree valist, tree type,
3662 gimple_seq *post_p ATTRIBUTE_UNUSED)
3664 HOST_WIDE_INT size, rsize;
3665 bool by_reference, ivc2_vec;
3666 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3667 tree next_gp, next_gp_limit, next_cop, next_stack;
3668 tree label_sover, label_selse;
3671 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3673 size = int_size_in_bytes (type);
3674 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
/* By-reference arguments are fetched as a pointer and dereferenced
   at the end.  */
3678 type = build_pointer_type (type);
3681 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3683 f_next_gp = TYPE_FIELDS (va_list_type_node);
3684 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3685 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3686 f_next_stack = DECL_CHAIN (f_next_cop);
3688 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3690 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3691 valist, f_next_gp_limit, NULL_TREE);
3692 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3694 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3695 valist, f_next_stack, NULL_TREE);
3697 /* if f_next_gp < f_next_gp_limit
3698 IF (VECTOR_P && IVC2)
3706 val = *f_next_stack;
3707 f_next_stack += rsize;
3711 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3712 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3713 res_addr = create_tmp_var (ptr_type_node, NULL);
/* Guard: jump to the stack branch when the register area is
   exhausted.  */
3715 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3716 unshare_expr (next_gp_limit));
3717 tmp = build3 (COND_EXPR, void_type_node, tmp,
3718 build1 (GOTO_EXPR, void_type_node,
3719 unshare_expr (label_selse)),
3721 gimplify_and_add (tmp, pre_p);
/* Register branch: take the address from next_cop (IVC2 vectors) or
   next_gp (everything else), then advance both cursors in lockstep
   -- next_gp by 4, next_cop by 8 -- so the two areas stay paired.  */
3725 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3726 gimplify_and_add (tmp, pre_p);
3730 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3731 gimplify_and_add (tmp, pre_p);
3734 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3735 unshare_expr (next_gp), size_int (4));
3736 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3738 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3739 unshare_expr (next_cop), size_int (8));
3740 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3742 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3743 gimplify_and_add (tmp, pre_p);
/* Stack branch: take the address from next_stack and bump it by the
   rounded size.  */
3747 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3748 gimplify_and_add (tmp, pre_p);
3750 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3751 gimplify_and_add (tmp, pre_p);
3753 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3754 unshare_expr (next_stack), size_int (rsize));
3755 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3759 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3760 gimplify_and_add (tmp, pre_p);
3762 res_addr = fold_convert (build_pointer_type (type), res_addr);
/* One dereference to load the value; a second (above) when the
   argument itself was passed by reference.  */
3765 res_addr = build_va_arg_indirect_ref (res_addr);
3767 return build_va_arg_indirect_ref (res_addr);
/* INIT_CUMULATIVE_ARGS helper: reset the cumulative-args record and
   latch whether the callee carries the "vliw" attribute (used later
   to pick jsr vs. jsrv).  */
3771 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3772 rtx libname ATTRIBUTE_UNUSED,
3773 tree fndecl ATTRIBUTE_UNUSED)
3777 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3783 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3784 larger than 4 bytes are passed indirectly. Return value in 0,
3785 unless bigger than 4 bytes, then the caller passes a pointer as the
3786 first arg. For varargs, we copy $1..$4 to the stack. */
/* TARGET_FUNCTION_ARG: next argument register ($1..$4; coprocessor
   regs from 49 for IVC2 vectors), or the stack once registers run
   out.  */
3789 mep_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3790 const_tree type ATTRIBUTE_UNUSED,
3791 bool named ATTRIBUTE_UNUSED)
3793 /* VOIDmode is a signal for the backend to pass data to the call
3794 expander via the second operand to the call pattern. We use
3795 this to determine whether to use "jsr" or "jsrv". */
3796 if (mode == VOIDmode)
3797 return GEN_INT (cum->vliw);
3799 /* If we havn't run out of argument registers, return the next. */
3802 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3803 return gen_rtx_REG (mode, cum->nregs + 49);
3805 return gen_rtx_REG (mode, cum->nregs + 1);
3808 /* Otherwise the argument goes on the stack. */
/* TARGET_PASS_BY_REFERENCE: anything outside 1..8 bytes goes by
   reference; 5..8-byte values only ride in registers for IVC2
   vectors while argument registers remain.  (Elided listing; the
   size<=4 / return rows are missing.)  */
3813 mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3814 enum machine_mode mode,
3816 bool named ATTRIBUTE_UNUSED)
3818 int size = bytesize (type, mode);
3820 /* This is non-obvious, but yes, large values passed after we've run
3821 out of registers are *still* passed by reference - we put the
3822 address of the parameter on the stack, as well as putting the
3823 parameter itself elsewhere on the stack. */
3825 if (size <= 0 || size > 8)
3829 if (TARGET_IVC2 && cum->nregs < 4 && type != NULL_TREE && VECTOR_TYPE_P (type))
/* TARGET_FUNCTION_ARG_ADVANCE: bump the register cursor (body rows
   elided).  */
3835 mep_function_arg_advance (CUMULATIVE_ARGS *pcum,
3836 enum machine_mode mode ATTRIBUTE_UNUSED,
3837 const_tree type ATTRIBUTE_UNUSED,
3838 bool named ATTRIBUTE_UNUSED)
/* TARGET_RETURN_IN_MEMORY: values return in a register when 1..4
   bytes (1..8 for IVC2 vectors); otherwise in memory.  */
3844 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3846 int size = bytesize (type, BLKmode);
3847 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3848 return size > 0 && size <= 8 ? 0 : 1;
3849 return size > 0 && size <= 4 ? 0 : 1;
/* TARGET_NARROW_VOLATILE_BITFIELD hook (body rows elided in this
   listing).  */
3853 mep_narrow_volatile_bitfield (void)
3859 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3862 mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
/* IVC2 vector results come back in coprocessor register 48 instead
   of $0.  */
3864 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3865 return gen_rtx_REG (TYPE_MODE (type), 48);
3866 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3869 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3872 mep_libcall_value (enum machine_mode mode)
3874 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3877 /* Handle pipeline hazards. */
/* Opcode classes we track across consecutive instructions; opnames[]
   is indexed by op_num for the hazard diagnostic comment.  */
3879 typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
3880 static const char *opnames[] = { "", "stc", "fsft", "ret" };
/* Class of the previously emitted opcode (persists across calls;
   final output is single-threaded).  */
3882 static int prev_opcode = 0;
3884 /* This isn't as optimal as it could be, because we don't know what
3885 control register the STC opcode is storing in. We only need to add
3886 the nop if it's the relevent register, but we add it for irrelevent
/* ASM_OUTPUT_OPCODE hook: classify the opcode about to be emitted
   (checking the character after the mnemonic so e.g. "stcb" does not
   match "stc"), and insert a filler instruction between an stc and a
   following fsft or ret, which would otherwise hit a pipeline
   hazard.  */
3890 mep_asm_output_opcode (FILE *file, const char *ptr)
3892 int this_opcode = op_none;
3893 const char *hazard = 0;
3898 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3899 this_opcode = op_fsft;
3902 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3903 this_opcode = op_ret;
3906 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3907 this_opcode = op_stc;
3911 if (prev_opcode == op_stc && this_opcode == op_fsft)
3913 if (prev_opcode == op_stc && this_opcode == op_ret)
3917 fprintf(file, "%s\t# %s-%s hazard\n\t",
3918 hazard, opnames[prev_opcode], opnames[this_opcode]);
3920 prev_opcode = this_opcode;
3923 /* Handle attributes. */
/* Attribute handler for "based"/"tiny": only meaningful on variables
   (and pointer/typedef types); rejects auto-storage variables and
   warns when the attribute lands on a pointed-to type.  */
3926 mep_validate_based_tiny (tree *node, tree name, tree args,
3927 int flags ATTRIBUTE_UNUSED, bool *no_add)
3929 if (TREE_CODE (*node) != VAR_DECL
3930 && TREE_CODE (*node) != POINTER_TYPE
3931 && TREE_CODE (*node) != TYPE_DECL)
3933 warning (0, "%qE attribute only applies to variables", name);
3936 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3938 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3940 warning (0, "address region attributes not allowed with auto storage class");
3943 /* Ignore storage attribute of pointed to variable: char __far * x; */
3944 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3946 warning (0, "address region attributes on pointed-to types ignored");
/* Count the address-region attributes (based/tiny/near/far/io) on
   LIST; with CHECK_SECTION_ATTR, instead count "section" attributes
   so callers can detect a region/section conflict.  */
3955 mep_multiple_address_regions (tree list, bool check_section_attr)
3958 int count_sections = 0;
3959 int section_attr_count = 0;
3961 for (a = list; a; a = TREE_CHAIN (a))
3963 if (is_attribute_p ("based", TREE_PURPOSE (a))
3964 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3965 || is_attribute_p ("near", TREE_PURPOSE (a))
3966 || is_attribute_p ("far", TREE_PURPOSE (a))
3967 || is_attribute_p ("io", TREE_PURPOSE (a)))
3969 if (check_section_attr)
3970 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3973 if (check_section_attr)
3974 return section_attr_count;
3976 return count_sections;
/* Attribute list of DECL: type attributes for a type, else the
   decl's own attributes, else its type's.  */
3979 #define MEP_ATTRIBUTES(decl) \
3980 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3981 : DECL_ATTRIBUTES (decl) \
3982 ? (DECL_ATTRIBUTES (decl)) \
3983 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
/* Attribute handler for "near"/"far": like based/tiny but also legal
   on functions; additionally rejects a second address-region
   attribute on the same declaration.  */
3986 mep_validate_near_far (tree *node, tree name, tree args,
3987 int flags ATTRIBUTE_UNUSED, bool *no_add)
3989 if (TREE_CODE (*node) != VAR_DECL
3990 && TREE_CODE (*node) != FUNCTION_DECL
3991 && TREE_CODE (*node) != METHOD_TYPE
3992 && TREE_CODE (*node) != POINTER_TYPE
3993 && TREE_CODE (*node) != TYPE_DECL)
3995 warning (0, "%qE attribute only applies to variables and functions",
3999 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
4001 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
4003 warning (0, "address region attributes not allowed with auto storage class");
4006 /* Ignore storage attribute of pointed to variable: char __far * x; */
4007 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
4009 warning (0, "address region attributes on pointed-to types ignored");
4013 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
/* Duplicate region attribute: warn and drop ALL attributes from the
   decl rather than keep an ambiguous pair.  */
4015 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4016 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
4017 DECL_ATTRIBUTES (*node) = NULL_TREE;
/* Attribute handler for "disinterrupt": functions only.  */
4023 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4024 int flags ATTRIBUTE_UNUSED, bool *no_add)
4026 if (TREE_CODE (*node) != FUNCTION_DECL
4027 && TREE_CODE (*node) != METHOD_TYPE)
4029 warning (0, "%qE attribute only applies to functions", name);
/* Attribute handler for "interrupt": functions only; handlers cannot
   be inlined, must return void, and must take no arguments.  */
4036 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4037 int flags ATTRIBUTE_UNUSED, bool *no_add)
4041 if (TREE_CODE (*node) != FUNCTION_DECL)
4043 warning (0, "%qE attribute only applies to functions", name);
4048 if (DECL_DECLARED_INLINE_P (*node))
4049 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
4050 DECL_UNINLINABLE (*node) = 1;
4052 function_type = TREE_TYPE (*node);
4054 if (TREE_TYPE (function_type) != void_type_node)
4055 error ("interrupt function must have return type of void");
4057 if (prototype_p (function_type)
4058 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
4059 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
4060 error ("interrupt function must have no arguments");
/* Attribute handler for "io"/"cb": variables only, with an optional
   integer-constant address argument (NON_LVALUE_EXPR wrappers are
   peeled first).  io variables become volatile unless
   -mio-no-volatile.  */
4066 mep_validate_io_cb (tree *node, tree name, tree args,
4067 int flags ATTRIBUTE_UNUSED, bool *no_add)
4069 if (TREE_CODE (*node) != VAR_DECL)
4071 warning (0, "%qE attribute only applies to variables", name);
4075 if (args != NULL_TREE)
4077 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
4078 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
4079 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
4081 warning (0, "%qE attribute allows only an integer constant argument",
4087 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
4088 TREE_THIS_VOLATILE (*node) = 1;
/* Attribute handler for "vliw": function types/decls only.  Produces
   tailored diagnostics (with one-time syntax hints) when users put it
   on pointers or arrays, and rejects it entirely on cores without a
   VLIW configuration.  */
4094 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4095 int flags ATTRIBUTE_UNUSED, bool *no_add)
4097 if (TREE_CODE (*node) != FUNCTION_TYPE
4098 && TREE_CODE (*node) != FUNCTION_DECL
4099 && TREE_CODE (*node) != METHOD_TYPE
4100 && TREE_CODE (*node) != FIELD_DECL
4101 && TREE_CODE (*node) != TYPE_DECL)
/* The hint flags are static so each note is emitted at most once per
   compilation.  */
4103 static int gave_pointer_note = 0;
4104 static int gave_array_note = 0;
4105 static const char * given_type = NULL;
4107 given_type = tree_code_name[TREE_CODE (*node)];
4108 if (TREE_CODE (*node) == POINTER_TYPE)
4109 given_type = "pointers";
4110 if (TREE_CODE (*node) == ARRAY_TYPE)
4111 given_type = "arrays";
4114 warning (0, "%qE attribute only applies to functions, not %s",
4117 warning (0, "%qE attribute only applies to functions",
4121 if (TREE_CODE (*node) == POINTER_TYPE
4122 && !gave_pointer_note)
4124 inform (input_location, "to describe a pointer to a VLIW function, use syntax like this:");
4125 inform (input_location, " typedef int (__vliw *vfuncptr) ();");
4126 gave_pointer_note = 1;
4129 if (TREE_CODE (*node) == ARRAY_TYPE
4130 && !gave_array_note)
4132 inform (input_location, "to describe an array of VLIW function pointers, use syntax like this:");
4133 inform (input_location, " typedef int (__vliw *vfuncptr[]) ();");
4134 gave_array_note = 1;
4138 error ("VLIW functions are not allowed without a VLIW configuration");
/* TARGET_ATTRIBUTE_TABLE: the MeP-specific attributes and their
   validation handlers; NULL-name entry terminates the table.  */
4142 static const struct attribute_spec mep_attribute_table[11] =
4144 /* name min max decl type func handler
4145 affects_type_identity */
4146 { "based", 0, 0, false, false, false, mep_validate_based_tiny, false },
4147 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny, false },
4148 { "near", 0, 0, false, false, false, mep_validate_near_far, false },
4149 { "far", 0, 0, false, false, false, mep_validate_near_far, false },
4150 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
4152 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt, false },
4153 { "io", 0, 1, false, false, false, mep_validate_io_cb, false },
4154 { "cb", 0, 1, false, false, false, mep_validate_io_cb, false },
4155 { "vliw", 0, 0, false, true, false, mep_validate_vliw, false },
4156 { NULL, 0, 0, false, false, false, NULL, false }
/* TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P: interrupt and disinterrupt
   functions must never be inlined (their pro/epilogues differ).  */
4160 mep_function_attribute_inlinable_p (const_tree callee)
4162 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4163 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4164 return (lookup_attribute ("disinterrupt", attrs) == 0
4165 && lookup_attribute ("interrupt", attrs) == 0);
/* TARGET_CAN_INLINE_P: a VLIW callee must not be inlined into a
   non-VLIW caller (the instruction bundling would be wrong).  */
4169 mep_can_inline_p (tree caller, tree callee)
4171 if (TREE_CODE (callee) == ADDR_EXPR)
4172 callee = TREE_OPERAND (callee, 0);
4174 if (!mep_vliw_function_p (caller)
4175 && mep_vliw_function_p (callee))
/* Flag bit recorded for "#pragma disinterrupt" (FUNC_CALL is defined
   in an elided row).  */
4183 #define FUNC_DISINTERRUPT 2
/* GC-managed record of a #pragma call/disinterrupt seen for a
   function name; `flag`/`used` bitmask rows are elided here.  */
4186 struct GTY(()) pragma_entry {
4189 const char *funcname;
4191 typedef struct pragma_entry pragma_entry;
4193 /* Hash table of farcall-tagged sections. */
4194 static GTY((param_is (pragma_entry))) htab_t pragma_htab;
/* htab equality callback: entry matches when its funcname equals the
   looked-up name.  */
4197 pragma_entry_eq (const void *p1, const void *p2)
4199 const pragma_entry *old = (const pragma_entry *) p1;
4200 const char *new_name = (const char *) p2;
4202 return strcmp (old->funcname, new_name) == 0;
/* htab hash callback: hash of the stored function name.  */
4206 pragma_entry_hash (const void *p)
4208 const pragma_entry *old = (const pragma_entry *) p;
4209 return htab_hash_string (old->funcname);
/* Record FLAG for FUNCNAME in pragma_htab, creating the table and the
   entry (GC-allocated, name duplicated into GC memory) on first
   use.  */
4213 mep_note_pragma_flag (const char *funcname, int flag)
4215 pragma_entry **slot;
4218 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4219 pragma_entry_eq, NULL);
4221 slot = (pragma_entry **)
4222 htab_find_slot_with_hash (pragma_htab, funcname,
4223 htab_hash_string (funcname), INSERT);
4227 *slot = ggc_alloc_pragma_entry ();
4230 (*slot)->funcname = ggc_strdup (funcname);
4232 (*slot)->flag |= flag;
/* Query whether FLAG was noted for FUNCNAME; marks the flag as used
   so unused #pragma disinterrupt can be diagnosed at end of file.
   Names in "@x." encoding are unwrapped first (elided row).  */
4236 mep_lookup_pragma_flag (const char *funcname, int flag)
4238 pragma_entry **slot;
4243 if (funcname[0] == '@' && funcname[2] == '.')
4246 slot = (pragma_entry **)
4247 htab_find_slot_with_hash (pragma_htab, funcname,
4248 htab_hash_string (funcname), NO_INSERT);
4249 if (slot && *slot && ((*slot)->flag & flag))
4251 (*slot)->used |= flag;
/* Thin wrappers for the two pragma kinds.  */
4258 mep_lookup_pragma_call (const char *funcname)
4260 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4264 mep_note_pragma_call (const char *funcname)
4266 mep_note_pragma_flag (funcname, FUNC_CALL);
4270 mep_lookup_pragma_disinterrupt (const char *funcname)
4272 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4276 mep_note_pragma_disinterrupt (const char *funcname)
4278 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
/* htab_traverse callback: warn about #pragma disinterrupt entries
   that never matched a function definition.  */
4282 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4284 const pragma_entry *d = (const pragma_entry *)(*slot);
4286 if ((d->flag & FUNC_DISINTERRUPT)
4287 && !(d->used & FUNC_DISINTERRUPT))
4288 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
/* End-of-compilation hook: run the unused-pragma check over the
   table.  */
4293 mep_file_cleanups (void)
4296 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
4299 /* These three functions provide a bridge between the pramgas that
4300 affect register classes, and the functions that maintain them. We
4301 can't call those functions directly as pragma handling is part of
4302 the front end and doesn't have direct access to them. */
/* Front-end-visible wrapper around save_register_info (see the bridge
   comment above these functions).  */
4305 mep_save_register_info (void)
4307 save_register_info ();
/* Companion register-class bridge entry points.  NOTE(review): their
   bodies are missing from this extract; only the headers are visible.  */
4311 mep_reinit_regs (void)
4317 mep_init_regs (void)
/* Map an attribute LIST on DECL to the single-character section
   encoding used in "@<char>." symbol prefixes (see the table in the
   file header: b/t/n/f/i/c).  Warns about and trims duplicate address
   region attributes.  NOTE(review): the return statements for each
   is_attribute_p arm are missing from this extract; the exact
   character returned per arm cannot be confirmed here.  */
4325 mep_attrlist_to_encoding (tree list, tree decl)
4327 if (mep_multiple_address_regions (list, false) > 1)
4329 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4330 TREE_PURPOSE (TREE_CHAIN (list)),
4332 DECL_SOURCE_LINE (decl));
/* Keep only the first region attribute.  */
4333 TREE_CHAIN (list) = NULL_TREE;
4338 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4340 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4342 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4344 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4346 if (is_attribute_p ("io", TREE_PURPOSE (list)))
/* An "io" attribute may carry an explicit address; only small
   addresses (<= 0x1000000) take the short encoding here.  */
4348 if (TREE_VALUE (list)
4349 && TREE_VALUE (TREE_VALUE (list))
4350 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4352 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4354 && location <= 0x1000000)
4359 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4361 list = TREE_CHAIN (list);
4364 && TREE_CODE (decl) == FUNCTION_DECL
4365 && DECL_SECTION_NAME (decl) == 0)
/* TARGET_COMP_TYPE_ATTRIBUTES: two function types are compatible only
   if they agree on the presence of the "vliw" attribute (comparison
   logic follows in lines missing from this extract).  */
4371 mep_comp_type_attributes (const_tree t1, const_tree t2)
4375 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4376 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
/* TARGET_INSERT_ATTRIBUTES: apply pragma-derived and default section
   attributes to DECL before it is otherwise processed.  Functions get
   a "disinterrupt" attribute if a matching pragma was seen; variables
   get a default address-region attribute (based/tiny/near/far) chosen
   from size cutoffs and -m options.
   NOTE(review): many intermediate lines are missing from this
   extract; comments below annotate only the visible fragments.  */
4385 mep_insert_attributes (tree decl, tree *attributes)
4388 const char *secname = 0;
4389 tree attrib, attrlist;
4392 if (TREE_CODE (decl) == FUNCTION_DECL)
4394 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4396 if (mep_lookup_pragma_disinterrupt (funcname))
4398 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4399 *attributes = chainon (*attributes, attrib);
/* Only public/static/external variables get default regions.  */
4403 if (TREE_CODE (decl) != VAR_DECL
4404 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4407 if (TREE_READONLY (decl) && TARGET_DC)
4408 /* -mdc means that const variables default to the near section,
4409 regardless of the size cutoff. */
4412 /* User specified an attribute, so override the default.
4413 Ignore storage attribute of pointed to variable. char __far * x; */
4414 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4416 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4417 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4418 else if (DECL_ATTRIBUTES (decl) && *attributes)
4419 DECL_ATTRIBUTES (decl) = NULL_TREE;
/* Compute the encoding from explicit attributes first, falling back
   to the variable's type attributes.  */
4422 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4423 encoding = mep_attrlist_to_encoding (attrlist, decl);
4424 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4426 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4427 encoding = mep_attrlist_to_encoding (attrlist, decl);
4431 /* This means that the declaration has a specific section
4432 attribute, so we should not apply the default rules. */
/* For __io variables, diagnose two distinct decls assigned the same
   explicit address.  */
4434 if (encoding == 'i' || encoding == 'I')
4436 tree attr = lookup_attribute ("io", attrlist);
4438 && TREE_VALUE (attr)
4439 && TREE_VALUE (TREE_VALUE(attr)))
4441 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4442 static tree previous_value = 0;
4443 static int previous_location = 0;
4444 static tree previous_name = 0;
4446 /* We take advantage of the fact that gcc will reuse the
4447 same tree pointer when applying an attribute to a
4448 list of decls, but produce a new tree for attributes
4449 on separate source lines, even when they're textually
4450 identical. This is the behavior we want. */
4451 if (TREE_VALUE (attr) == previous_value
4452 && location == previous_location)
4454 warning(0, "__io address 0x%x is the same for %qE and %qE",
4455 location, previous_name, DECL_NAME (decl));
4457 previous_name = DECL_NAME (decl);
4458 previous_location = location;
4459 previous_value = TREE_VALUE (attr);
4466 /* Declarations of arrays can change size. Don't trust them. */
4467 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4470 size = int_size_in_bytes (TREE_TYPE (decl));
4472 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4474 if (TREE_PUBLIC (decl)
4475 || DECL_EXTERNAL (decl)
4476 || TREE_STATIC (decl))
4478 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
/* Size-based defaults: small data goes based/tiny per the cutoffs.  */
4502 if (size <= mep_based_cutoff && size > 0)
4504 else if (size <= mep_tiny_cutoff && size > 0)
/* -mconst-section overrides the default for read-only data.  */
4510 if (mep_const_section && TREE_READONLY (decl))
4512 if (strcmp (mep_const_section, "tiny") == 0)
4514 else if (strcmp (mep_const_section, "near") == 0)
4516 else if (strcmp (mep_const_section, "far") == 0)
4523 if (!mep_multiple_address_regions (*attributes, true)
4524 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4526 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4528 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4529 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4530 and mep_validate_based_tiny. */
4531 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
/* TARGET_ENCODE_SECTION_INFO: rewrite DECL's symbol name to carry the
   section encoding as an "@<char>." prefix (see the file header), and
   warn when a variable is too large for its chosen section.
   NOTE(review): several lines (declarations, early returns, the
   maxsize table) are missing from this extract.  */
4536 mep_encode_section_info (tree decl, rtx rtl, int first)
4539 const char *oldname;
4540 const char *secname;
4546 tree mep_attributes;
4551 if (TREE_CODE (decl) != VAR_DECL
4552 && TREE_CODE (decl) != FUNCTION_DECL)
/* Extract the current assembler name from the decl's RTL, which may
   be a SYMBOL_REF directly or wrapped in a MEM.  */
4555 rtlname = XEXP (rtl, 0);
4556 if (GET_CODE (rtlname) == SYMBOL_REF)
4557 oldname = XSTR (rtlname, 0);
4558 else if (GET_CODE (rtlname) == MEM
4559 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4560 oldname = XSTR (XEXP (rtlname, 0), 0);
4564 type = TREE_TYPE (decl);
4565 if (type == error_mark_node)
4567 mep_attributes = MEP_ATTRIBUTES (decl);
4569 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
/* Build the new "@<encoding>.<oldname>" symbol and install it,
   preserving weakness and the decl back-pointer.  */
4573 newname = (char *) alloca (strlen (oldname) + 4);
4574 sprintf (newname, "@%c.%s", encoding, oldname);
4575 idp = get_identifier (newname);
4577 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4578 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4579 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4592 maxsize = 0x1000000;
4600 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4602 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4604 (long) int_size_in_bytes (TREE_TYPE (decl)),
/* TARGET_STRIP_NAME_ENCODING: undo the "@<char>." prefix added by
   mep_encode_section_info (body largely missing from this extract).  */
4612 mep_strip_name_encoding (const char *sym)
4618 else if (*sym == '@' && sym[2] == '.')
/* TARGET_ASM_SELECT_SECTION: choose an output section for DECL based
   on its kind and the "@<char>." name encoding.  Functions go to
   (v)(f)text variants; variables dispatch on the encoding character
   to based/tiny/far/io/cb sections.  NOTE(review): the switch labels
   and several case arms are missing from this extract.  */
4626 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4627 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4632 switch (TREE_CODE (decl))
/* A VAR_DECL is writable data unless it is a genuinely constant,
   initialized, side-effect-free readonly object.  */
4635 if (!TREE_READONLY (decl)
4636 || TREE_SIDE_EFFECTS (decl)
4637 || !DECL_INITIAL (decl)
4638 || (DECL_INITIAL (decl) != error_mark_node
4639 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4643 if (! TREE_CONSTANT (decl))
4651 if (TREE_CODE (decl) == FUNCTION_DECL)
4653 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4655 if (name[0] == '@' && name[2] == '.')
4660 if (flag_function_sections || DECL_ONE_ONLY (decl))
4661 mep_unique_section (decl, 0);
/* VLIW functions need the vtext/vftext variants so the assembler is
   switched into VLIW mode for them.  */
4662 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4664 if (encoding == 'f')
4665 return vftext_section;
4667 return vtext_section;
4669 else if (encoding == 'f')
4670 return ftext_section;
4672 return text_section;
4675 if (TREE_CODE (decl) == VAR_DECL)
4677 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4679 if (name[0] == '@' && name[2] == '.')
4683 return based_section;
4687 return srodata_section;
4688 if (DECL_INITIAL (decl))
4689 return sdata_section;
4690 return tinybss_section;
4694 return frodata_section;
/* io/cb variables live at fixed addresses; an initializer has
   nowhere to go, so diagnose it.  */
4699 error_at (DECL_SOURCE_LOCATION (decl),
4700 "variable %D of type %<io%> must be uninitialized", decl);
4701 return data_section;
4704 error_at (DECL_SOURCE_LOCATION (decl),
4705 "variable %D of type %<cb%> must be uninitialized", decl);
4706 return data_section;
4711 return readonly_data_section;
4713 return data_section;
/* TARGET_ASM_UNIQUE_SECTION: build a per-decl section name of the
   form "<prefix><symbol>", choosing the prefix from the table below
   (column 1 when DECL_ONE_ONLY, for .gnu.linkonce semantics).  The
   section index is refined from the decl kind and the "@<char>."
   name encoding.  */
4717 mep_unique_section (tree decl, int reloc)
4719 static const char *prefixes[][2] =
4721 { ".text.", ".gnu.linkonce.t." },
4722 { ".rodata.", ".gnu.linkonce.r." },
4723 { ".data.", ".gnu.linkonce.d." },
4724 { ".based.", ".gnu.linkonce.based." },
4725 { ".sdata.", ".gnu.linkonce.s." },
4726 { ".far.", ".gnu.linkonce.far." },
4727 { ".ftext.", ".gnu.linkonce.ft." },
4728 { ".frodata.", ".gnu.linkonce.frd." },
4729 { ".srodata.", ".gnu.linkonce.srd." },
4730 { ".vtext.", ".gnu.linkonce.v." },
4731 { ".vftext.", ".gnu.linkonce.vf." }
4733 int sec = 2; /* .data */
4735 const char *name, *prefix;
/* Prefer the (possibly encoding-prefixed) RTL symbol name over the
   plain assembler name.  */
4738 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4739 if (DECL_RTL (decl))
4740 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4742 if (TREE_CODE (decl) == FUNCTION_DECL)
4744 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4745 sec = 9; /* .vtext */
4747 sec = 0; /* .text */
4749 else if (decl_readonly_section (decl, reloc))
4750 sec = 1; /* .rodata */
/* Refine from the "@<char>." encoding (the per-character case labels
   are missing from this extract).  */
4752 if (name[0] == '@' && name[2] == '.')
4757 sec = 3; /* .based */
4761 sec = 8; /* .srodata */
4763 sec = 4; /* .sdata */
4767 sec = 6; /* .ftext */
4769 sec = 10; /* .vftext */
4771 sec = 7; /* .frodata */
4773 sec = 5; /* .far. */
4779 prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
4780 len = strlen (name) + strlen (prefix);
4781 string = (char *) alloca (len + 1);
4783 sprintf (string, "%s%s", prefix, name);
4785 DECL_SECTION_NAME (decl) = build_string (len, string);
4788 /* Given a decl, a section name, and whether the decl initializer
4789 has relocs, choose attributes for the section. */
/* Reuse the target-specific section-flag bit to mark VLIW sections.  */
4791 #define SECTION_MEP_VLIW SECTION_MACH_DEP
/* TARGET_SECTION_TYPE_FLAGS: defaults plus SECTION_MEP_VLIW for
   functions carrying the "vliw" attribute.  */
4794 mep_section_type_flags (tree decl, const char *name, int reloc)
4796 unsigned int flags = default_section_type_flags (decl, name, reloc);
4798 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4799 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4800 flags |= SECTION_MEP_VLIW;
4805 /* Switch to an arbitrary section NAME with attributes as specified
4806 by FLAGS. ALIGN specifies any known alignment requirements for
4807 the section; 0 if the default should be used.
4809 Differs from the standard ELF version only in support of VLIW mode. */
4812 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4814 char flagchars[8], *f = flagchars;
/* Build the ELF flag-letter string (the letter appended per flag is
   on lines missing from this extract).  */
4817 if (!(flags & SECTION_DEBUG))
4819 if (flags & SECTION_WRITE)
4821 if (flags & SECTION_CODE)
4823 if (flags & SECTION_SMALL)
4825 if (flags & SECTION_MEP_VLIW)
4829 if (flags & SECTION_BSS)
4834 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4835 name, flagchars, type);
/* For code sections, tell the assembler which instruction set mode
   (VLIW vs. core) the following code is in.  */
4837 if (flags & SECTION_CODE)
4838 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
/* Emit an aligned common (or local common) definition for NAME of
   SIZE bytes.  io/cb variables become absolute symbol assignments;
   based/tiny/far variables are laid out in their own bss-like
   sections; everything else falls through to .comm/.local.
   NOTE(review): several lines are missing from this extract.  */
4843 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4844 int size, int align, int global)
4846 /* We intentionally don't use mep_section_tag() here. */
/* An "@i." / "@I." / "@c." encoded name: look up the explicit address
   from the io/cb attribute.  */
4848 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4852 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4853 DECL_ATTRIBUTES (decl));
4855 && TREE_VALUE (attr)
4856 && TREE_VALUE (TREE_VALUE(attr)))
4857 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4862 fprintf (stream, "\t.globl\t");
4863 assemble_name (stream, name);
4864 fprintf (stream, "\n");
/* Emit "name = <address>" instead of reserving storage.  */
4866 assemble_name (stream, name);
4867 fprintf (stream, " = %d\n", location);
4870 if (name[0] == '@' && name[2] == '.')
4872 const char *sec = 0;
4876 switch_to_section (based_section);
4880 switch_to_section (tinybss_section);
4884 switch_to_section (farbss_section);
/* Convert the bit alignment into a .p2align exponent.  */
4893 while (align > BITS_PER_UNIT)
4898 name2 = targetm.strip_name_encoding (name);
4900 fprintf (stream, "\t.globl\t%s\n", name2);
4901 fprintf (stream, "\t.p2align %d\n", p2align);
4902 fprintf (stream, "\t.type\t%s,@object\n", name2);
4903 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4904 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
/* Default path: .local for non-global commons, then .comm.  */
4911 fprintf (stream, "\t.local\t");
4912 assemble_name (stream, name);
4913 fprintf (stream, "\n");
4915 fprintf (stream, "\t.comm\t");
4916 assemble_name (stream, name);
4917 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
/* TARGET_TRAMPOLINE_INIT: defer trampoline construction to the
   runtime helper __mep_trampoline_helper, passing the trampoline
   address, target function address, and static chain.  */
4923 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4925 rtx addr = XEXP (m_tramp, 0);
4926 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4928 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4929 LCT_NORMAL, VOIDmode, 3,
4932 static_chain, Pmode);
4935 /* Experimental Reorg. */
/* Recursively scan rtx IN for a mention of REG (or, when REG is NULL,
   of any MEM).  When MODES_TOO is nonzero a register only matches if
   its machine mode matches as well.  NOTE(review): declarations and
   several leaf cases are missing from this extract.  */
4938 mep_mentioned_p (rtx in,
4939 rtx reg, /* NULL for mem */
4940 int modes_too) /* if nonzero, modes must match also. */
4948 if (reg && GET_CODE (reg) != REG)
4951 if (GET_CODE (in) == LABEL_REF)
4954 code = GET_CODE (in);
4960 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4966 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4968 return (REGNO (in) == REGNO (reg));
4981 /* Set's source should be read-only. */
4982 if (code == SET && !reg)
4983 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
/* Generic recursion over the rtx format string: 'E' is a vector of
   sub-rtxen, 'e' a single sub-rtx.  */
4985 fmt = GET_RTX_FORMAT (code);
4987 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4992 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4993 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4996 else if (fmt[i] == 'e'
4997 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
5003 #define EXPERIMENTAL_REGMOVE_REORG 1
5005 #if EXPERIMENTAL_REGMOVE_REORG
/* Two hard regs are move-compatible when both are general registers
   or both are coprocessor registers.  */
5008 mep_compatible_reg_class (int r1, int r2)
5010 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
5012 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
/* Delete superfluous register-to-register moves: for each
   (set r2 r1) where r1 dies, find the later insn that uses and kills
   r2, substitute r1 there, and delete the move if the result still
   recognizes.  NOTE(review): lines are missing from this extract;
   comments annotate only visible fragments.  */
5018 mep_reorg_regmove (rtx insns)
5020 rtx insn, next, pat, follow, *where;
5021 int count = 0, done = 0, replace, before = 0;
/* First pass: count candidate insns (for the dump statistics).  */
5024 for (insn = insns; insn; insn = NEXT_INSN (insn))
5025 if (GET_CODE (insn) == INSN)
5028 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
5029 set that uses the r2 and r2 dies there. We replace r2 with r1
5030 and see if it's still a valid insn. If so, delete the first set.
5031 Copied from reorg.c. */
5036 for (insn = insns; insn; insn = next)
5038 next = NEXT_INSN (insn);
5039 if (GET_CODE (insn) != INSN)
5041 pat = PATTERN (insn);
5045 if (GET_CODE (pat) == SET
5046 && GET_CODE (SET_SRC (pat)) == REG
5047 && GET_CODE (SET_DEST (pat)) == REG
5048 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
5049 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
5051 follow = next_nonnote_insn (insn);
5053 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
/* Skip forward over insns that touch neither source nor dest.  */
5055 while (follow && GET_CODE (follow) == INSN
5056 && GET_CODE (PATTERN (follow)) == SET
5057 && !dead_or_set_p (follow, SET_SRC (pat))
5058 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
5059 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
5062 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
5063 follow = next_nonnote_insn (follow);
5067 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
5068 if (follow && GET_CODE (follow) == INSN
5069 && GET_CODE (PATTERN (follow)) == SET
5070 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
/* Decide where the substitution happens: in the source expression
   for reg destinations, in the whole pattern for mem destinations.  */
5072 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5074 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5077 where = & SET_SRC (PATTERN (follow));
5080 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5082 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5085 where = & PATTERN (follow);
5091 /* If so, follow is the corresponding insn */
5098 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5099 for (x = insn; x ;x = NEXT_INSN (x))
5101 print_rtl_single (dump_file, x);
5104 fprintf (dump_file, "\n");
/* Try the substitution; only on success is the move deleted.  */
5108 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5112 next = delete_insn (insn);
5115 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5116 print_rtl_single (dump_file, follow);
5126 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5127 fprintf (dump_file, "=====\n");
5133 /* Figure out where to put LABEL, which is the label for a repeat loop.
5134 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5135 the loop ends just before LAST_INSN. If SHARED, insns other than the
5136 "repeat" might use LABEL to jump to the loop's continuation point.
5138 Return the last instruction in the adjusted loop. */
5141 mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
5145 int count = 0, code, icode;
5148 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5149 INSN_UID (last_insn));
5151 /* Set PREV to the last insn in the loop. */
5154 prev = PREV_INSN (prev);
5156 /* Set NEXT to the next insn after the repeat label. */
/* Walk backwards, counting (up to two) insns that may live in the
   repeat epilogue slots; stop on anything that must not go there.  */
5161 code = GET_CODE (prev);
5162 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
/* For a bundled SEQUENCE, examine its second (core) member.  */
5167 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
5168 prev = XVECEXP (PATTERN (prev), 0, 1);
5170 /* Other insns that should not be in the last two opcodes. */
5171 icode = recog_memoized (prev);
5173 || icode == CODE_FOR_repeat
5174 || icode == CODE_FOR_erepeat
5175 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5178 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5179 is the second instruction in a VLIW bundle. In that case,
5180 loop again: if the first instruction also satisfies the
5181 conditions above then we will reach here again and put
5182 both of them into the repeat epilogue. Otherwise both
5183 should remain outside. */
5184 if (GET_MODE (prev) != BImode)
5189 print_rtl_single (dump_file, next)
5194 prev = PREV_INSN (prev);
5197 /* See if we're adding the label immediately after the repeat insn.
5198 If so, we need to separate them with a nop. */
5199 prev = prev_real_insn (next);
5201 switch (recog_memoized (prev))
5203 case CODE_FOR_repeat:
5204 case CODE_FOR_erepeat:
5206 fprintf (dump_file, "Adding nop inside loop\n");
5207 emit_insn_before (gen_nop (), next);
5214 /* Insert the label. */
5215 emit_label_before (label, next);
5217 /* Insert the nops. */
5218 if (dump_file && count < 2)
5219 fprintf (dump_file, "Adding %d nop%s\n\n",
5220 2 - count, count == 1 ? "" : "s");
/* Pad the repeat epilogue out to the required two instructions.  */
5222 for (; count < 2; count++)
5224 last_insn = emit_insn_after (gen_nop (), last_insn);
5226 emit_insn_before (gen_nop (), last_insn);
/* Expander helper for doloop_begin/doloop_end: allocate a fresh loop
   tag when needed (a new tag is started whenever the previous one was
   allocated from the same side, begin vs. end) and emit the internal
   placeholder insn carrying that tag.  */
5233 mep_emit_doloop (rtx *operands, int is_end)
5237 if (cfun->machine->doloop_tags == 0
5238 || cfun->machine->doloop_tag_from_end == is_end)
5240 cfun->machine->doloop_tags++;
5241 cfun->machine->doloop_tag_from_end = is_end;
5244 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5246 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5248 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5252 /* Code for converting doloop_begins and doloop_ends into valid
5253 MeP instructions. A doloop_begin is just a placeholder:
5255 $count = unspec ($count)
5257 where $count is initially the number of iterations - 1.
5258 doloop_end has the form:
5260 if ($count-- == 0) goto label
5262 The counter variable is private to the doloop insns, nothing else
5263 relies on its value.
5265 There are three cases, in decreasing order of preference:
5267 1. A loop has exactly one doloop_begin and one doloop_end.
5268 The doloop_end branches to the first instruction after
5271 In this case we can replace the doloop_begin with a repeat
5272 instruction and remove the doloop_end. I.e.:
5274 $count1 = unspec ($count1)
5279 if ($count2-- == 0) goto label
5283 repeat $count1,repeat_label
5291 2. As for (1), except there are several doloop_ends. One of them
5292 (call it X) falls through to a label L. All the others fall
5293 through to branches to L.
5295 In this case, we remove X and replace the other doloop_ends
5296 with branches to the repeat label. For example:
5298 $count1 = unspec ($count1)
5301 if ($count2-- == 0) goto label
5304 if ($count3-- == 0) goto label
5309 repeat $count1,repeat_label
5320 3. The fallback case. Replace doloop_begins with:
5324 Replace doloop_ends with the equivalent of:
5327 if ($count == 0) goto label
5329 Note that this might need a scratch register if $count
5330 is stored in memory. */
5332 /* A structure describing one doloop_begin. */
5333 struct mep_doloop_begin {
5334 /* The next doloop_begin with the same tag. */
5335 struct mep_doloop_begin *next;
5337 /* The instruction itself. */
5340 /* The initial counter value. This is known to be a general register. */
5344 /* A structure describing a doloop_end. */
5345 struct mep_doloop_end {
5346 /* The next doloop_end with the same loop tag. */
5347 struct mep_doloop_end *next;
5349 /* The instruction itself. */
5352 /* The first instruction after INSN when the branch isn't taken. */
5355 /* The location of the counter value. Since doloop_end_internal is a
5356 jump instruction, it has to allow the counter to be stored anywhere
5357 (any non-fixed register or memory location). */
5360 /* The target label (the place where the insn branches when the counter
5364 /* A scratch register. Only available when COUNTER isn't stored
5365 in a general register. */
5370 /* One do-while loop. */
/* Groups all begin/end placeholders that share one loop tag.  */
5372 /* All the doloop_begins for this loop (in no particular order). */
5373 struct mep_doloop_begin *begin;
5375 /* All the doloop_ends. When there is more than one, arrange things
5376 so that the first one is the most likely to be X in case (2) above. */
5377 struct mep_doloop_end *end;
5381 /* Return true if LOOP can be converted into repeat/repeat_end form
5382 (that is, if it matches cases (1) or (2) above). */
5385 mep_repeat_loop_p (struct mep_doloop *loop)
5387 struct mep_doloop_end *end;
5390 /* There must be exactly one doloop_begin and at least one doloop_end. */
5391 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5394 /* The first doloop_end (X) must branch back to the insn after
5395 the doloop_begin. */
5396 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5399 /* All the other doloop_ends must branch to the same place as X.
5400 When the branch isn't taken, they must jump to the instruction
5402 fallthrough = loop->end->fallthrough;
5403 for (end = loop->end->next; end != 0; end = end->next)
5404 if (end->label != loop->end->label
5405 || !simplejump_p (end->fallthrough)
5406 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5413 /* The main repeat reorg function. See comment above for details. */
5416 mep_reorg_repeat (rtx insns)
5419 struct mep_doloop *loops, *loop;
5420 struct mep_doloop_begin *begin;
5421 struct mep_doloop_end *end;
5423 /* Quick exit if we haven't created any loops. */
5424 if (cfun->machine->doloop_tags == 0)
5427 /* Create an array of mep_doloop structures. */
5428 loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
5429 memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);
5431 /* Search the function for do-while insns and group them by loop tag. */
5432 for (insn = insns; insn; insn = NEXT_INSN (insn))
5434 switch (recog_memoized (insn))
5436 case CODE_FOR_doloop_begin_internal:
5437 insn_extract (insn);
5438 loop = &loops[INTVAL (recog_data.operand[2])];
5440 begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
5441 begin->next = loop->begin;
5443 begin->counter = recog_data.operand[0];
5445 loop->begin = begin;
5448 case CODE_FOR_doloop_end_internal:
5449 insn_extract (insn);
5450 loop = &loops[INTVAL (recog_data.operand[2])];
5452 end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
5454 end->fallthrough = next_real_insn (insn);
5455 end->counter = recog_data.operand[0];
5456 end->label = recog_data.operand[1];
5457 end->scratch = recog_data.operand[3];
5459 /* If this insn falls through to an unconditional jump,
5460 give it a lower priority than the others. */
5461 if (loop->end != 0 && simplejump_p (end->fallthrough))
5463 end->next = loop->end->next;
5464 loop->end->next = end;
5468 end->next = loop->end;
5474 /* Convert the insns for each loop in turn. */
5475 for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
5476 if (mep_repeat_loop_p (loop))
5478 /* Case (1) or (2). */
5479 rtx repeat_label, label_ref;
5481 /* Create a new label for the repeat insn. */
5482 repeat_label = gen_label_rtx ();
5484 /* Replace the doloop_begin with a repeat. */
5485 label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
5486 emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
5488 delete_insn (loop->begin->insn);
5490 /* Insert the repeat label before the first doloop_end.
5491 Fill the gap with nops if there are other doloop_ends. */
5492 mep_insert_repeat_label_last (loop->end->insn, repeat_label,
5493 false, loop->end->next != 0);
5495 /* Emit a repeat_end (to improve the readability of the output). */
5496 emit_insn_before (gen_repeat_end (), loop->end->insn);
5498 /* Delete the first doloop_end. */
5499 delete_insn (loop->end->insn);
5501 /* Replace the others with branches to REPEAT_LABEL. */
5502 for (end = loop->end->next; end != 0; end = end->next)
5504 emit_jump_insn_before (gen_jump (repeat_label), end->insn);
5505 delete_insn (end->insn);
5506 delete_insn (end->fallthrough);
5511 /* Case (3). First replace all the doloop_begins with increment
5513 for (begin = loop->begin; begin != 0; begin = begin->next)
5515 emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
5516 begin->counter, const1_rtx),
5518 delete_insn (begin->insn);
5521 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5522 for (end = loop->end; end != 0; end = end->next)
5528 /* Load the counter value into a general register. */
5530 if (!REG_P (reg) || REGNO (reg) > 15)
5533 emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
5536 /* Decrement the counter. */
5537 emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
5540 /* Copy it back to its original location. */
5541 if (reg != end->counter)
5542 emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));
5544 /* Jump back to the start label. */
5545 insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
5547 JUMP_LABEL (insn) = end->label;
5548 LABEL_NUSES (end->label)++;
5550 /* Emit the whole sequence before the doloop_end. */
5551 insn = get_insns ();
5553 emit_insn_before (insn, end->insn);
5555 /* Delete the doloop_end. */
5556 delete_insn (end->insn);
/* Return nonzero if conditional-branch INSN still recognizes after
   its condition code is inverted.  The inversion is applied
   temporarily, recog is retried, and the original code restored.  */
5563 mep_invertable_branch_p (rtx insn)
5566 enum rtx_code old_code;
5569 set = PATTERN (insn);
5570 if (GET_CODE (set) != SET)
5572 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5574 cond = XEXP (XEXP (set, 1), 0);
5575 old_code = GET_CODE (cond);
/* Map each invertable code to its inverse (the case labels are
   missing from this extract).  */
5579 PUT_CODE (cond, NE);
5582 PUT_CODE (cond, EQ);
5585 PUT_CODE (cond, GE);
5588 PUT_CODE (cond, LT);
/* Re-recognize with the inverted code, then restore.  */
5593 INSN_CODE (insn) = -1;
5594 i = recog_memoized (insn);
5595 PUT_CODE (cond, old_code);
5596 INSN_CODE (insn) = -1;
/* Invert conditional-branch INSN and retarget it at a new label
   emitted just after AFTER.  The old target label is deleted when
   INSN was its only user.  */
5601 mep_invert_branch (rtx insn, rtx after)
5603 rtx cond, set, label;
5606 set = PATTERN (insn);
5608 gcc_assert (GET_CODE (set) == SET);
5609 gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);
5611 cond = XEXP (XEXP (set, 1), 0);
5612 switch (GET_CODE (cond))
5615 PUT_CODE (cond, NE);
5618 PUT_CODE (cond, EQ);
5621 PUT_CODE (cond, GE);
5624 PUT_CODE (cond, LT);
5629 label = gen_label_rtx ();
5630 emit_label_after (label, after);
/* Rewrite whichever IF_THEN_ELSE arm holds the LABEL_REF.  */
5631 for (i=1; i<=2; i++)
5632 if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
5634 rtx ref = XEXP (XEXP (set, 1), i);
5635 if (LABEL_NUSES (XEXP (ref, 0)) == 1)
5636 delete_insn (XEXP (ref, 0));
5637 XEXP (ref, 0) = label;
5638 LABEL_NUSES (label) ++;
5639 JUMP_LABEL (insn) = label;
/* The inverted branch must still be recognizable.  */
5641 INSN_CODE (insn) = -1;
5642 i = recog_memoized (insn);
5643 gcc_assert (i >= 0);
/* Convert eligible backward conditional branches into erepeat loops:
   scan back from each invertable branch to its own target label, and
   if the span is safe (no calls, barriers, or shared labels), wrap it
   with erepeat/erepeat_end.  NOTE(review): lines are missing from
   this extract; comments annotate only visible fragments.  */
5647 mep_reorg_erepeat (rtx insns)
5649 rtx insn, prev, l, x;
5652 for (insn = insns; insn; insn = NEXT_INSN (insn))
5654 && ! JUMP_TABLE_DATA_P (insn)
5655 && mep_invertable_branch_p (insn))
5659 fprintf (dump_file, "\n------------------------------\n");
5660 fprintf (dump_file, "erepeat: considering this jump:\n");
5661 print_rtl_single (dump_file, insn);
5663 count = simplejump_p (insn) ? 0 : 1;
5664 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
/* Calls and barriers inside the candidate loop disqualify it.  */
5666 if (GET_CODE (prev) == CALL_INSN
5667 || BARRIER_P (prev))
5670 if (prev == JUMP_LABEL (insn))
5674 fprintf (dump_file, "found loop top, %d insns\n", count);
5676 if (LABEL_NUSES (prev) == 1)
5677 /* We're the only user, always safe */ ;
5678 else if (LABEL_NUSES (prev) == 2)
5680 /* See if there's a barrier before this label. If
5681 so, we know nobody inside the loop uses it.
5682 But we must be careful to put the erepeat
5683 *after* the label. */
5685 for (barrier = PREV_INSN (prev);
5686 barrier && GET_CODE (barrier) == NOTE;
5687 barrier = PREV_INSN (barrier))
5689 if (barrier && GET_CODE (barrier) != BARRIER)
5694 /* We don't know who else, within or without our loop, uses this */
5696 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5700 /* Generate a label to be used by the erepeat insn. */
5701 l = gen_label_rtx ();
5703 /* Insert the erepeat after INSN's target label. */
5704 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5706 emit_insn_after (x, prev);
5708 /* Insert the erepeat label. */
5709 newlast = (mep_insert_repeat_label_last
5710 (insn, l, !simplejump_p (insn), false));
5711 if (simplejump_p (insn))
5713 emit_insn_before (gen_erepeat_end (), insn);
/* Conditional branch: invert it to fall out of the loop and place
   erepeat_end after the new loop bottom.  */
5718 mep_invert_branch (insn, newlast);
5719 emit_insn_after (gen_erepeat_end (), newlast);
5726 /* A label is OK if there is exactly one user, and we
5727 can find that user before the next label. */
5730 if (LABEL_NUSES (prev) == 1)
5732 for (user = PREV_INSN (prev);
5733 user && (INSN_P (user) || GET_CODE (user) == NOTE);
5734 user = PREV_INSN (user))
5735 if (GET_CODE (user) == JUMP_INSN
5736 && JUMP_LABEL (user) == prev)
5738 safe = INSN_UID (user);
5745 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5746 safe, INSN_UID (prev));
5756 fprintf (dump_file, "\n==============================\n");
5759 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5760 always do this on its own. */
5763 mep_jmp_return_reorg (rtx insns)
5765 rtx insn, label, ret;
5768 for (insn = insns; insn; insn = NEXT_INSN (insn))
5769 if (simplejump_p (insn))
5771 /* Find the first real insn the jump jumps to.  */
5772 label = ret = JUMP_LABEL (insn);
/* Skip notes, labels, and USEs between the label and the insn.  */
5774 && (GET_CODE (ret) == NOTE
5775 || GET_CODE (ret) == CODE_LABEL
5776 || GET_CODE (PATTERN (ret)) == USE))
5777 ret = NEXT_INSN (ret);
5781 /* Is it a return? */
5782 ret_code = recog_memoized (ret);
5783 if (ret_code == CODE_FOR_return_internal
5784 || ret_code == CODE_FOR_eh_return_internal)
5786 /* It is. Replace the jump with a return. */
5787 LABEL_NUSES (label) --;
5788 if (LABEL_NUSES (label) == 0)
5789 delete_insn (label);
5790 PATTERN (insn) = copy_rtx (PATTERN (ret));
5791 INSN_CODE (insn) = -1;
/* Merge consecutive "reg += const" insns on the same register into a
   single add when the combined constant stays within signed-16-bit
   range, then splice the second insn out of the chain.  */
5799 mep_reorg_addcombine (rtx insns)
5803 for (i = insns; i; i = NEXT_INSN (i))
5805 && INSN_CODE (i) == CODE_FOR_addsi3
5806 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5807 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5808 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5809 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
/* The next insn must be the same shape of reg += const.  */
5813 && INSN_CODE (n) == CODE_FOR_addsi3
5814 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5815 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5816 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5817 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5819 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5820 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5821 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5823 && ic + nc > -32768)
5825 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
/* Unlink N from the insn chain, keeping I.  */
5826 NEXT_INSN (i) = NEXT_INSN (n);
5828 PREV_INSN (NEXT_INSN (i)) = i;
5834 /* If this insn adjusts the stack, return the adjustment, else return
5837 add_sp_insn_p (rtx insn)
5841 if (! single_set (insn))
5843 pat = PATTERN (insn);
/* Match exactly (set sp (plus sp const_int)); anything else is not a
   simple stack adjustment.  */
5844 if (GET_CODE (SET_DEST (pat)) != REG)
5846 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5848 if (GET_CODE (SET_SRC (pat)) != PLUS)
5850 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5852 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5854 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5856 return INTVAL (XEXP (SET_SRC (pat), 1));
5859 /* Check for trivial functions that set up an unneeded stack
/* frame (continuation elided).  Scans for a leading $sp = $sp - N
   and a matching trailing $sp = $sp + N with no intervening use of
   $sp and no calls; if found, both frame insns are deleted.
   NOTE(review): lines are elided in this extract; the loop header
   and several guards are not visible.  */
5862 mep_reorg_noframe (rtx insns)
5864 rtx start_frame_insn;
5865 rtx end_frame_insn = 0;
5869 /* The first insn should be $sp = $sp + N */
5870 while (insns && ! INSN_P (insns))
5871 insns = NEXT_INSN (insns);
5875 sp_adjust = add_sp_insn_p (insns);
5879 start_frame_insn = insns;
5880 sp = SET_DEST (PATTERN (start_frame_insn));
5882 insns = next_real_insn (insns);
5886 rtx next = next_real_insn (insns);
5890 sp2 = add_sp_insn_p (insns);
/* Candidate closing adjustment; must exactly undo the opening one.  */
5895 end_frame_insn = insns;
5896 if (sp2 != -sp_adjust)
/* Any other mention of $sp, or any call, means the frame is used.  */
5899 else if (mep_mentioned_p (insns, sp, 0))
5901 else if (CALL_P (insns))
/* Frame proved unnecessary: remove both adjustments.  */
5909 delete_insn (start_frame_insn);
5910 delete_insn (end_frame_insn);
/* Body of the machine-dependent reorg pass (the function header is
   on an elided line above).  Runs the MeP-specific late RTL
   transformations in a fixed order; see the comment before
   mep_reorg_noframe below for why it must run last.  */
5917 rtx insns = get_insns ();
5919 /* We require accurate REG_DEAD notes. */
5920 compute_bb_for_insn ();
5921 df_note_add_problem ();
5924 mep_reorg_addcombine (insns);
5925 #if EXPERIMENTAL_REGMOVE_REORG
5926 /* VLIW packing has been done already, so we can't just delete things. */
5927 if (!mep_vliw_function_p (cfun->decl))
5928 mep_reorg_regmove (insns);
5930 mep_jmp_return_reorg (insns);
5931 mep_bundle_insns (insns);
5932 mep_reorg_repeat (insns);
/* erepeat needs $rpb; in interrupt handlers it is only safe if that
   register is saved (condition head is on an elided line).  */
5935 && !profile_arc_flag
5936 && TARGET_OPT_REPEAT
5937 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
5938 mep_reorg_erepeat (insns);
5940 /* This may delete *insns so make sure it's last. */
5941 mep_reorg_noframe (insns);
5943 df_finish_pass (false);
5948 /*----------------------------------------------------------------------*/
/* Intrinsic support tables (section banner; title line elided).  */
5950 /*----------------------------------------------------------------------*/
5952 /* Element X gives the index into cgen_insns[] of the most general
5953 implementation of intrinsic X. Unimplemented intrinsics are
/* (continuation elided — presumably "mapped to -1"; the -1
   initialization is visible in mep_init_intrinsics below).  */
5955 int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];
5957 /* Element X gives the index of another instruction that is mapped to
5958 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5961 Things are set up so that mep_intrinsic_chain[X] < X. */
5962 static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];
5964 /* The bitmask for the current ISA. The ISA masks are declared
/* (continuation elided).  */
5966 unsigned int mep_selected_isa;
/* struct mep_config: maps a -mconfig= name to its ISA mask (the
   struct head and isa field are on elided lines).  */
5969 const char *config_name;
5973 static struct mep_config mep_configs[] = {
5974 #ifdef COPROC_SELECTION_TABLE
5975 COPROC_SELECTION_TABLE,
5980 /* Initialize the global intrinsics variables above. */
5983 mep_init_intrinsics (void)
5987 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5988 mep_selected_isa = mep_configs[0].isa;
5989 if (mep_config_string != 0)
5990 for (i = 0; mep_configs[i].config_name; i++)
5991 if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
5993 mep_selected_isa = mep_configs[i].isa;
5997 /* Assume all intrinsics are unavailable. */
5998 for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
5999 mep_intrinsic_insn[i] = -1;
6001 /* Build up the global intrinsic tables. */
6002 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6003 if ((cgen_insns[i].isas & mep_selected_isa) != 0)
/* Chain earlier implementations behind the newest one, keeping
   mep_intrinsic_chain[i] < i as documented above.  */
6005 mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
6006 mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
6008 /* See whether we can directly move values between one coprocessor
6009 register and another. */
6010 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6011 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
6012 mep_have_copro_copro_moves_p = true;
6014 /* See whether we can directly move values between core and
6015 coprocessor registers. */
6016 mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
6017 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));
/* Override on an elided condition (presumably TARGET_COP; TODO
   confirm against full source).  */
6019 mep_have_core_copro_moves_p = 1;
6022 /* Declare all available intrinsic functions. Called once only. */
/* Cached tree nodes for the coprocessor types exposed to the user
   (cp_data_bus_int, cp_vector, cp_v8qi, ...); built once in
   mep_init_builtins and reused by mep_cgen_regnum_to_type.  */
6024 static tree cp_data_bus_int_type_node;
6025 static tree opaque_vector_type_node;
6026 static tree v8qi_type_node;
6027 static tree v4hi_type_node;
6028 static tree v2si_type_node;
6029 static tree v8uqi_type_node;
6030 static tree v4uhi_type_node;
6031 static tree v2usi_type_node;
/* Map a cgen regnum operand type onto the tree type used to declare
   the corresponding builtin parameter or return value.  Unknown
   types fall through to void_type_node.  */
6034 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
6038 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
6039 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
6040 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
6041 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
6042 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
6043 case cgen_regnum_operand_type_CHAR: return char_type_node;
6044 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
6045 case cgen_regnum_operand_type_SI: return intSI_type_node;
6046 case cgen_regnum_operand_type_DI: return intDI_type_node;
6047 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
6048 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
6049 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
6050 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
6051 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
6052 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
6053 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
6054 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
6056 return void_type_node;
/* TARGET_INIT_BUILTINS hook: build the coprocessor vector types,
   push the user-visible typedefs (cp_vector, cp_v8qi, ...) into the
   language namespace, and register one builtin function per
   available unspec-form intrinsic.  */
6061 mep_init_builtins (void)
/* cp_data_bus_int is 64 bits wide when the CR registers are.  */
6065 if (TARGET_64BIT_CR_REGS)
6066 cp_data_bus_int_type_node = long_long_integer_type_node;
6068 cp_data_bus_int_type_node = long_integer_type_node;
6070 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
6071 v8qi_type_node = build_vector_type (intQI_type_node, 8);
6072 v4hi_type_node = build_vector_type (intHI_type_node, 4);
6073 v2si_type_node = build_vector_type (intSI_type_node, 2);
6074 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
6075 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
6076 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
/* Expose each type to the user under its __cop name.  */
6078 (*lang_hooks.decls.pushdecl)
6079 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
6080 cp_data_bus_int_type_node));
6082 (*lang_hooks.decls.pushdecl)
6083 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
6084 opaque_vector_type_node));
6086 (*lang_hooks.decls.pushdecl)
6087 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
6089 (*lang_hooks.decls.pushdecl)
6090 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
6092 (*lang_hooks.decls.pushdecl)
6093 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
6096 (*lang_hooks.decls.pushdecl)
6097 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
6099 (*lang_hooks.decls.pushdecl)
6100 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
6102 (*lang_hooks.decls.pushdecl)
6103 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
6106 /* Intrinsics like mep_cadd3 are implemented with two groups of
6107 instructions, one which uses UNSPECs and one which uses a specific
6108 rtl code such as PLUS. Instructions in the latter group belong
6109 to GROUP_KNOWN_CODE.
6111 In such cases, the intrinsic will have two entries in the global
6112 tables above. The unspec form is accessed using builtin functions
6113 while the specific form is accessed using the mep_* enum in
6116 The idea is that __cop arithmetic and builtin functions have
6117 different optimization requirements. If mep_cadd3() appears in
6118 the source code, the user will surely expect gcc to use cadd3
6119 rather than a work-alike such as add3. However, if the user
6120 just writes "a + b", where a or b are __cop variables, it is
6121 reasonable for gcc to choose a core instruction rather than
6122 cadd3 if it believes that is more optimal. */
6123 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6124 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
6125 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
6127 tree ret_type = void_type_node;
/* Skip duplicates: only declare each intrinsic once.  */
6130 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
6133 if (cgen_insns[i].cret_p)
6134 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
6136 bi_type = build_function_type_list (ret_type, NULL_TREE);
6137 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
6139 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
6143 /* Report the unavailability of the given intrinsic.  The error is
   emitted at most once per intrinsic per compilation, and the
   message distinguishes "not in this configuration" from
   "wrong VLIW/non-VLIW context".  */
6147 mep_intrinsic_unavailable (int intrinsic)
6149 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6151 if (already_reported_p[intrinsic])
6154 if (mep_intrinsic_insn[intrinsic] < 0)
6155 error ("coprocessor intrinsic %qs is not available in this configuration",
6156 cgen_intrinsics[intrinsic]);
6157 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6158 error ("%qs is not available in VLIW functions",
6159 cgen_intrinsics[intrinsic]);
6161 error ("%qs is not available in non-VLIW functions",
6162 cgen_intrinsics[intrinsic]);
6164 already_reported_p[intrinsic] = 1;
6169 /* See if any implementation of INTRINSIC is available to the
6170 current function. If so, store the most general implementation
6171 in *INSN_PTR and return true. Return false otherwise. */
6174 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
/* Walk the chain built by mep_init_intrinsics until an enabled
   implementation is found (or the chain runs out at -1).  */
6178 i = mep_intrinsic_insn[intrinsic];
6179 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6180 i = mep_intrinsic_chain[i];
6184 *insn_ptr = &cgen_insns[i];
6191 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6192 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6193 try using a work-alike instead. In this case, the returned insn
6194 may have three operands rather than two. */
6197 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6201 if (intrinsic == mep_cmov)
/* Try each CR<-CR move work-alike in turn; fall through to the
   generic lookup if none of them is available.  */
6203 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6204 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6208 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6212 /* If ARG is a register operand that is the same size as MODE, convert it
6213 to MODE using a subreg. Otherwise return ARG as-is. */
6216 mep_convert_arg (enum machine_mode mode, rtx arg)
6218 if (GET_MODE (arg) != mode
6219 && register_operand (arg, VOIDmode)
6220 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6221 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6226 /* Apply regnum conversions to ARG using the description given by REGNUM.
6227 Return the new argument on success and null on failure.
   ARG must be a CONST_INT register index in [0, REGNUM->count);
   the result is the hard register REGNUM->base + INTVAL (ARG).  */
6230 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
/* count == 0 means "not a regnum operand": pass ARG through
   unchanged (the return is on an elided line).  */
6232 if (regnum->count == 0)
6235 if (GET_CODE (arg) != CONST_INT
6237 || INTVAL (arg) >= regnum->count)
6240 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6244 /* Try to make intrinsic argument ARG match the given operand.
6245 UNSIGNED_P is true if the argument has an unsigned type.
   Returns the legitimized rtx, or (on elided failure paths)
   presumably NULL when no conversion makes the predicate accept —
   TODO confirm against full source.  */
6248 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6251 if (GET_CODE (arg) == CONST_INT)
6253 /* CONST_INTs can only be bound to integer operands. */
6254 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6257 else if (GET_CODE (arg) == CONST_DOUBLE)
6258 /* These hold vector constants. */;
6259 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6261 /* If the argument is a different size from what's expected, we must
6262 have a value in the right mode class in order to convert it. */
6263 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6266 /* If the operand is an rvalue, promote or demote it to match the
6267 operand's size. This might not need extra instructions when
6268 ARG is a register value. */
6269 if (operand->constraint[0] != '=')
6270 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6273 /* If the operand is an lvalue, bind the operand to a new register.
6274 The caller will copy this value into ARG after the main
6275 instruction. By doing this always, we produce slightly more
6277 /* But not for control registers. */
6278 if (operand->constraint[0] == '='
6280 || ! (CONTROL_REGNO_P (REGNO (arg))
6281 || CCR_REGNO_P (REGNO (arg))
6282 || CR_REGNO_P (REGNO (arg)))
6284 return gen_reg_rtx (operand->mode);
6286 /* Try simple mode punning. */
6287 arg = mep_convert_arg (operand->mode, arg);
6288 if (operand->predicate (arg, operand->mode))
6291 /* See if forcing the argument into a register will make it match. */
6292 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6293 arg = force_reg (operand->mode, arg);
6295 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6296 if (operand->predicate (arg, operand->mode))
6303 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6304 function FNNAME. OPERAND describes the operand to which ARGNUM
/* (continuation elided — presumably "was bound").  For CONST_INT
   arguments matched against a known immediate predicate, a precise
   range or alignment diagnostic is produced; otherwise a generic
   incompatible-type error.  */
6308 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6309 int argnum, tree fnname)
6313 if (GET_CODE (arg) == CONST_INT)
6314 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6315 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6317 const struct cgen_immediate_predicate *predicate;
6318 HOST_WIDE_INT argval;
6320 predicate = &cgen_immediate_predicates[i];
6321 argval = INTVAL (arg);
6322 if (argval < predicate->lower || argval >= predicate->upper)
6323 error ("argument %d of %qE must be in the range %d...%d",
6324 argnum, fnname, predicate->lower, predicate->upper - 1);
6326 error ("argument %d of %qE must be a multiple of %d",
6327 argnum, fnname, predicate->align);
6331 error ("incompatible type for argument %d of %qE", argnum, fnname);
/* TARGET_EXPAND_BUILTIN hook: expand a call to a MeP coprocessor
   builtin into RTL.  Looks up the cgen insn for the builtin,
   evaluates and legitimizes each argument, emits the instruction,
   and copies any lvalue operands back to their final locations.
   NOTE(review): many interior lines (braces, declarations, early
   returns) are elided in this extract.  */
6335 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6336 rtx subtarget ATTRIBUTE_UNUSED,
6337 enum machine_mode mode ATTRIBUTE_UNUSED,
6338 int ignore ATTRIBUTE_UNUSED)
6340 rtx pat, op[10], arg[10];
6342 int opindex, unsigned_p[10];
6344 unsigned int n_args;
6346 const struct cgen_insn *cgen_insn;
6347 const struct insn_data_d *idata;
6348 unsigned int first_arg = 0;
6349 unsigned int builtin_n_args;
6351 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6352 fnname = DECL_NAME (fndecl);
6354 /* Find out which instruction we should emit. Note that some coprocessor
6355 intrinsics may only be available in VLIW mode, or only in normal mode. */
6356 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6358 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6361 idata = &insn_data[cgen_insn->icode];
6363 builtin_n_args = cgen_insn->num_args;
/* cret_p: the insn returns a value through its first operand.  */
6365 if (cgen_insn->cret_p)
6367 if (cgen_insn->cret_p > 1)
6370 mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6374 /* Evaluate each argument. */
6375 n_args = call_expr_nargs (exp);
6377 if (n_args < builtin_n_args)
6379 error ("too few arguments to %qE", fnname);
6382 if (n_args > builtin_n_args)
6384 error ("too many arguments to %qE", fnname);
6388 for (a = first_arg; a < builtin_n_args + first_arg; a++)
6392 args = CALL_EXPR_ARG (exp, a - first_arg);
/* Reference parameters must be passed as &lvalue.  */
6397 if (cgen_insn->regnums[a].reference_p)
6399 if (TREE_CODE (value) != ADDR_EXPR)
6402 error ("argument %d of %qE must be an address", a+1, fnname);
6405 value = TREE_OPERAND (value, 0);
6409 /* If the argument has been promoted to int, get the unpromoted
6410 value. This is necessary when sub-int memory values are bound
6411 to reference parameters. */
6412 if (TREE_CODE (value) == NOP_EXPR
6413 && TREE_TYPE (value) == integer_type_node
6414 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6415 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6416 < TYPE_PRECISION (TREE_TYPE (value))))
6417 value = TREE_OPERAND (value, 0);
6419 /* If the argument has been promoted to double, get the unpromoted
6420 SFmode value. This is necessary for FMAX support, for example. */
6421 if (TREE_CODE (value) == NOP_EXPR
6422 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6423 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6424 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6425 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6426 value = TREE_OPERAND (value, 0);
6428 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6429 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6430 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6431 if (cgen_insn->regnums[a].reference_p)
6433 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6434 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6436 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
/* mep_convert_regnum returned null: report a range error.  */
6440 error ("argument %d of %qE must be in the range %d...%d",
6441 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
/* Allocate the return-value operand(s), reusing TARGET if its
   mode already matches.  */
6446 for (a = 0; a < first_arg; a++)
6448 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6451 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6454 /* Convert the arguments into a form suitable for the intrinsic.
6455 Report an error if this isn't possible. */
6456 for (opindex = 0; opindex < idata->n_operands; opindex++)
6458 a = cgen_insn->op_mapping[opindex];
6459 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6460 arg[a], unsigned_p[a]);
6461 if (op[opindex] == 0)
6463 mep_incompatible_arg (&idata->operand[opindex],
6464 arg[a], a + 1 - first_arg, fnname);
6469 /* Emit the instruction. */
6470 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6471 op[5], op[6], op[7], op[8], op[9]);
/* Conditional branches need emit_jump_insn, not emit_insn.  */
6473 if (GET_CODE (pat) == SET
6474 && GET_CODE (SET_DEST (pat)) == PC
6475 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6476 emit_jump_insn (pat);
6480 /* Copy lvalues back to their final locations. */
6481 for (opindex = 0; opindex < idata->n_operands; opindex++)
6482 if (idata->operand[opindex].constraint[0] == '=')
6484 a = cgen_insn->op_mapping[opindex];
/* Different mode classes: move via the low part instead of a
   numeric conversion.  */
6487 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6488 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6489 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6493 /* First convert the operand to the right mode, then copy it
6494 into the destination. Doing the conversion as a separate
6495 step (rather than using convert_move) means that we can
6496 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6497 refer to the same register. */
6498 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6499 op[opindex], unsigned_p[a]);
6500 if (!rtx_equal_p (arg[a], op[opindex]))
6501 emit_move_insn (arg[a], op[opindex]);
/* Finally publish the return value into TARGET if needed.  */
6506 if (first_arg > 0 && target && target != op[0])
6508 emit_move_insn (target, op[0]);
/* TARGET_VECTOR_MODE_SUPPORTED_P hook (body elided in this
   extract).  */
6515 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6520 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6521 a global register. */
6524 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6532 switch (GET_CODE (x))
/* SUBREG of a hard register: check the underlying regno.  */
6535 if (REG_P (SUBREG_REG (x)))
6537 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6538 && global_regs[subreg_regno (x)])
6546 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6560 /* A non-constant call might use a global register. */
6570 /* Returns nonzero if X mentions a global register. */
6573 global_reg_mentioned_p (rtx x)
/* For calls, also scan CALL_INSN_FUNCTION_USAGE unless the call is
   known const/pure.  */
6579 if (! RTL_CONST_OR_PURE_CALL_P (x))
6581 x = CALL_INSN_FUNCTION_USAGE (x);
6589 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6591 /* Scheduling hooks for VLIW mode.
6593 Conceptually this is very simple: we have a two-pack architecture
6594 that takes one core insn and one coprocessor insn to make up either
6595 a 32- or 64-bit instruction word (depending on the option bit set in
6596 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6597 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6598 and one 48-bit cop insn or two 32-bit core/cop insns.
6600 In practice, instruction selection will be a bear. Consider in
6601 VL64 mode the following insns
6606 these cannot pack, since the add is a 16-bit core insn and cmov
6607 is a 32-bit cop insn. However,
6612 packs just fine. For good VLIW code generation in VL64 mode, we
6613 will have to have 32-bit alternatives for many of the common core
6614 insns. Not implemented. */
/* TARGET_SCHED_ADJUST_COST hook: tweak dependence costs for the
   scheduler.  Non-true dependencies are generally cheap, except for
   intrinsic pairs writing the same global hard register; true
   dependencies use the "latency" attribute (except on MeP-h1).  */
6617 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6621 if (REG_NOTE_KIND (link) != 0)
6623 /* See whether INSN and DEP_INSN are intrinsics that set the same
6624 hard register. If so, it is more important to free up DEP_INSN
6625 than it is to free up INSN.
6627 Note that intrinsics like mep_mulr are handled differently from
6628 the equivalent mep.md patterns. In mep.md, if we don't care
6629 about the value of $lo and $hi, the pattern will just clobber
6630 the registers, not set them. Since clobbers don't count as
6631 output dependencies, it is often possible to reorder two mulrs,
6634 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6635 so any pair of mep_mulr()s will be inter-dependent. We should
6636 therefore give the first mep_mulr() a higher priority. */
6637 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6638 && global_reg_mentioned_p (PATTERN (insn))
6639 && global_reg_mentioned_p (PATTERN (dep_insn)))
6642 /* If the dependence is an anti or output dependence, assume it
/* (continuation elided — presumably "has no cost").  */
6647 /* If we can't recognize the insns, we can't really do anything. */
6648 if (recog_memoized (dep_insn) < 0)
6651 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6652 attribute instead. */
6655 cost_specified = get_attr_latency (dep_insn);
6656 if (cost_specified != 0)
6657 return cost_specified;
6663 /* ??? We don't properly compute the length of a load/store insn,
6664 taking into account the addressing mode. */
/* TARGET_SCHED_ISSUE_RATE hook: IVC2 has three slots, other
   configurations two.  */
6667 mep_issue_rate (void)
6669 return TARGET_IVC2 ? 3 : 2;
6672 /* Return true if function DECL was declared with the vliw attribute. */
6675 mep_vliw_function_p (tree decl)
6677 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
/* Scan the ready list (from lowest priority upward) for an insn in
   the given SLOT with the given LENGTH; return its index, or (on an
   elided line) a not-found sentinel.  */
6681 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6685 for (i = nready - 1; i >= 0; --i)
6687 rtx insn = ready[i];
6688 if (recog_memoized (insn) >= 0
6689 && get_attr_slot (insn) == slot
6690 && get_attr_length (insn) == length)
/* Move INSN to the front (highest priority end) of the ready list,
   shifting the intervening entries down by one.  */
6698 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6702 for (i = 0; i < nready; ++i)
6703 if (ready[i] == insn)
6705 for (; i < nready - 1; ++i)
6706 ready[i] = ready[i + 1];
/* Debug helper: print one line describing INSN's slot assignment
   and name to the scheduler DUMP file.  */
6715 mep_print_sched_insn (FILE *dump, rtx insn)
6717 const char *slots = "none";
6718 const char *name = NULL;
6722 if (GET_CODE (PATTERN (insn)) == SET
6723 || GET_CODE (PATTERN (insn)) == PARALLEL)
6725 switch (get_attr_slots (insn))
6727 case SLOTS_CORE: slots = "core"; break;
6728 case SLOTS_C3: slots = "c3"; break;
6729 case SLOTS_P0: slots = "p0"; break;
6730 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6731 case SLOTS_P0_P1: slots = "p0,p1"; break;
6732 case SLOTS_P0S: slots = "p0s"; break;
6733 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6734 case SLOTS_P1: slots = "p1"; break;
/* Unknown slot attribute: show the raw value.  */
6736 sprintf(buf, "%d", get_attr_slots (insn));
6741 if (GET_CODE (PATTERN (insn)) == USE)
6744 code = INSN_CODE (insn);
6746 name = get_insn_name (code);
6751 "insn %4d %4d %8s %s\n",
/* TARGET_SCHED_REORDER hook.  In VLIW functions (non-IVC2), try to
   pair one core insn with one coprocessor insn of complementary
   lengths so they can be bundled into a single VLIW word; move the
   pair to the front of the ready list and return 2.  Elided lines
   hold the early-return and issue-count paths.  */
6759 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6760 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6761 int *pnready, int clock ATTRIBUTE_UNUSED)
6763 int nready = *pnready;
6764 rtx core_insn, cop_insn;
6767 if (dump && sched_verbose > 1)
6769 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6770 for (i=0; i<nready; i++)
6771 mep_print_sched_insn (dump, ready[i]);
6772 fprintf (dump, "\n");
6775 if (!mep_vliw_function_p (cfun->decl))
6780 /* IVC2 uses a DFA to determine what's ready and what's not. */
6784 /* We can issue either a core or coprocessor instruction.
6785 Look for a matched pair of insns to reorder. If we don't
6786 find any, don't second-guess the scheduler's priorities. */
/* VL32: 2-byte core + 2-byte cop; VL64: 2-byte core + 6-byte cop.  */
6788 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6789 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6790 TARGET_OPT_VL64 ? 6 : 2)))
/* VL64 alternative: 4-byte core + 4-byte cop.  */
6792 else if (TARGET_OPT_VL64
6793 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6794 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6797 /* We didn't find a pair. Issue the single insn at the head
6798 of the ready list. */
6801 /* Reorder the two insns first. */
6802 mep_move_ready_insn (ready, nready, core_insn);
6803 mep_move_ready_insn (ready, nready - 1, cop_insn);
6807 /* A for_each_rtx callback. Return true if *X is a register that is
6808 set by insn PREV. */
6811 mep_store_find_set (rtx *x, void *prev)
6813 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6816 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6817 not the containing insn. */
6820 mep_store_data_bypass_1 (rtx prev, rtx pat)
6822 /* Cope with intrinsics like swcpa. */
6823 if (GET_CODE (pat) == PARALLEL)
/* Any member of the PARALLEL qualifying is enough.  */
6827 for (i = 0; i < XVECLEN (pat, 0); i++)
6828 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6834 /* Check for some sort of store. */
6835 if (GET_CODE (pat) != SET
6836 || GET_CODE (SET_DEST (pat)) != MEM)
6839 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6840 The first operand to the unspec is the store data and the other operands
6841 are used to calculate the address. */
6842 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6847 src = SET_SRC (pat);
/* Skip operand 0 (the store data); only address operands matter.  */
6848 for (i = 1; i < XVECLEN (src, 0); i++)
6849 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6855 /* Otherwise just check that PREV doesn't modify any register mentioned
6856 in the memory destination. */
6857 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6860 /* Return true if INSN is a store instruction and if the store address
6861 has no true dependence on PREV. */
6864 mep_store_data_bypass_p (rtx prev, rtx insn)
6866 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6869 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6870 is a register other than LO or HI and if PREV sets *X. */
6873 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6876 && REGNO (*x) != LO_REGNO
6877 && REGNO (*x) != HI_REGNO
6878 && reg_set_p (*x, (const_rtx) prev));
6881 /* Return true if, apart from HI/LO, there are no true dependencies
6882 between multiplication instructions PREV and INSN. */
6885 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6889 pat = PATTERN (insn);
/* Look only at the first member of a PARALLEL (the main set).  */
6890 if (GET_CODE (pat) == PARALLEL)
6891 pat = XVECEXP (pat, 0, 0);
6892 return (GET_CODE (pat) == SET
6893 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6896 /* Return true if INSN is an ldc instruction that issues to the
6897 MeP-h1 integer pipeline. This is true for instructions that
6898 read from PSW, LP, SAR, HI and LO. */
6901 mep_ipipe_ldc_p (rtx insn)
6905 pat = PATTERN (insn);
6907 /* Cope with intrinsics that set both a hard register and its shadow.
6908 The set of the hard register comes first. */
6909 if (GET_CODE (pat) == PARALLEL)
6910 pat = XVECEXP (pat, 0, 0);
6912 if (GET_CODE (pat) == SET)
6914 src = SET_SRC (pat);
6916 /* Cope with intrinsics. The first operand to the unspec is
6917 the source register. */
6918 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6919 src = XVECEXP (src, 0, 0);
/* Dispatch on the source hard register (the REG_P guard is on an
   elided line); case labels follow in the full source.  */
6922 switch (REGNO (src))
6935 /* Create a VLIW bundle from core instruction CORE and coprocessor
6936 instruction COP. COP always satisfies INSN_P, but CORE can be
6937 either a new pattern or an existing instruction.
6939 Emit the bundle in place of COP and return it. */
6942 mep_make_bundle (rtx core, rtx cop)
6946 /* If CORE is an existing instruction, remove it, otherwise put
6947 the new pattern in an INSN harness. */
6951 core = make_insn_raw (core);
6953 /* Generate the bundle sequence and replace COP with it. */
6954 insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6955 insn = emit_insn_after (insn, cop);
6958 /* Set up the links of the insns inside the SEQUENCE. */
6959 PREV_INSN (core) = PREV_INSN (insn);
6960 NEXT_INSN (core) = cop;
6961 PREV_INSN (cop) = core;
6962 NEXT_INSN (cop) = NEXT_INSN (insn);
6964 /* Set the VLIW flag for the coprocessor instruction. */
6965 PUT_MODE (core, VOIDmode);
6966 PUT_MODE (cop, BImode);
6968 /* Derive a location for the bundle. Individual instructions cannot
6969 have their own location because there can be no assembler labels
6970 between CORE and COP. */
6971 INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
6972 INSN_LOCATOR (core) = 0;
6973 INSN_LOCATOR (cop) = 0;
6978 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6981 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6983 rtx * pinsn = (rtx *) data;
/* If the stored-to location X is mentioned in *PINSN, clear *PINSN
   to signal a dependence to the caller.  */
6985 if (*pinsn && reg_mentioned_p (x, *pinsn))
6989 /* Return true if anything in insn X is (anti,output,true) dependent on
6990 anything in insn Y. */
6993 mep_insn_dependent_p (rtx x, rtx y)
6997 gcc_assert (INSN_P (x));
6998 gcc_assert (INSN_P (y));
/* Check both directions: stores in X against Y, then stores in Y
   against X.  TMP is nulled by the callback on a hit.  */
7001 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
7002 if (tmp == NULL_RTX)
7006 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
7007 if (tmp == NULL_RTX)
/* Return true if INSN issues to the core slot (USEs excluded).  */
7014 core_insn_p (rtx insn)
7016 if (GET_CODE (PATTERN (insn)) == USE)
7018 if (get_attr_slot (insn) == SLOT_CORE)
7023 /* Mark coprocessor instructions that can be bundled together with
7024 the immediately preceding core instruction. This is later used
7025 to emit the "+" that tells the assembler to create a VLIW insn.
7027 For unbundled insns, the assembler will automatically add coprocessor
7028 nops, and 16-bit core nops. Due to an apparent oversight in the
7029 spec, the assembler will _not_ automatically add 32-bit core nops,
7030 so we have to emit those here.
7032 Called from mep_insn_reorg. */
7035 mep_bundle_insns (rtx insns)
7037 rtx insn, last = NULL_RTX, first = NULL_RTX;
7038 int saw_scheduling = 0;
7040 /* Only do bundling if we're in vliw mode. */
7041 if (!mep_vliw_function_p (cfun->decl))
7044 /* The first insn in a bundle are TImode, the remainder are
7045 VOIDmode. After this function, the first has VOIDmode and the
7046 rest have BImode. */
7048 /* Note: this doesn't appear to be true for JUMP_INSNs. */
7050 /* First, move any NOTEs that are within a bundle, to the beginning
/* (continuation elided — presumably "of that bundle").  */
7052 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7054 if (NOTE_P (insn) && first)
7055 /* Don't clear FIRST. */;
/* TImode marks the start of a new bundle.  */
7057 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
7060 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
7064 /* INSN is part of a bundle; FIRST is the first insn in that
7065 bundle. Move all intervening notes out of the bundle.
7066 In addition, since the debug pass may insert a label
7067 whenever the current line changes, set the location info
7068 for INSN to match FIRST. */
7070 INSN_LOCATOR (insn) = INSN_LOCATOR (first);
7072 note = PREV_INSN (insn);
7073 while (note && note != first)
7075 prev = PREV_INSN (note);
7079 /* Remove NOTE from here... */
7080 PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
7081 NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
7082 /* ...and put it in here. */
7083 NEXT_INSN (note) = first;
7084 PREV_INSN (note) = PREV_INSN (first);
7085 NEXT_INSN (PREV_INSN (note)) = note;
7086 PREV_INSN (NEXT_INSN (note)) = note;
7093 else if (!NONJUMP_INSN_P (insn))
7097 /* Now fix up the bundles. */
7098 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7103 if (!NONJUMP_INSN_P (insn))
7109 /* If we're not optimizing enough, there won't be scheduling
7110 info. We detect that here. */
7111 if (GET_MODE (insn) == TImode)
7113 if (!saw_scheduling)
7118 rtx core_insn = NULL_RTX;
7120 /* IVC2 slots are scheduled by DFA, so we just accept
7121 whatever the scheduler gives us. However, we must make
7122 sure the core insn (if any) is the first in the bundle.
7123 The IVC2 assembler can insert whatever NOPs are needed,
7124 and allows a COP insn to be first. */
7126 if (NONJUMP_INSN_P (insn)
7127 && GET_CODE (PATTERN (insn)) != USE
7128 && GET_MODE (insn) == TImode)
/* Scan the rest of this bundle looking for a core insn.  */
7132 && GET_MODE (NEXT_INSN (last)) == VOIDmode
7133 && NONJUMP_INSN_P (NEXT_INSN (last));
7134 last = NEXT_INSN (last))
7136 if (core_insn_p (last))
7139 if (core_insn_p (last))
7142 if (core_insn && core_insn != insn)
7144 /* Swap core insn to first in the bundle. */
7146 /* Remove core insn. */
7147 if (PREV_INSN (core_insn))
7148 NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7149 if (NEXT_INSN (core_insn))
7150 PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7152 /* Re-insert core insn. */
7153 PREV_INSN (core_insn) = PREV_INSN (insn);
7154 NEXT_INSN (core_insn) = insn;
7156 if (PREV_INSN (core_insn))
7157 NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7158 PREV_INSN (insn) = core_insn;
/* The moved core insn now heads the bundle.  */
7160 PUT_MODE (core_insn, TImode);
7161 PUT_MODE (insn, VOIDmode);
7165 /* The first insn has TImode, the rest have VOIDmode */
7166 if (GET_MODE (insn) == TImode)
7167 PUT_MODE (insn, VOIDmode);
7169 PUT_MODE (insn, BImode);
7173 PUT_MODE (insn, VOIDmode);
7174 if (recog_memoized (insn) >= 0
7175 && get_attr_slot (insn) == SLOT_COP)
/* A COP insn that cannot pair with the previous core insn gets a
   NOP partner of the complementary length.  */
7177 if (GET_CODE (insn) == JUMP_INSN
7179 || recog_memoized (last) < 0
7180 || get_attr_slot (last) != SLOT_CORE
7181 || (get_attr_length (insn)
7182 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7183 || mep_insn_dependent_p (insn, last))
7185 switch (get_attr_length (insn))
7190 insn = mep_make_bundle (gen_nop (), insn);
7193 if (TARGET_OPT_VL64)
7194 insn = mep_make_bundle (gen_nop32 (), insn);
7197 if (TARGET_OPT_VL64)
7198 error ("2 byte cop instructions are"
7199 " not allowed in 64-bit VLIW mode")#
7201 insn = mep_make_bundle (gen_nop (), insn);
7204 error ("unexpected %d byte cop instruction",
7205 get_attr_length (insn));
/* Otherwise pair it with the preceding core insn.  */
7210 insn = mep_make_bundle (last, insn);
/* NOTE(review): fragmentary extract -- the return-type line, braces,
   and several statements of this function are missing from this
   chunk; code below is annotated as-is.  */
7218 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7219 Return true on success. This function can fail if the intrinsic
7220 is unavailable or if the operands don't satisfy their predicates. */
7223 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7225 const struct cgen_insn *cgen_insn;
7226 const struct insn_data_d *idata;
/* Look up the insn implementing INTRINSIC; bail out if it is not
   available in the current configuration.  */
7230 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7233 idata = &insn_data[cgen_insn->icode];
/* Convert each operand to the mode its slot expects, then verify it
   against that slot's predicate before emitting.  */
7234 for (i = 0; i < idata->n_operands; i++)
7236 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7237 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
/* genfun is called with the maximum operand count; unused trailing
   slots are simply ignored by the generator.  */
7241 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7242 newop[3], newop[4], newop[5],
7243 newop[6], newop[7], newop[8]));
/* NOTE(review): only the header comment and signature of this
   function are present in this chunk; its body lines are missing.  */
7249 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7250 OPERANDS[0]. Report an error if the instruction could not
7251 be synthesized. OPERANDS[1] is a register_operand. For sign
7252 and zero extensions, it may be smaller than SImode. */
7255 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7256 rtx * operands ATTRIBUTE_UNUSED)
/* NOTE(review): only the header comment and signature are visible
   here; the function body is missing from this chunk.  */
7262 /* Likewise, but apply a binary operation to OPERANDS[1] and
7263 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7264 can be a general_operand.
7266 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7267 third operand. REG and REG3 take register operands only. */
7270 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7271 int ATTRIBUTE_UNUSED immediate3,
7272 int ATTRIBUTE_UNUSED reg,
7273 int ATTRIBUTE_UNUSED reg3,
7274 rtx * operands ATTRIBUTE_UNUSED)
/* TARGET_RTX_COSTS hook: estimate the cost of rtx X, writing the
   result to *TOTAL.  NOTE(review): fragment -- the enclosing switch
   on the rtx code and the return statements are missing here, so the
   branches below cannot be matched to their cases with certainty.  */
7280 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
/* NOTE(review): asymmetric range -- `< 127' excludes 127 while -128
   is included; `<= 127' would be the symmetric signed-byte range.
   Verify against the MeP immediate encodings before changing.  */
7285 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7287 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
/* When optimizing for size, pretend this rtx is free to bias
   selection toward the smaller form.  */
7294 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
/* Operations with an immediate second operand are cheaper than the
   register-register form.  */
7298 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7300 : COSTS_N_INSNS (2));
/* TARGET_ADDRESS_COST hook.  NOTE(review): only the signature line is
   present in this chunk; the body (presumably a constant return) is
   missing.  */
7307 mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
/* TARGET_HANDLE_OPTION hook: adjust OPTS->x_target_flags for the
   command-line option DECODED.  NOTE(review): fragment -- the switch
   statement and its `case' labels are missing from this chunk, so
   each flag group below belongs to an option case not visible here.  */
7313 mep_handle_option (struct gcc_options *opts,
7314 struct gcc_options *opts_set ATTRIBUTE_UNUSED,
7315 const struct cl_decoded_option *decoded,
7316 location_t loc ATTRIBUTE_UNUSED)
7318 size_t code = decoded->opt_index;
/* Presumably an "enable everything" option (e.g. -mall-opts) -- the
   case label is on a missing line; confirm against mep.opt.  */
7323 opts->x_target_flags |= MEP_ALL_OPTS;
/* Inverse case: clear every optional-feature flag.  */
7327 opts->x_target_flags &= ~ MEP_ALL_OPTS;
/* Coprocessor with 64-bit control registers.  */
7331 opts->x_target_flags |= MASK_COP;
7332 opts->x_target_flags |= MASK_64BIT_CR_REGS;
/* IVC2 coprocessor: implies COP, 64-bit CRs, VLIW and 64-bit VLIW
   bundles.  */
7336 opts->x_target_flags |= MASK_COP;
7337 opts->x_target_flags |= MASK_64BIT_CR_REGS;
7338 opts->x_target_flags |= MASK_VLIW;
7339 opts->x_target_flags |= MASK_OPT_VL64;
7340 opts->x_target_flags |= MASK_IVC2;
7342 /* Remaining handling of this option deferred. */
/* TARGET_ASM_INIT_SECTIONS hook: create the MeP-specific output
   sections (based/tiny data, far data, their read-only and BSS
   variants, and core vs. VLIW text sections).  NOTE(review):
   fragment -- the assignment targets (e.g. `based_section =') are on
   missing lines; each get_unnamed_section call below initialises one
   such section variable.  */
7352 mep_asm_init_sections (void)
7355 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7356 "\t.section .based,\"aw\"");
7359 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7360 "\t.section .sbss,\"aw\"");
7363 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7364 "\t.section .sdata,\"aw\",@progbits");
7367 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7368 "\t.section .far,\"aw\"");
7371 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7372 "\t.section .farbss,\"aw\"");
7375 = get_unnamed_section (0, output_section_asm_op,
7376 "\t.section .frodata,\"a\"");
7379 = get_unnamed_section (0, output_section_asm_op,
7380 "\t.section .srodata,\"a\"");
/* VLIW code sections carry SECTION_MEP_VLIW and emit a .vliw
   directive; the plain far-text section emits .core instead.  */
7383 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7384 "\t.section .vtext,\"axv\"\n\t.vliw");
7387 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7388 "\t.section .vftext,\"axv\"\n\t.vliw");
7391 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7392 "\t.section .ftext,\"ax\"\n\t.core");
7396 /* Initialize the GCC target structure. */
/* Each #undef/#define pair below overrides one targetm hook with the
   MeP implementation defined earlier in this file; TARGET_INITIALIZER
   then builds targetm from the macros in effect at this point.  */
/* Assembly output and attribute handling.  */
7398 #undef TARGET_ASM_FUNCTION_PROLOGUE
7399 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7400 #undef TARGET_ATTRIBUTE_TABLE
7401 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7402 #undef TARGET_COMP_TYPE_ATTRIBUTES
7403 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7404 #undef TARGET_INSERT_ATTRIBUTES
7405 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7406 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7407 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7408 #undef TARGET_CAN_INLINE_P
7409 #define TARGET_CAN_INLINE_P mep_can_inline_p
/* Section selection and naming.  */
7410 #undef TARGET_SECTION_TYPE_FLAGS
7411 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7412 #undef TARGET_ASM_NAMED_SECTION
7413 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
/* Builtins.  */
7414 #undef TARGET_INIT_BUILTINS
7415 #define TARGET_INIT_BUILTINS mep_init_builtins
7416 #undef TARGET_EXPAND_BUILTIN
7417 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
/* Instruction scheduling.  */
7418 #undef TARGET_SCHED_ADJUST_COST
7419 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7420 #undef TARGET_SCHED_ISSUE_RATE
7421 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7422 #undef TARGET_SCHED_REORDER
7423 #define TARGET_SCHED_REORDER mep_sched_reorder
/* Symbol encoding and section placement.  */
7424 #undef TARGET_STRIP_NAME_ENCODING
7425 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7426 #undef TARGET_ASM_SELECT_SECTION
7427 #define TARGET_ASM_SELECT_SECTION mep_select_section
7428 #undef TARGET_ASM_UNIQUE_SECTION
7429 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7430 #undef TARGET_ENCODE_SECTION_INFO
7431 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
/* Calls, costs, and the machine-dependent reorg pass.  */
7432 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
7433 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7434 #undef TARGET_RTX_COSTS
7435 #define TARGET_RTX_COSTS mep_rtx_cost
7436 #undef TARGET_ADDRESS_COST
7437 #define TARGET_ADDRESS_COST mep_address_cost
7438 #undef TARGET_MACHINE_DEPENDENT_REORG
7439 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
/* Argument passing.  */
7440 #undef TARGET_SETUP_INCOMING_VARARGS
7441 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7442 #undef TARGET_PASS_BY_REFERENCE
7443 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
7444 #undef TARGET_FUNCTION_ARG
7445 #define TARGET_FUNCTION_ARG mep_function_arg
7446 #undef TARGET_FUNCTION_ARG_ADVANCE
7447 #define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
7448 #undef TARGET_VECTOR_MODE_SUPPORTED_P
7449 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
/* Option handling.  */
7450 #undef TARGET_HANDLE_OPTION
7451 #define TARGET_HANDLE_OPTION mep_handle_option
7452 #undef TARGET_OPTION_OVERRIDE
7453 #define TARGET_OPTION_OVERRIDE mep_option_override
7454 #undef TARGET_OPTION_OPTIMIZATION_TABLE
7455 #define TARGET_OPTION_OPTIMIZATION_TABLE mep_option_optimization_table
7456 #undef TARGET_DEFAULT_TARGET_FLAGS
7457 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
/* Miscellaneous hooks: initial values, sections, returns, varargs.  */
7458 #undef TARGET_ALLOCATE_INITIAL_VALUE
7459 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7460 #undef TARGET_ASM_INIT_SECTIONS
7461 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7462 #undef TARGET_RETURN_IN_MEMORY
7463 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7464 #undef TARGET_NARROW_VOLATILE_BITFIELD
7465 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7466 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
7467 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7468 #undef TARGET_BUILD_BUILTIN_VA_LIST
7469 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7470 #undef TARGET_EXPAND_BUILTIN_VA_START
7471 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7472 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
7473 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
7474 #undef TARGET_CAN_ELIMINATE
7475 #define TARGET_CAN_ELIMINATE mep_can_eliminate
7476 #undef TARGET_CONDITIONAL_REGISTER_USAGE
7477 #define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
7478 #undef TARGET_TRAMPOLINE_INIT
7479 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
/* Instantiate the target vector from the macros defined above.  */
7481 struct gcc_target targetm = TARGET_INITIALIZER;