1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
47 #include "integrate.h"
49 #include "target-def.h"
50 #include "langhooks.h"
54 /* Structure of this file:
56 + Command Line Option Support
57 + Pattern support - constraints, predicates, expanders
60 + Functions to save and restore machine-specific function data.
61 + Frame/Epilog/Prolog Related
63 + Function args in registers
64 + Handle pipeline hazards
67 + Machine-dependent Reorg
72 Symbols are encoded as @ <char> . <name> where <char> is one of these:
80 c - cb (control bus) */
82 struct GTY(()) machine_function
84 int mep_frame_pointer_needed;
92 /* Records __builtin_return address. */
96 int reg_save_slot[FIRST_PSEUDO_REGISTER];
97 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
99 /* 2 if the current function has an interrupt attribute, 1 if not, 0
100 if unknown. This is here because resource.c uses EPILOGUE_USES
102 int interrupt_handler;
104 /* Likewise, for disinterrupt attribute. */
105 int disable_interrupts;
107 /* Number of doloop tags used so far. */
110 /* True if the last tag was allocated to a doloop_end. */
111 bool doloop_tag_from_end;
113 /* True if reload changes $TP. */
114 bool reload_changes_tp;
116 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
117 We only set this if the function is an interrupt handler. */
118 int asms_without_operands;
121 #define MEP_CONTROL_REG(x) \
122 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
124 static const struct attribute_spec mep_attribute_table[11];
126 static GTY(()) section * based_section;
127 static GTY(()) section * tinybss_section;
128 static GTY(()) section * far_section;
129 static GTY(()) section * farbss_section;
130 static GTY(()) section * frodata_section;
131 static GTY(()) section * srodata_section;
133 static GTY(()) section * vtext_section;
134 static GTY(()) section * vftext_section;
135 static GTY(()) section * ftext_section;
137 static void mep_set_leaf_registers (int);
138 static bool symbol_p (rtx);
139 static bool symbolref_p (rtx);
140 static void encode_pattern_1 (rtx);
141 static void encode_pattern (rtx);
142 static bool const_in_range (rtx, int, int);
143 static void mep_rewrite_mult (rtx, rtx);
144 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
145 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
146 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
147 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
148 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
149 static bool mep_nongeneral_reg (rtx);
150 static bool mep_general_copro_reg (rtx);
151 static bool mep_nonregister (rtx);
152 static struct machine_function* mep_init_machine_status (void);
153 static rtx mep_tp_rtx (void);
154 static rtx mep_gp_rtx (void);
155 static bool mep_interrupt_p (void);
156 static bool mep_disinterrupt_p (void);
157 static bool mep_reg_set_p (rtx, rtx);
158 static bool mep_reg_set_in_function (int);
159 static bool mep_interrupt_saved_reg (int);
160 static bool mep_call_saves_register (int);
162 static void add_constant (int, int, int, int);
163 static rtx maybe_dead_move (rtx, rtx, bool);
164 static void mep_reload_pointer (int, const char *);
165 static void mep_start_function (FILE *, HOST_WIDE_INT);
166 static bool mep_function_ok_for_sibcall (tree, tree);
167 static int unique_bit_in (HOST_WIDE_INT);
168 static int bit_size_for_clip (HOST_WIDE_INT);
169 static int bytesize (const_tree, enum machine_mode);
170 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
171 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
172 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
173 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
174 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
175 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
176 static bool mep_function_attribute_inlinable_p (const_tree);
177 static bool mep_can_inline_p (tree, tree);
178 static bool mep_lookup_pragma_disinterrupt (const char *);
179 static int mep_multiple_address_regions (tree, bool);
180 static int mep_attrlist_to_encoding (tree, tree);
181 static void mep_insert_attributes (tree, tree *);
182 static void mep_encode_section_info (tree, rtx, int);
183 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
184 static void mep_unique_section (tree, int);
185 static unsigned int mep_section_type_flags (tree, const char *, int);
186 static void mep_asm_named_section (const char *, unsigned int, tree);
187 static bool mep_mentioned_p (rtx, rtx, int);
188 static void mep_reorg_regmove (rtx);
189 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
190 static void mep_reorg_repeat (rtx);
191 static bool mep_invertable_branch_p (rtx);
192 static void mep_invert_branch (rtx, rtx);
193 static void mep_reorg_erepeat (rtx);
194 static void mep_jmp_return_reorg (rtx);
195 static void mep_reorg_addcombine (rtx);
196 static void mep_reorg (void);
197 static void mep_init_intrinsics (void);
198 static void mep_init_builtins (void);
199 static void mep_intrinsic_unavailable (int);
200 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
201 static bool mep_get_move_insn (int, const struct cgen_insn **);
202 static rtx mep_convert_arg (enum machine_mode, rtx);
203 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
204 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
205 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
206 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
207 static int mep_adjust_cost (rtx, rtx, rtx, int);
208 static int mep_issue_rate (void);
209 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
210 static void mep_move_ready_insn (rtx *, int, rtx);
211 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
212 static rtx mep_make_bundle (rtx, rtx);
213 static void mep_bundle_insns (rtx);
214 static bool mep_rtx_cost (rtx, int, int, int *, bool);
215 static int mep_address_cost (rtx, bool);
216 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
218 static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
220 static bool mep_vector_mode_supported_p (enum machine_mode);
221 static bool mep_handle_option (size_t, const char *, int);
222 static rtx mep_allocate_initial_value (rtx);
223 static void mep_asm_init_sections (void);
224 static int mep_comp_type_attributes (const_tree, const_tree);
225 static bool mep_narrow_volatile_bitfield (void);
226 static rtx mep_expand_builtin_saveregs (void);
227 static tree mep_build_builtin_va_list (void);
228 static void mep_expand_va_start (tree, rtx);
229 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
230 static bool mep_can_eliminate (const int, const int);
231 static void mep_trampoline_init (rtx, tree, rtx);
233 /* Initialize the GCC target structure. */
235 #undef TARGET_ASM_FUNCTION_PROLOGUE
236 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
237 #undef TARGET_ATTRIBUTE_TABLE
238 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
239 #undef TARGET_COMP_TYPE_ATTRIBUTES
240 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
241 #undef TARGET_INSERT_ATTRIBUTES
242 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
243 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
244 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
245 #undef TARGET_CAN_INLINE_P
246 #define TARGET_CAN_INLINE_P mep_can_inline_p
247 #undef TARGET_SECTION_TYPE_FLAGS
248 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
249 #undef TARGET_ASM_NAMED_SECTION
250 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
251 #undef TARGET_INIT_BUILTINS
252 #define TARGET_INIT_BUILTINS mep_init_builtins
253 #undef TARGET_EXPAND_BUILTIN
254 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
255 #undef TARGET_SCHED_ADJUST_COST
256 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
257 #undef TARGET_SCHED_ISSUE_RATE
258 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
259 #undef TARGET_SCHED_REORDER
260 #define TARGET_SCHED_REORDER mep_sched_reorder
261 #undef TARGET_STRIP_NAME_ENCODING
262 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
263 #undef TARGET_ASM_SELECT_SECTION
264 #define TARGET_ASM_SELECT_SECTION mep_select_section
265 #undef TARGET_ASM_UNIQUE_SECTION
266 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
267 #undef TARGET_ENCODE_SECTION_INFO
268 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
269 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
270 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
271 #undef TARGET_RTX_COSTS
272 #define TARGET_RTX_COSTS mep_rtx_cost
273 #undef TARGET_ADDRESS_COST
274 #define TARGET_ADDRESS_COST mep_address_cost
275 #undef TARGET_MACHINE_DEPENDENT_REORG
276 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
277 #undef TARGET_SETUP_INCOMING_VARARGS
278 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
279 #undef TARGET_PASS_BY_REFERENCE
280 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
281 #undef TARGET_VECTOR_MODE_SUPPORTED_P
282 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
283 #undef TARGET_HANDLE_OPTION
284 #define TARGET_HANDLE_OPTION mep_handle_option
285 #undef TARGET_DEFAULT_TARGET_FLAGS
286 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
287 #undef TARGET_ALLOCATE_INITIAL_VALUE
288 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
289 #undef TARGET_ASM_INIT_SECTIONS
290 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
291 #undef TARGET_RETURN_IN_MEMORY
292 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
293 #undef TARGET_NARROW_VOLATILE_BITFIELD
294 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
295 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
296 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
297 #undef TARGET_BUILD_BUILTIN_VA_LIST
298 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
299 #undef TARGET_EXPAND_BUILTIN_VA_START
300 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
301 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
302 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
303 #undef TARGET_CAN_ELIMINATE
304 #define TARGET_CAN_ELIMINATE mep_can_eliminate
305 #undef TARGET_TRAMPOLINE_INIT
306 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
308 struct gcc_target targetm = TARGET_INITIALIZER;
310 #define WANT_GCC_DEFINITIONS
311 #include "mep-intrin.h"
312 #undef WANT_GCC_DEFINITIONS
315 /* Command Line Option Support. */
317 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
319 /* True if we can use cmov instructions to move values back and forth
320 between core and coprocessor registers. */
321 bool mep_have_core_copro_moves_p;
323 /* True if we can use cmov instructions (or a work-alike) to move
324 values between coprocessor registers. */
325 bool mep_have_copro_copro_moves_p;
327 /* A table of all coprocessor instructions that can act like
328 a coprocessor-to-coprocessor cmov. */
329 static const int mep_cmov_insns[] = {
342 static int option_mtiny_specified = 0;
346 mep_set_leaf_registers (int enable)
350 if (mep_leaf_registers[0] != enable)
351 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
352 mep_leaf_registers[i] = enable;
356 mep_conditional_register_usage (char *fixed_regs, char *call_used_regs)
360 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
362 fixed_regs[HI_REGNO] = 1;
363 fixed_regs[LO_REGNO] = 1;
364 call_used_regs[HI_REGNO] = 1;
365 call_used_regs[LO_REGNO] = 1;
368 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
373 mep_optimization_options (void)
375 /* The first scheduling pass often increases register pressure and tends
376 to result in more spill code. Only run it when specifically asked. */
377 flag_schedule_insns = 0;
379 /* Using $fp doesn't gain us much, even when debugging is important. */
380 flag_omit_frame_pointer = 1;
384 mep_override_options (void)
387 warning (OPT_fpic, "-fpic is not supported");
389 warning (OPT_fPIC, "-fPIC is not supported");
390 if (TARGET_S && TARGET_M)
391 error ("only one of -ms and -mm may be given");
392 if (TARGET_S && TARGET_L)
393 error ("only one of -ms and -ml may be given");
394 if (TARGET_M && TARGET_L)
395 error ("only one of -mm and -ml may be given");
396 if (TARGET_S && option_mtiny_specified)
397 error ("only one of -ms and -mtiny= may be given");
398 if (TARGET_M && option_mtiny_specified)
399 error ("only one of -mm and -mtiny= may be given");
400 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
401 warning (0, "-mclip currently has no effect without -mminmax");
403 if (mep_const_section)
405 if (strcmp (mep_const_section, "tiny") != 0
406 && strcmp (mep_const_section, "near") != 0
407 && strcmp (mep_const_section, "far") != 0)
408 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
412 mep_tiny_cutoff = 65536;
415 if (TARGET_L && ! option_mtiny_specified)
418 if (TARGET_64BIT_CR_REGS)
419 flag_split_wide_types = 0;
421 init_machine_status = mep_init_machine_status;
422 mep_init_intrinsics ();
425 /* Pattern Support - constraints, predicates, expanders. */
427 /* MEP has very few instructions that can refer to the span of
428 addresses used by symbols, so it's common to check for them. */
433 int c = GET_CODE (x);
435 return (c == CONST_INT
445 if (GET_CODE (x) != MEM)
448 c = GET_CODE (XEXP (x, 0));
449 return (c == CONST_INT
454 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
456 #define GEN_REG(R, STRICT) \
459 && ((R) == ARG_POINTER_REGNUM \
460 || (R) >= FIRST_PSEUDO_REGISTER)))
462 static char pattern[12], *patternp;
463 static GTY(()) rtx patternr[12];
464 #define RTX_IS(x) (strcmp (pattern, x) == 0)
467 encode_pattern_1 (rtx x)
471 if (patternp == pattern + sizeof (pattern) - 2)
477 patternr[patternp-pattern] = x;
479 switch (GET_CODE (x))
487 encode_pattern_1 (XEXP(x, 0));
491 encode_pattern_1 (XEXP(x, 0));
492 encode_pattern_1 (XEXP(x, 1));
496 encode_pattern_1 (XEXP(x, 0));
497 encode_pattern_1 (XEXP(x, 1));
501 encode_pattern_1 (XEXP(x, 0));
515 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
516 for (i=0; i<XVECLEN (x, 0); i++)
517 encode_pattern_1 (XVECEXP (x, 0, i));
525 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
534 encode_pattern (rtx x)
537 encode_pattern_1 (x);
542 mep_section_tag (rtx x)
548 switch (GET_CODE (x))
555 x = XVECEXP (x, 0, 0);
558 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
567 if (GET_CODE (x) != SYMBOL_REF)
570 if (name[0] == '@' && name[2] == '.')
572 if (name[1] == 'i' || name[1] == 'I')
575 return 'f'; /* near */
576 return 'n'; /* far */
584 mep_regno_reg_class (int regno)
588 case SP_REGNO: return SP_REGS;
589 case TP_REGNO: return TP_REGS;
590 case GP_REGNO: return GP_REGS;
591 case 0: return R0_REGS;
592 case HI_REGNO: return HI_REGS;
593 case LO_REGNO: return LO_REGS;
594 case ARG_POINTER_REGNUM: return GENERAL_REGS;
597 if (GR_REGNO_P (regno))
598 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
599 if (CONTROL_REGNO_P (regno))
602 if (CR_REGNO_P (regno))
606 /* Search for the register amongst user-defined subclasses of
607 the coprocessor registers. */
608 for (i = USER0_REGS; i <= USER3_REGS; ++i)
610 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
612 for (j = 0; j < N_REG_CLASSES; ++j)
614 enum reg_class sub = reg_class_subclasses[i][j];
616 if (sub == LIM_REG_CLASSES)
618 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
623 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
626 if (CCR_REGNO_P (regno))
629 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
635 mep_reg_class_from_constraint (int c, const char *str)
652 return LOADABLE_CR_REGS;
654 return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
656 return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
683 enum reg_class which = c - 'A' + USER0_REGS;
684 return (reg_class_size[which] > 0 ? which : NO_REGS);
693 mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
697 case 'I': return value >= -32768 && value < 32768;
698 case 'J': return value >= 0 && value < 65536;
699 case 'K': return value >= 0 && value < 0x01000000;
700 case 'L': return value >= -32 && value < 32;
701 case 'M': return value >= 0 && value < 32;
702 case 'N': return value >= 0 && value < 16;
706 return value >= -2147483647-1 && value <= 2147483647;
713 mep_extra_constraint (rtx value, int c)
715 encode_pattern (value);
720 /* For near symbols, like what call uses. */
721 if (GET_CODE (value) == REG)
723 return mep_call_address_operand (value, GET_MODE (value));
726 /* For signed 8-bit immediates. */
727 return (GET_CODE (value) == CONST_INT
728 && INTVAL (value) >= -128
729 && INTVAL (value) <= 127);
732 /* For tp/gp relative symbol values. */
733 return (RTX_IS ("u3s") || RTX_IS ("u2s")
734 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
737 /* Non-absolute memories. */
738 return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
742 return RTX_IS ("Hs");
745 /* Register indirect. */
746 return RTX_IS ("mr");
749 return mep_section_tag (value) == 'c' && RTX_IS ("ms");
760 const_in_range (rtx x, int minv, int maxv)
762 return (GET_CODE (x) == CONST_INT
763 && INTVAL (x) >= minv
764 && INTVAL (x) <= maxv);
767 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
768 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
769 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
770 at the end of the insn stream. */
773 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
775 if (rtx_equal_p (dest, src1))
777 else if (rtx_equal_p (dest, src2))
782 emit_insn (gen_movsi (copy_rtx (dest), src1));
784 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
789 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
790 Change the last element of PATTERN from (clobber (scratch:SI))
791 to (clobber (reg:SI HI_REGNO)). */
794 mep_rewrite_mult (rtx insn, rtx pattern)
798 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
799 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
800 PATTERN (insn) = pattern;
801 INSN_CODE (insn) = -1;
804 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
805 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
806 store the result in DEST if nonnull. */
809 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
813 lo = gen_rtx_REG (SImode, LO_REGNO);
815 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
816 mep_mulr_source (insn, dest, src1, src2));
818 pattern = gen_mulsi3_lo (lo, src1, src2);
819 mep_rewrite_mult (insn, pattern);
822 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
823 SRC3 into $lo, then use either madd or maddr. The move into $lo will
824 be deleted by a peephole2 if SRC3 is already in $lo. */
827 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
831 lo = gen_rtx_REG (SImode, LO_REGNO);
832 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
834 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
835 mep_mulr_source (insn, dest, src1, src2),
838 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
839 mep_rewrite_mult (insn, pattern);
842 /* Return true if $lo has the same value as integer register GPR when
843 instruction INSN is reached. If necessary, rewrite the instruction
844 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
845 rtx for (reg:SI LO_REGNO).
847 This function is intended to be used by the peephole2 pass. Since
848 that pass goes from the end of a basic block to the beginning, and
849 propagates liveness information on the way, there is no need to
850 update register notes here.
852 If GPR_DEAD_P is true on entry, and this function returns true,
853 then the caller will replace _every_ use of GPR in and after INSN
854 with LO. This means that if the instruction that sets $lo is a
855 mulr- or maddr-type instruction, we can rewrite it to use mul or
856 madd instead. In combination with the copy progagation pass,
857 this allows us to replace sequences like:
866 if GPR is no longer used. */
869 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
873 insn = PREV_INSN (insn);
875 switch (recog_memoized (insn))
877 case CODE_FOR_mulsi3_1:
879 if (rtx_equal_p (recog_data.operand[0], gpr))
881 mep_rewrite_mulsi3 (insn,
882 gpr_dead_p ? NULL : recog_data.operand[0],
883 recog_data.operand[1],
884 recog_data.operand[2]);
889 case CODE_FOR_maddsi3:
891 if (rtx_equal_p (recog_data.operand[0], gpr))
893 mep_rewrite_maddsi3 (insn,
894 gpr_dead_p ? NULL : recog_data.operand[0],
895 recog_data.operand[1],
896 recog_data.operand[2],
897 recog_data.operand[3]);
902 case CODE_FOR_mulsi3r:
903 case CODE_FOR_maddsi3r:
905 return rtx_equal_p (recog_data.operand[1], gpr);
908 if (reg_set_p (lo, insn)
909 || reg_set_p (gpr, insn)
910 || volatile_insn_p (PATTERN (insn)))
913 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
918 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
922 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
925 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
927 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
932 /* Return true if SET can be turned into a post-modify load or store
933 that adds OFFSET to GPR. In other words, return true if SET can be
936 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
938 It's OK to change SET to an equivalent operation in order to
942 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
945 unsigned int reg_bytes, mem_bytes;
946 enum machine_mode reg_mode, mem_mode;
948 /* Only simple SETs can be converted. */
949 if (GET_CODE (set) != SET)
952 /* Point REG to what we hope will be the register side of the set and
953 MEM to what we hope will be the memory side. */
954 if (GET_CODE (SET_DEST (set)) == MEM)
956 mem = &SET_DEST (set);
957 reg = &SET_SRC (set);
961 reg = &SET_DEST (set);
962 mem = &SET_SRC (set);
963 if (GET_CODE (*mem) == SIGN_EXTEND)
964 mem = &XEXP (*mem, 0);
967 /* Check that *REG is a suitable coprocessor register. */
968 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
971 /* Check that *MEM is a suitable memory reference. */
972 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
975 /* Get the number of bytes in each operand. */
976 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
977 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
979 /* Check that OFFSET is suitably aligned. */
980 if (INTVAL (offset) & (mem_bytes - 1))
983 /* Convert *MEM to a normal integer mode. */
984 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
985 *mem = change_address (*mem, mem_mode, NULL);
987 /* Adjust *REG as well. */
988 *reg = shallow_copy_rtx (*reg);
989 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
991 /* SET is a subword load. Convert it to an explicit extension. */
992 PUT_MODE (*reg, SImode);
993 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
997 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
998 PUT_MODE (*reg, reg_mode);
1003 /* Return the effect of frame-related instruction INSN. */
1006 mep_frame_expr (rtx insn)
1010 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
1011 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
1012 RTX_FRAME_RELATED_P (expr) = 1;
1016 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
1017 new pattern in INSN1; INSN2 will be deleted by the caller. */
1020 mep_make_parallel (rtx insn1, rtx insn2)
1024 if (RTX_FRAME_RELATED_P (insn2))
1026 expr = mep_frame_expr (insn2);
1027 if (RTX_FRAME_RELATED_P (insn1))
1028 expr = gen_rtx_SEQUENCE (VOIDmode,
1029 gen_rtvec (2, mep_frame_expr (insn1), expr));
1030 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
1031 RTX_FRAME_RELATED_P (insn1) = 1;
1034 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
1035 gen_rtvec (2, PATTERN (insn1),
1037 INSN_CODE (insn1) = -1;
1040 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
1041 the basic block to see if any previous load or store instruction can
1042 be persuaded to do SET_INSN as a side-effect. Return true if so. */
1045 mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
1052 insn = PREV_INSN (insn);
1055 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
1057 mep_make_parallel (insn, set_insn);
1061 if (reg_set_p (reg, insn)
1062 || reg_referenced_p (reg, PATTERN (insn))
1063 || volatile_insn_p (PATTERN (insn)))
1067 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
1071 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1074 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1076 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1077 extract_insn (insn);
1082 mep_allow_clip (rtx ux, rtx lx, int s)
1084 HOST_WIDE_INT u = INTVAL (ux);
1085 HOST_WIDE_INT l = INTVAL (lx);
1088 if (!TARGET_OPT_CLIP)
1093 for (i = 0; i < 30; i ++)
1094 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1095 && (l == - ((HOST_WIDE_INT) 1 << i)))
1103 for (i = 0; i < 30; i ++)
1104 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1111 mep_bit_position_p (rtx x, bool looking_for)
1113 if (GET_CODE (x) != CONST_INT)
1115 switch ((int) INTVAL(x) & 0xff)
1117 case 0x01: case 0x02: case 0x04: case 0x08:
1118 case 0x10: case 0x20: case 0x40: case 0x80:
1120 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1121 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1122 return !looking_for;
1128 move_needs_splitting (rtx dest, rtx src,
1129 enum machine_mode mode ATTRIBUTE_UNUSED)
1131 int s = mep_section_tag (src);
1135 if (GET_CODE (src) == CONST
1136 || GET_CODE (src) == MEM)
1137 src = XEXP (src, 0);
1138 else if (GET_CODE (src) == SYMBOL_REF
1139 || GET_CODE (src) == LABEL_REF
1140 || GET_CODE (src) == PLUS)
1146 || (GET_CODE (src) == PLUS
1147 && GET_CODE (XEXP (src, 1)) == CONST_INT
1148 && (INTVAL (XEXP (src, 1)) < -65536
1149 || INTVAL (XEXP (src, 1)) > 0xffffff))
1150 || (GET_CODE (dest) == REG
1151 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1157 mep_split_mov (rtx *operands, int symbolic)
1161 if (move_needs_splitting (operands[0], operands[1], SImode))
1166 if (GET_CODE (operands[1]) != CONST_INT)
1169 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1170 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1171 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1174 if (((!reload_completed && !reload_in_progress)
1175 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1176 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1182 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1183 it to one specific value. So the insn chosen depends on whether
1184 the source and destination modes match. */
1187 mep_vliw_mode_match (rtx tgt)
1189 bool src_vliw = mep_vliw_function_p (cfun->decl);
1190 bool tgt_vliw = INTVAL (tgt);
1192 return src_vliw == tgt_vliw;
1195 /* Like the above, but also test for near/far mismatches. */
1198 mep_vliw_jmp_match (rtx tgt)
1200 bool src_vliw = mep_vliw_function_p (cfun->decl);
1201 bool tgt_vliw = INTVAL (tgt);
1203 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1206 return src_vliw == tgt_vliw;
1210 mep_multi_slot (rtx x)
1212 return get_attr_slot (x) == SLOT_MULTI;
1217 mep_legitimate_constant_p (rtx x)
1219 /* We can't convert symbol values to gp- or tp-rel values after
1220 reload, as reload might have used $gp or $tp for other
1222 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1224 char e = mep_section_tag (x);
1225 return (e != 't' && e != 'b');
1230 /* Be careful not to use macros that need to be compiled one way for
1231 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1234 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1238 #define DEBUG_LEGIT 0
1240 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1244 if (GET_CODE (x) == LO_SUM
1245 && GET_CODE (XEXP (x, 0)) == REG
1246 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1247 && CONSTANT_P (XEXP (x, 1)))
1249 if (GET_MODE_SIZE (mode) > 4)
1251 /* We will end up splitting this, and lo_sums are not
1252 offsettable for us. */
1254 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1259 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1264 if (GET_CODE (x) == REG
1265 && GEN_REG (REGNO (x), strict))
1268 fprintf (stderr, " - yup, [reg]\n");
1273 if (GET_CODE (x) == PLUS
1274 && GET_CODE (XEXP (x, 0)) == REG
1275 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1276 && const_in_range (XEXP (x, 1), -32768, 32767))
1279 fprintf (stderr, " - yup, [reg+const]\n");
1284 if (GET_CODE (x) == PLUS
1285 && GET_CODE (XEXP (x, 0)) == REG
1286 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1287 && GET_CODE (XEXP (x, 1)) == CONST
1288 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1289 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1290 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1291 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1294 fprintf (stderr, " - yup, [reg+unspec]\n");
1299 the_tag = mep_section_tag (x);
1304 fprintf (stderr, " - nope, [far]\n");
1309 if (mode == VOIDmode
1310 && GET_CODE (x) == SYMBOL_REF)
1313 fprintf (stderr, " - yup, call [symbol]\n");
1318 if ((mode == SImode || mode == SFmode)
1320 && LEGITIMATE_CONSTANT_P (x)
1321 && the_tag != 't' && the_tag != 'b')
1323 if (GET_CODE (x) != CONST_INT
1324 || (INTVAL (x) <= 0xfffff
1326 && (INTVAL (x) % 4) == 0))
1329 fprintf (stderr, " - yup, [const]\n");
1336 fprintf (stderr, " - nope.\n");
1342 mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
1343 enum reload_type type,
1344 int ind_levels ATTRIBUTE_UNUSED)
1346 if (GET_CODE (*x) == PLUS
1347 && GET_CODE (XEXP (*x, 0)) == MEM
1348 && GET_CODE (XEXP (*x, 1)) == REG)
1350 /* GCC will by default copy the MEM into a REG, which results in
1351 an invalid address. For us, the best thing to do is move the
1352 whole expression to a REG. */
1353 push_reload (*x, NULL_RTX, x, NULL,
1354 GENERAL_REGS, mode, VOIDmode,
1359 if (GET_CODE (*x) == PLUS
1360 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1361 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1363 char e = mep_section_tag (XEXP (*x, 0));
1365 if (e != 't' && e != 'b')
1367 /* GCC thinks that (sym+const) is a valid address. Well,
1368 sometimes it is, this time it isn't. The best thing to
1369 do is reload the symbol to a register, since reg+int
1370 tends to work, and we can't just add the symbol and
1372 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1373 GENERAL_REGS, mode, VOIDmode,
1382 mep_core_address_length (rtx insn, int opn)
1384 rtx set = single_set (insn);
1385 rtx mem = XEXP (set, opn);
1386 rtx other = XEXP (set, 1-opn);
1387 rtx addr = XEXP (mem, 0);
1389 if (register_operand (addr, Pmode))
1391 if (GET_CODE (addr) == PLUS)
1393 rtx addend = XEXP (addr, 1);
1395 gcc_assert (REG_P (XEXP (addr, 0)));
1397 switch (REGNO (XEXP (addr, 0)))
1399 case STACK_POINTER_REGNUM:
1400 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1401 && mep_imm7a4_operand (addend, VOIDmode))
1406 gcc_assert (REG_P (other));
1408 if (REGNO (other) >= 8)
1411 if (GET_CODE (addend) == CONST
1412 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1413 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1416 if (GET_CODE (addend) == CONST_INT
1417 && INTVAL (addend) >= 0
1418 && INTVAL (addend) <= 127
1419 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1429 mep_cop_address_length (rtx insn, int opn)
1431 rtx set = single_set (insn);
1432 rtx mem = XEXP (set, opn);
1433 rtx addr = XEXP (mem, 0);
1435 if (GET_CODE (mem) != MEM)
1437 if (register_operand (addr, Pmode))
1439 if (GET_CODE (addr) == POST_INC)
1445 #define DEBUG_EXPAND_MOV 0
1447 mep_expand_mov (rtx *operands, enum machine_mode mode)
1452 int post_reload = 0;
1454 tag[0] = mep_section_tag (operands[0]);
1455 tag[1] = mep_section_tag (operands[1]);
1457 if (!reload_in_progress
1458 && !reload_completed
1459 && GET_CODE (operands[0]) != REG
1460 && GET_CODE (operands[0]) != SUBREG
1461 && GET_CODE (operands[1]) != REG
1462 && GET_CODE (operands[1]) != SUBREG)
1463 operands[1] = copy_to_mode_reg (mode, operands[1]);
1465 #if DEBUG_EXPAND_MOV
1466 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1467 reload_in_progress || reload_completed);
1468 debug_rtx (operands[0]);
1469 debug_rtx (operands[1]);
1472 if (mode == DImode || mode == DFmode)
1475 if (reload_in_progress || reload_completed)
1479 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1480 cfun->machine->reload_changes_tp = true;
1482 if (tag[0] == 't' || tag[1] == 't')
1484 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1485 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1488 if (tag[0] == 'b' || tag[1] == 'b')
1490 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1491 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1494 if (cfun->machine->reload_changes_tp == true)
1501 if (symbol_p (operands[1]))
1503 t = mep_section_tag (operands[1]);
1504 if (t == 'b' || t == 't')
1507 if (GET_CODE (operands[1]) == SYMBOL_REF)
1509 tpsym = operands[1];
1510 n = gen_rtx_UNSPEC (mode,
1511 gen_rtvec (1, operands[1]),
1512 t == 'b' ? UNS_TPREL : UNS_GPREL);
1513 n = gen_rtx_CONST (mode, n);
1515 else if (GET_CODE (operands[1]) == CONST
1516 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1517 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1518 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1520 tpsym = XEXP (XEXP (operands[1], 0), 0);
1521 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1522 n = gen_rtx_UNSPEC (mode,
1523 gen_rtvec (1, tpsym),
1524 t == 'b' ? UNS_TPREL : UNS_GPREL);
1525 n = gen_rtx_PLUS (mode, n, tpoffs);
1526 n = gen_rtx_CONST (mode, n);
1528 else if (GET_CODE (operands[1]) == CONST
1529 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1533 error ("unusual TP-relative address");
1537 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1538 : mep_gp_rtx ()), n);
1539 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1540 #if DEBUG_EXPAND_MOV
1541 fprintf(stderr, "mep_expand_mov emitting ");
1548 for (i=0; i < 2; i++)
1550 t = mep_section_tag (operands[i]);
1551 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1556 sym = XEXP (operands[i], 0);
1557 if (GET_CODE (sym) == CONST
1558 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1559 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1572 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1573 n = gen_rtx_CONST (Pmode, n);
1574 n = gen_rtx_PLUS (Pmode, r, n);
1575 operands[i] = replace_equiv_address (operands[i], n);
1580 if ((GET_CODE (operands[1]) != REG
1581 && MEP_CONTROL_REG (operands[0]))
1582 || (GET_CODE (operands[0]) != REG
1583 && MEP_CONTROL_REG (operands[1])))
1586 #if DEBUG_EXPAND_MOV
1587 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1589 temp = gen_reg_rtx (mode);
1590 emit_move_insn (temp, operands[1]);
1594 if (symbolref_p (operands[0])
1595 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1596 || (GET_MODE_SIZE (mode) != 4)))
1600 gcc_assert (!reload_in_progress && !reload_completed);
1602 temp = force_reg (Pmode, XEXP (operands[0], 0));
1603 operands[0] = replace_equiv_address (operands[0], temp);
1604 emit_move_insn (operands[0], operands[1]);
1608 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
1611 if (symbol_p (operands[1])
1612 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1614 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1615 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1619 if (symbolref_p (operands[1])
1620 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1624 if (reload_in_progress || reload_completed)
1627 temp = gen_reg_rtx (Pmode);
1629 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1630 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1631 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1638 /* Cases where the pattern can't be made to use at all. */
1641 mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1645 #define DEBUG_MOV_OK 0
1647 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1648 mep_section_tag (operands[1]));
1649 debug_rtx (operands[0]);
1650 debug_rtx (operands[1]);
1653 /* We want the movh patterns to get these. */
1654 if (GET_CODE (operands[1]) == HIGH)
1657 /* We can't store a register to a far variable without using a
1658 scratch register to hold the address. Using far variables should
1659 be split by mep_emit_mov anyway. */
1660 if (mep_section_tag (operands[0]) == 'f'
1661 || mep_section_tag (operands[1]) == 'f')
1664 fprintf (stderr, " - no, f\n");
1668 i = mep_section_tag (operands[1]);
1669 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1670 /* These are supposed to be generated with adds of the appropriate
1671 register. During and after reload, however, we allow them to
1672 be accessed as normal symbols because adding a dependency on
1673 the base register now might cause problems. */
1676 fprintf (stderr, " - no, bt\n");
1681 /* The only moves we can allow involve at least one general
1682 register, so require it. */
1683 for (i = 0; i < 2; i ++)
1685 /* Allow subregs too, before reload. */
1686 rtx x = operands[i];
1688 if (GET_CODE (x) == SUBREG)
1690 if (GET_CODE (x) == REG
1691 && ! MEP_CONTROL_REG (x))
1694 fprintf (stderr, " - ok\n");
1700 fprintf (stderr, " - no, no gen reg\n");
1705 #define DEBUG_SPLIT_WIDE_MOVE 0
1707 mep_split_wide_move (rtx *operands, enum machine_mode mode)
1711 #if DEBUG_SPLIT_WIDE_MOVE
1712 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1713 debug_rtx (operands[0]);
1714 debug_rtx (operands[1]);
1717 for (i = 0; i <= 1; i++)
1719 rtx op = operands[i], hi, lo;
1721 switch (GET_CODE (op))
1725 unsigned int regno = REGNO (op);
1727 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1731 lo = gen_rtx_REG (SImode, regno);
1733 hi = gen_rtx_ZERO_EXTRACT (SImode,
1734 gen_rtx_REG (DImode, regno),
1739 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1740 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1748 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1749 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1756 /* The high part of CR <- GPR moves must be done after the low part. */
1757 operands [i + 4] = lo;
1758 operands [i + 2] = hi;
1761 if (reg_mentioned_p (operands[2], operands[5])
1762 || GET_CODE (operands[2]) == ZERO_EXTRACT
1763 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1767 /* Overlapping register pairs -- make sure we don't
1768 early-clobber ourselves. */
1770 operands[2] = operands[4];
1773 operands[3] = operands[5];
1777 #if DEBUG_SPLIT_WIDE_MOVE
1778 fprintf(stderr, "\033[34m");
1779 debug_rtx (operands[2]);
1780 debug_rtx (operands[3]);
1781 debug_rtx (operands[4]);
1782 debug_rtx (operands[5]);
1783 fprintf(stderr, "\033[0m");
1787 /* Emit a setcc instruction in its entirity. */
1790 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1798 tmp = op1, op1 = op2, op2 = tmp;
1799 code = swap_condition (code);
1804 op1 = force_reg (SImode, op1);
1805 emit_insn (gen_rtx_SET (VOIDmode, dest,
1806 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1810 if (op2 != const0_rtx)
1811 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1812 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1816 /* Branchful sequence:
1818 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1821 Branchless sequence:
1822 add3 tmp, op1, -op2 32-bit (or mov + sub)
1823 sltu3 tmp, tmp, 1 16-bit
1824 xor3 dest, tmp, 1 32-bit
1826 if (optimize_size && op2 != const0_rtx)
1829 if (op2 != const0_rtx)
1830 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1832 op2 = gen_reg_rtx (SImode);
1833 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1835 emit_insn (gen_rtx_SET (VOIDmode, dest,
1836 gen_rtx_XOR (SImode, op2, const1_rtx)));
1840 if (GET_CODE (op2) != CONST_INT
1841 || INTVAL (op2) == 0x7ffffff)
1843 op2 = GEN_INT (INTVAL (op2) + 1);
1844 return mep_expand_setcc_1 (LT, dest, op1, op2);
1847 if (GET_CODE (op2) != CONST_INT
1848 || INTVAL (op2) == -1)
1850 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1851 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1854 if (GET_CODE (op2) != CONST_INT
1855 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1857 op2 = GEN_INT (INTVAL (op2) - 1);
1858 return mep_expand_setcc_1 (GT, dest, op1, op2);
1861 if (GET_CODE (op2) != CONST_INT
1862 || op2 == const0_rtx)
1864 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1865 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1873 mep_expand_setcc (rtx *operands)
1875 rtx dest = operands[0];
1876 enum rtx_code code = GET_CODE (operands[1]);
1877 rtx op0 = operands[2];
1878 rtx op1 = operands[3];
1880 return mep_expand_setcc_1 (code, dest, op0, op1);
1884 mep_expand_cbranch (rtx *operands)
1886 enum rtx_code code = GET_CODE (operands[0]);
1887 rtx op0 = operands[1];
1888 rtx op1 = operands[2];
1895 if (mep_imm4_operand (op1, SImode))
1898 tmp = gen_reg_rtx (SImode);
1899 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1906 if (mep_imm4_operand (op1, SImode))
1909 tmp = gen_reg_rtx (SImode);
1910 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1919 if (! mep_reg_or_imm4_operand (op1, SImode))
1920 op1 = force_reg (SImode, op1);
1925 if (GET_CODE (op1) == CONST_INT
1926 && INTVAL (op1) != 0x7fffffff)
1928 op1 = GEN_INT (INTVAL (op1) + 1);
1929 code = (code == LE ? LT : GE);
1933 tmp = gen_reg_rtx (SImode);
1934 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1936 code = (code == LE ? EQ : NE);
1942 if (op1 == const1_rtx)
1949 tmp = gen_reg_rtx (SImode);
1950 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1957 tmp = gen_reg_rtx (SImode);
1958 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1960 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1969 tmp = gen_reg_rtx (SImode);
1970 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1971 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1978 tmp = gen_reg_rtx (SImode);
1979 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1981 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1993 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1997 mep_emit_cbranch (rtx *operands, int ne)
1999 if (GET_CODE (operands[1]) == REG)
2000 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
2001 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
2002 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
2004 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
2008 mep_expand_call (rtx *operands, int returns_value)
2010 rtx addr = operands[returns_value];
2011 rtx tp = mep_tp_rtx ();
2012 rtx gp = mep_gp_rtx ();
2014 gcc_assert (GET_CODE (addr) == MEM);
2016 addr = XEXP (addr, 0);
2018 if (! mep_call_address_operand (addr, VOIDmode))
2019 addr = force_reg (SImode, addr);
2021 if (! operands[returns_value+2])
2022 operands[returns_value+2] = const0_rtx;
2025 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
2026 operands[3], tp, gp));
2028 emit_call_insn (gen_call_internal (addr, operands[1],
2029 operands[2], tp, gp));
2032 /* Aliasing Support. */
2034 /* If X is a machine specific address (i.e. a symbol or label being
2035 referenced as a displacement from the GOT implemented using an
2036 UNSPEC), then return the base term. Otherwise return X. */
2039 mep_find_base_term (rtx x)
2044 if (GET_CODE (x) != PLUS)
2049 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
2050 && base == mep_tp_rtx ())
2052 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
2053 && base == mep_gp_rtx ())
2058 if (GET_CODE (term) != CONST)
2060 term = XEXP (term, 0);
2062 if (GET_CODE (term) != UNSPEC
2063 || XINT (term, 1) != unspec)
2066 return XVECEXP (term, 0, 0);
2069 /* Reload Support. */
2071 /* Return true if the registers in CLASS cannot represent the change from
2072 modes FROM to TO. */
2075 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2076 enum reg_class regclass)
2081 /* 64-bit COP regs must remain 64-bit COP regs. */
2082 if (TARGET_64BIT_CR_REGS
2083 && (regclass == CR_REGS
2084 || regclass == LOADABLE_CR_REGS)
2085 && (GET_MODE_SIZE (to) < 8
2086 || GET_MODE_SIZE (from) < 8))
2092 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2095 mep_general_reg (rtx x)
2097 while (GET_CODE (x) == SUBREG)
2099 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
2103 mep_nongeneral_reg (rtx x)
2105 while (GET_CODE (x) == SUBREG)
2107 return (GET_CODE (x) == REG
2108 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2112 mep_general_copro_reg (rtx x)
2114 while (GET_CODE (x) == SUBREG)
2116 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2120 mep_nonregister (rtx x)
2122 while (GET_CODE (x) == SUBREG)
2124 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2127 #define DEBUG_RELOAD 0
2129 /* Return the secondary reload class needed for moving value X to or
2130 from a register in coprocessor register class CLASS. */
2132 static enum reg_class
2133 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2135 if (mep_general_reg (x))
2136 /* We can do the move directly if mep_have_core_copro_moves_p,
2137 otherwise we need to go through memory. Either way, no secondary
2138 register is needed. */
2141 if (mep_general_copro_reg (x))
2143 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2144 if (mep_have_copro_copro_moves_p)
2147 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2148 if (mep_have_core_copro_moves_p)
2149 return GENERAL_REGS;
2151 /* Otherwise we need to do it through memory. No secondary
2152 register is needed. */
2156 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2157 && constraint_satisfied_p (x, CONSTRAINT_U))
2158 /* X is a memory value that we can access directly. */
2161 /* We have to move X into a GPR first and then copy it to
2162 the coprocessor register. The move from the GPR to the
2163 coprocessor might be done directly or through memory,
2164 depending on mep_have_core_copro_moves_p. */
2165 return GENERAL_REGS;
2168 /* Copying X to register in RCLASS. */
2171 mep_secondary_input_reload_class (enum reg_class rclass,
2172 enum machine_mode mode ATTRIBUTE_UNUSED,
2178 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2182 if (reg_class_subset_p (rclass, CR_REGS))
2183 rv = mep_secondary_copro_reload_class (rclass, x);
2184 else if (MEP_NONGENERAL_CLASS (rclass)
2185 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2189 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2194 /* Copying register in RCLASS to X. */
2197 mep_secondary_output_reload_class (enum reg_class rclass,
2198 enum machine_mode mode ATTRIBUTE_UNUSED,
2204 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2208 if (reg_class_subset_p (rclass, CR_REGS))
2209 rv = mep_secondary_copro_reload_class (rclass, x);
2210 else if (MEP_NONGENERAL_CLASS (rclass)
2211 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2215 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2221 /* Implement SECONDARY_MEMORY_NEEDED. */
2224 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2225 enum machine_mode mode ATTRIBUTE_UNUSED)
2227 if (!mep_have_core_copro_moves_p)
2229 if (reg_classes_intersect_p (rclass1, CR_REGS)
2230 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2232 if (reg_classes_intersect_p (rclass2, CR_REGS)
2233 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2235 if (!mep_have_copro_copro_moves_p
2236 && reg_classes_intersect_p (rclass1, CR_REGS)
2237 && reg_classes_intersect_p (rclass2, CR_REGS))
2244 mep_expand_reload (rtx *operands, enum machine_mode mode)
2246 /* There are three cases for each direction:
2251 int s0 = mep_section_tag (operands[0]) == 'f';
2252 int s1 = mep_section_tag (operands[1]) == 'f';
2253 int c0 = mep_nongeneral_reg (operands[0]);
2254 int c1 = mep_nongeneral_reg (operands[1]);
2255 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2258 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2259 debug_rtx (operands[0]);
2260 debug_rtx (operands[1]);
2265 case 00: /* Don't know why this gets here. */
2266 case 02: /* general = far */
2267 emit_move_insn (operands[0], operands[1]);
2270 case 10: /* cr = mem */
2271 case 11: /* cr = cr */
2272 case 01: /* mem = cr */
2273 case 12: /* cr = far */
2274 emit_move_insn (operands[2], operands[1]);
2275 emit_move_insn (operands[0], operands[2]);
2278 case 20: /* far = general */
2279 emit_move_insn (operands[2], XEXP (operands[1], 0));
2280 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2283 case 21: /* far = cr */
2284 case 22: /* far = far */
2286 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2287 which, mode_name[mode]);
2288 debug_rtx (operands[0]);
2289 debug_rtx (operands[1]);
2294 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2295 can be moved directly into registers 0 to 7, but not into the rest.
2296 If so, and if the required class includes registers 0 to 7, restrict
2297 it to those registers. */
2300 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2302 switch (GET_CODE (x))
2305 if (INTVAL (x) >= 0x10000
2306 && INTVAL (x) < 0x01000000
2307 && (INTVAL (x) & 0xffff) != 0
2308 && reg_class_subset_p (TPREL_REGS, rclass))
2309 rclass = TPREL_REGS;
2315 if (mep_section_tag (x) != 'f'
2316 && reg_class_subset_p (TPREL_REGS, rclass))
2317 rclass = TPREL_REGS;
2326 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2327 moves, 4 for direct double-register moves, and 1000 for anything
2328 that requires a temporary register or temporary stack slot. */
2331 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
2333 if (mep_have_copro_copro_moves_p
2334 && reg_class_subset_p (from, CR_REGS)
2335 && reg_class_subset_p (to, CR_REGS))
2337 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2341 if (reg_class_subset_p (from, CR_REGS)
2342 && reg_class_subset_p (to, CR_REGS))
2344 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2348 if (reg_class_subset_p (from, CR_REGS)
2349 || reg_class_subset_p (to, CR_REGS))
2351 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2355 if (mep_secondary_memory_needed (from, to, mode))
2357 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2360 if (GET_MODE_SIZE (mode) > 4)
2367 /* Functions to save and restore machine-specific function data. */
2369 static struct machine_function *
2370 mep_init_machine_status (void)
2372 struct machine_function *f;
2374 f = (struct machine_function *) ggc_alloc_cleared (sizeof (struct machine_function));
2380 mep_allocate_initial_value (rtx reg)
2384 if (GET_CODE (reg) != REG)
2387 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2390 /* In interrupt functions, the "initial" values of $gp and $tp are
2391 provided by the prologue. They are not necessarily the same as
2392 the values that the caller was using. */
2393 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2394 if (mep_interrupt_p ())
2397 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2399 cfun->machine->reg_save_size += 4;
2400 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2403 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2404 return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
2408 mep_return_addr_rtx (int count)
2413 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2419 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2425 return get_hard_reg_initial_val (Pmode, GP_REGNO);
2429 mep_interrupt_p (void)
2431 if (cfun->machine->interrupt_handler == 0)
2433 int interrupt_handler
2434 = (lookup_attribute ("interrupt",
2435 DECL_ATTRIBUTES (current_function_decl))
2437 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2439 return cfun->machine->interrupt_handler == 2;
2443 mep_disinterrupt_p (void)
2445 if (cfun->machine->disable_interrupts == 0)
2447 int disable_interrupts
2448 = (lookup_attribute ("disinterrupt",
2449 DECL_ATTRIBUTES (current_function_decl))
2451 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2453 return cfun->machine->disable_interrupts == 2;
2457 /* Frame/Epilog/Prolog Related. */
2460 mep_reg_set_p (rtx reg, rtx insn)
2462 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2465 if (FIND_REG_INC_NOTE (insn, reg))
2467 insn = PATTERN (insn);
2470 if (GET_CODE (insn) == SET
2471 && GET_CODE (XEXP (insn, 0)) == REG
2472 && GET_CODE (XEXP (insn, 1)) == REG
2473 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2476 return set_of (reg, insn) != NULL_RTX;
2480 #define MEP_SAVES_UNKNOWN 0
2481 #define MEP_SAVES_YES 1
2482 #define MEP_SAVES_MAYBE 2
2483 #define MEP_SAVES_NO 3
2486 mep_reg_set_in_function (int regno)
2490 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2493 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2496 push_topmost_sequence ();
2497 insn = get_insns ();
2498 pop_topmost_sequence ();
2503 reg = gen_rtx_REG (SImode, regno);
2505 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2506 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2512 mep_asm_without_operands_p (void)
2514 if (cfun->machine->asms_without_operands == 0)
2518 push_topmost_sequence ();
2519 insn = get_insns ();
2520 pop_topmost_sequence ();
2522 cfun->machine->asms_without_operands = 1;
2526 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2528 cfun->machine->asms_without_operands = 2;
2531 insn = NEXT_INSN (insn);
2535 return cfun->machine->asms_without_operands == 2;
2538 /* Interrupt functions save/restore every call-preserved register, and
2539 any call-used register it uses (or all if it calls any function,
2540 since they may get clobbered there too). Here we check to see
2541 which call-used registers need saving. */
2543 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2544 && (r == FIRST_CCR_REGNO + 1 \
2545 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2546 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2549 mep_interrupt_saved_reg (int r)
2551 if (!mep_interrupt_p ())
2553 if (r == REGSAVE_CONTROL_TEMP
2554 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2556 if (mep_asm_without_operands_p ()
2558 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2559 || IVC2_ISAVED_REG (r)))
2561 if (!current_function_is_leaf)
2562 /* Function calls mean we need to save $lp. */
2563 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2565 if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
2566 /* The interrupt handler might use these registers for repeat blocks,
2567 or it might call a function that does so. */
2568 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2570 if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2572 /* Functions we call might clobber these. */
2573 if (call_used_regs[r] && !fixed_regs[r])
2575 /* Additional registers that need to be saved for IVC2. */
2576 if (IVC2_ISAVED_REG (r))
2583 mep_call_saves_register (int r)
2585 if (! cfun->machine->frame_locked)
2587 int rv = MEP_SAVES_NO;
2589 if (cfun->machine->reg_save_slot[r])
2591 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2593 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2595 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2597 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2598 /* We need these to have stack slots so that they can be set during
2601 else if (mep_interrupt_saved_reg (r))
2603 cfun->machine->reg_saved[r] = rv;
2605 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2608 /* Return true if epilogue uses register REGNO. */
2611 mep_epilogue_uses (int regno)
2613 /* Since $lp is a call-saved register, the generic code will normally
2614 mark it used in the epilogue if it needs to be saved and restored.
2615 However, when profiling is enabled, the profiling code will implicitly
2616 clobber $11. This case has to be handled specially both here and in
2617 mep_call_saves_register. */
2618 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2620 /* Interrupt functions save/restore pretty much everything. */
2621 return (reload_completed && mep_interrupt_saved_reg (regno));
2625 mep_reg_size (int regno)
2627 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2632 /* Worker function for TARGET_CAN_ELIMINATE. */
2635 mep_can_eliminate (const int from, const int to)
2637 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2638 ? ! frame_pointer_needed
2643 mep_elimination_offset (int from, int to)
2647 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2650 if (!cfun->machine->frame_locked)
2651 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2653 /* We don't count arg_regs_to_save in the arg pointer offset, because
2654 gcc thinks the arg pointer has moved along with the saved regs.
2655 However, we do count it when we adjust $sp in the prologue. */
2657 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2658 if (mep_call_saves_register (i))
2659 reg_save_size += mep_reg_size (i);
2661 if (reg_save_size % 8)
2662 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2664 cfun->machine->regsave_filler = 0;
2666 /* This is what our total stack adjustment looks like. */
2667 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2670 cfun->machine->frame_filler = 8 - (total_size % 8);
2672 cfun->machine->frame_filler = 0;
2675 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2676 return reg_save_size + cfun->machine->regsave_filler;
2678 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2679 return cfun->machine->frame_filler + frame_size;
2681 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2682 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2690 RTX_FRAME_RELATED_P (x) = 1;
2694 /* Since the prologue/epilogue code is generated after optimization,
2695 we can't rely on gcc to split constants for us. So, this code
2696 captures all the ways to add a constant to a register in one logic
2697 chunk, including optimizing away insns we just don't need. This
2698 makes the prolog/epilog code easier to follow. */
2700 add_constant (int dest, int src, int value, int mark_frame)
2705 if (src == dest && value == 0)
2710 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2711 gen_rtx_REG (SImode, src));
2713 RTX_FRAME_RELATED_P(insn) = 1;
2717 if (value >= -32768 && value <= 32767)
2719 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2720 gen_rtx_REG (SImode, src),
2723 RTX_FRAME_RELATED_P(insn) = 1;
2727 /* Big constant, need to use a temp register. We use
2728 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2729 area is always small enough to directly add to). */
2731 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2732 lo = value & 0xffff;
2734 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2739 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2740 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2744 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2745 gen_rtx_REG (SImode, src),
2746 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
2749 RTX_FRAME_RELATED_P(insn) = 1;
2750 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2751 gen_rtx_SET (SImode,
2752 gen_rtx_REG (SImode, dest),
2753 gen_rtx_PLUS (SImode,
2754 gen_rtx_REG (SImode, dest),
2759 /* Move SRC to DEST. Mark the move as being potentially dead if
2763 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2765 rtx insn = emit_move_insn (dest, src);
2768 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2773 /* Used for interrupt functions, which can't assume that $tp and $gp
2774 contain the correct pointers. */
2777 mep_reload_pointer (int regno, const char *symbol)
2781 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2784 reg = gen_rtx_REG (SImode, regno);
2785 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2786 emit_insn (gen_movsi_topsym_s (reg, sym));
2787 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2790 /* Assign save slots for any register not already saved. DImode
2791 registers go at the end of the reg save area; the rest go at the
2792 beginning. This is for alignment purposes. Returns true if a frame
2793 is really needed. */
2795 mep_assign_save_slots (int reg_save_size)
2797 bool really_need_stack_frame = false;
2801 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2802 if (mep_call_saves_register(i))
2804 int regsize = mep_reg_size (i);
2806 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2807 || mep_reg_set_in_function (i))
2808 really_need_stack_frame = true;
2810 if (cfun->machine->reg_save_slot[i])
2815 cfun->machine->reg_save_size += regsize;
2816 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2820 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2824 cfun->machine->frame_locked = 1;
2825 return really_need_stack_frame;
2829 mep_expand_prologue (void)
2831 int i, rss, sp_offset = 0;
2834 int really_need_stack_frame;
2836 /* We must not allow register renaming in interrupt functions,
2837 because that invalidates the correctness of the set of call-used
2838 registers we're going to save/restore. */
2839 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2841 if (mep_disinterrupt_p ())
2842 emit_insn (gen_mep_disable_int ());
2844 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2846 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2847 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2848 really_need_stack_frame = frame_size;
2850 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
2852 sp_offset = reg_save_size;
2853 if (sp_offset + frame_size < 128)
2854 sp_offset += frame_size ;
2856 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2858 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2859 if (mep_call_saves_register(i))
2863 enum machine_mode rmode;
2865 rss = cfun->machine->reg_save_slot[i];
2867 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2868 && (!mep_reg_set_in_function (i)
2869 && !mep_interrupt_p ()))
2872 if (mep_reg_size (i) == 8)
2877 /* If there is a pseudo associated with this register's initial value,
2878 reload might have already spilt it to the stack slot suggested by
2879 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2881 mem = gen_rtx_MEM (rmode,
2882 plus_constant (stack_pointer_rtx, sp_offset - rss));
2883 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2885 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2886 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2887 else if (rmode == DImode)
2890 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2892 mem = gen_rtx_MEM (SImode,
2893 plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2895 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2896 gen_rtx_REG (SImode, i),
2898 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2899 gen_rtx_ZERO_EXTRACT (SImode,
2900 gen_rtx_REG (DImode, i),
2904 insn = maybe_dead_move (mem,
2905 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2907 RTX_FRAME_RELATED_P (insn) = 1;
2909 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2910 gen_rtx_SET (VOIDmode,
2912 gen_rtx_REG (rmode, i)));
2913 mem = gen_rtx_MEM (SImode,
2914 plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2915 insn = maybe_dead_move (mem,
2916 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2922 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2923 gen_rtx_REG (rmode, i),
2925 insn = maybe_dead_move (mem,
2926 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2928 RTX_FRAME_RELATED_P (insn) = 1;
2930 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2931 gen_rtx_SET (VOIDmode,
2933 gen_rtx_REG (rmode, i)));
2937 if (frame_pointer_needed)
2939 /* We've already adjusted down by sp_offset. Total $sp change
2940 is reg_save_size + frame_size. We want a net change here of
2941 just reg_save_size. */
2942 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2945 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2947 if (mep_interrupt_p ())
2949 mep_reload_pointer(GP_REGNO, "__sdabase");
2950 mep_reload_pointer(TP_REGNO, "__tpbase");
2955 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2957 int local = hwi_local;
2958 int frame_size = local + crtl->outgoing_args_size;
2963 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2965 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2966 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2967 sp_offset = reg_save_size + frame_size;
2969 ffill = cfun->machine->frame_filler;
2971 if (cfun->machine->mep_frame_pointer_needed)
2972 reg_names[FP_REGNO] = "$fp";
2974 reg_names[FP_REGNO] = "$8";
2979 if (debug_info_level == DINFO_LEVEL_NONE)
2981 fprintf (file, "\t# frame: %d", sp_offset);
2983 fprintf (file, " %d regs", reg_save_size);
2985 fprintf (file, " %d locals", local);
2986 if (crtl->outgoing_args_size)
2987 fprintf (file, " %d args", crtl->outgoing_args_size);
2988 fprintf (file, "\n");
2992 fprintf (file, "\t#\n");
2993 fprintf (file, "\t# Initial Frame Information:\n");
2994 if (sp_offset || !frame_pointer_needed)
2995 fprintf (file, "\t# Entry ---------- 0\n");
2997 /* Sort registers by save slots, so they're printed in the order
2998 they appear in memory, not the order they're saved in. */
2999 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
3001 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
3002 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
3003 if (cfun->machine->reg_save_slot[slot_map[si]]
3004 > cfun->machine->reg_save_slot[slot_map[sj]])
3006 int t = slot_map[si];
3007 slot_map[si] = slot_map[sj];
3012 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3015 int r = slot_map[i];
3016 int rss = cfun->machine->reg_save_slot[r];
3018 if (!mep_call_saves_register (r))
3021 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
3022 && (!mep_reg_set_in_function (r)
3023 && !mep_interrupt_p ()))
3026 rsize = mep_reg_size(r);
3027 skip = rss - (sp+rsize);
3029 fprintf (file, "\t# %3d bytes for alignment\n", skip);
3030 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
3031 rsize, reg_names[r], sp_offset - rss);
3035 skip = reg_save_size - sp;
3037 fprintf (file, "\t# %3d bytes for alignment\n", skip);
3039 if (frame_pointer_needed)
3040 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
3042 fprintf (file, "\t# %3d bytes for local vars\n", local);
3044 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
3045 if (crtl->outgoing_args_size)
3046 fprintf (file, "\t# %3d bytes for outgoing args\n",
3047 crtl->outgoing_args_size);
3048 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
3049 fprintf (file, "\t#\n");
3053 static int mep_prevent_lp_restore = 0;
3054 static int mep_sibcall_epilogue = 0;
3057 mep_expand_epilogue (void)
3059 int i, sp_offset = 0;
3060 int reg_save_size = 0;
3062 int lp_temp = LP_REGNO, lp_slot = -1;
3063 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
3064 int interrupt_handler = mep_interrupt_p ();
3066 if (profile_arc_flag == 2)
3067 emit_insn (gen_mep_bb_trace_ret ());
3069 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
3070 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
3072 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
3074 if (frame_pointer_needed)
3076 /* If we have a frame pointer, we won't have a reliable stack
3077 pointer (alloca, you know), so rebase SP from FP */
3078 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
3079 gen_rtx_REG (SImode, FP_REGNO));
3080 sp_offset = reg_save_size;
3084 /* SP is right under our local variable space. Adjust it if
3086 sp_offset = reg_save_size + frame_size;
3087 if (sp_offset >= 128)
3089 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
3090 sp_offset -= frame_size;
3094 /* This is backwards so that we restore the control and coprocessor
3095 registers before the temporary registers we use to restore
3097 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
3098 if (mep_call_saves_register (i))
3100 enum machine_mode rmode;
3101 int rss = cfun->machine->reg_save_slot[i];
3103 if (mep_reg_size (i) == 8)
3108 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
3109 && !(mep_reg_set_in_function (i) || interrupt_handler))
3111 if (mep_prevent_lp_restore && i == LP_REGNO)
3113 if (!mep_prevent_lp_restore
3114 && !interrupt_handler
3115 && (i == 10 || i == 11))
3118 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
3119 emit_move_insn (gen_rtx_REG (rmode, i),
3121 plus_constant (stack_pointer_rtx,
3125 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3126 /* Defer this one so we can jump indirect rather than
3127 copying the RA to $lp and "ret". EH epilogues
3128 automatically skip this anyway. */
3129 lp_slot = sp_offset-rss;
3132 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3134 plus_constant (stack_pointer_rtx,
3136 emit_move_insn (gen_rtx_REG (rmode, i),
3137 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3143 /* Restore this one last so we know it will be in the temp
3144 register when we return by jumping indirectly via the temp. */
3145 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3146 gen_rtx_MEM (SImode,
3147 plus_constant (stack_pointer_rtx,
3149 lp_temp = REGSAVE_CONTROL_TEMP;
3153 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
3155 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3156 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3157 gen_rtx_REG (SImode, SP_REGNO),
3158 cfun->machine->eh_stack_adjust));
3160 if (mep_sibcall_epilogue)
3163 if (mep_disinterrupt_p ())
3164 emit_insn (gen_mep_enable_int ());
3166 if (mep_prevent_lp_restore)
3168 emit_jump_insn (gen_eh_return_internal ());
3171 else if (interrupt_handler)
3172 emit_jump_insn (gen_mep_reti ());
3174 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
3178 mep_expand_eh_return (rtx *operands)
3180 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3182 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3183 emit_move_insn (ra, operands[0]);
3187 emit_insn (gen_eh_epilogue (operands[0]));
3191 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3193 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3194 mep_prevent_lp_restore = 1;
3195 mep_expand_epilogue ();
3196 mep_prevent_lp_restore = 0;
3200 mep_expand_sibcall_epilogue (void)
3202 mep_sibcall_epilogue = 1;
3203 mep_expand_epilogue ();
3204 mep_sibcall_epilogue = 0;
3208 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3213 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3216 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3217 if (mep_interrupt_p () || mep_disinterrupt_p ())
3224 mep_return_stackadj_rtx (void)
3226 return gen_rtx_REG (SImode, 10);
3230 mep_return_handler_rtx (void)
3232 return gen_rtx_REG (SImode, LP_REGNO);
3236 mep_function_profiler (FILE *file)
3238 /* Always right at the beginning of the function. */
3239 fprintf (file, "\t# mep function profiler\n");
3240 fprintf (file, "\tadd\t$sp, -8\n");
3241 fprintf (file, "\tsw\t$0, ($sp)\n");
3242 fprintf (file, "\tldc\t$0, $lp\n");
3243 fprintf (file, "\tsw\t$0, 4($sp)\n");
3244 fprintf (file, "\tbsr\t__mep_mcount\n");
3245 fprintf (file, "\tlw\t$0, 4($sp)\n");
3246 fprintf (file, "\tstc\t$0, $lp\n");
3247 fprintf (file, "\tlw\t$0, ($sp)\n");
3248 fprintf (file, "\tadd\t$sp, 8\n\n");
3252 mep_emit_bb_trace_ret (void)
3254 fprintf (asm_out_file, "\t# end of block profiling\n");
3255 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3256 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3257 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3258 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3259 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3260 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3261 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3262 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3263 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3270 /* Operand Printing. */
3273 mep_print_operand_address (FILE *stream, rtx address)
3275 if (GET_CODE (address) == MEM)
3276 address = XEXP (address, 0);
3278 /* cf: gcc.dg/asm-4.c. */
3279 gcc_assert (GET_CODE (address) == REG);
3281 mep_print_operand (stream, address, 0);
3287 const char *pattern;
3290 const conversions[] =
3293 { 0, "m+ri", "3(2)" },
3297 { 0, "mLrs", "%lo(3)(2)" },
3298 { 0, "mLr+si", "%lo(4+5)(2)" },
3299 { 0, "m+ru2s", "%tpoff(5)(2)" },
3300 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3301 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3302 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3303 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3304 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3306 { 0, "m+si", "(2+3)" },
3307 { 0, "m+li", "(2+3)" },
3310 { 0, "+si", "1+2" },
3311 { 0, "+u2si", "%tpoff(3+4)" },
3312 { 0, "+u3si", "%sdaoff(3+4)" },
3318 { 'h', "Hs", "%hi(1)" },
3320 { 'I', "u2s", "%tpoff(2)" },
3321 { 'I', "u3s", "%sdaoff(2)" },
3322 { 'I', "+u2si", "%tpoff(3+4)" },
3323 { 'I', "+u3si", "%sdaoff(3+4)" },
3325 { 'P', "mr", "(1\\+),\\0" },
3331 unique_bit_in (HOST_WIDE_INT i)
3335 case 0x01: case 0xfe: return 0;
3336 case 0x02: case 0xfd: return 1;
3337 case 0x04: case 0xfb: return 2;
3338 case 0x08: case 0xf7: return 3;
3339 case 0x10: case 0x7f: return 4;
3340 case 0x20: case 0xbf: return 5;
3341 case 0x40: case 0xdf: return 6;
3342 case 0x80: case 0xef: return 7;
3349 bit_size_for_clip (HOST_WIDE_INT i)
3353 for (rv = 0; rv < 31; rv ++)
3354 if (((HOST_WIDE_INT) 1 << rv) > i)
3359 /* Print an operand to a assembler instruction. */
3362 mep_print_operand (FILE *file, rtx x, int code)
3365 const char *real_name;
3369 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3370 we're using, then skip over the "mep_" part of its name. */
3371 const struct cgen_insn *insn;
3373 if (mep_get_move_insn (mep_cmov, &insn))
3374 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3376 mep_intrinsic_unavailable (mep_cmov);
3381 switch (GET_CODE (x))
3384 fputs ("clr", file);
3387 fputs ("set", file);
3390 fputs ("not", file);
3393 output_operand_lossage ("invalid %%L code");
3398 /* Print the second operand of a CR <- CR move. If we're using
3399 a two-operand instruction (i.e., a real cmov), then just print
3400 the operand normally. If we're using a "reg, reg, immediate"
3401 instruction such as caddi3, print the operand followed by a
3402 zero field. If we're using a three-register instruction,
3403 print the operand twice. */
3404 const struct cgen_insn *insn;
3406 mep_print_operand (file, x, 0);
3407 if (mep_get_move_insn (mep_cmov, &insn)
3408 && insn_data[insn->icode].n_operands == 3)
3411 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3412 mep_print_operand (file, x, 0);
3414 mep_print_operand (file, const0_rtx, 0);
3420 for (i = 0; conversions[i].pattern; i++)
3421 if (conversions[i].code == code
3422 && strcmp(conversions[i].pattern, pattern) == 0)
3424 for (j = 0; conversions[i].format[j]; j++)
3425 if (conversions[i].format[j] == '\\')
3427 fputc (conversions[i].format[j+1], file);
3430 else if (ISDIGIT(conversions[i].format[j]))
3432 rtx r = patternr[conversions[i].format[j] - '0'];
3433 switch (GET_CODE (r))
3436 fprintf (file, "%s", reg_names [REGNO (r)]);
3442 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3445 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3448 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3451 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3454 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3457 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3458 && !(INTVAL (r) & 0xff))
3459 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3461 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3464 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3465 && conversions[i].format[j+1] == 0)
3467 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3468 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3471 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3474 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3479 fprintf(file, "[const_double 0x%lx]",
3480 (unsigned long) CONST_DOUBLE_HIGH(r));
3483 real_name = TARGET_STRIP_NAME_ENCODING (XSTR (r, 0));
3484 assemble_name (file, real_name);
3487 output_asm_label (r);
3490 fprintf (stderr, "don't know how to print this operand:");
3497 if (conversions[i].format[j] == '+'
3498 && (!code || code == 'I')
3499 && ISDIGIT (conversions[i].format[j+1])
3500 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3501 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3503 fputc(conversions[i].format[j], file);
3507 if (!conversions[i].pattern)
3509 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3517 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3518 int noperands ATTRIBUTE_UNUSED)
3520 /* Despite the fact that MeP is perfectly capable of branching and
3521 doing something else in the same bundle, gcc does jump
3522 optimization *after* scheduling, so we cannot trust the bundling
3523 flags on jump instructions. */
3524 if (GET_MODE (insn) == BImode
3525 && get_attr_slots (insn) != SLOTS_CORE)
3526 fputc ('+', asm_out_file);
3529 /* Function args in registers. */
3532 mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
3533 enum machine_mode mode ATTRIBUTE_UNUSED,
3534 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3535 int second_time ATTRIBUTE_UNUSED)
3537 int nsave = 4 - (cum->nregs + 1);
3540 cfun->machine->arg_regs_to_save = nsave;
3541 *pretend_size = nsave * 4;
3545 bytesize (const_tree type, enum machine_mode mode)
3547 if (mode == BLKmode)
3548 return int_size_in_bytes (type);
3549 return GET_MODE_SIZE (mode);
3553 mep_expand_builtin_saveregs (void)
3558 ns = cfun->machine->arg_regs_to_save;
3561 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3562 regbuf = assign_stack_local (SImode, bufsize, 64);
3567 regbuf = assign_stack_local (SImode, bufsize, 32);
3570 move_block_from_reg (5-ns, regbuf, ns);
3574 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3575 int ofs = 8 * ((ns+1)/2);
3577 for (i=0; i<ns; i++)
3579 int rn = (4-ns) + i + 49;
3582 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3583 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3587 return XEXP (regbuf, 0);
3590 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3593 mep_build_builtin_va_list (void)
3595 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3599 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3601 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3602 get_identifier ("__va_next_gp"), ptr_type_node);
3603 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3604 get_identifier ("__va_next_gp_limit"),
3606 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3608 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3611 DECL_FIELD_CONTEXT (f_next_gp) = record;
3612 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3613 DECL_FIELD_CONTEXT (f_next_cop) = record;
3614 DECL_FIELD_CONTEXT (f_next_stack) = record;
3616 TYPE_FIELDS (record) = f_next_gp;
3617 TREE_CHAIN (f_next_gp) = f_next_gp_limit;
3618 TREE_CHAIN (f_next_gp_limit) = f_next_cop;
3619 TREE_CHAIN (f_next_cop) = f_next_stack;
3621 layout_type (record);
3627 mep_expand_va_start (tree valist, rtx nextarg)
3629 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3630 tree next_gp, next_gp_limit, next_cop, next_stack;
3634 ns = cfun->machine->arg_regs_to_save;
3636 f_next_gp = TYPE_FIELDS (va_list_type_node);
3637 f_next_gp_limit = TREE_CHAIN (f_next_gp);
3638 f_next_cop = TREE_CHAIN (f_next_gp_limit);
3639 f_next_stack = TREE_CHAIN (f_next_cop);
3641 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3643 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3644 valist, f_next_gp_limit, NULL_TREE);
3645 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3647 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3648 valist, f_next_stack, NULL_TREE);
3650 /* va_list.next_gp = expand_builtin_saveregs (); */
3651 u = make_tree (sizetype, expand_builtin_saveregs ());
3652 u = fold_convert (ptr_type_node, u);
3653 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3654 TREE_SIDE_EFFECTS (t) = 1;
3655 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3657 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3658 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3660 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3661 TREE_SIDE_EFFECTS (t) = 1;
3662 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3664 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3665 size_int (8 * ((ns+1)/2)));
3666 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3667 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3668 TREE_SIDE_EFFECTS (t) = 1;
3669 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3671 /* va_list.next_stack = nextarg; */
3672 u = make_tree (ptr_type_node, nextarg);
3673 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3674 TREE_SIDE_EFFECTS (t) = 1;
3675 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3679 mep_gimplify_va_arg_expr (tree valist, tree type,
3681 gimple_seq *post_p ATTRIBUTE_UNUSED)
3683 HOST_WIDE_INT size, rsize;
3684 bool by_reference, ivc2_vec;
3685 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3686 tree next_gp, next_gp_limit, next_cop, next_stack;
3687 tree label_sover, label_selse;
3690 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3692 size = int_size_in_bytes (type);
3693 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3697 type = build_pointer_type (type);
3700 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3702 f_next_gp = TYPE_FIELDS (va_list_type_node);
3703 f_next_gp_limit = TREE_CHAIN (f_next_gp);
3704 f_next_cop = TREE_CHAIN (f_next_gp_limit);
3705 f_next_stack = TREE_CHAIN (f_next_cop);
3707 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3709 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3710 valist, f_next_gp_limit, NULL_TREE);
3711 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3713 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3714 valist, f_next_stack, NULL_TREE);
3716 /* if f_next_gp < f_next_gp_limit
3717 IF (VECTOR_P && IVC2)
3725 val = *f_next_stack;
3726 f_next_stack += rsize;
3730 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3731 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3732 res_addr = create_tmp_var (ptr_type_node, NULL);
3734 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3735 unshare_expr (next_gp_limit));
3736 tmp = build3 (COND_EXPR, void_type_node, tmp,
3737 build1 (GOTO_EXPR, void_type_node,
3738 unshare_expr (label_selse)),
3740 gimplify_and_add (tmp, pre_p);
3744 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3745 gimplify_and_add (tmp, pre_p);
3749 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3750 gimplify_and_add (tmp, pre_p);
3753 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3754 unshare_expr (next_gp), size_int (4));
3755 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3757 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3758 unshare_expr (next_cop), size_int (8));
3759 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3761 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3762 gimplify_and_add (tmp, pre_p);
3766 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3767 gimplify_and_add (tmp, pre_p);
3769 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3770 gimplify_and_add (tmp, pre_p);
3772 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3773 unshare_expr (next_stack), size_int (rsize));
3774 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3778 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3779 gimplify_and_add (tmp, pre_p);
3781 res_addr = fold_convert (build_pointer_type (type), res_addr);
3784 res_addr = build_va_arg_indirect_ref (res_addr);
3786 return build_va_arg_indirect_ref (res_addr);
3790 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3791 rtx libname ATTRIBUTE_UNUSED,
3792 tree fndecl ATTRIBUTE_UNUSED)
3796 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3803 mep_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
3804 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3806 /* VOIDmode is a signal for the backend to pass data to the call
3807 expander via the second operand to the call pattern. We use
3808 this to determine whether to use "jsr" or "jsrv". */
3809 if (mode == VOIDmode)
3810 return GEN_INT (cum.vliw);
3812 /* If we havn't run out of argument registers, return the next. */
3815 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3816 return gen_rtx_REG (mode, cum.nregs + 49);
3818 return gen_rtx_REG (mode, cum.nregs + 1);
3821 /* Otherwise the argument goes on the stack. */
3826 mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3827 enum machine_mode mode,
3829 bool named ATTRIBUTE_UNUSED)
3831 int size = bytesize (type, mode);
3833 /* This is non-obvious, but yes, large values passed after we've run
3834 out of registers are *still* passed by reference - we put the
3835 address of the parameter on the stack, as well as putting the
3836 parameter itself elsewhere on the stack. */
3838 if (size <= 0 || size > 8)
3842 if (TARGET_IVC2 && cum->nregs < 4 && type != NULL_TREE && VECTOR_TYPE_P (type))
3848 mep_arg_advance (CUMULATIVE_ARGS *pcum,
3849 enum machine_mode mode ATTRIBUTE_UNUSED,
3850 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3856 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3858 int size = bytesize (type, BLKmode);
3859 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3860 return size > 0 && size <= 8 ? 0 : 1;
3861 return size > 0 && size <= 4 ? 0 : 1;
3865 mep_narrow_volatile_bitfield (void)
3871 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3874 mep_function_value (tree type, tree func ATTRIBUTE_UNUSED)
3876 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3877 return gen_rtx_REG (TYPE_MODE (type), 48);
3878 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3881 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3884 mep_libcall_value (enum machine_mode mode)
3886 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3889 /* Handle pipeline hazards. */
3891 typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
3892 static const char *opnames[] = { "", "stc", "fsft", "ret" };
3894 static int prev_opcode = 0;
3896 /* This isn't as optimal as it could be, because we don't know what
3897 control register the STC opcode is storing in. We only need to add
3898 the nop if it's the relevent register, but we add it for irrelevent
3902 mep_asm_output_opcode (FILE *file, const char *ptr)
3904 int this_opcode = op_none;
3905 const char *hazard = 0;
3910 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3911 this_opcode = op_fsft;
3914 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3915 this_opcode = op_ret;
3918 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3919 this_opcode = op_stc;
3923 if (prev_opcode == op_stc && this_opcode == op_fsft)
3925 if (prev_opcode == op_stc && this_opcode == op_ret)
3929 fprintf(file, "%s\t# %s-%s hazard\n\t",
3930 hazard, opnames[prev_opcode], opnames[this_opcode]);
3932 prev_opcode = this_opcode;
3935 /* Handle attributes. */
3938 mep_validate_based_tiny (tree *node, tree name, tree args,
3939 int flags ATTRIBUTE_UNUSED, bool *no_add)
3941 if (TREE_CODE (*node) != VAR_DECL
3942 && TREE_CODE (*node) != POINTER_TYPE
3943 && TREE_CODE (*node) != TYPE_DECL)
3945 warning (0, "%qE attribute only applies to variables", name);
3948 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3950 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3952 warning (0, "address region attributes not allowed with auto storage class");
3955 /* Ignore storage attribute of pointed to variable: char __far * x; */
3956 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3958 warning (0, "address region attributes on pointed-to types ignored");
3967 mep_multiple_address_regions (tree list, bool check_section_attr)
3970 int count_sections = 0;
3971 int section_attr_count = 0;
3973 for (a = list; a; a = TREE_CHAIN (a))
3975 if (is_attribute_p ("based", TREE_PURPOSE (a))
3976 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3977 || is_attribute_p ("near", TREE_PURPOSE (a))
3978 || is_attribute_p ("far", TREE_PURPOSE (a))
3979 || is_attribute_p ("io", TREE_PURPOSE (a)))
3981 if (check_section_attr)
3982 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3985 if (check_section_attr)
3986 return section_attr_count;
3988 return count_sections;
3991 #define MEP_ATTRIBUTES(decl) \
3992 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3993 : DECL_ATTRIBUTES (decl) \
3994 ? (DECL_ATTRIBUTES (decl)) \
3995 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3998 mep_validate_near_far (tree *node, tree name, tree args,
3999 int flags ATTRIBUTE_UNUSED, bool *no_add)
4001 if (TREE_CODE (*node) != VAR_DECL
4002 && TREE_CODE (*node) != FUNCTION_DECL
4003 && TREE_CODE (*node) != METHOD_TYPE
4004 && TREE_CODE (*node) != POINTER_TYPE
4005 && TREE_CODE (*node) != TYPE_DECL)
4007 warning (0, "%qE attribute only applies to variables and functions",
4011 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
4013 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
4015 warning (0, "address region attributes not allowed with auto storage class");
4018 /* Ignore storage attribute of pointed to variable: char __far * x; */
4019 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
4021 warning (0, "address region attributes on pointed-to types ignored");
4025 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
4027 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4028 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
4029 DECL_ATTRIBUTES (*node) = NULL_TREE;
4035 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4036 int flags ATTRIBUTE_UNUSED, bool *no_add)
4038 if (TREE_CODE (*node) != FUNCTION_DECL
4039 && TREE_CODE (*node) != METHOD_TYPE)
4041 warning (0, "%qE attribute only applies to functions", name);
4048 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4049 int flags ATTRIBUTE_UNUSED, bool *no_add)
4053 if (TREE_CODE (*node) != FUNCTION_DECL)
4055 warning (0, "%qE attribute only applies to functions", name);
4060 if (DECL_DECLARED_INLINE_P (*node))
4061 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
4062 DECL_UNINLINABLE (*node) = 1;
4064 function_type = TREE_TYPE (*node);
4066 if (TREE_TYPE (function_type) != void_type_node)
4067 error ("interrupt function must have return type of void");
4069 if (TYPE_ARG_TYPES (function_type)
4070 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
4071 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
4072 error ("interrupt function must have no arguments");
4078 mep_validate_io_cb (tree *node, tree name, tree args,
4079 int flags ATTRIBUTE_UNUSED, bool *no_add)
4081 if (TREE_CODE (*node) != VAR_DECL)
4083 warning (0, "%qE attribute only applies to variables", name);
4087 if (args != NULL_TREE)
4089 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
4090 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
4091 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
4093 warning (0, "%qE attribute allows only an integer constant argument",
4099 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
4100 TREE_THIS_VOLATILE (*node) = 1;
4106 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4107 int flags ATTRIBUTE_UNUSED, bool *no_add)
4109 if (TREE_CODE (*node) != FUNCTION_TYPE
4110 && TREE_CODE (*node) != FUNCTION_DECL
4111 && TREE_CODE (*node) != METHOD_TYPE
4112 && TREE_CODE (*node) != FIELD_DECL
4113 && TREE_CODE (*node) != TYPE_DECL)
4115 static int gave_pointer_note = 0;
4116 static int gave_array_note = 0;
4117 static const char * given_type = NULL;
4119 given_type = tree_code_name[TREE_CODE (*node)];
4120 if (TREE_CODE (*node) == POINTER_TYPE)
4121 given_type = "pointers";
4122 if (TREE_CODE (*node) == ARRAY_TYPE)
4123 given_type = "arrays";
4126 warning (0, "%qE attribute only applies to functions, not %s",
4129 warning (0, "%qE attribute only applies to functions",
4133 if (TREE_CODE (*node) == POINTER_TYPE
4134 && !gave_pointer_note)
4136 inform (input_location, "To describe a pointer to a VLIW function, use syntax like this:");
4137 inform (input_location, " typedef int (__vliw *vfuncptr) ();");
4138 gave_pointer_note = 1;
4141 if (TREE_CODE (*node) == ARRAY_TYPE
4142 && !gave_array_note)
4144 inform (input_location, "To describe an array of VLIW function pointers, use syntax like this:");
4145 inform (input_location, " typedef int (__vliw *vfuncptr[]) ();");
4146 gave_array_note = 1;
4150 error ("VLIW functions are not allowed without a VLIW configuration");
4154 static const struct attribute_spec mep_attribute_table[11] =
4156 /* name min max decl type func handler */
4157 { "based", 0, 0, false, false, false, mep_validate_based_tiny },
4158 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny },
4159 { "near", 0, 0, false, false, false, mep_validate_near_far },
4160 { "far", 0, 0, false, false, false, mep_validate_near_far },
4161 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt },
4162 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt },
4163 { "io", 0, 1, false, false, false, mep_validate_io_cb },
4164 { "cb", 0, 1, false, false, false, mep_validate_io_cb },
4165 { "vliw", 0, 0, false, true, false, mep_validate_vliw },
4166 { NULL, 0, 0, false, false, false, NULL }
4170 mep_function_attribute_inlinable_p (const_tree callee)
4172 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4173 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4174 return (lookup_attribute ("disinterrupt", attrs) == 0
4175 && lookup_attribute ("interrupt", attrs) == 0);
4179 mep_can_inline_p (tree caller, tree callee)
4181 if (TREE_CODE (callee) == ADDR_EXPR)
4182 callee = TREE_OPERAND (callee, 0);
4184 if (!mep_vliw_function_p (caller)
4185 && mep_vliw_function_p (callee))
4193 #define FUNC_DISINTERRUPT 2
4196 struct GTY(()) pragma_entry {
4199 const char *funcname;
4201 typedef struct pragma_entry pragma_entry;
4203 /* Hash table of farcall-tagged sections. */
4204 static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4207 pragma_entry_eq (const void *p1, const void *p2)
4209 const pragma_entry *old = (const pragma_entry *) p1;
4210 const char *new_name = (const char *) p2;
4212 return strcmp (old->funcname, new_name) == 0;
4216 pragma_entry_hash (const void *p)
4218 const pragma_entry *old = (const pragma_entry *) p;
4219 return htab_hash_string (old->funcname);
4223 mep_note_pragma_flag (const char *funcname, int flag)
4225 pragma_entry **slot;
4228 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4229 pragma_entry_eq, NULL);
4231 slot = (pragma_entry **)
4232 htab_find_slot_with_hash (pragma_htab, funcname,
4233 htab_hash_string (funcname), INSERT);
4237 *slot = GGC_NEW (pragma_entry);
4240 (*slot)->funcname = ggc_strdup (funcname);
4242 (*slot)->flag |= flag;
4246 mep_lookup_pragma_flag (const char *funcname, int flag)
4248 pragma_entry **slot;
4253 if (funcname[0] == '@' && funcname[2] == '.')
4256 slot = (pragma_entry **)
4257 htab_find_slot_with_hash (pragma_htab, funcname,
4258 htab_hash_string (funcname), NO_INSERT);
4259 if (slot && *slot && ((*slot)->flag & flag))
4261 (*slot)->used |= flag;
4268 mep_lookup_pragma_call (const char *funcname)
4270 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4274 mep_note_pragma_call (const char *funcname)
4276 mep_note_pragma_flag (funcname, FUNC_CALL);
4280 mep_lookup_pragma_disinterrupt (const char *funcname)
4282 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4286 mep_note_pragma_disinterrupt (const char *funcname)
4288 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
4292 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4294 const pragma_entry *d = (const pragma_entry *)(*slot);
4296 if ((d->flag & FUNC_DISINTERRUPT)
4297 && !(d->used & FUNC_DISINTERRUPT))
4298 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
4303 mep_file_cleanups (void)
4306 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
4311 mep_attrlist_to_encoding (tree list, tree decl)
4313 if (mep_multiple_address_regions (list, false) > 1)
4315 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4316 TREE_PURPOSE (TREE_CHAIN (list)),
4318 DECL_SOURCE_LINE (decl));
4319 TREE_CHAIN (list) = NULL_TREE;
4324 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4326 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4328 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4330 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4332 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4334 if (TREE_VALUE (list)
4335 && TREE_VALUE (TREE_VALUE (list))
4336 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4338 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4340 && location <= 0x1000000)
4345 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4347 list = TREE_CHAIN (list);
4350 && TREE_CODE (decl) == FUNCTION_DECL
4351 && DECL_SECTION_NAME (decl) == 0)
4357 mep_comp_type_attributes (const_tree t1, const_tree t2)
4361 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4362 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4371 mep_insert_attributes (tree decl, tree *attributes)
4374 const char *secname = 0;
4375 tree attrib, attrlist;
4378 if (TREE_CODE (decl) == FUNCTION_DECL)
4380 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4382 if (mep_lookup_pragma_disinterrupt (funcname))
4384 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4385 *attributes = chainon (*attributes, attrib);
4389 if (TREE_CODE (decl) != VAR_DECL
4390 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4393 if (TREE_READONLY (decl) && TARGET_DC)
4394 /* -mdc means that const variables default to the near section,
4395 regardless of the size cutoff. */
4398 /* User specified an attribute, so override the default.
4399 Ignore storage attribute of pointed to variable. char __far * x; */
4400 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4402 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4403 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4404 else if (DECL_ATTRIBUTES (decl) && *attributes)
4405 DECL_ATTRIBUTES (decl) = NULL_TREE;
4408 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4409 encoding = mep_attrlist_to_encoding (attrlist, decl);
4410 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4412 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4413 encoding = mep_attrlist_to_encoding (attrlist, decl);
4417 /* This means that the declaration has a specific section
4418 attribute, so we should not apply the default rules. */
4420 if (encoding == 'i' || encoding == 'I')
4422 tree attr = lookup_attribute ("io", attrlist);
4424 && TREE_VALUE (attr)
4425 && TREE_VALUE (TREE_VALUE(attr)))
4427 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4428 static tree previous_value = 0;
4429 static int previous_location = 0;
4430 static tree previous_name = 0;
4432 /* We take advantage of the fact that gcc will reuse the
4433 same tree pointer when applying an attribute to a
4434 list of decls, but produce a new tree for attributes
4435 on separate source lines, even when they're textually
4436 identical. This is the behavior we want. */
4437 if (TREE_VALUE (attr) == previous_value
4438 && location == previous_location)
4440 warning(0, "__io address 0x%x is the same for %qE and %qE",
4441 location, previous_name, DECL_NAME (decl));
4443 previous_name = DECL_NAME (decl);
4444 previous_location = location;
4445 previous_value = TREE_VALUE (attr);
4452 /* Declarations of arrays can change size. Don't trust them. */
4453 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4456 size = int_size_in_bytes (TREE_TYPE (decl));
4458 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4460 if (TREE_PUBLIC (decl)
4461 || DECL_EXTERNAL (decl)
4462 || TREE_STATIC (decl))
4464 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4488 if (size <= mep_based_cutoff && size > 0)
4490 else if (size <= mep_tiny_cutoff && size > 0)
4496 if (mep_const_section && TREE_READONLY (decl))
4498 if (strcmp (mep_const_section, "tiny") == 0)
4500 else if (strcmp (mep_const_section, "near") == 0)
4502 else if (strcmp (mep_const_section, "far") == 0)
4509 if (!mep_multiple_address_regions (*attributes, true)
4510 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4512 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4514 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4515 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4516 and mep_validate_based_tiny. */
4517 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4522 mep_encode_section_info (tree decl, rtx rtl, int first)
4525 const char *oldname;
4526 const char *secname;
4532 tree mep_attributes;
4537 if (TREE_CODE (decl) != VAR_DECL
4538 && TREE_CODE (decl) != FUNCTION_DECL)
4541 rtlname = XEXP (rtl, 0);
4542 if (GET_CODE (rtlname) == SYMBOL_REF)
4543 oldname = XSTR (rtlname, 0);
4544 else if (GET_CODE (rtlname) == MEM
4545 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4546 oldname = XSTR (XEXP (rtlname, 0), 0);
4550 type = TREE_TYPE (decl);
4551 if (type == error_mark_node)
4553 mep_attributes = MEP_ATTRIBUTES (decl);
4555 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
4559 newname = (char *) alloca (strlen (oldname) + 4);
4560 sprintf (newname, "@%c.%s", encoding, oldname);
4561 idp = get_identifier (newname);
4563 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4564 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4565 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4578 maxsize = 0x1000000;
4586 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4588 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4590 (long) int_size_in_bytes (TREE_TYPE (decl)),
4598 mep_strip_name_encoding (const char *sym)
4604 else if (*sym == '@' && sym[2] == '.')
4612 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4613 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4618 switch (TREE_CODE (decl))
4621 if (!TREE_READONLY (decl)
4622 || TREE_SIDE_EFFECTS (decl)
4623 || !DECL_INITIAL (decl)
4624 || (DECL_INITIAL (decl) != error_mark_node
4625 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4629 if (! TREE_CONSTANT (decl))
4637 if (TREE_CODE (decl) == FUNCTION_DECL)
4639 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4641 if (name[0] == '@' && name[2] == '.')
4646 if (flag_function_sections || DECL_ONE_ONLY (decl))
4647 mep_unique_section (decl, 0);
4648 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4650 if (encoding == 'f')
4651 return vftext_section;
4653 return vtext_section;
4655 else if (encoding == 'f')
4656 return ftext_section;
4658 return text_section;
4661 if (TREE_CODE (decl) == VAR_DECL)
4663 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4665 if (name[0] == '@' && name[2] == '.')
4669 return based_section;
4673 return srodata_section;
4674 if (DECL_INITIAL (decl))
4675 return sdata_section;
4676 return tinybss_section;
4680 return frodata_section;
4685 error_at (DECL_SOURCE_LOCATION (decl),
4686 "variable %D of type %<io%> must be uninitialized", decl);
4687 return data_section;
4690 error_at (DECL_SOURCE_LOCATION (decl),
4691 "variable %D of type %<cb%> must be uninitialized", decl);
4692 return data_section;
4697 return readonly_data_section;
4699 return data_section;
4703 mep_unique_section (tree decl, int reloc)
4705 static const char *prefixes[][2] =
4707 { ".text.", ".gnu.linkonce.t." },
4708 { ".rodata.", ".gnu.linkonce.r." },
4709 { ".data.", ".gnu.linkonce.d." },
4710 { ".based.", ".gnu.linkonce.based." },
4711 { ".sdata.", ".gnu.linkonce.s." },
4712 { ".far.", ".gnu.linkonce.far." },
4713 { ".ftext.", ".gnu.linkonce.ft." },
4714 { ".frodata.", ".gnu.linkonce.frd." },
4715 { ".srodata.", ".gnu.linkonce.srd." },
4716 { ".vtext.", ".gnu.linkonce.v." },
4717 { ".vftext.", ".gnu.linkonce.vf." }
4719 int sec = 2; /* .data */
4721 const char *name, *prefix;
4724 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4725 if (DECL_RTL (decl))
4726 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4728 if (TREE_CODE (decl) == FUNCTION_DECL)
4730 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4731 sec = 9; /* .vtext */
4733 sec = 0; /* .text */
4735 else if (decl_readonly_section (decl, reloc))
4736 sec = 1; /* .rodata */
4738 if (name[0] == '@' && name[2] == '.')
4743 sec = 3; /* .based */
4747 sec = 8; /* .srodata */
4749 sec = 4; /* .sdata */
4753 sec = 6; /* .ftext */
4755 sec = 10; /* .vftext */
4757 sec = 7; /* .frodata */
4759 sec = 5; /* .far. */
4765 prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
4766 len = strlen (name) + strlen (prefix);
4767 string = (char *) alloca (len + 1);
4769 sprintf (string, "%s%s", prefix, name);
4771 DECL_SECTION_NAME (decl) = build_string (len, string);
4774 /* Given a decl, a section name, and whether the decl initializer
4775 has relocs, choose attributes for the section. */
4777 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4780 mep_section_type_flags (tree decl, const char *name, int reloc)
4782 unsigned int flags = default_section_type_flags (decl, name, reloc);
4784 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4785 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4786 flags |= SECTION_MEP_VLIW;
4791 /* Switch to an arbitrary section NAME with attributes as specified
4792 by FLAGS. ALIGN specifies any known alignment requirements for
4793 the section; 0 if the default should be used.
4795 Differs from the standard ELF version only in support of VLIW mode. */
4798 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4800 char flagchars[8], *f = flagchars;
4803 if (!(flags & SECTION_DEBUG))
4805 if (flags & SECTION_WRITE)
4807 if (flags & SECTION_CODE)
4809 if (flags & SECTION_SMALL)
4811 if (flags & SECTION_MEP_VLIW)
4815 if (flags & SECTION_BSS)
4820 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4821 name, flagchars, type);
4823 if (flags & SECTION_CODE)
4824 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4829 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4830 int size, int align, int global)
4832 /* We intentionally don't use mep_section_tag() here. */
4834 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4838 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4839 DECL_ATTRIBUTES (decl));
4841 && TREE_VALUE (attr)
4842 && TREE_VALUE (TREE_VALUE(attr)))
4843 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4848 fprintf (stream, "\t.globl\t");
4849 assemble_name (stream, name);
4850 fprintf (stream, "\n");
4852 assemble_name (stream, name);
4853 fprintf (stream, " = %d\n", location);
4856 if (name[0] == '@' && name[2] == '.')
4858 const char *sec = 0;
4862 switch_to_section (based_section);
4866 switch_to_section (tinybss_section);
4870 switch_to_section (farbss_section);
4879 while (align > BITS_PER_UNIT)
4884 name2 = TARGET_STRIP_NAME_ENCODING (name);
4886 fprintf (stream, "\t.globl\t%s\n", name2);
4887 fprintf (stream, "\t.p2align %d\n", p2align);
4888 fprintf (stream, "\t.type\t%s,@object\n", name2);
4889 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4890 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4897 fprintf (stream, "\t.local\t");
4898 assemble_name (stream, name);
4899 fprintf (stream, "\n");
4901 fprintf (stream, "\t.comm\t");
4902 assemble_name (stream, name);
4903 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4909 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4911 rtx addr = XEXP (m_tramp, 0);
4912 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4914 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4915 LCT_NORMAL, VOIDmode, 3,
4918 static_chain, Pmode);
4921 /* Experimental Reorg. */
4924 mep_mentioned_p (rtx in,
4925 rtx reg, /* NULL for mem */
4926 int modes_too) /* if nonzero, modes must match also. */
4934 if (reg && GET_CODE (reg) != REG)
4937 if (GET_CODE (in) == LABEL_REF)
4940 code = GET_CODE (in);
4946 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4952 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4954 return (REGNO (in) == REGNO (reg));
4967 /* Set's source should be read-only. */
4968 if (code == SET && !reg)
4969 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
4971 fmt = GET_RTX_FORMAT (code);
4973 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4978 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4979 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4982 else if (fmt[i] == 'e'
4983 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4989 #define EXPERIMENTAL_REGMOVE_REORG 1
4991 #if EXPERIMENTAL_REGMOVE_REORG
4994 mep_compatible_reg_class (int r1, int r2)
4996 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
4998 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
5004 mep_reorg_regmove (rtx insns)
5006 rtx insn, next, pat, follow, *where;
5007 int count = 0, done = 0, replace, before = 0;
5010 for (insn = insns; insn; insn = NEXT_INSN (insn))
5011 if (GET_CODE (insn) == INSN)
5014 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
5015 set that uses the r2 and r2 dies there. We replace r2 with r1
5016 and see if it's still a valid insn. If so, delete the first set.
5017 Copied from reorg.c. */
5022 for (insn = insns; insn; insn = next)
5024 next = NEXT_INSN (insn);
5025 if (GET_CODE (insn) != INSN)
5027 pat = PATTERN (insn);
5031 if (GET_CODE (pat) == SET
5032 && GET_CODE (SET_SRC (pat)) == REG
5033 && GET_CODE (SET_DEST (pat)) == REG
5034 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
5035 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
5037 follow = next_nonnote_insn (insn);
5039 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
5041 while (follow && GET_CODE (follow) == INSN
5042 && GET_CODE (PATTERN (follow)) == SET
5043 && !dead_or_set_p (follow, SET_SRC (pat))
5044 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
5045 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
5048 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
5049 follow = next_nonnote_insn (follow);
5053 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
5054 if (follow && GET_CODE (follow) == INSN
5055 && GET_CODE (PATTERN (follow)) == SET
5056 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
5058 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5060 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5063 where = & SET_SRC (PATTERN (follow));
5066 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5068 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5071 where = & PATTERN (follow);
5077 /* If so, follow is the corresponding insn */
5084 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5085 for (x = insn; x ;x = NEXT_INSN (x))
5087 print_rtl_single (dump_file, x);
5090 fprintf (dump_file, "\n");
5094 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5098 next = delete_insn (insn);
5101 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5102 print_rtl_single (dump_file, follow);
5112 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5113 fprintf (dump_file, "=====\n");
5119 /* Figure out where to put LABEL, which is the label for a repeat loop.
5120 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5121 the loop ends just before LAST_INSN. If SHARED, insns other than the
5122 "repeat" might use LABEL to jump to the loop's continuation point.
5124 Return the last instruction in the adjusted loop. */
5127 mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
5131 int count = 0, code, icode;
5134 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5135 INSN_UID (last_insn));
5137 /* Set PREV to the last insn in the loop. */
5140 prev = PREV_INSN (prev);
5142 /* Set NEXT to the next insn after the repeat label. */
5147 code = GET_CODE (prev);
5148 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
5153 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
5154 prev = XVECEXP (PATTERN (prev), 0, 1);
5156 /* Other insns that should not be in the last two opcodes. */
5157 icode = recog_memoized (prev);
5159 || icode == CODE_FOR_repeat
5160 || icode == CODE_FOR_erepeat
5161 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5164 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5165 is the second instruction in a VLIW bundle. In that case,
5166 loop again: if the first instruction also satisfies the
5167 conditions above then we will reach here again and put
5168 both of them into the repeat epilogue. Otherwise both
5169 should remain outside. */
5170 if (GET_MODE (prev) != BImode)
5175 print_rtl_single (dump_file, next);
5180 prev = PREV_INSN (prev);
5183 /* See if we're adding the label immediately after the repeat insn.
5184 If so, we need to separate them with a nop. */
5185 prev = prev_real_insn (next);
5187 switch (recog_memoized (prev))
5189 case CODE_FOR_repeat:
5190 case CODE_FOR_erepeat:
5192 fprintf (dump_file, "Adding nop inside loop\n");
5193 emit_insn_before (gen_nop (), next);
5200 /* Insert the label. */
5201 emit_label_before (label, next);
5203 /* Insert the nops. */
5204 if (dump_file && count < 2)
5205 fprintf (dump_file, "Adding %d nop%s\n\n",
5206 2 - count, count == 1 ? "" : "s");
5208 for (; count < 2; count++)
5210 last_insn = emit_insn_after (gen_nop (), last_insn);
5212 emit_insn_before (gen_nop (), last_insn);
5219 mep_emit_doloop (rtx *operands, int is_end)
5223 if (cfun->machine->doloop_tags == 0
5224 || cfun->machine->doloop_tag_from_end == is_end)
5226 cfun->machine->doloop_tags++;
5227 cfun->machine->doloop_tag_from_end = is_end;
5230 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5232 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5234 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5238 /* Code for converting doloop_begins and doloop_ends into valid
5239 MeP instructions. A doloop_begin is just a placeholder:
5241 $count = unspec ($count)
5243 where $count is initially the number of iterations - 1.
5244 doloop_end has the form:
5246 if ($count-- == 0) goto label
5248 The counter variable is private to the doloop insns, nothing else
5249 relies on its value.
5251 There are three cases, in decreasing order of preference:
5253 1. A loop has exactly one doloop_begin and one doloop_end.
5254 The doloop_end branches to the first instruction after
5257 In this case we can replace the doloop_begin with a repeat
5258 instruction and remove the doloop_end. I.e.:
5260 $count1 = unspec ($count1)
5265 if ($count2-- == 0) goto label
5269 repeat $count1,repeat_label
5277 2. As for (1), except there are several doloop_ends. One of them
5278 (call it X) falls through to a label L. All the others fall
5279 through to branches to L.
5281 In this case, we remove X and replace the other doloop_ends
5282 with branches to the repeat label. For example:
5284 $count1 = unspec ($count1)
5287 if ($count2-- == 0) goto label
5290 if ($count3-- == 0) goto label
5295 repeat $count1,repeat_label
5306 3. The fallback case. Replace doloop_begins with:
5310 Replace doloop_ends with the equivalent of:
5313 if ($count == 0) goto label
5315 Note that this might need a scratch register if $count
5316 is stored in memory. */
5318 /* A structure describing one doloop_begin. */
5319 struct mep_doloop_begin {
5320 /* The next doloop_begin with the same tag. */
5321 struct mep_doloop_begin *next;
5323 /* The instruction itself. */
5326 /* The initial counter value. This is known to be a general register. */
5330 /* A structure describing a doloop_end. */
5331 struct mep_doloop_end {
5332 /* The next doloop_end with the same loop tag. */
5333 struct mep_doloop_end *next;
5335 /* The instruction itself. */
5338 /* The first instruction after INSN when the branch isn't taken. */
5341 /* The location of the counter value. Since doloop_end_internal is a
5342 jump instruction, it has to allow the counter to be stored anywhere
5343 (any non-fixed register or memory location). */
5346 /* The target label (the place where the insn branches when the counter
5350 /* A scratch register. Only available when COUNTER isn't stored
5351 in a general register. */
5356 /* One do-while loop. */
5358 /* All the doloop_begins for this loop (in no particular order). */
5359 struct mep_doloop_begin *begin;
5361 /* All the doloop_ends. When there is more than one, arrange things
5362 so that the first one is the most likely to be X in case (2) above. */
5363 struct mep_doloop_end *end;
5367 /* Return true if LOOP can be converted into repeat/repeat_end form
5368 (that is, if it matches cases (1) or (2) above). */
5371 mep_repeat_loop_p (struct mep_doloop *loop)
5373 struct mep_doloop_end *end;
5376 /* There must be exactly one doloop_begin and at least one doloop_end. */
5377 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5380 /* The first doloop_end (X) must branch back to the insn after
5381 the doloop_begin. */
5382 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5385 /* All the other doloop_ends must branch to the same place as X.
5386 When the branch isn't taken, they must jump to the instruction
5388 fallthrough = loop->end->fallthrough;
5389 for (end = loop->end->next; end != 0; end = end->next)
5390 if (end->label != loop->end->label
5391 || !simplejump_p (end->fallthrough)
5392 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5399 /* The main repeat reorg function. See comment above for details. */
5402 mep_reorg_repeat (rtx insns)
5405 struct mep_doloop *loops, *loop;
5406 struct mep_doloop_begin *begin;
5407 struct mep_doloop_end *end;
5409 /* Quick exit if we haven't created any loops. */
5410 if (cfun->machine->doloop_tags == 0)
5413 /* Create an array of mep_doloop structures. */
5414 loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
5415 memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);
5417 /* Search the function for do-while insns and group them by loop tag. */
5418 for (insn = insns; insn; insn = NEXT_INSN (insn))
5420 switch (recog_memoized (insn))
5422 case CODE_FOR_doloop_begin_internal:
5423 insn_extract (insn);
5424 loop = &loops[INTVAL (recog_data.operand[2])];
5426 begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
5427 begin->next = loop->begin;
5429 begin->counter = recog_data.operand[0];
5431 loop->begin = begin;
5434 case CODE_FOR_doloop_end_internal:
5435 insn_extract (insn);
5436 loop = &loops[INTVAL (recog_data.operand[2])];
5438 end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
5440 end->fallthrough = next_real_insn (insn);
5441 end->counter = recog_data.operand[0];
5442 end->label = recog_data.operand[1];
5443 end->scratch = recog_data.operand[3];
5445 /* If this insn falls through to an unconditional jump,
5446 give it a lower priority than the others. */
5447 if (loop->end != 0 && simplejump_p (end->fallthrough))
5449 end->next = loop->end->next;
5450 loop->end->next = end;
5454 end->next = loop->end;
5460 /* Convert the insns for each loop in turn. */
5461 for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
5462 if (mep_repeat_loop_p (loop))
5464 /* Case (1) or (2). */
5465 rtx repeat_label, label_ref;
5467 /* Create a new label for the repeat insn. */
5468 repeat_label = gen_label_rtx ();
5470 /* Replace the doloop_begin with a repeat. */
5471 label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
5472 emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
5474 delete_insn (loop->begin->insn);
5476 /* Insert the repeat label before the first doloop_end.
5477 Fill the gap with nops if there are other doloop_ends. */
5478 mep_insert_repeat_label_last (loop->end->insn, repeat_label,
5479 false, loop->end->next != 0);
5481 /* Emit a repeat_end (to improve the readability of the output). */
5482 emit_insn_before (gen_repeat_end (), loop->end->insn);
5484 /* Delete the first doloop_end. */
5485 delete_insn (loop->end->insn);
5487 /* Replace the others with branches to REPEAT_LABEL. */
5488 for (end = loop->end->next; end != 0; end = end->next)
5490 emit_jump_insn_before (gen_jump (repeat_label), end->insn);
5491 delete_insn (end->insn);
5492 delete_insn (end->fallthrough);
5497 /* Case (3). First replace all the doloop_begins with increment
5499 for (begin = loop->begin; begin != 0; begin = begin->next)
5501 emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
5502 begin->counter, const1_rtx),
5504 delete_insn (begin->insn);
5507 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5508 for (end = loop->end; end != 0; end = end->next)
5514 /* Load the counter value into a general register. */
5516 if (!REG_P (reg) || REGNO (reg) > 15)
5519 emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
5522 /* Decrement the counter. */
5523 emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
5526 /* Copy it back to its original location. */
5527 if (reg != end->counter)
5528 emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));
5530 /* Jump back to the start label. */
5531 insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
5533 JUMP_LABEL (insn) = end->label;
5534 LABEL_NUSES (end->label)++;
5536 /* Emit the whole sequence before the doloop_end. */
5537 insn = get_insns ();
5539 emit_insn_before (insn, end->insn);
5541 /* Delete the doloop_end. */
5542 delete_insn (end->insn);
5549 mep_invertable_branch_p (rtx insn)
5552 enum rtx_code old_code;
5555 set = PATTERN (insn);
5556 if (GET_CODE (set) != SET)
5558 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5560 cond = XEXP (XEXP (set, 1), 0);
5561 old_code = GET_CODE (cond);
5565 PUT_CODE (cond, NE);
5568 PUT_CODE (cond, EQ);
5571 PUT_CODE (cond, GE);
5574 PUT_CODE (cond, LT);
5579 INSN_CODE (insn) = -1;
5580 i = recog_memoized (insn);
5581 PUT_CODE (cond, old_code);
5582 INSN_CODE (insn) = -1;
5587 mep_invert_branch (rtx insn, rtx after)
5589 rtx cond, set, label;
5592 set = PATTERN (insn);
5594 gcc_assert (GET_CODE (set) == SET);
5595 gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);
5597 cond = XEXP (XEXP (set, 1), 0);
5598 switch (GET_CODE (cond))
5601 PUT_CODE (cond, NE);
5604 PUT_CODE (cond, EQ);
5607 PUT_CODE (cond, GE);
5610 PUT_CODE (cond, LT);
5615 label = gen_label_rtx ();
5616 emit_label_after (label, after);
5617 for (i=1; i<=2; i++)
5618 if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
5620 rtx ref = XEXP (XEXP (set, 1), i);
5621 if (LABEL_NUSES (XEXP (ref, 0)) == 1)
5622 delete_insn (XEXP (ref, 0));
5623 XEXP (ref, 0) = label;
5624 LABEL_NUSES (label) ++;
5625 JUMP_LABEL (insn) = label;
5627 INSN_CODE (insn) = -1;
5628 i = recog_memoized (insn);
5629 gcc_assert (i >= 0);
5633 mep_reorg_erepeat (rtx insns)
5635 rtx insn, prev, label_before, l, x;
5638 for (insn = insns; insn; insn = NEXT_INSN (insn))
5640 && ! JUMP_TABLE_DATA_P (insn)
5641 && mep_invertable_branch_p (insn))
5645 fprintf (dump_file, "\n------------------------------\n");
5646 fprintf (dump_file, "erepeat: considering this jump:\n");
5647 print_rtl_single (dump_file, insn);
5649 count = simplejump_p (insn) ? 0 : 1;
5651 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
5653 if (GET_CODE (prev) == CALL_INSN
5654 || BARRIER_P (prev))
5657 if (prev == JUMP_LABEL (insn))
5661 fprintf (dump_file, "found loop top, %d insns\n", count);
5663 if (LABEL_NUSES (prev) == 1)
5664 /* We're the only user, always safe */ ;
5665 else if (LABEL_NUSES (prev) == 2)
5667 /* See if there's a barrier before this label. If
5668 so, we know nobody inside the loop uses it.
5669 But we must be careful to put the erepeat
5670 *after* the label. */
5672 for (barrier = PREV_INSN (prev);
5673 barrier && GET_CODE (barrier) == NOTE;
5674 barrier = PREV_INSN (barrier))
5676 if (barrier && GET_CODE (barrier) != BARRIER)
5681 /* We don't know who else, within or without our loop, uses this */
5683 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5687 /* Generate a label to be used by the erepat insn. */
5688 l = gen_label_rtx ();
5690 /* Insert the erepeat after INSN's target label. */
5691 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5693 emit_insn_after (x, prev);
5695 /* Insert the erepeat label. */
5696 newlast = (mep_insert_repeat_label_last
5697 (insn, l, !simplejump_p (insn), false));
5698 if (simplejump_p (insn))
5700 emit_insn_before (gen_erepeat_end (), insn);
5705 mep_invert_branch (insn, newlast);
5706 emit_insn_after (gen_erepeat_end (), newlast);
5713 /* A label is OK if there is exactly one user, and we
5714 can find that user before the next label. */
5717 if (LABEL_NUSES (prev) == 1)
5719 for (user = PREV_INSN (prev);
5720 user && (INSN_P (user) || GET_CODE (user) == NOTE);
5721 user = PREV_INSN (user))
5722 if (GET_CODE (user) == JUMP_INSN
5723 && JUMP_LABEL (user) == prev)
5725 safe = INSN_UID (user);
5732 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5733 safe, INSN_UID (prev));
5740 label_before = prev;
5745 fprintf (dump_file, "\n==============================\n");
5748 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5749 always do this on its own. */
5752 mep_jmp_return_reorg (rtx insns)
5754 rtx insn, label, ret;
5757 for (insn = insns; insn; insn = NEXT_INSN (insn))
5758 if (simplejump_p (insn))
5760 /* Find the fist real insn the jump jumps to. */
5761 label = ret = JUMP_LABEL (insn);
5763 && (GET_CODE (ret) == NOTE
5764 || GET_CODE (ret) == CODE_LABEL
5765 || GET_CODE (PATTERN (ret)) == USE))
5766 ret = NEXT_INSN (ret);
5770 /* Is it a return? */
5771 ret_code = recog_memoized (ret);
5772 if (ret_code == CODE_FOR_return_internal
5773 || ret_code == CODE_FOR_eh_return_internal)
5775 /* It is. Replace the jump with a return. */
5776 LABEL_NUSES (label) --;
5777 if (LABEL_NUSES (label) == 0)
5778 delete_insn (label);
5779 PATTERN (insn) = copy_rtx (PATTERN (ret));
5780 INSN_CODE (insn) = -1;
5788 mep_reorg_addcombine (rtx insns)
5792 for (i = insns; i; i = NEXT_INSN (i))
5794 && INSN_CODE (i) == CODE_FOR_addsi3
5795 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5796 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5797 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5798 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5802 && INSN_CODE (n) == CODE_FOR_addsi3
5803 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5804 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5805 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5806 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5808 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5809 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5810 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5812 && ic + nc > -32768)
5814 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5815 NEXT_INSN (i) = NEXT_INSN (n);
5817 PREV_INSN (NEXT_INSN (i)) = i;
5823 /* If this insn adjusts the stack, return the adjustment, else return
5826 add_sp_insn_p (rtx insn)
5830 if (! single_set (insn))
5832 pat = PATTERN (insn);
5833 if (GET_CODE (SET_DEST (pat)) != REG)
5835 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5837 if (GET_CODE (SET_SRC (pat)) != PLUS)
5839 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5841 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5843 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5845 return INTVAL (XEXP (SET_SRC (pat), 1));
5848 /* Check for trivial functions that set up an unneeded stack
5851 mep_reorg_noframe (rtx insns)
5853 rtx start_frame_insn;
5854 rtx end_frame_insn = 0;
5858 /* The first insn should be $sp = $sp + N */
5859 while (insns && ! INSN_P (insns))
5860 insns = NEXT_INSN (insns);
5864 sp_adjust = add_sp_insn_p (insns);
5868 start_frame_insn = insns;
5869 sp = SET_DEST (PATTERN (start_frame_insn));
5871 insns = next_real_insn (insns);
5875 rtx next = next_real_insn (insns);
5879 sp2 = add_sp_insn_p (insns);
5884 end_frame_insn = insns;
5885 if (sp2 != -sp_adjust)
5888 else if (mep_mentioned_p (insns, sp, 0))
5890 else if (CALL_P (insns))
5898 delete_insn (start_frame_insn);
5899 delete_insn (end_frame_insn);
5906 rtx insns = get_insns ();
5908 /* We require accurate REG_DEAD notes. */
5909 compute_bb_for_insn ();
5910 df_note_add_problem ();
5913 mep_reorg_addcombine (insns);
5914 #if EXPERIMENTAL_REGMOVE_REORG
5915 /* VLIW packing has been done already, so we can't just delete things. */
5916 if (!mep_vliw_function_p (cfun->decl))
5917 mep_reorg_regmove (insns);
5919 mep_jmp_return_reorg (insns);
5920 mep_bundle_insns (insns);
5921 mep_reorg_repeat (insns);
5924 && !profile_arc_flag
5925 && TARGET_OPT_REPEAT
5926 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
5927 mep_reorg_erepeat (insns);
5929 /* This may delete *insns so make sure it's last. */
5930 mep_reorg_noframe (insns);
5932 df_finish_pass (false);
5937 /*----------------------------------------------------------------------*/
5939 /*----------------------------------------------------------------------*/
5941 /* Element X gives the index into cgen_insns[] of the most general
5942 implementation of intrinsic X. Unimplemented intrinsics are
5944 int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];
5946 /* Element X gives the index of another instruction that is mapped to
5947 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5950 Things are set up so that mep_intrinsic_chain[X] < X. */
5951 static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];
5953 /* The bitmask for the current ISA. The ISA masks are declared
5955 unsigned int mep_selected_isa;
5958 const char *config_name;
5962 static struct mep_config mep_configs[] = {
5963 #ifdef COPROC_SELECTION_TABLE
5964 COPROC_SELECTION_TABLE,
5969 /* Initialize the global intrinsics variables above. */
5972 mep_init_intrinsics (void)
5976 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5977 mep_selected_isa = mep_configs[0].isa;
5978 if (mep_config_string != 0)
5979 for (i = 0; mep_configs[i].config_name; i++)
5980 if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
5982 mep_selected_isa = mep_configs[i].isa;
5986 /* Assume all intrinsics are unavailable. */
5987 for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
5988 mep_intrinsic_insn[i] = -1;
5990 /* Build up the global intrinsic tables. */
5991 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5992 if ((cgen_insns[i].isas & mep_selected_isa) != 0)
5994 mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
5995 mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
5997 /* See whether we can directly move values between one coprocessor
5998 register and another. */
5999 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6000 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
6001 mep_have_copro_copro_moves_p = true;
6003 /* See whether we can directly move values between core and
6004 coprocessor registers. */
6005 mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
6006 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));
6008 mep_have_core_copro_moves_p = 1;
6011 /* Declare all available intrinsic functions. Called once only. */
6013 static tree cp_data_bus_int_type_node;
6014 static tree opaque_vector_type_node;
6015 static tree v8qi_type_node;
6016 static tree v4hi_type_node;
6017 static tree v2si_type_node;
6018 static tree v8uqi_type_node;
6019 static tree v4uhi_type_node;
6020 static tree v2usi_type_node;
6023 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
6027 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
6028 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
6029 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
6030 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
6031 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
6032 case cgen_regnum_operand_type_CHAR: return char_type_node;
6033 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
6034 case cgen_regnum_operand_type_SI: return intSI_type_node;
6035 case cgen_regnum_operand_type_DI: return intDI_type_node;
6036 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
6037 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
6038 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
6039 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
6040 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
6041 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
6042 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
6043 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
6045 return void_type_node;
6050 mep_init_builtins (void)
6054 if (TARGET_64BIT_CR_REGS)
6055 cp_data_bus_int_type_node = long_long_integer_type_node;
6057 cp_data_bus_int_type_node = long_integer_type_node;
6059 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
6060 v8qi_type_node = build_vector_type (intQI_type_node, 8);
6061 v4hi_type_node = build_vector_type (intHI_type_node, 4);
6062 v2si_type_node = build_vector_type (intSI_type_node, 2);
6063 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
6064 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
6065 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
6067 (*lang_hooks.decls.pushdecl)
6068 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
6069 cp_data_bus_int_type_node));
6071 (*lang_hooks.decls.pushdecl)
6072 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
6073 opaque_vector_type_node));
6075 (*lang_hooks.decls.pushdecl)
6076 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
6078 (*lang_hooks.decls.pushdecl)
6079 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
6081 (*lang_hooks.decls.pushdecl)
6082 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
6085 (*lang_hooks.decls.pushdecl)
6086 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
6088 (*lang_hooks.decls.pushdecl)
6089 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
6091 (*lang_hooks.decls.pushdecl)
6092 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
6095 /* Intrinsics like mep_cadd3 are implemented with two groups of
6096 instructions, one which uses UNSPECs and one which uses a specific
6097 rtl code such as PLUS. Instructions in the latter group belong
6098 to GROUP_KNOWN_CODE.
6100 In such cases, the intrinsic will have two entries in the global
6101 tables above. The unspec form is accessed using builtin functions
6102 while the specific form is accessed using the mep_* enum in
6105 The idea is that __cop arithmetic and builtin functions have
6106 different optimization requirements. If mep_cadd3() appears in
6107 the source code, the user will surely except gcc to use cadd3
6108 rather than a work-alike such as add3. However, if the user
6109 just writes "a + b", where a or b are __cop variables, it is
6110 reasonable for gcc to choose a core instruction rather than
6111 cadd3 if it believes that is more optimal. */
6112 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6113 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
6114 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
6116 tree ret_type = void_type_node;
6119 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
6122 if (cgen_insns[i].cret_p)
6123 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
6125 bi_type = build_function_type (ret_type, 0);
6126 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
6128 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
6132 /* Report the unavailablity of the given intrinsic. */
6136 mep_intrinsic_unavailable (int intrinsic)
6138 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6140 if (already_reported_p[intrinsic])
6143 if (mep_intrinsic_insn[intrinsic] < 0)
6144 error ("coprocessor intrinsic %qs is not available in this configuration",
6145 cgen_intrinsics[intrinsic]);
6146 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6147 error ("%qs is not available in VLIW functions",
6148 cgen_intrinsics[intrinsic]);
6150 error ("%qs is not available in non-VLIW functions",
6151 cgen_intrinsics[intrinsic]);
6153 already_reported_p[intrinsic] = 1;
6158 /* See if any implementation of INTRINSIC is available to the
6159 current function. If so, store the most general implementation
6160 in *INSN_PTR and return true. Return false otherwise. */
6163 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6167 i = mep_intrinsic_insn[intrinsic];
6168 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6169 i = mep_intrinsic_chain[i];
6173 *insn_ptr = &cgen_insns[i];
6180 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6181 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6182 try using a work-alike instead. In this case, the returned insn
6183 may have three operands rather than two. */
6186 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6190 if (intrinsic == mep_cmov)
6192 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6193 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6197 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6201 /* If ARG is a register operand that is the same size as MODE, convert it
6202 to MODE using a subreg. Otherwise return ARG as-is. */
6205 mep_convert_arg (enum machine_mode mode, rtx arg)
6207 if (GET_MODE (arg) != mode
6208 && register_operand (arg, VOIDmode)
6209 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6210 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6215 /* Apply regnum conversions to ARG using the description given by REGNUM.
6216 Return the new argument on success and null on failure. */
6219 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6221 if (regnum->count == 0)
6224 if (GET_CODE (arg) != CONST_INT
6226 || INTVAL (arg) >= regnum->count)
6229 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6233 /* Try to make intrinsic argument ARG match the given operand.
6234 UNSIGNED_P is true if the argument has an unsigned type. */
6237 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6240 if (GET_CODE (arg) == CONST_INT)
6242 /* CONST_INTs can only be bound to integer operands. */
6243 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6246 else if (GET_CODE (arg) == CONST_DOUBLE)
6247 /* These hold vector constants. */;
6248 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6250 /* If the argument is a different size from what's expected, we must
6251 have a value in the right mode class in order to convert it. */
6252 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6255 /* If the operand is an rvalue, promote or demote it to match the
6256 operand's size. This might not need extra instructions when
6257 ARG is a register value. */
6258 if (operand->constraint[0] != '=')
6259 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6262 /* If the operand is an lvalue, bind the operand to a new register.
6263 The caller will copy this value into ARG after the main
6264 instruction. By doing this always, we produce slightly more
6266 /* But not for control registers. */
6267 if (operand->constraint[0] == '='
6269 || ! (CONTROL_REGNO_P (REGNO (arg))
6270 || CCR_REGNO_P (REGNO (arg))
6271 || CR_REGNO_P (REGNO (arg)))
6273 return gen_reg_rtx (operand->mode);
6275 /* Try simple mode punning. */
6276 arg = mep_convert_arg (operand->mode, arg);
6277 if (operand->predicate (arg, operand->mode))
6280 /* See if forcing the argument into a register will make it match. */
6281 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6282 arg = force_reg (operand->mode, arg);
6284 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6285 if (operand->predicate (arg, operand->mode))
6292 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6293 function FNNAME. OPERAND describes the operand to which ARGNUM
6297 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6298 int argnum, tree fnname)
6302 if (GET_CODE (arg) == CONST_INT)
6303 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6304 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6306 const struct cgen_immediate_predicate *predicate;
6307 HOST_WIDE_INT argval;
6309 predicate = &cgen_immediate_predicates[i];
6310 argval = INTVAL (arg);
6311 if (argval < predicate->lower || argval >= predicate->upper)
6312 error ("argument %d of %qE must be in the range %d...%d",
6313 argnum, fnname, predicate->lower, predicate->upper - 1);
6315 error ("argument %d of %qE must be a multiple of %d",
6316 argnum, fnname, predicate->align);
6320 error ("incompatible type for argument %d of %qE", argnum, fnname);
6324 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6325 rtx subtarget ATTRIBUTE_UNUSED,
6326 enum machine_mode mode ATTRIBUTE_UNUSED,
6327 int ignore ATTRIBUTE_UNUSED)
6329 rtx pat, op[10], arg[10];
6331 int opindex, unsigned_p[10];
6333 unsigned int n_args;
6335 const struct cgen_insn *cgen_insn;
6336 const struct insn_data *idata;
6337 unsigned int first_arg = 0;
6338 tree return_type = void_type_node;
6339 unsigned int builtin_n_args;
6341 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6342 fnname = DECL_NAME (fndecl);
6344 /* Find out which instruction we should emit. Note that some coprocessor
6345 intrinsics may only be available in VLIW mode, or only in normal mode. */
6346 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6348 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6351 idata = &insn_data[cgen_insn->icode];
6353 builtin_n_args = cgen_insn->num_args;
6355 if (cgen_insn->cret_p)
6357 if (cgen_insn->cret_p > 1)
6360 return_type = mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6364 /* Evaluate each argument. */
6365 n_args = call_expr_nargs (exp);
6367 if (n_args < builtin_n_args)
6369 error ("too few arguments to %qE", fnname);
6372 if (n_args > builtin_n_args)
6374 error ("too many arguments to %qE", fnname);
6378 for (a = first_arg; a < builtin_n_args + first_arg; a++)
6382 args = CALL_EXPR_ARG (exp, a - first_arg);
6387 if (cgen_insn->regnums[a].reference_p)
6389 if (TREE_CODE (value) != ADDR_EXPR)
6392 error ("argument %d of %qE must be an address", a+1, fnname);
6395 value = TREE_OPERAND (value, 0);
6399 /* If the argument has been promoted to int, get the unpromoted
6400 value. This is necessary when sub-int memory values are bound
6401 to reference parameters. */
6402 if (TREE_CODE (value) == NOP_EXPR
6403 && TREE_TYPE (value) == integer_type_node
6404 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6405 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6406 < TYPE_PRECISION (TREE_TYPE (value))))
6407 value = TREE_OPERAND (value, 0);
6409 /* If the argument has been promoted to double, get the unpromoted
6410 SFmode value. This is necessary for FMAX support, for example. */
6411 if (TREE_CODE (value) == NOP_EXPR
6412 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6413 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6414 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6415 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6416 value = TREE_OPERAND (value, 0);
6418 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6419 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6420 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6421 if (cgen_insn->regnums[a].reference_p)
6423 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6424 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6426 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6430 error ("argument %d of %qE must be in the range %d...%d",
6431 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6436 for (a = 0; a < first_arg; a++)
6438 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6441 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6444 /* Convert the arguments into a form suitable for the intrinsic.
6445 Report an error if this isn't possible. */
6446 for (opindex = 0; opindex < idata->n_operands; opindex++)
6448 a = cgen_insn->op_mapping[opindex];
6449 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6450 arg[a], unsigned_p[a]);
6451 if (op[opindex] == 0)
6453 mep_incompatible_arg (&idata->operand[opindex],
6454 arg[a], a + 1 - first_arg, fnname);
6459 /* Emit the instruction. */
6460 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6461 op[5], op[6], op[7], op[8], op[9]);
6463 if (GET_CODE (pat) == SET
6464 && GET_CODE (SET_DEST (pat)) == PC
6465 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6466 emit_jump_insn (pat);
6470 /* Copy lvalues back to their final locations. */
6471 for (opindex = 0; opindex < idata->n_operands; opindex++)
6472 if (idata->operand[opindex].constraint[0] == '=')
6474 a = cgen_insn->op_mapping[opindex];
6477 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6478 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6479 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6483 /* First convert the operand to the right mode, then copy it
6484 into the destination. Doing the conversion as a separate
6485 step (rather than using convert_move) means that we can
6486 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6487 refer to the same register. */
6488 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6489 op[opindex], unsigned_p[a]);
6490 if (!rtx_equal_p (arg[a], op[opindex]))
6491 emit_move_insn (arg[a], op[opindex]);
6496 if (first_arg > 0 && target && target != op[0])
6498 emit_move_insn (target, op[0]);
6505 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6510 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6511 a global register. */
6514 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6522 switch (GET_CODE (x))
6525 if (REG_P (SUBREG_REG (x)))
6527 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6528 && global_regs[subreg_regno (x)])
6536 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6550 /* A non-constant call might use a global register. */
6560 /* Returns nonzero if X mentions a global register. */
6563 global_reg_mentioned_p (rtx x)
6569 if (! RTL_CONST_OR_PURE_CALL_P (x))
6571 x = CALL_INSN_FUNCTION_USAGE (x);
6579 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6581 /* Scheduling hooks for VLIW mode.
6583 Conceptually this is very simple: we have a two-pack architecture
6584 that takes one core insn and one coprocessor insn to make up either
6585 a 32- or 64-bit instruction word (depending on the option bit set in
6586 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6587 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6588 and one 48-bit cop insn or two 32-bit core/cop insns.
6590 In practice, instruction selection will be a bear. Consider in
6591 VL64 mode the following insns
6596 these cannot pack, since the add is a 16-bit core insn and cmov
6597 is a 32-bit cop insn. However,
6602 packs just fine. For good VLIW code generation in VL64 mode, we
6603 will have to have 32-bit alternatives for many of the common core
6604 insns. Not implemented. */
6607 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6611 if (REG_NOTE_KIND (link) != 0)
6613 /* See whether INSN and DEP_INSN are intrinsics that set the same
6614 hard register. If so, it is more important to free up DEP_INSN
6615 than it is to free up INSN.
6617 Note that intrinsics like mep_mulr are handled differently from
6618 the equivalent mep.md patterns. In mep.md, if we don't care
6619 about the value of $lo and $hi, the pattern will just clobber
6620 the registers, not set them. Since clobbers don't count as
6621 output dependencies, it is often possible to reorder two mulrs,
6624 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6625 so any pair of mep_mulr()s will be inter-dependent. We should
6626 therefore give the first mep_mulr() a higher priority. */
6627 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6628 && global_reg_mentioned_p (PATTERN (insn))
6629 && global_reg_mentioned_p (PATTERN (dep_insn)))
6632 /* If the dependence is an anti or output dependence, assume it
6637 /* If we can't recognize the insns, we can't really do anything. */
6638 if (recog_memoized (dep_insn) < 0)
6641 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6642 attribute instead. */
6645 cost_specified = get_attr_latency (dep_insn);
6646 if (cost_specified != 0)
6647 return cost_specified;
6653 /* ??? We don't properly compute the length of a load/store insn,
6654 taking into account the addressing mode. */
6657 mep_issue_rate (void)
6659 return TARGET_IVC2 ? 3 : 2;
6662 /* Return true if function DECL was declared with the vliw attribute. */
6665 mep_vliw_function_p (tree decl)
6667 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6671 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6675 for (i = nready - 1; i >= 0; --i)
6677 rtx insn = ready[i];
6678 if (recog_memoized (insn) >= 0
6679 && get_attr_slot (insn) == slot
6680 && get_attr_length (insn) == length)
6688 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6692 for (i = 0; i < nready; ++i)
6693 if (ready[i] == insn)
6695 for (; i < nready - 1; ++i)
6696 ready[i] = ready[i + 1];
6705 mep_print_sched_insn (FILE *dump, rtx insn)
6707 const char *slots = "none";
6708 const char *name = NULL;
6712 if (GET_CODE (PATTERN (insn)) == SET
6713 || GET_CODE (PATTERN (insn)) == PARALLEL)
6715 switch (get_attr_slots (insn))
6717 case SLOTS_CORE: slots = "core"; break;
6718 case SLOTS_C3: slots = "c3"; break;
6719 case SLOTS_P0: slots = "p0"; break;
6720 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6721 case SLOTS_P0_P1: slots = "p0,p1"; break;
6722 case SLOTS_P0S: slots = "p0s"; break;
6723 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6724 case SLOTS_P1: slots = "p1"; break;
6726 sprintf(buf, "%d", get_attr_slots (insn));
6731 if (GET_CODE (PATTERN (insn)) == USE)
6734 code = INSN_CODE (insn);
6736 name = get_insn_name (code);
6741 "insn %4d %4d %8s %s\n",
6749 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6750 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6751 int *pnready, int clock ATTRIBUTE_UNUSED)
6753 int nready = *pnready;
6754 rtx core_insn, cop_insn;
6757 if (dump && sched_verbose > 1)
6759 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6760 for (i=0; i<nready; i++)
6761 mep_print_sched_insn (dump, ready[i]);
6762 fprintf (dump, "\n");
6765 if (!mep_vliw_function_p (cfun->decl))
6770 /* IVC2 uses a DFA to determine what's ready and what's not. */
6774 /* We can issue either a core or coprocessor instruction.
6775 Look for a matched pair of insns to reorder. If we don't
6776 find any, don't second-guess the scheduler's priorities. */
6778 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6779 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6780 TARGET_OPT_VL64 ? 6 : 2)))
6782 else if (TARGET_OPT_VL64
6783 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6784 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6787 /* We didn't find a pair. Issue the single insn at the head
6788 of the ready list. */
6791 /* Reorder the two insns first. */
6792 mep_move_ready_insn (ready, nready, core_insn);
6793 mep_move_ready_insn (ready, nready - 1, cop_insn);
6797 /* A for_each_rtx callback. Return true if *X is a register that is
6798 set by insn PREV. */
6801 mep_store_find_set (rtx *x, void *prev)
6803 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6806 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6807 not the containing insn. */
6810 mep_store_data_bypass_1 (rtx prev, rtx pat)
6812 /* Cope with intrinsics like swcpa. */
6813 if (GET_CODE (pat) == PARALLEL)
6817 for (i = 0; i < XVECLEN (pat, 0); i++)
6818 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6824 /* Check for some sort of store. */
6825 if (GET_CODE (pat) != SET
6826 || GET_CODE (SET_DEST (pat)) != MEM)
6829 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6830 The first operand to the unspec is the store data and the other operands
6831 are used to calculate the address. */
6832 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6837 src = SET_SRC (pat);
6838 for (i = 1; i < XVECLEN (src, 0); i++)
6839 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6845 /* Otherwise just check that PREV doesn't modify any register mentioned
6846 in the memory destination. */
6847 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6850 /* Return true if INSN is a store instruction and if the store address
6851 has no true dependence on PREV. */
6854 mep_store_data_bypass_p (rtx prev, rtx insn)
6856 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6859 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6860 is a register other than LO or HI and if PREV sets *X. */
6863 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6866 && REGNO (*x) != LO_REGNO
6867 && REGNO (*x) != HI_REGNO
6868 && reg_set_p (*x, (const_rtx) prev));
6871 /* Return true if, apart from HI/LO, there are no true dependencies
6872 between multiplication instructions PREV and INSN. */
6875 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6879 pat = PATTERN (insn);
6880 if (GET_CODE (pat) == PARALLEL)
6881 pat = XVECEXP (pat, 0, 0);
6882 return (GET_CODE (pat) == SET
6883 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6886 /* Return true if INSN is an ldc instruction that issues to the
6887 MeP-h1 integer pipeline. This is true for instructions that
6888 read from PSW, LP, SAR, HI and LO. */
6891 mep_ipipe_ldc_p (rtx insn)
6895 pat = PATTERN (insn);
6897 /* Cope with instrinsics that set both a hard register and its shadow.
6898 The set of the hard register comes first. */
6899 if (GET_CODE (pat) == PARALLEL)
6900 pat = XVECEXP (pat, 0, 0);
6902 if (GET_CODE (pat) == SET)
6904 src = SET_SRC (pat);
6906 /* Cope with intrinsics. The first operand to the unspec is
6907 the source register. */
6908 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6909 src = XVECEXP (src, 0, 0);
6912 switch (REGNO (src))
6925 /* Create a VLIW bundle from core instruction CORE and coprocessor
6926 instruction COP. COP always satisfies INSN_P, but CORE can be
6927 either a new pattern or an existing instruction.
6929 Emit the bundle in place of COP and return it. */
6932 mep_make_bundle (rtx core, rtx cop)
6936 /* If CORE is an existing instruction, remove it, otherwise put
6937 the new pattern in an INSN harness. */
6941 core = make_insn_raw (core);
6943 /* Generate the bundle sequence and replace COP with it. */
6944 insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6945 insn = emit_insn_after (insn, cop);
6948 /* Set up the links of the insns inside the SEQUENCE. */
6949 PREV_INSN (core) = PREV_INSN (insn);
6950 NEXT_INSN (core) = cop;
6951 PREV_INSN (cop) = core;
6952 NEXT_INSN (cop) = NEXT_INSN (insn);
6954 /* Set the VLIW flag for the coprocessor instruction. */
6955 PUT_MODE (core, VOIDmode);
6956 PUT_MODE (cop, BImode);
6958 /* Derive a location for the bundle. Individual instructions cannot
6959 have their own location because there can be no assembler labels
6960 between CORE and COP. */
6961 INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
6962 INSN_LOCATOR (core) = 0;
6963 INSN_LOCATOR (cop) = 0;
6968 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6971 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6973 rtx * pinsn = (rtx *) data;
6975 if (*pinsn && reg_mentioned_p (x, *pinsn))
6979 /* Return true if anything in insn X is (anti,output,true) dependent on
6980 anything in insn Y. */
6983 mep_insn_dependent_p (rtx x, rtx y)
6987 gcc_assert (INSN_P (x));
6988 gcc_assert (INSN_P (y));
6991 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6992 if (tmp == NULL_RTX)
6996 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6997 if (tmp == NULL_RTX)
7004 core_insn_p (rtx insn)
7006 if (GET_CODE (PATTERN (insn)) == USE)
7008 if (get_attr_slot (insn) == SLOT_CORE)
7013 /* Mark coprocessor instructions that can be bundled together with
7014 the immediately preceeding core instruction. This is later used
7015 to emit the "+" that tells the assembler to create a VLIW insn.
7017 For unbundled insns, the assembler will automatically add coprocessor
7018 nops, and 16-bit core nops. Due to an apparent oversight in the
7019 spec, the assembler will _not_ automatically add 32-bit core nops,
7020 so we have to emit those here.
7022 Called from mep_insn_reorg. */
7025 mep_bundle_insns (rtx insns)
7027 rtx insn, last = NULL_RTX, first = NULL_RTX;
7028 int saw_scheduling = 0;
7030 /* Only do bundling if we're in vliw mode. */
7031 if (!mep_vliw_function_p (cfun->decl))
7034 /* The first insn in a bundle are TImode, the remainder are
7035 VOIDmode. After this function, the first has VOIDmode and the
7036 rest have BImode. */
7038 /* Note: this doesn't appear to be true for JUMP_INSNs. */
7040 /* First, move any NOTEs that are within a bundle, to the beginning
7042 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7044 if (NOTE_P (insn) && first)
7045 /* Don't clear FIRST. */;
7047 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
7050 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
7054 /* INSN is part of a bundle; FIRST is the first insn in that
7055 bundle. Move all intervening notes out of the bundle.
7056 In addition, since the debug pass may insert a label
7057 whenever the current line changes, set the location info
7058 for INSN to match FIRST. */
7060 INSN_LOCATOR (insn) = INSN_LOCATOR (first);
7062 note = PREV_INSN (insn);
7063 while (note && note != first)
7065 prev = PREV_INSN (note);
7069 /* Remove NOTE from here... */
7070 PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
7071 NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
7072 /* ...and put it in here. */
7073 NEXT_INSN (note) = first;
7074 PREV_INSN (note) = PREV_INSN (first);
7075 NEXT_INSN (PREV_INSN (note)) = note;
7076 PREV_INSN (NEXT_INSN (note)) = note;
7083 else if (!NONJUMP_INSN_P (insn))
7087 /* Now fix up the bundles. */
7088 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7093 if (!NONJUMP_INSN_P (insn))
7099 /* If we're not optimizing enough, there won't be scheduling
7100 info. We detect that here. */
7101 if (GET_MODE (insn) == TImode)
7103 if (!saw_scheduling)
7108 rtx core_insn = NULL_RTX;
7110 /* IVC2 slots are scheduled by DFA, so we just accept
7111 whatever the scheduler gives us. However, we must make
7112 sure the core insn (if any) is the first in the bundle.
7113 The IVC2 assembler can insert whatever NOPs are needed,
7114 and allows a COP insn to be first. */
7116 if (NONJUMP_INSN_P (insn)
7117 && GET_CODE (PATTERN (insn)) != USE
7118 && GET_MODE (insn) == TImode)
7122 && GET_MODE (NEXT_INSN (last)) == VOIDmode
7123 && NONJUMP_INSN_P (NEXT_INSN (last));
7124 last = NEXT_INSN (last))
7126 if (core_insn_p (last))
7129 if (core_insn_p (last))
7132 if (core_insn && core_insn != insn)
7134 /* Swap core insn to first in the bundle. */
7136 /* Remove core insn. */
7137 if (PREV_INSN (core_insn))
7138 NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7139 if (NEXT_INSN (core_insn))
7140 PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7142 /* Re-insert core insn. */
7143 PREV_INSN (core_insn) = PREV_INSN (insn);
7144 NEXT_INSN (core_insn) = insn;
7146 if (PREV_INSN (core_insn))
7147 NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7148 PREV_INSN (insn) = core_insn;
7150 PUT_MODE (core_insn, TImode);
7151 PUT_MODE (insn, VOIDmode);
7155 /* The first insn has TImode, the rest have VOIDmode */
7156 if (GET_MODE (insn) == TImode)
7157 PUT_MODE (insn, VOIDmode);
7159 PUT_MODE (insn, BImode);
7163 PUT_MODE (insn, VOIDmode);
7164 if (recog_memoized (insn) >= 0
7165 && get_attr_slot (insn) == SLOT_COP)
7167 if (GET_CODE (insn) == JUMP_INSN
7169 || recog_memoized (last) < 0
7170 || get_attr_slot (last) != SLOT_CORE
7171 || (get_attr_length (insn)
7172 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7173 || mep_insn_dependent_p (insn, last))
7175 switch (get_attr_length (insn))
7180 insn = mep_make_bundle (gen_nop (), insn);
7183 if (TARGET_OPT_VL64)
7184 insn = mep_make_bundle (gen_nop32 (), insn);
7187 if (TARGET_OPT_VL64)
7188 error ("2 byte cop instructions are"
7189 " not allowed in 64-bit VLIW mode");
7191 insn = mep_make_bundle (gen_nop (), insn);
7194 error ("unexpected %d byte cop instruction",
7195 get_attr_length (insn));
7200 insn = mep_make_bundle (last, insn);
7208 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7209 Return true on success. This function can fail if the intrinsic
7210 is unavailable or if the operands don't satisfy their predicates. */
7213 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7215 const struct cgen_insn *cgen_insn;
7216 const struct insn_data *idata;
7220 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7223 idata = &insn_data[cgen_insn->icode];
7224 for (i = 0; i < idata->n_operands; i++)
7226 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7227 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7231 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7232 newop[3], newop[4], newop[5],
7233 newop[6], newop[7], newop[8]));
7239 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7240 OPERANDS[0]. Report an error if the instruction could not
7241 be synthesized. OPERANDS[1] is a register_operand. For sign
7242 and zero extensions, it may be smaller than SImode. */
7245 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7246 rtx * operands ATTRIBUTE_UNUSED)
7252 /* Likewise, but apply a binary operation to OPERANDS[1] and
7253 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7254 can be a general_operand.
7256 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7257 third operand. REG and REG3 take register operands only. */
7260 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7261 int ATTRIBUTE_UNUSED immediate3,
7262 int ATTRIBUTE_UNUSED reg,
7263 int ATTRIBUTE_UNUSED reg3,
7264 rtx * operands ATTRIBUTE_UNUSED)
7270 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
7275 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7277 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7284 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7288 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7290 : COSTS_N_INSNS (2));
7297 mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
7303 mep_handle_option (size_t code,
7304 const char *arg ATTRIBUTE_UNUSED,
7305 int value ATTRIBUTE_UNUSED)
7312 target_flags |= MEP_ALL_OPTS;
7316 target_flags &= ~ MEP_ALL_OPTS;
7320 target_flags |= MASK_COP;
7321 target_flags |= MASK_64BIT_CR_REGS;
7325 option_mtiny_specified = 1;
7328 target_flags |= MASK_COP;
7329 target_flags |= MASK_64BIT_CR_REGS;
7330 target_flags |= MASK_VLIW;
7331 target_flags |= MASK_OPT_VL64;
7332 target_flags |= MASK_IVC2;
7334 for (i=0; i<32; i++)
7335 fixed_regs[i+48] = 0;
7336 for (i=0; i<32; i++)
7337 call_used_regs[i+48] = 1;
7339 call_used_regs[i+48] = 0;
7341 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7378 mep_asm_init_sections (void)
7381 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7382 "\t.section .based,\"aw\"");
7385 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7386 "\t.section .sbss,\"aw\"");
7389 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7390 "\t.section .sdata,\"aw\",@progbits");
7393 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7394 "\t.section .far,\"aw\"");
7397 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7398 "\t.section .farbss,\"aw\"");
7401 = get_unnamed_section (0, output_section_asm_op,
7402 "\t.section .frodata,\"a\"");
7405 = get_unnamed_section (0, output_section_asm_op,
7406 "\t.section .srodata,\"a\"");
7409 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7410 "\t.section .vtext,\"axv\"\n\t.vliw");
7413 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7414 "\t.section .vftext,\"axv\"\n\t.vliw");
7417 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7418 "\t.section .ftext,\"ax\"\n\t.core");