1 /* Target Code for R8C/M16C/M32C
2 Copyright (C) 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Red Hat.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
46 #include "target-def.h"
48 #include "langhooks.h"
54 /* Used by m32c_pushm_popm. */
62 static bool m32c_function_needs_enter (void);
63 static tree interrupt_handler (tree *, tree, tree, int, bool *);
64 static tree function_vector_handler (tree *, tree, tree, int, bool *);
65 static int interrupt_p (tree node);
66 static int bank_switch_p (tree node);
67 static int fast_interrupt_p (tree node);
68 static int interrupt_p (tree node);
69 static bool m32c_asm_integer (rtx, unsigned int, int);
70 static int m32c_comp_type_attributes (const_tree, const_tree);
71 static bool m32c_fixed_condition_code_regs (unsigned int *, unsigned int *);
72 static struct machine_function *m32c_init_machine_status (void);
73 static void m32c_insert_attributes (tree, tree *);
74 static bool m32c_legitimate_address_p (enum machine_mode, rtx, bool);
75 static bool m32c_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
77 static bool m32c_promote_prototypes (const_tree);
78 static int m32c_pushm_popm (Push_Pop_Type);
79 static bool m32c_strict_argument_naming (CUMULATIVE_ARGS *);
80 static rtx m32c_struct_value_rtx (tree, int);
81 static rtx m32c_subreg (enum machine_mode, rtx, enum machine_mode, int);
82 static int need_to_save (int);
83 static rtx m32c_function_value (const_tree, const_tree, bool);
84 static rtx m32c_libcall_value (enum machine_mode, const_rtx);
86 int current_function_special_page_vector (rtx);
88 #define SYMBOL_FLAG_FUNCVEC_FUNCTION (SYMBOL_FLAG_MACH_DEP << 0)
90 #define streq(a,b) (strcmp ((a), (b)) == 0)
92 /* Internal support routines */
94 /* Debugging statements are tagged with DEBUG0 only so that they can
95 be easily enabled individually, by replacing the '0' with '1' as
101 /* This is needed by some of the commented-out debug statements
103 static char const *class_names[LIM_REG_CLASSES] = REG_CLASS_NAMES;
105 static int class_contents[LIM_REG_CLASSES][1] = REG_CLASS_CONTENTS;
107 /* These are all to support encode_pattern(). */
108 static char pattern[30], *patternp;
109 static GTY(()) rtx patternr[30];
110 #define RTX_IS(x) (streq (pattern, x))
112 /* Some macros to simplify the logic throughout this file. */
113 #define IS_MEM_REGNO(regno) ((regno) >= MEM0_REGNO && (regno) <= MEM7_REGNO)
114 #define IS_MEM_REG(rtx) (GET_CODE (rtx) == REG && IS_MEM_REGNO (REGNO (rtx)))
116 #define IS_CR_REGNO(regno) ((regno) >= SB_REGNO && (regno) <= PC_REGNO)
117 #define IS_CR_REG(rtx) (GET_CODE (rtx) == REG && IS_CR_REGNO (REGNO (rtx)))
119 /* We do most RTX matching by converting the RTX into a string, and
120 using string compares. This vastly simplifies the logic in many of
121 the functions in this file.
123 On exit, pattern[] has the encoded string (use RTX_IS("...") to
124 compare it) and patternr[] has pointers to the nodes in the RTX
125 corresponding to each character in the encoded string. The latter
126 is mostly used by print_operand().
128 Unrecognized patterns have '?' in them; this shows up when the
129 assembler complains about syntax errors.
133 encode_pattern_1 (rtx x)
137 if (patternp == pattern + sizeof (pattern) - 2)
143 patternr[patternp - pattern] = x;
145 switch (GET_CODE (x))
151 if (GET_MODE_SIZE (GET_MODE (x)) !=
152 GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
154 encode_pattern_1 (XEXP (x, 0));
159 encode_pattern_1 (XEXP (x, 0));
163 encode_pattern_1 (XEXP (x, 0));
164 encode_pattern_1 (XEXP (x, 1));
168 encode_pattern_1 (XEXP (x, 0));
172 encode_pattern_1 (XEXP (x, 0));
176 encode_pattern_1 (XEXP (x, 0));
177 encode_pattern_1 (XEXP (x, 1));
181 encode_pattern_1 (XEXP (x, 0));
198 *patternp++ = '0' + XCINT (x, 1, UNSPEC);
199 for (i = 0; i < XVECLEN (x, 0); i++)
200 encode_pattern_1 (XVECEXP (x, 0, i));
207 for (i = 0; i < XVECLEN (x, 0); i++)
208 encode_pattern_1 (XVECEXP (x, 0, i));
212 encode_pattern_1 (XEXP (x, 0));
214 encode_pattern_1 (XEXP (x, 1));
219 fprintf (stderr, "can't encode pattern %s\n",
220 GET_RTX_NAME (GET_CODE (x)));
229 encode_pattern (rtx x)
232 encode_pattern_1 (x);
236 /* Since register names indicate the mode they're used in, we need a
237 way to determine which name to refer to the register with. Called
238 by print_operand(). */
241 reg_name_with_mode (int regno, enum machine_mode mode)
243 int mlen = GET_MODE_SIZE (mode);
244 if (regno == R0_REGNO && mlen == 1)
246 if (regno == R0_REGNO && (mlen == 3 || mlen == 4))
248 if (regno == R0_REGNO && mlen == 6)
250 if (regno == R0_REGNO && mlen == 8)
252 if (regno == R1_REGNO && mlen == 1)
254 if (regno == R1_REGNO && (mlen == 3 || mlen == 4))
256 if (regno == A0_REGNO && TARGET_A16 && (mlen == 3 || mlen == 4))
258 return reg_names[regno];
261 /* How many bytes a register uses on stack when it's pushed. We need
262 to know this because the push opcode needs to explicitly indicate
263 the size of the register, even though the name of the register
264 already tells it that. Used by m32c_output_reg_{push,pop}, which
265 is only used through calls to ASM_OUTPUT_REG_{PUSH,POP}. */
268 reg_push_size (int regno)
293 static int *class_sizes = 0;
295 /* Given two register classes, find the largest intersection between
296 them. If there is no intersection, return RETURNED_IF_EMPTY
299 reduce_class (int original_class, int limiting_class, int returned_if_empty)
301 int cc = class_contents[original_class][0];
302 int i, best = NO_REGS;
305 if (original_class == limiting_class)
306 return original_class;
311 class_sizes = (int *) xmalloc (LIM_REG_CLASSES * sizeof (int));
312 for (i = 0; i < LIM_REG_CLASSES; i++)
315 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
316 if (class_contents[i][0] & (1 << r))
321 cc &= class_contents[limiting_class][0];
322 for (i = 0; i < LIM_REG_CLASSES; i++)
324 int ic = class_contents[i][0];
327 if (best_size < class_sizes[i])
330 best_size = class_sizes[i];
335 return returned_if_empty;
339 /* Returns TRUE If there are any registers that exist in both register
342 classes_intersect (int class1, int class2)
344 return class_contents[class1][0] & class_contents[class2][0];
347 /* Used by m32c_register_move_cost to determine if a move is
348 impossibly expensive. */
350 class_can_hold_mode (int rclass, enum machine_mode mode)
352 /* Cache the results: 0=untested 1=no 2=yes */
353 static char results[LIM_REG_CLASSES][MAX_MACHINE_MODE];
354 if (results[rclass][mode] == 0)
357 results[rclass][mode] = 1;
358 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
359 if (class_contents[rclass][0] & (1 << r)
360 && HARD_REGNO_MODE_OK (r, mode))
363 n = HARD_REGNO_NREGS (r, mode);
364 for (i = 1; i < n; i++)
365 if (!(class_contents[rclass][0] & (1 << (r + i))))
369 results[rclass][mode] = 2;
375 fprintf (stderr, "class %s can hold %s? %s\n",
376 class_names[rclass], mode_name[mode],
377 (results[rclass][mode] == 2) ? "yes" : "no");
379 return results[rclass][mode] == 2;
382 /* Run-time Target Specification. */
384 /* Memregs are memory locations that gcc treats like general
385 registers, as there are a limited number of true registers and the
386 m32c families can use memory in most places that registers can be
389 However, since memory accesses are more expensive than registers,
390 we allow the user to limit the number of memregs available, in
391 order to try to persuade gcc to try harder to use real registers.
393 Memregs are provided by m32c-lib1.S.
396 int target_memregs = 16;
397 static bool target_memregs_set = FALSE;
398 int ok_to_change_target_memregs = TRUE;
400 #undef TARGET_HANDLE_OPTION
401 #define TARGET_HANDLE_OPTION m32c_handle_option
403 m32c_handle_option (size_t code,
404 const char *arg ATTRIBUTE_UNUSED,
405 int value ATTRIBUTE_UNUSED)
407 if (code == OPT_memregs_)
409 target_memregs_set = TRUE;
410 target_memregs = atoi (arg);
415 /* Implements OVERRIDE_OPTIONS. We limit memregs to 0..16, and
416 provide a default. */
418 m32c_override_options (void)
420 if (target_memregs_set)
422 if (target_memregs < 0 || target_memregs > 16)
423 error ("invalid target memregs value '%d'", target_memregs);
432 /* Defining data structures for per-function information */
434 /* The usual; we set up our machine_function data. */
435 static struct machine_function *
436 m32c_init_machine_status (void)
438 struct machine_function *machine;
440 (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
445 /* Implements INIT_EXPANDERS. We just set up to call the above
448 m32c_init_expanders (void)
450 init_machine_status = m32c_init_machine_status;
455 /* Register Basics */
457 /* Basic Characteristics of Registers */
459 /* Whether a mode fits in a register is complex enough to warrant a
468 } nregs_table[FIRST_PSEUDO_REGISTER] =
470 { 1, 1, 2, 2, 4 }, /* r0 */
471 { 0, 1, 0, 0, 0 }, /* r2 */
472 { 1, 1, 2, 2, 0 }, /* r1 */
473 { 0, 1, 0, 0, 0 }, /* r3 */
474 { 0, 1, 1, 0, 0 }, /* a0 */
475 { 0, 1, 1, 0, 0 }, /* a1 */
476 { 0, 1, 1, 0, 0 }, /* sb */
477 { 0, 1, 1, 0, 0 }, /* fb */
478 { 0, 1, 1, 0, 0 }, /* sp */
479 { 1, 1, 1, 0, 0 }, /* pc */
480 { 0, 0, 0, 0, 0 }, /* fl */
481 { 1, 1, 1, 0, 0 }, /* ap */
482 { 1, 1, 2, 2, 4 }, /* mem0 */
483 { 1, 1, 2, 2, 4 }, /* mem1 */
484 { 1, 1, 2, 2, 4 }, /* mem2 */
485 { 1, 1, 2, 2, 4 }, /* mem3 */
486 { 1, 1, 2, 2, 4 }, /* mem4 */
487 { 1, 1, 2, 2, 0 }, /* mem5 */
488 { 1, 1, 2, 2, 0 }, /* mem6 */
489 { 1, 1, 0, 0, 0 }, /* mem7 */
492 /* Implements CONDITIONAL_REGISTER_USAGE. We adjust the number of
493 available memregs, and select which registers need to be preserved
494 across calls based on the chip family. */
497 m32c_conditional_register_usage (void)
501 if (0 <= target_memregs && target_memregs <= 16)
503 /* The command line option is bytes, but our "registers" are
505 for (i = (target_memregs+1)/2; i < 8; i++)
507 fixed_regs[MEM0_REGNO + i] = 1;
508 CLEAR_HARD_REG_BIT (reg_class_contents[MEM_REGS], MEM0_REGNO + i);
512 /* M32CM and M32C preserve more registers across function calls. */
515 call_used_regs[R1_REGNO] = 0;
516 call_used_regs[R2_REGNO] = 0;
517 call_used_regs[R3_REGNO] = 0;
518 call_used_regs[A0_REGNO] = 0;
519 call_used_regs[A1_REGNO] = 0;
523 /* How Values Fit in Registers */
525 /* Implements HARD_REGNO_NREGS. This is complicated by the fact that
526 different registers are different sizes from each other, *and* may
527 be different sizes in different chip families. */
529 m32c_hard_regno_nregs_1 (int regno, enum machine_mode mode)
531 if (regno == FLG_REGNO && mode == CCmode)
533 if (regno >= FIRST_PSEUDO_REGISTER)
534 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
536 if (regno >= MEM0_REGNO && regno <= MEM7_REGNO)
537 return (GET_MODE_SIZE (mode) + 1) / 2;
539 if (GET_MODE_SIZE (mode) <= 1)
540 return nregs_table[regno].qi_regs;
541 if (GET_MODE_SIZE (mode) <= 2)
542 return nregs_table[regno].hi_regs;
543 if (regno == A0_REGNO && mode == PSImode && TARGET_A16)
545 if ((GET_MODE_SIZE (mode) <= 3 || mode == PSImode) && TARGET_A24)
546 return nregs_table[regno].pi_regs;
547 if (GET_MODE_SIZE (mode) <= 4)
548 return nregs_table[regno].si_regs;
549 if (GET_MODE_SIZE (mode) <= 8)
550 return nregs_table[regno].di_regs;
555 m32c_hard_regno_nregs (int regno, enum machine_mode mode)
557 int rv = m32c_hard_regno_nregs_1 (regno, mode);
561 /* Implements HARD_REGNO_MODE_OK. The above function does the work
562 already; just test its return value. */
564 m32c_hard_regno_ok (int regno, enum machine_mode mode)
566 return m32c_hard_regno_nregs_1 (regno, mode) != 0;
569 /* Implements MODES_TIEABLE_P. In general, modes aren't tieable since
570 registers are all different sizes. However, since most modes are
571 bigger than our registers anyway, it's easier to implement this
572 function that way, leaving QImode as the only unique case. */
574 m32c_modes_tieable_p (enum machine_mode m1, enum machine_mode m2)
576 if (GET_MODE_SIZE (m1) == GET_MODE_SIZE (m2))
580 if (m1 == QImode || m2 == QImode)
587 /* Register Classes */
589 /* Implements REGNO_REG_CLASS. */
591 m32c_regno_reg_class (int regno)
615 if (IS_MEM_REGNO (regno))
621 /* Implements REG_CLASS_FROM_CONSTRAINT. Note that some constraints only match
622 for certain chip families. */
624 m32c_reg_class_from_constraint (char c ATTRIBUTE_UNUSED, const char *s)
626 if (memcmp (s, "Rsp", 3) == 0)
628 if (memcmp (s, "Rfb", 3) == 0)
630 if (memcmp (s, "Rsb", 3) == 0)
632 if (memcmp (s, "Rcr", 3) == 0)
633 return TARGET_A16 ? CR_REGS : NO_REGS;
634 if (memcmp (s, "Rcl", 3) == 0)
635 return TARGET_A24 ? CR_REGS : NO_REGS;
636 if (memcmp (s, "R0w", 3) == 0)
638 if (memcmp (s, "R1w", 3) == 0)
640 if (memcmp (s, "R2w", 3) == 0)
642 if (memcmp (s, "R3w", 3) == 0)
644 if (memcmp (s, "R02", 3) == 0)
646 if (memcmp (s, "R13", 3) == 0)
648 if (memcmp (s, "R03", 3) == 0)
650 if (memcmp (s, "Rdi", 3) == 0)
652 if (memcmp (s, "Rhl", 3) == 0)
654 if (memcmp (s, "R23", 3) == 0)
656 if (memcmp (s, "Ra0", 3) == 0)
658 if (memcmp (s, "Ra1", 3) == 0)
660 if (memcmp (s, "Raa", 3) == 0)
662 if (memcmp (s, "Raw", 3) == 0)
663 return TARGET_A16 ? A_REGS : NO_REGS;
664 if (memcmp (s, "Ral", 3) == 0)
665 return TARGET_A24 ? A_REGS : NO_REGS;
666 if (memcmp (s, "Rqi", 3) == 0)
668 if (memcmp (s, "Rad", 3) == 0)
670 if (memcmp (s, "Rsi", 3) == 0)
672 if (memcmp (s, "Rhi", 3) == 0)
674 if (memcmp (s, "Rhc", 3) == 0)
676 if (memcmp (s, "Rra", 3) == 0)
678 if (memcmp (s, "Rfl", 3) == 0)
680 if (memcmp (s, "Rmm", 3) == 0)
682 if (fixed_regs[MEM0_REGNO])
687 /* PSImode registers - i.e. whatever can hold a pointer. */
688 if (memcmp (s, "Rpi", 3) == 0)
693 return RA_REGS; /* r2r0 and r3r1 can hold pointers. */
696 /* We handle this one as an EXTRA_CONSTRAINT. */
697 if (memcmp (s, "Rpa", 3) == 0)
702 fprintf(stderr, "unrecognized R constraint: %.3s\n", s);
709 /* Implements REGNO_OK_FOR_BASE_P. */
711 m32c_regno_ok_for_base_p (int regno)
713 if (regno == A0_REGNO
714 || regno == A1_REGNO || regno >= FIRST_PSEUDO_REGISTER)
719 #define DEBUG_RELOAD 0
721 /* Implements PREFERRED_RELOAD_CLASS. In general, prefer general
722 registers of the appropriate size. */
724 m32c_preferred_reload_class (rtx x, int rclass)
726 int newclass = rclass;
729 fprintf (stderr, "\npreferred_reload_class for %s is ",
730 class_names[rclass]);
732 if (rclass == NO_REGS)
733 rclass = GET_MODE (x) == QImode ? HL_REGS : R03_REGS;
735 if (classes_intersect (rclass, CR_REGS))
737 switch (GET_MODE (x))
743 /* newclass = HI_REGS; */
748 else if (newclass == QI_REGS && GET_MODE_SIZE (GET_MODE (x)) > 2)
750 else if (GET_MODE_SIZE (GET_MODE (x)) > 4
751 && ~class_contents[rclass][0] & 0x000f)
754 rclass = reduce_class (rclass, newclass, rclass);
756 if (GET_MODE (x) == QImode)
757 rclass = reduce_class (rclass, HL_REGS, rclass);
760 fprintf (stderr, "%s\n", class_names[rclass]);
763 if (GET_CODE (x) == MEM
764 && GET_CODE (XEXP (x, 0)) == PLUS
765 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
766 fprintf (stderr, "Glorm!\n");
771 /* Implements PREFERRED_OUTPUT_RELOAD_CLASS. */
773 m32c_preferred_output_reload_class (rtx x, int rclass)
775 return m32c_preferred_reload_class (x, rclass);
778 /* Implements LIMIT_RELOAD_CLASS. We basically want to avoid using
779 address registers for reloads since they're needed for address
782 m32c_limit_reload_class (enum machine_mode mode, int rclass)
785 fprintf (stderr, "limit_reload_class for %s: %s ->",
786 mode_name[mode], class_names[rclass]);
790 rclass = reduce_class (rclass, HL_REGS, rclass);
791 else if (mode == HImode)
792 rclass = reduce_class (rclass, HI_REGS, rclass);
793 else if (mode == SImode)
794 rclass = reduce_class (rclass, SI_REGS, rclass);
796 if (rclass != A_REGS)
797 rclass = reduce_class (rclass, DI_REGS, rclass);
800 fprintf (stderr, " %s\n", class_names[rclass]);
805 /* Implements SECONDARY_RELOAD_CLASS. QImode have to be reloaded in
806 r0 or r1, as those are the only real QImode registers. CR regs get
807 reloaded through appropriately sized general or address
810 m32c_secondary_reload_class (int rclass, enum machine_mode mode, rtx x)
812 int cc = class_contents[rclass][0];
814 fprintf (stderr, "\nsecondary reload class %s %s\n",
815 class_names[rclass], mode_name[mode]);
819 && GET_CODE (x) == MEM && (cc & ~class_contents[R23_REGS][0]) == 0)
821 if (classes_intersect (rclass, CR_REGS)
822 && GET_CODE (x) == REG
823 && REGNO (x) >= SB_REGNO && REGNO (x) <= SP_REGNO)
824 return TARGET_A16 ? HI_REGS : A_REGS;
828 /* Implements CLASS_LIKELY_SPILLED_P. A_REGS is needed for address
831 m32c_class_likely_spilled_p (int regclass)
833 if (regclass == A_REGS)
835 return reg_class_size[regclass] == 1;
838 /* Implements CLASS_MAX_NREGS. We calculate this according to its
839 documented meaning, to avoid potential inconsistencies with actual
840 class definitions. */
842 m32c_class_max_nregs (int regclass, enum machine_mode mode)
846 for (rn = 0; rn < FIRST_PSEUDO_REGISTER; rn++)
847 if (class_contents[regclass][0] & (1 << rn))
849 int n = m32c_hard_regno_nregs (rn, mode);
856 /* Implements CANNOT_CHANGE_MODE_CLASS. Only r0 and r1 can change to
857 QI (r0l, r1l) because the chip doesn't support QI ops on other
858 registers (well, it does on a0/a1 but if we let gcc do that, reload
859 suffers). Otherwise, we allow changes to larger modes. */
861 m32c_cannot_change_mode_class (enum machine_mode from,
862 enum machine_mode to, int rclass)
866 fprintf (stderr, "cannot change from %s to %s in %s\n",
867 mode_name[from], mode_name[to], class_names[rclass]);
870 /* If the larger mode isn't allowed in any of these registers, we
871 can't allow the change. */
872 for (rn = 0; rn < FIRST_PSEUDO_REGISTER; rn++)
873 if (class_contents[rclass][0] & (1 << rn))
874 if (! m32c_hard_regno_ok (rn, to))
878 return (class_contents[rclass][0] & 0x1ffa);
880 if (class_contents[rclass][0] & 0x0005 /* r0, r1 */
881 && GET_MODE_SIZE (from) > 1)
883 if (GET_MODE_SIZE (from) > 2) /* all other regs */
889 /* Helpers for the rest of the file. */
890 /* TRUE if the rtx is a REG rtx for the given register. */
891 #define IS_REG(rtx,regno) (GET_CODE (rtx) == REG \
892 && REGNO (rtx) == regno)
893 /* TRUE if the rtx is a pseudo - specifically, one we can use as a
894 base register in address calculations (hence the "strict"
896 #define IS_PSEUDO(rtx,strict) (!strict && GET_CODE (rtx) == REG \
897 && (REGNO (rtx) == AP_REGNO \
898 || REGNO (rtx) >= FIRST_PSEUDO_REGISTER))
900 /* Implements CONST_OK_FOR_CONSTRAINT_P. Currently, all constant
901 constraints start with 'I', with the next two characters indicating
902 the type and size of the range allowed. */
904 m32c_const_ok_for_constraint_p (HOST_WIDE_INT value,
905 char c ATTRIBUTE_UNUSED, const char *str)
907 /* s=signed u=unsigned n=nonzero m=minus l=log2able,
908 [sun] bits [SUN] bytes, p=pointer size
909 I[-0-9][0-9] matches that number */
910 if (memcmp (str, "Is3", 3) == 0)
912 return (-8 <= value && value <= 7);
914 if (memcmp (str, "IS1", 3) == 0)
916 return (-128 <= value && value <= 127);
918 if (memcmp (str, "IS2", 3) == 0)
920 return (-32768 <= value && value <= 32767);
922 if (memcmp (str, "IU2", 3) == 0)
924 return (0 <= value && value <= 65535);
926 if (memcmp (str, "IU3", 3) == 0)
928 return (0 <= value && value <= 0x00ffffff);
930 if (memcmp (str, "In4", 3) == 0)
932 return (-8 <= value && value && value <= 8);
934 if (memcmp (str, "In5", 3) == 0)
936 return (-16 <= value && value && value <= 16);
938 if (memcmp (str, "In6", 3) == 0)
940 return (-32 <= value && value && value <= 32);
942 if (memcmp (str, "IM2", 3) == 0)
944 return (-65536 <= value && value && value <= -1);
946 if (memcmp (str, "Ilb", 3) == 0)
948 int b = exact_log2 (value);
949 return (b >= 0 && b <= 7);
951 if (memcmp (str, "Imb", 3) == 0)
953 int b = exact_log2 ((value ^ 0xff) & 0xff);
954 return (b >= 0 && b <= 7);
956 if (memcmp (str, "ImB", 3) == 0)
958 int b = exact_log2 ((value ^ 0xffff) & 0xffff);
959 return (b >= 0 && b <= 7);
961 if (memcmp (str, "Ilw", 3) == 0)
963 int b = exact_log2 (value);
964 return (b >= 0 && b <= 15);
966 if (memcmp (str, "Imw", 3) == 0)
968 int b = exact_log2 ((value ^ 0xffff) & 0xffff);
969 return (b >= 0 && b <= 15);
971 if (memcmp (str, "I00", 3) == 0)
978 /* Implements EXTRA_CONSTRAINT_STR (see next function too). 'S' is
979 for memory constraints, plus "Rpa" for PARALLEL rtx's we use for
980 call return values. */
982 m32c_extra_constraint_p2 (rtx value, char c ATTRIBUTE_UNUSED, const char *str)
984 encode_pattern (value);
985 if (memcmp (str, "Sd", 2) == 0)
987 /* This is the common "src/dest" address */
989 if (GET_CODE (value) == MEM && CONSTANT_P (XEXP (value, 0)))
991 if (RTX_IS ("ms") || RTX_IS ("m+si"))
993 if (RTX_IS ("m++rii"))
995 if (REGNO (patternr[3]) == FB_REGNO
996 && INTVAL (patternr[4]) == 0)
1001 else if (RTX_IS ("m+ri") || RTX_IS ("m+rs") || RTX_IS ("m+r+si"))
1005 if (REGNO (r) == SP_REGNO)
1007 return m32c_legitimate_address_p (GET_MODE (value), XEXP (value, 0), 1);
1009 else if (memcmp (str, "Sa", 2) == 0)
1014 else if (RTX_IS ("m+ri"))
1018 return (IS_REG (r, A0_REGNO) || IS_REG (r, A1_REGNO));
1020 else if (memcmp (str, "Si", 2) == 0)
1022 return (RTX_IS ("mi") || RTX_IS ("ms") || RTX_IS ("m+si"));
1024 else if (memcmp (str, "Ss", 2) == 0)
1026 return ((RTX_IS ("mr")
1027 && (IS_REG (patternr[1], SP_REGNO)))
1028 || (RTX_IS ("m+ri") && (IS_REG (patternr[2], SP_REGNO))));
1030 else if (memcmp (str, "Sf", 2) == 0)
1032 return ((RTX_IS ("mr")
1033 && (IS_REG (patternr[1], FB_REGNO)))
1034 || (RTX_IS ("m+ri") && (IS_REG (patternr[2], FB_REGNO))));
1036 else if (memcmp (str, "Sb", 2) == 0)
1038 return ((RTX_IS ("mr")
1039 && (IS_REG (patternr[1], SB_REGNO)))
1040 || (RTX_IS ("m+ri") && (IS_REG (patternr[2], SB_REGNO))));
1042 else if (memcmp (str, "Sp", 2) == 0)
1044 /* Absolute addresses 0..0x1fff used for bit addressing (I/O ports) */
1045 return (RTX_IS ("mi")
1046 && !(INTVAL (patternr[1]) & ~0x1fff));
1048 else if (memcmp (str, "S1", 2) == 0)
1050 return r1h_operand (value, QImode);
1053 gcc_assert (str[0] != 'S');
1055 if (memcmp (str, "Rpa", 2) == 0)
1056 return GET_CODE (value) == PARALLEL;
1061 /* This is for when we're debugging the above. */
1063 m32c_extra_constraint_p (rtx value, char c, const char *str)
1065 int rv = m32c_extra_constraint_p2 (value, c, str);
1067 fprintf (stderr, "\nconstraint %.*s: %d\n", CONSTRAINT_LEN (c, str), str,
1074 /* Implements EXTRA_MEMORY_CONSTRAINT. Currently, we only use strings
1075 starting with 'S'. */
1077 m32c_extra_memory_constraint (char c, const char *str ATTRIBUTE_UNUSED)
1082 /* Implements EXTRA_ADDRESS_CONSTRAINT. We reserve 'A' strings for these,
1083 but don't currently define any. */
1085 m32c_extra_address_constraint (char c, const char *str ATTRIBUTE_UNUSED)
1090 /* STACK AND CALLING */
1094 /* Implements RETURN_ADDR_RTX. Note that R8C and M16C push 24 bits
1095 (yes, THREE bytes) onto the stack for the return address, but we
1096 don't support pointers bigger than 16 bits on those chips. This
1097 will likely wreak havoc with exception unwinding. FIXME. */
1099 m32c_return_addr_rtx (int count)
1101 enum machine_mode mode;
1107 /* we want 2[$fb] */
1111 /* It's four bytes */
1117 /* FIXME: it's really 3 bytes */
1123 gen_rtx_MEM (mode, plus_constant (gen_rtx_REG (Pmode, FP_REGNO), offset));
1124 return copy_to_mode_reg (mode, ra_mem);
1127 /* Implements INCOMING_RETURN_ADDR_RTX. See comment above. */
1129 m32c_incoming_return_addr_rtx (void)
1132 return gen_rtx_MEM (PSImode, gen_rtx_REG (PSImode, SP_REGNO));
1135 /* Exception Handling Support */
1137 /* Implements EH_RETURN_DATA_REGNO. Choose registers able to hold
1140 m32c_eh_return_data_regno (int n)
1152 return INVALID_REGNUM;
1156 /* Implements EH_RETURN_STACKADJ_RTX. Saved and used later in
1157 m32c_emit_eh_epilogue. */
1159 m32c_eh_return_stackadj_rtx (void)
1161 if (!cfun->machine->eh_stack_adjust)
1165 sa = gen_rtx_REG (Pmode, R0_REGNO);
1166 cfun->machine->eh_stack_adjust = sa;
1168 return cfun->machine->eh_stack_adjust;
1171 /* Registers That Address the Stack Frame */
1173 /* Implements DWARF_FRAME_REGNUM and DBX_REGISTER_NUMBER. Note that
1174 the original spec called for dwarf numbers to vary with register
1175 width as well, for example, r0l, r0, and r2r0 would each have
1176 different dwarf numbers. GCC doesn't support this, and we don't do
1177 it, and gdb seems to like it this way anyway. */
1179 m32c_dwarf_frame_regnum (int n)
1205 return DWARF_FRAME_REGISTERS + 1;
1209 /* The frame looks like this:
1211 ap -> +------------------------------
1212 | Return address (3 or 4 bytes)
1213 | Saved FB (2 or 4 bytes)
1214 fb -> +------------------------------
1217 | through r0 as needed
1218 sp -> +------------------------------
1221 /* We use this to wrap all emitted insns in the prologue. */
1225 RTX_FRAME_RELATED_P (x) = 1;
1229 /* This maps register numbers to the PUSHM/POPM bitfield, and tells us
1230 how much the stack pointer moves for each, for each cpu family. */
1239 /* These are in reverse push (nearest-to-sp) order. */
1240 { R0_REGNO, 0x80, 2, 2 },
1241 { R1_REGNO, 0x40, 2, 2 },
1242 { R2_REGNO, 0x20, 2, 2 },
1243 { R3_REGNO, 0x10, 2, 2 },
1244 { A0_REGNO, 0x08, 2, 4 },
1245 { A1_REGNO, 0x04, 2, 4 },
1246 { SB_REGNO, 0x02, 2, 4 },
1247 { FB_REGNO, 0x01, 2, 4 }
1250 #define PUSHM_N (sizeof(pushm_info)/sizeof(pushm_info[0]))
1252 /* Returns TRUE if we need to save/restore the given register. We
1253 save everything for exception handlers, so that any register can be
1254 unwound. For interrupt handlers, we save everything if the handler
1255 calls something else (because we don't know what *that* function
1256 might do), but try to be a bit smarter if the handler is a leaf
1257 function. We always save $a0, though, because we use that in the
1258 epilogue to copy $fb to $sp. */
1260 need_to_save (int regno)
1262 if (fixed_regs[regno])
1264 if (crtl->calls_eh_return)
1266 if (regno == FP_REGNO)
1268 if (cfun->machine->is_interrupt
1269 && (!cfun->machine->is_leaf
1270 || (regno == A0_REGNO
1271 && m32c_function_needs_enter ())
1274 if (df_regs_ever_live_p (regno)
1275 && (!call_used_regs[regno] || cfun->machine->is_interrupt))
1280 /* This function contains all the intelligence about saving and
1281 restoring registers. It always figures out the register save set.
1282 When called with PP_justcount, it merely returns the size of the
1283 save set (for eliminating the frame pointer, for example). When
1284 called with PP_pushm or PP_popm, it emits the appropriate
1285 instructions for saving (pushm) or restoring (popm) the
1288 m32c_pushm_popm (Push_Pop_Type ppt)
1291 int byte_count = 0, bytes;
1293 rtx dwarf_set[PUSHM_N];
1295 int nosave_mask = 0;
1297 if (crtl->return_rtx
1298 && GET_CODE (crtl->return_rtx) == PARALLEL
1299 && !(crtl->calls_eh_return || cfun->machine->is_interrupt))
1301 rtx exp = XVECEXP (crtl->return_rtx, 0, 0);
1302 rtx rv = XEXP (exp, 0);
1303 int rv_bytes = GET_MODE_SIZE (GET_MODE (rv));
1306 nosave_mask |= 0x20; /* PSI, SI */
1308 nosave_mask |= 0xf0; /* DF */
1310 nosave_mask |= 0x50; /* DI */
1313 for (i = 0; i < (int) PUSHM_N; i++)
1315 /* Skip if neither register needs saving. */
1316 if (!need_to_save (pushm_info[i].reg1))
1319 if (pushm_info[i].bit & nosave_mask)
1322 reg_mask |= pushm_info[i].bit;
1323 bytes = TARGET_A16 ? pushm_info[i].a16_bytes : pushm_info[i].a24_bytes;
1325 if (ppt == PP_pushm)
1327 enum machine_mode mode = (bytes == 2) ? HImode : SImode;
1330 /* Always use stack_pointer_rtx instead of calling
1331 rtx_gen_REG ourselves. Code elsewhere in GCC assumes
1332 that there is a single rtx representing the stack pointer,
1333 namely stack_pointer_rtx, and uses == to recognize it. */
1334 addr = stack_pointer_rtx;
1336 if (byte_count != 0)
1337 addr = gen_rtx_PLUS (GET_MODE (addr), addr, GEN_INT (byte_count));
1339 dwarf_set[n_dwarfs++] =
1340 gen_rtx_SET (VOIDmode,
1341 gen_rtx_MEM (mode, addr),
1342 gen_rtx_REG (mode, pushm_info[i].reg1));
1343 F (dwarf_set[n_dwarfs - 1]);
1346 byte_count += bytes;
1349 if (cfun->machine->is_interrupt)
1351 cfun->machine->intr_pushm = reg_mask & 0xfe;
1356 if (cfun->machine->is_interrupt)
1357 for (i = MEM0_REGNO; i <= MEM7_REGNO; i++)
1358 if (need_to_save (i))
1361 cfun->machine->intr_pushmem[i - MEM0_REGNO] = 1;
1364 if (ppt == PP_pushm && byte_count)
1366 rtx note = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (n_dwarfs + 1));
1371 XVECEXP (note, 0, 0)
1372 = gen_rtx_SET (VOIDmode,
1374 gen_rtx_PLUS (GET_MODE (stack_pointer_rtx),
1376 GEN_INT (-byte_count)));
1377 F (XVECEXP (note, 0, 0));
1379 for (i = 0; i < n_dwarfs; i++)
1380 XVECEXP (note, 0, i + 1) = dwarf_set[i];
1382 pushm = F (emit_insn (gen_pushm (GEN_INT (reg_mask))));
1384 REG_NOTES (pushm) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, note,
1388 if (cfun->machine->is_interrupt)
1389 for (i = MEM0_REGNO; i <= MEM7_REGNO; i++)
1390 if (cfun->machine->intr_pushmem[i - MEM0_REGNO])
1393 pushm = emit_insn (gen_pushhi_16 (gen_rtx_REG (HImode, i)));
1395 pushm = emit_insn (gen_pushhi_24 (gen_rtx_REG (HImode, i)));
1399 if (ppt == PP_popm && byte_count)
1401 if (cfun->machine->is_interrupt)
1402 for (i = MEM7_REGNO; i >= MEM0_REGNO; i--)
1403 if (cfun->machine->intr_pushmem[i - MEM0_REGNO])
1406 emit_insn (gen_pophi_16 (gen_rtx_REG (HImode, i)));
1408 emit_insn (gen_pophi_24 (gen_rtx_REG (HImode, i)));
1411 emit_insn (gen_popm (GEN_INT (reg_mask)));
1417 /* Implements INITIAL_ELIMINATION_OFFSET. See the comment above that
1418 diagrams our call frame. */
1420 m32c_initial_elimination_offset (int from, int to)
1424 if (from == AP_REGNO)
1434 ofs += m32c_pushm_popm (PP_justcount);
1435 ofs += get_frame_size ();
1438 /* Account for push rounding. */
1440 ofs = (ofs + 1) & ~1;
1442 fprintf (stderr, "initial_elimination_offset from=%d to=%d, ofs=%d\n", from,
1448 /* Passing Function Arguments on the Stack */
1450 /* Implements PUSH_ROUNDING. The R8C and M16C have byte stacks, the
1451 M32C has word stacks. */
1453 m32c_push_rounding (int n)
1455 if (TARGET_R8C || TARGET_M16C)
1457 return (n + 1) & ~1;
1460 /* Passing Arguments in Registers */
1462 /* Implements FUNCTION_ARG. Arguments are passed partly in registers,
1463 partly on stack. If our function returns a struct, a pointer to a
1464 buffer for it is at the top of the stack (last thing pushed). The
1465 first few real arguments may be in registers as follows:
1467 R8C/M16C: arg1 in r1 if it's QI or HI (else it's pushed on stack)
1468 arg2 in r2 if it's HI (else pushed on stack)
1470 M32C: arg1 in r0 if it's QI or HI (else it's pushed on stack)
1473 Structs are not passed in registers, even if they fit. Only
1474 integer and pointer types are passed in registers.
1476 Note that when arg1 doesn't fit in r1, arg2 may still be passed in
1479 m32c_function_arg (CUMULATIVE_ARGS * ca,
1480 enum machine_mode mode, tree type, int named)
1482 /* Can return a reg, parallel, or 0 for stack */
1485 fprintf (stderr, "func_arg %d (%s, %d)\n",
1486 ca->parm_num, mode_name[mode], named);
1490 if (mode == VOIDmode)
1493 if (ca->force_mem || !named)
1496 fprintf (stderr, "func arg: force %d named %d, mem\n", ca->force_mem,
1502 if (type && INTEGRAL_TYPE_P (type) && POINTER_TYPE_P (type))
1505 if (type && AGGREGATE_TYPE_P (type))
1508 switch (ca->parm_num)
1511 if (GET_MODE_SIZE (mode) == 1 || GET_MODE_SIZE (mode) == 2)
1512 rv = gen_rtx_REG (mode, TARGET_A16 ? R1_REGNO : R0_REGNO);
1516 if (TARGET_A16 && GET_MODE_SIZE (mode) == 2)
1517 rv = gen_rtx_REG (mode, R2_REGNO);
1527 #undef TARGET_PASS_BY_REFERENCE
1528 #define TARGET_PASS_BY_REFERENCE m32c_pass_by_reference
1530 m32c_pass_by_reference (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED,
1531 enum machine_mode mode ATTRIBUTE_UNUSED,
1532 const_tree type ATTRIBUTE_UNUSED,
1533 bool named ATTRIBUTE_UNUSED)
1538 /* Implements INIT_CUMULATIVE_ARGS. */
1540 m32c_init_cumulative_args (CUMULATIVE_ARGS * ca,
1542 rtx libname ATTRIBUTE_UNUSED,
1544 int n_named_args ATTRIBUTE_UNUSED)
1546 if (fntype && aggregate_value_p (TREE_TYPE (fntype), fndecl))
1553 /* Implements FUNCTION_ARG_ADVANCE. force_mem is set for functions
1554 returning structures, so we always reset that. Otherwise, we only
1555 need to know the sequence number of the argument to know what to do
1558 m32c_function_arg_advance (CUMULATIVE_ARGS * ca,
1559 enum machine_mode mode ATTRIBUTE_UNUSED,
1560 tree type ATTRIBUTE_UNUSED,
1561 int named ATTRIBUTE_UNUSED)
1569 /* Implements FUNCTION_ARG_REGNO_P. */
1571 m32c_function_arg_regno_p (int r)
1574 return (r == R0_REGNO);
1575 return (r == R1_REGNO || r == R2_REGNO);
1578 /* HImode and PSImode are the two "native" modes as far as GCC is
1579 concerned, but the chips also support a 32-bit mode which is used
1580 for some opcodes in R8C/M16C and for reset vectors and such. */
1581 #undef TARGET_VALID_POINTER_MODE
1582 #define TARGET_VALID_POINTER_MODE m32c_valid_pointer_mode
1584 m32c_valid_pointer_mode (enum machine_mode mode)
1594 /* How Scalar Function Values Are Returned */
1596 /* Implements TARGET_LIBCALL_VALUE. Most values are returned in $r0, or some
1597 combination of registers starting there (r2r0 for longs, r3r1r2r0
1598 for long long, r3r2r1r0 for doubles), except that that ABI
1599 currently doesn't work because it ends up using all available
1600 general registers and gcc often can't compile it. So, instead, we
1601 return anything bigger than 16 bits in "mem0" (effectively, a
1602 memory location). */
1604 #undef TARGET_LIBCALL_VALUE
1605 #define TARGET_LIBCALL_VALUE m32c_libcall_value
1608 m32c_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
1610 /* return reg or parallel */
1612 /* FIXME: GCC has difficulty returning large values in registers,
1613 because that ties up most of the general registers and gives the
1614 register allocator little to work with. Until we can resolve
1615 this, large values are returned in memory. */
1620 rv = gen_rtx_PARALLEL (mode, rtvec_alloc (4));
1621 XVECEXP (rv, 0, 0) = gen_rtx_EXPR_LIST (VOIDmode,
1622 gen_rtx_REG (HImode,
1625 XVECEXP (rv, 0, 1) = gen_rtx_EXPR_LIST (VOIDmode,
1626 gen_rtx_REG (HImode,
1629 XVECEXP (rv, 0, 2) = gen_rtx_EXPR_LIST (VOIDmode,
1630 gen_rtx_REG (HImode,
1633 XVECEXP (rv, 0, 3) = gen_rtx_EXPR_LIST (VOIDmode,
1634 gen_rtx_REG (HImode,
1640 if (TARGET_A24 && GET_MODE_SIZE (mode) > 2)
1644 rv = gen_rtx_PARALLEL (mode, rtvec_alloc (1));
1645 XVECEXP (rv, 0, 0) = gen_rtx_EXPR_LIST (VOIDmode,
1653 if (GET_MODE_SIZE (mode) > 2)
1654 return gen_rtx_REG (mode, MEM0_REGNO);
1655 return gen_rtx_REG (mode, R0_REGNO);
1658 /* Implements TARGET_FUNCTION_VALUE. Functions and libcalls have the same
1661 #undef TARGET_FUNCTION_VALUE
1662 #define TARGET_FUNCTION_VALUE m32c_function_value
1665 m32c_function_value (const_tree valtype,
1666 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1667 bool outgoing ATTRIBUTE_UNUSED)
1669 /* return reg or parallel */
1670 const enum machine_mode mode = TYPE_MODE (valtype);
1671 return m32c_libcall_value (mode, NULL_RTX);
1674 /* Implements FUNCTION_VALUE_REGNO_P. */
1677 m32c_function_value_regno_p (const unsigned int regno)
1679 return (regno == R0_REGNO || regno == MEM0_REGNO);
1682 /* How Large Values Are Returned */
1684 /* We return structures by pushing the address on the stack, even if
1685 we use registers for the first few "real" arguments. */
1686 #undef TARGET_STRUCT_VALUE_RTX
1687 #define TARGET_STRUCT_VALUE_RTX m32c_struct_value_rtx
1689 m32c_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
1690 int incoming ATTRIBUTE_UNUSED)
1695 /* Function Entry and Exit */
1697 /* Implements EPILOGUE_USES. Interrupts restore all registers. */
1699 m32c_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1701 if (cfun->machine->is_interrupt)
1706 /* Implementing the Varargs Macros */
1708 #undef TARGET_STRICT_ARGUMENT_NAMING
1709 #define TARGET_STRICT_ARGUMENT_NAMING m32c_strict_argument_naming
1711 m32c_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
1716 /* Trampolines for Nested Functions */
1720 1 0000 75C43412 mov.w #0x1234,a0
1721 2 0004 FC000000 jmp.a label
1724 1 0000 BC563412 mov.l:s #0x123456,a0
1725 2 0004 CC000000 jmp.a label
1728 /* Implements TRAMPOLINE_SIZE. */
1730 m32c_trampoline_size (void)
1732 /* Allocate extra space so we can avoid the messy shifts when we
1733 initialize the trampoline; we just write past the end of the
1735 return TARGET_A16 ? 8 : 10;
1738 /* Implements TRAMPOLINE_ALIGNMENT. */
1740 m32c_trampoline_alignment (void)
1745 /* Implements TARGET_TRAMPOLINE_INIT. */
1747 #undef TARGET_TRAMPOLINE_INIT
1748 #define TARGET_TRAMPOLINE_INIT m32c_trampoline_init
1750 m32c_trampoline_init (rtx m_tramp, tree fndecl, rtx chainval)
1752 rtx function = XEXP (DECL_RTL (fndecl), 0);
1754 #define A0(m,i) adjust_address (m_tramp, m, i)
1757 /* Note: we subtract a "word" because the moves want signed
1758 constants, not unsigned constants. */
1759 emit_move_insn (A0 (HImode, 0), GEN_INT (0xc475 - 0x10000));
1760 emit_move_insn (A0 (HImode, 2), chainval);
1761 emit_move_insn (A0 (QImode, 4), GEN_INT (0xfc - 0x100));
1762 /* We use 16-bit addresses here, but store the zero to turn it
1763 into a 24-bit offset. */
1764 emit_move_insn (A0 (HImode, 5), function);
1765 emit_move_insn (A0 (QImode, 7), GEN_INT (0x00));
1769 /* Note that the PSI moves actually write 4 bytes. Make sure we
1770 write stuff out in the right order, and leave room for the
1771 extra byte at the end. */
1772 emit_move_insn (A0 (QImode, 0), GEN_INT (0xbc - 0x100));
1773 emit_move_insn (A0 (PSImode, 1), chainval);
1774 emit_move_insn (A0 (QImode, 4), GEN_INT (0xcc - 0x100));
1775 emit_move_insn (A0 (PSImode, 5), function);
1780 /* Implicit Calls to Library Routines */
1782 #undef TARGET_INIT_LIBFUNCS
1783 #define TARGET_INIT_LIBFUNCS m32c_init_libfuncs
1785 m32c_init_libfuncs (void)
1787 /* We do this because the M32C has an HImode operand, but the
1788 M16C has an 8-bit operand. Since gcc looks at the match data
1789 and not the expanded rtl, we have to reset the optab so that
1790 the right modes are found. */
1793 optab_handler (cstore_optab, QImode)->insn_code = CODE_FOR_cstoreqi4_24;
1794 optab_handler (cstore_optab, HImode)->insn_code = CODE_FOR_cstorehi4_24;
1795 optab_handler (cstore_optab, PSImode)->insn_code = CODE_FOR_cstorepsi4_24;
1799 /* Addressing Modes */
1801 /* The r8c/m32c family supports a wide range of non-orthogonal
1802 addressing modes, including the ability to double-indirect on *some*
1803 of them. Not all insns support all modes, either, but we rely on
1804 predicates and constraints to deal with that. */
1805 #undef TARGET_LEGITIMATE_ADDRESS_P
1806 #define TARGET_LEGITIMATE_ADDRESS_P m32c_legitimate_address_p
1808 m32c_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1814 /* Wide references to memory will be split after reload, so we must
1815 ensure that all parts of such splits remain legitimate
1817 mode_adjust = GET_MODE_SIZE (mode) - 1;
1819 /* allowing PLUS yields mem:HI(plus:SI(mem:SI(plus:SI in m32c_split_move */
1820 if (GET_CODE (x) == PRE_DEC
1821 || GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_MODIFY)
1823 return (GET_CODE (XEXP (x, 0)) == REG
1824 && REGNO (XEXP (x, 0)) == SP_REGNO);
1828 /* This is the double indirection detection, but it currently
1829 doesn't work as cleanly as this code implies, so until we've had
1830 a chance to debug it, leave it disabled. */
1831 if (TARGET_A24 && GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) != PLUS)
1834 fprintf (stderr, "double indirect\n");
1843 /* Most indexable registers can be used without displacements,
1844 although some of them will be emitted with an explicit zero
1845 to please the assembler. */
1846 switch (REGNO (patternr[0]))
1856 if (IS_PSEUDO (patternr[0], strict))
1863 /* This is more interesting, because different base registers
1864 allow for different displacements - both range and signedness
1865 - and it differs from chip series to chip series too. */
1866 int rn = REGNO (patternr[1]);
1867 HOST_WIDE_INT offs = INTVAL (patternr[2]);
1873 /* The syntax only allows positive offsets, but when the
1874 offsets span the entire memory range, we can simulate
1875 negative offsets by wrapping. */
1877 return (offs >= -65536 && offs <= 65535 - mode_adjust);
1879 return (offs >= 0 && offs <= 65535 - mode_adjust);
1881 return (offs >= -16777216 && offs <= 16777215);
1885 return (offs >= -128 && offs <= 127 - mode_adjust);
1886 return (offs >= -65536 && offs <= 65535 - mode_adjust);
1889 return (offs >= -128 && offs <= 127 - mode_adjust);
1892 if (IS_PSEUDO (patternr[1], strict))
1897 if (RTX_IS ("+rs") || RTX_IS ("+r+si"))
1899 rtx reg = patternr[1];
1901 /* We don't know where the symbol is, so only allow base
1902 registers which support displacements spanning the whole
1904 switch (REGNO (reg))
1908 /* $sb needs a secondary reload, but since it's involved in
1909 memory address reloads too, we don't deal with it very
1911 /* case SB_REGNO: */
1914 if (IS_PSEUDO (reg, strict))
1922 /* Implements REG_OK_FOR_BASE_P. */
1924 m32c_reg_ok_for_base_p (rtx x, int strict)
1926 if (GET_CODE (x) != REG)
1937 if (IS_PSEUDO (x, strict))
1943 /* We have three choices for choosing fb->aN offsets. If we choose -128,
1944 we need one MOVA -128[fb],aN opcode and 16-bit aN displacements,
1946 EB 4B FF mova -128[$fb],$a0
1947 D8 0C FF FF mov.w:Q #0,-1[$a0]
1949 Alternately, we subtract the frame size, and hopefully use 8-bit aN
1952 77 54 00 01 sub #256,$a0
1953 D8 08 01 mov.w:Q #0,1[$a0]
1955 If we don't offset (i.e. offset by zero), we end up with:
1957 D8 0C 00 FF mov.w:Q #0,-256[$a0]
1959 We have to subtract *something* so that we have a PLUS rtx to mark
1960 that we've done this reload. The -128 offset will never result in
1961 an 8-bit aN offset, and the payoff for the second case is five
1962 loads *if* those loads are within 256 bytes of the other end of the
1963 frame, so the third case seems best. Note that we subtract the
1964 zero, but detect that in the addhi3 pattern. */
1966 #define BIG_FB_ADJ 0
1968 /* Implements LEGITIMIZE_ADDRESS. The only address we really have to
1969 worry about is frame base offsets, as $fb has a limited
1970 displacement range. We deal with this by attempting to reload $fb
1971 itself into an address register; that seems to result in the best
1973 #undef TARGET_LEGITIMIZE_ADDRESS
1974 #define TARGET_LEGITIMIZE_ADDRESS m32c_legitimize_address
1976 m32c_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
1977 enum machine_mode mode)
1980 fprintf (stderr, "m32c_legitimize_address for mode %s\n", mode_name[mode]);
1982 fprintf (stderr, "\n");
1985 if (GET_CODE (x) == PLUS
1986 && GET_CODE (XEXP (x, 0)) == REG
1987 && REGNO (XEXP (x, 0)) == FB_REGNO
1988 && GET_CODE (XEXP (x, 1)) == CONST_INT
1989 && (INTVAL (XEXP (x, 1)) < -128
1990 || INTVAL (XEXP (x, 1)) > (128 - GET_MODE_SIZE (mode))))
1992 /* reload FB to A_REGS */
1993 rtx temp = gen_reg_rtx (Pmode);
1995 emit_insn (gen_rtx_SET (VOIDmode, temp, XEXP (x, 0)));
2002 /* Implements LEGITIMIZE_RELOAD_ADDRESS. See comment above. */
2004 m32c_legitimize_reload_address (rtx * x,
2005 enum machine_mode mode,
2007 int type, int ind_levels ATTRIBUTE_UNUSED)
2010 fprintf (stderr, "\nm32c_legitimize_reload_address for mode %s\n",
2015 /* At one point, this function tried to get $fb copied to an address
2016 register, which in theory would maximize sharing, but gcc was
2017 *also* still trying to reload the whole address, and we'd run out
2018 of address registers. So we let gcc do the naive (but safe)
2019 reload instead, when the above function doesn't handle it for
2022 The code below is a second attempt at the above. */
2024 if (GET_CODE (*x) == PLUS
2025 && GET_CODE (XEXP (*x, 0)) == REG
2026 && REGNO (XEXP (*x, 0)) == FB_REGNO
2027 && GET_CODE (XEXP (*x, 1)) == CONST_INT
2028 && (INTVAL (XEXP (*x, 1)) < -128
2029 || INTVAL (XEXP (*x, 1)) > (128 - GET_MODE_SIZE (mode))))
2032 int offset = INTVAL (XEXP (*x, 1));
2033 int adjustment = -BIG_FB_ADJ;
2035 sum = gen_rtx_PLUS (Pmode, XEXP (*x, 0),
2036 GEN_INT (adjustment));
2037 *x = gen_rtx_PLUS (Pmode, sum, GEN_INT (offset - adjustment));
2038 if (type == RELOAD_OTHER)
2039 type = RELOAD_FOR_OTHER_ADDRESS;
2040 push_reload (sum, NULL_RTX, &XEXP (*x, 0), NULL,
2041 A_REGS, Pmode, VOIDmode, 0, 0, opnum,
2046 if (GET_CODE (*x) == PLUS
2047 && GET_CODE (XEXP (*x, 0)) == PLUS
2048 && GET_CODE (XEXP (XEXP (*x, 0), 0)) == REG
2049 && REGNO (XEXP (XEXP (*x, 0), 0)) == FB_REGNO
2050 && GET_CODE (XEXP (XEXP (*x, 0), 1)) == CONST_INT
2051 && GET_CODE (XEXP (*x, 1)) == CONST_INT
2054 if (type == RELOAD_OTHER)
2055 type = RELOAD_FOR_OTHER_ADDRESS;
2056 push_reload (XEXP (*x, 0), NULL_RTX, &XEXP (*x, 0), NULL,
2057 A_REGS, Pmode, VOIDmode, 0, 0, opnum,
2065 /* Implements LEGITIMATE_CONSTANT_P. We split large constants anyway,
2066 so we can allow anything. */
2068 m32c_legitimate_constant_p (rtx x ATTRIBUTE_UNUSED)
2074 /* Condition Code Status */
2076 #undef TARGET_FIXED_CONDITION_CODE_REGS
2077 #define TARGET_FIXED_CONDITION_CODE_REGS m32c_fixed_condition_code_regs
2079 m32c_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
2082 *p2 = INVALID_REGNUM;
2086 /* Describing Relative Costs of Operations */
2088 /* Implements REGISTER_MOVE_COST. We make impossible moves
2089 prohibitively expensive, like trying to put QIs in r2/r3 (there are
2090 no opcodes to do that). We also discourage use of mem* registers
2091 since they're really memory. */
2093 m32c_register_move_cost (enum machine_mode mode, int from, int to)
2095 int cost = COSTS_N_INSNS (3);
2096 int cc = class_contents[from][0] | class_contents[to][0];
2097 /* FIXME: pick real values, but not 2 for now. */
2098 if (mode == QImode && (cc & class_contents[R23_REGS][0]))
2100 if (!(cc & ~class_contents[R23_REGS][0]))
2101 cost = COSTS_N_INSNS (1000);
2103 cost = COSTS_N_INSNS (80);
2106 if (!class_can_hold_mode (from, mode) || !class_can_hold_mode (to, mode))
2107 cost = COSTS_N_INSNS (1000);
2109 if (classes_intersect (from, CR_REGS))
2110 cost += COSTS_N_INSNS (5);
2112 if (classes_intersect (to, CR_REGS))
2113 cost += COSTS_N_INSNS (5);
2115 if (from == MEM_REGS || to == MEM_REGS)
2116 cost += COSTS_N_INSNS (50);
2117 else if (classes_intersect (from, MEM_REGS)
2118 || classes_intersect (to, MEM_REGS))
2119 cost += COSTS_N_INSNS (10);
2122 fprintf (stderr, "register_move_cost %s from %s to %s = %d\n",
2123 mode_name[mode], class_names[from], class_names[to], cost);
2128 /* Implements MEMORY_MOVE_COST. */
2130 m32c_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
2131 int reg_class ATTRIBUTE_UNUSED,
2132 int in ATTRIBUTE_UNUSED)
2134 /* FIXME: pick real values. */
2135 return COSTS_N_INSNS (10);
2138 /* Here we try to describe when we use multiple opcodes for one RTX so
2139 that gcc knows when to use them. */
2140 #undef TARGET_RTX_COSTS
2141 #define TARGET_RTX_COSTS m32c_rtx_costs
2143 m32c_rtx_costs (rtx x, int code, int outer_code, int *total,
2144 bool speed ATTRIBUTE_UNUSED)
2149 if (REGNO (x) >= MEM0_REGNO && REGNO (x) <= MEM7_REGNO)
2150 *total += COSTS_N_INSNS (500);
2152 *total += COSTS_N_INSNS (1);
2158 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2160 /* mov.b r1l, r1h */
2161 *total += COSTS_N_INSNS (1);
2164 if (INTVAL (XEXP (x, 1)) > 8
2165 || INTVAL (XEXP (x, 1)) < -8)
2168 /* mov.b r1l, r1h */
2169 *total += COSTS_N_INSNS (2);
2184 if (outer_code == SET)
2186 *total += COSTS_N_INSNS (2);
2193 rtx dest = XEXP (x, 0);
2194 rtx addr = XEXP (dest, 0);
2195 switch (GET_CODE (addr))
2198 *total += COSTS_N_INSNS (1);
2201 *total += COSTS_N_INSNS (3);
2204 *total += COSTS_N_INSNS (2);
2212 /* Reasonable default. */
2213 if (TARGET_A16 && GET_MODE(x) == SImode)
2214 *total += COSTS_N_INSNS (2);
2220 #undef TARGET_ADDRESS_COST
2221 #define TARGET_ADDRESS_COST m32c_address_cost
2223 m32c_address_cost (rtx addr, bool speed ATTRIBUTE_UNUSED)
2226 /* fprintf(stderr, "\naddress_cost\n");
2228 switch (GET_CODE (addr))
2233 return COSTS_N_INSNS(1);
2234 if (0 < i && i <= 255)
2235 return COSTS_N_INSNS(2);
2236 if (0 < i && i <= 65535)
2237 return COSTS_N_INSNS(3);
2238 return COSTS_N_INSNS(4);
2240 return COSTS_N_INSNS(4);
2242 return COSTS_N_INSNS(1);
2244 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
2246 i = INTVAL (XEXP (addr, 1));
2248 return COSTS_N_INSNS(1);
2249 if (0 < i && i <= 255)
2250 return COSTS_N_INSNS(2);
2251 if (0 < i && i <= 65535)
2252 return COSTS_N_INSNS(3);
2254 return COSTS_N_INSNS(4);
2260 /* Defining the Output Assembler Language */
2262 /* The Overall Framework of an Assembler File */
2264 #undef TARGET_HAVE_NAMED_SECTIONS
2265 #define TARGET_HAVE_NAMED_SECTIONS true
2267 /* Output of Data */
2269 /* We may have 24 bit sizes, which is the native address size.
2270 Currently unused, but provided for completeness. */
2271 #undef TARGET_ASM_INTEGER
2272 #define TARGET_ASM_INTEGER m32c_asm_integer
2274 m32c_asm_integer (rtx x, unsigned int size, int aligned_p)
2279 fprintf (asm_out_file, "\t.3byte\t");
2280 output_addr_const (asm_out_file, x);
2281 fputc ('\n', asm_out_file);
2284 if (GET_CODE (x) == SYMBOL_REF)
2286 fprintf (asm_out_file, "\t.long\t");
2287 output_addr_const (asm_out_file, x);
2288 fputc ('\n', asm_out_file);
2293 return default_assemble_integer (x, size, aligned_p);
2296 /* Output of Assembler Instructions */
2298 /* We use a lookup table because the addressing modes are non-orthogonal. */
2303 char const *pattern;
2306 const conversions[] = {
2309 { 0, "mr", "z[1]" },
2310 { 0, "m+ri", "3[2]" },
2311 { 0, "m+rs", "3[2]" },
2312 { 0, "m+r+si", "4+5[2]" },
2315 { 0, "m+si", "2+3" },
2317 { 0, "mmr", "[z[2]]" },
2318 { 0, "mm+ri", "[4[3]]" },
2319 { 0, "mm+rs", "[4[3]]" },
2320 { 0, "mm+r+si", "[5+6[3]]" },
2321 { 0, "mms", "[[2]]" },
2322 { 0, "mmi", "[[2]]" },
2323 { 0, "mm+si", "[4[3]]" },
2327 { 0, "+si", "#1+2" },
2333 { 'd', "+si", "1+2" },
2336 { 'D', "+si", "1+2" },
2347 /* This is in order according to the bitfield that pushm/popm use. */
2348 static char const *pushm_regs[] = {
2349 "fb", "sb", "a1", "a0", "r3", "r2", "r1", "r0"
2352 /* Implements PRINT_OPERAND. */
2354 m32c_print_operand (FILE * file, rtx x, int code)
2359 int unsigned_const = 0;
2362 /* Multiplies; constants are converted to sign-extended format but
2363 we need unsigned, so 'u' and 'U' tell us what size unsigned we
2375 /* This one is only for debugging; you can put it in a pattern to
2376 force this error. */
2379 fprintf (stderr, "dj: unreviewed pattern:");
2380 if (current_output_insn)
2381 debug_rtx (current_output_insn);
2384 /* PSImode operations are either .w or .l depending on the target. */
2388 fprintf (file, "w");
2390 fprintf (file, "l");
2393 /* Inverted conditionals. */
2396 switch (GET_CODE (x))
2402 fputs ("gtu", file);
2408 fputs ("geu", file);
2414 fputs ("leu", file);
2420 fputs ("ltu", file);
2433 /* Regular conditionals. */
2436 switch (GET_CODE (x))
2442 fputs ("leu", file);
2448 fputs ("ltu", file);
2454 fputs ("gtu", file);
2460 fputs ("geu", file);
2473 /* Used in negsi2 to do HImode ops on the two parts of an SImode
2475 if (code == 'h' && GET_MODE (x) == SImode)
2477 x = m32c_subreg (HImode, x, SImode, 0);
2480 if (code == 'H' && GET_MODE (x) == SImode)
2482 x = m32c_subreg (HImode, x, SImode, 2);
2485 if (code == 'h' && GET_MODE (x) == HImode)
2487 x = m32c_subreg (QImode, x, HImode, 0);
2490 if (code == 'H' && GET_MODE (x) == HImode)
2492 /* We can't actually represent this as an rtx. Do it here. */
2493 if (GET_CODE (x) == REG)
2498 fputs ("r0h", file);
2501 fputs ("r1h", file);
2507 /* This should be a MEM. */
2508 x = m32c_subreg (QImode, x, HImode, 1);
2511 /* This is for BMcond, which always wants word register names. */
2512 if (code == 'h' && GET_MODE (x) == QImode)
2514 if (GET_CODE (x) == REG)
2515 x = gen_rtx_REG (HImode, REGNO (x));
2518 /* 'x' and 'X' need to be ignored for non-immediates. */
2519 if ((code == 'x' || code == 'X') && GET_CODE (x) != CONST_INT)
2524 for (i = 0; conversions[i].pattern; i++)
2525 if (conversions[i].code == code
2526 && streq (conversions[i].pattern, pattern))
2528 for (j = 0; conversions[i].format[j]; j++)
2529 /* backslash quotes the next character in the output pattern. */
2530 if (conversions[i].format[j] == '\\')
2532 fputc (conversions[i].format[j + 1], file);
2535 /* Digits in the output pattern indicate that the
2536 corresponding RTX is to be output at that point. */
2537 else if (ISDIGIT (conversions[i].format[j]))
2539 rtx r = patternr[conversions[i].format[j] - '0'];
2540 switch (GET_CODE (r))
2543 fprintf (file, "%s",
2544 reg_name_with_mode (REGNO (r), GET_MODE (r)));
2553 int i = (int) exact_log2 (v);
2555 i = (int) exact_log2 ((v ^ 0xffff) & 0xffff);
2557 i = (int) exact_log2 ((v ^ 0xff) & 0xff);
2559 fprintf (file, "%d", i);
2563 /* Unsigned byte. */
2564 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2568 /* Unsigned word. */
2569 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2570 INTVAL (r) & 0xffff);
2573 /* pushm and popm encode a register set into a single byte. */
2575 for (b = 7; b >= 0; b--)
2576 if (INTVAL (r) & (1 << b))
2578 fprintf (file, "%s%s", comma, pushm_regs[b]);
2583 /* "Minus". Output -X */
2584 ival = (-INTVAL (r) & 0xffff);
2586 ival = ival - 0x10000;
2587 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2591 if (conversions[i].format[j + 1] == '[' && ival < 0)
2593 /* We can simulate negative displacements by
2594 taking advantage of address space
2595 wrapping when the offset can span the
2596 entire address range. */
2598 patternr[conversions[i].format[j + 2] - '0'];
2599 if (GET_CODE (base) == REG)
2600 switch (REGNO (base))
2605 ival = 0x1000000 + ival;
2607 ival = 0x10000 + ival;
2611 ival = 0x10000 + ival;
2615 else if (code == 'd' && ival < 0 && j == 0)
2616 /* The "mova" opcode is used to do addition by
2617 computing displacements, but again, we need
2618 displacements to be unsigned *if* they're
2619 the only component of the displacement
2620 (i.e. no "symbol-4" type displacement). */
2621 ival = (TARGET_A24 ? 0x1000000 : 0x10000) + ival;
2623 if (conversions[i].format[j] == '0')
2625 /* More conversions to unsigned. */
2626 if (unsigned_const == 2)
2628 if (unsigned_const == 1)
2631 if (streq (conversions[i].pattern, "mi")
2632 || streq (conversions[i].pattern, "mmi"))
2634 /* Integers used as addresses are unsigned. */
2635 ival &= (TARGET_A24 ? 0xffffff : 0xffff);
2637 if (force_sign && ival >= 0)
2639 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2644 /* We don't have const_double constants. If it
2645 happens, make it obvious. */
2646 fprintf (file, "[const_double 0x%lx]",
2647 (unsigned long) CONST_DOUBLE_HIGH (r));
2650 assemble_name (file, XSTR (r, 0));
2653 output_asm_label (r);
2656 fprintf (stderr, "don't know how to print this operand:");
2663 if (conversions[i].format[j] == 'z')
2665 /* Some addressing modes *must* have a displacement,
2666 so insert a zero here if needed. */
2668 for (k = j + 1; conversions[i].format[k]; k++)
2669 if (ISDIGIT (conversions[i].format[k]))
2671 rtx reg = patternr[conversions[i].format[k] - '0'];
2672 if (GET_CODE (reg) == REG
2673 && (REGNO (reg) == SB_REGNO
2674 || REGNO (reg) == FB_REGNO
2675 || REGNO (reg) == SP_REGNO))
2680 /* Signed displacements off symbols need to have signs
2682 if (conversions[i].format[j] == '+'
2683 && (!code || code == 'D' || code == 'd')
2684 && ISDIGIT (conversions[i].format[j + 1])
2685 && (GET_CODE (patternr[conversions[i].format[j + 1] - '0'])
2691 fputc (conversions[i].format[j], file);
2695 if (!conversions[i].pattern)
2697 fprintf (stderr, "unconvertible operand %c `%s'", code ? code : '-',
2700 fprintf (file, "[%c.%s]", code ? code : '-', pattern);
2706 /* Implements PRINT_OPERAND_PUNCT_VALID_P. See m32c_print_operand
2707 above for descriptions of what these do. */
2709 m32c_print_operand_punct_valid_p (int c)
2711 if (c == '&' || c == '!')
2716 /* Implements PRINT_OPERAND_ADDRESS. Nothing unusual here. */
2718 m32c_print_operand_address (FILE * stream, rtx address)
2720 if (GET_CODE (address) == MEM)
2721 address = XEXP (address, 0);
2723 /* cf: gcc.dg/asm-4.c. */
2724 gcc_assert (GET_CODE (address) == REG);
2726 m32c_print_operand (stream, address, 0);
2729 /* Implements ASM_OUTPUT_REG_PUSH. Control registers are pushed
2730 differently than general registers. */
2732 m32c_output_reg_push (FILE * s, int regno)
2734 if (regno == FLG_REGNO)
2735 fprintf (s, "\tpushc\tflg\n");
2737 fprintf (s, "\tpush.%c\t%s\n",
2738 " bwll"[reg_push_size (regno)], reg_names[regno]);
2741 /* Likewise for ASM_OUTPUT_REG_POP. */
2743 m32c_output_reg_pop (FILE * s, int regno)
2745 if (regno == FLG_REGNO)
2746 fprintf (s, "\tpopc\tflg\n");
2748 fprintf (s, "\tpop.%c\t%s\n",
2749 " bwll"[reg_push_size (regno)], reg_names[regno]);
2752 /* Defining target-specific uses of `__attribute__' */
2754 /* Used to simplify the logic below. Find the attributes wherever
2756 #define M32C_ATTRIBUTES(decl) \
2757 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
2758 : DECL_ATTRIBUTES (decl) \
2759 ? (DECL_ATTRIBUTES (decl)) \
2760 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
2762 /* Returns TRUE if the given tree has the "interrupt" attribute. */
2764 interrupt_p (tree node ATTRIBUTE_UNUSED)
2766 tree list = M32C_ATTRIBUTES (node);
2769 if (is_attribute_p ("interrupt", TREE_PURPOSE (list)))
2771 list = TREE_CHAIN (list);
2773 return fast_interrupt_p (node);
2776 /* Returns TRUE if the given tree has the "bank_switch" attribute. */
2778 bank_switch_p (tree node ATTRIBUTE_UNUSED)
2780 tree list = M32C_ATTRIBUTES (node);
2783 if (is_attribute_p ("bank_switch", TREE_PURPOSE (list)))
2785 list = TREE_CHAIN (list);
2790 /* Returns TRUE if the given tree has the "fast_interrupt" attribute. */
2792 fast_interrupt_p (tree node ATTRIBUTE_UNUSED)
2794 tree list = M32C_ATTRIBUTES (node);
2797 if (is_attribute_p ("fast_interrupt", TREE_PURPOSE (list)))
2799 list = TREE_CHAIN (list);
2805 interrupt_handler (tree * node ATTRIBUTE_UNUSED,
2806 tree name ATTRIBUTE_UNUSED,
2807 tree args ATTRIBUTE_UNUSED,
2808 int flags ATTRIBUTE_UNUSED,
2809 bool * no_add_attrs ATTRIBUTE_UNUSED)
2814 /* Returns TRUE if given tree has the "function_vector" attribute. */
2816 m32c_special_page_vector_p (tree func)
2820 if (TREE_CODE (func) != FUNCTION_DECL)
2823 list = M32C_ATTRIBUTES (func);
2826 if (is_attribute_p ("function_vector", TREE_PURPOSE (list)))
2828 list = TREE_CHAIN (list);
2834 function_vector_handler (tree * node ATTRIBUTE_UNUSED,
2835 tree name ATTRIBUTE_UNUSED,
2836 tree args ATTRIBUTE_UNUSED,
2837 int flags ATTRIBUTE_UNUSED,
2838 bool * no_add_attrs ATTRIBUTE_UNUSED)
2842 /* The attribute is not supported for R8C target. */
2843 warning (OPT_Wattributes,
2844 "%qE attribute is not supported for R8C target",
2846 *no_add_attrs = true;
2848 else if (TREE_CODE (*node) != FUNCTION_DECL)
2850 /* The attribute must be applied to functions only. */
2851 warning (OPT_Wattributes,
2852 "%qE attribute applies only to functions",
2854 *no_add_attrs = true;
2856 else if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
2858 /* The argument must be a constant integer. */
2859 warning (OPT_Wattributes,
2860 "%qE attribute argument not an integer constant",
2862 *no_add_attrs = true;
2864 else if (TREE_INT_CST_LOW (TREE_VALUE (args)) < 18
2865 || TREE_INT_CST_LOW (TREE_VALUE (args)) > 255)
2867 /* The argument value must be between 18 to 255. */
2868 warning (OPT_Wattributes,
2869 "%qE attribute argument should be between 18 to 255",
2871 *no_add_attrs = true;
2876 /* If the function is assigned the attribute 'function_vector', it
2877 returns the function vector number, otherwise returns zero. */
2879 current_function_special_page_vector (rtx x)
2883 if ((GET_CODE(x) == SYMBOL_REF)
2884 && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_FUNCVEC_FUNCTION))
2887 tree t = SYMBOL_REF_DECL (x);
2889 if (TREE_CODE (t) != FUNCTION_DECL)
2892 list = M32C_ATTRIBUTES (t);
2895 if (is_attribute_p ("function_vector", TREE_PURPOSE (list)))
2897 num = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (list)));
2901 list = TREE_CHAIN (list);
2910 #undef TARGET_ATTRIBUTE_TABLE
2911 #define TARGET_ATTRIBUTE_TABLE m32c_attribute_table
2912 static const struct attribute_spec m32c_attribute_table[] = {
2913 {"interrupt", 0, 0, false, false, false, interrupt_handler},
2914 {"bank_switch", 0, 0, false, false, false, interrupt_handler},
2915 {"fast_interrupt", 0, 0, false, false, false, interrupt_handler},
2916 {"function_vector", 1, 1, true, false, false, function_vector_handler},
2917 {0, 0, 0, 0, 0, 0, 0}
2920 #undef TARGET_COMP_TYPE_ATTRIBUTES
2921 #define TARGET_COMP_TYPE_ATTRIBUTES m32c_comp_type_attributes
2923 m32c_comp_type_attributes (const_tree type1 ATTRIBUTE_UNUSED,
2924 const_tree type2 ATTRIBUTE_UNUSED)
2926 /* 0=incompatible 1=compatible 2=warning */
2930 #undef TARGET_INSERT_ATTRIBUTES
2931 #define TARGET_INSERT_ATTRIBUTES m32c_insert_attributes
2933 m32c_insert_attributes (tree node ATTRIBUTE_UNUSED,
2934 tree * attr_ptr ATTRIBUTE_UNUSED)
2936 /* Nothing to do here. */
2941 /* This is a list of legal subregs of hard regs. */
2942 static const struct {
2943 unsigned char outer_mode_size;
2944 unsigned char inner_mode_size;
2945 unsigned char byte_mask;
2946 unsigned char legal_when;
2948 } legal_subregs[] = {
2949 {1, 2, 0x03, 1, R0_REGNO}, /* r0h r0l */
2950 {1, 2, 0x03, 1, R1_REGNO}, /* r1h r1l */
2951 {1, 2, 0x01, 1, A0_REGNO},
2952 {1, 2, 0x01, 1, A1_REGNO},
2954 {1, 4, 0x01, 1, A0_REGNO},
2955 {1, 4, 0x01, 1, A1_REGNO},
2957 {2, 4, 0x05, 1, R0_REGNO}, /* r2 r0 */
2958 {2, 4, 0x05, 1, R1_REGNO}, /* r3 r1 */
2959 {2, 4, 0x05, 16, A0_REGNO}, /* a1 a0 */
2960 {2, 4, 0x01, 24, A0_REGNO}, /* a1 a0 */
2961 {2, 4, 0x01, 24, A1_REGNO}, /* a1 a0 */
2963 {4, 8, 0x55, 1, R0_REGNO}, /* r3 r1 r2 r0 */
2966 /* Returns TRUE if OP is a subreg of a hard reg which we don't
2969 m32c_illegal_subreg_p (rtx op)
2973 int src_mode, dest_mode;
2975 if (GET_CODE (op) != SUBREG)
2978 dest_mode = GET_MODE (op);
2979 offset = SUBREG_BYTE (op);
2980 op = SUBREG_REG (op);
2981 src_mode = GET_MODE (op);
2983 if (GET_MODE_SIZE (dest_mode) == GET_MODE_SIZE (src_mode))
2985 if (GET_CODE (op) != REG)
2987 if (REGNO (op) >= MEM0_REGNO)
2990 offset = (1 << offset);
2992 for (i = 0; i < ARRAY_SIZE (legal_subregs); i ++)
2993 if (legal_subregs[i].outer_mode_size == GET_MODE_SIZE (dest_mode)
2994 && legal_subregs[i].regno == REGNO (op)
2995 && legal_subregs[i].inner_mode_size == GET_MODE_SIZE (src_mode)
2996 && legal_subregs[i].byte_mask & offset)
2998 switch (legal_subregs[i].legal_when)
3015 /* Returns TRUE if we support a move between the first two operands.
3016 At the moment, we just want to discourage mem to mem moves until
3017 after reload, because reload has a hard time with our limited
3018 number of address registers, and we can get into a situation where
3019 we need three of them when we only have two. */
3021 m32c_mov_ok (rtx * operands, enum machine_mode mode ATTRIBUTE_UNUSED)
3023 rtx op0 = operands[0];
3024 rtx op1 = operands[1];
3029 #define DEBUG_MOV_OK 0
3031 fprintf (stderr, "m32c_mov_ok %s\n", mode_name[mode]);
3036 if (GET_CODE (op0) == SUBREG)
3037 op0 = XEXP (op0, 0);
3038 if (GET_CODE (op1) == SUBREG)
3039 op1 = XEXP (op1, 0);
3041 if (GET_CODE (op0) == MEM
3042 && GET_CODE (op1) == MEM
3043 && ! reload_completed)
3046 fprintf (stderr, " - no, mem to mem\n");
3052 fprintf (stderr, " - ok\n");
3057 /* Returns TRUE if two consecutive HImode mov instructions, generated
3058 for moving an immediate double data to a double data type variable
3059 location, can be combined into single SImode mov instruction. */
3061 m32c_immd_dbl_mov (rtx * operands,
3062 enum machine_mode mode ATTRIBUTE_UNUSED)
3064 int flag = 0, okflag = 0, offset1 = 0, offset2 = 0, offsetsign = 0;
3068 if (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
3069 && MEM_SCALAR_P (operands[0])
3070 && !MEM_IN_STRUCT_P (operands[0])
3071 && GET_CODE (XEXP (operands[2], 0)) == CONST
3072 && GET_CODE (XEXP (XEXP (operands[2], 0), 0)) == PLUS
3073 && GET_CODE (XEXP (XEXP (XEXP (operands[2], 0), 0), 0)) == SYMBOL_REF
3074 && GET_CODE (XEXP (XEXP (XEXP (operands[2], 0), 0), 1)) == CONST_INT
3075 && MEM_SCALAR_P (operands[2])
3076 && !MEM_IN_STRUCT_P (operands[2]))
3079 else if (GET_CODE (XEXP (operands[0], 0)) == CONST
3080 && GET_CODE (XEXP (XEXP (operands[0], 0), 0)) == PLUS
3081 && GET_CODE (XEXP (XEXP (XEXP (operands[0], 0), 0), 0)) == SYMBOL_REF
3082 && MEM_SCALAR_P (operands[0])
3083 && !MEM_IN_STRUCT_P (operands[0])
3084 && !(INTVAL (XEXP (XEXP (XEXP (operands[0], 0), 0), 1)) %4)
3085 && GET_CODE (XEXP (operands[2], 0)) == CONST
3086 && GET_CODE (XEXP (XEXP (operands[2], 0), 0)) == PLUS
3087 && GET_CODE (XEXP (XEXP (XEXP (operands[2], 0), 0), 0)) == SYMBOL_REF
3088 && MEM_SCALAR_P (operands[2])
3089 && !MEM_IN_STRUCT_P (operands[2]))
3092 else if (GET_CODE (XEXP (operands[0], 0)) == PLUS
3093 && GET_CODE (XEXP (XEXP (operands[0], 0), 0)) == REG
3094 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == FB_REGNO
3095 && GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT
3096 && MEM_SCALAR_P (operands[0])
3097 && !MEM_IN_STRUCT_P (operands[0])
3098 && !(INTVAL (XEXP (XEXP (operands[0], 0), 1)) %4)
3099 && REGNO (XEXP (XEXP (operands[2], 0), 0)) == FB_REGNO
3100 && GET_CODE (XEXP (XEXP (operands[2], 0), 1)) == CONST_INT
3101 && MEM_SCALAR_P (operands[2])
3102 && !MEM_IN_STRUCT_P (operands[2]))
3111 str1 = XSTR (XEXP (operands[0], 0), 0);
3112 str2 = XSTR (XEXP (XEXP (XEXP (operands[2], 0), 0), 0), 0);
3113 if (strcmp (str1, str2) == 0)
3119 str1 = XSTR (XEXP (XEXP (XEXP (operands[0], 0), 0), 0), 0);
3120 str2 = XSTR (XEXP (XEXP (XEXP (operands[2], 0), 0), 0), 0);
3121 if (strcmp(str1,str2) == 0)
3127 offset1 = INTVAL (XEXP (XEXP (operands[0], 0), 1));
3128 offset2 = INTVAL (XEXP (XEXP (operands[2], 0), 1));
3129 offsetsign = offset1 >> ((sizeof (offset1) * 8) -1);
3130 if (((offset2-offset1) == 2) && offsetsign != 0)
3142 operands[4] = gen_rtx_MEM (SImode, XEXP (operands[0], 0));
3144 val = (INTVAL (operands[3]) << 16) + (INTVAL (operands[1]) & 0xFFFF);
3145 operands[5] = gen_rtx_CONST_INT (VOIDmode, val);
3155 /* Subregs are non-orthogonal for us, because our registers are all
3158 m32c_subreg (enum machine_mode outer,
3159 rtx x, enum machine_mode inner, int byte)
3163 /* Converting MEMs to different types that are the same size, we
3164 just rewrite them. */
3165 if (GET_CODE (x) == SUBREG
3166 && SUBREG_BYTE (x) == 0
3167 && GET_CODE (SUBREG_REG (x)) == MEM
3168 && (GET_MODE_SIZE (GET_MODE (x))
3169 == GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3172 x = gen_rtx_MEM (GET_MODE (x), XEXP (SUBREG_REG (x), 0));
3173 MEM_COPY_ATTRIBUTES (x, SUBREG_REG (oldx));
3176 /* Push/pop get done as smaller push/pops. */
3177 if (GET_CODE (x) == MEM
3178 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
3179 || GET_CODE (XEXP (x, 0)) == POST_INC))
3180 return gen_rtx_MEM (outer, XEXP (x, 0));
3181 if (GET_CODE (x) == SUBREG
3182 && GET_CODE (XEXP (x, 0)) == MEM
3183 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PRE_DEC
3184 || GET_CODE (XEXP (XEXP (x, 0), 0)) == POST_INC))
3185 return gen_rtx_MEM (outer, XEXP (XEXP (x, 0), 0));
3187 if (GET_CODE (x) != REG)
3188 return simplify_gen_subreg (outer, x, inner, byte);
3191 if (r >= FIRST_PSEUDO_REGISTER || r == AP_REGNO)
3192 return simplify_gen_subreg (outer, x, inner, byte);
3194 if (IS_MEM_REGNO (r))
3195 return simplify_gen_subreg (outer, x, inner, byte);
3197 /* This is where the complexities of our register layout are
3201 else if (outer == HImode)
3203 if (r == R0_REGNO && byte == 2)
3205 else if (r == R0_REGNO && byte == 4)
3207 else if (r == R0_REGNO && byte == 6)
3209 else if (r == R1_REGNO && byte == 2)
3211 else if (r == A0_REGNO && byte == 2)
3214 else if (outer == SImode)
3216 if (r == R0_REGNO && byte == 0)
3218 else if (r == R0_REGNO && byte == 4)
3223 fprintf (stderr, "m32c_subreg %s %s %d\n",
3224 mode_name[outer], mode_name[inner], byte);
3228 return gen_rtx_REG (outer, nr);
3231 /* Used to emit move instructions. We split some moves,
3232 and avoid mem-mem moves. */
3234 m32c_prepare_move (rtx * operands, enum machine_mode mode)
3236 if (TARGET_A16 && mode == PSImode)
3237 return m32c_split_move (operands, mode, 1);
3238 if ((GET_CODE (operands[0]) == MEM)
3239 && (GET_CODE (XEXP (operands[0], 0)) == PRE_MODIFY))
3241 rtx pmv = XEXP (operands[0], 0);
3242 rtx dest_reg = XEXP (pmv, 0);
3243 rtx dest_mod = XEXP (pmv, 1);
3245 emit_insn (gen_rtx_SET (Pmode, dest_reg, dest_mod));
3246 operands[0] = gen_rtx_MEM (mode, dest_reg);
3248 if (can_create_pseudo_p () && MEM_P (operands[0]) && MEM_P (operands[1]))
3249 operands[1] = copy_to_mode_reg (mode, operands[1]);
3253 #define DEBUG_SPLIT 0
3255 /* Returns TRUE if the given PSImode move should be split. We split
3256 for all r8c/m16c moves, since it doesn't support them, and for
3257 POP.L as we can only *push* SImode. */
3259 m32c_split_psi_p (rtx * operands)
3262 fprintf (stderr, "\nm32c_split_psi_p\n");
3263 debug_rtx (operands[0]);
3264 debug_rtx (operands[1]);
3269 fprintf (stderr, "yes, A16\n");
3273 if (GET_CODE (operands[1]) == MEM
3274 && GET_CODE (XEXP (operands[1], 0)) == POST_INC)
3277 fprintf (stderr, "yes, pop.l\n");
3282 fprintf (stderr, "no, default\n");
3287 /* Split the given move. SPLIT_ALL is 0 if splitting is optional
3288 (define_expand), 1 if it is not optional (define_insn_and_split),
3289 and 3 for define_split (alternate api). */
3291 m32c_split_move (rtx * operands, enum machine_mode mode, int split_all)
3294 int parts, si, di, rev = 0;
3295 int rv = 0, opi = 2;
3296 enum machine_mode submode = HImode;
3297 rtx *ops, local_ops[10];
3299 /* define_split modifies the existing operands, but the other two
3300 emit new insns. OPS is where we store the operand pairs, which
3311 /* Before splitting mem-mem moves, force one operand into a
3313 if (can_create_pseudo_p () && MEM_P (operands[0]) && MEM_P (operands[1]))
3316 fprintf (stderr, "force_reg...\n");
3317 debug_rtx (operands[1]);
3319 operands[1] = force_reg (mode, operands[1]);
3321 debug_rtx (operands[1]);
3328 fprintf (stderr, "\nsplit_move %d all=%d\n", !can_create_pseudo_p (),
3330 debug_rtx (operands[0]);
3331 debug_rtx (operands[1]);
3334 /* Note that split_all is not used to select the api after this
3335 point, so it's safe to set it to 3 even with define_insn. */
3336 /* None of the chips can move SI operands to sp-relative addresses,
3337 so we always split those. */
3338 if (m32c_extra_constraint_p (operands[0], 'S', "Ss"))
3341 /* We don't need to split these. */
3344 && (mode == SImode || mode == PSImode)
3345 && !(GET_CODE (operands[1]) == MEM
3346 && GET_CODE (XEXP (operands[1], 0)) == POST_INC))
3349 /* First, enumerate the subregs we'll be dealing with. */
3350 for (si = 0; si < parts; si++)
3353 m32c_subreg (submode, operands[0], mode,
3354 si * GET_MODE_SIZE (submode));
3356 m32c_subreg (submode, operands[1], mode,
3357 si * GET_MODE_SIZE (submode));
3360 /* Split pushes by emitting a sequence of smaller pushes. */
3361 if (GET_CODE (d[0]) == MEM && GET_CODE (XEXP (d[0], 0)) == PRE_DEC)
3363 for (si = parts - 1; si >= 0; si--)
3365 ops[opi++] = gen_rtx_MEM (submode,
3366 gen_rtx_PRE_DEC (Pmode,
3374 /* Likewise for pops. */
3375 else if (GET_CODE (s[0]) == MEM && GET_CODE (XEXP (s[0], 0)) == POST_INC)
3377 for (di = 0; di < parts; di++)
3380 ops[opi++] = gen_rtx_MEM (submode,
3381 gen_rtx_POST_INC (Pmode,
3389 /* if d[di] == s[si] for any di < si, we'll early clobber. */
3390 for (di = 0; di < parts - 1; di++)
3391 for (si = di + 1; si < parts; si++)
3392 if (reg_mentioned_p (d[di], s[si]))
3396 for (si = 0; si < parts; si++)
3402 for (si = parts - 1; si >= 0; si--)
3409 /* Now emit any moves we may have accumulated. */
3410 if (rv && split_all != 3)
3413 for (i = 2; i < opi; i += 2)
3414 emit_move_insn (ops[i], ops[i + 1]);
3419 /* The m32c has a number of opcodes that act like memcpy, strcmp, and
3420 the like. For the R8C they expect one of the addresses to be in
3421 R1L:An so we need to arrange for that. Otherwise, it's just a
3422 matter of picking out the operands we want and emitting the right
3423 pattern for them. All these expanders, which correspond to
3424 patterns in blkmov.md, must return nonzero if they expand the insn,
3425 or zero if they should FAIL. */
3427 /* This is a memset() opcode. All operands are implied, so we need to
3428 arrange for them to be in the right registers. The opcode wants
3429 addresses, not [mem] syntax. $0 is the destination (MEM:BLK), $1
3430 the count (HI), and $2 the value (QI). */
3432 m32c_expand_setmemhi(rtx *operands)
3434 rtx desta, count, val;
3437 desta = XEXP (operands[0], 0);
3438 count = operands[1];
3441 desto = gen_reg_rtx (Pmode);
3442 counto = gen_reg_rtx (HImode);
3444 if (GET_CODE (desta) != REG
3445 || REGNO (desta) < FIRST_PSEUDO_REGISTER)
3446 desta = copy_to_mode_reg (Pmode, desta);
3448 /* This looks like an arbitrary restriction, but this is by far the
3449 most common case. For counts 8..14 this actually results in
3450 smaller code with no speed penalty because the half-sized
3451 constant can be loaded with a shorter opcode. */
3452 if (GET_CODE (count) == CONST_INT
3453 && GET_CODE (val) == CONST_INT
3454 && ! (INTVAL (count) & 1)
3455 && (INTVAL (count) > 1)
3456 && (INTVAL (val) <= 7 && INTVAL (val) >= -8))
3458 unsigned v = INTVAL (val) & 0xff;
3460 count = copy_to_mode_reg (HImode, GEN_INT (INTVAL (count) / 2));
3461 val = copy_to_mode_reg (HImode, GEN_INT (v));
3463 emit_insn (gen_setmemhi_whi_op (desto, counto, val, desta, count));
3465 emit_insn (gen_setmemhi_wpsi_op (desto, counto, val, desta, count));
3469 /* This is the generalized memset() case. */
3470 if (GET_CODE (val) != REG
3471 || REGNO (val) < FIRST_PSEUDO_REGISTER)
3472 val = copy_to_mode_reg (QImode, val);
3474 if (GET_CODE (count) != REG
3475 || REGNO (count) < FIRST_PSEUDO_REGISTER)
3476 count = copy_to_mode_reg (HImode, count);
3479 emit_insn (gen_setmemhi_bhi_op (desto, counto, val, desta, count));
3481 emit_insn (gen_setmemhi_bpsi_op (desto, counto, val, desta, count));
3486 /* This is a memcpy() opcode. All operands are implied, so we need to
3487 arrange for them to be in the right registers. The opcode wants
3488 addresses, not [mem] syntax. $0 is the destination (MEM:BLK), $1
3489 is the source (MEM:BLK), and $2 the count (HI). */
3491 m32c_expand_movmemhi(rtx *operands)
3493 rtx desta, srca, count;
3494 rtx desto, srco, counto;
3496 desta = XEXP (operands[0], 0);
3497 srca = XEXP (operands[1], 0);
3498 count = operands[2];
3500 desto = gen_reg_rtx (Pmode);
3501 srco = gen_reg_rtx (Pmode);
3502 counto = gen_reg_rtx (HImode);
3504 if (GET_CODE (desta) != REG
3505 || REGNO (desta) < FIRST_PSEUDO_REGISTER)
3506 desta = copy_to_mode_reg (Pmode, desta);
3508 if (GET_CODE (srca) != REG
3509 || REGNO (srca) < FIRST_PSEUDO_REGISTER)
3510 srca = copy_to_mode_reg (Pmode, srca);
3512 /* Similar to setmem, but we don't need to check the value. */
3513 if (GET_CODE (count) == CONST_INT
3514 && ! (INTVAL (count) & 1)
3515 && (INTVAL (count) > 1))
3517 count = copy_to_mode_reg (HImode, GEN_INT (INTVAL (count) / 2));
3519 emit_insn (gen_movmemhi_whi_op (desto, srco, counto, desta, srca, count));
3521 emit_insn (gen_movmemhi_wpsi_op (desto, srco, counto, desta, srca, count));
3525 /* This is the generalized memset() case. */
3526 if (GET_CODE (count) != REG
3527 || REGNO (count) < FIRST_PSEUDO_REGISTER)
3528 count = copy_to_mode_reg (HImode, count);
3531 emit_insn (gen_movmemhi_bhi_op (desto, srco, counto, desta, srca, count));
3533 emit_insn (gen_movmemhi_bpsi_op (desto, srco, counto, desta, srca, count));
3538 /* This is a stpcpy() opcode. $0 is the destination (MEM:BLK) after
3539 the copy, which should point to the NUL at the end of the string,
3540 $1 is the destination (MEM:BLK), and $2 is the source (MEM:BLK).
3541 Since our opcode leaves the destination pointing *after* the NUL,
3542 we must emit an adjustment. */
3544 m32c_expand_movstr(rtx *operands)
3549 desta = XEXP (operands[1], 0);
3550 srca = XEXP (operands[2], 0);
3552 desto = gen_reg_rtx (Pmode);
3553 srco = gen_reg_rtx (Pmode);
3555 if (GET_CODE (desta) != REG
3556 || REGNO (desta) < FIRST_PSEUDO_REGISTER)
3557 desta = copy_to_mode_reg (Pmode, desta);
3559 if (GET_CODE (srca) != REG
3560 || REGNO (srca) < FIRST_PSEUDO_REGISTER)
3561 srca = copy_to_mode_reg (Pmode, srca);
3563 emit_insn (gen_movstr_op (desto, srco, desta, srca));
3564 /* desto ends up being a1, which allows this type of add through MOVA. */
3565 emit_insn (gen_addpsi3 (operands[0], desto, GEN_INT (-1)));
3570 /* This is a strcmp() opcode. $0 is the destination (HI) which holds
3571 <=>0 depending on the comparison, $1 is one string (MEM:BLK), and
3572 $2 is the other (MEM:BLK). We must do the comparison, and then
3573 convert the flags to a signed integer result. */
3575 m32c_expand_cmpstr(rtx *operands)
3579 src1a = XEXP (operands[1], 0);
3580 src2a = XEXP (operands[2], 0);
3582 if (GET_CODE (src1a) != REG
3583 || REGNO (src1a) < FIRST_PSEUDO_REGISTER)
3584 src1a = copy_to_mode_reg (Pmode, src1a);
3586 if (GET_CODE (src2a) != REG
3587 || REGNO (src2a) < FIRST_PSEUDO_REGISTER)
3588 src2a = copy_to_mode_reg (Pmode, src2a);
3590 emit_insn (gen_cmpstrhi_op (src1a, src2a, src1a, src2a));
3591 emit_insn (gen_cond_to_int (operands[0]));
3597 typedef rtx (*shift_gen_func)(rtx, rtx, rtx);
3599 static shift_gen_func
3600 shift_gen_func_for (int mode, int code)
3602 #define GFF(m,c,f) if (mode == m && code == c) return f
3603 GFF(QImode, ASHIFT, gen_ashlqi3_i);
3604 GFF(QImode, ASHIFTRT, gen_ashrqi3_i);
3605 GFF(QImode, LSHIFTRT, gen_lshrqi3_i);
3606 GFF(HImode, ASHIFT, gen_ashlhi3_i);
3607 GFF(HImode, ASHIFTRT, gen_ashrhi3_i);
3608 GFF(HImode, LSHIFTRT, gen_lshrhi3_i);
3609 GFF(PSImode, ASHIFT, gen_ashlpsi3_i);
3610 GFF(PSImode, ASHIFTRT, gen_ashrpsi3_i);
3611 GFF(PSImode, LSHIFTRT, gen_lshrpsi3_i);
3612 GFF(SImode, ASHIFT, TARGET_A16 ? gen_ashlsi3_16 : gen_ashlsi3_24);
3613 GFF(SImode, ASHIFTRT, TARGET_A16 ? gen_ashrsi3_16 : gen_ashrsi3_24);
3614 GFF(SImode, LSHIFTRT, TARGET_A16 ? gen_lshrsi3_16 : gen_lshrsi3_24);
3619 /* The m32c only has one shift, but it takes a signed count. GCC
3620 doesn't want this, so we fake it by negating any shift count when
3621 we're pretending to shift the other way. Also, the shift count is
3622 limited to -8..8. It's slightly better to use two shifts for 9..15
3623 than to load the count into r1h, so we do that too. */
3625 m32c_prepare_shift (rtx * operands, int scale, int shift_code)
3627 enum machine_mode mode = GET_MODE (operands[0]);
3628 shift_gen_func func = shift_gen_func_for (mode, shift_code);
3631 if (GET_CODE (operands[2]) == CONST_INT)
3633 int maxc = TARGET_A24 && (mode == PSImode || mode == SImode) ? 32 : 8;
3634 int count = INTVAL (operands[2]) * scale;
3636 while (count > maxc)
3638 temp = gen_reg_rtx (mode);
3639 emit_insn (func (temp, operands[1], GEN_INT (maxc)));
3643 while (count < -maxc)
3645 temp = gen_reg_rtx (mode);
3646 emit_insn (func (temp, operands[1], GEN_INT (-maxc)));
3650 emit_insn (func (operands[0], operands[1], GEN_INT (count)));
3654 temp = gen_reg_rtx (QImode);
3656 /* The pattern has a NEG that corresponds to this. */
3657 emit_move_insn (temp, gen_rtx_NEG (QImode, operands[2]));
3658 else if (TARGET_A16 && mode == SImode)
3659 /* We do this because the code below may modify this, we don't
3660 want to modify the origin of this value. */
3661 emit_move_insn (temp, operands[2]);
3663 /* We'll only use it for the shift, no point emitting a move. */
3666 if (TARGET_A16 && GET_MODE_SIZE (mode) == 4)
3668 /* The m16c has a limit of -16..16 for SI shifts, even when the
3669 shift count is in a register. Since there are so many targets
3670 of these shifts, it's better to expand the RTL here than to
3671 call a helper function.
3673 The resulting code looks something like this:
3685 We take advantage of the fact that "negative" shifts are
3686 undefined to skip one of the comparisons. */
3689 rtx label, lref, insn, tempvar;
3691 emit_move_insn (operands[0], operands[1]);
3694 label = gen_label_rtx ();
3695 lref = gen_rtx_LABEL_REF (VOIDmode, label);
3696 LABEL_NUSES (label) ++;
3698 tempvar = gen_reg_rtx (mode);
3700 if (shift_code == ASHIFT)
3702 /* This is a left shift. We only need check positive counts. */
3703 emit_jump_insn (gen_cbranchqi4 (gen_rtx_LE (VOIDmode, 0, 0),
3704 count, GEN_INT (16), label));
3705 emit_insn (func (tempvar, operands[0], GEN_INT (8)));
3706 emit_insn (func (operands[0], tempvar, GEN_INT (8)));
3707 insn = emit_insn (gen_addqi3 (count, count, GEN_INT (-16)));
3708 emit_label_after (label, insn);
3712 /* This is a right shift. We only need check negative counts. */
3713 emit_jump_insn (gen_cbranchqi4 (gen_rtx_GE (VOIDmode, 0, 0),
3714 count, GEN_INT (-16), label));
3715 emit_insn (func (tempvar, operands[0], GEN_INT (-8)));
3716 emit_insn (func (operands[0], tempvar, GEN_INT (-8)));
3717 insn = emit_insn (gen_addqi3 (count, count, GEN_INT (16)));
3718 emit_label_after (label, insn);
3720 operands[1] = operands[0];
3721 emit_insn (func (operands[0], operands[0], count));
3729 /* The m32c has a limited range of operations that work on PSImode
3730 values; we have to expand to SI, do the math, and truncate back to
3731 PSI. Yes, this is expensive, but hopefully gcc will learn to avoid
3734 m32c_expand_neg_mulpsi3 (rtx * operands)
3736 /* operands: a = b * i */
3737 rtx temp1; /* b as SI */
3738 rtx scale /* i as SI */;
3739 rtx temp2; /* a*b as SI */
3741 temp1 = gen_reg_rtx (SImode);
3742 temp2 = gen_reg_rtx (SImode);
3743 if (GET_CODE (operands[2]) != CONST_INT)
3745 scale = gen_reg_rtx (SImode);
3746 emit_insn (gen_zero_extendpsisi2 (scale, operands[2]));
3749 scale = copy_to_mode_reg (SImode, operands[2]);
3751 emit_insn (gen_zero_extendpsisi2 (temp1, operands[1]));
3752 temp2 = expand_simple_binop (SImode, MULT, temp1, scale, temp2, 1, OPTAB_LIB);
3753 emit_insn (gen_truncsipsi2 (operands[0], temp2));
3756 /* Pattern Output Functions */
3759 m32c_expand_movcc (rtx *operands)
3761 rtx rel = operands[1];
3764 if (GET_CODE (rel) != EQ && GET_CODE (rel) != NE)
3766 if (GET_CODE (operands[2]) != CONST_INT
3767 || GET_CODE (operands[3]) != CONST_INT)
3769 if (GET_CODE (rel) == NE)
3771 rtx tmp = operands[2];
3772 operands[2] = operands[3];
3774 rel = gen_rtx_EQ (GET_MODE (rel), XEXP (rel, 0), XEXP (rel, 1));
3777 emit_move_insn (operands[0],
3778 gen_rtx_IF_THEN_ELSE (GET_MODE (operands[0]),
3785 /* Used for the "insv" pattern. Return nonzero to fail, else done. */
3787 m32c_expand_insv (rtx *operands)
3792 if (INTVAL (operands[1]) != 1)
3795 /* Our insv opcode (bset, bclr) can only insert a one-bit constant. */
3796 if (GET_CODE (operands[3]) != CONST_INT)
3798 if (INTVAL (operands[3]) != 0
3799 && INTVAL (operands[3]) != 1
3800 && INTVAL (operands[3]) != -1)
3803 mask = 1 << INTVAL (operands[2]);
3806 if (GET_CODE (op0) == SUBREG
3807 && SUBREG_BYTE (op0) == 0)
3809 rtx sub = SUBREG_REG (op0);
3810 if (GET_MODE (sub) == HImode || GET_MODE (sub) == QImode)
3814 if (!can_create_pseudo_p ()
3815 || (GET_CODE (op0) == MEM && MEM_VOLATILE_P (op0)))
3819 src0 = gen_reg_rtx (GET_MODE (op0));
3820 emit_move_insn (src0, op0);
3823 if (GET_MODE (op0) == HImode
3824 && INTVAL (operands[2]) >= 8
3825 && GET_MODE (op0) == MEM)
3827 /* We are little endian. */
3828 rtx new_mem = gen_rtx_MEM (QImode, plus_constant (XEXP (op0, 0), 1));
3829 MEM_COPY_ATTRIBUTES (new_mem, op0);
3833 /* First, we generate a mask with the correct polarity. If we are
3834 storing a zero, we want an AND mask, so invert it. */
3835 if (INTVAL (operands[3]) == 0)
3837 /* Storing a zero, use an AND mask */
3838 if (GET_MODE (op0) == HImode)
3843 /* Now we need to properly sign-extend the mask in case we need to
3844 fall back to an AND or OR opcode. */
3845 if (GET_MODE (op0) == HImode)
3856 switch ( (INTVAL (operands[3]) ? 4 : 0)
3857 + ((GET_MODE (op0) == HImode) ? 2 : 0)
3858 + (TARGET_A24 ? 1 : 0))
3860 case 0: p = gen_andqi3_16 (op0, src0, GEN_INT (mask)); break;
3861 case 1: p = gen_andqi3_24 (op0, src0, GEN_INT (mask)); break;
3862 case 2: p = gen_andhi3_16 (op0, src0, GEN_INT (mask)); break;
3863 case 3: p = gen_andhi3_24 (op0, src0, GEN_INT (mask)); break;
3864 case 4: p = gen_iorqi3_16 (op0, src0, GEN_INT (mask)); break;
3865 case 5: p = gen_iorqi3_24 (op0, src0, GEN_INT (mask)); break;
3866 case 6: p = gen_iorhi3_16 (op0, src0, GEN_INT (mask)); break;
3867 case 7: p = gen_iorhi3_24 (op0, src0, GEN_INT (mask)); break;
3868 default: p = NULL_RTX; break; /* Not reached, but silences a warning. */
3876 m32c_scc_pattern(rtx *operands, RTX_CODE code)
3878 static char buf[30];
3879 if (GET_CODE (operands[0]) == REG
3880 && REGNO (operands[0]) == R0_REGNO)
3883 return "stzx\t#1,#0,r0l";
3885 return "stzx\t#0,#1,r0l";
3887 sprintf(buf, "bm%s\t0,%%h0\n\tand.b\t#1,%%0", GET_RTX_NAME (code));
3891 /* Encode symbol attributes of a SYMBOL_REF into its
3892 SYMBOL_REF_FLAGS. */
3894 m32c_encode_section_info (tree decl, rtx rtl, int first)
3896 int extra_flags = 0;
3898 default_encode_section_info (decl, rtl, first);
3899 if (TREE_CODE (decl) == FUNCTION_DECL
3900 && m32c_special_page_vector_p (decl))
3902 extra_flags = SYMBOL_FLAG_FUNCVEC_FUNCTION;
3905 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
3908 /* Returns TRUE if the current function is a leaf, and thus we can
3909 determine which registers an interrupt function really needs to
3910 save. The logic below is mostly about finding the insn sequence
3911 that's the function, versus any sequence that might be open for the
3914 m32c_leaf_function_p (void)
3916 rtx saved_first, saved_last;
3917 struct sequence_stack *seq;
3920 saved_first = crtl->emit.x_first_insn;
3921 saved_last = crtl->emit.x_last_insn;
3922 for (seq = crtl->emit.sequence_stack; seq && seq->next; seq = seq->next)
3926 crtl->emit.x_first_insn = seq->first;
3927 crtl->emit.x_last_insn = seq->last;
3930 rv = leaf_function_p ();
3932 crtl->emit.x_first_insn = saved_first;
3933 crtl->emit.x_last_insn = saved_last;
3937 /* Returns TRUE if the current function needs to use the ENTER/EXIT
3938 opcodes. If the function doesn't need the frame base or stack
3939 pointer, it can use the simpler RTS opcode. */
3941 m32c_function_needs_enter (void)
3944 struct sequence_stack *seq;
3945 rtx sp = gen_rtx_REG (Pmode, SP_REGNO);
3946 rtx fb = gen_rtx_REG (Pmode, FB_REGNO);
3948 insn = get_insns ();
3949 for (seq = crtl->emit.sequence_stack;
3951 insn = seq->first, seq = seq->next);
3955 if (reg_mentioned_p (sp, insn))
3957 if (reg_mentioned_p (fb, insn))
3959 insn = NEXT_INSN (insn);
3964 /* Mark all the subexpressions of the PARALLEL rtx PAR as
3965 frame-related. Return PAR.
3967 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
3968 PARALLEL rtx other than the first if they do not have the
3969 FRAME_RELATED flag set on them. So this function is handy for
3970 marking up 'enter' instructions. */
3972 m32c_all_frame_related (rtx par)
3974 int len = XVECLEN (par, 0);
3977 for (i = 0; i < len; i++)
3978 F (XVECEXP (par, 0, i));
3983 /* Emits the prologue. See the frame layout comment earlier in this
3984 file. We can reserve up to 256 bytes with the ENTER opcode, beyond
3985 that we manually update sp. */
3987 m32c_emit_prologue (void)
3989 int frame_size, extra_frame_size = 0, reg_save_size;
3990 int complex_prologue = 0;
3992 cfun->machine->is_leaf = m32c_leaf_function_p ();
3993 if (interrupt_p (cfun->decl))
3995 cfun->machine->is_interrupt = 1;
3996 complex_prologue = 1;
3998 else if (bank_switch_p (cfun->decl))
3999 warning (OPT_Wattributes,
4000 "%<bank_switch%> has no effect on non-interrupt functions");
4002 reg_save_size = m32c_pushm_popm (PP_justcount);
4004 if (interrupt_p (cfun->decl))
4006 if (bank_switch_p (cfun->decl))
4007 emit_insn (gen_fset_b ());
4008 else if (cfun->machine->intr_pushm)
4009 emit_insn (gen_pushm (GEN_INT (cfun->machine->intr_pushm)));
4013 m32c_initial_elimination_offset (FB_REGNO, SP_REGNO) - reg_save_size;
4015 && !m32c_function_needs_enter ())
4016 cfun->machine->use_rts = 1;
4018 if (frame_size > 254)
4020 extra_frame_size = frame_size - 254;
4023 if (cfun->machine->use_rts == 0)
4024 F (emit_insn (m32c_all_frame_related
4026 ? gen_prologue_enter_16 (GEN_INT (frame_size + 2))
4027 : gen_prologue_enter_24 (GEN_INT (frame_size + 4)))));
4029 if (extra_frame_size)
4031 complex_prologue = 1;
4033 F (emit_insn (gen_addhi3 (gen_rtx_REG (HImode, SP_REGNO),
4034 gen_rtx_REG (HImode, SP_REGNO),
4035 GEN_INT (-extra_frame_size))));
4037 F (emit_insn (gen_addpsi3 (gen_rtx_REG (PSImode, SP_REGNO),
4038 gen_rtx_REG (PSImode, SP_REGNO),
4039 GEN_INT (-extra_frame_size))));
4042 complex_prologue += m32c_pushm_popm (PP_pushm);
4044 /* This just emits a comment into the .s file for debugging. */
4045 if (complex_prologue)
4046 emit_insn (gen_prologue_end ());
4049 /* Likewise, for the epilogue. The only exception is that, for
4050 interrupts, we must manually unwind the frame as the REIT opcode
4053 m32c_emit_epilogue (void)
4055 /* This just emits a comment into the .s file for debugging. */
4056 if (m32c_pushm_popm (PP_justcount) > 0 || cfun->machine->is_interrupt)
4057 emit_insn (gen_epilogue_start ());
4059 m32c_pushm_popm (PP_popm);
4061 if (cfun->machine->is_interrupt)
4063 enum machine_mode spmode = TARGET_A16 ? HImode : PSImode;
4065 /* REIT clears B flag and restores $fp for us, but we still
4066 have to fix up the stack. USE_RTS just means we didn't
4068 if (!cfun->machine->use_rts)
4070 emit_move_insn (gen_rtx_REG (spmode, A0_REGNO),
4071 gen_rtx_REG (spmode, FP_REGNO));
4072 emit_move_insn (gen_rtx_REG (spmode, SP_REGNO),
4073 gen_rtx_REG (spmode, A0_REGNO));
4074 /* We can't just add this to the POPM because it would be in
4075 the wrong order, and wouldn't fix the stack if we're bank
4078 emit_insn (gen_pophi_16 (gen_rtx_REG (HImode, FP_REGNO)));
4080 emit_insn (gen_poppsi (gen_rtx_REG (PSImode, FP_REGNO)));
4082 if (!bank_switch_p (cfun->decl) && cfun->machine->intr_pushm)
4083 emit_insn (gen_popm (GEN_INT (cfun->machine->intr_pushm)));
4085 /* The FREIT (Fast REturn from InTerrupt) instruction should be
4086 generated only for M32C/M32CM targets (generate the REIT
4087 instruction otherwise). */
4088 if (fast_interrupt_p (cfun->decl))
4090 /* Check if fast_attribute is set for M32C or M32CM. */
4093 emit_jump_insn (gen_epilogue_freit ());
4095 /* If fast_interrupt attribute is set for an R8C or M16C
4096 target ignore this attribute and generated REIT
4100 warning (OPT_Wattributes,
4101 "%<fast_interrupt%> attribute directive ignored");
4102 emit_jump_insn (gen_epilogue_reit_16 ());
4105 else if (TARGET_A16)
4106 emit_jump_insn (gen_epilogue_reit_16 ());
4108 emit_jump_insn (gen_epilogue_reit_24 ());
4110 else if (cfun->machine->use_rts)
4111 emit_jump_insn (gen_epilogue_rts ());
4112 else if (TARGET_A16)
4113 emit_jump_insn (gen_epilogue_exitd_16 ());
4115 emit_jump_insn (gen_epilogue_exitd_24 ());
4120 m32c_emit_eh_epilogue (rtx ret_addr)
4122 /* R0[R2] has the stack adjustment. R1[R3] has the address to
4123 return to. We have to fudge the stack, pop everything, pop SP
4124 (fudged), and return (fudged). This is actually easier to do in
4125 assembler, so punt to libgcc. */
4126 emit_jump_insn (gen_eh_epilogue (ret_addr, cfun->machine->eh_stack_adjust));
4127 /* emit_clobber (gen_rtx_REG (HImode, R0L_REGNO)); */
4131 /* Indicate which flags must be properly set for a given conditional. */
4133 flags_needed_for_conditional (rtx cond)
4135 switch (GET_CODE (cond))
4159 /* Returns true if a compare insn is redundant because it would only
4160 set flags that are already set correctly. */
4162 m32c_compare_redundant (rtx cmp, rtx *operands)
4177 fprintf(stderr, "\n\033[32mm32c_compare_redundant\033[0m\n");
4181 fprintf(stderr, "operands[%d] = ", i);
4182 debug_rtx(operands[i]);
4186 next = next_nonnote_insn (cmp);
4187 if (!next || !INSN_P (next))
4190 fprintf(stderr, "compare not followed by insn\n");
4195 if (GET_CODE (PATTERN (next)) == SET
4196 && GET_CODE (XEXP ( PATTERN (next), 1)) == IF_THEN_ELSE)
4198 next = XEXP (XEXP (PATTERN (next), 1), 0);
4200 else if (GET_CODE (PATTERN (next)) == SET)
4202 /* If this is a conditional, flags_needed will be something
4203 other than FLAGS_N, which we test below. */
4204 next = XEXP (PATTERN (next), 1);
4209 fprintf(stderr, "compare not followed by conditional\n");
4215 fprintf(stderr, "conditional is: ");
4219 flags_needed = flags_needed_for_conditional (next);
4220 if (flags_needed == FLAGS_N)
4223 fprintf(stderr, "compare not followed by conditional\n");
4229 /* Compare doesn't set overflow and carry the same way that
4230 arithmetic instructions do, so we can't replace those. */
4231 if (flags_needed & FLAGS_OC)
4236 prev = prev_nonnote_insn (prev);
4240 fprintf(stderr, "No previous insn.\n");
4247 fprintf(stderr, "Previous insn is a non-insn.\n");
4251 pp = PATTERN (prev);
4252 if (GET_CODE (pp) != SET)
4255 fprintf(stderr, "Previous insn is not a SET.\n");
4259 pflags = get_attr_flags (prev);
4261 /* Looking up attributes of previous insns corrupted the recog
4263 INSN_UID (cmp) = -1;
4264 recog (PATTERN (cmp), cmp, 0);
4266 if (pflags == FLAGS_N
4267 && reg_mentioned_p (op0, pp))
4270 fprintf(stderr, "intermediate non-flags insn uses op:\n");
4276 /* Check for comparisons against memory - between volatiles and
4277 aliases, we just can't risk this one. */
4278 if (GET_CODE (operands[0]) == MEM
4279 || GET_CODE (operands[0]) == MEM)
4282 fprintf(stderr, "comparisons with memory:\n");
4288 /* Check for PREV changing a register that's used to compute a
4289 value in CMP, even if it doesn't otherwise change flags. */
4290 if (GET_CODE (operands[0]) == REG
4291 && rtx_referenced_p (SET_DEST (PATTERN (prev)), operands[0]))
4294 fprintf(stderr, "sub-value affected, op0:\n");
4299 if (GET_CODE (operands[1]) == REG
4300 && rtx_referenced_p (SET_DEST (PATTERN (prev)), operands[1]))
4303 fprintf(stderr, "sub-value affected, op1:\n");
4309 } while (pflags == FLAGS_N);
4311 fprintf(stderr, "previous flag-setting insn:\n");
4316 if (GET_CODE (pp) == SET
4317 && GET_CODE (XEXP (pp, 0)) == REG
4318 && REGNO (XEXP (pp, 0)) == FLG_REGNO
4319 && GET_CODE (XEXP (pp, 1)) == COMPARE)
4321 /* Adjacent cbranches must have the same operands to be
4323 rtx pop0 = XEXP (XEXP (pp, 1), 0);
4324 rtx pop1 = XEXP (XEXP (pp, 1), 1);
4326 fprintf(stderr, "adjacent cbranches\n");
4330 if (rtx_equal_p (op0, pop0)
4331 && rtx_equal_p (op1, pop1))
4334 fprintf(stderr, "prev cmp not same\n");
4339 /* Else the previous insn must be a SET, with either the source or
4340 dest equal to operands[0], and operands[1] must be zero. */
4342 if (!rtx_equal_p (op1, const0_rtx))
4345 fprintf(stderr, "operands[1] not const0_rtx\n");
4349 if (GET_CODE (pp) != SET)
4352 fprintf (stderr, "pp not set\n");
4356 if (!rtx_equal_p (op0, SET_SRC (pp))
4357 && !rtx_equal_p (op0, SET_DEST (pp)))
4360 fprintf(stderr, "operands[0] not found in set\n");
4366 fprintf(stderr, "cmp flags %x prev flags %x\n", flags_needed, pflags);
4368 if ((pflags & flags_needed) == flags_needed)
4374 /* Return the pattern for a compare. This will be commented out if
4375 the compare is redundant, else a normal pattern is returned. Thus,
4376 the assembler output says where the compare would have been. */
4378 m32c_output_compare (rtx insn, rtx *operands)
4380 static char templ[] = ";cmp.b\t%1,%0";
4383 templ[5] = " bwll"[GET_MODE_SIZE(GET_MODE(operands[0]))];
4384 if (m32c_compare_redundant (insn, operands))
4387 fprintf(stderr, "cbranch: cmp not needed\n");
4393 fprintf(stderr, "cbranch: cmp needed: `%s'\n", templ + 1);
4398 #undef TARGET_ENCODE_SECTION_INFO
4399 #define TARGET_ENCODE_SECTION_INFO m32c_encode_section_info
4401 /* If the frame pointer isn't used, we detect it manually. But the
4402 stack pointer doesn't have as flexible addressing as the frame
4403 pointer, so we always assume we have it. */
4405 #undef TARGET_FRAME_POINTER_REQUIRED
4406 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
4408 /* The Global `targetm' Variable. */
4410 struct gcc_target targetm = TARGET_INITIALIZER;
4412 #include "gt-m32c.h"