1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
45 #include "target-def.h"
47 #include "langhooks.h"
/* Forward declarations for file-local helpers and target hooks defined
   later in this file.  NOTE(review): this excerpt is sampled — the
   MI-thunk prototype below is cut mid-parameter-list.  */
52 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
53 static void xstormy16_asm_out_constructor (rtx, int);
54 static void xstormy16_asm_out_destructor (rtx, int);
55 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
58 static void xstormy16_init_builtins (void);
59 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
60 static bool xstormy16_rtx_costs (rtx, int, int, int *, bool);
61 static int xstormy16_address_cost (rtx, bool);
62 static bool xstormy16_return_in_memory (const_tree, const_tree);
/* Output section for "below 100" objects (8-bit addressable area);
   presumably initialized elsewhere in this file — TODO confirm.  */
64 static GTY(()) section *bss100_section;
66 /* Compute a (partial) cost for rtx X. Return true if the complete
67 cost has been computed, and false if subexpressions should be
68 scanned. In either case, *TOTAL contains the cost result. */
71 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
72 int *total, bool speed ATTRIBUTE_UNUSED)
/* Constant operands: values in [0,16) are costed at half an insn,
   values in [0,256) at one insn, everything else at two insns.
   NOTE(review): the enclosing switch on CODE and several case labels
   are elided in this excerpt.  */
77 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
78 *total = COSTS_N_INSNS (1) / 2;
79 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
80 *total = COSTS_N_INSNS (1);
82 *total = COSTS_N_INSNS (2);
89 *total = COSTS_N_INSNS (2);
/* Expensive operations (35+6 and 51-6 insn equivalents) — the case
   labels these belong to are not visible here; TODO confirm (likely
   multiply/divide).  */
93 *total = COSTS_N_INSNS (35 + 6);
96 *total = COSTS_N_INSNS (51 - 6);
/* Address cost hook: a plain CONST_INT address costs 2, a PLUS form 7.
   NOTE(review): the final arm of the conditional expression is elided
   in this excerpt.  */
105 xstormy16_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
107 return (GET_CODE (x) == CONST_INT ? 2
108 : GET_CODE (x) == PLUS ? 7
112 /* Branches are handled as follows:
114 1. HImode compare-and-branches. The machine supports these
115 natively, so the appropriate pattern is emitted directly.
117 2. SImode EQ and NE. These are emitted as pairs of HImode
118 compare-and-branches.
120 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
121 of a SImode subtract followed by a branch (not a compare-and-branch),
127 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
134 /* Emit a branch of kind CODE to location LOC. */
137 xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
139 rtx condition_rtx, loc_ref, branch, cy_clobber;
141 enum machine_mode mode;
/* Only HImode and SImode comparisons are handled.  */
143 mode = GET_MODE (op0);
144 gcc_assert (mode == HImode || mode == SImode);
/* SImode GT/LE/GTU/LEU: synthesized from an LT(U) branch plus an
   EQ/NE branch (case 4 in the comment above this function).  */
147 && (code == GT || code == LE || code == GTU || code == LEU))
149 int unsigned_p = (code == GTU || code == LEU);
150 int gt_p = (code == GT || code == GTU);
154 lab = gen_label_rtx ();
155 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
156 /* This should be generated as a comparison against the temporary
157 created by the previous insn, but reload can't handle that. */
158 xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
/* SImode EQ/NE against a nonzero operand: compare word by word,
   branching on any differing word (case 2 above).  */
163 else if (mode == SImode
164 && (code == NE || code == EQ)
165 && op1 != const0_rtx)
167 rtx op0_word, op1_word;
169 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
173 lab = gen_label_rtx ();
/* All but the last word: a mismatch decides the branch early.  */
175 for (i = 0; i < num_words - 1; i++)
177 op0_word = simplify_gen_subreg (word_mode, op0, mode,
179 op1_word = simplify_gen_subreg (word_mode, op1, mode,
181 xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
/* The last word carries the original comparison code.  */
183 op0_word = simplify_gen_subreg (word_mode, op0, mode,
185 op1_word = simplify_gen_subreg (word_mode, op1, mode,
187 xstormy16_emit_cbranch (code, op0_word, op1_word, loc);
194 /* We can't allow reload to try to generate any reload after a branch,
195 so when some register must match we must make the temporary ourselves. */
199 tmp = gen_reg_rtx (mode);
200 emit_move_insn (tmp, op0);
/* Build the branch pattern: (set pc (if_then_else cond ...)) plus a
   clobber of the carry register.  */
204 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
205 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
206 branch = gen_rtx_SET (VOIDmode, pc_rtx,
207 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
210 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
/* The parallel's shape depends on the comparison kind: carry clobber
   only, op0 clobber for EQ/NE, or a real subtract (or clobber) plus
   carry clobber for the remaining SImode codes.  */
213 vec = gen_rtvec (2, branch, cy_clobber);
214 else if (code == NE || code == EQ)
215 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
220 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
222 sub = gen_rtx_CLOBBER (SImode, op0);
224 vec = gen_rtvec (3, branch, sub, cy_clobber);
227 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
230 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
231 the arithmetic operation. Most of the work is done by
232 xstormy16_expand_arith. */
235 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
238 rtx op0 = XEXP (comparison, 0);
239 rtx op1 = XEXP (comparison, 1);
/* Emit the compare sequence, then patch the last emitted insn so its
   IF_THEN_ELSE uses the original comparison code and targets LABEL.  */
244 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
248 gcc_assert (INSN_P (seq));
/* Walk to the last insn of the emitted sequence.  */
251 while (NEXT_INSN (last_insn) != NULL_RTX)
252 last_insn = NEXT_INSN (last_insn);
254 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
255 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
256 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
261 /* Return the string to output a conditional branch to LABEL, which is
262 the operand number of the label.
264 OP is the conditional expression, or NULL for branch-always.
266 REVERSED is nonzero if we should reverse the sense of the comparison.
271 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
/* Static buffer returned to the caller — not reentrant.  */
273 static char string[64];
/* A long branch needs the b<cc>-around-jmpf form; the length attribute
   tells us which was selected.  */
274 int need_longbranch = (op != NULL_RTX
275 ? get_attr_length (insn) == 8
276 : get_attr_length (insn) == 4);
/* Long branches skip over a jmpf, so the condition is inverted.  */
277 int really_reversed = reversed ^ need_longbranch;
280 const char *operands;
289 sprintf (string, "%s %s", ccode, label);
293 code = GET_CODE (op);
/* Comparisons must have a register first; otherwise swap operands and
   the condition code to compensate.  */
295 if (GET_CODE (XEXP (op, 0)) != REG)
297 code = swap_condition (code);
303 /* Work out which way this really branches. */
305 code = reverse_condition (code);
/* Map RTL comparison codes to xstormy16 condition suffixes.  */
309 case EQ: ccode = "z"; break;
310 case NE: ccode = "nz"; break;
311 case GE: ccode = "ge"; break;
312 case LT: ccode = "lt"; break;
313 case GT: ccode = "gt"; break;
314 case LE: ccode = "le"; break;
315 case GEU: ccode = "nc"; break;
316 case LTU: ccode = "c"; break;
317 case GTU: ccode = "hi"; break;
318 case LEU: ccode = "ls"; break;
/* Long form: branch around an unconditional far jump.  */
325 templ = "b%s %s,.+8 | jmpf %s";
328 sprintf (string, templ, ccode, operands, label);
333 /* Return the string to output a conditional branch to LABEL, which is
334 the operand number of the label, but suitable for the tail of a
337 OP is the conditional expression (OP is never NULL_RTX).
339 REVERSED is nonzero if we should reverse the sense of the comparison.
344 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
/* Static buffer returned to the caller — not reentrant.  */
346 static char string[64];
347 int need_longbranch = get_attr_length (insn) >= 8;
/* A long branch jumps around a jmpf, inverting the condition.  */
348 int really_reversed = reversed ^ need_longbranch;
354 code = GET_CODE (op);
356 /* Work out which way this really branches. */
358 code = reverse_condition (code);
/* Only the codes reachable after SImode lowering are mapped here.  */
362 case EQ: ccode = "z"; break;
363 case NE: ccode = "nz"; break;
364 case GE: ccode = "ge"; break;
365 case LT: ccode = "lt"; break;
366 case GEU: ccode = "nc"; break;
367 case LTU: ccode = "c"; break;
369 /* The missing codes above should never be generated. */
/* EQ/NE on SImode: OR the two halves together first so the Z flag
   reflects the full 32-bit value.  */
380 gcc_assert (GET_CODE (XEXP (op, 0)) == REG);
382 regnum = REGNO (XEXP (op, 0));
383 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
/* Ordered comparisons: finish the 32-bit subtract with sbc.  */
387 case GE: case LT: case GEU: case LTU:
388 strcpy (prevop, "sbc %2,%3");
/* Long form branches around a far jump; short form branches direct.  */
396 templ = "%s | b%s .+6 | jmpf %s";
398 templ = "%s | b%s %s";
399 sprintf (string, templ, prevop, ccode, label);
404 /* Many machines have some registers that cannot be copied directly to or from
405 memory or even from other types of registers. An example is the `MQ'
406 register, which on most machines, can only be copied to or from general
407 registers, but not memory. Some machines allow copying all registers to and
408 from memory, but require a scratch register for stores to some memory
409 locations (e.g., those with symbolic address on the RT, and those with
410 certain symbolic address on the SPARC when compiling PIC). In some cases,
411 both an intermediate and a scratch register are required.
413 You should define these macros to indicate to the reload phase that it may
414 need to allocate at least one register for a reload in addition to the
415 register to contain the data. Specifically, if copying X to a register
416 RCLASS in MODE requires an intermediate register, you should define
417 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
418 whose registers can be used as intermediate registers or scratch registers.
420 If copying a register RCLASS in MODE to X requires an intermediate or scratch
421 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
422 largest register class required. If the requirements for input and output
423 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
424 instead of defining both macros identically.
426 The values returned by these macros are often `GENERAL_REGS'. Return
427 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
428 to or from a register of RCLASS in MODE without requiring a scratch register.
429 Do not define this macro if it would always return `NO_REGS'.
431 If a scratch register is required (either with or without an intermediate
432 register), you should define patterns for `reload_inM' or `reload_outM', as
433 required. These patterns, which will normally be implemented with a
434 `define_expand', should be similar to the `movM' patterns, except that
435 operand 2 is the scratch register.
437 Define constraints for the reload register and scratch register that contain
438 a single register class. If the original reload register (whose class is
439 RCLASS) can meet the constraint given in the pattern, the value returned by
440 these macros is used for the class of the scratch register. Otherwise, two
441 additional reload registers are required. Their classes are obtained from
442 the constraints in the insn pattern.
444 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
445 either be in a hard register or in memory. Use `true_regnum' to find out;
446 it will return -1 if the pseudo is in memory and the hard register number if
449 These macros should not be used in the case where a particular class of
450 registers can only be copied to memory and not to another class of
451 registers. In that case, secondary reload registers are not needed and
452 would not be helpful. Instead, a stack location must be used to perform the
453 copy and the `movM' pattern should use memory as an intermediate storage.
454 This case often occurs between floating-point and general registers. */
/* Secondary-reload hook: return the class of an intermediate register
   needed to move X into RCLASS, if any.  NOTE(review): the function
   head, return statements and braces are partly elided here.  */
457 xstormy16_secondary_reload_class (enum reg_class rclass,
458 enum machine_mode mode,
461 /* This chip has the interesting property that only the first eight
462 registers can be moved to/from memory. */
/* X is (or may end up) in memory: a MEM, or a REG/SUBREG whose hard
   register is unknown (-1) or still a pseudo.  */
463 if ((GET_CODE (x) == MEM
464 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
465 && (true_regnum (x) == -1
466 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
467 && ! reg_class_subset_p (rclass, EIGHT_REGS))
/* Preferred-reload-class hook: narrow GENERAL_REGS for memory operands,
   since only the low registers can access memory on this chip.
   NOTE(review): the returned class is elided in this excerpt.  */
474 xstormy16_preferred_reload_class (rtx x, enum reg_class rclass)
476 if (rclass == GENERAL_REGS
477 && GET_CODE (x) == MEM)
483 /* Predicate for symbols and addresses that reflect special 8-bit
487 xstormy16_below100_symbol (rtx x,
488 enum machine_mode mode ATTRIBUTE_UNUSED)
/* Strip a CONST wrapper and a (symbol + const_int) offset before
   inspecting the underlying symbol.  */
490 if (GET_CODE (x) == CONST)
492 if (GET_CODE (x) == PLUS
493 && GET_CODE (XEXP (x, 1)) == CONST_INT)
/* A symbol qualifies if it carries the target-specific below-100
   flag set when the attribute was processed.  */
496 if (GET_CODE (x) == SYMBOL_REF)
497 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
/* Literal addresses qualify in two windows: 0x0000-0x00ff and
   0x7f00-0x7fff.  */
499 if (GET_CODE (x) == CONST_INT)
501 HOST_WIDE_INT i = INTVAL (x);
502 if ((i >= 0x0000 && i <= 0x00ff)
503 || (i >= 0x7f00 && i <= 0x7fff))
509 /* Likewise, but only for non-volatile MEMs, for patterns where the
510 MEM will get split into smaller sized accesses. */
513 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
/* Volatile MEMs must not be split into partial accesses; reject them
   before delegating to the general below-100 predicate.  */
515 if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
517 return xstormy16_below100_operand (x, mode);
520 /* Expand an 8-bit IOR. This either detects the one case we can
521 actually do, or uses a 16-bit IOR. */
524 xstormy16_expand_iorqi3 (rtx *operands)
526 rtx in, out, outsub, val;
/* Fast path: a single-bit set can be done with the native QImode
   set-bit instruction, provided operands are below-100 or registers.  */
532 if (xstormy16_onebit_set_operand (val, QImode))
534 if (!xstormy16_below100_or_register (in, QImode))
535 in = copy_to_mode_reg (QImode, in);
536 if (!xstormy16_below100_or_register (out, QImode))
537 out = gen_reg_rtx (QImode);
538 emit_insn (gen_iorqi3_internal (out, in, val));
539 if (out != operands[0])
540 emit_move_insn (operands[0], out);
/* General case: force everything into registers and do a 16-bit IOR
   on HImode subregs of the QImode values.  */
544 if (GET_CODE (in) != REG)
545 in = copy_to_mode_reg (QImode, in);
546 if (GET_CODE (val) != REG
547 && GET_CODE (val) != CONST_INT)
548 val = copy_to_mode_reg (QImode, val);
549 if (GET_CODE (out) != REG)
550 out = gen_reg_rtx (QImode);
552 in = simplify_gen_subreg (HImode, in, QImode, 0);
553 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
554 if (GET_CODE (val) != CONST_INT)
555 val = simplify_gen_subreg (HImode, val, QImode, 0);
557 emit_insn (gen_iorhi3 (outsub, in, val));
/* Copy back only when a temporary was used for the destination.  */
559 if (out != operands[0])
560 emit_move_insn (operands[0], out);
563 /* Expand an 8-bit AND. This either detects the one case we can
564 actually do, or uses a 16-bit AND. */
567 xstormy16_expand_andqi3 (rtx *operands)
569 rtx in, out, outsub, val;
/* Fast path: a single-bit clear maps onto the native QImode
   clear-bit instruction (mirror of the IOR expander above).  */
575 if (xstormy16_onebit_clr_operand (val, QImode))
577 if (!xstormy16_below100_or_register (in, QImode))
578 in = copy_to_mode_reg (QImode, in);
579 if (!xstormy16_below100_or_register (out, QImode))
580 out = gen_reg_rtx (QImode);
581 emit_insn (gen_andqi3_internal (out, in, val));
582 if (out != operands[0])
583 emit_move_insn (operands[0], out);
/* General case: registers plus a 16-bit AND on HImode subregs.  */
587 if (GET_CODE (in) != REG)
588 in = copy_to_mode_reg (QImode, in);
589 if (GET_CODE (val) != REG
590 && GET_CODE (val) != CONST_INT)
591 val = copy_to_mode_reg (QImode, val);
592 if (GET_CODE (out) != REG)
593 out = gen_reg_rtx (QImode);
595 in = simplify_gen_subreg (HImode, in, QImode, 0);
596 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
597 if (GET_CODE (val) != CONST_INT)
598 val = simplify_gen_subreg (HImode, val, QImode, 0);
600 emit_insn (gen_andhi3 (outsub, in, val));
602 if (out != operands[0])
603 emit_move_insn (operands[0], out);
/* True if X (plus OFFSET) is a CONST_INT in the signed 12-bit range
   [-2048, 2047] — the reach of a register+offset address.  */
606 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
607 (GET_CODE (X) == CONST_INT \
608 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
/* True if X (plus OFFSET) is a CONST_INT usable as an absolute address:
   within [0, 0x7fff] and additionally either below 0x100 or at/above
   0x7f00 (the two directly addressable windows).  */
610 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
611 (GET_CODE (X) == CONST_INT \
612 && INTVAL (X) + (OFFSET) >= 0 \
613 && INTVAL (X) + (OFFSET) < 0x8000 \
614 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
/* TARGET_LEGITIMATE_ADDRESS_P: accept absolute constants in the legal
   windows, reg+12-bit-offset, pre-modify/post-inc/pre-dec, plain base
   registers, and below-100 symbols.  NOTE(review): several return
   statements and braces are elided in this excerpt.  */
617 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
620 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
623 if (GET_CODE (x) == PLUS
624 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
627 /* PR 31232: Do not allow INT+INT as an address. */
628 if (GET_CODE (x) == CONST_INT)
632 if ((GET_CODE (x) == PRE_MODIFY
633 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
634 || GET_CODE (x) == POST_INC
635 || GET_CODE (x) == PRE_DEC)
/* In strict mode only hard registers are acceptable bases.  */
638 if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
639 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
642 if (xstormy16_below100_symbol (x, mode))
648 /* Return nonzero if memory address X (an RTX) can have different
649 meanings depending on the machine mode of the memory reference it
650 is used for or if the address is valid for some modes but not
653 Autoincrement and autodecrement addresses typically have mode-dependent
654 effects because the amount of the increment or decrement is the size of the
655 operand being addressed. Some machines have other mode-dependent addresses.
656 Many RISC machines have no mode-dependent addresses.
658 You may assume that ADDR is a valid address for the machine.
660 On this chip, this is true if the address is valid with an offset
661 of 0 but not of 6, because in that case it cannot be used as an
662 address for DImode or DFmode, or if the address is a post-increment
663 or pre-decrement address. */
/* See the comment block above: an address is mode-dependent when it is
   valid at offset 0 but not at offset 6 (so wide modes like DImode
   cannot use it).  NOTE(review): return statements are elided.  */
666 xstormy16_mode_dependent_address_p (rtx x)
668 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
669 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
672 if (GET_CODE (x) == PLUS
673 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
674 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
677 if (GET_CODE (x) == PLUS)
680 /* Auto-increment addresses are now treated generically in recog.c. */
684 /* A C expression that defines the optional machine-dependent constraint
685 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
686 types of operands, usually memory references, for the target machine.
687 Normally this macro will not be defined. If it is required for a particular
688 target machine, it should return 1 if VALUE corresponds to the operand type
689 represented by the constraint letter C. If C is not defined as an extra
690 constraint, the value returned should be 0 regardless of VALUE. */
693 xstormy16_extra_constraint_p (rtx x, int c)
/* NOTE(review): the switch head and case labels are elided in this
   excerpt; the comments below identify each constraint arm.  */
697 /* 'Q' is for pushes. */
699 return (GET_CODE (x) == MEM
700 && GET_CODE (XEXP (x, 0)) == POST_INC
701 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
703 /* 'R' is for pops. */
705 return (GET_CODE (x) == MEM
706 && GET_CODE (XEXP (x, 0)) == PRE_DEC
707 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
709 /* 'S' is for immediate memory addresses. */
711 return (GET_CODE (x) == MEM
712 && GET_CODE (XEXP (x, 0)) == CONST_INT
713 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
717 /* Not implemented yet. */
720 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
721 for allocating a scratch register for 32-bit shifts. */
723 return (GET_CODE (x) == CONST_INT
724 && (INTVAL (x) < 2 || INTVAL (x) > 15));
726 /* 'Z' is for CONST_INT value zero. This is for adding zero to
727 a register in addhi3, which would otherwise require a carry. */
729 return (GET_CODE (x) == CONST_INT
730 && (INTVAL (x) == 0));
733 return xstormy16_below100_operand (x, GET_MODE (x));
/* Predicate: a memory operand whose address is not a PLUS (i.e. no
   base+offset form).  */
741 short_memory_operand (rtx x, enum machine_mode mode)
743 if (! memory_operand (x, mode))
745 return (GET_CODE (XEXP (x, 0)) != PLUS);
748 /* Splitter for the 'move' patterns, for modes not directly implemented
749 by hardware. Emit insns to copy a value of mode MODE from SRC to
752 This function is only called when reload_completed. */
755 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
757 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
758 int direction, end, i;
759 int src_modifies = 0;
760 int dest_modifies = 0;
761 int src_volatile = 0;
762 int dest_volatile = 0;
764 rtx auto_inc_reg_rtx = NULL_RTX;
766 /* Check initial conditions. */
767 gcc_assert (reload_completed
768 && mode != QImode && mode != HImode
769 && nonimmediate_operand (dest, mode)
770 && general_operand (src, mode));
772 /* This case is not supported below, and shouldn't be generated. */
773 gcc_assert (GET_CODE (dest) != MEM || GET_CODE (src) != MEM);
775 /* This case is very very bad after reload, so trap it now. */
776 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
778 /* The general idea is to copy by words, offsetting the source and
779 destination. Normally the least-significant word will be copied
780 first, but for pre-dec operations it's better to copy the
781 most-significant word first. Only one operand can be a pre-dec
784 It's also possible that the copy overlaps so that the direction
/* Record side effects/volatility of a MEM destination; the volatile
   flag is cleared on a copy so the word moves are not each volatile.  */
788 if (GET_CODE (dest) == MEM)
790 mem_operand = XEXP (dest, 0);
791 dest_modifies = side_effects_p (mem_operand);
792 if (auto_inc_p (mem_operand))
793 auto_inc_reg_rtx = XEXP (mem_operand, 0);
794 dest_volatile = MEM_VOLATILE_P (dest);
797 dest = copy_rtx (dest);
798 MEM_VOLATILE_P (dest) = 0;
/* Same treatment for a MEM source.  */
801 else if (GET_CODE (src) == MEM
803 mem_operand = XEXP (src, 0);
804 src_modifies = side_effects_p (mem_operand);
805 if (auto_inc_p (mem_operand))
806 auto_inc_reg_rtx = XEXP (mem_operand, 0);
807 src_volatile = MEM_VOLATILE_P (src);
810 src = copy_rtx (src);
811 MEM_VOLATILE_P (src) = 0;
815 mem_operand = NULL_RTX;
/* Choose the copy direction to avoid clobbering data we still need:
   reg-reg overlap, pre-dec addressing, or a MEM source whose address
   uses the destination registers.  */
817 if (mem_operand == NULL_RTX)
819 if (GET_CODE (src) == REG
820 && GET_CODE (dest) == REG
821 && reg_overlap_mentioned_p (dest, src)
822 && REGNO (dest) > REGNO (src))
825 else if (GET_CODE (mem_operand) == PRE_DEC
826 || (GET_CODE (mem_operand) == PLUS
827 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
829 else if (GET_CODE (src) == MEM
830 && reg_overlap_mentioned_p (dest, src))
834 gcc_assert (GET_CODE (dest) == REG);
835 regno = REGNO (dest);
837 gcc_assert (refers_to_regno_p (regno, regno + num_words,
840 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
842 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
846 /* This means something like
847 (set (reg:DI r0) (mem:DI (reg:HI r1)))
848 which we'd need to support by doing the set of the second word
/* Emit one word_mode move per word, in the chosen direction.  */
853 end = direction < 0 ? -1 : num_words;
854 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
856 rtx w_src, w_dest, insn;
859 w_src = gen_rtx_MEM (word_mode, mem_operand);
861 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
863 MEM_VOLATILE_P (w_src) = 1;
865 w_dest = gen_rtx_MEM (word_mode, mem_operand);
867 w_dest = simplify_gen_subreg (word_mode, dest, mode,
870 MEM_VOLATILE_P (w_dest) = 1;
872 /* The simplify_subreg calls must always be able to simplify. */
873 gcc_assert (GET_CODE (w_src) != SUBREG
874 && GET_CODE (w_dest) != SUBREG);
/* Propagate a REG_INC note when the address auto-increments.  */
876 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
877 if (auto_inc_reg_rtx)
878 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
884 /* Expander for the 'move' patterns. Emit insns to copy a value of
885 mode MODE from SRC to DEST. */
888 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
/* A PRE_MODIFY destination address: update the base register with an
   explicit add (clobbering carry), then address through the plain reg.  */
890 if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
892 rtx pmv = XEXP (dest, 0);
893 rtx dest_reg = XEXP (pmv, 0);
894 rtx dest_mod = XEXP (pmv, 1);
895 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
896 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
898 dest = gen_rtx_MEM (mode, dest_reg);
899 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
/* Same transformation for a PRE_MODIFY source address.  */
901 else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
903 rtx pmv = XEXP (src, 0);
904 rtx src_reg = XEXP (pmv, 0);
905 rtx src_mod = XEXP (pmv, 1);
906 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
907 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
909 src = gen_rtx_MEM (mode, src_reg);
910 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
913 /* There are only limited immediate-to-memory move instructions. */
914 if (! reload_in_progress
915 && ! reload_completed
916 && GET_CODE (dest) == MEM
917 && (GET_CODE (XEXP (dest, 0)) != CONST_INT
918 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
919 && ! xstormy16_below100_operand (dest, mode)
920 && GET_CODE (src) != REG
921 && GET_CODE (src) != SUBREG)
922 src = copy_to_mode_reg (mode, src);
924 /* Don't emit something we would immediately split. */
926 && mode != HImode && mode != QImode)
928 xstormy16_split_move (mode, dest, src);
932 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
937 The stack is laid out as follows:
941 Register save area (up to 4 words)
942 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
944 AP-> Return address (two words)
945 9th procedure parameter word
946 10th procedure parameter word
948 last procedure parameter word
950 The frame pointer location is tuned to make it most likely that all
951 parameters and local variables can be accessed using a load-indexed
954 /* A structure to describe the layout. */
955 struct xstormy16_stack_layout
957 /* Size of the topmost three items on the stack. */
959 int register_save_size;
960 int stdarg_save_size;
961 /* Sum of the above items. */
963 /* Various offsets. */
964 int first_local_minus_ap;
969 /* Does REGNO need to be saved? */
/* A register needs saving if it is live and call-saved, or — in an
   interrupt function (IFUN) — if it is a non-fixed call-used register
   other than carry, and is either live or the function is not a leaf.  */
970 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
971 ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
972 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
973 && (REGNUM != CARRY_REGNUM) \
974 && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
976 /* Compute the stack layout. */
978 struct xstormy16_stack_layout
979 xstormy16_compute_stack_layout (void)
981 struct xstormy16_stack_layout layout;
983 const int ifun = xstormy16_interrupt_function_p ();
985 layout.locals_size = get_frame_size ();
/* One word per register that must be saved in the prologue.  */
987 layout.register_save_size = 0;
988 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
989 if (REG_NEEDS_SAVE (regno, ifun))
990 layout.register_save_size += UNITS_PER_WORD;
/* stdarg functions dump all argument registers to the stack.  */
993 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
995 layout.stdarg_save_size = 0;
997 layout.frame_size = (layout.locals_size
998 + layout.register_save_size
999 + layout.stdarg_save_size);
/* Place the frame pointer so that, when possible, both incoming args
   and locals are within the 2048-byte load-indexed reach.  */
1001 if (crtl->args.size <= 2048 && crtl->args.size != -1)
1003 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
1004 + crtl->args.size <= 2048)
1005 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
1007 layout.fp_minus_ap = 2048 - crtl->args.size;
1010 layout.fp_minus_ap = (layout.stdarg_save_size
1011 + layout.register_save_size
1012 - INCOMING_FRAME_SP_OFFSET);
1013 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
1014 - layout.fp_minus_ap);
1015 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
1019 /* Determine how all the special registers get eliminated. */
1022 xstormy16_initial_elimination_offset (int from, int to)
1024 struct xstormy16_stack_layout layout;
1027 layout = xstormy16_compute_stack_layout ();
/* Translate each (FROM, TO) elimination pair into an offset derived
   from the computed layout; unlisted pairs are not eliminable here.  */
1029 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1030 result = layout.sp_minus_fp - layout.locals_size;
1031 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1032 result = - layout.locals_size;
1033 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1034 result = - layout.fp_minus_ap;
1035 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1036 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
/* Emit DEST = SRC0 + SRC1 as the post-reload addhi3 form: the add
   parallel with an explicit carry-register clobber.  */
1044 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1046 rtx set, clobber, insn;
1048 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1049 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1050 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1054 /* Called after register allocation to add any instructions needed for
1055 the prologue. Using a prologue insn is favored compared to putting
1056 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1057 since it allows the scheduler to intermix instructions with the
1058 saves of the caller saved registers. In some cases, it might be
1059 necessary to emit a barrier instruction as the last insn to prevent
1062 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1063 so that the debug info generation code can handle them properly. */
1066 xstormy16_expand_prologue (void)
1068 struct xstormy16_stack_layout layout;
1072 const int ifun = xstormy16_interrupt_function_p ();
/* A push on this target is a store through a post-incremented SP.  */
1074 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1075 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1077 layout = xstormy16_compute_stack_layout ();
1079 if (layout.locals_size >= 32768)
1080 error ("local variable memory requirements exceed capacity");
1082 /* Save the argument registers if necessary. */
1083 if (layout.stdarg_save_size)
1084 for (regno = FIRST_ARGUMENT_REGISTER;
1085 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1089 rtx reg = gen_rtx_REG (HImode, regno);
1091 insn = emit_move_insn (mem_push_rtx, reg);
1092 RTX_FRAME_RELATED_P (insn) = 1;
/* Hand-build the dwarf note: the post_inc store is expressed as a
   plain store plus an SP adjustment for the unwinder.  */
1094 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1096 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1097 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1099 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1100 plus_constant (stack_pointer_rtx,
1101 GET_MODE_SIZE (Pmode)));
1102 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1105 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1106 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1109 /* Push each of the registers to save. */
1110 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1111 if (REG_NEEDS_SAVE (regno, ifun))
1114 rtx reg = gen_rtx_REG (HImode, regno);
1116 insn = emit_move_insn (mem_push_rtx, reg);
1117 RTX_FRAME_RELATED_P (insn) = 1;
/* Same two-part dwarf note as for the stdarg saves above.  */
1119 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1121 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1122 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1124 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1125 plus_constant (stack_pointer_rtx,
1126 GET_MODE_SIZE (Pmode)));
1127 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1130 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1131 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1134 /* It's just possible that the SP here might be what we need for
1136 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1138 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1139 RTX_FRAME_RELATED_P (insn) = 1;
1142 /* Allocate space for local variables. */
1143 if (layout.locals_size)
1145 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1146 GEN_INT (layout.locals_size));
1147 RTX_FRAME_RELATED_P (insn) = 1;
1150 /* Set up the frame pointer, if required. */
1151 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1153 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1154 RTX_FRAME_RELATED_P (insn) = 1;
1156 if (layout.sp_minus_fp)
1158 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1159 hard_frame_pointer_rtx,
1160 GEN_INT (- layout.sp_minus_fp));
1161 RTX_FRAME_RELATED_P (insn) = 1;
1166 /* Do we need an epilogue at all? */
1169 direct_return (void)
/* A bare return is possible only after reload, when the frame is
   known to be completely empty.  */
1171 return (reload_completed
1172 && xstormy16_compute_stack_layout ().frame_size == 0);
1175 /* Called after register allocation to add any instructions needed for
1176 the epilogue. Using an epilogue insn is favored compared to putting
1177 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1178 since it allows the scheduler to intermix instructions with the
1179 saves of the caller saved registers. In some cases, it might be
1180 necessary to emit a barrier instruction as the last insn to prevent
1184 xstormy16_expand_epilogue (void)
1186 struct xstormy16_stack_layout layout;
1187 rtx mem_pop_rtx, insn;
1189 const int ifun = xstormy16_interrupt_function_p ();
/* A pop is a load through a pre-decremented SP (mirror of the
   prologue's post-inc push).  */
1191 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1192 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1194 layout = xstormy16_compute_stack_layout ();
1196 /* Pop the stack for the locals. */
1197 if (layout.locals_size)
/* Restoring SP from FP is cheaper when FP already points there.  */
1199 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1200 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1202 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1203 GEN_INT (- layout.locals_size));
1206 /* Restore any call-saved registers. */
/* Reverse order of the prologue pushes.  */
1207 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1208 if (REG_NEEDS_SAVE (regno, ifun))
1209 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1211 /* Pop the stack for the stdarg save area. */
1212 if (layout.stdarg_save_size)
1213 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1214 GEN_INT (- layout.stdarg_save_size));
/* Interrupt functions return with a dedicated pattern.  */
1218 emit_jump_insn (gen_return_internal_interrupt ());
1220 emit_jump_insn (gen_return_internal ());
/* Return nonzero if register REGNO is considered used by the epilogue:
   after reload, any call-used register that the current function (in
   particular an interrupt handler) had to save will be restored by the
   epilogue and so must be treated as live at function exit.  */
1224 xstormy16_epilogue_uses (int regno)
1226 if (reload_completed && call_used_regs[regno])
1228 const int ifun = xstormy16_interrupt_function_p ();
1229 return REG_NEEDS_SAVE (regno, ifun);
/* Profiling (-p/-pg) is not implemented for this target; sorry() reports
   a "not supported" diagnostic instead of emitting profiler code.  */
1235 xstormy16_function_profiler (void)
1237 sorry ("function_profiler support");
1240 /* Return an updated summarizer variable CUM to advance past an
1241 argument in the argument list. The values MODE, TYPE and NAMED
1242 describe that argument. Once this is done, the variable CUM is
1243 suitable for analyzing the *following* argument with
1244 `FUNCTION_ARG', etc.
1246 This function need not do anything if the argument in question was
1247 passed on the stack. The compiler knows how to track the amount of
1248 stack space used for arguments without any special help. However,
1249 it makes life easier for xstormy16_build_va_list if it does update
1253 xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
1254 tree type, int named ATTRIBUTE_UNUSED)
1256 /* If an argument would otherwise be passed partially in registers,
1257 and partially on the stack, the whole of it is passed on the
/* CUM counts argument words already consumed; an argument that would
   straddle the register/stack boundary bumps CUM to the limit so it is
   wholly on the stack.  */
1259 if (cum < NUM_ARGUMENT_REGISTERS
1260 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1261 cum = NUM_ARGUMENT_REGISTERS;
1263 cum += XSTORMY16_WORD_SIZE (type, mode);
/* Worker for FUNCTION_ARG: return the rtx for the register in which to
   pass this argument, presumably NULL_RTX (elided lines) when it must go
   on the stack -- TODO confirm the elided return paths.
   Register arguments start at hard register CUM + 2.  */
1269 xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
1270 tree type, int named ATTRIBUTE_UNUSED)
/* VOIDmode marks the end-of-arguments sentinel.  */
1272 if (mode == VOIDmode)
/* Stack-only when the ABI demands it, or when the argument would not
   fit entirely in the remaining argument registers.  */
1274 if (targetm.calls.must_pass_in_stack (mode, type)
1275 || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1277 return gen_rtx_REG (mode, cum + 2);
1280 /* Build the va_list type.
1282 For this chip, va_list is a record containing a counter and a pointer.
1283 The counter is of type 'int' and indicates how many bytes
1284 have been used to date. The pointer indicates the stack position
1285 for arguments that have not been passed in registers.
1286 To keep the layout nice, the pointer is first in the structure. */
1289 xstormy16_build_builtin_va_list (void)
1291 tree f_1, f_2, record, type_decl;
1293 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1294 type_decl = build_decl (BUILTINS_LOCATION,
1295 TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* Field 1: "base" -- pointer to the stack argument area (type line elided).  */
1297 f_1 = build_decl (BUILTINS_LOCATION,
1298 FIELD_DECL, get_identifier ("base"),
/* Field 2: "count" -- bytes of arguments consumed so far.  */
1300 f_2 = build_decl (BUILTINS_LOCATION,
1301 FIELD_DECL, get_identifier ("count"),
1302 unsigned_type_node);
1304 DECL_FIELD_CONTEXT (f_1) = record;
1305 DECL_FIELD_CONTEXT (f_2) = record;
/* Wire the record up: name it, chain the two fields, and lay it out.  */
1307 TREE_CHAIN (record) = type_decl;
1308 TYPE_NAME (record) = type_decl;
1309 TYPE_FIELDS (record) = f_1;
1310 TREE_CHAIN (f_1) = f_2;
1312 layout_type (record);
1317 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1318 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1319 variable to initialize. NEXTARG is the machine independent notion of the
1320 'next' argument after the variable arguments. */
1323 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1325 tree f_base, f_count;
/* va_start is meaningless in interrupt handlers (no normal call frame).  */
1329 if (xstormy16_interrupt_function_p ())
1330 error ("cannot use va_start in interrupt function")
/* Fields of the va_list record built above: "base" first, then "count".  */
1332 f_base = TYPE_FIELDS (va_list_type_node);
1333 f_count = TREE_CHAIN (f_base);
1335 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1336 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
/* base = virtual incoming-args pointer, biased down by the incoming
   SP offset so the va_arg arithmetic below works from a fixed anchor.  */
1339 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1340 u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
1341 u = fold_convert (TREE_TYPE (count), u);
1342 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), t, u);
1343 t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
1344 TREE_SIDE_EFFECTS (t) = 1;
1345 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* count = bytes of named arguments already consumed (argument words
   times UNITS_PER_WORD, from crtl->args.info).  */
1347 t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
1348 build_int_cst (NULL_TREE,
1349 crtl->args.info * UNITS_PER_WORD));
1350 TREE_SIDE_EFFECTS (t) = 1;
1351 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1354 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1355 of type va_list as a tree, TYPE is the type passed to va_arg.
1356 Note: This algorithm is documented in stormy-abi. */
1359 xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
1360 gimple_seq *post_p ATTRIBUTE_UNUSED)
1362 tree f_base, f_count;
1364 tree count_tmp, addr, t;
1365 tree lab_gotaddr, lab_fromstack;
1366 int size, size_of_reg_args, must_stack;
/* Decompose the va_list: base (stack anchor) and count (bytes used).  */
1369 f_base = TYPE_FIELDS (va_list_type_node);
1370 f_count = TREE_CHAIN (f_base);
1372 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1373 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
/* Argument size rounded up to a whole number of words.  */
1376 must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
1377 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1378 gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
1380 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1382 count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
1383 lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
1384 lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
1385 addr = create_tmp_var (ptr_type_node, NULL);
/* If count + size would overrun the register-save area, the argument
   came from the stack: branch to lab_fromstack.  */
1391 t = fold_convert (TREE_TYPE (count), size_tree);
1392 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1393 r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
1394 t = build2 (GT_EXPR, boolean_type_node, t, r);
1395 t = build3 (COND_EXPR, void_type_node, t,
1396 build1 (GOTO_EXPR, void_type_node, lab_fromstack),
1398 gimplify_and_add (t, pre_p);
/* Register case: the argument lives at base + count.  */
1400 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, base, count_tmp);
1401 gimplify_assign (addr, t, pre_p);
1403 t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
1404 gimplify_and_add (t, pre_p);
1406 t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
1407 gimplify_and_add (t, pre_p);
1410 /* Arguments larger than a word might need to skip over some
1411 registers, since arguments are either passed entirely in
1412 registers or entirely on the stack. */
1413 size = PUSH_ROUNDING (int_size_in_bytes (type));
1414 if (size > 2 || size < 0 || must_stack)
/* count_tmp = max (count_tmp, size_of_reg_args): skip the rest of the
   register area before addressing stack arguments.  */
1418 r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
1419 u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);
1421 t = fold_convert (TREE_TYPE (count), r);
1422 t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
1423 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
1424 gimplify_and_add (t, pre_p);
/* Stack case: addr = base - (count_tmp - reg_area - sp_offset + size),
   i.e. stack args are addressed downward from base (per stormy-abi).  */
1427 t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1428 + INCOMING_FRAME_SP_OFFSET);
1429 t = fold_convert (TREE_TYPE (count), t);
1430 t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
1431 t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
1432 fold_convert (TREE_TYPE (count), size_tree));
1433 t = fold_convert (TREE_TYPE (t), fold (t));
1434 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1435 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
1436 gimplify_assign (addr, t, pre_p);
1438 t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
1439 gimplify_and_add (t, pre_p);
/* Commit the advanced counter back into the real va_list.  */
1441 t = fold_convert (TREE_TYPE (count), size_tree);
1442 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1443 gimplify_assign (count, t, pre_p);
1445 addr = fold_convert (build_pointer_type (type), addr);
1446 return build_va_arg_indirect_ref (addr);
1449 /* Initialize the variable parts of a trampoline. ADDR is an RTX for
1450 the address of the trampoline; FNADDR is an RTX for the address of
1451 the nested function; STATIC_CHAIN is an RTX for the static chain
1452 value that should be passed to the function when it is called. */
1455 xstormy16_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
1457 rtx reg_addr = gen_reg_rtx (Pmode);
1458 rtx temp = gen_reg_rtx (HImode);
1459 rtx reg_fnaddr = gen_reg_rtx (HImode);
/* reg_addr walks through the trampoline two bytes (one HImode word) at
   a time; each store below writes one 16-bit word of code/data.  */
1462 reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);
1464 emit_move_insn (reg_addr, addr);
/* Word 0: 0x3130 | STATIC_CHAIN_REGNUM -- presumably the encoding of a
   "load immediate into the static-chain register" insn; TODO confirm
   against the xstormy16 ISA.  */
1465 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1466 emit_move_insn (reg_addr_mem, temp);
1467 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Word 1: the static chain value itself (immediate operand).  */
1468 emit_move_insn (temp, static_chain);
1469 emit_move_insn (reg_addr_mem, temp);
1470 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Words 2-3: a jump to FNADDR, split as (0x0200 | low 8 bits) then the
   high bits -- presumably a 'jmpf' encoding; TODO confirm.  */
1471 emit_move_insn (reg_fnaddr, fnaddr);
1472 emit_move_insn (temp, reg_fnaddr);
1473 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1474 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1475 emit_move_insn (reg_addr_mem, temp);
1476 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1477 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1478 emit_move_insn (reg_addr_mem, reg_fnaddr);
1481 /* Worker function for FUNCTION_VALUE. */
1484 xstormy16_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
1486 enum machine_mode mode;
1487 mode = TYPE_MODE (valtype);
/* Small values are widened per the target's promotion rules before
   being placed in the return register.  */
1488 PROMOTE_MODE (mode, 0, valtype);
1489 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1492 /* A C compound statement that outputs the assembler code for a thunk function,
1493 used to implement C++ virtual function calls with multiple inheritance. The
1494 thunk acts as a wrapper around a virtual function, adjusting the implicit
1495 object parameter before handing control off to the real function.
1497 First, emit code to add the integer DELTA to the location that contains the
1498 incoming first argument. Assume that this argument contains a pointer, and
1499 is the one used to pass the `this' pointer in C++. This is the incoming
1500 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1501 addition must preserve the values of all other incoming arguments.
1503 After the addition, emit code to jump to FUNCTION, which is a
1504 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1505 the return address. Hence returning from FUNCTION will return to whoever
1506 called the current `thunk'.
1508 The effect must be as if @var{function} had been called directly
1509 with the adjusted first argument. This macro is responsible for
1510 emitting all of the code for a thunk function;
1511 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1514 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1515 extracted from it.) It might possibly be useful on some targets, but
1519 xstormy16_asm_output_mi_thunk (FILE *file,
1520 tree thunk_fndecl ATTRIBUTE_UNUSED,
1521 HOST_WIDE_INT delta,
1522 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1525 int regnum = FIRST_ARGUMENT_REGISTER;
1527 /* There might be a hidden first argument for a returned structure. */
/* If so, `this' is in the next register over; the increment of regnum
   is elided from this view -- TODO confirm.  */
1528 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
/* add <this-reg>,#delta ; jmpf <function> -- adjust `this' then tail-jump.  */
1531 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1532 fputs ("\tjmpf ", file);
1533 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1537 /* The purpose of this function is to override the default behavior of
1538 BSS objects. Normally, they go into .bss or .sbss via ".common"
1539 directives, but we need to override that and put them in
1540 .bss_below100. We can't just use a section override (like we do
1541 for .data_below100), because that makes them initialized rather
1542 than uninitialized. */
1545 xstormy16_asm_output_aligned_common (FILE *stream,
/* Below-100 path: the symbol carries SYMBOL_FLAG_XSTORMY16_BELOW100
   (set by xstormy16_encode_section_info), so emit the object directly
   into the .bss_below100 section instead of using .comm/.local.  */
1552 rtx mem = DECL_RTL (decl);
1556 && GET_CODE (mem) == MEM
1557 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1558 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1563 switch_to_section (bss100_section);
/* Emit the label by hand: .globl (if public), alignment, type, size,
   then reserve SIZE bytes with .space.  */
1571 name2 = default_strip_name_encoding (name);
1573 fprintf (stream, "\t.globl\t%s\n", name2);
1575 fprintf (stream, "\t.p2align %d\n", p2align);
1576 fprintf (stream, "\t.type\t%s, @object\n", name2);
1577 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1578 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
/* Default path: ordinary common symbols use .local (for statics) and
   .comm with the byte alignment.  */
1584 fprintf (stream, "\t.local\t");
1585 assemble_name (stream, name);
1586 fprintf (stream, "\n");
1588 fprintf (stream, "\t.comm\t");
1589 assemble_name (stream, name);
1590 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1593 /* Implement TARGET_ASM_INIT_SECTIONS. */
1596 xstormy16_asm_init_sections (void)
/* Create the writable, NOBITS ".bss_below100" section used by
   xstormy16_asm_output_aligned_common for below-100 BSS objects.  */
1599 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
1600 output_section_asm_op,
1601 "\t.section \".bss_below100\",\"aw\",@nobits");
1604 /* Mark symbols with the "below100" attribute so that we can use the
1605 special addressing modes for them. */
1608 xstormy16_encode_section_info (tree decl, rtx r, int first)
1610 default_encode_section_info (decl, r, first);
/* Both spellings of the attribute ("below100"/"BELOW100") mark a
   variable; record the fact as a flag on its SYMBOL_REF so later RTL
   passes can see it without the tree.  */
1612 if (TREE_CODE (decl) == VAR_DECL
1613 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1614 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1616 rtx symbol = XEXP (r, 0);
1618 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1619 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
/* Point the constructor/destructor output hooks at the local
   implementations, which keep .ctors/.dtors non-writable.  */
1623 #undef TARGET_ASM_CONSTRUCTOR
1624 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1625 #undef TARGET_ASM_DESTRUCTOR
1626 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1628 /* Output constructors and destructors. Just like
1629 default_named_section_asm_out_* but don't set the sections writable. */
1632 xstormy16_asm_out_destructor (rtx symbol, int priority)
1634 const char *section = ".dtors";
1637 /* ??? This only works reliably with the GNU linker. */
/* Non-default priorities go into ".dtors.NNNNN" subsections.  */
1638 if (priority != DEFAULT_INIT_PRIORITY)
1640 sprintf (buf, ".dtors.%.5u",
1641 /* Invert the numbering so the linker puts us in the proper
1642 order; constructors are run from right to left, and the
1643 linker sorts in increasing order. */
1644 MAX_INIT_PRIORITY - priority);
/* Flags of 0 (no SECTION_WRITE) keep the section read-only.  */
1648 switch_to_section (get_section (section, 0, NULL));
1649 assemble_align (POINTER_SIZE);
1650 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Constructor counterpart of xstormy16_asm_out_destructor: emit a
   pointer-sized entry for SYMBOL in a non-writable .ctors section.  */
1654 xstormy16_asm_out_constructor (rtx symbol, int priority)
1656 const char *section = ".ctors";
1659 /* ??? This only works reliably with the GNU linker. */
1660 if (priority != DEFAULT_INIT_PRIORITY)
1662 sprintf (buf, ".ctors.%.5u",
1663 /* Invert the numbering so the linker puts us in the proper
1664 order; constructors are run from right to left, and the
1665 linker sorts in increasing order. */
1666 MAX_INIT_PRIORITY - priority);
/* Flags of 0 (no SECTION_WRITE) keep the section read-only.  */
1670 switch_to_section (get_section (section, 0, NULL));
1671 assemble_align (POINTER_SIZE);
1672 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1675 /* Print a memory address as an operand to reference that memory location. */
1678 xstormy16_print_operand_address (FILE *file, rtx address)
1680 HOST_WIDE_INT offset;
1681 int pre_dec, post_inc;
1683 /* There are a few easy cases. */
/* Absolute constant address: print it as a 16-bit value.  */
1684 if (GET_CODE (address) == CONST_INT)
1686 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
/* Symbolic constants and labels print as-is.  */
1690 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1692 output_addr_const (file, address);
1696 /* Otherwise, it's hopefully something of the form
1697 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
/* Peel off a constant displacement, if any.  */
1698 if (GET_CODE (address) == PLUS)
1700 gcc_assert (GET_CODE (XEXP (address, 1)) == CONST_INT);
1701 offset = INTVAL (XEXP (address, 1));
1702 address = XEXP (address, 0);
/* Then peel off pre-decrement / post-increment to reach the base reg.
   NOTE(review): the lines printing the --/++ markers are elided from
   this view.  */
1707 pre_dec = (GET_CODE (address) == PRE_DEC);
1708 post_inc = (GET_CODE (address) == POST_INC);
1709 if (pre_dec || post_inc)
1710 address = XEXP (address, 0);
1712 gcc_assert (GET_CODE (address) == REG);
1717 fputs (reg_names [REGNO (address)], file);
1721 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1725 /* Print an operand to an assembler instruction. */
/* CODE selects a format: 'B' (and presumably 'C' -- case labels are
   elided from this view) prints the bit number of a one-bit mask; 'o'/'O'
   prints an immediate less one (negated first for 'O'); 'b' prints a
   shift mask; the default path prints registers, memory, and constants.  */
1728 xstormy16_print_operand (FILE *file, rtx x, int code)
1733 /* There is either one bit set, or one bit clear, in X.
1734 Print it preceded by '#'. */
/* bits_set[n] = popcount of n for n in 0..7; used to decide whether the
   low bits look like a single-bit (vs single-clear-bit) mask.  */
1736 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1737 HOST_WIDE_INT xx = 1;
1740 if (GET_CODE (x) == CONST_INT)
1743 output_operand_lossage ("'B' operand is not constant");
1745 /* GCC sign-extends masks with the MSB set, so we have to
1746 detect all the cases that differ only in sign extension
1747 beyond the bits we care about. Normally, the predicates
1748 and constraints ensure that we have the right values. This
1749 works correctly for valid masks. */
1750 if (bits_set[xx & 7] <= 1)
1752 /* Remove sign extension bits. */
/* One-bit-set case: mask off the sign-extension and take log2.  */
1753 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1755 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1757 l = exact_log2 (xx);
1761 /* Add sign extension bits. */
/* One-bit-clear case: force the sign-extension on and take log2 of
   the complement.  */
1762 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1763 xx |= ~(HOST_WIDE_INT)0xff;
1764 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1765 xx |= ~(HOST_WIDE_INT)0xffff;
1766 l = exact_log2 (~xx);
1770 output_operand_lossage ("'B' operand has multiple bits set");
1772 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1777 /* Print the symbol without a surrounding @fptr(). */
1778 if (GET_CODE (x) == SYMBOL_REF)
1779 assemble_name (file, XSTR (x, 0));
1780 else if (GET_CODE (x) == LABEL_REF)
1781 output_asm_label (x);
1783 xstormy16_print_operand_address (file, x);
1788 /* Print the immediate operand less one, preceded by '#'.
1789 For 'O', negate it first. */
1791 HOST_WIDE_INT xx = 0;
1793 if (GET_CODE (x) == CONST_INT)
1796 output_operand_lossage ("'o' operand is not constant");
1801 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1806 /* Print the shift mask for bp/bn. */
1808 HOST_WIDE_INT xx = 1;
1811 if (GET_CODE (x) == CONST_INT)
1814 output_operand_lossage ("'B' operand is not constant");
1818 fputs (IMMEDIATE_PREFIX, file);
1819 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1824 /* Handled below. */
1828 output_operand_lossage ("xstormy16_print_operand: unknown code");
/* No (or pass-through) code: print the operand by its RTX class.  */
1832 switch (GET_CODE (x))
1835 fputs (reg_names [REGNO (x)], file);
1839 xstormy16_print_operand_address (file, XEXP (x, 0));
1843 /* Some kind of constant or label; an immediate operand,
1844 so prefix it with '#' for the assembler. */
1845 fputs (IMMEDIATE_PREFIX, file);
1846 output_addr_const (file, x);
1853 /* Expander for the `casesi' pattern.
1854 INDEX is the index of the switch statement.
1855 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1856 to the first table entry.
1857 RANGE is the number of table entries.
1858 TABLE is an ADDR_VEC that is the jump table.
1859 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1860 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1863 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1864 rtx table, rtx default_label)
1866 HOST_WIDE_INT range_i = INTVAL (range);
1869 /* This code uses 'br', so it can deal only with tables of size up to
/* Each table entry is a 4-byte jmpf (see xstormy16_output_addr_vec), so
   8192 entries is the 'br' reach limit.  */
1871 if (range_i >= 8192)
1872 sorry ("switch statement of size %lu entries too large",
1873 (unsigned long) range_i);
/* index -= lower_bound; unsigned-compare against range and branch to
   the default label when out of bounds.  */
1875 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1877 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
/* Scale the HImode index by 4 (shift left 2) to step over the 4-byte
   jmpf entries, then dispatch pc-relative into the table.  */
1879 int_index = gen_lowpart_common (HImode, index);
1880 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1881 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1884 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1885 instructions, without label or alignment or any other special
1886 constructs. We know that the previous instruction will be the
1887 `tablejump_pcrel' output above.
1889 TODO: it might be nice to output 'br' instructions if they could
1893 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1897 switch_to_section (current_function_section ());
/* One 'jmpf <label>' per table entry, in order.  */
1899 vlen = XVECLEN (table, 0);
1900 for (idx = 0; idx < vlen; idx++)
1902 fputs ("\tjmpf ", file);
1903 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1908 /* Expander for the `call' patterns.
1909 INDEX is the index of the switch statement.
1910 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1911 to the first table entry.
1912 RANGE is the number of table entries.
1913 TABLE is an ADDR_VEC that is the jump table.
1914 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1915 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
/* NOTE(review): the comment above describes casesi, not this function;
   actual parameters are RETVAL (value destination or null), DEST (the
   MEM call target) and COUNTER.  */
1918 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1921 enum machine_mode mode;
1923 gcc_assert (GET_CODE (dest) == MEM);
1924 dest = XEXP (dest, 0);
/* Indirect calls need the target address in a register.  */
1926 if (! CONSTANT_P (dest)
1927 && GET_CODE (dest) != REG)
1928 dest = force_reg (Pmode, dest);
1933 mode = GET_MODE (retval);
/* Build (call ...) and, when a value is returned, wrap it in a SET.  */
1935 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1938 call = gen_rtx_SET (VOIDmode, retval, call);
/* For non-constant targets, add a (use temp) with temp = 0; the purpose
   of this extra operand is not visible here -- TODO confirm against the
   call patterns in the .md file.  */
1940 if (! CONSTANT_P (dest))
1942 temp = gen_reg_rtx (HImode);
1943 emit_move_insn (temp, const0_rtx);
1948 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1949 gen_rtx_USE (VOIDmode, temp)));
1950 emit_call_insn (call);
1953 /* Expanders for multiword computational operations. */
1955 /* Expander for arithmetic operations; emit insns to compute
1957 (set DEST (CODE:MODE SRC0 SRC1))
1959 When CODE is COMPARE, a branch template is generated
1960 (this saves duplicating code in xstormy16_split_cbranch). */
1963 xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
1964 rtx dest, rtx src0, rtx src1)
/* Operate one 16-bit word at a time, low word first, carrying between
   words via the CARRY register.  */
1966 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1971 emit_move_insn (src0, const0_rtx);
1973 for (i = 0; i < num_words; i++)
1975 rtx w_src0, w_src1, w_dest;
/* Word-sized views of the operands at word offset I.  */
1978 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1979 i * UNITS_PER_WORD);
1980 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1981 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
/* Addition: skip a word whose addend is the constant 0 (the surrounding
   control flow is elided), otherwise use the carry-in/carry-out add
   patterns -- presumably addchi4 for the first word and addchi5
   (with carry-in) thereafter; TODO confirm against the .md file.  */
1987 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1991 insn = gen_addchi4 (w_dest, w_src0, w_src1);
1993 insn = gen_addchi5 (w_dest, w_src0, w_src1);
/* COMPARE: for the most-significant word build the full
   (parallel [branch, subtract-with-borrow, clobber CARRY]) template
   used by xstormy16_split_cbranch.  */
1999 if (code == COMPARE && i == num_words - 1)
2001 rtx branch, sub, clobber, sub_1;
2003 sub_1 = gen_rtx_MINUS (HImode, w_src0,
2004 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
2005 sub = gen_rtx_SET (VOIDmode, w_dest,
2006 gen_rtx_MINUS (HImode, sub_1, w_src1));
2007 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
2008 branch = gen_rtx_SET (VOIDmode, pc_rtx,
2009 gen_rtx_IF_THEN_ELSE (VOIDmode,
2015 insn = gen_rtx_PARALLEL (VOIDmode,
2016 gen_rtvec (3, branch, sub, clobber));
/* Subtraction words: same zero-skip and carry chaining as addition.  */
2020 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
2023 insn = gen_subchi4 (w_dest, w_src0, w_src1);
2025 insn = gen_subchi5 (w_dest, w_src0, w_src1);
/* Logical ops (no carry): skip identity words -- the constant is 0 for
   IOR/XOR and -1 for AND (note -(code == AND)).  */
2031 if (GET_CODE (w_src1) == CONST_INT
2032 && INTVAL (w_src1) == -(code == AND)
2035 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
/* NOT is a plain per-word one's complement.  */
2040 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
2051 /* If we emit nothing, try_split() will think we failed. So emit
2052 something that does nothing and can be optimized away. */
2057 /* The shift operations are split at output time for constant values;
2058 variable-width shifts get handed off to a library routine.
2060 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2061 SIZE_R will be a CONST_INT, X will be a hard register. */
2064 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2065 rtx x, rtx size_r, rtx temp)
2068 const char *r0, *r1, *rt;
/* Only constant SImode shifts of a register pair are handled here.  */
2071 gcc_assert (GET_CODE (size_r) == CONST_INT
2072 && GET_CODE (x) == REG && mode == SImode);
2073 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
/* r0 = low word, r1 = high word of the 32-bit value.  */
2078 r0 = reg_names [REGNO (x)];
2079 r1 = reg_names [REGNO (x) + 1];
2081 /* For shifts of size 1, we can use the rotate instructions. */
/* shl/asr/shr on one word, then rotate the carry into the other.  */
2087 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2090 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2093 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2101 /* For large shifts, there are easy special cases. */
/* Shift by exactly 16: pure word move plus zero/sign fill.  */
2107 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2110 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2113 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
/* Shift by 17..31: word move, fill, then shift the remainder.  */
2125 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2126 r1, r0, r0, r1, (int) size - 16);
2129 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2130 r0, r1, r1, r0, (int) size - 16);
2133 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2134 r0, r1, r1, r0, (int) size - 16);
2142 /* For the rest, we have to do more work. In particular, we
2143 need a temporary. */
/* General 2..15 case: save the word that loses bits in TEMP, shift both
   words, then OR the bits that crossed the word boundary back in.  */
2144 rt = reg_names [REGNO (temp)];
2149 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2150 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2155 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2156 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2161 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2162 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2171 /* Attribute handling. */
2173 /* Return nonzero if the function is an interrupt function. */
2176 xstormy16_interrupt_function_p (void)
/* Check the current function's type attributes for "interrupt".  */
2180 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2181 any functions are declared, which is demonstrably wrong, but
2182 it is worked around here. FIXME. */
2186 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2187 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
/* Machine-specific attributes: "interrupt" on function types, and the
   two spellings of "below100" on variables/types.  */
2190 #undef TARGET_ATTRIBUTE_TABLE
2191 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2193 static tree xstormy16_handle_interrupt_attribute
2194 (tree *, tree, tree, int, bool *);
2195 static tree xstormy16_handle_below100_attribute
2196 (tree *, tree, tree, int, bool *);
2198 static const struct attribute_spec xstormy16_attribute_table[] =
2200 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler. */
2201 { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
2202 { "BELOW100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2203 { "below100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2204 { NULL, 0, 0, false, false, false, NULL }
2207 /* Handle an "interrupt" attribute;
2208 arguments as in struct attribute_spec.handler. */
2211 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2212 tree args ATTRIBUTE_UNUSED,
2213 int flags ATTRIBUTE_UNUSED,
/* "interrupt" is valid only on function types; warn and drop it
   anywhere else.  */
2216 if (TREE_CODE (*node) != FUNCTION_TYPE)
2218 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2220 *no_add_attrs = true;
2226 /* Handle an "below" attribute;
2227 arguments as in struct attribute_spec.handler. */
2230 xstormy16_handle_below100_attribute (tree *node,
2231 tree name ATTRIBUTE_UNUSED,
2232 tree args ATTRIBUTE_UNUSED,
2233 int flags ATTRIBUTE_UNUSED,
/* Valid on variables, pointer types, and type declarations only.  */
2236 if (TREE_CODE (*node) != VAR_DECL
2237 && TREE_CODE (*node) != POINTER_TYPE
2238 && TREE_CODE (*node) != TYPE_DECL)
2240 warning (OPT_Wattributes,
2241 "%<__BELOW100__%> attribute only applies to variables");
2242 *no_add_attrs = true;
2244 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
/* Automatic (stack) variables cannot live in the below-100 area; only
   static-storage variables qualify.  */
2246 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2248 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2249 "with auto storage class");
2250 *no_add_attrs = true;
/* Machine-specific builtins: combined 32/16-bit divide/modulo helpers.
   Each entry maps a builtin name to an insn code plus two strings
   describing its operands (see the field comments below).  Note that
   div and mod share one insn: the insn produces both results and the
   arg_ops string selects which operand is the returned value ('r')
   versus the temp ('t').  */
2257 #undef TARGET_INIT_BUILTINS
2258 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2259 #undef TARGET_EXPAND_BUILTIN
2260 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2266 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2267 const char * arg_types; /* s=short,l=long, upper case for unsigned. */
2271 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2272 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2273 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2274 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2275 { NULL, 0, NULL, NULL }
/* Register the builtins listed in s16builtins[], constructing each
   function type from its arg_types string.
   NOTE(review): ret_type is initialized to void here; the elided lines
   presumably derive the real return type from the first arg_types
   character -- TODO confirm.  */
2279 xstormy16_init_builtins (void)
2281 tree args, ret_type, arg;
2284 ret_type = void_type_node;
2286 for (i = 0; s16builtins[i].name; i++)
/* Build the parameter list back-to-front, ending in void_list_node.  */
2288 args = void_list_node;
2289 for (a = strlen (s16builtins[i].arg_types) - 1; a >= 0; a--)
2291 switch (s16builtins[i].arg_types[a])
2293 case 's': arg = short_integer_type_node; break;
2294 case 'S': arg = short_unsigned_type_node; break;
2295 case 'l': arg = long_integer_type_node; break;
2296 case 'L': arg = long_unsigned_type_node; break;
2297 default: gcc_unreachable ();
2302 args = tree_cons (NULL_TREE, arg, args);
/* The builtin's function code is its table index I.  */
2304 add_builtin_function (s16builtins[i].name,
2305 build_function_type (ret_type, args),
2306 i, BUILT_IN_MD, NULL, NULL);
/* Expand a call to one of the s16builtins[] machine builtins into RTL.
   The table's arg_ops string drives operand assembly: 'r' = the value
   returned to the caller, 't' = a scratch register, digits = the
   caller's argument at that index.  */
2311 xstormy16_expand_builtin (tree exp, rtx target,
2312 rtx subtarget ATTRIBUTE_UNUSED,
2313 enum machine_mode mode ATTRIBUTE_UNUSED,
2314 int ignore ATTRIBUTE_UNUSED)
2316 rtx op[10], args[10], pat, copyto[10], retval = 0;
2317 tree fndecl, argtree;
/* DECL_FUNCTION_CODE was set to the table index by init_builtins.  */
2320 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2321 argtree = TREE_OPERAND (exp, 1);
2322 i = DECL_FUNCTION_CODE (fndecl);
2323 code = s16builtins[i].md_code;
/* Expand each actual argument to an rtx (at most 10).  */
2325 for (a = 0; a < 10 && argtree; a++)
2327 args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
2328 argtree = TREE_CHAIN (argtree);
/* Build the insn operand vector according to arg_ops.  */
2331 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2333 char ao = s16builtins[i].arg_ops[o];
2334 char c = insn_data[code].operand[o].constraint[0];
2339 omode = insn_data[code].operand[o].mode;
/* 'r': use TARGET if the caller supplied one, else a fresh reg.  */
2341 op[o] = target ? target : gen_reg_rtx (omode);
2343 op[o] = gen_reg_rtx (omode);
2345 op[o] = args[(int) hex_value (ao)];
/* Operand doesn't satisfy its predicate: outputs ('+'/'=') get a
   fresh register and are copied back afterwards (via copyto[]);
   inputs are just forced into a register.  */
2347 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2349 if (c == '+' || c == '=')
2352 op[o] = gen_reg_rtx (omode);
2355 op[o] = copy_to_mode_reg (omode, op[o]);
/* Emit the insn, then flush any deferred output copies.  */
2362 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2363 op[5], op[6], op[7], op[8], op[9]);
2366 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2369 emit_move_insn (copyto[o], op[o]);
2370 if (op[o] == retval)
2377 /* Look for combinations of insns that can be converted to BN or BP
2378 opcodes. This is, unfortunately, too complex to do with MD
/* NOTE(review): this excerpt is incomplete -- the embedded original line
   numbers are non-contiguous, so case labels, braces, and early returns
   are missing here.  Comments below describe only the code that is
   visible in this fragment.  */
/* Try to rewrite a conditional branch insn (cbranchhi / cbranchhi_neg)
   that tests a single bit of a loaded value so that the comparison
   operand becomes an AND (or SIGN_EXTEND) of the memory operand itself,
   enabling the single-bit branch patterns.  On success the now-redundant
   shift insn is deleted.  */
2382 combine_bnp (rtx insn)
2384 int insn_code, regno, need_extend;
2386 rtx cond, reg, and, load, qireg, mem;
2387 enum machine_mode load_mode = QImode;
2388 enum machine_mode and_mode = QImode;
2389 rtx shift = NULL_RTX;
/* Only the two HImode conditional-branch patterns are candidates.  */
2391 insn_code = recog_memoized (insn);
2392 if (insn_code != CODE_FOR_cbranchhi
2393 && insn_code != CODE_FOR_cbranchhi_neg)
/* Dig the comparison out of the branch: the pattern's first element is
   (set (pc) (if_then_else COND ...)).  */
2396 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2397 cond = XEXP (cond, 1); /* if */
2398 cond = XEXP (cond, 0); /* cond */
/* Classify the comparison code; the case labels are not visible in this
   excerpt, but the later tests distinguish sign-style (LT/GE) from
   equality-style (EQ/NE) comparisons via NEED_EXTEND.  */
2399 switch (GET_CODE (cond))
/* The comparison must be REG against const0_rtx, and the register must
   die at the branch (REG_DEAD note) so the combined form is safe.  */
2413 reg = XEXP (cond, 0);
2414 if (GET_CODE (reg) != REG)
2416 regno = REGNO (reg);
2417 if (XEXP (cond, 1) != const0_rtx)
2419 if (! find_regno_note (insn, REG_DEAD, regno))
2421 qireg = gen_rtx_REG (QImode, regno);
2425 /* LT and GE conditionals should have a sign extend before
/* Walk backwards from the branch looking for the insn that defined REG:
   either a QI->HI sign extend of the same register, or a plain HImode
   move (the bit-15 test case).  Stop if REG is otherwise mentioned or a
   non-NOTE/non-INSN (e.g. a CALL or JUMP) intervenes.  */
2427 for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
2429 int and_code = recog_memoized (and);
2431 if (and_code == CODE_FOR_extendqihi2
2432 && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
2433 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
2436 if (and_code == CODE_FOR_movhi_internal
2437 && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
2439 /* This is for testing bit 15. */
2444 if (reg_mentioned_p (reg, and))
2447 if (GET_CODE (and) != NOTE
2448 && GET_CODE (and) != INSN)
2454 /* EQ and NE conditionals have an AND before them. */
/* Same backward walk for the equality case: find the andhi3 that masked
   REG with itself as source operand 0.  */
2455 for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
2457 if (recog_memoized (and) == CODE_FOR_andhi3
2458 && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
2459 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
2462 if (reg_mentioned_p (reg, and))
2465 if (GET_CODE (and) != NOTE
2466 && GET_CODE (and) != INSN)
2472 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2473 followed by an AND like this:
2475 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2476 (clobber (reg:BI carry))]
2478 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2480 Attempt to detect this here. */
/* If found, SHIFT is remembered so its shift count can be folded into
   the mask and the shift insn deleted at the end.  */
2481 for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
2483 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2484 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2485 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2488 if (reg_mentioned_p (reg, shift)
2489 || (GET_CODE (shift) != NOTE
2490 && GET_CODE (shift) != INSN))
/* Now walk further back to find the load that put the memory value into
   REG/QIREG.  Only "below 100" addresses qualify (the target's special
   low-memory bit-addressable region), and volatile memory is excluded
   for the HImode move.  */
2501 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
2503 load = prev_real_insn (load))
2505 int load_code = recog_memoized (load);
2507 if (load_code == CODE_FOR_movhi_internal
2508 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2509 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2510 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
2516 if (load_code == CODE_FOR_movqi_internal
2517 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2518 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
2524 if (load_code == CODE_FOR_zero_extendqihi2
2525 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2526 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2533 if (reg_mentioned_p (reg, load))
2536 if (GET_CODE (load) != NOTE
2537 && GET_CODE (load) != INSN)
2543 mem = SET_SRC (PATTERN (load));
/* Sign-style tests always test the top bit of the loaded width.  */
2547 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2549 /* If the mem includes a zero-extend operation and we are
2550 going to generate a sign-extend operation then move the
2551 mem inside the zero-extend. */
2552 if (GET_CODE (mem) == ZERO_EXTEND)
2553 mem = XEXP (mem, 0);
/* Equality-style tests: the AND's constant must have exactly one bit
   set; shift that bit left by the (folded) shift count if a shift insn
   was found.  */
2557 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
2560 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));
2563 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
/* For an HImode load, narrow the access to the single byte holding the
   tested bit: bump the address by one when the bit is in the high byte.
   NOTE(review): intervening lines (mask adjustment, MEM flag copying)
   are missing from this excerpt.  */
2566 if (load_mode == HImode)
2568 rtx addr = XEXP (mem, 0);
2570 if (! (mask & 0xff))
2572 addr = plus_constant (addr, 1);
2575 mem = gen_rtx_MEM (QImode, addr);
/* Rewrite the branch condition in place, then force re-recognition.  */
2579 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2581 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2583 INSN_CODE (insn) = -1;
/* The shift is now dead; the load/and insns are handled in lines not
   visible here.  */
2589 if (shift != NULL_RTX)
2590 delete_insn (shift);
/* Worker for TARGET_MACHINE_DEPENDENT_REORG: scan every insn in the
   function and, for each jump, attempt the bit-test combination done by
   combine_bnp above.  NOTE(review): the surrounding braces and the call
   site inside the loop are missing from this excerpt.  */
2594 xstormy16_reorg (void)
2598 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Non-jump insns are skipped; only conditional branches are candidates.  */
2600 if (! JUMP_P (insn))
2606 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* Return true if a value of TYPE must be returned in memory rather than
   in registers: either its size is not known at compile time
   (int_size_in_bytes returned -1, e.g. variable-sized types) or it is
   larger than the block of argument registers available for returns.  */
2609 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2611 const HOST_WIDE_INT size = int_size_in_bytes (type);
2612 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Target hook vector initialization.  Each #undef/#define pair overrides
   the default hook from target-def.h before TARGET_INITIALIZER expands
   into the targetm structure at the bottom.  */

/* Assembler directives for aligned 16- and 32-bit data.  */
2615 #undef TARGET_ASM_ALIGNED_HI_OP
2616 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2617 #undef TARGET_ASM_ALIGNED_SI_OP
2618 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2619 #undef TARGET_ENCODE_SECTION_INFO
2620 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2622 /* Select_section doesn't handle .bss_below100. */
2623 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2624 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

/* Thunk output for C++ virtual calls; vcall offsets are not supported.  */
2626 #undef TARGET_ASM_OUTPUT_MI_THUNK
2627 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2628 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2629 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* RTX and address costing (declared near the top of this file).  */
2631 #undef TARGET_RTX_COSTS
2632 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2633 #undef TARGET_ADDRESS_COST
2634 #define TARGET_ADDRESS_COST xstormy16_address_cost

/* Varargs support hooks.  */
2636 #undef TARGET_BUILD_BUILTIN_VA_LIST
2637 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2638 #undef TARGET_EXPAND_BUILTIN_VA_START
2639 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2640 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2641 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

/* Always promote sub-word arguments, return values, and prototyped
   parameters to full-word mode.  */
2643 #undef TARGET_PROMOTE_FUNCTION_ARGS
2644 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
2645 #undef TARGET_PROMOTE_FUNCTION_RETURN
2646 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
2647 #undef TARGET_PROMOTE_PROTOTYPES
2648 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

2650 #undef TARGET_RETURN_IN_MEMORY
2651 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory

2653 #undef TARGET_MACHINE_DEPENDENT_REORG
2654 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

2656 #undef TARGET_LEGITIMATE_ADDRESS_P
2657 #define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p

/* Instantiate the target hook vector with the overrides above.  */
2659 struct gcc_target targetm = TARGET_INITIALIZER;
2661 #include "gt-stormy16.h"