1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
45 #include "target-def.h"
47 #include "langhooks.h"
/* Forward declarations for the static helpers and target hooks defined
   later in this file.  (Some prototypes in this excerpt are elided
   mid-declaration, e.g. xstormy16_asm_output_mi_thunk.)  */
52 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
53 static void xstormy16_asm_out_constructor (rtx, int);
54 static void xstormy16_asm_out_destructor (rtx, int);
55 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
58 static void xstormy16_init_builtins (void);
59 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
60 static bool xstormy16_rtx_costs (rtx, int, int, int *, bool);
61 static int xstormy16_address_cost (rtx, bool);
62 static bool xstormy16_return_in_memory (const_tree, const_tree);
64 /* Define the information needed to generate branch and scc insns. This is
65 stored from the compare operation. */
/* Latched operands of the most recent compare; consumed (and for SImode
   sub-word comparisons, temporarily overwritten) by
   xstormy16_emit_cbranch below.  */
66 struct rtx_def * xstormy16_compare_op0;
67 struct rtx_def * xstormy16_compare_op1;
/* Section for objects placed in the 8-bit-addressable "below 100" area;
   GTY-marked so the GC roots it.  */
69 static GTY(()) section *bss100_section;
71 /* Compute a (partial) cost for rtx X. Return true if the complete
72 cost has been computed, and false if subexpressions should be
73 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): the function's opening brace, the switch on the rtx
   code, its case labels, and the return statements are elided from
   this excerpt -- only the *TOTAL assignments are visible.  Verify
   the branch structure against the full source.  */
76 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
77 int *total, bool speed ATTRIBUTE_UNUSED)
/* Apparently the CONST_INT arm: tiny constants (0..15) cost half an
   insn, byte-range constants one insn, anything else two.  */
82 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
83 *total = COSTS_N_INSNS (1) / 2;
84 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
85 *total = COSTS_N_INSNS (1);
87 *total = COSTS_N_INSNS (2);
94 *total = COSTS_N_INSNS (2);
/* Large fixed costs -- presumably the multiply/divide cases; the
   elided case labels would confirm which.  */
98 *total = COSTS_N_INSNS (35 + 6);
101 *total = COSTS_N_INSNS (51 - 6);
/* Target hook: relative cost of address X.  A plain CONST_INT address
   is cheapest (2) and a reg+offset PLUS costs 7; the fallback value
   for other address forms is elided from this excerpt.  */
110 xstormy16_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
112 return (GET_CODE (x) == CONST_INT ? 2
113 : GET_CODE (x) == PLUS ? 7
117 /* Branches are handled as follows:
119 1. HImode compare-and-branches. The machine supports these
120 natively, so the appropriate pattern is emitted directly.
122 2. SImode EQ and NE. These are emitted as pairs of HImode
123 compare-and-branches.
125 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
126 of a SImode subtract followed by a branch (not a compare-and-branch),
132 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
139 /* Emit a branch of kind CODE to location LOC. */
/* Uses the globals xstormy16_compare_op0/op1 as the comparison
   operands.  HImode compares map directly onto the machine's
   compare-and-branch; SImode GT/LE/GTU/LEU and EQ/NE are rewritten
   into sequences of simpler branches (see the strategy comment
   earlier in the file).  NOTE(review): several lines (braces, some
   local declarations, a few conditions) are elided in this excerpt.  */
142 xstormy16_emit_cbranch (enum rtx_code code, rtx loc)
144 rtx op0 = xstormy16_compare_op0;
145 rtx op1 = xstormy16_compare_op1;
146 rtx condition_rtx, loc_ref, branch, cy_clobber;
148 enum machine_mode mode;
150 mode = GET_MODE (op0);
151 gcc_assert (mode == HImode || mode == SImode);
/* SImode GT/LE/GTU/LEU: recurse, expressing the test as a LT/LTU
   branch followed by an EQ/NE branch around a local label.  */
154 && (code == GT || code == LE || code == GTU || code == LEU))
156 int unsigned_p = (code == GTU || code == LEU);
157 int gt_p = (code == GT || code == GTU);
161 lab = gen_label_rtx ();
162 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
163 /* This should be generated as a comparison against the temporary
164 created by the previous insn, but reload can't handle that. */
165 xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
/* SImode EQ/NE against a nonzero operand: compare word by word,
   temporarily redirecting the compare-operand globals to subregs.  */
170 else if (mode == SImode
171 && (code == NE || code == EQ)
172 && op1 != const0_rtx)
175 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
179 lab = gen_label_rtx ();
181 for (i = 0; i < num_words - 1; i++)
183 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
185 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
187 xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
189 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
191 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
193 xstormy16_emit_cbranch (code, loc);
200 /* We can't allow reload to try to generate any reload after a branch,
201 so when some register must match we must make the temporary ourselves. */
205 tmp = gen_reg_rtx (mode);
206 emit_move_insn (tmp, op0);
/* Build the branch pattern by hand: (set pc (if_then_else ...))
   plus the clobbers the machine description requires.  */
210 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
211 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
212 branch = gen_rtx_SET (VOIDmode, pc_rtx,
213 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
/* The carry bit is clobbered by the compare/subtract.  */
216 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
219 vec = gen_rtvec (2, branch, cy_clobber);
220 else if (code == NE || code == EQ)
221 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
/* SImode LT/GE/LTU/GEU: the branch is paired with an explicit
   subtract (or clobber) of op0 in the same PARALLEL.  */
226 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
228 sub = gen_rtx_CLOBBER (SImode, op0);
230 vec = gen_rtvec (3, branch, sub, cy_clobber);
233 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
236 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
237 the arithmetic operation. Most of the work is done by
238 xstormy16_expand_arith. */
/* After expanding the arithmetic, the emitted sequence's final insn is
   rewritten in place into the desired conditional jump to LABEL.
   NOTE(review): the sequence start/end handling around
   xstormy16_expand_arith is elided in this excerpt.  */
241 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
244 rtx op0 = XEXP (comparison, 0);
245 rtx op1 = XEXP (comparison, 1);
250 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
254 gcc_assert (INSN_P (seq));
/* Walk to the last insn of the generated sequence.  */
257 while (NEXT_INSN (last_insn) != NULL_RTX)
258 last_insn = NEXT_INSN (last_insn);
/* Patch its IF_THEN_ELSE: install the caller's comparison code and
   point the taken arm at LABEL.  */
260 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
261 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
262 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
267 /* Return the string to output a conditional branch to LABEL, which is
268 the operand number of the label.
270 OP is the conditional expression, or NULL for branch-always.
272 REVERSED is nonzero if we should reverse the sense of the comparison.
/* Returns a pointer into a static buffer -- not reentrant; the caller
   must consume the string before the next call.  Long branches are
   synthesized as a reversed short branch over a jmpf.  */
277 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
279 static char string[64];
/* Length attribute tells us whether the assembler needs the
   long-branch form (branch-around + jmpf).  */
280 int need_longbranch = (op != NULL_RTX
281 ? get_attr_length (insn) == 8
282 : get_attr_length (insn) == 4);
283 int really_reversed = reversed ^ need_longbranch;
286 const char *operands;
295 sprintf (string, "%s %s", ccode, label);
299 code = GET_CODE (op);
/* Canonicalize so the register is the first operand.  */
301 if (GET_CODE (XEXP (op, 0)) != REG)
303 code = swap_condition (code);
309 /* Work out which way this really branches. */
311 code = reverse_condition (code);
/* Map the rtx comparison code to the machine's condition suffix.  */
315 case EQ: ccode = "z"; break;
316 case NE: ccode = "nz"; break;
317 case GE: ccode = "ge"; break;
318 case LT: ccode = "lt"; break;
319 case GT: ccode = "gt"; break;
320 case LE: ccode = "le"; break;
321 case GEU: ccode = "nc"; break;
322 case LTU: ccode = "c"; break;
323 case GTU: ccode = "hi"; break;
324 case LEU: ccode = "ls"; break;
/* Long form: short branch over the following far jump.  */
331 templ = "b%s %s,.+8 | jmpf %s";
334 sprintf (string, templ, ccode, operands, label);
339 /* Return the string to output a conditional branch to LABEL, which is
340 the operand number of the label, but suitable for the tail of a
343 OP is the conditional expression (OP is never NULL_RTX).
345 REVERSED is nonzero if we should reverse the sense of the comparison.
/* SImode tail: emits a preparatory insn (an OR of the two halves for
   EQ/NE, or an SBC for the ordered codes) followed by the HImode-style
   conditional branch.  Returns a pointer into a static buffer.  */
350 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
352 static char string[64];
353 int need_longbranch = get_attr_length (insn) >= 8;
354 int really_reversed = reversed ^ need_longbranch;
360 code = GET_CODE (op);
362 /* Work out which way this really branches. */
364 code = reverse_condition (code);
368 case EQ: ccode = "z"; break;
369 case NE: ccode = "nz"; break;
370 case GE: ccode = "ge"; break;
371 case LT: ccode = "lt"; break;
372 case GEU: ccode = "nc"; break;
373 case LTU: ccode = "c"; break;
375 /* The missing codes above should never be generated. */
/* EQ/NE: OR the two halves of the SImode register pair so the Z flag
   reflects the full 32-bit value.  */
386 gcc_assert (GET_CODE (XEXP (op, 0)) == REG);
388 regnum = REGNO (XEXP (op, 0));
389 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
/* Ordered codes: finish the 32-bit subtract with sbc on the high
   words, then branch on the resulting flags.  */
393 case GE: case LT: case GEU: case LTU:
394 strcpy (prevop, "sbc %2,%3");
402 templ = "%s | b%s .+6 | jmpf %s";
404 templ = "%s | b%s %s";
405 sprintf (string, templ, prevop, ccode, label);
410 /* Many machines have some registers that cannot be copied directly to or from
411 memory or even from other types of registers. An example is the `MQ'
412 register, which on most machines, can only be copied to or from general
413 registers, but not memory. Some machines allow copying all registers to and
414 from memory, but require a scratch register for stores to some memory
415 locations (e.g., those with symbolic address on the RT, and those with
416 certain symbolic address on the SPARC when compiling PIC). In some cases,
417 both an intermediate and a scratch register are required.
419 You should define these macros to indicate to the reload phase that it may
420 need to allocate at least one register for a reload in addition to the
421 register to contain the data. Specifically, if copying X to a register
422 RCLASS in MODE requires an intermediate register, you should define
423 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
424 whose registers can be used as intermediate registers or scratch registers.
426 If copying a register RCLASS in MODE to X requires an intermediate or scratch
427 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
428 largest register class required. If the requirements for input and output
429 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
430 instead of defining both macros identically.
432 The values returned by these macros are often `GENERAL_REGS'. Return
433 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
434 to or from a register of RCLASS in MODE without requiring a scratch register.
435 Do not define this macro if it would always return `NO_REGS'.
437 If a scratch register is required (either with or without an intermediate
438 register), you should define patterns for `reload_inM' or `reload_outM', as
439 required.  These patterns, which will normally be implemented with a
440 `define_expand', should be similar to the `movM' patterns, except that
441 operand 2 is the scratch register.
443 Define constraints for the reload register and scratch register that contain
444 a single register class. If the original reload register (whose class is
445 RCLASS) can meet the constraint given in the pattern, the value returned by
446 these macros is used for the class of the scratch register. Otherwise, two
447 additional reload registers are required. Their classes are obtained from
448 the constraints in the insn pattern.
450 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
451 either be in a hard register or in memory. Use `true_regnum' to find out;
452 it will return -1 if the pseudo is in memory and the hard register number if
455 These macros should not be used in the case where a particular class of
456 registers can only be copied to memory and not to another class of
457 registers. In that case, secondary reload registers are not needed and
458 would not be helpful. Instead, a stack location must be used to perform the
459 copy and the `movM' pattern should use memory as an intermediate storage.
460 This case often occurs between floating-point and general registers. */
/* Implements SECONDARY_RELOAD_CLASS (see the long comment above).
   NOTE(review): the return statements are elided in this excerpt;
   presumably EIGHT_REGS is returned when the condition below holds
   and NO_REGS otherwise -- confirm against the full source.  */
463 xstormy16_secondary_reload_class (enum reg_class rclass,
464 enum machine_mode mode,
467 /* This chip has the interesting property that only the first eight
468 registers can be moved to/from memory. */
469 if ((GET_CODE (x) == MEM
470 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
471 && (true_regnum (x) == -1
472 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
473 && ! reg_class_subset_p (rclass, EIGHT_REGS))
/* Implements PREFERRED_RELOAD_CLASS: narrow GENERAL_REGS when X is a
   MEM (only the first eight registers can access memory); the return
   values are elided from this excerpt.  */
480 xstormy16_preferred_reload_class (rtx x, enum reg_class rclass)
482 if (rclass == GENERAL_REGS
483 && GET_CODE (x) == MEM)
489 /* Predicate for symbols and addresses that reflect special 8-bit
/* True for SYMBOL_REFs flagged as below-100 data, for CONSTs wrapping
   symbol+offset (the excerpt elides the recursion on the inner
   expression), and for literal addresses in the two below-100 windows
   0x0000-0x00ff and 0x7f00-0x7fff.  */
493 xstormy16_below100_symbol (rtx x,
494 enum machine_mode mode ATTRIBUTE_UNUSED)
496 if (GET_CODE (x) == CONST)
498 if (GET_CODE (x) == PLUS
499 && GET_CODE (XEXP (x, 1)) == CONST_INT)
502 if (GET_CODE (x) == SYMBOL_REF)
503 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
505 if (GET_CODE (x) == CONST_INT)
507 HOST_WIDE_INT i = INTVAL (x);
508 if ((i >= 0x0000 && i <= 0x00ff)
509 || (i >= 0x7f00 && i <= 0x7fff))
515 /* Likewise, but only for non-volatile MEMs, for patterns where the
516 MEM will get split into smaller sized accesses. */
519 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
/* Volatile memory must not be split into partial accesses.  */
521 if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
523 return xstormy16_below100_operand (x, mode);
526 /* Expand an 8-bit IOR. This either detects the one case we can
527 actually do, or uses a 16-bit IOR. */
/* operands[] is the movqi-style operand array from the expander;
   the unpacking of in/out/val from it is elided in this excerpt.  */
530 xstormy16_expand_iorqi3 (rtx *operands)
532 rtx in, out, outsub, val;
/* Fast path: setting a single bit can be done directly on a
   below-100 memory location or a register.  */
538 if (xstormy16_onebit_set_operand (val, QImode))
540 if (!xstormy16_below100_or_register (in, QImode))
541 in = copy_to_mode_reg (QImode, in);
542 if (!xstormy16_below100_or_register (out, QImode))
543 out = gen_reg_rtx (QImode);
544 emit_insn (gen_iorqi3_internal (out, in, val));
545 if (out != operands[0])
546 emit_move_insn (operands[0], out);
/* General case: force everything into registers and do a 16-bit IOR
   on HImode subregs of the QImode values.  */
550 if (GET_CODE (in) != REG)
551 in = copy_to_mode_reg (QImode, in);
552 if (GET_CODE (val) != REG
553 && GET_CODE (val) != CONST_INT)
554 val = copy_to_mode_reg (QImode, val);
555 if (GET_CODE (out) != REG)
556 out = gen_reg_rtx (QImode);
558 in = simplify_gen_subreg (HImode, in, QImode, 0);
559 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
560 if (GET_CODE (val) != CONST_INT)
561 val = simplify_gen_subreg (HImode, val, QImode, 0);
563 emit_insn (gen_iorhi3 (outsub, in, val));
565 if (out != operands[0])
566 emit_move_insn (operands[0], out);
569 /* Expand an 8-bit AND. This either detects the one case we can
570 actually do, or uses a 16-bit AND. */
/* Mirror of xstormy16_expand_iorqi3 above, with the single-bit-clear
   test and andqi3/andhi3 patterns instead of the IOR forms.  */
573 xstormy16_expand_andqi3 (rtx *operands)
575 rtx in, out, outsub, val;
/* Fast path: clearing a single bit has a direct pattern.  */
581 if (xstormy16_onebit_clr_operand (val, QImode))
583 if (!xstormy16_below100_or_register (in, QImode))
584 in = copy_to_mode_reg (QImode, in);
585 if (!xstormy16_below100_or_register (out, QImode))
586 out = gen_reg_rtx (QImode);
587 emit_insn (gen_andqi3_internal (out, in, val));
588 if (out != operands[0])
589 emit_move_insn (operands[0], out);
/* General case: widen to HImode and use the 16-bit AND.  */
593 if (GET_CODE (in) != REG)
594 in = copy_to_mode_reg (QImode, in);
595 if (GET_CODE (val) != REG
596 && GET_CODE (val) != CONST_INT)
597 val = copy_to_mode_reg (QImode, val);
598 if (GET_CODE (out) != REG)
599 out = gen_reg_rtx (QImode);
601 in = simplify_gen_subreg (HImode, in, QImode, 0);
602 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
603 if (GET_CODE (val) != CONST_INT)
604 val = simplify_gen_subreg (HImode, val, QImode, 0);
606 emit_insn (gen_andhi3 (outsub, in, val));
608 if (out != operands[0])
609 emit_move_insn (operands[0], out);
/* True if X is a CONST_INT usable as a signed 12-bit register offset
   once OFFSET is added (range -2048 .. 2047).  */
612 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
613 (GET_CODE (X) == CONST_INT \
614 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
/* True if X+OFFSET is a CONST_INT usable as an absolute address:
   nonnegative, below 0x8000, and within one of the directly
   addressable windows (below 0x100 or at/above 0x7F00).  */
616 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
617 (GET_CODE (X) == CONST_INT \
618 && INTVAL (X) + (OFFSET) >= 0 \
619 && INTVAL (X) + (OFFSET) < 0x8000 \
620 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
/* Target hook: is X a valid memory address?  Accepts absolute
   constants, reg+const offsets, certain auto-modify forms, base
   registers, and below-100 symbols.  The return statements and the
   strict-mode handling of the PLUS base are elided in this excerpt.  */
623 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
626 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
629 if (GET_CODE (x) == PLUS
630 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
633 /* PR 31232: Do not allow INT+INT as an address. */
634 if (GET_CODE (x) == CONST_INT)
638 if ((GET_CODE (x) == PRE_MODIFY
639 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
640 || GET_CODE (x) == POST_INC
641 || GET_CODE (x) == PRE_DEC)
/* A bare base register; in strict mode only hard registers count.  */
644 if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
645 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
648 if (xstormy16_below100_symbol (x, mode))
654 /* Return nonzero if memory address X (an RTX) can have different
655 meanings depending on the machine mode of the memory reference it
656 is used for or if the address is valid for some modes but not
659 Autoincrement and autodecrement addresses typically have mode-dependent
660 effects because the amount of the increment or decrement is the size of the
661 operand being addressed. Some machines have other mode-dependent addresses.
662 Many RISC machines have no mode-dependent addresses.
664 You may assume that ADDR is a valid address for the machine.
666 On this chip, this is true if the address is valid with an offset
667 of 0 but not of 6, because in that case it cannot be used as an
668 address for DImode or DFmode, or if the address is a post-increment
669 or pre-decrement address. */
/* See the comment above: an address is mode-dependent if it is valid
   at offset 0 but not at offset 6 (so it cannot address all words of
   a DImode/DFmode object).  Return statements are elided here.  */
672 xstormy16_mode_dependent_address_p (rtx x)
674 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
675 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
678 if (GET_CODE (x) == PLUS
679 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
680 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
/* Presumably PLUS with a non-integer second operand is also treated
   as mode-dependent -- the body of this arm is elided.  */
683 if (GET_CODE (x) == PLUS)
686 /* Auto-increment addresses are now treated generically in recog.c. */
690 /* A C expression that defines the optional machine-dependent constraint
691 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
692 types of operands, usually memory references, for the target machine.
693 Normally this macro will not be defined. If it is required for a particular
694 target machine, it should return 1 if VALUE corresponds to the operand type
695 represented by the constraint letter C. If C is not defined as an extra
696 constraint, the value returned should be 0 regardless of VALUE. */
/* Implements the Q/R/S/T/U/Z/W extra constraints described in the
   comment above.  The switch statement and its case labels are elided
   in this excerpt; each comment below names the constraint letter the
   following test implements.  */
699 xstormy16_extra_constraint_p (rtx x, int c)
703 /* 'Q' is for pushes. */
705 return (GET_CODE (x) == MEM
706 && GET_CODE (XEXP (x, 0)) == POST_INC
707 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
709 /* 'R' is for pops. */
711 return (GET_CODE (x) == MEM
712 && GET_CODE (XEXP (x, 0)) == PRE_DEC
713 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
715 /* 'S' is for immediate memory addresses. */
717 return (GET_CODE (x) == MEM
718 && GET_CODE (XEXP (x, 0)) == CONST_INT
719 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
723 /* Not implemented yet. */
726 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
727 for allocating a scratch register for 32-bit shifts. */
729 return (GET_CODE (x) == CONST_INT
730 && (INTVAL (x) < 2 || INTVAL (x) > 15));
732 /* 'Z' is for CONST_INT value zero. This is for adding zero to
733 a register in addhi3, which would otherwise require a carry. */
735 return (GET_CODE (x) == CONST_INT
736 && (INTVAL (x) == 0));
/* Final case: below-100 operands (constraint letter elided).  */
739 return xstormy16_below100_operand (x, GET_MODE (x));
/* Predicate: a memory operand whose address has no displacement
   (i.e. not of the form reg+offset).  */
747 short_memory_operand (rtx x, enum machine_mode mode)
749 if (! memory_operand (x, mode))
751 return (GET_CODE (XEXP (x, 0)) != PLUS);
754 /* Splitter for the 'move' patterns, for modes not directly implemented
755 by hardware. Emit insns to copy a value of mode MODE from SRC to
758 This function is only called when reload_completed. */
/* NOTE(review): braces and some declarations/else-arms are elided in
   this excerpt; the overall shape is: validate, classify the MEM side
   (if any), pick a copy direction, then emit one word move per
   iteration of the final loop.  */
761 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
763 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
764 int direction, end, i;
765 int src_modifies = 0;
766 int dest_modifies = 0;
767 int src_volatile = 0;
768 int dest_volatile = 0;
770 rtx auto_inc_reg_rtx = NULL_RTX;
772 /* Check initial conditions. */
773 gcc_assert (reload_completed
774 && mode != QImode && mode != HImode
775 && nonimmediate_operand (dest, mode)
776 && general_operand (src, mode));
778 /* This case is not supported below, and shouldn't be generated. */
779 gcc_assert (GET_CODE (dest) != MEM || GET_CODE (src) != MEM);
781 /* This case is very very bad after reload, so trap it now. */
782 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
784 /* The general idea is to copy by words, offsetting the source and
785 destination. Normally the least-significant word will be copied
786 first, but for pre-dec operations it's better to copy the
787 most-significant word first. Only one operand can be a pre-dec
790 It's also possible that the copy overlaps so that the direction
/* Record side effects/volatility of the MEM operand (at most one),
   and strip the volatile flag so the per-word MEMs can carry it.  */
794 if (GET_CODE (dest) == MEM)
796 mem_operand = XEXP (dest, 0);
797 dest_modifies = side_effects_p (mem_operand);
798 if (auto_inc_p (mem_operand))
799 auto_inc_reg_rtx = XEXP (mem_operand, 0);
800 dest_volatile = MEM_VOLATILE_P (dest);
803 dest = copy_rtx (dest);
804 MEM_VOLATILE_P (dest) = 0;
807 else if (GET_CODE (src) == MEM)
809 mem_operand = XEXP (src, 0);
810 src_modifies = side_effects_p (mem_operand);
811 if (auto_inc_p (mem_operand))
812 auto_inc_reg_rtx = XEXP (mem_operand, 0);
813 src_volatile = MEM_VOLATILE_P (src);
816 src = copy_rtx (src);
817 MEM_VOLATILE_P (src) = 0;
821 mem_operand = NULL_RTX;
/* Choose the copy direction so overlapping reg-reg or reg-mem copies
   don't clobber words before they are read.  */
823 if (mem_operand == NULL_RTX)
825 if (GET_CODE (src) == REG
826 && GET_CODE (dest) == REG
827 && reg_overlap_mentioned_p (dest, src)
828 && REGNO (dest) > REGNO (src))
831 else if (GET_CODE (mem_operand) == PRE_DEC
832 || (GET_CODE (mem_operand) == PLUS
833 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
835 else if (GET_CODE (src) == MEM
836 && reg_overlap_mentioned_p (dest, src))
840 gcc_assert (GET_CODE (dest) == REG);
841 regno = REGNO (dest);
843 gcc_assert (refers_to_regno_p (regno, regno + num_words,
846 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
848 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
852 /* This means something like
853 (set (reg:DI r0) (mem:DI (reg:HI r1)))
854 which we'd need to support by doing the set of the second word
/* Emit one word-sized move per iteration, walking in the chosen
   direction and re-applying volatility to each partial MEM.  */
859 end = direction < 0 ? -1 : num_words;
860 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
862 rtx w_src, w_dest, insn;
865 w_src = gen_rtx_MEM (word_mode, mem_operand);
867 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
869 MEM_VOLATILE_P (w_src) = 1;
871 w_dest = gen_rtx_MEM (word_mode, mem_operand);
873 w_dest = simplify_gen_subreg (word_mode, dest, mode,
876 MEM_VOLATILE_P (w_dest) = 1;
878 /* The simplify_subreg calls must always be able to simplify. */
879 gcc_assert (GET_CODE (w_src) != SUBREG
880 && GET_CODE (w_dest) != SUBREG);
882 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
/* Preserve the REG_INC note so flow knows about the auto-inc.  */
883 if (auto_inc_reg_rtx)
884 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
890 /* Expander for the 'move' patterns. Emit insns to copy a value of
891 mode MODE from SRC to DEST. */
894 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
/* PRE_MODIFY on the destination: perform the pointer update as a
   separate add (clobbering carry), then use the plain register
   address for the store itself.  */
896 if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
898 rtx pmv = XEXP (dest, 0);
899 rtx dest_reg = XEXP (pmv, 0);
900 rtx dest_mod = XEXP (pmv, 1);
901 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
902 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
904 dest = gen_rtx_MEM (mode, dest_reg);
905 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
/* Same transformation for PRE_MODIFY on the source.  */
907 else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
909 rtx pmv = XEXP (src, 0);
910 rtx src_reg = XEXP (pmv, 0);
911 rtx src_mod = XEXP (pmv, 1);
912 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
913 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
915 src = gen_rtx_MEM (mode, src_reg);
916 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
919 /* There are only limited immediate-to-memory move instructions. */
920 if (! reload_in_progress
921 && ! reload_completed
922 && GET_CODE (dest) == MEM
923 && (GET_CODE (XEXP (dest, 0)) != CONST_INT
924 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
925 && ! xstormy16_below100_operand (dest, mode)
926 && GET_CODE (src) != REG
927 && GET_CODE (src) != SUBREG)
928 src = copy_to_mode_reg (mode, src);
930 /* Don't emit something we would immediately split. */
/* NOTE(review): the first conjunct of this condition (presumably a
   reload_completed test) is elided in this excerpt.  */
932 && mode != HImode && mode != QImode)
934 xstormy16_split_move (mode, dest, src);
938 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
943 The stack is laid out as follows:
947 Register save area (up to 4 words)
948 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
950 AP-> Return address (two words)
951 9th procedure parameter word
952 10th procedure parameter word
954 last procedure parameter word
956 The frame pointer location is tuned to make it most likely that all
957 parameters and local variables can be accessed using a load-indexed
960 /* A structure to describe the layout. */
961 struct xstormy16_stack_layout
963 /* Size of the topmost three items on the stack. */
965 int register_save_size;
966 int stdarg_save_size;
967 /* Sum of the above items. */
969 /* Various offsets. */
970 int first_local_minus_ap;
975 /* Does REGNO need to be saved? */
/* A register needs saving if it is live and call-saved, or -- in an
   interrupt function (IFUN) -- if it is a non-fixed call-used register
   other than carry that is live or the function is not a leaf.  */
976 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
977 ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
978 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
979 && (REGNUM != CARRY_REGNUM) \
980 && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
982 /* Compute the stack layout. */
/* Fills in an xstormy16_stack_layout (returned by value) from the
   current function's frame size, saved registers and stdarg needs.
   The frame-pointer placement tries to keep all parameters and locals
   within the 2048-byte load-indexed offset range of the FP.  */
984 struct xstormy16_stack_layout
985 xstormy16_compute_stack_layout (void)
987 struct xstormy16_stack_layout layout;
989 const int ifun = xstormy16_interrupt_function_p ();
991 layout.locals_size = get_frame_size ();
/* One word per register that REG_NEEDS_SAVE selects.  */
993 layout.register_save_size = 0;
994 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
995 if (REG_NEEDS_SAVE (regno, ifun))
996 layout.register_save_size += UNITS_PER_WORD;
999 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1001 layout.stdarg_save_size = 0;
1003 layout.frame_size = (layout.locals_size
1004 + layout.register_save_size
1005 + layout.stdarg_save_size);
/* Place the FP so that, if possible, both the whole frame and the
   incoming arguments fall within the 2048-byte indexed range.  */
1007 if (crtl->args.size <= 2048 && crtl->args.size != -1)
1009 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
1010 + crtl->args.size <= 2048)
1011 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
1013 layout.fp_minus_ap = 2048 - crtl->args.size;
1016 layout.fp_minus_ap = (layout.stdarg_save_size
1017 + layout.register_save_size
1018 - INCOMING_FRAME_SP_OFFSET);
1019 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
1020 - layout.fp_minus_ap);
1021 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
1025 /* Determine how all the special registers get eliminated. */
/* Implements INITIAL_ELIMINATION_OFFSET: returns the constant offset
   between register FROM and register TO, derived from the computed
   stack layout.  */
1028 xstormy16_initial_elimination_offset (int from, int to)
1030 struct xstormy16_stack_layout layout;
1033 layout = xstormy16_compute_stack_layout ();
1035 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1036 result = layout.sp_minus_fp - layout.locals_size;
1037 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1038 result = - layout.locals_size;
1039 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1040 result = - layout.fp_minus_ap;
1041 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1042 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
/* Emit DEST = SRC0 + SRC1 as the post-reload addhi3 pattern, i.e. a
   PARALLEL of the set and an explicit carry clobber.  Returns the
   emitted insn.  */
1050 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1052 rtx set, clobber, insn;
1054 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1055 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1056 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1060 /* Called after register allocation to add any instructions needed for
1061 the prologue. Using a prologue insn is favored compared to putting
1062 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1063 since it allows the scheduler to intermix instructions with the
1064 saves of the caller saved registers. In some cases, it might be
1065 necessary to emit a barrier instruction as the last insn to prevent
1068 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1069 so that the debug info generation code can handle them properly. */
1072 xstormy16_expand_prologue (void)
1074 struct xstormy16_stack_layout layout;
1078 const int ifun = xstormy16_interrupt_function_p ();
/* Pushes on this target are post-increment stores through SP.  */
1080 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1081 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1083 layout = xstormy16_compute_stack_layout ();
1085 if (layout.locals_size >= 32768)
1086 error ("local variable memory requirements exceed capacity");
1088 /* Save the argument registers if necessary. */
1089 if (layout.stdarg_save_size)
1090 for (regno = FIRST_ARGUMENT_REGISTER;
1091 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1095 rtx reg = gen_rtx_REG (HImode, regno);
1097 insn = emit_move_insn (mem_push_rtx, reg);
1098 RTX_FRAME_RELATED_P (insn) = 1;
/* The push's post-inc is opaque to dwarf2out, so attach an explicit
   REG_FRAME_RELATED_EXPR describing the store and the SP bump.  */
1100 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1102 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1103 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1105 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1106 plus_constant (stack_pointer_rtx,
1107 GET_MODE_SIZE (Pmode)));
1108 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1111 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1112 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1115 /* Push each of the registers to save. */
1116 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1117 if (REG_NEEDS_SAVE (regno, ifun))
1120 rtx reg = gen_rtx_REG (HImode, regno);
1122 insn = emit_move_insn (mem_push_rtx, reg);
1123 RTX_FRAME_RELATED_P (insn) = 1;
/* Same dwarf annotation as for the stdarg pushes above.  */
1125 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1127 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1128 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1130 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1131 plus_constant (stack_pointer_rtx,
1132 GET_MODE_SIZE (Pmode)));
1133 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1136 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1137 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1140 /* It's just possible that the SP here might be what we need for
/* ... the frame pointer; grab it before allocating locals.  */
1142 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1144 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1145 RTX_FRAME_RELATED_P (insn) = 1;
1148 /* Allocate space for local variables. */
1149 if (layout.locals_size)
1151 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1152 GEN_INT (layout.locals_size));
1153 RTX_FRAME_RELATED_P (insn) = 1;
1156 /* Set up the frame pointer, if required. */
1157 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1159 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1160 RTX_FRAME_RELATED_P (insn) = 1;
1162 if (layout.sp_minus_fp)
1164 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1165 hard_frame_pointer_rtx,
1166 GEN_INT (- layout.sp_minus_fp));
1167 RTX_FRAME_RELATED_P (insn) = 1;
1172 /* Do we need an epilogue at all? */
/* True after reload when the frame is empty, i.e. a bare return
   suffices.  */
1175 direct_return (void)
1177 return (reload_completed
1178 && xstormy16_compute_stack_layout ().frame_size == 0);
1181 /* Called after register allocation to add any instructions needed for
1182 the epilogue. Using an epilogue insn is favored compared to putting
1183 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1184 since it allows the scheduler to intermix instructions with the
1185 saves of the caller saved registers. In some cases, it might be
1186 necessary to emit a barrier instruction as the last insn to prevent
1190 xstormy16_expand_epilogue (void)
1192 struct xstormy16_stack_layout layout;
1193 rtx mem_pop_rtx, insn;
1195 const int ifun = xstormy16_interrupt_function_p ();
/* Pops are pre-decrement loads through SP (the mirror of the
   post-increment pushes in the prologue).  */
1197 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1198 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1200 layout = xstormy16_compute_stack_layout ();
1202 /* Pop the stack for the locals. */
1203 if (layout.locals_size)
/* Restoring SP from the FP is cheaper when they coincide.  */
1205 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1206 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1208 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1209 GEN_INT (- layout.locals_size));
1212 /* Restore any call-saved registers. */
/* Walk in reverse register order to mirror the prologue's pushes.  */
1213 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1214 if (REG_NEEDS_SAVE (regno, ifun))
1215 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1217 /* Pop the stack for the stdarg save area. */
1218 if (layout.stdarg_save_size)
1219 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1220 GEN_INT (- layout.stdarg_save_size));
/* Interrupt functions return via iret-style pattern; the condition
   selecting between these two is elided in this excerpt.  */
1224 emit_jump_insn (gen_return_internal_interrupt ());
1226 emit_jump_insn (gen_return_internal ());
/* Return nonzero if register REGNO is considered used by the epilogue.
   After reload, an interrupt function's epilogue restores call-used
   registers, so they must be treated as live there.  NOTE(review): the
   fall-through return for the non-matching case is not visible in this
   extract.  */
1230 xstormy16_epilogue_uses (int regno)
1232 if (reload_completed && call_used_regs[regno])
1234 const int ifun = xstormy16_interrupt_function_p ();
1235 return REG_NEEDS_SAVE (regno, ifun);
/* Profiling (-pg) is not implemented for this target; diagnose rather
   than silently emit nothing.  */
1241 xstormy16_function_profiler (void)
1243 sorry ("function_profiler support");
1246 /* Return an updated summarizer variable CUM to advance past an
1247 argument in the argument list. The values MODE, TYPE and NAMED
1248 describe that argument. Once this is done, the variable CUM is
1249 suitable for analyzing the *following* argument with
1250 `FUNCTION_ARG', etc.
1252 This function need not do anything if the argument in question was
1253 passed on the stack. The compiler knows how to track the amount of
1254 stack space used for arguments without any special help. However,
1255 it makes life easier for xstormy16_build_va_list if it does update
1259 xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
1260 tree type, int named ATTRIBUTE_UNUSED)
1262 /* If an argument would otherwise be passed partially in registers,
1263 and partially on the stack, the whole of it is passed on the
/* Bump CUM past the register words the argument would straddle.  */
1265 if (cum < NUM_ARGUMENT_REGISTERS
1266 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1267 cum = NUM_ARGUMENT_REGISTERS;
1269 cum += XSTORMY16_WORD_SIZE (type, mode);
/* NOTE(review): the `return cum;' tail is not visible in this
   extract.  */
/* Worker for FUNCTION_ARG: return the rtx (a hard register) in which
   this argument is passed, or presumably NULL_RTX for stack-passed
   arguments (the NULL-return lines are elided in this extract).  */
1275 xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
1276 tree type, int named ATTRIBUTE_UNUSED)
1278 if (mode == VOIDmode)
1280 if (targetm.calls.must_pass_in_stack (mode, type)
1281 || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
/* Argument registers start at hard register 2; CUM counts words
   already consumed.  */
1283 return gen_rtx_REG (mode, cum + 2);
1286 /* Build the va_list type.
1288 For this chip, va_list is a record containing a counter and a pointer.
1289 The counter is of type 'int' and indicates how many bytes
1290 have been used to date. The pointer indicates the stack position
1291 for arguments that have not been passed in registers.
1292 To keep the layout nice, the pointer is first in the structure. */
1295 xstormy16_build_builtin_va_list (void)
1297 tree f_1, f_2, record, type_decl;
1299 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1300 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* "base": stack position of the next unprocessed stack argument.
   NOTE(review): its field type argument is elided in this extract.  */
1302 f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
/* "count": bytes of arguments consumed so far.  */
1304 f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
1305 unsigned_type_node);
1307 DECL_FIELD_CONTEXT (f_1) = record;
1308 DECL_FIELD_CONTEXT (f_2) = record;
1310 TREE_CHAIN (record) = type_decl;
1311 TYPE_NAME (record) = type_decl;
1312 TYPE_FIELDS (record) = f_1;
1313 TREE_CHAIN (f_1) = f_2;
1315 layout_type (record);
/* NOTE(review): `return record;' is presumably the elided tail.  */
1320 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1321 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1322 variable to initialize. NEXTARG is the machine independent notion of the
1323 'next' argument after the variable arguments. */
1326 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1328 tree f_base, f_count;
/* NOTE(review): declarations of `base', `count', `t', `u' are elided
   in this extract.  */
1332 if (xstormy16_interrupt_function_p ())
1333 error ("cannot use va_start in interrupt function");
1335 f_base = TYPE_FIELDS (va_list_type_node);
1336 f_count = TREE_CHAIN (f_base);
1338 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1339 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
/* base = incoming-args pointer minus INCOMING_FRAME_SP_OFFSET.  */
1342 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1343 u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
1344 u = fold_convert (TREE_TYPE (count), u);
1345 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), t, u);
1346 t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
1347 TREE_SIDE_EFFECTS (t) = 1;
1348 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* count = bytes already consumed by named register arguments.  */
1350 t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
1351 build_int_cst (NULL_TREE,
1352 crtl->args.info * UNITS_PER_WORD));
1353 TREE_SIDE_EFFECTS (t) = 1;
1354 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1357 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1358 of type va_list as a tree, TYPE is the type passed to va_arg.
1359 Note: This algorithm is documented in stormy-abi. */
1362 xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
1363 gimple_seq *post_p ATTRIBUTE_UNUSED)
1365 tree f_base, f_count;
/* NOTE(review): declarations of `base', `count', `size_tree', `r',
   `u' appear to be elided from this extract.  */
1367 tree count_tmp, addr, t;
1368 tree lab_gotaddr, lab_fromstack;
1369 int size, size_of_reg_args, must_stack;
1372 f_base = TYPE_FIELDS (va_list_type_node);
1373 f_count = TREE_CHAIN (f_base);
1375 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1376 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1379 must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
1380 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1381 gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
1383 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1385 count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
1386 lab_gotaddr = create_artificial_label ();
1387 lab_fromstack = create_artificial_label ();
1388 addr = create_tmp_var (ptr_type_node, NULL);
/* If count + size would overrun the register-save area, the argument
   was passed on the stack: branch to lab_fromstack.  */
1394 t = fold_convert (TREE_TYPE (count), size_tree);
1395 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1396 r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
1397 t = build2 (GT_EXPR, boolean_type_node, t, r);
1398 t = build3 (COND_EXPR, void_type_node, t,
1399 build1 (GOTO_EXPR, void_type_node, lab_fromstack),
1401 gimplify_and_add (t, pre_p);
/* Register case: address is base + count.  */
1403 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, base, count_tmp);
1404 gimplify_assign (addr, t, pre_p);
1406 t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
1407 gimplify_and_add (t, pre_p);
1409 t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
1410 gimplify_and_add (t, pre_p);
1413 /* Arguments larger than a word might need to skip over some
1414 registers, since arguments are either passed entirely in
1415 registers or entirely on the stack. */
1416 size = PUSH_ROUNDING (int_size_in_bytes (type));
1417 if (size > 2 || size < 0 || must_stack)
/* Advance count_tmp past the whole register-save area if it has not
   reached it yet.  */
1421 r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
1422 u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);
1424 t = fold_convert (TREE_TYPE (count), r);
1425 t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
1426 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
1427 gimplify_and_add (t, pre_p);
/* Stack case: compute the argument's address relative to base; the
   negate reflects that stack arguments sit below the save area.  */
1430 t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1431 + INCOMING_FRAME_SP_OFFSET);
1432 t = fold_convert (TREE_TYPE (count), t);
1433 t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
1434 t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
1435 fold_convert (TREE_TYPE (count), size_tree));
1436 t = fold_convert (TREE_TYPE (t), fold (t));
1437 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1438 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
1439 gimplify_assign (addr, t, pre_p);
1441 t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
1442 gimplify_and_add (t, pre_p);
/* In both cases, commit the advanced count back to the va_list.  */
1444 t = fold_convert (TREE_TYPE (count), size_tree);
1445 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1446 gimplify_assign (count, t, pre_p);
1448 addr = fold_convert (build_pointer_type (type), addr);
1449 return build_va_arg_indirect_ref (addr);
1452 /* Initialize the variable parts of a trampoline. ADDR is an RTX for
1453 the address of the trampoline; FNADDR is an RTX for the address of
1454 the nested function; STATIC_CHAIN is an RTX for the static chain
1455 value that should be passed to the function when it is called. */
1458 xstormy16_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
1460 rtx reg_addr = gen_reg_rtx (Pmode);
1461 rtx temp = gen_reg_rtx (HImode);
1462 rtx reg_fnaddr = gen_reg_rtx (HImode);
/* NOTE(review): the declaration of `reg_addr_mem' is elided in this
   extract.  */
1465 reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);
1467 emit_move_insn (reg_addr, addr);
/* Word 0: opcode 0x3130 with the static-chain register number folded
   into the low bits (a "mov Rx,#imm16" pair, per the trampoline
   template).  */
1468 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1469 emit_move_insn (reg_addr_mem, temp);
1470 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Word 1: the static chain value itself.  */
1471 emit_move_insn (temp, static_chain);
1472 emit_move_insn (reg_addr_mem, temp);
1473 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Words 2-3: a jump to FNADDR, with the target split across the two
   instruction words (low byte OR'd with 0x0200, then the high bits).  */
1474 emit_move_insn (reg_fnaddr, fnaddr);
1475 emit_move_insn (temp, reg_fnaddr);
1476 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1477 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1478 emit_move_insn (reg_addr_mem, temp);
1479 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1480 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1481 emit_move_insn (reg_addr_mem, reg_fnaddr);
1484 /* Worker function for FUNCTION_VALUE. */
/* Return the register in which a value of type VALTYPE is returned:
   RETURN_VALUE_REGNUM, in the (possibly promoted) mode of the type.  */
1487 xstormy16_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
1489 enum machine_mode mode;
1490 mode = TYPE_MODE (valtype);
1491 PROMOTE_MODE (mode, 0, valtype);
1492 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1495 /* A C compound statement that outputs the assembler code for a thunk function,
1496 used to implement C++ virtual function calls with multiple inheritance. The
1497 thunk acts as a wrapper around a virtual function, adjusting the implicit
1498 object parameter before handing control off to the real function.
1500 First, emit code to add the integer DELTA to the location that contains the
1501 incoming first argument. Assume that this argument contains a pointer, and
1502 is the one used to pass the `this' pointer in C++. This is the incoming
1503 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1504 addition must preserve the values of all other incoming arguments.
1506 After the addition, emit code to jump to FUNCTION, which is a
1507 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1508 the return address. Hence returning from FUNCTION will return to whoever
1509 called the current `thunk'.
1511 The effect must be as if @var{function} had been called directly
1512 with the adjusted first argument. This macro is responsible for
1513 emitting all of the code for a thunk function;
1514 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1517 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1518 extracted from it.) It might possibly be useful on some targets, but
1522 xstormy16_asm_output_mi_thunk (FILE *file,
1523 tree thunk_fndecl ATTRIBUTE_UNUSED,
1524 HOST_WIDE_INT delta,
1525 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1528 int regnum = FIRST_ARGUMENT_REGISTER;
1530 /* There might be a hidden first argument for a returned structure. */
1531 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
/* Adjust the `this' register by DELTA (low 16 bits), then tail-jump
   to the real function with jmpf.  */
1534 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1535 fputs ("\tjmpf ", file);
1536 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1540 /* The purpose of this function is to override the default behavior of
1541 BSS objects. Normally, they go into .bss or .sbss via ".common"
1542 directives, but we need to override that and put them in
1543 .bss_below100. We can't just use a section override (like we do
1544 for .data_below100), because that makes them initialized rather
1545 than uninitialized. */
1548 xstormy16_asm_output_aligned_common (FILE *stream,
/* NOTE(review): the remaining parameters (decl, name, size, align,
   global flag) and several locals are elided in this extract.  */
1555 rtx mem = DECL_RTL (decl);
/* Detect below-100 objects by the flag set in encode_section_info.  */
1559 && GET_CODE (mem) == MEM
1560 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1561 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
/* Below-100 path: emit the object explicitly into .bss_below100
   instead of using .comm/.local.  */
1566 switch_to_section (bss100_section);
1574 name2 = default_strip_name_encoding (name);
1576 fprintf (stream, "\t.globl\t%s\n", name2);
1578 fprintf (stream, "\t.p2align %d\n", p2align);
1579 fprintf (stream, "\t.type\t%s, @object\n", name2);
1580 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1581 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
/* Default path: .local for non-global commons, then .comm.  */
1587 fprintf (stream, "\t.local\t");
1588 assemble_name (stream, name);
1589 fprintf (stream, "\n");
1591 fprintf (stream, "\t.comm\t");
1592 assemble_name (stream, name);
1593 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1596 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Create the .bss_below100 section object used for uninitialized
   below-100 data (assigned to the file-scope `bss100_section').  */
1599 xstormy16_asm_init_sections (void)
1602 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
1603 output_section_asm_op,
1604 "\t.section \".bss_below100\",\"aw\",@nobits");
1607 /* Mark symbols with the "below100" attribute so that we can use the
1608 special addressing modes for them. */
1611 xstormy16_encode_section_info (tree decl, rtx r, int first)
1613 default_encode_section_info (decl, r, first);
/* Either spelling of the attribute marks the variable's symbol so
   later passes can select below-100 addressing.  */
1615 if (TREE_CODE (decl) == VAR_DECL
1616 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1617 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1619 rtx symbol = XEXP (r, 0);
1621 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1622 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1626 #undef TARGET_ASM_CONSTRUCTOR
1627 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1628 #undef TARGET_ASM_DESTRUCTOR
1629 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1631 /* Output constructors and destructors. Just like
1632 default_named_section_asm_out_* but don't set the sections writable. */
1635 xstormy16_asm_out_destructor (rtx symbol, int priority)
1637 const char *section = ".dtors";
/* NOTE(review): the local `buf' declaration is elided in this
   extract.  */
1640 /* ??? This only works reliably with the GNU linker. */
1641 if (priority != DEFAULT_INIT_PRIORITY)
1643 sprintf (buf, ".dtors.%.5u",
1644 /* Invert the numbering so the linker puts us in the proper
1645 order; constructors are run from right to left, and the
1646 linker sorts in increasing order. */
1647 MAX_INIT_PRIORITY - priority);
/* Note: section flags 0, i.e. not SECTION_WRITE, unlike the default
   implementation.  */
1651 switch_to_section (get_section (section, 0, NULL));
1652 assemble_align (POINTER_SIZE);
1653 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Constructor counterpart of xstormy16_asm_out_destructor: emit SYMBOL
   into a (non-writable) .ctors section, priority-suffixed when not
   the default.  */
1657 xstormy16_asm_out_constructor (rtx symbol, int priority)
1659 const char *section = ".ctors";
/* NOTE(review): the local `buf' declaration is elided in this
   extract.  */
1662 /* ??? This only works reliably with the GNU linker. */
1663 if (priority != DEFAULT_INIT_PRIORITY)
1665 sprintf (buf, ".ctors.%.5u",
1666 /* Invert the numbering so the linker puts us in the proper
1667 order; constructors are run from right to left, and the
1668 linker sorts in increasing order. */
1669 MAX_INIT_PRIORITY - priority);
1673 switch_to_section (get_section (section, 0, NULL));
1674 assemble_align (POINTER_SIZE);
1675 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1678 /* Print a memory address as an operand to reference that memory location. */
1681 xstormy16_print_operand_address (FILE *file, rtx address)
1683 HOST_WIDE_INT offset;
1684 int pre_dec, post_inc;
1686 /* There are a few easy cases. */
/* Absolute 16-bit address: print it masked to 16 bits.  */
1687 if (GET_CODE (address) == CONST_INT)
1689 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1693 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1695 output_addr_const (file, address);
1699 /* Otherwise, it's hopefully something of the form
1700 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1701 if (GET_CODE (address) == PLUS)
1703 gcc_assert (GET_CODE (XEXP (address, 1)) == CONST_INT);
1704 offset = INTVAL (XEXP (address, 1));
1705 address = XEXP (address, 0);
/* Strip a pre-decrement/post-increment wrapper, remembering which so
   the (elided) output code can print the '-'/'+' markers.  */
1710 pre_dec = (GET_CODE (address) == PRE_DEC);
1711 post_inc = (GET_CODE (address) == POST_INC);
1712 if (pre_dec || post_inc)
1713 address = XEXP (address, 0);
1715 gcc_assert (GET_CODE (address) == REG);
1720 fputs (reg_names [REGNO (address)], file);
1724 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1728 /* Print an operand to an assembler instruction. */
/* NOTE(review): the `switch (code)' and its case labels ('B', 'C',
   'o'/'O', 'h', ...) are elided from this extract; the fragments below
   are the case bodies.  */
1731 xstormy16_print_operand (FILE *file, rtx x, int code)
1736 /* There is either one bit set, or one bit clear, in X.
1737 Print it preceded by '#'. */
1739 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1740 HOST_WIDE_INT xx = 1;
1743 if (GET_CODE (x) == CONST_INT)
1746 output_operand_lossage ("'B' operand is not constant");
1748 /* GCC sign-extends masks with the MSB set, so we have to
1749 detect all the cases that differ only in sign extension
1750 beyond the bits we care about. Normally, the predicates
1751 and constraints ensure that we have the right values. This
1752 works correctly for valid masks. */
1753 if (bits_set[xx & 7] <= 1)
1755 /* Remove sign extension bits. */
1756 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1758 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1760 l = exact_log2 (xx);
1764 /* Add sign extension bits. */
1765 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1766 xx |= ~(HOST_WIDE_INT)0xff;
1767 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1768 xx |= ~(HOST_WIDE_INT)0xffff;
1769 l = exact_log2 (~xx);
1773 output_operand_lossage ("'B' operand has multiple bits set")
1775 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1780 /* Print the symbol without a surrounding @fptr(). */
1781 if (GET_CODE (x) == SYMBOL_REF)
1782 assemble_name (file, XSTR (x, 0));
1783 else if (GET_CODE (x) == LABEL_REF)
1784 output_asm_label (x);
1786 xstormy16_print_operand_address (file, x);
1791 /* Print the immediate operand less one, preceded by '#'.
1792 For 'O', negate it first. */
1794 HOST_WIDE_INT xx = 0;
1796 if (GET_CODE (x) == CONST_INT)
1799 output_operand_lossage ("'o' operand is not constant");
1804 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1809 /* Print the shift mask for bp/bn. */
1811 HOST_WIDE_INT xx = 1;
1814 if (GET_CODE (x) == CONST_INT)
1817 output_operand_lossage ("'B' operand is not constant");
1821 fputs (IMMEDIATE_PREFIX, file);
1822 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1827 /* Handled below. */
1831 output_operand_lossage ("xstormy16_print_operand: unknown code");
/* Fallback for codes "handled below": print X by its rtx class.  */
1835 switch (GET_CODE (x))
1838 fputs (reg_names [REGNO (x)], file);
1842 xstormy16_print_operand_address (file, XEXP (x, 0));
1846 /* Some kind of constant or label; an immediate operand,
1847 so prefix it with '#' for the assembler. */
1848 fputs (IMMEDIATE_PREFIX, file);
1849 output_addr_const (file, x);
1856 /* Expander for the `casesi' pattern.
1857 INDEX is the index of the switch statement.
1858 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1859 to the first table entry.
1860 RANGE is the number of table entries.
1861 TABLE is an ADDR_VEC that is the jump table.
1862 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1863 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1866 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1867 rtx table, rtx default_label)
1869 HOST_WIDE_INT range_i = INTVAL (range);
/* NOTE(review): the declaration of `int_index' is elided in this
   extract.  */
1872 /* This code uses 'br', so it can deal only with tables of size up to
1874 if (range_i >= 8192)
1875 sorry ("switch statement of size %lu entries too large",
1876 (unsigned long) range_i);
/* Normalize INDEX to a zero-based value, branch to the default label
   when out of range, scale by 4 (each table entry is a jmpf), and
   dispatch via tablejump_pcrel.  */
1878 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1880 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1882 int_index = gen_lowpart_common (HImode, index);
1883 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1884 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1887 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1888 instructions, without label or alignment or any other special
1889 constructs. We know that the previous instruction will be the
1890 `tablejump_pcrel' output above.
1892 TODO: it might be nice to output 'br' instructions if they could
1896 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
/* NOTE(review): declarations of `vlen'/`idx' are elided in this
   extract.  */
1900 switch_to_section (current_function_section ());
1902 vlen = XVECLEN (table, 0);
1903 for (idx = 0; idx < vlen; idx++)
1905 fputs ("\tjmpf ", file);
1906 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1911 /* Expander for the `call' patterns.
1912 INDEX is the index of the switch statement.
1913 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1914 to the first table entry.
1915 RANGE is the number of table entries.
1916 TABLE is an ADDR_VEC that is the jump table.
1917 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1918 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
/* NOTE(review): the comment above looks copy-pasted from `casesi' and
   does not describe these parameters (RETVAL/DEST/COUNTER) — worth
   fixing in the full file.  */
1921 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
/* NOTE(review): declarations of `call'/`temp' are elided in this
   extract.  */
1924 enum machine_mode mode;
1926 gcc_assert (GET_CODE (dest) == MEM);
1927 dest = XEXP (dest, 0);
1929 if (! CONSTANT_P (dest)
1930 && GET_CODE (dest) != REG)
1931 dest = force_reg (Pmode, dest);
1936 mode = GET_MODE (retval);
1938 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1941 call = gen_rtx_SET (VOIDmode, retval, call);
/* Indirect calls additionally USE a zeroed scratch register,
   presumably for the far-call sequence — confirm against the call
   patterns in the .md file.  */
1943 if (! CONSTANT_P (dest))
1945 temp = gen_reg_rtx (HImode);
1946 emit_move_insn (temp, const0_rtx);
1951 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1952 gen_rtx_USE (VOIDmode, temp)));
1953 emit_call_insn (call);
1956 /* Expanders for multiword computational operations. */
1958 /* Expander for arithmetic operations; emit insns to compute
1960 (set DEST (CODE:MODE SRC0 SRC1))
1962 When CODE is COMPARE, a branch template is generated
1963 (this saves duplicating code in xstormy16_split_cbranch). */
1966 xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
1967 rtx dest, rtx src0, rtx src1)
/* Word-at-a-time expansion: each iteration handles one HImode word,
   chaining through the carry register for add/sub/compare.  */
1969 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1974 emit_move_insn (src0, const0_rtx);
1976 for (i = 0; i < num_words; i++)
1978 rtx w_src0, w_src1, w_dest;
1981 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1982 i * UNITS_PER_WORD);
1983 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1984 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
/* Addition: skip a word when adding constant zero (case structure
   elided in this extract); otherwise use the carry-in/carry-out add
   patterns.  */
1990 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1994 insn = gen_addchi4 (w_dest, w_src0, w_src1);
1996 insn = gen_addchi5 (w_dest, w_src0, w_src1);
/* COMPARE: for the most-significant word, build the full
   branch/subtract/clobber parallel used by the cbranch splitter.  */
2002 if (code == COMPARE && i == num_words - 1)
2004 rtx branch, sub, clobber, sub_1;
2006 sub_1 = gen_rtx_MINUS (HImode, w_src0,
2007 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
2008 sub = gen_rtx_SET (VOIDmode, w_dest,
2009 gen_rtx_MINUS (HImode, sub_1, w_src1));
2010 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
2011 branch = gen_rtx_SET (VOIDmode, pc_rtx,
2012 gen_rtx_IF_THEN_ELSE (VOIDmode,
2018 insn = gen_rtx_PARALLEL (VOIDmode,
2019 gen_rtvec (3, branch, sub, clobber));
/* Subtraction of constant zero can likewise be skipped.  */
2023 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
2026 insn = gen_subchi4 (w_dest, w_src0, w_src1);
2028 insn = gen_subchi5 (w_dest, w_src0, w_src1);
/* Logical ops: the identity operand (-1 for AND, 0 for IOR/XOR) lets
   a word be skipped.  */
2034 if (GET_CODE (w_src1) == CONST_INT
2035 && INTVAL (w_src1) == -(code == AND)
2038 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
2043 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
2054 /* If we emit nothing, try_split() will think we failed. So emit
2055 something that does nothing and can be optimized away. */
2060 /* The shift operations are split at output time for constant values;
2061 variable-width shifts get handed off to a library routine.
2063 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2064 SIZE_R will be a CONST_INT, X will be a hard register. */
2067 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2068 rtx x, rtx size_r, rtx temp)
/* NOTE(review): declarations of `r' (the static output buffer) and
   `size', plus the switch statements selecting among the sprintf
   cases, are elided in this extract.  */
2071 const char *r0, *r1, *rt;
2074 gcc_assert (GET_CODE (size_r) == CONST_INT
2075 && GET_CODE (x) == REG && mode == SImode);
2076 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
/* r0/r1 name the low/high halves of the SImode register pair.  */
2081 r0 = reg_names [REGNO (x)];
2082 r1 = reg_names [REGNO (x) + 1];
2084 /* For shifts of size 1, we can use the rotate instructions. */
2090 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2093 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2096 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2104 /* For large shifts, there are easy special cases. */
/* Shift by exactly 16: a word move plus clearing/sign-filling.  */
2110 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2113 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2116 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
/* Shift by more than 16: word move, fill, then shift the remainder.  */
2128 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2129 r1, r0, r0, r1, (int) size - 16);
2132 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2133 r0, r1, r1, r0, (int) size - 16);
2136 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2137 r0, r1, r1, r0, (int) size - 16);
2145 /* For the rest, we have to do more work. In particular, we
2146 need a temporary. */
2147 rt = reg_names [REGNO (temp)];
2152 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2153 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2158 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2159 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2164 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2165 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2174 /* Attribute handling. */
2176 /* Return nonzero if the function is an interrupt function. */
2179 xstormy16_interrupt_function_p (void)
/* NOTE(review): the local `attributes' declaration and the early-out
   for a null current_function_decl are elided in this extract.  */
2183 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2184 any functions are declared, which is demonstrably wrong, but
2185 it is worked around here. FIXME. */
2189 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2190 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2193 #undef TARGET_ATTRIBUTE_TABLE
2194 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2196 static tree xstormy16_handle_interrupt_attribute
2197 (tree *, tree, tree, int, bool *);
2198 static tree xstormy16_handle_below100_attribute
2199 (tree *, tree, tree, int, bool *);
/* Machine attributes recognized by this target: "interrupt" on
   function types, and "below100"/"BELOW100" on variables/types.  */
2201 static const struct attribute_spec xstormy16_attribute_table[] =
2203 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler. */
2204 { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
2205 { "BELOW100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2206 { "below100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2207 { NULL, 0, 0, false, false, false, NULL }
2210 /* Handle an "interrupt" attribute;
2211 arguments as in struct attribute_spec.handler. */
2214 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2215 tree args ATTRIBUTE_UNUSED,
2216 int flags ATTRIBUTE_UNUSED,
/* Only function types may carry "interrupt"; otherwise warn and drop
   the attribute.  */
2219 if (TREE_CODE (*node) != FUNCTION_TYPE)
2221 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2222 IDENTIFIER_POINTER (name));
2223 *no_add_attrs = true;
2229 /* Handle an "below" attribute;
2230 arguments as in struct attribute_spec.handler. */
2233 xstormy16_handle_below100_attribute (tree *node,
2234 tree name ATTRIBUTE_UNUSED,
2235 tree args ATTRIBUTE_UNUSED,
2236 int flags ATTRIBUTE_UNUSED,
/* Accept the attribute only on variables, pointer types and typedefs.  */
2239 if (TREE_CODE (*node) != VAR_DECL
2240 && TREE_CODE (*node) != POINTER_TYPE
2241 && TREE_CODE (*node) != TYPE_DECL)
2243 warning (OPT_Wattributes,
2244 "%<__BELOW100__%> attribute only applies to variables");
2245 *no_add_attrs = true;
/* Automatic (stack) variables cannot live below address 100, so the
   variable must have static storage or external linkage.  */
2247 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2249 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2251 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2252 "with auto storage class")
2253 *no_add_attrs = true;
2260 #undef TARGET_INIT_BUILTINS
2261 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2262 #undef TARGET_EXPAND_BUILTIN
2263 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
/* NOTE(review): the opening of this builtin-description struct/array
   (name and md_code fields) is elided in this extract.  */
2269 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2270 const char * arg_types; /* s=short,l=long, upper case for unsigned. */
/* Division/modulo helpers: each md pattern yields both quotient and
   remainder; "rt"/"tr" in arg_ops select which one is the return
   value and which goes to the temp.  */
2274 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2275 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2276 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2277 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2278 { NULL, 0, NULL, NULL }
/* Register the target builtins from s16builtins[], constructing each
   function type from its arg_types string.  */
2282 xstormy16_init_builtins (void)
2284 tree args, ret_type, arg;
/* NOTE(review): declarations of the loop counters `i'/`a' are elided
   in this extract.  */
2287 ret_type = void_type_node;
2289 for (i = 0; s16builtins[i].name; i++)
2291 args = void_list_node;
/* Walk arg_types right-to-left so the tree_cons chain ends up in
   left-to-right parameter order; index 0 is the return type.  */
2292 for (a = strlen (s16builtins[i].arg_types) - 1; a >= 0; a--)
2294 switch (s16builtins[i].arg_types[a])
2296 case 's': arg = short_integer_type_node; break;
2297 case 'S': arg = short_unsigned_type_node; break;
2298 case 'l': arg = long_integer_type_node; break;
2299 case 'L': arg = long_unsigned_type_node; break;
2300 default: gcc_unreachable ();
2305 args = tree_cons (NULL_TREE, arg, args);
2307 add_builtin_function (s16builtins[i].name,
2308 build_function_type (ret_type, args),
2309 i, BUILT_IN_MD, NULL, NULL);
/* Expand a call to one of the s16builtins[] target builtins: map each
   arg_ops character to an insn operand ('r' = return value, 't' =
   scratch, digits = call arguments), emit the md pattern, then copy
   back operands that needed reloading.  */
2314 xstormy16_expand_builtin (tree exp, rtx target,
2315 rtx subtarget ATTRIBUTE_UNUSED,
2316 enum machine_mode mode ATTRIBUTE_UNUSED,
2317 int ignore ATTRIBUTE_UNUSED)
2319 rtx op[10], args[10], pat, copyto[10], retval = 0;
2320 tree fndecl, argtree;
/* NOTE(review): declarations of `i', `a', `o', `code', `omode' etc.
   are elided in this extract.  */
2323 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2324 argtree = TREE_OPERAND (exp, 1);
2325 i = DECL_FUNCTION_CODE (fndecl);
2326 code = s16builtins[i].md_code;
2328 for (a = 0; a < 10 && argtree; a++)
2330 args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
2331 argtree = TREE_CHAIN (argtree);
2334 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2336 char ao = s16builtins[i].arg_ops[o];
2337 char c = insn_data[code].operand[o].constraint[0];
2342 omode = insn_data[code].operand[o].mode;
/* 'r': reuse TARGET when provided; 't': fresh scratch; digit:
   the corresponding expanded argument.  */
2344 op[o] = target ? target : gen_reg_rtx (omode);
2346 op[o] = gen_reg_rtx (omode);
2348 op[o] = args[(int) hex_value (ao)];
/* Operands rejected by the predicate get a register copy; outputs
   ('+'/'=') are written to a temp and copied back afterwards.  */
2350 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2352 if (c == '+' || c == '=')
2355 op[o] = gen_reg_rtx (omode);
2358 op[o] = copy_to_mode_reg (omode, op[o]);
2365 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2366 op[5], op[6], op[7], op[8], op[9]);
2369 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2372 emit_move_insn (copyto[o], op[o]);
2373 if (op[o] == retval)
/* NOTE(review): the `return retval;' tail is not visible in this
   extract.  */
2380 /* Look for combinations of insns that can be converted to BN or BP
2381 opcodes. This is, unfortunately, too complex to do with MD
/* Rewrite the conditional branch INSN so that it tests a single bit of a
   below-100 memory location directly, which lets the single-bit branch
   (BN/BP) patterns match.  Works backwards from the branch: finds the
   sign-extend or AND that produces the compared register, an optional
   right shift, and the originating load, then edits the branch condition
   in place.  NOTE(review): runs on hard-register RTL — presumably called
   only from the machine-dependent reorg pass; confirm.  */
2385 combine_bnp (rtx insn)
2387 int insn_code, regno, need_extend;
2389 rtx cond, reg, and, load, qireg, mem;
2390 enum machine_mode load_mode = QImode;
2391 enum machine_mode and_mode = QImode;
2392 rtx shift = NULL_RTX;
/* Only the two HImode conditional-branch patterns are candidates.  */
2394 insn_code = recog_memoized (insn);
2395 if (insn_code != CODE_FOR_cbranchhi
2396 && insn_code != CODE_FOR_cbranchhi_neg)
/* Dig the comparison rtx out of the branch pattern.  */
2399 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2400 cond = XEXP (cond, 1); /* if */
2401 cond = XEXP (cond, 0); /* cond */
2402 switch (GET_CODE (cond))
/* The branch must compare a register against zero, and that register
   must die at the branch so the feeding insns can be bypassed.  */
2416 reg = XEXP (cond, 0);
2417 if (GET_CODE (reg) != REG)
2419 regno = REGNO (reg);
2420 if (XEXP (cond, 1) != const0_rtx)
2422 if (! find_regno_note (insn, REG_DEAD, regno))
2424 qireg = gen_rtx_REG (QImode, regno);
2428 /* LT and GE conditionals should have a sign extend before
/* Scan backwards for the extendqihi2 (or a movhi, used when testing
   bit 15) that sets REG; give up if REG is mentioned by any other
   intervening insn.  */
2430 for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
2432 int and_code = recog_memoized (and);
2434 if (and_code == CODE_FOR_extendqihi2
2435 && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
2436 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
2439 if (and_code == CODE_FOR_movhi_internal
2440 && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
2442 /* This is for testing bit 15. */
2447 if (reg_mentioned_p (reg, and))
2450 if (GET_CODE (and) != NOTE
2451 && GET_CODE (and) != INSN)
2457 /* EQ and NE conditionals have an AND before them. */
2458 for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
2460 if (recog_memoized (and) == CODE_FOR_andhi3
2461 && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
2462 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
2465 if (reg_mentioned_p (reg, and))
2468 if (GET_CODE (and) != NOTE
2469 && GET_CODE (and) != INSN)
2475 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2476 followed by an AND like this:
2478 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2479 (clobber (reg:BI carry))]
2481 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2483 Attempt to detect this here. */
2484 for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
2486 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2487 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2488 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2491 if (reg_mentioned_p (reg, shift)
2492 || (GET_CODE (shift) != NOTE
2493 && GET_CODE (shift) != INSN))
/* Finally locate the load that brings the tested value into REG (or
   its QImode counterpart); it must be a non-volatile below-100
   memory operand.  */
2504 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
2506 load = prev_real_insn (load))
2508 int load_code = recog_memoized (load);
2510 if (load_code == CODE_FOR_movhi_internal
2511 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2512 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2513 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
2519 if (load_code == CODE_FOR_movqi_internal
2520 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2521 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
2527 if (load_code == CODE_FOR_zero_extendqihi2
2528 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2529 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2536 if (reg_mentioned_p (reg, load))
2539 if (GET_CODE (load) != NOTE
2540 && GET_CODE (load) != INSN)
2546 mem = SET_SRC (PATTERN (load));
/* Select the sign bit of the loaded mode (0x8000 for HImode loads,
   0x80 for QImode loads).  */
2550 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2552 /* If the mem includes a zero-extend operation and we are
2553 going to generate a sign-extend operation then move the
2554 mem inside the zero-extend. */
2555 if (GET_CODE (mem) == ZERO_EXTEND)
2556 mem = XEXP (mem, 0);
/* For EQ/NE the AND constant must isolate exactly one bit; fold any
   preceding right shift into the mask.  */
2560 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
2563 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));
2566 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
/* The bit-test patterns operate on a byte, so narrow a HImode access
   to the byte that actually contains the tested bit (advance the
   address by one when no low-byte bit is in the mask).  */
2569 if (load_mode == HImode)
2571 rtx addr = XEXP (mem, 0);
2573 if (! (mask & 0xff))
2575 addr = plus_constant (addr, 1);
2578 mem = gen_rtx_MEM (QImode, addr);
/* Rewrite the branch condition in place and force re-recognition of
   the modified insn.  */
2582 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem)
2584 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2586 INSN_CODE (insn) = -1;
/* Any right shift has been folded into the mask, so it is now dead.  */
2592 if (shift != NULL_RTX)
2593 delete_insn (shift);
/* Implement TARGET_MACHINE_DEPENDENT_REORG: walk the whole insn
   stream, skipping anything that is not a jump; jump insns are
   candidates for the BN/BP bit-test conversion (presumably handed to
   combine_bnp — TODO confirm, loop body not fully visible here).  */
2597 xstormy16_reorg (void)
2601 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Only jump insns can be converted; skip everything else.  */
2603 if (! JUMP_P (insn))
2609 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2612 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2614 const HOST_WIDE_INT size = int_size_in_bytes (type);
2615 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Target hook overrides.  Assembler directives for aligned 16- and
   32-bit data items.  */
2618 #undef TARGET_ASM_ALIGNED_HI_OP
2619 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2620 #undef TARGET_ASM_ALIGNED_SI_OP
2621 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2622 #undef TARGET_ENCODE_SECTION_INFO
2623 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2625 /* Select_section doesn't handle .bss_below100. */
2626 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2627 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
/* Thunk output for C++ multiple inheritance (no vcall offsets).  */
2629 #undef TARGET_ASM_OUTPUT_MI_THUNK
2630 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2631 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2632 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
/* RTX and address cost estimates used by the optimizers.  */
2634 #undef TARGET_RTX_COSTS
2635 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2636 #undef TARGET_ADDRESS_COST
2637 #define TARGET_ADDRESS_COST xstormy16_address_cost
/* Varargs support hooks.  */
2639 #undef TARGET_BUILD_BUILTIN_VA_LIST
2640 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2641 #undef TARGET_EXPAND_BUILTIN_VA_START
2642 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2643 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2644 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
/* Always promote sub-word function arguments, return values and
   prototype arguments.  */
2646 #undef TARGET_PROMOTE_FUNCTION_ARGS
2647 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
2648 #undef TARGET_PROMOTE_FUNCTION_RETURN
2649 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
2650 #undef TARGET_PROMOTE_PROTOTYPES
2651 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2653 #undef TARGET_RETURN_IN_MEMORY
2654 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2656 #undef TARGET_MACHINE_DEPENDENT_REORG
2657 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
/* Instantiate the target hook vector with the overrides above.  */
2659 struct gcc_target targetm = TARGET_INITIALIZER;
/* Garbage-collector root tables generated for this file (GTY).  */
2661 #include "gt-stormy16.h"