1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
45 #include "target-def.h"
47 #include "langhooks.h"
/* Forward declarations for static functions defined later in this file.
   NOTE(review): this listing has gaps (the embedded line numbers jump),
   so some prototypes from the original file are missing here.  */
52 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
53 static void xstormy16_asm_out_constructor (rtx, int);
54 static void xstormy16_asm_out_destructor (rtx, int);
55 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
58 static void xstormy16_init_builtins (void);
59 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
60 static bool xstormy16_rtx_costs (rtx, int, int, int *, bool);
61 static int xstormy16_address_cost (rtx, bool);
62 static bool xstormy16_return_in_memory (const_tree, const_tree);
64 /* Define the information needed to generate branch and scc insns. This is
65 stored from the compare operation. */
/* Operands of the pending comparison; set by the cbranch expanders and
   read (and overwritten for SImode word-wise compares) by
   xstormy16_emit_cbranch below.  */
66 struct rtx_def * xstormy16_compare_op0;
67 struct rtx_def * xstormy16_compare_op1;
/* Section for "below 100" objects; GTY-marked so the cached section
   pointer survives precompiled headers.  */
69 static GTY(()) section *bss100_section;
71 /* Compute a (partial) cost for rtx X. Return true if the complete
72 cost has been computed, and false if subexpressions should be
73 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): interior lines of this function (return type, braces,
   and the switch over GET_CODE (x)) are missing from this listing; the
   fragments below are individual case bodies.  */
76 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
77 int *total, bool speed ATTRIBUTE_UNUSED)
/* Small constants 0..15 are the cheapest (half an insn); 0..255 cost
   one insn; anything else two.  */
82 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
83 *total = COSTS_N_INSNS (1) / 2;
84 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
85 *total = COSTS_N_INSNS (1);
87 *total = COSTS_N_INSNS (2);
94 *total = COSTS_N_INSNS(2);
/* Presumably the MULT and DIV cases — TODO confirm against the
   unabridged file.  */
98 *total = COSTS_N_INSNS (35 + 6);
101 *total = COSTS_N_INSNS (51 - 6);
/* Address-cost hook: constant addresses are cheapest, reg+offset
   costs more.  NOTE(review): the return type line, braces, and the
   final arm of this conditional expression are missing from this
   listing.  */
110 xstormy16_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
112 return (GET_CODE (x) == CONST_INT ? 2
113 : GET_CODE (x) == PLUS ? 7
117 /* Branches are handled as follows:
119 1. HImode compare-and-branches. The machine supports these
120 natively, so the appropriate pattern is emitted directly.
122 2. SImode EQ and NE. These are emitted as pairs of HImode
123 compare-and-branches.
125 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
126 of a SImode subtract followed by a branch (not a compare-and-branch),
132 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
140 /* Emit a branch of kind CODE to location LOC. */
/* Consumes the saved comparison operands xstormy16_compare_op0/op1.
   HImode compares map directly onto the machine's compare-and-branch;
   SImode compares are decomposed as described in the comment block
   above.  NOTE(review): interior lines (braces, some declarations and
   statements) are missing from this listing.  */
143 xstormy16_emit_cbranch (enum rtx_code code, rtx loc)
145 rtx op0 = xstormy16_compare_op0;
146 rtx op1 = xstormy16_compare_op1;
147 rtx condition_rtx, loc_ref, branch, cy_clobber;
149 enum machine_mode mode;
151 mode = GET_MODE (op0);
152 gcc_assert (mode == HImode || mode == SImode);
/* SImode GT/LE/GTU/LEU: synthesized from two recursive cbranches,
   one for the "less" part and one for the "equal/not-equal" part.  */
155 && (code == GT || code == LE || code == GTU || code == LEU))
157 int unsigned_p = (code == GTU || code == LEU);
158 int gt_p = (code == GT || code == GTU);
162 lab = gen_label_rtx ();
163 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
164 /* This should be generated as a comparison against the temporary
165 created by the previous insn, but reload can't handle that. */
166 xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
/* SImode EQ/NE against a nonzero operand: compare word by word,
   branching out early on the first differing word.  */
171 else if (mode == SImode
172 && (code == NE || code == EQ)
173 && op1 != const0_rtx)
176 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
180 lab = gen_label_rtx ();
182 for (i = 0; i < num_words - 1; i++)
184 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
186 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
188 xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
190 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
192 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
194 xstormy16_emit_cbranch (code, loc);
201 /* We can't allow reload to try to generate any reload after a branch,
202 so when some register must match we must make the temporary ourselves. */
206 tmp = gen_reg_rtx (mode);
207 emit_move_insn (tmp, op0);
/* Build the branch pattern: a SET of pc wrapped, together with the
   carry-register clobber (and for SImode, the subtract), in a
   PARALLEL.  */
211 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
212 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
213 branch = gen_rtx_SET (VOIDmode, pc_rtx,
214 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
217 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
220 vec = gen_rtvec (2, branch, cy_clobber);
221 else if (code == NE || code == EQ)
222 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
227 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
229 sub = gen_rtx_CLOBBER (SImode, op0);
231 vec = gen_rtvec (3, branch, sub, cy_clobber);
234 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
237 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
238 the arithmetic operation. Most of the work is done by
239 xstormy16_expand_arith. */
/* NOTE(review): braces and some declarations (seq, last_insn, compare)
   are missing from this listing.  The emitted COMPARE sequence's last
   insn is patched in place to carry the real condition and label.  */
242 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
245 rtx op0 = XEXP (comparison, 0);
246 rtx op1 = XEXP (comparison, 1);
251 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
255 gcc_assert (INSN_P (seq));
/* Walk to the last insn of the generated sequence.  */
258 while (NEXT_INSN (last_insn) != NULL_RTX)
259 last_insn = NEXT_INSN (last_insn);
/* Rewrite the final jump: install the original comparison code and
   point it at LABEL.  */
261 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
262 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
263 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
268 /* Return the string to output a conditional branch to LABEL, which is
269 the operand number of the label.
271 OP is the conditional expression, or NULL for branch-always.
273 REVERSED is nonzero if we should reverse the sense of the comparison.
/* NOTE(review): interior lines (return type, braces, some branches of
   the logic) are missing from this listing.  Returns a pointer into a
   static buffer, so the result is only valid until the next call.  */
278 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
280 static char string[64];
/* A long branch is needed when the insn length attribute says the
   short form won't reach; in that case emit the inverted short branch
   around a jmpf.  */
281 int need_longbranch = (op != NULL_RTX
282 ? get_attr_length (insn) == 8
283 : get_attr_length (insn) == 4);
284 int really_reversed = reversed ^ need_longbranch;
287 const char *operands;
296 sprintf (string, "%s %s", ccode, label);
300 code = GET_CODE (op);
/* The hardware wants the register first; swap the condition if the
   comparison has it second.  */
302 if (GET_CODE (XEXP (op, 0)) != REG)
304 code = swap_condition (code);
310 /* Work out which way this really branches. */
312 code = reverse_condition (code);
/* Map the RTL condition code onto the assembler's condition
   mnemonic suffix.  */
316 case EQ: ccode = "z"; break;
317 case NE: ccode = "nz"; break;
318 case GE: ccode = "ge"; break;
319 case LT: ccode = "lt"; break;
320 case GT: ccode = "gt"; break;
321 case LE: ccode = "le"; break;
322 case GEU: ccode = "nc"; break;
323 case LTU: ccode = "c"; break;
324 case GTU: ccode = "hi"; break;
325 case LEU: ccode = "ls"; break;
/* Long form: branch around an absolute jmpf to the real target.  */
332 templ = "b%s %s,.+8 | jmpf %s";
335 sprintf (string, templ, ccode, operands, label);
340 /* Return the string to output a conditional branch to LABEL, which is
341 the operand number of the label, but suitable for the tail of a
344 OP is the conditional expression (OP is never NULL_RTX).
346 REVERSED is nonzero if we should reverse the sense of the comparison.
/* SImode variant: prefixes the branch with the insn ("or" for EQ/NE,
   "sbc" for the ordered codes) that combines the high word into the
   condition.  NOTE(review): interior lines are missing from this
   listing; result points into a static buffer.  */
351 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
353 static char string[64];
354 int need_longbranch = get_attr_length (insn) >= 8;
355 int really_reversed = reversed ^ need_longbranch;
361 code = GET_CODE (op);
363 /* Work out which way this really branches. */
365 code = reverse_condition (code);
369 case EQ: ccode = "z"; break;
370 case NE: ccode = "nz"; break;
371 case GE: ccode = "ge"; break;
372 case LT: ccode = "lt"; break;
373 case GEU: ccode = "nc"; break;
374 case LTU: ccode = "c"; break;
376 /* The missing codes above should never be generated. */
/* EQ/NE: OR the two halves of the register pair together so the
   zero flag reflects the whole SImode value.  */
387 gcc_assert (GET_CODE (XEXP (op, 0)) == REG);
389 regnum = REGNO (XEXP (op, 0));
390 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
/* Ordered codes: finish the 32-bit subtract with sbc on the high
   words before branching on the resulting flags.  */
394 case GE: case LT: case GEU: case LTU:
395 strcpy (prevop, "sbc %2,%3");
403 templ = "%s | b%s .+6 | jmpf %s";
405 templ = "%s | b%s %s";
406 sprintf (string, templ, prevop, ccode, label);
411 /* Many machines have some registers that cannot be copied directly to or from
412 memory or even from other types of registers. An example is the `MQ'
413 register, which on most machines, can only be copied to or from general
414 registers, but not memory. Some machines allow copying all registers to and
415 from memory, but require a scratch register for stores to some memory
416 locations (e.g., those with symbolic address on the RT, and those with
417 certain symbolic address on the SPARC when compiling PIC). In some cases,
418 both an intermediate and a scratch register are required.
420 You should define these macros to indicate to the reload phase that it may
421 need to allocate at least one register for a reload in addition to the
422 register to contain the data. Specifically, if copying X to a register
423 RCLASS in MODE requires an intermediate register, you should define
424 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
425 whose registers can be used as intermediate registers or scratch registers.
427 If copying a register RCLASS in MODE to X requires an intermediate or scratch
428 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
429 largest register class required. If the requirements for input and output
430 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
431 instead of defining both macros identically.
433 The values returned by these macros are often `GENERAL_REGS'. Return
434 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
435 to or from a register of RCLASS in MODE without requiring a scratch register.
436 Do not define this macro if it would always return `NO_REGS'.
438 If a scratch register is required (either with or without an intermediate
439 register), you should define patterns for `reload_inM' or `reload_outM', as
440 required. These patterns, which will normally be implemented with a
441 `define_expand', should be similar to the `movM' patterns, except that
442 operand 2 is the scratch register.
444 Define constraints for the reload register and scratch register that contain
445 a single register class. If the original reload register (whose class is
446 RCLASS) can meet the constraint given in the pattern, the value returned by
447 these macros is used for the class of the scratch register. Otherwise, two
448 additional reload registers are required. Their classes are obtained from
449 the constraints in the insn pattern.
451 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
452 either be in a hard register or in memory. Use `true_regnum' to find out;
453 it will return -1 if the pseudo is in memory and the hard register number if
456 These macros should not be used in the case where a particular class of
457 registers can only be copied to memory and not to another class of
458 registers. In that case, secondary reload registers are not needed and
459 would not be helpful. Instead, a stack location must be used to perform the
460 copy and the `movM' pattern should use memory as an intermediate storage.
461 This case often occurs between floating-point and general registers. */
/* Secondary-reload hook (see the large comment block above).
   NOTE(review): the return type line, braces, and the function's
   return statements are missing from this listing.  */
464 xstormy16_secondary_reload_class (enum reg_class rclass,
465 enum machine_mode mode,
468 /* This chip has the interesting property that only the first eight
469 registers can be moved to/from memory. */
/* X counts as "memory" here if it is a MEM, or a (subreg of a)
   pseudo that may end up in memory (true_regnum == -1 or beyond the
   hard registers).  */
470 if ((GET_CODE (x) == MEM
471 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
472 && (true_regnum (x) == -1
473 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
474 && ! reg_class_subset_p (rclass, EIGHT_REGS))
/* Preferred-reload-class hook.  NOTE(review): the return type, braces,
   and return statements are missing from this listing; only the
   GENERAL_REGS-from-MEM test is visible.  */
481 xstormy16_preferred_reload_class (rtx x, enum reg_class rclass)
483 if (rclass == GENERAL_REGS
484 && GET_CODE (x) == MEM)
490 /* Predicate for symbols and addresses that reflect special 8-bit
/* True for operands addressable with the short "below 100" forms:
   suitably-flagged SYMBOL_REFs (possibly inside a CONST plus offset)
   and literal addresses in the 0x0000-0x00ff / 0x7f00-0x7fff windows.
   NOTE(review): braces and return statements are missing from this
   listing.  */
493 xstormy16_below100_symbol (rtx x,
494 enum machine_mode mode ATTRIBUTE_UNUSED)
/* Strip a CONST (PLUS sym offset) wrapper down to the SYMBOL_REF.  */
496 if (GET_CODE (x) == CONST)
498 if (GET_CODE (x) == PLUS
499 && GET_CODE (XEXP (x, 1)) == CONST_INT)
502 if (GET_CODE (x) == SYMBOL_REF)
503 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
505 if (GET_CODE (x) == CONST_INT)
507 HOST_WIDE_INT i = INTVAL (x);
508 if ((i >= 0x0000 && i <= 0x00ff)
509 || (i >= 0x7f00 && i <= 0x7fff))
515 /* Likewise, but only for non-volatile MEMs, for patterns where the
516 MEM will get split into smaller sized accesses. */
/* NOTE(review): the return type line, braces, and the early-return
   for the volatile case are missing from this listing.  */
518 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
520 if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
522 return xstormy16_below100_operand (x, mode);
525 /* Expand an 8-bit IOR. This either detects the one case we can
526 actually do, or uses a 16-bit IOR. */
/* operands[] is the standard expander operand array: 0 = dest,
   and presumably 1 = input, 2 = value — TODO confirm against the
   missing lines that initialize in/out/val.  NOTE(review): braces
   and several statements are missing from this listing.  */
528 xstormy16_expand_iorqi3 (rtx *operands)
530 rtx in, out, outsub, val;
/* Fast path: setting a single bit can be done directly on a
   below-100 operand or a register.  */
536 if (xstormy16_onebit_set_operand (val, QImode))
538 if (!xstormy16_below100_or_register (in, QImode))
539 in = copy_to_mode_reg (QImode, in);
540 if (!xstormy16_below100_or_register (out, QImode))
541 out = gen_reg_rtx (QImode);
542 emit_insn (gen_iorqi3_internal (out, in, val));
543 if (out != operands[0])
544 emit_move_insn (operands[0], out);
/* General case: force everything into registers and do the IOR in
   HImode via paradoxical subregs of the QImode values.  */
548 if (GET_CODE (in) != REG)
549 in = copy_to_mode_reg (QImode, in);
550 if (GET_CODE (val) != REG
551 && GET_CODE (val) != CONST_INT)
552 val = copy_to_mode_reg (QImode, val);
553 if (GET_CODE (out) != REG)
554 out = gen_reg_rtx (QImode);
556 in = simplify_gen_subreg (HImode, in, QImode, 0);
557 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
558 if (GET_CODE (val) != CONST_INT)
559 val = simplify_gen_subreg (HImode, val, QImode, 0);
561 emit_insn (gen_iorhi3 (outsub, in, val));
563 if (out != operands[0])
564 emit_move_insn (operands[0], out);
567 /* Likewise, for AND. */
/* Mirror image of xstormy16_expand_iorqi3: the fast path handles
   clearing a single bit, the general case widens to an HImode AND.
   NOTE(review): braces and several statements are missing from this
   listing.  */
569 xstormy16_expand_andqi3 (rtx *operands)
571 rtx in, out, outsub, val;
577 if (xstormy16_onebit_clr_operand (val, QImode))
579 if (!xstormy16_below100_or_register (in, QImode))
580 in = copy_to_mode_reg (QImode, in);
581 if (!xstormy16_below100_or_register (out, QImode))
582 out = gen_reg_rtx (QImode);
583 emit_insn (gen_andqi3_internal (out, in, val));
584 if (out != operands[0])
585 emit_move_insn (operands[0], out);
/* General case: registers only, AND done in HImode.  */
589 if (GET_CODE (in) != REG)
590 in = copy_to_mode_reg (QImode, in);
591 if (GET_CODE (val) != REG
592 && GET_CODE (val) != CONST_INT)
593 val = copy_to_mode_reg (QImode, val);
594 if (GET_CODE (out) != REG)
595 out = gen_reg_rtx (QImode);
597 in = simplify_gen_subreg (HImode, in, QImode, 0);
598 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
599 if (GET_CODE (val) != CONST_INT)
600 val = simplify_gen_subreg (HImode, val, QImode, 0);
602 emit_insn (gen_andhi3 (outsub, in, val));
604 if (out != operands[0])
605 emit_move_insn (operands[0], out);
/* True if X is a CONST_INT whose value plus OFFSET fits in the signed
   12-bit displacement range [-2048, 2047].  */
608 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
609 (GET_CODE (X) == CONST_INT \
610 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
/* True if X is a CONST_INT usable as an absolute address once OFFSET
   is added: non-negative, below 0x8000, and within the low 0x100 or
   the 0x7f00-0x7fff window.  */
612 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
613 (GET_CODE (X) == CONST_INT \
614 && INTVAL (X) + (OFFSET) >= 0 \
615 && INTVAL (X) + (OFFSET) < 0x8000 \
616 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
/* Legitimate-address hook.  Accepts absolute constant addresses,
   base+constant, PRE_MODIFY/POST_INC/PRE_DEC forms, plain base
   registers, and below-100 symbols.  NOTE(review): the return type
   line, braces, and the return statements are missing from this
   listing.  */
619 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
622 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
625 if (GET_CODE (x) == PLUS
626 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
629 /* PR 31232: Do not allow INT+INT as an address. */
630 if (GET_CODE (x) == CONST_INT)
634 if ((GET_CODE (x) == PRE_MODIFY
635 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
636 || GET_CODE (x) == POST_INC
637 || GET_CODE (x) == PRE_DEC)
/* Under strict checking only hard registers are acceptable bases.  */
640 if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
641 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
644 if (xstormy16_below100_symbol (x, mode))
650 /* Return nonzero if memory address X (an RTX) can have different
651 meanings depending on the machine mode of the memory reference it
652 is used for or if the address is valid for some modes but not
655 Autoincrement and autodecrement addresses typically have mode-dependent
656 effects because the amount of the increment or decrement is the size of the
657 operand being addressed. Some machines have other mode-dependent addresses.
658 Many RISC machines have no mode-dependent addresses.
660 You may assume that ADDR is a valid address for the machine.
662 On this chip, this is true if the address is valid with an offset
663 of 0 but not of 6, because in that case it cannot be used as an
664 address for DImode or DFmode, or if the address is a post-increment
665 or pre-decrement address. */
/* NOTE(review): return type, braces, and return statements are missing
   from this listing.  */
667 xstormy16_mode_dependent_address_p (rtx x)
668 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
670 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
673 if (GET_CODE (x) == PLUS
674 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
675 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
678 if (GET_CODE (x) == PLUS)
681 /* Auto-increment addresses are now treated generically in recog.c. */
686 /* A C expression that defines the optional machine-dependent constraint
687 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
688 types of operands, usually memory references, for the target machine.
689 Normally this macro will not be defined. If it is required for a particular
690 target machine, it should return 1 if VALUE corresponds to the operand type
691 represented by the constraint letter C. If C is not defined as an extra
692 constraint, the value returned should be 0 regardless of VALUE. */
/* Implements the machine-specific constraint letters described in the
   comment above; C is the constraint character, X the operand.
   NOTE(review): the return type line, braces, and the switch framing
   around the cases are missing from this listing.  */
694 xstormy16_extra_constraint_p (rtx x, int c)
698 /* 'Q' is for pushes. */
700 return (GET_CODE (x) == MEM
701 && GET_CODE (XEXP (x, 0)) == POST_INC
702 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
704 /* 'R' is for pops. */
706 return (GET_CODE (x) == MEM
707 && GET_CODE (XEXP (x, 0)) == PRE_DEC
708 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
710 /* 'S' is for immediate memory addresses. */
712 return (GET_CODE (x) == MEM
713 && GET_CODE (XEXP (x, 0)) == CONST_INT
714 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
718 /* Not implemented yet. */
721 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
722 for allocating a scratch register for 32-bit shifts. */
724 return (GET_CODE (x) == CONST_INT
725 && (INTVAL (x) < 2 || INTVAL (x) > 15));
727 /* 'Z' is for CONST_INT value zero. This is for adding zero to
728 a register in addhi3, which would otherwise require a carry. */
730 return (GET_CODE (x) == CONST_INT
731 && (INTVAL (x) == 0));
/* Presumably the below-100 constraint letter — TODO confirm.  */
734 return xstormy16_below100_operand (x, GET_MODE (x));
/* Predicate: a memory operand whose address is not a PLUS (i.e. no
   base+offset form).  NOTE(review): the return type line, braces, and
   the early-return for the non-memory case are missing from this
   listing.  */
742 short_memory_operand (rtx x, enum machine_mode mode)
744 if (! memory_operand (x, mode))
746 return (GET_CODE (XEXP (x, 0)) != PLUS);
749 /* Splitter for the 'move' patterns, for modes not directly implemented
750 by hardware. Emit insns to copy a value of mode MODE from SRC to
753 This function is only called when reload_completed.
/* Split a multi-word move of mode MODE from SRC to DEST into
   word_mode moves (see the comment above).  Only valid after reload.
   NOTE(review): braces, some declarations, and several statements are
   missing from this listing.  */
757 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
759 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
760 int direction, end, i;
761 int src_modifies = 0;
762 int dest_modifies = 0;
763 int src_volatile = 0;
764 int dest_volatile = 0;
766 rtx auto_inc_reg_rtx = NULL_RTX;
768 /* Check initial conditions. */
769 gcc_assert (reload_completed
770 && mode != QImode && mode != HImode
771 && nonimmediate_operand (dest, mode)
772 && general_operand (src, mode));
774 /* This case is not supported below, and shouldn't be generated. */
775 gcc_assert (GET_CODE (dest) != MEM || GET_CODE (src) != MEM);
777 /* This case is very very bad after reload, so trap it now. */
778 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
780 /* The general idea is to copy by words, offsetting the source and
781 destination. Normally the least-significant word will be copied
782 first, but for pre-dec operations it's better to copy the
783 most-significant word first. Only one operand can be a pre-dec
786 It's also possible that the copy overlaps so that the direction
/* Record side effects / volatility of the MEM side, then strip the
   volatile flag from a working copy so the per-word MEMs can be
   re-marked individually.  */
790 if (GET_CODE (dest) == MEM)
792 mem_operand = XEXP (dest, 0);
793 dest_modifies = side_effects_p (mem_operand);
794 if (auto_inc_p (mem_operand))
795 auto_inc_reg_rtx = XEXP (mem_operand, 0);
796 dest_volatile = MEM_VOLATILE_P (dest);
799 dest = copy_rtx (dest);
800 MEM_VOLATILE_P (dest) = 0;
803 else if (GET_CODE (src) == MEM)
805 mem_operand = XEXP (src, 0);
806 src_modifies = side_effects_p (mem_operand);
807 if (auto_inc_p (mem_operand))
808 auto_inc_reg_rtx = XEXP (mem_operand, 0);
809 src_volatile = MEM_VOLATILE_P (src);
812 src = copy_rtx (src);
813 MEM_VOLATILE_P (src) = 0;
817 mem_operand = NULL_RTX;
/* Choose the copy direction so overlapping reg-reg or mem-reg moves
   don't clobber words before they are read.  */
819 if (mem_operand == NULL_RTX)
821 if (GET_CODE (src) == REG
822 && GET_CODE (dest) == REG
823 && reg_overlap_mentioned_p (dest, src)
824 && REGNO (dest) > REGNO (src))
827 else if (GET_CODE (mem_operand) == PRE_DEC
828 || (GET_CODE (mem_operand) == PLUS
829 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
831 else if (GET_CODE (src) == MEM
832 && reg_overlap_mentioned_p (dest, src))
836 gcc_assert (GET_CODE (dest) == REG);
837 regno = REGNO (dest);
839 gcc_assert (refers_to_regno_p (regno, regno + num_words,
842 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
844 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
848 /* This means something like
849 (set (reg:DI r0) (mem:DI (reg:HI r1)))
850 which we'd need to support by doing the set of the second word
/* Emit one word_mode move per word, walking in the chosen
   direction; re-apply volatility per word and note auto-inc side
   effects with REG_INC.  */
855 end = direction < 0 ? -1 : num_words;
856 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
858 rtx w_src, w_dest, insn;
861 w_src = gen_rtx_MEM (word_mode, mem_operand);
863 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
865 MEM_VOLATILE_P (w_src) = 1;
867 w_dest = gen_rtx_MEM (word_mode, mem_operand);
869 w_dest = simplify_gen_subreg (word_mode, dest, mode,
872 MEM_VOLATILE_P (w_dest) = 1;
874 /* The simplify_subreg calls must always be able to simplify. */
875 gcc_assert (GET_CODE (w_src) != SUBREG
876 && GET_CODE (w_dest) != SUBREG);
878 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
879 if (auto_inc_reg_rtx)
880 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
886 /* Expander for the 'move' patterns. Emit insns to copy a value of
887 mode MODE from SRC to DEST. */
/* PRE_MODIFY addresses are expanded here into an explicit
   base-register update (with carry clobber) followed by a plain MEM
   access.  NOTE(review): braces and some statements are missing from
   this listing.  */
890 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
892 if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
894 rtx pmv = XEXP (dest, 0);
895 rtx dest_reg = XEXP (pmv, 0);
896 rtx dest_mod = XEXP (pmv, 1);
897 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
898 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
900 dest = gen_rtx_MEM (mode, dest_reg);
901 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
903 else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
905 rtx pmv = XEXP (src, 0);
906 rtx src_reg = XEXP (pmv, 0);
907 rtx src_mod = XEXP (pmv, 1);
908 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
909 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
911 src = gen_rtx_MEM (mode, src_reg);
912 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
915 /* There are only limited immediate-to-memory move instructions. */
916 if (! reload_in_progress
917 && ! reload_completed
918 && GET_CODE (dest) == MEM
919 && (GET_CODE (XEXP (dest, 0)) != CONST_INT
920 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
921 && ! xstormy16_below100_operand (dest, mode)
922 && GET_CODE (src) != REG
923 && GET_CODE (src) != SUBREG)
924 src = copy_to_mode_reg (mode, src)
926 /* Don't emit something we would immediately split. */
928 && mode != HImode && mode != QImode)
930 xstormy16_split_move (mode, dest, src);
934 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
940 The stack is laid out as follows:
944 Register save area (up to 4 words)
945 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
947 AP-> Return address (two words)
948 9th procedure parameter word
949 10th procedure parameter word
951 last procedure parameter word
953 The frame pointer location is tuned to make it most likely that all
954 parameters and local variables can be accessed using a load-indexed
957 /* A structure to describe the layout. */
958 struct xstormy16_stack_layout
960 /* Size of the topmost three items on the stack. */
962 int register_save_size;
963 int stdarg_save_size;
964 /* Sum of the above items. */
966 /* Various offsets. */
967 int first_local_minus_ap;
972 /* Does REGNO need to be saved? */
/* A register is saved if it is live and call-saved, or — in an
   interrupt function (IFUN nonzero) — if it is a non-fixed call-used
   register (other than the carry flag) that is live or that a
   non-leaf function's callees might clobber.  */
973 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
974 ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
975 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
976 && (REGNUM != CARRY_REGNUM) \
977 && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
979 /* Compute the stack layout. */
/* Fills and returns a struct xstormy16_stack_layout describing this
   function's frame (see the diagram in the comment block above).
   NOTE(review): braces, the regno declaration, and the final return
   are missing from this listing.  */
980 struct xstormy16_stack_layout
981 xstormy16_compute_stack_layout (void)
983 struct xstormy16_stack_layout layout;
985 const int ifun = xstormy16_interrupt_function_p ();
987 layout.locals_size = get_frame_size ();
/* One word per register that REG_NEEDS_SAVE says must be pushed.  */
989 layout.register_save_size = 0;
990 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
991 if (REG_NEEDS_SAVE (regno, ifun))
992 layout.register_save_size += UNITS_PER_WORD;
995 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
997 layout.stdarg_save_size = 0;
999 layout.frame_size = (layout.locals_size
1000 + layout.register_save_size
1001 + layout.stdarg_save_size);
/* Place the frame pointer so that, when possible, all locals and
   parameters are within the 2048-byte load-indexed range.  */
1003 if (crtl->args.size <= 2048 && crtl->args.size != -1)
1005 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
1006 + crtl->args.size <= 2048)
1007 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
1009 layout.fp_minus_ap = 2048 - crtl->args.size;
1012 layout.fp_minus_ap = (layout.stdarg_save_size
1013 + layout.register_save_size
1014 - INCOMING_FRAME_SP_OFFSET);
1015 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
1016 - layout.fp_minus_ap);
1017 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
1021 /* Determine how all the special registers get eliminated. */
/* Returns the constant offset between eliminable register FROM and its
   replacement TO, derived from the computed stack layout.
   NOTE(review): the return type line, braces, the default/assert arm,
   and the final return of `result` are missing from this listing.  */
1023 xstormy16_initial_elimination_offset (int from, int to)
1025 struct xstormy16_stack_layout layout;
1028 layout = xstormy16_compute_stack_layout ();
1030 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1031 result = layout.sp_minus_fp - layout.locals_size;
1032 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1033 result = - layout.locals_size;
1034 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1035 result = - layout.fp_minus_ap;
1036 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1037 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
/* Emit DEST = SRC0 + SRC1 as an HImode add with an explicit carry
   clobber, the form required after reload, and return the emitted
   insn.  NOTE(review): the return type line, braces, and the final
   `return insn;` are missing from this listing.  */
1045 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1047 rtx set, clobber, insn;
1049 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1050 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1051 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1055 /* Called after register allocation to add any instructions needed for
1056 the prologue. Using a prologue insn is favored compared to putting
1057 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1058 since it allows the scheduler to intermix instructions with the
1059 saves of the caller saved registers. In some cases, it might be
1060 necessary to emit a barrier instruction as the last insn to prevent
1063 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1064 so that the debug info generation code can handle them properly. */
/* Emit the function prologue: save argument registers for stdarg,
   push call-saved registers, allocate locals, and set up the frame
   pointer; every frame-changing insn is marked RTX_FRAME_RELATED_P
   for DWARF CFI.  NOTE(review): braces and some declarations/
   statements are missing from this listing.  */
1066 xstormy16_expand_prologue (void)
1068 struct xstormy16_stack_layout layout;
1072 const int ifun = xstormy16_interrupt_function_p ();
/* A (post_inc sp) MEM used as the push destination throughout.  */
1074 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1075 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1077 layout = xstormy16_compute_stack_layout ();
1079 if (layout.locals_size >= 32768)
1080 error ("local variable memory requirements exceed capacity");
1082 /* Save the argument registers if necessary. */
1083 if (layout.stdarg_save_size)
1084 for (regno = FIRST_ARGUMENT_REGISTER;
1085 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1089 rtx reg = gen_rtx_REG (HImode, regno);
1091 insn = emit_move_insn (mem_push_rtx, reg);
1092 RTX_FRAME_RELATED_P (insn) = 1;
/* Attach a REG_FRAME_RELATED_EXPR note describing the push as a
   store plus an SP adjustment, since the post_inc form can't be
   interpreted directly by the DWARF code.  */
1094 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1096 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1097 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1099 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1100 plus_constant (stack_pointer_rtx,
1101 GET_MODE_SIZE (Pmode)));
1102 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1105 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1106 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1109 /* Push each of the registers to save. */
1110 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1111 if (REG_NEEDS_SAVE (regno, ifun))
1114 rtx reg = gen_rtx_REG (HImode, regno);
1116 insn = emit_move_insn (mem_push_rtx, reg);
1117 RTX_FRAME_RELATED_P (insn) = 1;
1119 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1121 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1122 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1124 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1125 plus_constant (stack_pointer_rtx,
1126 GET_MODE_SIZE (Pmode)));
1127 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1130 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1131 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1134 /* It's just possible that the SP here might be what we need for
1136 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1138 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1139 RTX_FRAME_RELATED_P (insn) = 1;
1142 /* Allocate space for local variables. */
1143 if (layout.locals_size)
1145 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1146 GEN_INT (layout.locals_size));
1147 RTX_FRAME_RELATED_P (insn) = 1;
1150 /* Set up the frame pointer, if required. */
1151 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1153 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1154 RTX_FRAME_RELATED_P (insn) = 1;
1156 if (layout.sp_minus_fp)
1158 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1159 hard_frame_pointer_rtx,
1160 GEN_INT (- layout.sp_minus_fp));
1161 RTX_FRAME_RELATED_P (insn) = 1;
1166 /* Do we need an epilogue at all? */
/* True when the function can simply return: registers are allocated
   and the frame is empty.  NOTE(review): the return type line and
   braces are missing from this listing.  */
1168 direct_return (void)
1170 return (reload_completed
1171 && xstormy16_compute_stack_layout ().frame_size == 0);
1174 /* Called after register allocation to add any instructions needed for
1175 the epilogue. Using an epilogue insn is favored compared to putting
1176 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1177 since it allows the scheduler to intermix instructions with the
1178 saves of the caller saved registers. In some cases, it might be
1179 necessary to emit a barrier instruction as the last insn to prevent
/* Emit the function epilogue: deallocate locals, pop the call-saved
   registers in reverse order of the prologue's pushes, pop the stdarg
   save area, and emit the appropriate return insn.  NOTE(review):
   braces and some declarations/statements are missing from this
   listing.  */
1183 xstormy16_expand_epilogue (void)
1185 struct xstormy16_stack_layout layout;
1186 rtx mem_pop_rtx, insn;
1188 const int ifun = xstormy16_interrupt_function_p ();
/* A (pre_dec sp) MEM used as the pop source throughout.  */
1190 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1191 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1193 layout = xstormy16_compute_stack_layout ();
1195 /* Pop the stack for the locals. */
1196 if (layout.locals_size)
1198 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1199 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1201 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1202 GEN_INT (- layout.locals_size));
1205 /* Restore any call-saved registers. */
1206 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1207 if (REG_NEEDS_SAVE (regno, ifun))
1208 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1210 /* Pop the stack for the stdarg save area. */
1211 if (layout.stdarg_save_size)
1212 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1213 GEN_INT (- layout.stdarg_save_size));
/* Interrupt functions return with a different instruction.  */
1217 emit_jump_insn (gen_return_internal_interrupt ());
1219 emit_jump_insn (gen_return_internal ());
/* Presumably implements EPILOGUE_USES: after reload, report as "used
   by the epilogue" any call-used register that nonetheless had to be
   saved (e.g. in an interrupt handler), since the epilogue's pop
   sequence restores it.  */
1223 xstormy16_epilogue_uses (int regno)
1225 if (reload_completed && call_used_regs[regno])
1227 const int ifun = xstormy16_interrupt_function_p ();
1228 return REG_NEEDS_SAVE (regno, ifun);
/* Function profiling is not implemented for this target; emit a
   "sorry" (unimplemented-feature) diagnostic if it is requested.  */
1234 xstormy16_function_profiler (void)
1236 sorry ("function_profiler support");
1240 /* Return an updated summarizer variable CUM to advance past an
1241 argument in the argument list. The values MODE, TYPE and NAMED
1242 describe that argument. Once this is done, the variable CUM is
1243 suitable for analyzing the *following* argument with
1244 `FUNCTION_ARG', etc.
1246 This function need not do anything if the argument in question was
1247 passed on the stack. The compiler knows how to track the amount of
1248 stack space used for arguments without any special help. However,
1249 it makes life easier for xstormy16_build_va_list if it does update
1252 xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
1253 tree type, int named ATTRIBUTE_UNUSED)
/* CUM counts the argument words already assigned to registers.  */
1255 /* If an argument would otherwise be passed partially in registers,
1256 and partially on the stack, the whole of it is passed on the
1258 if (cum < NUM_ARGUMENT_REGISTERS
1259 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1260 cum = NUM_ARGUMENT_REGISTERS;
1262 cum += XSTORMY16_WORD_SIZE (type, mode);
/* Worker for FUNCTION_ARG: given CUM argument words already in
   registers, return the REG in which to pass the next argument, or
   fall through for arguments that go on the stack.  Argument
   registers start at hard register 2 (hence "cum + 2").  */
1268 xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
1269 tree type, int named ATTRIBUTE_UNUSED)
/* VOIDmode marks the end of a libcall's argument list.  */
1271 if (mode == VOIDmode)
1273 if (targetm.calls.must_pass_in_stack (mode, type)
1274 || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1276 return gen_rtx_REG (mode, cum + 2);
1279 /* Build the va_list type.
1281 For this chip, va_list is a record containing a counter and a pointer.
1282 The counter is of type 'int' and indicates how many bytes
1283 have been used to date. The pointer indicates the stack position
1284 for arguments that have not been passed in registers.
1285 To keep the layout nice, the pointer is first in the structure. */
1288 xstormy16_build_builtin_va_list (void)
1290 tree f_1, f_2, record, type_decl;
1292 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1293 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* "base": pointer to the stack argument area (first field, per the
   layout note above).  */
1295 f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
/* "count": number of argument bytes consumed so far.  */
1297 f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
1298 unsigned_type_node);
1300 DECL_FIELD_CONTEXT (f_1) = record;
1301 DECL_FIELD_CONTEXT (f_2) = record;
1303 TREE_CHAIN (record) = type_decl;
1304 TYPE_NAME (record) = type_decl;
1305 TYPE_FIELDS (record) = f_1;
1306 TREE_CHAIN (f_1) = f_2;
1308 layout_type (record);
1313 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1314 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1315 variable to initialize. NEXTARG is the machine independent notion of the
1316 'next' argument after the variable arguments. */
1318 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1320 tree f_base, f_count;
/* va_start is meaningless inside an interrupt handler on this
   target.  */
1324 if (xstormy16_interrupt_function_p ())
1325 error ("cannot use va_start in interrupt function");
1327 f_base = TYPE_FIELDS (va_list_type_node);
1328 f_count = TREE_CHAIN (f_base);
1330 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1331 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
/* base = virtual incoming-args pointer minus INCOMING_FRAME_SP_OFFSET.  */
1334 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1335 u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
1336 u = fold_convert (TREE_TYPE (count), u);
1337 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), t, u);
1338 t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
1339 TREE_SIDE_EFFECTS (t) = 1;
1340 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* count = bytes of named arguments already passed in registers.  */
1342 t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
1343 build_int_cst (NULL_TREE,
1344 crtl->args.info * UNITS_PER_WORD));
1345 TREE_SIDE_EFFECTS (t) = 1;
1346 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1349 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1350 of type va_list as a tree, TYPE is the type passed to va_arg.
1351 Note: This algorithm is documented in stormy-abi. */
1354 xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
1355 gimple_seq *post_p ATTRIBUTE_UNUSED)
1357 tree f_base, f_count;
1359 tree count_tmp, addr, t;
1360 tree lab_gotaddr, lab_fromstack;
1361 int size, size_of_reg_args, must_stack;
1364 f_base = TYPE_FIELDS (va_list_type_node);
1365 f_count = TREE_CHAIN (f_base);
1367 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1368 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1371 must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
/* Round the argument size up to a whole number of words.  */
1372 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1373 gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
1375 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1377 count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
1378 lab_gotaddr = create_artificial_label ();
1379 lab_fromstack = create_artificial_label ();
1380 addr = create_tmp_var (ptr_type_node, NULL);
/* If count + size would overflow the register-save area, the
   argument lives on the stack: branch to lab_fromstack.  */
1386 t = fold_convert (TREE_TYPE (count), size_tree);
1387 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1388 r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
1389 t = build2 (GT_EXPR, boolean_type_node, t, r);
1390 t = build3 (COND_EXPR, void_type_node, t,
1391 build1 (GOTO_EXPR, void_type_node, lab_fromstack),
1393 gimplify_and_add (t, pre_p);
/* Register case: the argument sits at base + count.  */
1395 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, base, count_tmp);
1396 gimplify_assign (addr, t, pre_p);
1398 t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
1399 gimplify_and_add (t, pre_p);
1401 t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
1402 gimplify_and_add (t, pre_p);
1405 /* Arguments larger than a word might need to skip over some
1406 registers, since arguments are either passed entirely in
1407 registers or entirely on the stack. */
1408 size = PUSH_ROUNDING (int_size_in_bytes (type));
1409 if (size > 2 || size < 0 || must_stack)
/* Bump count_tmp to the end of the register area so the remaining
   register slots are skipped.  */
1413 r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
1414 u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);
1416 t = fold_convert (TREE_TYPE (count), r);
1417 t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
1418 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
1419 gimplify_and_add (t, pre_p);
/* Stack case: addr = base - (reg area + SP offset - count - size),
   i.e. the argument's slot below the register-save area.  */
1422 t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1423 + INCOMING_FRAME_SP_OFFSET);
1424 t = fold_convert (TREE_TYPE (count), t);
1425 t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
1426 t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
1427 fold_convert (TREE_TYPE (count), size_tree));
1428 t = fold_convert (TREE_TYPE (t), fold (t));
1429 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1430 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
1431 gimplify_assign (addr, t, pre_p);
1433 t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
1434 gimplify_and_add (t, pre_p);
/* Commit the advanced count back into the va_list.  */
1436 t = fold_convert (TREE_TYPE (count), size_tree);
1437 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1438 gimplify_assign (count, t, pre_p);
1440 addr = fold_convert (build_pointer_type (type), addr);
1441 return build_va_arg_indirect_ref (addr);
1444 /* Initialize the variable parts of a trampoline. ADDR is an RTX for
1445 the address of the trampoline; FNADDR is an RTX for the address of
1446 the nested function; STATIC_CHAIN is an RTX for the static chain
1447 value that should be passed to the function when it is called. */
1449 xstormy16_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
1451 rtx reg_addr = gen_reg_rtx (Pmode);
1452 rtx temp = gen_reg_rtx (HImode);
1453 rtx reg_fnaddr = gen_reg_rtx (HImode);
1456 reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);
/* The code below stores four HImode words at ADDR, advancing by 2
   bytes each time.  0x3130 | STATIC_CHAIN_REGNUM is presumably the
   encoding of a "mov Rchain,#imm16" instruction, whose immediate is
   the next stored word (the static chain) -- confirm against the
   Xstormy16 ISA.  */
1458 emit_move_insn (reg_addr, addr);
1459 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1460 emit_move_insn (reg_addr_mem, temp);
1461 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1462 emit_move_insn (temp, static_chain);
1463 emit_move_insn (reg_addr_mem, temp);
1464 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Split FNADDR into low byte (merged with what looks like a jump
   opcode, 0x0200) and high byte (stored last).  */
1465 emit_move_insn (reg_fnaddr, fnaddr);
1466 emit_move_insn (temp, reg_fnaddr);
1467 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1468 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1469 emit_move_insn (reg_addr_mem, temp);
1470 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1471 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1472 emit_move_insn (reg_addr_mem, reg_fnaddr);
1475 /* Worker function for FUNCTION_VALUE. */
1478 xstormy16_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
1480 enum machine_mode mode;
1481 mode = TYPE_MODE (valtype);
/* Promote small integer modes per the target's PROMOTE_MODE rule
   before choosing the hard return register.  */
1482 PROMOTE_MODE (mode, 0, valtype);
1483 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1486 /* A C compound statement that outputs the assembler code for a thunk function,
1487 used to implement C++ virtual function calls with multiple inheritance. The
1488 thunk acts as a wrapper around a virtual function, adjusting the implicit
1489 object parameter before handing control off to the real function.
1491 First, emit code to add the integer DELTA to the location that contains the
1492 incoming first argument. Assume that this argument contains a pointer, and
1493 is the one used to pass the `this' pointer in C++. This is the incoming
1494 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1495 addition must preserve the values of all other incoming arguments.
1497 After the addition, emit code to jump to FUNCTION, which is a
1498 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1499 the return address. Hence returning from FUNCTION will return to whoever
1500 called the current `thunk'.
1502 The effect must be as if @var{function} had been called directly
1503 with the adjusted first argument. This macro is responsible for
1504 emitting all of the code for a thunk function;
1505 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1508 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1509 extracted from it.) It might possibly be useful on some targets, but
1513 xstormy16_asm_output_mi_thunk (FILE *file,
1514 tree thunk_fndecl ATTRIBUTE_UNUSED,
1515 HOST_WIDE_INT delta,
1516 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1519 int regnum = FIRST_ARGUMENT_REGISTER;
1521 /* There might be a hidden first argument for a returned structure. */
/* When the callee returns an aggregate in memory, `this' is in the
   next argument register (the adjustment of REGNUM is on a line not
   visible here).  */
1522 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
/* Add DELTA (truncated to 16 bits) to `this', then tail-jump.  */
1525 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1526 fputs ("\tjmpf ", file);
1527 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1531 /* The purpose of this function is to override the default behavior of
1532 BSS objects. Normally, they go into .bss or .sbss via ".common"
1533 directives, but we need to override that and put them in
1534 .bss_below100. We can't just use a section override (like we do
1535 for .data_below100), because that makes them initialized rather
1536 than uninitialized. */
1538 xstormy16_asm_output_aligned_common (FILE *stream,
/* Below-100 objects are emitted by hand into bss100_section with
   explicit .globl/.p2align/.type/.size/.space directives...  */
1545 rtx mem = DECL_RTL (decl);
1549 && GET_CODE (mem) == MEM
1550 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1551 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1556 switch_to_section (bss100_section);
1564 name2 = default_strip_name_encoding (name);
1566 fprintf (stream, "\t.globl\t%s\n", name2);
1568 fprintf (stream, "\t.p2align %d\n", p2align);
1569 fprintf (stream, "\t.type\t%s, @object\n", name2);
1570 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1571 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
/* ...everything else falls back to the usual .local/.comm pair.  */
1577 fprintf (stream, "\t.local\t");
1578 assemble_name (stream, name);
1579 fprintf (stream, "\n");
1581 fprintf (stream, "\t.comm\t");
1582 assemble_name (stream, name);
1583 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1586 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Create the .bss_below100 section (writable, nobits) used for
   uninitialized "below100" objects.  */
1589 xstormy16_asm_init_sections (void)
1592 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
1593 output_section_asm_op,
1594 "\t.section \".bss_below100\",\"aw\",@nobits")
1597 /* Mark symbols with the "below100" attribute so that we can use the
1598 special addressing modes for them. */
1601 xstormy16_encode_section_info (tree decl, rtx r, int first)
1603 default_encode_section_info (decl, r, first);
/* Both spellings of the attribute are accepted; the flag is recorded
   on the SYMBOL_REF so later passes can test it cheaply.  */
1605 if (TREE_CODE (decl) == VAR_DECL
1606 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1607 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1609 rtx symbol = XEXP (r, 0);
1611 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1612 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1616 /* Output constructors and destructors. Just like
1617 default_named_section_asm_out_* but don't set the sections writable. */
1618 #undef TARGET_ASM_CONSTRUCTOR
1619 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1620 #undef TARGET_ASM_DESTRUCTOR
1621 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
/* Emit SYMBOL into .dtors (or a priority-suffixed .dtors.NNNNN
   section) as an aligned pointer-sized entry.  */
1624 xstormy16_asm_out_destructor (rtx symbol, int priority)
1626 const char *section = ".dtors";
1629 /* ??? This only works reliably with the GNU linker. */
1630 if (priority != DEFAULT_INIT_PRIORITY)
1632 sprintf (buf, ".dtors.%.5u",
1633 /* Invert the numbering so the linker puts us in the proper
1634 order; constructors are run from right to left, and the
1635 linker sorts in increasing order. */
1636 MAX_INIT_PRIORITY - priority);
1640 switch_to_section (get_section (section, 0, NULL));
1641 assemble_align (POINTER_SIZE);
1642 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Emit SYMBOL into .ctors (or a priority-suffixed .ctors.NNNNN
   section) as an aligned pointer-sized entry; mirror image of
   xstormy16_asm_out_destructor.  */
1646 xstormy16_asm_out_constructor (rtx symbol, int priority)
1648 const char *section = ".ctors";
1651 /* ??? This only works reliably with the GNU linker. */
1652 if (priority != DEFAULT_INIT_PRIORITY)
1654 sprintf (buf, ".ctors.%.5u",
1655 /* Invert the numbering so the linker puts us in the proper
1656 order; constructors are run from right to left, and the
1657 linker sorts in increasing order. */
1658 MAX_INIT_PRIORITY - priority);
1662 switch_to_section (get_section (section, 0, NULL));
1663 assemble_align (POINTER_SIZE);
1664 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1667 /* Print a memory address as an operand to reference that memory location. */
1669 xstormy16_print_operand_address (FILE *file, rtx address)
1671 HOST_WIDE_INT offset;
1672 int pre_dec, post_inc;
1674 /* There are a few easy cases. */
/* A constant address: print its low 16 bits as a decimal number.  */
1675 if (GET_CODE (address) == CONST_INT)
1677 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1681 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1683 output_addr_const (file, address);
1687 /* Otherwise, it's hopefully something of the form
1688 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))
/* Strip an outer PLUS, remembering its constant displacement.  */
1691 if (GET_CODE (address) == PLUS)
1693 gcc_assert (GET_CODE (XEXP (address, 1)) == CONST_INT);
1694 offset = INTVAL (XEXP (address, 1));
1695 address = XEXP (address, 0);
/* Strip auto-modification wrappers and note which flavor we saw.  */
1700 pre_dec = (GET_CODE (address) == PRE_DEC);
1701 post_inc = (GET_CODE (address) == POST_INC);
1702 if (pre_dec || post_inc)
1703 address = XEXP (address, 0);
1705 gcc_assert (GET_CODE (address) == REG);
1710 fputs (reg_names [REGNO (address)], file);
1714 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1718 /* Print an operand to an assembler instruction. */
/* NOTE(review): this function is a switch on CODE; the case labels
   themselves are on lines not visible in this listing.  The visible
   fragments handle a one-bit mask operand ('B'-style), a bare symbol,
   an immediate-less-one, a shift mask for bp/bn, and the default
   register/memory/constant fallthrough.  */
1720 xstormy16_print_operand (FILE *file, rtx x, int code)
1725 /* There is either one bit set, or one bit clear, in X.
1726 Print it preceded by '#'. */
1728 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1729 HOST_WIDE_INT xx = 1;
1732 if (GET_CODE (x) == CONST_INT)
1735 output_operand_lossage ("'B' operand is not constant");
1737 /* GCC sign-extends masks with the MSB set, so we have to
1738 detect all the cases that differ only in sign extension
1739 beyond the bits we care about. Normally, the predicates
1740 and constraints ensure that we have the right values. This
1741 works correctly for valid masks. */
1742 if (bits_set[xx & 7] <= 1)
1744 /* Remove sign extension bits. */
1745 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1747 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1749 l = exact_log2 (xx);
1753 /* Add sign extension bits. */
1754 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1755 xx |= ~(HOST_WIDE_INT)0xff;
1756 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1757 xx |= ~(HOST_WIDE_INT)0xffff;
1758 l = exact_log2 (~xx);
1762 output_operand_lossage ("'B' operand has multiple bits set");
1764 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1769 /* Print the symbol without a surrounding @fptr(). */
1770 if (GET_CODE (x) == SYMBOL_REF)
1771 assemble_name (file, XSTR (x, 0));
1772 else if (GET_CODE (x) == LABEL_REF)
1773 output_asm_label (x);
1775 xstormy16_print_operand_address (file, x);
1780 /* Print the immediate operand less one, preceded by '#'.
1781 For 'O', negate it first. */
1783 HOST_WIDE_INT xx = 0;
1785 if (GET_CODE (x) == CONST_INT)
1788 output_operand_lossage ("'o' operand is not constant");
1793 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1798 /* Print the shift mask for bp/bn. */
1800 HOST_WIDE_INT xx = 1;
1803 if (GET_CODE (x) == CONST_INT)
1806 output_operand_lossage ("'B' operand is not constant");
1810 fputs (IMMEDIATE_PREFIX, file);
1811 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1816 /* Handled below. */
1820 output_operand_lossage ("xstormy16_print_operand: unknown code");
/* No (recognized) modifier: print the operand by its RTX class.  */
1824 switch (GET_CODE (x))
1827 fputs (reg_names [REGNO (x)], file);
1831 xstormy16_print_operand_address (file, XEXP (x, 0));
1835 /* Some kind of constant or label; an immediate operand,
1836 so prefix it with '#' for the assembler. */
1837 fputs (IMMEDIATE_PREFIX, file);
1838 output_addr_const (file, x);
1846 /* Expander for the `casesi' pattern.
1847 INDEX is the index of the switch statement.
1848 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1849 to the first table entry.
1850 RANGE is the number of table entries.
1851 TABLE is an ADDR_VEC that is the jump table.
1852 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1853 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
1857 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1858 rtx table, rtx default_label)
1860 HOST_WIDE_INT range_i = INTVAL (range);
1863 /* This code uses 'br', so it can deal only with tables of size up to
1865 if (range_i >= 8192)
1866 sorry ("switch statement of size %lu entries too large",
1867 (unsigned long) range_i);
/* Rebase the index to zero, branch to the default label when it is
   out of range (unsigned compare), then scale by 4 (each table entry
   is a 4-byte jmpf) and dispatch through tablejump_pcrel.  */
1869 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1871 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1873 int_index = gen_lowpart_common (HImode, index);
1874 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1875 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1878 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1879 instructions, without label or alignment or any other special
1880 constructs. We know that the previous instruction will be the
1881 `tablejump_pcrel' output above.
1883 TODO: it might be nice to output 'br' instructions if they could
1887 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1891 switch_to_section (current_function_section ());
/* One "jmpf <label>" per table entry, in order.  */
1893 vlen = XVECLEN (table, 0);
1894 for (idx = 0; idx < vlen; idx++)
1896 fputs ("\tjmpf ", file);
1897 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1903 /* Expander for the `call' patterns.
1904 RETVAL is the RTL for the register receiving the return value, or
1905 NULL for a call with no value.  DEST is the MEM naming the callee.
1906 COUNTER's role is not visible in this listing -- presumably the
1907 argument-bytes operand of the call pattern; confirm against the
1908 machine description.  (The original header comment here was a
1909 copy-paste of the casesi documentation and did not describe these
1910 parameters.)
1914 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1917 enum machine_mode mode;
1919 gcc_assert (GET_CODE (dest) == MEM);
1920 dest = XEXP (dest, 0);
/* Force a non-constant, non-register callee address into a reg.  */
1922 if (! CONSTANT_P (dest)
1923 && GET_CODE (dest) != REG)
1924 dest = force_reg (Pmode, dest);
1929 mode = GET_MODE (retval);
1931 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1934 call = gen_rtx_SET (VOIDmode, retval, call);
/* Indirect calls additionally USE a zeroed scratch register.  */
1936 if (! CONSTANT_P (dest))
1938 temp = gen_reg_rtx (HImode);
1939 emit_move_insn (temp, const0_rtx);
1944 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1945 gen_rtx_USE (VOIDmode, temp)));
1946 emit_call_insn (call);
1949 /* Expanders for multiword computational operations. */
1951 /* Expander for arithmetic operations; emit insns to compute
1953 (set DEST (CODE:MODE SRC0 SRC1))
1955 When CODE is COMPARE, a branch template is generated
1956 (this saves duplicating code in xstormy16_split_cbranch). */
/* The operation is performed one machine word (HImode) at a time,
   using carry-propagating add/sub patterns between words.  */
1959 xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
1960 rtx dest, rtx src0, rtx src1)
1962 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1967 emit_move_insn (src0, const0_rtx);
1969 for (i = 0; i < num_words; i++)
1971 rtx w_src0, w_src1, w_dest;
/* Take the I'th word of each operand.  */
1974 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1975 i * UNITS_PER_WORD);
1976 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1977 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
1983 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
/* addchi4/addchi5 are presumably the carry-in/carry-out add
   patterns from the .md file -- confirm there.  */
1987 insn = gen_addchi4 (w_dest, w_src0, w_src1);
1989 insn = gen_addchi5 (w_dest, w_src0, w_src1);
/* For COMPARE, the final word becomes a conditional branch built
   around a subtract-with-borrow.  */
1995 if (code == COMPARE && i == num_words - 1)
1997 rtx branch, sub, clobber, sub_1;
1999 sub_1 = gen_rtx_MINUS (HImode, w_src0,
2000 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
2001 sub = gen_rtx_SET (VOIDmode, w_dest,
2002 gen_rtx_MINUS (HImode, sub_1, w_src1));
2003 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
2004 branch = gen_rtx_SET (VOIDmode, pc_rtx,
2005 gen_rtx_IF_THEN_ELSE (VOIDmode,
2011 insn = gen_rtx_PARALLEL (VOIDmode,
2012 gen_rtvec (3, branch, sub, clobber));
2016 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
2019 insn = gen_subchi4 (w_dest, w_src0, w_src1);
2021 insn = gen_subchi5 (w_dest, w_src0, w_src1);
/* Logical ops: skip words where the constant operand is the
   identity element (-1 for AND, 0 otherwise).  */
2027 if (GET_CODE (w_src1) == CONST_INT
2028 && INTVAL (w_src1) == -(code == AND))
2031 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
2036 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
2047 /* If we emit nothing, try_split() will think we failed. So emit
2048 something that does nothing and can be optimized away. */
2053 /* The shift operations are split at output time for constant values;
2054 variable-width shifts get handed off to a library routine.
2056 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2057 SIZE_R will be a CONST_INT, X will be a hard register. */
2060 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2061 rtx x, rtx size_r, rtx temp)
2064 const char *r0, *r1, *rt;
2067 gcc_assert (GET_CODE (size_r) == CONST_INT
2068 && GET_CODE (x) == REG && mode == SImode);
/* Mask the count to the mode width (hardware-style modulo).  */
2069 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
/* R0 is the low word of the SImode register pair, R1 the high word.  */
2074 r0 = reg_names [REGNO (x)];
2075 r1 = reg_names [REGNO (x) + 1];
2077 /* For shifts of size 1, we can use the rotate instructions. */
2083 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2086 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2089 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2097 /* For large shifts, there are easy special cases. */
/* Shift by exactly 16: a word move plus zero/sign fill.  */
2103 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2106 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2109 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
/* Shift by more than 16: word move, fill, then a residual
   (size - 16)-bit shift in one word.  */
2121 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2122 r1, r0, r0, r1, (int) size - 16);
2125 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2126 r0, r1, r1, r0, (int) size - 16);
2129 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2130 r0, r1, r1, r0, (int) size - 16);
2138 /* For the rest, we have to do more work. In particular, we
2139 need a temporary. */
/* General case (1 < size < 16): shift both words and OR in the bits
   carried across the word boundary via TEMP.  */
2140 rt = reg_names [REGNO (temp)];
2145 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2146 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16-size),
2151 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2152 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2157 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2158 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2167 /* Attribute handling. */
2169 /* Return nonzero if the function is an interrupt function. */
2171 xstormy16_interrupt_function_p (void)
2175 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2176 any functions are declared, which is demonstrably wrong, but
2177 it is worked around here. FIXME. */
/* Test for the "interrupt" attribute on the current function's
   type.  */
2181 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2182 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2185 #undef TARGET_ATTRIBUTE_TABLE
2186 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2187 static tree xstormy16_handle_interrupt_attribute
2188 (tree *, tree, tree, int, bool *);
2189 static tree xstormy16_handle_below100_attribute
2190 (tree *, tree, tree, int, bool *);
/* Machine-specific attributes: "interrupt" (function types only) and
   the two spellings of the below-100 data attribute.  */
2192 static const struct attribute_spec xstormy16_attribute_table[] =
2194 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2195 { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
2196 { "BELOW100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2197 { "below100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2198 { NULL, 0, 0, false, false, false, NULL }
2201 /* Handle an "interrupt" attribute;
2202 arguments as in struct attribute_spec.handler. */
2204 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2205 tree args ATTRIBUTE_UNUSED,
2206 int flags ATTRIBUTE_UNUSED,
/* Only function types may carry "interrupt"; otherwise warn and
   refuse to attach the attribute.  */
2209 if (TREE_CODE (*node) != FUNCTION_TYPE)
2211 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2212 IDENTIFIER_POINTER (name));
2213 *no_add_attrs = true;
2219 /* Handle a "below100" attribute;
2220 arguments as in struct attribute_spec.handler. */
2222 xstormy16_handle_below100_attribute (tree *node,
2223 tree name ATTRIBUTE_UNUSED,
2224 tree args ATTRIBUTE_UNUSED,
2225 int flags ATTRIBUTE_UNUSED,
/* Accept the attribute on variables, pointer types and typedefs;
   warn and drop it everywhere else.  */
2228 if (TREE_CODE (*node) != VAR_DECL
2229 && TREE_CODE (*node) != POINTER_TYPE
2230 && TREE_CODE (*node) != TYPE_DECL)
2232 warning (OPT_Wattributes,
2233 "%<__BELOW100__%> attribute only applies to variables");
2234 *no_add_attrs = true;
/* Automatic variables have no fixed address, so below-100 addressing
   cannot apply to them.  */
2236 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2238 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2240 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2241 "with auto storage class")
2242 *no_add_attrs = true;
2249 #undef TARGET_INIT_BUILTINS
2250 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2251 #undef TARGET_EXPAND_BUILTIN
2252 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
/* Table of machine builtins: combined 32/16 divide/modulo helpers.
   Each entry pairs a .md insn code with an operand map (arg_ops) and
   a signature string (arg_types), decoded by the two functions
   below.  */
2257 const char *arg_ops; /* 0..9, t for temp register, r for return value */
2258 const char *arg_types; /* s=short,l=long, upper case for unsigned */
2260 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2261 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2262 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2263 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
/* Register each entry of s16builtins with the middle end, building
   its function type from the arg_types signature string.  */
2268 xstormy16_init_builtins (void)
2270 tree args, ret_type, arg;
2273 ret_type = void_type_node;
2275 for (i=0; s16builtins[i].name; i++)
2277 args = void_list_node;
/* Walk the signature string backwards so the argument list comes
   out in declaration order.  */
2278 for (a=strlen (s16builtins[i].arg_types)-1; a>=0; a--)
2280 switch (s16builtins[i].arg_types[a])
2282 case 's': arg = short_integer_type_node; break;
2283 case 'S': arg = short_unsigned_type_node; break;
2284 case 'l': arg = long_integer_type_node; break;
2285 case 'L': arg = long_unsigned_type_node; break;
2286 default: gcc_unreachable ();
2291 args = tree_cons (NULL_TREE, arg, args);
2293 add_builtin_function (s16builtins[i].name,
2294 build_function_type (ret_type, args),
2295 i, BUILT_IN_MD, NULL, NULL);
/* Expand a call to one of the s16builtins.  The entry's arg_ops
   string maps insn operands to call arguments ('0'..'9'), a scratch
   register ('t') or the value to return ('r').  */
2300 xstormy16_expand_builtin (tree exp, rtx target,
2301 rtx subtarget ATTRIBUTE_UNUSED,
2302 enum machine_mode mode ATTRIBUTE_UNUSED,
2303 int ignore ATTRIBUTE_UNUSED)
2305 rtx op[10], args[10], pat, copyto[10], retval = 0;
2306 tree fndecl, argtree;
2309 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2310 argtree = TREE_OPERAND (exp, 1);
/* DECL_FUNCTION_CODE was set to the table index in init_builtins.  */
2311 i = DECL_FUNCTION_CODE (fndecl);
2312 code = s16builtins[i].md_code;
2314 for (a = 0; a < 10 && argtree; a++)
2316 args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
2317 argtree = TREE_CHAIN (argtree);
2320 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2322 char ao = s16builtins[i].arg_ops[o];
2323 char c = insn_data[code].operand[o].constraint[0];
2328 omode = insn_data[code].operand[o].mode;
/* 'r': reuse TARGET for the result when available.  */
2330 op[o] = target ? target : gen_reg_rtx (omode);
2332 op[o] = gen_reg_rtx (omode);
2334 op[o] = args[(int) hex_value (ao)];
/* Operands failing the insn predicate are routed through a fresh
   register; output operands get copied back afterwards.  */
2336 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2338 if (c == '+' || c == '=')
2341 op[o] = gen_reg_rtx (omode);
2344 op[o] = copy_to_mode_reg (omode, op[o]);
2351 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2352 op[5], op[6], op[7], op[8], op[9]);
2355 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2358 emit_move_insn (copyto[o], op[o]);
2359 if (op[o] == retval)
2367 /* Look for combinations of insns that can be converted to BN or BP
2368 opcodes. This is, unfortunately, too complex to do with MD
2371 combine_bnp (rtx insn)
2373 int insn_code, regno, need_extend;
/* NOTE(review): this span is the interior of a larger function (its
   header lies above this chunk) that rewrites a compare-and-branch on
   a register into a test of a single bit loaded from memory.  The
   embedded original line numbers are discontinuous, so returns, braces
   and case labels between the visible statements are elided; the
   comments below state only what the visible lines establish.
   (INSN_CODE, REGNO and MASK locals are declared in elided lines.)  */
2375 rtx cond, reg, and, load, qireg, mem;
2376 enum machine_mode load_mode = QImode;
2377 enum machine_mode and_mode = QImode;
/* SHIFT stays NULL_RTX unless a right-shift feeding the AND is found
   below; it doubles as the flag for deleting that insn at the end.  */
2378 rtx shift = NULL_RTX;
/* Only the two HImode compare-and-branch patterns are candidates.  */
2380 insn_code = recog_memoized (insn);
2381 if (insn_code != CODE_FOR_cbranchhi
2382 && insn_code != CODE_FOR_cbranchhi_neg)
/* Dig the comparison out of the branch pattern:
   parallel elt 0 is the (set (pc) (if_then_else (COND ...) ...)).  */
2385 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2386 cond = XEXP (cond, 1); /* if */
2387 cond = XEXP (cond, 0); /* cond */
/* Dispatch on the comparison code (case labels elided from view).  */
2388 switch (GET_CODE (cond))
/* The compared operand must be a plain REG tested against zero, and
   that register must die in this insn — otherwise the rewrite below
   would clobber a live value.  */
2402 reg = XEXP (cond, 0);
2403 if (GET_CODE (reg) != REG)
2405 regno = REGNO (reg);
2406 if (XEXP (cond, 1) != const0_rtx)
2408 if (! find_regno_note (insn, REG_DEAD, regno))
/* QImode view of the same hard register, for matching byte ops.  */
2410 qireg = gen_rtx_REG (QImode, regno);
2414 /* LT and GE conditionals should have a sign extend before
/* Walk backwards looking for the insn that produced REG.  */
2416 for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
2418 int and_code = recog_memoized (and);
/* extendqihi2 from QIREG into REG: the sign-extend feeding LT/GE.  */
2420 if (and_code == CODE_FOR_extendqihi2
2421 && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
2422 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
/* A plain HImode move into REG also qualifies; per the original
   comment below this is the bit-15 (HImode sign bit) case.  */
2425 if (and_code == CODE_FOR_movhi_internal
2426 && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
2428 /* This is for testing bit 15. */
/* Bail out if REG is used in between, or on anything that is
   neither a note nor an ordinary insn.  */
2433 if (reg_mentioned_p (reg, and))
2436 if (GET_CODE (and) != NOTE
2437 && GET_CODE (and) != INSN)
2443 /* EQ and NE conditionals have an AND before them. */
2444 for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
/* Look for (set REG (and REG ...)) — the masking insn.  */
2446 if (recog_memoized (and) == CODE_FOR_andhi3
2447 && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
2448 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
/* Same bail-out conditions as the LT/GE scan above.  */
2451 if (reg_mentioned_p (reg, and))
2454 if (GET_CODE (and) != NOTE
2455 && GET_CODE (and) != INSN)
2461 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2462 followed by an AND like this:
2464 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2465 (clobber (reg:BI carry))]
2467 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2469 Attempt to detect this here. */
2470 for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
/* The shift is element 0 of the parallel; both its source and its
   destination must be REG for the mask adjustment below to hold.  */
2472 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2473 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2474 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2477 if (reg_mentioned_p (reg, shift)
2478 || (GET_CODE (shift) != NOTE
2479 && GET_CODE (shift) != INSN))
/* Now find the load that put the tested value into REG (or QIREG),
   scanning back from the shift if one was found, else from the AND.  */
2490 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
2492 load = prev_real_insn (load))
2494 int load_code = recog_memoized (load);
/* HImode load from a below-100 operand; volatile sources must not be
   re-read out of order, so they are rejected.  */
2496 if (load_code == CODE_FOR_movhi_internal
2497 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2498 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2499 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
/* QImode load into the QI view of the register.  */
2505 if (load_code == CODE_FOR_movqi_internal
2506 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2507 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
/* Zero-extended QImode load into the full HI register.  */
2513 if (load_code == CODE_FOR_zero_extendqihi2
2514 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2515 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
/* Same intervening-use / unexpected-insn bail-outs as above.  */
2522 if (reg_mentioned_p (reg, load))
2525 if (GET_CODE (load) != NOTE
2526 && GET_CODE (load) != INSN)
/* MEM is the memory operand whose bit is being tested.  */
2532 mem = SET_SRC (PATTERN (load));
/* Default mask: the sign bit of the loaded mode (LT/GE path).  */
2536 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2538 /* If the mem includes a zero-extend operation and we are
2539 going to generate a sign-extend operation then move the
2540 mem inside the zero-extend. */
2541 if (GET_CODE (mem) == ZERO_EXTEND)
2542 mem = XEXP (mem, 0);
/* EQ/NE path: the AND's constant must have exactly one bit set; that
   bit (shifted back left if a right-shift was folded in) is the bit
   to branch on.  */
2546 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
2549 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));
2552 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
/* For an HImode source, narrow the access to the single byte holding
   the tested bit (address + 1 when the mask is in the upper eight
   bits).  NOTE(review): this byte selection assumes little-endian
   storage — consistent with the +1 adjustment, but confirm against
   the target's BYTES_BIG_ENDIAN setting.  */
2555 if (load_mode == HImode)
2557 rtx addr = XEXP (mem, 0);
2559 if (! (mask & 0xff))
2561 addr = plus_constant (addr, 1);
2564 mem = gen_rtx_MEM (QImode, addr);
/* Rewrite the condition operand in place: either a sign-extend of the
   byte (sign-bit test) or an AND with the single-bit mask.  */
2568 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2570 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
/* Force the modified branch insn to be re-recognized.  */
2572 INSN_CODE (insn) = -1;
/* The shift has been folded into the mask; remove the dead insn.  */
2578 if (shift != NULL_RTX)
2579 delete_insn (shift);
/* Implement TARGET_MACHINE_DEPENDENT_REORG (registered below).
   NOTE(review): only a fragment is visible — the return-type line,
   braces, and the work done on jump insns are elided.  The visible
   lines show a scan over the whole insn stream that skips non-jumps;
   presumably the jump case feeds the bit-test combiner above, but
   confirm against the full file.  */
2583 xstormy16_reorg (void)
2587 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2589 if (! JUMP_P (insn))
2596 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* Return true if a value of TYPE must be returned in memory rather
   than in registers: either its size is not a compile-time constant
   (int_size_in_bytes returns -1) or it exceeds the capacity of the
   NUM_ARGUMENT_REGISTERS word-sized registers.  FNTYPE is unused.
   NOTE(review): the `static bool` line and braces are elided from this
   view; only the signature and body lines appear.  */
2599 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2601 const HOST_WIDE_INT size = int_size_in_bytes (type);
2602 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Target hook overrides.  Each #undef/#define pair replaces the
   default from target-def.h before TARGET_INITIALIZER is expanded
   below.  Assembler directives for aligned 16- and 32-bit data.  */
2605 #undef TARGET_ASM_ALIGNED_HI_OP
2606 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2607 #undef TARGET_ASM_ALIGNED_SI_OP
2608 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2609 #undef TARGET_ENCODE_SECTION_INFO
2610 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2612 /* select_section doesn't handle .bss_below100. */
2613 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2614 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
/* MI thunk emission; the "can output" hook's name indicates vcall
   offsets are not supported.  */
2616 #undef TARGET_ASM_OUTPUT_MI_THUNK
2617 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2618 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2619 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
/* RTX and address cost hooks (prototyped near the top of this file).  */
2621 #undef TARGET_RTX_COSTS
2622 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2623 #undef TARGET_ADDRESS_COST
2624 #define TARGET_ADDRESS_COST xstormy16_address_cost
/* Varargs support.  */
2626 #undef TARGET_BUILD_BUILTIN_VA_LIST
2627 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2628 #undef TARGET_EXPAND_BUILTIN_VA_START
2629 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2630 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2631 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
/* Always answer "yes" (hook_bool_const_tree_true) for promotion of
   function arguments, return values, and prototype arguments.  */
2633 #undef TARGET_PROMOTE_FUNCTION_ARGS
2634 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
2635 #undef TARGET_PROMOTE_FUNCTION_RETURN
2636 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
2637 #undef TARGET_PROMOTE_PROTOTYPES
2638 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
/* Aggregate-return and machine-reorg workers defined above.  */
2640 #undef TARGET_RETURN_IN_MEMORY
2641 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2643 #undef TARGET_MACHINE_DEPENDENT_REORG
2644 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
/* Instantiate the target hook vector using the overrides above.  */
2646 struct gcc_target targetm = TARGET_INITIALIZER;
/* Garbage-collector root tables for this file (generated by gengtype
   from the GTY(()) markers, e.g. bss100_section in the header).  */
2648 #include "gt-stormy16.h"