1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
47 #include "target-def.h"
49 #include "langhooks.h"
51 static rtx emit_addhi3_postreload PARAMS ((rtx, rtx, rtx));
52 static void xstormy16_asm_out_constructor PARAMS ((rtx, int));
53 static void xstormy16_asm_out_destructor PARAMS ((rtx, int));
54 static void xstormy16_asm_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
55 HOST_WIDE_INT, tree));
57 static void xstormy16_init_builtins PARAMS ((void));
58 static rtx xstormy16_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
59 static bool xstormy16_rtx_costs PARAMS ((rtx, int, int, int *));
60 static int xstormy16_address_cost PARAMS ((rtx));
62 /* Define the information needed to generate branch and scc insns. This is
63 stored from the compare operation. */
/* Written by the compare expanders; read by xstormy16_emit_cbranch, which
   treats them as the two operands of the pending comparison.  */
64 struct rtx_def * xstormy16_compare_op0;
65 struct rtx_def * xstormy16_compare_op1;
67 /* Return 1 if this is a LT, GE, LTU, or GEU operator. */
/* NOTE(review): this listing is elided (gaps in the embedded numbering);
   the return type, the `rtx op;' declaration and the braces are not
   visible here.  Comments only; code left untouched.  */
70 xstormy16_ineqsi_operator (op, mode)
72 enum machine_mode mode;
74 enum rtx_code code = GET_CODE (op);
/* VOIDmode acts as a mode wildcard, as is usual for RTL predicates.  */
76 return ((mode == VOIDmode || GET_MODE (op) == mode)
77 && (code == LT || code == GE || code == LTU || code == GEU));
80 /* Return 1 if this is an EQ or NE operator. */
83 equality_operator (op, mode)
85 enum machine_mode mode;
/* VOIDmode acts as a mode wildcard, as is usual for RTL predicates.  */
87 return ((mode == VOIDmode || GET_MODE (op) == mode)
88 && (GET_CODE (op) == EQ || GET_CODE (op) == NE));
91 /* Return 1 if this is a comparison operator but not an EQ or NE operator. */
94 inequality_operator (op, mode)
96 enum machine_mode mode;
/* Any generic comparison that is not recognized by equality_operator.  */
98 return comparison_operator (op, mode) && ! equality_operator (op, mode);
101 /* Compute a (partial) cost for rtx X. Return true if the complete
102 cost has been computed, and false if subexpressions should be
103 scanned. In either case, *TOTAL contains the cost result. */
106 xstormy16_rtx_costs (x, code, outer_code, total)
108 int code, outer_code ATTRIBUTE_UNUSED;
/* Constants: 0..15 cost half an insn, 0..255 one insn, everything else
   two insns -- presumably matching the chip's immediate operand forms;
   TODO confirm against the ISA manual.  (Surrounding switch/case lines
   are elided in this listing.)  */
114 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
115 *total = COSTS_N_INSNS (1) / 2;
116 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
117 *total = COSTS_N_INSNS (1);
119 *total = COSTS_N_INSNS (2);
126 *total = COSTS_N_INSNS(2);
/* Long-latency arithmetic operations are costed as many insns.  */
130 *total = COSTS_N_INSNS (35 + 6);
133 *total = COSTS_N_INSNS (51 - 6);
142 xstormy16_address_cost (x)
/* Address costs: plain constant addresses are cheapest (2), reg+offset
   forms cost 7.  The final arm of this conditional (the default cost)
   is not visible in this listing.  */
145 return (GET_CODE (x) == CONST_INT ? 2
146 : GET_CODE (x) == PLUS ? 7
150 /* Branches are handled as follows:
152 1. HImode compare-and-branches. The machine supports these
153 natively, so the appropriate pattern is emitted directly.
155 2. SImode EQ and NE. These are emitted as pairs of HImode
156 compare-and-branches.
158 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
159 of a SImode subtract followed by a branch (not a compare-and-branch),
165 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
173 /* Emit a branch of kind CODE to location LOC. */
/* Operands come from the xstormy16_compare_op0/op1 globals set by the
   compare expander.  NOTE(review): numbering gaps show elided lines
   throughout this function; comments only, code untouched.  */
176 xstormy16_emit_cbranch (code, loc)
180 rtx op0 = xstormy16_compare_op0;
181 rtx op1 = xstormy16_compare_op1;
182 rtx condition_rtx, loc_ref, branch, cy_clobber;
184 enum machine_mode mode;
186 mode = GET_MODE (op0);
187 if (mode != HImode && mode != SImode)
/* Case 4 above: GT/LE/GTU/LEU is decomposed into a strict-inequality
   branch plus an equality branch, via recursive calls.  */
191 && (code == GT || code == LE || code == GTU || code == LEU))
193 int unsigned_p = (code == GTU || code == LEU);
194 int gt_p = (code == GT || code == GTU);
198 lab = gen_label_rtx ();
199 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
200 /* This should be generated as a comparison against the temporary
201 created by the previous insn, but reload can't handle that. */
202 xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
/* Case 2 above: SImode EQ/NE against a nonzero value is split into one
   HImode compare-and-branch per machine word.  */
207 else if (mode == SImode
208 && (code == NE || code == EQ)
209 && op1 != const0_rtx)
212 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
216 lab = gen_label_rtx ();
218 for (i = 0; i < num_words - 1; i++)
220 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
222 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
224 xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
/* Last word decides the overall EQ/NE result.  */
226 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
228 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
230 xstormy16_emit_cbranch (code, loc);
237 /* We can't allow reload to try to generate any reload after a branch,
238 so when some register must match we must make the temporary ourselves. */
242 tmp = gen_reg_rtx (mode);
243 emit_move_insn (tmp, op0);
/* Build the branch itself: (set pc (if_then_else cond ...)) wrapped in
   a PARALLEL together with the clobbers the branch patterns require.  */
247 condition_rtx = gen_rtx (code, mode, op0, op1);
248 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
249 branch = gen_rtx_SET (VOIDmode, pc_rtx,
250 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
253 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));
256 vec = gen_rtvec (2, branch, cy_clobber);
257 else if (code == NE || code == EQ)
258 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
/* SImode ordered comparison: the PARALLEL also carries the subtract
   (or a clobber of op0) plus the carry-scratch clobber.  */
263 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
265 sub = gen_rtx_CLOBBER (SImode, op0);
267 vec = gen_rtvec (3, branch, sub, cy_clobber);
270 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
273 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
274 the arithmetic operation. Most of the work is done by
275 xstormy16_expand_arith. */
278 xstormy16_split_cbranch (mode, label, comparison, dest, carry)
279 enum machine_mode mode;
285 rtx op0 = XEXP (comparison, 0);
286 rtx op1 = XEXP (comparison, 1);
/* Emit the arithmetic as a COMPARE sequence...  */
291 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
/* ...then walk to the last insn just emitted and rewrite its first SET
   in place so it becomes the conditional branch to LABEL.  */
299 while (NEXT_INSN (last_insn) != NULL_RTX)
300 last_insn = NEXT_INSN (last_insn);
302 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
303 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
304 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
309 /* Return the string to output a conditional branch to LABEL, which is
310 the operand number of the label.
312 OP is the conditional expression, or NULL for branch-always.
314 REVERSED is nonzero if we should reverse the sense of the comparison.
319 xstormy16_output_cbranch_hi (op, label, reversed, insn)
/* Result lives in a static buffer -- not reentrant; the caller must
   consume it before the next call.  */
325 static char string[64];
/* A long branch is assembled as an inverted short branch around an
   absolute jmpf; detect that case from the insn length attribute.  */
326 int need_longbranch = (op != NULL_RTX
327 ? get_attr_length (insn) == 8
328 : get_attr_length (insn) == 4);
329 int really_reversed = reversed ^ need_longbranch;
331 const char *template;
332 const char *operands;
341 sprintf (string, "%s %s", ccode, label);
345 code = GET_CODE (op);
/* Canonicalize so the first operand is a register, swapping the
   comparison sense to match.  */
347 if (GET_CODE (XEXP (op, 0)) != REG)
349 code = swap_condition (code);
355 /* Work out which way this really branches. */
357 code = reverse_condition (code);
/* Map the RTL comparison code to the assembler condition suffix.  */
361 case EQ: ccode = "z"; break;
362 case NE: ccode = "nz"; break;
363 case GE: ccode = "ge"; break;
364 case LT: ccode = "lt"; break;
365 case GT: ccode = "gt"; break;
366 case LE: ccode = "le"; break;
367 case GEU: ccode = "nc"; break;
368 case LTU: ccode = "c"; break;
369 case GTU: ccode = "hi"; break;
370 case LEU: ccode = "ls"; break;
/* Long form: skip over a jmpf; short form: branch directly.  */
377 template = "b%s %s,.+8 | jmpf %s";
379 template = "b%s %s,%s";
380 sprintf (string, template, ccode, operands, label);
385 /* Return the string to output a conditional branch to LABEL, which is
386 the operand number of the label, but suitable for the tail of a
389 OP is the conditional expression (OP is never NULL_RTX).
391 REVERSED is nonzero if we should reverse the sense of the comparison.
396 xstormy16_output_cbranch_si (op, label, reversed, insn)
/* As with the HImode variant, the result lives in a static buffer.  */
402 static char string[64];
403 int need_longbranch = get_attr_length (insn) >= 8;
404 int really_reversed = reversed ^ need_longbranch;
406 const char *template;
410 code = GET_CODE (op);
412 /* Work out which way this really branches. */
414 code = reverse_condition (code);
418 case EQ: ccode = "z"; break;
419 case NE: ccode = "nz"; break;
420 case GE: ccode = "ge"; break;
421 case LT: ccode = "lt"; break;
422 case GEU: ccode = "nc"; break;
423 case LTU: ccode = "c"; break;
425 /* The missing codes above should never be generated. */
/* EQ/NE of a 32-bit value: OR the two halves of the register pair first
   so the flags reflect the whole value.  */
436 if (GET_CODE (XEXP (op, 0)) != REG)
439 regnum = REGNO (XEXP (op, 0));
440 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
/* Ordered comparisons finish the 32-bit subtract with an sbc first.  */
444 case GE: case LT: case GEU: case LTU:
445 strcpy (prevop, "sbc %2,%3");
/* Long form branches around a jmpf, as in the HImode routine.  */
453 template = "%s | b%s .+6 | jmpf %s";
455 template = "%s | b%s %s";
456 sprintf (string, template, prevop, ccode, label);
461 /* Many machines have some registers that cannot be copied directly to or from
462 memory or even from other types of registers. An example is the `MQ'
463 register, which on most machines, can only be copied to or from general
464 registers, but not memory. Some machines allow copying all registers to and
465 from memory, but require a scratch register for stores to some memory
466 locations (e.g., those with symbolic address on the RT, and those with
467 certain symbolic address on the SPARC when compiling PIC). In some cases,
468 both an intermediate and a scratch register are required.
470 You should define these macros to indicate to the reload phase that it may
471 need to allocate at least one register for a reload in addition to the
472 register to contain the data. Specifically, if copying X to a register
473 CLASS in MODE requires an intermediate register, you should define
474 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
475 whose registers can be used as intermediate registers or scratch registers.
477 If copying a register CLASS in MODE to X requires an intermediate or scratch
478 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
479 largest register class required. If the requirements for input and output
480 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
481 instead of defining both macros identically.
483 The values returned by these macros are often `GENERAL_REGS'. Return
484 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
485 to or from a register of CLASS in MODE without requiring a scratch register.
486 Do not define this macro if it would always return `NO_REGS'.
488 If a scratch register is required (either with or without an intermediate
489 register), you should define patterns for `reload_inM' or `reload_outM', as
490 required. These patterns, which will normally be implemented with a
491 `define_expand', should be similar to the `movM' patterns, except that
492 operand 2 is the scratch register.
494 Define constraints for the reload register and scratch register that contain
495 a single register class. If the original reload register (whose class is
496 CLASS) can meet the constraint given in the pattern, the value returned by
497 these macros is used for the class of the scratch register. Otherwise, two
498 additional reload registers are required. Their classes are obtained from
499 the constraints in the insn pattern.
501 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
502 either be in a hard register or in memory. Use `true_regnum' to find out;
503 it will return -1 if the pseudo is in memory and the hard register number if
506 These macros should not be used in the case where a particular class of
507 registers can only be copied to memory and not to another class of
508 registers. In that case, secondary reload registers are not needed and
509 would not be helpful. Instead, a stack location must be used to perform the
510 copy and the `movM' pattern should use memory as an intermediate storage.
511 This case often occurs between floating-point and general registers. */
/* Implements SECONDARY_RELOAD_CLASS; see the long explanatory comment
   immediately above this function.  */
514 xstormy16_secondary_reload_class (class, mode, x)
515 enum reg_class class;
516 enum machine_mode mode;
519 /* This chip has the interesting property that only the first eight
520 registers can be moved to/from memory. */
/* Memory (or a pseudo that may end up in memory) being reloaded into a
   class outside EIGHT_REGS needs an EIGHT_REGS intermediary -- the
   returned value is elided in this listing.  */
521 if ((GET_CODE (x) == MEM
522 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
523 && (true_regnum (x) == -1
524 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
525 && ! reg_class_subset_p (class, EIGHT_REGS))
528 /* When reloading a PLUS, the carry register will be required
529 unless the inc or dec instructions can be used. */
530 if (xstormy16_carry_plus_operand (x, mode))
536 /* Recognize a PLUS that needs the carry register. */
538 xstormy16_carry_plus_operand (x, mode)
540 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Offsets in [-4, 4] can be handled with inc/dec instructions and so
   avoid touching the carry register.  */
542 return (GET_CODE (x) == PLUS
543 && GET_CODE (XEXP (x, 1)) == CONST_INT
544 && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4));
547 /* Detect and error out on out-of-range constants for movhi. */
549 xs_hi_general_operand (x, mode)
551 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Accepts [-32768, 32767]; -32768 itself is deliberately allowed
   (strict < on the negative side).  The error is reported here but the
   final accept/reject decision is still deferred to general_operand.  */
553 if ((GET_CODE (x) == CONST_INT)
554 && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
555 error ("Constant halfword load operand out of range.");
556 return general_operand (x, mode);
559 /* Detect and error out on out-of-range constants for addhi and subhi. */
561 xs_hi_nonmemory_operand (x, mode)
563 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Same 16-bit signed range check as xs_hi_general_operand, but the
   final decision defers to nonmemory_operand.  */
565 if ((GET_CODE (x) == CONST_INT)
566 && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
567 error ("Constant arithmetic operand out of range.");
568 return nonmemory_operand (x, mode);
572 xstormy16_preferred_reload_class (x, class)
573 enum reg_class class;
/* Only the first eight registers can reach memory (see
   xstormy16_secondary_reload_class), so for a MEM reload a GENERAL_REGS
   request is narrowed -- presumably to EIGHT_REGS; the return statements
   are not visible in this listing.  */
576 if (class == GENERAL_REGS
577 && GET_CODE (x) == MEM)
/* True iff X is a CONST_INT whose value plus OFFSET fits a signed
   12-bit displacement, i.e. lies in [-2048, 2047] (implemented with the
   unsigned-compare trick).  */
583 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
584 (GET_CODE (X) == CONST_INT \
585 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
/* True iff X plus OFFSET is an absolute address below 0x100 or in
   [0x7F00, 0x7FFF] -- the two directly addressable windows.  */
587 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
588 (GET_CODE (X) == CONST_INT \
589 && INTVAL (X) + (OFFSET) >= 0 \
590 && INTVAL (X) + (OFFSET) < 0x8000 \
591 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
594 xstormy16_legitimate_address_p (mode, x, strict)
595 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Accepted address forms: small absolute constants, register plus
   12-bit offset, pre-modify/post-inc/pre-dec, and a plain base register
   (which must be a hard register when STRICT).  Base-register checks in
   the compound forms are elided in this listing.  */
599 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
602 if (GET_CODE (x) == PLUS
603 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
606 if ((GET_CODE (x) == PRE_MODIFY
607 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
608 || GET_CODE (x) == POST_INC
609 || GET_CODE (x) == PRE_DEC)
612 if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
613 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
619 /* Return nonzero if memory address X (an RTX) can have different
620 meanings depending on the machine mode of the memory reference it
621 is used for or if the address is valid for some modes but not
624 Autoincrement and autodecrement addresses typically have mode-dependent
625 effects because the amount of the increment or decrement is the size of the
626 operand being addressed. Some machines have other mode-dependent addresses.
627 Many RISC machines have no mode-dependent addresses.
629 You may assume that ADDR is a valid address for the machine.
631 On this chip, this is true if the address is valid with an offset
632 of 0 but not of 6, because in that case it cannot be used as an
633 address for DImode or DFmode, or if the address is a post-increment
634 or pre-decrement address. */
636 xstormy16_mode_dependent_address_p (x)
/* Per the comment above: an address is mode-dependent if it is valid at
   offset 0 but would not be at offset 6 (so wide modes cannot use it),
   or if it autoincrements/autodecrements.  */
639 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
640 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
643 if (GET_CODE (x) == PLUS
644 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
645 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
648 if (GET_CODE (x) == PLUS)
651 if (GET_CODE (x) == POST_INC
652 || GET_CODE (x) == PRE_DEC)
658 /* A C expression that defines the optional machine-dependent constraint
659 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
660 types of operands, usually memory references, for the target machine.
661 Normally this macro will not be defined. If it is required for a particular
662 target machine, it should return 1 if VALUE corresponds to the operand type
663 represented by the constraint letter C. If C is not defined as an extra
664 constraint, the value returned should be 0 regardless of VALUE. */
/* Implements the extra constraint letters Q/R/S/T/U; see the comment
   immediately above this function.  The switch framing is elided in
   this listing.  */
666 xstormy16_extra_constraint_p (x, c)
672 /* 'Q' is for pushes. */
674 return (GET_CODE (x) == MEM
675 && GET_CODE (XEXP (x, 0)) == POST_INC
676 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
678 /* 'R' is for pops. */
680 return (GET_CODE (x) == MEM
681 && GET_CODE (XEXP (x, 0)) == PRE_DEC
682 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
684 /* 'S' is for immediate memory addresses. */
686 return (GET_CODE (x) == MEM
687 && GET_CODE (XEXP (x, 0)) == CONST_INT
688 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
692 /* Not implemented yet. */
695 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
696 for allocating a scratch register for 32-bit shifts. */
698 return (GET_CODE (x) == CONST_INT
699 && (INTVAL (x) < 2 || INTVAL (x) > 15));
707 short_memory_operand (x, mode)
709 enum machine_mode mode;
/* A memory operand whose address is anything other than reg+offset --
   presumably the forms with a shorter encoding; TODO confirm against
   the insn length attributes in the .md file.  */
711 if (! memory_operand (x, mode))
713 return (GET_CODE (XEXP (x, 0)) != PLUS);
/* A nonimmediate operand that is neither a push nor a pop through the
   stack pointer.  */
717 nonimmediate_nonstack_operand (op, mode)
719 enum machine_mode mode;
721 /* 'Q' is for pushes, 'R' for pops. */
722 return (nonimmediate_operand (op, mode)
723 && ! xstormy16_extra_constraint_p (op, 'Q')
724 && ! xstormy16_extra_constraint_p (op, 'R'));
727 /* Splitter for the 'move' patterns, for modes not directly implemented
728 by hardware. Emit insns to copy a value of mode MODE from SRC to
731 This function is only called when reload_completed.
/* Split a multi-word move into word-sized moves; see the comment above.
   NOTE(review): numbering gaps show elided lines (braces, else arms,
   direction assignments) throughout; comments only, code untouched.  */
735 xstormy16_split_move (mode, dest, src)
736 enum machine_mode mode;
740 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
741 int direction, end, i;
742 int src_modifies = 0;
743 int dest_modifies = 0;
744 int src_volatile = 0;
745 int dest_volatile = 0;
747 rtx auto_inc_reg_rtx = NULL_RTX;
749 /* Check initial conditions. */
750 if (! reload_completed
751 || mode == QImode || mode == HImode
752 || ! nonimmediate_operand (dest, mode)
753 || ! general_operand (src, mode))
756 /* This case is not supported below, and shouldn't be generated. */
757 if (GET_CODE (dest) == MEM
758 && GET_CODE (src) == MEM)
761 /* This case is very very bad after reload, so trap it now. */
762 if (GET_CODE (dest) == SUBREG
763 || GET_CODE (src) == SUBREG)
766 /* The general idea is to copy by words, offsetting the source and
767 destination. Normally the least-significant word will be copied
768 first, but for pre-dec operations it's better to copy the
769 most-significant word first. Only one operand can be a pre-dec
772 It's also possible that the copy overlaps so that the direction
/* Record side effects and volatility of a MEM operand, then strip the
   volatile flag from the working copy so the per-word MEMs can be
   re-marked individually below.  */
776 if (GET_CODE (dest) == MEM)
778 mem_operand = XEXP (dest, 0);
779 dest_modifies = side_effects_p (mem_operand);
780 if (auto_inc_p (mem_operand))
781 auto_inc_reg_rtx = XEXP (mem_operand, 0);
782 dest_volatile = MEM_VOLATILE_P (dest);
785 dest = copy_rtx (dest);
786 MEM_VOLATILE_P (dest) = 0;
789 else if (GET_CODE (src) == MEM)
791 mem_operand = XEXP (src, 0);
792 src_modifies = side_effects_p (mem_operand);
793 if (auto_inc_p (mem_operand))
794 auto_inc_reg_rtx = XEXP (mem_operand, 0);
795 src_volatile = MEM_VOLATILE_P (src);
798 src = copy_rtx (src);
799 MEM_VOLATILE_P (src) = 0;
803 mem_operand = NULL_RTX;
/* Choose the copy direction so overlapping reg-reg copies and pre-dec
   addresses are handled correctly.  */
805 if (mem_operand == NULL_RTX)
807 if (GET_CODE (src) == REG
808 && GET_CODE (dest) == REG
809 && reg_overlap_mentioned_p (dest, src)
810 && REGNO (dest) > REGNO (src))
813 else if (GET_CODE (mem_operand) == PRE_DEC
814 || (GET_CODE (mem_operand) == PLUS
815 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
817 else if (GET_CODE (src) == MEM
818 && reg_overlap_mentioned_p (dest, src))
821 if (GET_CODE (dest) != REG)
823 regno = REGNO (dest);
825 if (! refers_to_regno_p (regno, regno + num_words, mem_operand, 0))
828 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
830 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
834 /* This means something like
835 (set (reg:DI r0) (mem:DI (reg:HI r1)))
836 which we'd need to support by doing the set of the second word
/* Emit one word-sized SET per iteration, in the chosen direction.  */
841 end = direction < 0 ? -1 : num_words;
842 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
844 rtx w_src, w_dest, insn;
847 w_src = gen_rtx_MEM (word_mode, mem_operand);
849 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
851 MEM_VOLATILE_P (w_src) = 1;
853 w_dest = gen_rtx_MEM (word_mode, mem_operand);
855 w_dest = simplify_gen_subreg (word_mode, dest, mode,
858 MEM_VOLATILE_P (w_dest) = 1;
860 /* The simplify_subreg calls must always be able to simplify. */
861 if (GET_CODE (w_src) == SUBREG
862 || GET_CODE (w_dest) == SUBREG)
/* Attach a REG_INC note so flow analysis sees the autoincrement.  */
865 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
866 if (auto_inc_reg_rtx)
867 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
873 /* Expander for the 'move' patterns. Emit insns to copy a value of
874 mode MODE from SRC to DEST. */
877 xstormy16_expand_move (mode, dest, src)
878 enum machine_mode mode;
/* PRE_MODIFY on the destination: emit the pointer update as a separate
   add (clobbering register 16 in BImode -- presumably the carry bit;
   TODO confirm) and rewrite DEST as a plain (mem reg).  */
882 if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
884 rtx pmv = XEXP (dest, 0);
885 rtx dest_reg = XEXP (pmv, 0);
886 rtx dest_mod = XEXP (pmv, 1);
887 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
888 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
890 dest = gen_rtx_MEM (mode, dest_reg);
891 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
/* Same transformation for PRE_MODIFY on the source side.  */
893 else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
895 rtx pmv = XEXP (src, 0);
896 rtx src_reg = XEXP (pmv, 0);
897 rtx src_mod = XEXP (pmv, 1);
898 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
899 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
901 src = gen_rtx_MEM (mode, src_reg);
902 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
905 /* There are only limited immediate-to-memory move instructions. */
906 if (! reload_in_progress
907 && ! reload_completed
908 && GET_CODE (dest) == MEM
909 && (GET_CODE (XEXP (dest, 0)) != CONST_INT
910 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
911 && GET_CODE (src) != REG
912 && GET_CODE (src) != SUBREG)
913 src = copy_to_mode_reg (mode, src);
914 /* Wide modes after reload go straight to the splitter.  */
915 /* Don't emit something we would immediately split. */
917 && mode != HImode && mode != QImode)
919 xstormy16_split_move (mode, dest, src);
923 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
929 The stack is laid out as follows:
933 Register save area (up to 4 words)
934 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
936 AP-> Return address (two words)
937 9th procedure parameter word
938 10th procedure parameter word
940 last procedure parameter word
942 The frame pointer location is tuned to make it most likely that all
943 parameters and local variables can be accessed using a load-indexed
946 /* A structure to describe the layout. */
947 struct xstormy16_stack_layout
949 /* Size of the topmost three items on the stack. */
951 int register_save_size;
952 int stdarg_save_size;
953 /* Sum of the above items. */
955 /* Various offsets. */
956 int first_local_minus_ap;
961 /* Does REGNO need to be saved? */
/* Save a register if it is live and call-saved; in an interrupt
   function (IFUN), additionally save any non-fixed call-clobbered
   register other than carry (live, or always in a non-leaf since a
   callee may clobber it).  */
962 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
963 ((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM]) \
964 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
965 && (REGNO_REG_CLASS (REGNUM) != CARRY_REGS) \
966 && (regs_ever_live[REGNUM] || ! current_function_is_leaf)))
968 /* Compute the stack layout. */
/* Fills in a struct xstormy16_stack_layout from the current function's
   frame size, saved registers and stdarg needs.  Deterministic for a
   given function state, so it is recomputed freely by callers.  */
969 struct xstormy16_stack_layout
970 xstormy16_compute_stack_layout ()
972 struct xstormy16_stack_layout layout;
974 const int ifun = xstormy16_interrupt_function_p ();
976 layout.locals_size = get_frame_size ();
/* One word per register that REG_NEEDS_SAVE selects.  */
978 layout.register_save_size = 0;
979 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
980 if (REG_NEEDS_SAVE (regno, ifun))
981 layout.register_save_size += UNITS_PER_WORD;
983 if (current_function_stdarg)
984 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
986 layout.stdarg_save_size = 0;
988 layout.frame_size = (layout.locals_size
989 + layout.register_save_size
990 + layout.stdarg_save_size);
/* Position the frame pointer so that, when possible, both incoming
   args and locals are within the 2048-byte indexed-addressing range.  */
992 if (current_function_args_size <= 2048 && current_function_args_size != -1)
994 if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
995 + current_function_args_size <= 2048)
996 layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
998 layout.fp_minus_ap = 2048 - current_function_args_size;
1001 layout.fp_minus_ap = (layout.stdarg_save_size
1002 + layout.register_save_size
1003 + INCOMING_FRAME_SP_OFFSET);
1004 layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
1005 - layout.fp_minus_ap);
1006 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
1010 /* Determine how all the special registers get eliminated. */
/* Implements INITIAL_ELIMINATION_OFFSET: the offsets are read straight
   from the computed stack layout for each FROM/TO register pair.  */
1012 xstormy16_initial_elimination_offset (from, to)
1015 struct xstormy16_stack_layout layout;
1018 layout = xstormy16_compute_stack_layout ();
1020 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1021 result = layout.sp_minus_fp - layout.locals_size;
1022 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1023 result = -layout.locals_size;
1024 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1025 result = -layout.fp_minus_ap;
1026 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1027 result = -(layout.sp_minus_fp + layout.fp_minus_ap);
1035 emit_addhi3_postreload (dest, src0, src1)
1040 rtx set, clobber, insn;
/* Emit DEST = SRC0 + SRC1 together with the explicit clobber of
   register 16 in BImode (presumably the carry bit -- TODO confirm
   against the register definitions) that the post-reload addhi3
   pattern requires; returns the emitted insn.  */
1042 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1043 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
1044 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1048 /* Called after register allocation to add any instructions needed for
1049 the prologue. Using a prologue insn is favored compared to putting
1050 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1051 since it allows the scheduler to intermix instructions with the
1052 saves of the caller saved registers. In some cases, it might be
1053 necessary to emit a barrier instruction as the last insn to prevent
1056 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1057 so that the debug info generation code can handle them properly. */
1059 xstormy16_expand_prologue ()
1061 struct xstormy16_stack_layout layout;
1065 const int ifun = xstormy16_interrupt_function_p ();
/* Pushes store through (mem (post_inc sp)) -- the stack grows upward
   on this target.  */
1067 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1068 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1070 layout = xstormy16_compute_stack_layout ();
1072 if (layout.locals_size >= 32768)
1073 error ("Local variable memory requirements exceed capacity.");
1075 /* Save the argument registers if necessary. */
1076 if (layout.stdarg_save_size)
1077 for (regno = FIRST_ARGUMENT_REGISTER;
1078 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1082 rtx reg = gen_rtx_REG (HImode, regno);
1084 insn = emit_move_insn (mem_push_rtx, reg);
1085 RTX_FRAME_RELATED_P (insn) = 1;
/* Describe the push to the DWARF unwinder as an explicit store plus
   stack-pointer adjustment, attached via REG_FRAME_RELATED_EXPR.  */
1087 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1089 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1090 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1092 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1093 plus_constant (stack_pointer_rtx,
1094 GET_MODE_SIZE (Pmode)));
1095 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1098 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1099 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1102 /* Push each of the registers to save. */
1103 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1104 if (REG_NEEDS_SAVE (regno, ifun))
1107 rtx reg = gen_rtx_REG (HImode, regno);
1109 insn = emit_move_insn (mem_push_rtx, reg);
1110 RTX_FRAME_RELATED_P (insn) = 1;
/* Same unwinder bookkeeping as for the argument-register pushes.  */
1112 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1114 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1115 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1117 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1118 plus_constant (stack_pointer_rtx,
1119 GET_MODE_SIZE (Pmode)));
1120 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1123 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1124 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1127 /* It's just possible that the SP here might be what we need for
1129 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1131 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1132 RTX_FRAME_RELATED_P (insn) = 1;
1135 /* Allocate space for local variables. */
1136 if (layout.locals_size)
1138 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1139 GEN_INT (layout.locals_size));
1140 RTX_FRAME_RELATED_P (insn) = 1;
1143 /* Set up the frame pointer, if required. */
1144 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1146 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1147 RTX_FRAME_RELATED_P (insn) = 1;
1148 if (layout.sp_minus_fp)
1150 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1151 hard_frame_pointer_rtx,
1152 GEN_INT (-layout.sp_minus_fp));
1153 RTX_FRAME_RELATED_P (insn) = 1;
1158 /* Do we need an epilogue at all? */
/* NOTE(review): the return type and name of this predicate are elided
   in this listing (likely the target's direct_return test) -- confirm
   upstream.  True when no frame teardown is needed.  */
1162 return (reload_completed
1163 && xstormy16_compute_stack_layout ().frame_size == 0);
1166 /* Called after register allocation to add any instructions needed for
1167 the epilogue. Using an epilogue insn is favored compared to putting
1168 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1169 since it allows the scheduler to intermix instructions with the
1170 saves of the caller saved registers. In some cases, it might be
1171 necessary to emit a barrier instruction as the last insn to prevent
1175 xstormy16_expand_epilogue ()
1177 struct xstormy16_stack_layout layout;
1180 const int ifun = xstormy16_interrupt_function_p ();
/* Pops load through (mem (pre_dec sp)), mirroring the post-inc push.  */
1182 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1183 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1185 layout = xstormy16_compute_stack_layout ();
1187 /* Pop the stack for the locals. */
1188 if (layout.locals_size)
1190 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1191 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx)
1193 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1194 GEN_INT (- layout.locals_size));
1197 /* Restore any call-saved registers. */
/* Walk downward so registers come back in reverse of the order the
   prologue pushed them.  */
1198 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1199 if (REG_NEEDS_SAVE (regno, ifun))
1200 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1202 /* Pop the stack for the stdarg save area. */
1203 if (layout.stdarg_save_size)
1204 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1205 GEN_INT (- layout.stdarg_save_size));
/* Interrupt functions need the special return-from-interrupt insn.  */
1209 emit_jump_insn (gen_return_internal_interrupt ());
1211 emit_jump_insn (gen_return_internal ());
1215 xstormy16_epilogue_uses (regno)
/* After reload, a call-used register counts as 'used' by the epilogue
   exactly when the prologue/epilogue pair saves and restores it.  */
1218 if (reload_completed && call_used_regs[regno])
1220 const int ifun = xstormy16_interrupt_function_p ();
1221 return REG_NEEDS_SAVE (regno, ifun);
1226 /* Return an updated summarizer variable CUM to advance past an
1227 argument in the argument list. The values MODE, TYPE and NAMED
1228 describe that argument. Once this is done, the variable CUM is
1229 suitable for analyzing the *following* argument with
1230 `FUNCTION_ARG', etc.
1232 This function need not do anything if the argument in question was
1233 passed on the stack. The compiler knows how to track the amount of
1234 stack space used for arguments without any special help. However,
1235 it makes life easier for xstormy16_build_va_list if it does update
1238 xstormy16_function_arg_advance (cum, mode, type, named)
1239 CUMULATIVE_ARGS cum;
1240 enum machine_mode mode;
1242 int named ATTRIBUTE_UNUSED;
1244 /* If an argument would otherwise be passed partially in registers,
1245 and partially on the stack, the whole of it is passed on the
/* CUM counts words already used; an argument straddling the register
   boundary bumps CUM to the limit so it goes wholly to the stack.  */
1247 if (cum < NUM_ARGUMENT_REGISTERS
1248 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1249 cum = NUM_ARGUMENT_REGISTERS;
1251 cum += XSTORMY16_WORD_SIZE (type, mode);
1256 /* Do any needed setup for a variadic function. CUM has not been updated
1257 for the last named argument which has type TYPE and mode MODE. */
/* Implement SETUP_INCOMING_VARARGS.  All parameters are unused; any
   real work (the stdarg register save area) is handled via the stack
   layout code instead.  Body elided in this listing -- appears to be
   empty; TODO confirm.  */
1259 xstormy16_setup_incoming_varargs (cum, int_mode, type, pretend_size)
1260 CUMULATIVE_ARGS cum ATTRIBUTE_UNUSED;
1261 int int_mode ATTRIBUTE_UNUSED;
1262 tree type ATTRIBUTE_UNUSED;
1263 int * pretend_size ATTRIBUTE_UNUSED;
1267 /* Build the va_list type.
1269 For this chip, va_list is a record containing a counter and a pointer.
1270 The counter is of type 'int' and indicates how many bytes
1271 have been used to date. The pointer indicates the stack position
1272 for arguments that have not been passed in registers.
1273 To keep the layout nice, the pointer is first in the structure. */
/* Implement BUILD_VA_LIST_TYPE: return a RECORD_TYPE named
   __va_list_tag with two fields -- `base' (stack position of unnamed
   args; its type is on an elided line) followed by `count' (unsigned,
   bytes of arguments consumed so far).  */
1276 xstormy16_build_va_list ()
1278 tree f_1, f_2, record, type_decl;
1280 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1281 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
1283 f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
1285 f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
1286 unsigned_type_node);
1288 DECL_FIELD_CONTEXT (f_1) = record;
1289 DECL_FIELD_CONTEXT (f_2) = record;
/* Wire the decl and the field chain into the record, then lay it out.  */
1291 TREE_CHAIN (record) = type_decl;
1292 TYPE_NAME (record) = type_decl;
1293 TYPE_FIELDS (record) = f_1;
1294 TREE_CHAIN (f_1) = f_2;
1296 layout_type (record);
1301 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1302 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1303 variable to initialize. NEXTARG is the machine independent notion of the
1304 'next' argument after the variable arguments. */
/* Expand va_start: initialize VALIST's `base' field to the incoming
   argument pointer (plus INCOMING_FRAME_SP_OFFSET) and its `count'
   field to the number of bytes of named arguments already consumed.  */
1306 xstormy16_expand_builtin_va_start (valist, nextarg)
1308 rtx nextarg ATTRIBUTE_UNUSED;
1310 tree f_base, f_count;
/* va_start cannot work in an interrupt handler: the incoming frame
   does not hold a normal argument area.  */
1314 if (xstormy16_interrupt_function_p ())
1315 error ("cannot use va_start in interrupt function");
/* Field order matches xstormy16_build_va_list: base first, count second.  */
1317 f_base = TYPE_FIELDS (va_list_type_node);
1318 f_count = TREE_CHAIN (f_base);
1320 base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base);
1321 count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count);
/* base = virtual_incoming_args + INCOMING_FRAME_SP_OFFSET.  */
1323 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1324 t = build (PLUS_EXPR, TREE_TYPE (base), t,
1325 build_int_2 (INCOMING_FRAME_SP_OFFSET, 0));
1326 t = build (MODIFY_EXPR, TREE_TYPE (base), base, t);
1327 TREE_SIDE_EFFECTS (t) = 1;
1328 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* count = (argument words used by named args) * UNITS_PER_WORD.  */
1330 t = build (MODIFY_EXPR, TREE_TYPE (count), count,
1331 build_int_2 (current_function_args_info * UNITS_PER_WORD, 0));
1332 TREE_SIDE_EFFECTS (t) = 1;
1333 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1336 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1337 of type va_list as a tree, TYPE is the type passed to va_arg.
1338 Note: This algorithm is documented in stormy-abi. */
/* Expand va_arg for TYPE from VALIST.  Two cases, per stormy-abi:
   if count+size still fits in the register save area the argument
   lives at base+count; otherwise it is on the stack and its address
   is computed by subtracting from base.  Either way count is then
   advanced by the argument's rounded size.  Returns (via elided code)
   the address register.  NOTE(review): listing is elided; several
   statements between the visible lines are not shown.  */
1341 xstormy16_expand_builtin_va_arg (valist, type)
1345 tree f_base, f_count;
1347 rtx count_rtx, addr_rtx, r;
1348 rtx lab_gotaddr, lab_fromstack;
1350 int size, size_of_reg_args;
1351 tree size_tree, count_plus_size;
1352 rtx count_plus_size_rtx;
1354 f_base = TYPE_FIELDS (va_list_type_node);
1355 f_count = TREE_CHAIN (f_base);
1357 base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base);
1358 count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count);
/* size: bytes the argument occupies when pushed; size_tree: size
   rounded up to a whole number of words.  */
1360 size = PUSH_ROUNDING (int_size_in_bytes (type));
1361 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1363 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1365 count_rtx = expand_expr (count, NULL_RTX, HImode, EXPAND_NORMAL);
1366 lab_gotaddr = gen_label_rtx ();
1367 lab_fromstack = gen_label_rtx ();
1368 addr_rtx = gen_reg_rtx (Pmode);
/* If count + size would overflow the register area, the argument was
   passed on the stack: branch to lab_fromstack.  */
1370 count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree);
1371 count_plus_size_rtx = expand_expr (count_plus_size, NULL_RTX, HImode, EXPAND_NORMAL);
1372 emit_cmp_and_jump_insns (count_plus_size_rtx, GEN_INT (size_of_reg_args),
1373 GTU, const1_rtx, HImode, 1, lab_fromstack);
/* Register case: address is simply base + count.  */
1375 t = build (PLUS_EXPR, ptr_type_node, base, count);
1376 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
1378 emit_move_insn (addr_rtx, r);
1380 emit_jump_insn (gen_jump (lab_gotaddr));
1382 emit_label (lab_fromstack);
1384 /* Arguments larger than a word might need to skip over some
1385 registers, since arguments are either passed entirely in
1386 registers or entirely on the stack. */
/* size < 0 means variable-sized TYPE -- also forced to the stack.  */
1387 if (size > 2 || size < 0)
1389 rtx lab_notransition = gen_label_rtx ();
1390 emit_cmp_and_jump_insns (count_rtx, GEN_INT (NUM_ARGUMENT_REGISTERS
1392 GEU, const1_rtx, HImode, 1, lab_notransition);
/* First stack argument seen: skip count forward past the whole
   register save area.  */
1394 t = build (MODIFY_EXPR, TREE_TYPE (count), count,
1395 build_int_2 (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD, 0));
1396 TREE_SIDE_EFFECTS (t) = 1;
1397 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1399 emit_label (lab_notransition);
/* Stack case: addr = base - (count + size - reg_area + SP offset);
   stack arguments are addressed downward from base -- TODO confirm
   against stormy-abi.  */
1402 t = build (PLUS_EXPR, sizetype, size_tree,
1403 build_int_2 ((- NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1404 + INCOMING_FRAME_SP_OFFSET),
1406 t = build (PLUS_EXPR, TREE_TYPE (count), count, fold (t));
1407 t = build (MINUS_EXPR, TREE_TYPE (base), base, t);
1408 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
1410 emit_move_insn (addr_rtx, r);
1412 emit_label (lab_gotaddr);
/* Common exit: count += rounded size.  */
1414 count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree);
1415 t = build (MODIFY_EXPR, TREE_TYPE (count), count, count_plus_size);
1416 TREE_SIDE_EFFECTS (t) = 1;
1417 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1422 /* Initialize the variable parts of a trampoline. ADDR is an RTX for
1423 the address of the trampoline; FNADDR is an RTX for the address of
1424 the nested function; STATIC_CHAIN is an RTX for the static chain
1425 value that should be passed to the function when it is called. */
/* Write the variable parts of a trampoline at ADDR: a short sequence
   that loads STATIC_CHAIN_REGNUM with STATIC_CHAIN and jumps to
   FNADDR.  Words are stored one HImode word at a time through
   reg_addr_mem, advancing reg_addr by 2 after each store.  */
1427 xstormy16_initialize_trampoline (addr, fnaddr, static_chain)
1432 rtx reg_addr = gen_reg_rtx (Pmode);
1433 rtx temp = gen_reg_rtx (HImode);
1434 rtx reg_fnaddr = gen_reg_rtx (HImode);
1437 reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);
1439 emit_move_insn (reg_addr, addr);
/* Word 0: 0x3130 | STATIC_CHAIN_REGNUM -- presumably the encoding of
   a load-immediate into the static-chain register; the immediate
   itself is the next word.  TODO confirm against the xstormy16 ISA.  */
1440 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1441 emit_move_insn (reg_addr_mem, temp);
1442 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Word 1: the static chain value.  */
1443 emit_move_insn (temp, static_chain);
1444 emit_move_insn (reg_addr_mem, temp);
1445 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1446 emit_move_insn (reg_fnaddr, fnaddr);
/* Word 2: low byte of FNADDR merged with 0x0200 -- presumably the
   jump opcode; word 3: the remaining high bits of FNADDR.  */
1447 emit_move_insn (temp, reg_fnaddr);
1448 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1449 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1450 emit_move_insn (reg_addr_mem, temp);
1451 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1452 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1453 emit_move_insn (reg_addr_mem, reg_fnaddr);
1456 /* Create an RTX representing the place where a function returns a
1457 value of data type VALTYPE. VALTYPE is a tree node representing a
1458 data type. Write `TYPE_MODE (VALTYPE)' to get the machine mode
1459 used to represent that type. On many machines, only the mode is
1460 relevant. (Actually, on most machines, scalar values are returned
1461 in the same place regardless of mode).
1463 If `PROMOTE_FUNCTION_RETURN' is defined, you must apply the same promotion
1464 rules specified in `PROMOTE_MODE' if VALTYPE is a scalar type.
1466 If the precise function being called is known, FUNC is a tree node
1467 (`FUNCTION_DECL') for it; otherwise, FUNC is a null pointer. This makes it
1468 possible to use a different value-returning convention for specific
1469 functions when all their calls are known.
1471 `FUNCTION_VALUE' is not used for return values with aggregate data types,
1472 because these are returned in another way. See `STRUCT_VALUE_REGNUM' and
/* Implement FUNCTION_VALUE (see block comment above): return the REG
   in which a value of type VALTYPE is returned, after applying the
   target's PROMOTE_MODE rules.  FUNC is unused.  */
1475 xstormy16_function_value (valtype, func)
1477 tree func ATTRIBUTE_UNUSED;
1479 enum machine_mode mode;
1480 mode = TYPE_MODE (valtype);
1481 PROMOTE_MODE (mode, 0, valtype);
1482 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1485 /* A C compound statement that outputs the assembler code for a thunk function,
1486 used to implement C++ virtual function calls with multiple inheritance. The
1487 thunk acts as a wrapper around a virtual function, adjusting the implicit
1488 object parameter before handing control off to the real function.
1490 First, emit code to add the integer DELTA to the location that contains the
1491 incoming first argument. Assume that this argument contains a pointer, and
1492 is the one used to pass the `this' pointer in C++. This is the incoming
1493 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1494 addition must preserve the values of all other incoming arguments.
1496 After the addition, emit code to jump to FUNCTION, which is a
1497 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1498 the return address. Hence returning from FUNCTION will return to whoever
1499 called the current `thunk'.
1501 The effect must be as if @var{function} had been called directly
1502 with the adjusted first argument. This macro is responsible for
1503 emitting all of the code for a thunk function;
1504 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1507 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1508 extracted from it.) It might possibly be useful on some targets, but
/* Implement TARGET_ASM_OUTPUT_MI_THUNK (see block comment above):
   add DELTA to the `this' argument register, then tail-jump ("jmpf")
   to FUNCTION.  VCALL_OFFSET is unsupported (see the
   default_can_output_mi_thunk_no_vcall hook below).  */
1512 xstormy16_asm_output_mi_thunk (file, thunk_fndecl, delta,
1513 vcall_offset, function)
1515 tree thunk_fndecl ATTRIBUTE_UNUSED;
1516 HOST_WIDE_INT delta;
1517 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
1520 int regnum = FIRST_ARGUMENT_REGISTER;
1522 /* There might be a hidden first argument for a returned structure. */
/* If so, `this' is in the next register -- the adjustment of regnum
   is on an elided line; TODO confirm.  */
1523 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
/* The add immediate is masked to 16 bits to match the register width.  */
1526 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1527 fputs ("\tjmpf ", file);
1528 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1532 /* Output constructors and destructors. Just like
1533 default_named_section_asm_out_* but don't set the sections writable. */
1534 #undef TARGET_ASM_CONSTRUCTOR
1535 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1536 #undef TARGET_ASM_DESTRUCTOR
1537 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
/* Implement TARGET_ASM_DESTRUCTOR: emit SYMBOL as a pointer in the
   .dtors section (or a priority-suffixed .dtors.NNNNN section), kept
   read-only unlike default_named_section_asm_out_destructor.  */
1540 xstormy16_asm_out_destructor (symbol, priority)
1544 const char *section = ".dtors";
1547 /* ??? This only works reliably with the GNU linker. */
1548 if (priority != DEFAULT_INIT_PRIORITY)
1550 sprintf (buf, ".dtors.%.5u",
1551 /* Invert the numbering so the linker puts us in the proper
1552 order; constructors are run from right to left, and the
1553 linker sorts in increasing order. */
1554 MAX_INIT_PRIORITY - priority);
/* Flags 0 => the section is not marked writable.  */
1558 named_section_flags (section, 0);
1559 assemble_align (POINTER_SIZE);
1560 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Implement TARGET_ASM_CONSTRUCTOR: as xstormy16_asm_out_destructor
   above, but for the .ctors section.  */
1564 xstormy16_asm_out_constructor (symbol, priority)
1568 const char *section = ".ctors";
1571 /* ??? This only works reliably with the GNU linker. */
1572 if (priority != DEFAULT_INIT_PRIORITY)
1574 sprintf (buf, ".ctors.%.5u",
1575 /* Invert the numbering so the linker puts us in the proper
1576 order; constructors are run from right to left, and the
1577 linker sorts in increasing order. */
1578 MAX_INIT_PRIORITY - priority);
/* Flags 0 => the section is not marked writable.  */
1582 named_section_flags (section, 0);
1583 assemble_align (POINTER_SIZE);
1584 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1587 /* Print a memory address as an operand to reference that memory location. */
/* Print ADDRESS to FILE in assembler syntax: a bare constant, a
   symbolic constant, or a register form "(reg[,offset])" possibly with
   pre-decrement/post-increment decoration (the punctuation-emitting
   lines are elided in this listing).  */
1589 xstormy16_print_operand_address (file, address)
1593 HOST_WIDE_INT offset;
1594 int pre_dec, post_inc;
1596 /* There are a few easy cases. */
/* Plain integer address, truncated to the 16-bit address space.  */
1597 if (GET_CODE (address) == CONST_INT)
1599 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1603 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1605 output_addr_const (file, address);
1609 /* Otherwise, it's hopefully something of the form
1610 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))
/* Peel off a constant offset, if any.  */
1613 if (GET_CODE (address) == PLUS)
1615 if (GET_CODE (XEXP (address, 1)) != CONST_INT)
1617 offset = INTVAL (XEXP (address, 1));
1618 address = XEXP (address, 0);
/* Then peel off side-effect addressing to reach the base register.  */
1623 pre_dec = (GET_CODE (address) == PRE_DEC);
1624 post_inc = (GET_CODE (address) == POST_INC);
1625 if (pre_dec || post_inc)
1626 address = XEXP (address, 0);
1628 if (GET_CODE (address) != REG)
1634 fputs (reg_names [REGNO (address)], file);
1640 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
1645 /* Print an operand to an assembler instruction. */
/* Print operand X to FILE, modified by letter CODE.  Visible codes
   (the `case' labels themselves are on elided lines): a single-bit
   mask code, a symbol-without-@fptr code, and an immediate-minus-one
   code with a negating variant.  Falls through to the generic
   REG/MEM/constant printing at the bottom.  */
1647 xstormy16_print_operand (file, x, code)
1655 /* There is either one bit set, or one bit clear, in X.
1656 Print it preceded by '#'. */
1658 HOST_WIDE_INT xx = 1;
1661 if (GET_CODE (x) == CONST_INT)
1664 output_operand_lossage ("`B' operand is not constant");
/* Try the set-bit interpretation first, then the clear-bit one.  */
1666 l = exact_log2 (xx);
1668 l = exact_log2 (~xx);
1670 output_operand_lossage ("`B' operand has multiple bits set");
1672 fputs (IMMEDIATE_PREFIX, file);
1673 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1678 /* Print the symbol without a surrounding @fptr(). */
1679 if (GET_CODE (x) == SYMBOL_REF)
1680 assemble_name (file, XSTR (x, 0));
1681 else if (GET_CODE (x) == LABEL_REF)
1682 output_asm_label (x);
1684 xstormy16_print_operand_address (file, x);
1689 /* Print the immediate operand less one, preceded by '#'.
1690 For 'O', negate it first. */
1692 HOST_WIDE_INT xx = 0;
1694 if (GET_CODE (x) == CONST_INT)
1697 output_operand_lossage ("`o' operand is not constant");
1702 fputs (IMMEDIATE_PREFIX, file);
1703 fprintf (file, HOST_WIDE_INT_PRINT_DEC, xx - 1);
1708 /* Handled below. */
1712 output_operand_lossage ("xstormy16_print_operand: unknown code");
/* No (or handled) modifier letter: print X generically.  */
1716 switch (GET_CODE (x))
1719 fputs (reg_names [REGNO (x)], file);
1723 xstormy16_print_operand_address (file, XEXP (x, 0));
1727 /* Some kind of constant or label; an immediate operand,
1728 so prefix it with '#' for the assembler. */
1729 fputs (IMMEDIATE_PREFIX, file);
1730 output_addr_const (file, x);
1738 /* Expander for the `casesi' pattern.
1739 INDEX is the index of the switch statement.
1740 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1741 to the first table entry.
1742 RANGE is the number of table entries.
1743 TABLE is an ADDR_VEC that is the jump table.
1744 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1745 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
/* Expand `casesi' (see block comment above): subtract LOWER_BOUND from
   INDEX, branch to DEFAULT_LABEL if out of range, then scale by the
   4-byte jump-table entry size and dispatch via tablejump_pcrel.  */
1749 xstormy16_expand_casesi (index, lower_bound, range, table, default_label)
1756 HOST_WIDE_INT range_i = INTVAL (range);
1759 /* This code uses 'br', so it can deal only with tables of size up to
/* ...8192 entries; larger switches are rejected with sorry().  */
1761 if (range_i >= 8192)
1762 sorry ("switch statement of size %lu entries too large",
1763 (unsigned long) range_i);
1765 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
/* Unsigned compare: a below-range index wraps to a huge value.  */
1767 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1769 int_index = gen_lowpart_common (HImode, index);
/* Shift left by 2: each table entry is a 4-byte jmpf.  */
1770 emit_insn (gen_ashlhi3 (int_index, int_index, GEN_INT (2)));
1771 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1774 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1775 instructions, without label or alignment or any other special
1776 constructs. We know that the previous instruction will be the
1777 `tablejump_pcrel' output above.
1779 TODO: it might be nice to output 'br' instructions if they could
/* Output jump TABLE (an ADDR_VEC) as a sequence of `jmpf' insns in the
   current function's section -- see block comment above.  LABEL is
   unused because the table directly follows the tablejump_pcrel.  */
1783 xstormy16_output_addr_vec (file, label, table)
1785 rtx label ATTRIBUTE_UNUSED;
1790 function_section (current_function_decl);
1792 vlen = XVECLEN (table, 0);
1793 for (idx = 0; idx < vlen; idx++)
1795 fputs ("\tjmpf ", file);
1796 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1802 /* Expander for the `call' patterns.
RETVAL is the RTL for the value returned by the call, or NULL for a
call to a void function.
DEST is the address to be called, normally a MEM.
COUNTER is not visibly used by the expander.
(The previous text here described INDEX/LOWER_BOUND/RANGE/TABLE --
a stray copy of the `casesi' expander's comment.)
/* Expand the `call' patterns.  RETVAL is the return-value RTX or NULL;
   DEST is the MEM naming the callee; COUNTER is not visibly used in
   this listing.  Emits a (possibly value-returning) CALL wrapped in a
   PARALLEL with a USE of a zeroed temporary for non-constant callees.  */
1813 xstormy16_expand_call (retval, dest, counter)
1819 enum machine_mode mode;
1821 if (GET_CODE (dest) != MEM)
1823 dest = XEXP (dest, 0);
/* Callee addresses that are neither constant nor a register must be
   forced into a register.  */
1825 if (! CONSTANT_P (dest)
1826 && GET_CODE (dest) != REG)
1827 dest = force_reg (Pmode, dest);
1832 mode = GET_MODE (retval);
1834 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
/* Value-returning call: (set retval (call ...)).  */
1837 call = gen_rtx_SET (VOIDmode, retval, call);
/* Indirect call: attach a USE of a zeroed scratch register --
   presumably required by the call-through-pointer insn pattern;
   TODO confirm against xstormy16.md.  */
1839 if (! CONSTANT_P (dest))
1841 temp = gen_reg_rtx (HImode);
1842 emit_move_insn (temp, const0_rtx);
1847 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1848 gen_rtx_USE (VOIDmode, temp)));
1849 emit_call_insn (call);
1852 /* Expanders for multiword computational operations. */
1854 /* Expander for arithmetic operations; emit insns to compute
1856 (set DEST (CODE:MODE SRC0 SRC1))
1858 using CARRY as a temporary. When CODE is COMPARE, a branch
1859 template is generated (this saves duplicating code in
1860 xstormy16_split_cbranch). */
/* Expand a multiword operation (set DEST (CODE:MODE SRC0 SRC1)) one
   HImode word at a time, threading CARRY between words; for COMPARE
   the final word emits a full branch template (see comment above).
   NOTE(review): listing is elided -- several case labels and
   statements between the visible lines are not shown.  */
1863 xstormy16_expand_arith (mode, code, dest, src0, src1, carry)
1864 enum machine_mode mode;
1871 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1877 rtx zero_reg = gen_reg_rtx (word_mode);
1878 emit_move_insn (zero_reg, src0);
/* Process words from least significant upward so the carry chains.  */
1882 for (i = 0; i < num_words; i++)
1884 rtx w_src0, w_src1, w_dest;
/* Extract word I of each operand.  */
1890 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1891 i * UNITS_PER_WORD);
1892 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1893 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
/* Addition: adding a zero word can still be needed for the carry;
   addchi4 consumes the carry, addchi5 consumes and produces it.  */
1899 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1903 insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
1905 insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
/* COMPARE on the last word: build (branch, subtract-with-borrow,
   clobber) as one PARALLEL so xstormy16_split_cbranch need not
   duplicate it.  */
1911 if (code == COMPARE && i == num_words - 1)
1913 rtx branch, sub, clobber, sub_1;
1915 sub_1 = gen_rtx_MINUS (HImode, w_src0,
1916 gen_rtx_ZERO_EXTEND (HImode, carry));
1917 sub = gen_rtx_SET (VOIDmode, w_dest,
1918 gen_rtx_MINUS (HImode, sub_1, w_src1));
1919 clobber = gen_rtx_CLOBBER (VOIDmode, carry);
1920 branch = gen_rtx_SET (VOIDmode, pc_rtx,
1921 gen_rtx_IF_THEN_ELSE (VOIDmode,
1927 insn = gen_rtx_PARALLEL (VOIDmode,
1928 gen_rtvec (3, branch, sub, clobber));
/* Subtraction words: subchi4 consumes borrow, subchi5 chains it.  */
1932 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1935 insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
1937 insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
/* Logical ops: a word of the identity constant (-1 for AND, 0
   otherwise) can be skipped or simplified per word.  */
1943 if (GET_CODE (w_src1) == CONST_INT
1944 && INTVAL (w_src1) == -(code == AND)
1947 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx (code, mode,
1952 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
1963 /* If we emit nothing, try_split() will think we failed. So emit
1964 something that does nothing and can be optimized away. */
1969 /* Return 1 if OP is a shift operator. */
/* Predicate: return 1 if OP is a shift rtx code (ASHIFT / LSHIFTRT;
   the middle alternative -- presumably ASHIFTRT -- is on an elided
   line).  MODE is ignored.  */
1972 shift_operator (op, mode)
1974 enum machine_mode mode ATTRIBUTE_UNUSED;
1976 enum rtx_code code = GET_CODE (op);
1978 return (code == ASHIFT
1980 || code == LSHIFTRT);
1983 /* The shift operations are split at output time for constant values;
1984 variable-width shifts get handed off to a library routine.
1986 Generate an output string to do (set X (CODE:MODE X SIZE_R))
1987 SIZE_R will be a CONST_INT, X will be a hard register. */
/* Build the assembler string for a constant 32-bit shift
   (set X (CODE:MODE X SIZE_R)) -- see block comment above.  X is an
   even/odd hard register pair (r0 = low word, r1 = high word); TEMP
   is a scratch register used for the general case.  The string is
   assembled into a static buffer `r' (declaration elided).  */
1990 xstormy16_output_shift (mode, code, x, size_r, temp)
1991 enum machine_mode mode;
1998 const char *r0, *r1, *rt;
/* Sanity-check the operands (abort path elided).  */
2001 if (GET_CODE (size_r) != CONST_INT
2002 || GET_CODE (x) != REG
/* Reduce the count modulo the operand width.  */
2005 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2010 r0 = reg_names [REGNO (x)];
2011 r1 = reg_names [REGNO (x) + 1];
2013 /* For shifts of size 1, we can use the rotate instructions. */
2019 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2022 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2025 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2033 /* For large shifts, there are easy special cases. */
/* Shift by exactly 16: a register-to-register move plus a fill
   (zero, or sign replication via asr #15).  */
2039 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2042 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2045 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
/* Shift by more than 16: move, fill, then shift the remainder.  */
2057 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2058 r1, r0, r0, r1, (int) size - 16);
2061 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2062 r0, r1, r1, r0, (int) size - 16);
2065 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2066 r0, r1, r1, r0, (int) size - 16);
2074 /* For the rest, we have to do more work. In particular, we
2075 need a temporary. */
/* General 1 < size < 16 case: save the bits that cross the word
   boundary in TEMP, shift both words, then OR the saved bits in.  */
2076 rt = reg_names [REGNO (temp)];
2081 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2082 rt, r0, r0, (int) size, r1, (int) size, rt, (int) 16-size,
2087 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2088 rt, r1, r1, (int) size, r0, (int) size, rt, (int) 16-size,
2093 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2094 rt, r1, r1, (int) size, r0, (int) size, rt, (int) 16-size,
2103 /* Attribute handling. */
2105 /* Return nonzero if the function is an interrupt function. */
/* Return nonzero if the current function carries the "interrupt"
   type attribute.  */
2107 xstormy16_interrupt_function_p ()
2111 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2112 any functions are declared, which is demonstrably wrong, but
2113 it is worked around here. FIXME. */
2117 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2118 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
/* Machine-specific attribute table: "interrupt" takes no arguments
   and applies to function types only (decl_req false, type_req and
   fn_type_req true).  The NULL entry terminates the table.  */
2121 #undef TARGET_ATTRIBUTE_TABLE
2122 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2123 static tree xstormy16_handle_interrupt_attribute PARAMS ((tree *, tree, tree, int, bool *));
2124 static const struct attribute_spec xstormy16_attribute_table[] =
2126 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2127 { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
2128 { NULL, 0, 0, false, false, false, NULL }
2131 /* Handle an "interrupt" attribute;
2132 arguments as in struct attribute_spec.handler. */
/* Handler for the "interrupt" attribute (arguments per struct
   attribute_spec.handler): warn and discard the attribute if NODE is
   not a function type.  */
2134 xstormy16_handle_interrupt_attribute (node, name, args, flags, no_add_attrs)
2137 tree args ATTRIBUTE_UNUSED;
2138 int flags ATTRIBUTE_UNUSED;
2141 if (TREE_CODE (*node) != FUNCTION_TYPE)
2143 warning ("`%s' attribute only applies to functions",
2144 IDENTIFIER_POINTER (name));
2145 *no_add_attrs = true;
2151 #undef TARGET_INIT_BUILTINS
2152 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2153 #undef TARGET_EXPAND_BUILTIN
2154 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
/* Table describing the machine builtins (struct header is on elided
   lines).  arg_ops maps insn operands to builtin arguments: digits
   0-9 are argument positions, `t' a scratch temp, `r' the return
   value.  arg_types encodes the C signature as result-then-arguments:
   s/l = short/long, upper case = unsigned.  div and mod share one
   divmod insn, differing only in which output is the return value.  */
2159 const char *arg_ops; /* 0..9, t for temp register, r for return value */
2160 const char *arg_types; /* s=short,l=long, upper case for unsigned */
2162 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2163 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2164 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2165 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
/* Implement TARGET_INIT_BUILTINS: register one builtin_function per
   s16builtins entry, building its argument list from the arg_types
   string (first character is the return type; elided lines presumably
   set ret_type from it -- TODO confirm).  */
2170 xstormy16_init_builtins ()
2172 tree args, ret_type, arg;
2175 ret_type = void_type_node;
2177 for (i=0; s16builtins[i].name; i++)
/* Build the parameter list right-to-left, terminated by
   void_list_node.  */
2179 args = void_list_node;
2180 for (a=strlen (s16builtins[i].arg_types)-1; a>=0; a--)
2182 switch (s16builtins[i].arg_types[a])
2184 case 's': arg = short_integer_type_node; break;
2185 case 'S': arg = short_unsigned_type_node; break;
2186 case 'l': arg = long_integer_type_node; break;
2187 case 'L': arg = long_unsigned_type_node; break;
2193 args = tree_cons (NULL_TREE, arg, args);
/* The table index doubles as the DECL_FUNCTION_CODE.  */
2195 builtin_function (s16builtins[i].name,
2196 build_function_type (ret_type, args),
2197 i, BUILT_IN_MD, NULL, NULL);
/* Implement TARGET_EXPAND_BUILTIN: expand CALL_EXPR EXP for one of the
   s16builtins entries by wiring expanded arguments, scratch temps and
   the return value to the insn's operands per the arg_ops string, then
   emitting the insn.  TARGET is a suggested result location.
   NOTE(review): listing is elided; some statements (e.g. recording
   copyto[] and retval) are not shown.  */
2202 xstormy16_expand_builtin(exp, target, subtarget, mode, ignore)
2205 rtx subtarget ATTRIBUTE_UNUSED;
2206 enum machine_mode mode ATTRIBUTE_UNUSED;
2207 int ignore ATTRIBUTE_UNUSED;
2209 rtx op[10], args[10], pat, copyto[10], retval = 0;
2210 tree fndecl, argtree;
/* Recover which builtin this is: DECL_FUNCTION_CODE indexes
   s16builtins (see xstormy16_init_builtins).  */
2213 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2214 argtree = TREE_OPERAND (exp, 1);
2215 i = DECL_FUNCTION_CODE (fndecl);
2216 code = s16builtins[i].md_code;
/* Expand up to 10 actual arguments.  */
2218 for (a = 0; a < 10 && argtree; a++)
2220 args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
2221 argtree = TREE_CHAIN (argtree);
/* Assign each insn operand from the arg_ops spec: `r' = result
   (reusing TARGET when possible), `t' = fresh temp, digit = the
   corresponding expanded argument.  */
2224 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2226 char ao = s16builtins[i].arg_ops[o];
2227 char c = insn_data[code].operand[o].constraint[0];
2232 omode = insn_data[code].operand[o].mode;
2234 op[o] = target ? target : gen_reg_rtx (omode);
2236 op[o] = gen_reg_rtx (omode);
2238 op[o] = args[(int) hex_value (ao)];
/* If the operand fails the predicate, either write to a fresh
   register and copy back afterwards (output operands, constraint
   '+'/'='), or copy the value into a register first (inputs).  */
2240 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2242 if (c == '+' || c == '=')
2245 op[o] = gen_reg_rtx (omode);
2248 op[o] = copy_to_mode_reg (omode, op[o]);
2255 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2256 op[5], op[6], op[7], op[8], op[9]);
/* Flush any deferred output copies and note the return value.  */
2259 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2262 emit_move_insn (copyto[o], op[o]);
2263 if (op[o] == retval)
/* Remaining target hook definitions and the target vector itself.
   .hword/.word are the assembler directives for aligned HI/SI data;
   vcall offsets in thunks are not supported (see
   xstormy16_asm_output_mi_thunk above).  */
2271 #undef TARGET_ASM_ALIGNED_HI_OP
2272 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2273 #undef TARGET_ASM_ALIGNED_SI_OP
2274 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2276 #undef TARGET_ASM_OUTPUT_MI_THUNK
2277 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2278 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2279 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2281 #undef TARGET_RTX_COSTS
2282 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2283 #undef TARGET_ADDRESS_COST
2284 #define TARGET_ADDRESS_COST xstormy16_address_cost
/* The single instance of the target hook vector for this back end.  */
2286 struct gcc_target targetm = TARGET_INITIALIZER;