1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
46 #include "target-def.h"
48 #include "langhooks.h"
/* Forward declarations for the static helpers and target hooks defined
   later in this file.  NOTE(review): the extraction skips original lines
   here (e.g. 55, 60), so additional prototypes may exist in the full
   source — verify before relying on this list being complete.  */
50 static rtx emit_addhi3_postreload PARAMS ((rtx, rtx, rtx));
51 static void xstormy16_asm_out_constructor PARAMS ((rtx, int));
52 static void xstormy16_asm_out_destructor PARAMS ((rtx, int));
53 static void xstormy16_asm_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
54 HOST_WIDE_INT, tree));
56 static void xstormy16_init_builtins PARAMS ((void));
57 static rtx xstormy16_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
58 static bool xstormy16_rtx_costs PARAMS ((rtx, int, int, int *));
59 static int xstormy16_address_cost PARAMS ((rtx));
61 /* Define the information needed to generate branch and scc insns. This is
62 stored from the compare operation. */
63 struct rtx_def * xstormy16_compare_op0; /* First operand of the pending compare.  */
64 struct rtx_def * xstormy16_compare_op1; /* Second operand of the pending compare.  */
66 /* Return 1 if this is a LT, GE, LTU, or GEU operator. */
/* Predicate: nonzero iff OP is one of the "inequality" comparison codes
   LT, GE, LTU or GEU, and its mode matches MODE (VOIDmode matches any).
   NOTE(review): this extraction drops lines (return type, `rtx op;`
   declaration, braces) — check the complete definition in the original.  */
69 xstormy16_ineqsi_operator (op, mode)
71 enum machine_mode mode;
73 enum rtx_code code = GET_CODE (op);
75 return ((mode == VOIDmode || GET_MODE (op) == mode)
76 && (code == LT || code == GE || code == LTU || code == GEU));
79 /* Return 1 if this is an EQ or NE operator. */
/* Predicate: nonzero iff OP is an EQ or NE comparison whose mode matches
   MODE (VOIDmode matches any mode).  NOTE(review): extraction is missing
   the return-type line, `rtx op;` and braces.  */
82 equality_operator (op, mode)
84 enum machine_mode mode;
86 return ((mode == VOIDmode || GET_MODE (op) == mode)
87 && (GET_CODE (op) == EQ || GET_CODE (op) == NE));
90 /* Return 1 if this is a comparison operator but not an EQ or NE operator. */
/* Predicate: nonzero iff OP is a comparison operator other than EQ/NE,
   i.e. any ordered comparison.  Defined in terms of the generic
   comparison_operator predicate minus equality_operator above.  */
93 inequality_operator (op, mode)
95 enum machine_mode mode;
97 return comparison_operator (op, mode) && ! equality_operator (op, mode);
100 /* Compute a (partial) cost for rtx X. Return true if the complete
101 cost has been computed, and false if subexpressions should be
102 scanned. In either case, *TOTAL contains the cost result. */
/* TARGET_RTX_COSTS hook: store the cost of rtx X into *TOTAL.
   Small non-negative constants are cheapest (half an insn for 0..15,
   one insn for 0..255), everything else costs more.
   NOTE(review): the extraction has dropped the enclosing `switch (code)`
   skeleton, case labels and braces between these lines — the 35+6 / 51-6
   costs presumably belong to MULT/DIV-like cases; confirm against the
   full source before editing.  */
105 xstormy16_rtx_costs (x, code, outer_code, total)
107 int code, outer_code ATTRIBUTE_UNUSED;
113 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
114 *total = COSTS_N_INSNS (1) / 2;
115 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
116 *total = COSTS_N_INSNS (1);
118 *total = COSTS_N_INSNS (2);
125 *total = COSTS_N_INSNS(2);
129 *total = COSTS_N_INSNS (35 + 6);
132 *total = COSTS_N_INSNS (51 - 6);
/* TARGET_ADDRESS_COST hook: plain constant addresses are cheapest (2),
   base+offset costs 7; the cost of the remaining address forms is on a
   line dropped by this extraction.  */
141 xstormy16_address_cost (x)
144 return (GET_CODE (x) == CONST_INT ? 2
145 : GET_CODE (x) == PLUS ? 7
149 /* Branches are handled as follows:
151 1. HImode compare-and-branches. The machine supports these
152 natively, so the appropriate pattern is emitted directly.
154 2. SImode EQ and NE. These are emitted as pairs of HImode
155 compare-and-branches.
157 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
158 of a SImode subtract followed by a branch (not a compare-and-branch),
164 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
172 /* Emit a branch of kind CODE to location LOC. */
/* Emit a conditional branch of kind CODE to label LOC, comparing the
   globals xstormy16_compare_op0/op1 (set by the cmpXX expanders).
   HImode compares map directly to compare-and-branch insns; SImode
   compares are decomposed per the strategy comment above (word-wise
   EQ/NE pairs, or a subtract feeding the carry for the ordered codes).
   Recurses for the composite cases.
   NOTE(review): several lines (declarations of lab/i/vec/sub/tmp,
   braces, else-arms) are missing from this extraction.  */
175 xstormy16_emit_cbranch (code, loc)
179 rtx op0 = xstormy16_compare_op0;
180 rtx op1 = xstormy16_compare_op1;
181 rtx condition_rtx, loc_ref, branch, cy_clobber;
183 enum machine_mode mode;
185 mode = GET_MODE (op0);
186 if (mode != HImode && mode != SImode)
/* Case 4: SImode GT/LE/GTU/LEU — synthesize from an LT-style branch
   plus an EQ/NE-style branch through a local label.  */
190 && (code == GT || code == LE || code == GTU || code == LEU))
192 int unsigned_p = (code == GTU || code == LEU);
193 int gt_p = (code == GT || code == GTU);
197 lab = gen_label_rtx ();
198 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
199 /* This should be generated as a comparison against the temporary
200 created by the previous insn, but reload can't handle that. */
201 xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
/* Case 2: SImode EQ/NE against a nonzero value — compare word by word.  */
206 else if (mode == SImode
207 && (code == NE || code == EQ)
208 && op1 != const0_rtx)
211 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
215 lab = gen_label_rtx ();
217 for (i = 0; i < num_words - 1; i++)
219 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
221 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
223 xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
225 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
227 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
229 xstormy16_emit_cbranch (code, loc);
236 /* We can't allow reload to try to generate any reload after a branch,
237 so when some register must match we must make the temporary ourselves. */
241 tmp = gen_reg_rtx (mode);
242 emit_move_insn (tmp, op0);
/* Build the branch pattern itself: (set pc (if_then_else cond ...)),
   bundled in a PARALLEL with the clobbers the md patterns expect.  */
246 condition_rtx = gen_rtx (code, mode, op0, op1);
247 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
248 branch = gen_rtx_SET (VOIDmode, pc_rtx,
249 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
252 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));
255 vec = gen_rtvec (2, branch, cy_clobber);
256 else if (code == NE || code == EQ)
257 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
/* SImode ordered compare: pair the branch with the SImode subtract
   (or a plain clobber of op0) plus the carry clobber.  */
262 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
264 sub = gen_rtx_CLOBBER (SImode, op0);
266 vec = gen_rtvec (3, branch, sub, cy_clobber);
269 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
272 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
273 the arithmetic operation. Most of the work is done by
274 xstormy16_expand_arith. */
/* Split a SImode GT/LE/GTU/LEU conditional branch: let
   xstormy16_expand_arith emit the arithmetic, then patch the final
   emitted insn so its compare tests COMPARISON's code and targets LABEL.
   NOTE(review): lines declaring last_insn/compare and the sequence
   start/end bookkeeping are missing from this extraction.  */
277 xstormy16_split_cbranch (mode, label, comparison, dest, carry)
278 enum machine_mode mode;
284 rtx op0 = XEXP (comparison, 0);
285 rtx op1 = XEXP (comparison, 1);
290 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
/* Walk to the last insn emitted by the expander above.  */
298 while (NEXT_INSN (last_insn) != NULL_RTX)
299 last_insn = NEXT_INSN (last_insn);
/* Rewrite that insn in place: give its condition the caller's
   comparison code and point the branch at LABEL.  */
301 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
302 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
303 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
308 /* Return the string to output a conditional branch to LABEL, which is
309 the operand number of the label.
311 OP is the conditional expression, or NULL for branch-always.
313 REVERSED is nonzero if we should reverse the sense of the comparison.
/* Return the assembler template for an HImode conditional (or
   unconditional, when OP is NULL) branch to LABEL.  REVERSED inverts the
   sense; when the branch target is out of short-branch range (detected
   via get_attr_length on INSN) the condition is inverted again and a
   `jmpf` long jump is emitted past a short branch.  Returns a pointer
   to a static buffer, so the result must be consumed before the next
   call.  NOTE(review): lines for the OP==NULL early path, the ccode
   declaration, operand formatting and the switch/default skeleton are
   missing from this extraction.  */
318 xstormy16_output_cbranch_hi (op, label, reversed, insn)
324 static char string[64];
325 int need_longbranch = (op != NULL_RTX
326 ? get_attr_length (insn) == 8
327 : get_attr_length (insn) == 4);
328 int really_reversed = reversed ^ need_longbranch;
330 const char *template;
331 const char *operands;
340 sprintf (string, "%s %s", ccode, label);
344 code = GET_CODE (op);
/* Hardware wants a register as the first compare operand; if it is
   not, swap operands and the condition to match.  */
346 if (GET_CODE (XEXP (op, 0)) != REG)
348 code = swap_condition (code);
354 /* Work out which way this really branches. */
356 code = reverse_condition (code);
/* Map the rtx comparison code to the chip's condition-code mnemonic.  */
360 case EQ: ccode = "z"; break;
361 case NE: ccode = "nz"; break;
362 case GE: ccode = "ge"; break;
363 case LT: ccode = "lt"; break;
364 case GT: ccode = "gt"; break;
365 case LE: ccode = "le"; break;
366 case GEU: ccode = "nc"; break;
367 case LTU: ccode = "c"; break;
368 case GTU: ccode = "hi"; break;
369 case LEU: ccode = "ls"; break;
/* Long form: short branch over a far jump; otherwise a direct branch.  */
376 template = "b%s %s,.+8 | jmpf %s";
378 template = "b%s %s,%s";
379 sprintf (string, template, ccode, operands, label);
384 /* Return the string to output a conditional branch to LABEL, which is
385 the operand number of the label, but suitable for the tail of a
388 OP is the conditional expression (OP is never NULL_RTX).
390 REVERSED is nonzero if we should reverse the sense of the comparison.
/* Return the assembler template for the tail of a split SImode
   conditional branch to LABEL.  OP is never NULL here.  As in the
   HImode variant, an out-of-range target inverts the condition and
   routes through `jmpf`; the result lives in a static buffer.  Only
   EQ/NE/GE/LT/GEU/LTU can reach this point — GT/LE/GTU/LEU were
   decomposed earlier.  NOTE(review): the prevop/ccode/regnum
   declarations, the EQ/NE case body's surrounding lines and the
   switch skeletons are missing from this extraction.  */
395 xstormy16_output_cbranch_si (op, label, reversed, insn)
401 static char string[64];
402 int need_longbranch = get_attr_length (insn) >= 8;
403 int really_reversed = reversed ^ need_longbranch;
405 const char *template;
409 code = GET_CODE (op);
411 /* Work out which way this really branches. */
413 code = reverse_condition (code);
417 case EQ: ccode = "z"; break;
418 case NE: ccode = "nz"; break;
419 case GE: ccode = "ge"; break;
420 case LT: ccode = "lt"; break;
421 case GEU: ccode = "nc"; break;
422 case LTU: ccode = "c"; break;
424 /* The missing codes above should never be generated. */
/* EQ/NE: OR the two halves of the register pair so the Z flag
   reflects the whole 32-bit value.  */
435 if (GET_CODE (XEXP (op, 0)) != REG)
438 regnum = REGNO (XEXP (op, 0));
439 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
/* Ordered compares: finish the 32-bit subtract with sbc to set carry.  */
443 case GE: case LT: case GEU: case LTU:
444 strcpy (prevop, "sbc %2,%3");
452 template = "%s | b%s .+6 | jmpf %s";
454 template = "%s | b%s %s";
455 sprintf (string, template, prevop, ccode, label);
460 /* Many machines have some registers that cannot be copied directly to or from
461 memory or even from other types of registers. An example is the `MQ'
462 register, which on most machines, can only be copied to or from general
463 registers, but not memory. Some machines allow copying all registers to and
464 from memory, but require a scratch register for stores to some memory
465 locations (e.g., those with symbolic address on the RT, and those with
466 certain symbolic address on the SPARC when compiling PIC). In some cases,
467 both an intermediate and a scratch register are required.
469 You should define these macros to indicate to the reload phase that it may
470 need to allocate at least one register for a reload in addition to the
471 register to contain the data. Specifically, if copying X to a register
472 CLASS in MODE requires an intermediate register, you should define
473 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
474 whose registers can be used as intermediate registers or scratch registers.
476 If copying a register CLASS in MODE to X requires an intermediate or scratch
477 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
478 largest register class required. If the requirements for input and output
479 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
480 instead of defining both macros identically.
482 The values returned by these macros are often `GENERAL_REGS'. Return
483 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
484 to or from a register of CLASS in MODE without requiring a scratch register.
485 Do not define this macro if it would always return `NO_REGS'.
487 If a scratch register is required (either with or without an intermediate
488 register), you should define patterns for `reload_inM' or `reload_outM', as
489 required.. These patterns, which will normally be implemented with a
490 `define_expand', should be similar to the `movM' patterns, except that
491 operand 2 is the scratch register.
493 Define constraints for the reload register and scratch register that contain
494 a single register class. If the original reload register (whose class is
495 CLASS) can meet the constraint given in the pattern, the value returned by
496 these macros is used for the class of the scratch register. Otherwise, two
497 additional reload registers are required. Their classes are obtained from
498 the constraints in the insn pattern.
500 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
501 either be in a hard register or in memory. Use `true_regnum' to find out;
502 it will return -1 if the pseudo is in memory and the hard register number if
505 These macros should not be used in the case where a particular class of
506 registers can only be copied to memory and not to another class of
507 registers. In that case, secondary reload registers are not needed and
508 would not be helpful. Instead, a stack location must be used to perform the
509 copy and the `movM' pattern should use memory as an intermediate storage.
510 This case often occurs between floating-point and general registers. */
/* Implement SECONDARY_RELOAD_CLASS (see the long commentary above):
   return the class of scratch/intermediate register needed to copy X
   to/from CLASS in MODE, or NO_REGS when none is needed.
   NOTE(review): the return-statement lines for both conditions are
   missing from this extraction — presumably EIGHT_REGS and CARRY_REGS
   respectively; confirm against the full source.  */
513 xstormy16_secondary_reload_class (class, mode, x)
514 enum reg_class class;
515 enum machine_mode mode;
518 /* This chip has the interesting property that only the first eight
519 registers can be moved to/from memory. */
520 if ((GET_CODE (x) == MEM
521 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
522 && (true_regnum (x) == -1
523 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
524 && ! reg_class_subset_p (class, EIGHT_REGS))
527 /* When reloading a PLUS, the carry register will be required
528 unless the inc or dec instructions can be used. */
529 if (xstormy16_carry_plus_operand (x, mode))
535 /* Recognize a PLUS that needs the carry register. */
/* Predicate: nonzero iff X is a PLUS of a constant that cannot be done
   with inc/dec (offset outside [-4, 4]) and therefore needs the carry
   register.  MODE is ignored.  */
537 xstormy16_carry_plus_operand (x, mode)
539 enum machine_mode mode ATTRIBUTE_UNUSED;
541 return (GET_CODE (x) == PLUS
542 && GET_CODE (XEXP (x, 1)) == CONST_INT
543 && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4));
546 /* Detect and error out on out-of-range constants for movhi. */
/* Predicate wrapper around general_operand that additionally reports an
   error for CONST_INTs outside the 16-bit signed range, catching
   out-of-range movhi immediates at compile time.
   NOTE(review): the message violates GNU diagnostic conventions
   (capitalized, trailing period) — a candidate cleanup, but changing a
   runtime string is out of scope for this comment-only pass.  */
548 xs_hi_general_operand (x, mode)
550 enum machine_mode mode ATTRIBUTE_UNUSED;
552 if ((GET_CODE (x) == CONST_INT)
553 && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
554 error ("Constant halfword load operand out of range.");
555 return general_operand (x, mode);
558 /* Detect and error out on out-of-range constants for addhi and subhi. */
/* Predicate wrapper around nonmemory_operand that errors out on
   CONST_INTs outside the 16-bit signed range, for addhi/subhi.
   NOTE(review): same diagnostic-style issue as xs_hi_general_operand.  */
560 xs_hi_nonmemory_operand (x, mode)
562 enum machine_mode mode ATTRIBUTE_UNUSED;
564 if ((GET_CODE (x) == CONST_INT)
565 && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
566 error ("Constant arithmetic operand out of range.");
567 return nonmemory_operand (x, mode);
/* Implement PREFERRED_RELOAD_CLASS: when reloading X (a MEM) into
   GENERAL_REGS, narrow the class — the return lines are missing from
   this extraction (presumably EIGHT_REGS, since only r0-r7 can be
   moved to/from memory; confirm against the full source).  */
571 xstormy16_preferred_reload_class (x, class)
572 enum reg_class class;
575 if (class == GENERAL_REGS
576 && GET_CODE (x) == MEM)
/* Nonzero iff X is a CONST_INT usable as a base+offset displacement once
   OFFSET is added: the sum must fit in the signed 12-bit range
   [-2048, 2047] (checked via one unsigned comparison).  */
582 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
583 (GET_CODE (X) == CONST_INT \
584 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
/* Nonzero iff X+OFFSET is a CONST_INT usable as an absolute address:
   within [0, 0x8000) and additionally in the low page (< 0x100) or the
   high I/O page (>= 0x7F00).  */
586 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
587 (GET_CODE (X) == CONST_INT \
588 && INTVAL (X) + (OFFSET) >= 0 \
589 && INTVAL (X) + (OFFSET) < 0x8000 \
590 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
/* Implement GO_IF_LEGITIMATE_ADDRESS's worker: nonzero iff X is a valid
   memory address — an absolute constant, base+small-offset, a
   PRE_MODIFY with constant adjustment / POST_INC / PRE_DEC, or a base
   register (hard register required when STRICT).
   NOTE(review): the `return 1;` lines after each test and the base-reg
   check inside PRE_MODIFY are missing from this extraction.  */
593 xstormy16_legitimate_address_p (mode, x, strict)
594 enum machine_mode mode ATTRIBUTE_UNUSED;
598 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
601 if (GET_CODE (x) == PLUS
602 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
605 if ((GET_CODE (x) == PRE_MODIFY
606 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
607 || GET_CODE (x) == POST_INC
608 || GET_CODE (x) == PRE_DEC)
611 if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
612 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
618 /* Return nonzero if memory address X (an RTX) can have different
619 meanings depending on the machine mode of the memory reference it
620 is used for or if the address is valid for some modes but not
623 Autoincrement and autodecrement addresses typically have mode-dependent
624 effects because the amount of the increment or decrement is the size of the
625 operand being addressed. Some machines have other mode-dependent addresses.
626 Many RISC machines have no mode-dependent addresses.
628 You may assume that ADDR is a valid address for the machine.
630 On this chip, this is true if the address is valid with an offset
631 of 0 but not of 6, because in that case it cannot be used as an
632 address for DImode or DFmode, or if the address is a post-increment
633 or pre-decrement address. */
/* Nonzero iff address X is mode-dependent (see commentary above): valid
   at offset 0 but not at offset 6 (so unusable for DImode/DFmode), or an
   autoincrement/autodecrement form.  NOTE(review): the `return 1;` /
   `return 0;` lines are missing from this extraction.  */
635 xstormy16_mode_dependent_address_p (x)
638 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
639 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
642 if (GET_CODE (x) == PLUS
643 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
644 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
647 if (GET_CODE (x) == PLUS)
650 if (GET_CODE (x) == POST_INC
651 || GET_CODE (x) == PRE_DEC)
657 /* A C expression that defines the optional machine-dependent constraint
658 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
659 types of operands, usually memory references, for the target machine.
660 Normally this macro will not be defined. If it is required for a particular
661 target machine, it should return 1 if VALUE corresponds to the operand type
662 represented by the constraint letter C. If C is not defined as an extra
663 constraint, the value returned should be 0 regardless of VALUE. */
/* Implement EXTRA_CONSTRAINT: test operand X against machine-specific
   constraint letter C.  'Q' = push MEM, 'R' = pop MEM, 'S' = immediate
   memory address, 'T' unimplemented, 'U' = CONST_INT outside [2, 15].
   NOTE(review): the switch skeleton and case labels are missing from
   this extraction.  */
665 xstormy16_extra_constraint_p (x, c)
671 /* 'Q' is for pushes. */
673 return (GET_CODE (x) == MEM
674 && GET_CODE (XEXP (x, 0)) == POST_INC
675 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
677 /* 'R' is for pops. */
679 return (GET_CODE (x) == MEM
680 && GET_CODE (XEXP (x, 0)) == PRE_DEC
681 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
683 /* 'S' is for immediate memory addresses. */
685 return (GET_CODE (x) == MEM
686 && GET_CODE (XEXP (x, 0)) == CONST_INT
687 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
691 /* Not implemented yet. */
694 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
695 for allocating a scratch register for 32-bit shifts. */
697 return (GET_CODE (x) == CONST_INT
698 && (INTVAL (x) < 2 || INTVAL (x) > 15));
/* Predicate: a memory operand whose address is NOT base+offset —
   i.e. one encodable in a short memory-access form.  */
706 short_memory_operand (x, mode)
708 enum machine_mode mode;
710 if (! memory_operand (x, mode))
712 return (GET_CODE (XEXP (x, 0)) != PLUS);
/* Predicate: a nonimmediate operand that is neither a push ('Q') nor a
   pop ('R') of the stack pointer.  */
716 nonimmediate_nonstack_operand (op, mode)
718 enum machine_mode mode;
720 /* 'Q' is for pushes, 'R' for pops. */
721 return (nonimmediate_operand (op, mode)
722 && ! xstormy16_extra_constraint_p (op, 'Q')
723 && ! xstormy16_extra_constraint_p (op, 'R'));
726 /* Splitter for the 'move' patterns, for modes not directly implemeted
727 by hardware. Emit insns to copy a value of mode MODE from SRC to
730 This function is only called when reload_completed.
/* Split a multi-word move of MODE from SRC to DEST into word-sized
   moves, choosing the copy direction so that overlapping register pairs
   and auto-modified addresses come out right.  Only called after
   reload, for modes wider than HImode, reg<->reg or one-sided memory
   moves with no SUBREGs.  NOTE(review): many lines — abort() calls,
   `else` arms, `direction = ...` assignments, the mem_operand/regno
   declarations, and parts of the loop body — are missing from this
   extraction; the comments below are best-effort reconstructions.  */
734 xstormy16_split_move (mode, dest, src)
735 enum machine_mode mode;
739 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
740 int direction, end, i;
741 int src_modifies = 0;
742 int dest_modifies = 0;
743 int src_volatile = 0;
744 int dest_volatile = 0;
746 rtx auto_inc_reg_rtx = NULL_RTX;
748 /* Check initial conditions. */
749 if (! reload_completed
750 || mode == QImode || mode == HImode
751 || ! nonimmediate_operand (dest, mode)
752 || ! general_operand (src, mode))
755 /* This case is not supported below, and shouldn't be generated. */
756 if (GET_CODE (dest) == MEM
757 && GET_CODE (src) == MEM)
760 /* This case is very very bad after reload, so trap it now. */
761 if (GET_CODE (dest) == SUBREG
762 || GET_CODE (src) == SUBREG)
765 /* The general idea is to copy by words, offsetting the source and
766 destination. Normally the least-significant word will be copied
767 first, but for pre-dec operations it's better to copy the
768 most-significant word first. Only one operand can be a pre-dec
771 It's also possible that the copy overlaps so that the direction
/* Record side effects and volatility of the memory side, stripping
   MEM_VOLATILE_P from the working copy so the per-word MEMs can carry
   it explicitly.  */
775 if (GET_CODE (dest) == MEM)
777 mem_operand = XEXP (dest, 0);
778 dest_modifies = side_effects_p (mem_operand);
779 if (auto_inc_p (mem_operand))
780 auto_inc_reg_rtx = XEXP (mem_operand, 0);
781 dest_volatile = MEM_VOLATILE_P (dest);
784 dest = copy_rtx (dest);
785 MEM_VOLATILE_P (dest) = 0;
788 else if (GET_CODE (src) == MEM)
790 mem_operand = XEXP (src, 0);
791 src_modifies = side_effects_p (mem_operand);
792 if (auto_inc_p (mem_operand))
793 auto_inc_reg_rtx = XEXP (mem_operand, 0);
794 src_volatile = MEM_VOLATILE_P (src);
797 src = copy_rtx (src);
798 MEM_VOLATILE_P (src) = 0;
802 mem_operand = NULL_RTX;
/* Choose the copy direction.  */
804 if (mem_operand == NULL_RTX)
806 if (GET_CODE (src) == REG
807 && GET_CODE (dest) == REG
808 && reg_overlap_mentioned_p (dest, src)
809 && REGNO (dest) > REGNO (src))
812 else if (GET_CODE (mem_operand) == PRE_DEC
813 || (GET_CODE (mem_operand) == PLUS
814 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
816 else if (GET_CODE (src) == MEM
817 && reg_overlap_mentioned_p (dest, src))
820 if (GET_CODE (dest) != REG)
822 regno = REGNO (dest);
824 if (! refers_to_regno_p (regno, regno + num_words, mem_operand, 0))
827 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
829 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
833 /* This means something like
834 (set (reg:DI r0) (mem:DI (reg:HI r1)))
835 which we'd need to support by doing the set of the second word
/* Emit one word-sized SET per iteration, in the chosen direction,
   propagating volatility and attaching REG_INC notes for any
   auto-modified address register.  */
840 end = direction < 0 ? -1 : num_words;
841 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
843 rtx w_src, w_dest, insn;
846 w_src = gen_rtx_MEM (word_mode, mem_operand);
848 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
850 MEM_VOLATILE_P (w_src) = 1;
852 w_dest = gen_rtx_MEM (word_mode, mem_operand);
854 w_dest = simplify_gen_subreg (word_mode, dest, mode,
857 MEM_VOLATILE_P (w_dest) = 1;
859 /* The simplify_subreg calls must always be able to simplify. */
860 if (GET_CODE (w_src) == SUBREG
861 || GET_CODE (w_dest) == SUBREG)
864 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
865 if (auto_inc_reg_rtx)
866 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
872 /* Expander for the 'move' patterns. Emit insns to copy a value of
873 mode MODE from SRC to DEST. */
/* Expander for the 'move' patterns: emit insns copying MODE from SRC to
   DEST.  PRE_MODIFY addresses are lowered to an explicit add (clobbering
   the carry, hard reg 16) plus a plain MEM; immediates headed to
   awkward memory destinations are forced into a register first; wide
   modes are split immediately rather than emitted and re-split.
   NOTE(review): the condition line before the split call and some
   closing braces are missing from this extraction.  */
876 xstormy16_expand_move (mode, dest, src)
877 enum machine_mode mode;
/* Lower (mem (pre_modify reg, expr)) on the destination side: emit the
   address update as a separate add-with-carry-clobber insn.  */
881 if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
883 rtx pmv = XEXP (dest, 0);
884 rtx dest_reg = XEXP (pmv, 0);
885 rtx dest_mod = XEXP (pmv, 1);
886 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
887 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
889 dest = gen_rtx_MEM (mode, dest_reg);
890 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
/* Same lowering for a PRE_MODIFY on the source side.  */
892 else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
894 rtx pmv = XEXP (src, 0);
895 rtx src_reg = XEXP (pmv, 0);
896 rtx src_mod = XEXP (pmv, 1);
897 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
898 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
900 src = gen_rtx_MEM (mode, src_reg);
901 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
904 /* There are only limited immediate-to-memory move instructions. */
905 if (! reload_in_progress
906 && ! reload_completed
907 && GET_CODE (dest) == MEM
908 && (GET_CODE (XEXP (dest, 0)) != CONST_INT
909 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
910 && GET_CODE (src) != REG
911 && GET_CODE (src) != SUBREG)
912 src = copy_to_mode_reg (mode, src);
914 /* Don't emit something we would immediately split. */
916 && mode != HImode && mode != QImode)
918 xstormy16_split_move (mode, dest, src);
922 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
928 The stack is laid out as follows:
932 Register save area (up to 4 words)
933 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
935 AP-> Return address (two words)
936 9th procedure parameter word
937 10th procedure parameter word
939 last procedure parameter word
941 The frame pointer location is tuned to make it most likely that all
942 parameters and local variables can be accessed using a load-indexed
945 /* A structure to describe the layout. */
946 struct xstormy16_stack_layout
948 /* Size of the topmost three items on the stack. */
/* locals_size (declared on a line dropped by this extraction),
   register save area, and stdarg save area, all in bytes.  */
950 int register_save_size;
951 int stdarg_save_size;
952 /* Sum of the above items. */
954 /* Various offsets. */
/* Offset of the first local from the arg pointer; sibling fields
   (fp_minus_ap, sp_minus_fp) are on lines dropped by this extraction.  */
955 int first_local_minus_ap;
960 /* Does REGNO need to be saved? */
/* Nonzero iff REGNUM must be saved in the prologue.  Ordinary functions
   save live call-saved registers; interrupt functions (IFUN) must also
   save live-or-used call-used registers, except fixed registers and the
   carry.  */
961 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
962 ((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM]) \
963 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
964 && (REGNO_REG_CLASS (REGNUM) != CARRY_REGS) \
965 && (regs_ever_live[REGNUM] || ! current_function_is_leaf)))
967 /* Compute the stack layout. */
/* Compute and return the stack frame layout for the current function:
   sizes of the locals, register-save and stdarg-save areas, and the
   fp/ap/sp offsets, biasing fp_minus_ap so that arguments and locals
   stay within load-indexed addressing range when possible.
   NOTE(review): the `int regno;` declaration, braces, and the `return
   layout;` line are missing from this extraction.  */
968 struct xstormy16_stack_layout
969 xstormy16_compute_stack_layout ()
971 struct xstormy16_stack_layout layout;
973 const int ifun = xstormy16_interrupt_function_p ();
975 layout.locals_size = get_frame_size ();
/* One word per register that the prologue will push.  */
977 layout.register_save_size = 0;
978 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
979 if (REG_NEEDS_SAVE (regno, ifun))
980 layout.register_save_size += UNITS_PER_WORD;
982 if (current_function_stdarg)
983 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
985 layout.stdarg_save_size = 0;
987 layout.frame_size = (layout.locals_size
988 + layout.register_save_size
989 + layout.stdarg_save_size);
/* Place the frame pointer so indexed addressing (range 2048) covers as
   much of the frame and incoming arguments as possible.  */
991 if (current_function_args_size <= 2048 && current_function_args_size != -1)
993 if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
994 + current_function_args_size <= 2048)
995 layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
997 layout.fp_minus_ap = 2048 - current_function_args_size;
1000 layout.fp_minus_ap = (layout.stdarg_save_size
1001 + layout.register_save_size
1002 + INCOMING_FRAME_SP_OFFSET);
1003 layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
1004 - layout.fp_minus_ap);
1005 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
1009 /* Determine how all the special registers get eliminated. */
/* Implement INITIAL_ELIMINATION_OFFSET: return the offset between
   register FROM and register TO for register elimination, derived from
   the computed stack layout.  NOTE(review): the `int result;` line, the
   final else/abort arm and `return result;` are missing from this
   extraction.  */
1011 xstormy16_initial_elimination_offset (from, to)
1014 struct xstormy16_stack_layout layout;
1017 layout = xstormy16_compute_stack_layout ();
1019 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1020 result = layout.sp_minus_fp - layout.locals_size;
1021 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1022 result = -layout.locals_size;
1023 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1024 result = -layout.fp_minus_ap;
1025 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1026 result = -(layout.sp_minus_fp + layout.fp_minus_ap);
/* Emit DEST = SRC0 + SRC1 as the post-reload addhi3 pattern — a
   PARALLEL of the SET and a clobber of the carry register (hard reg 16,
   BImode).  Returns the emitted insn.  NOTE(review): the return-type
   line, parameter declarations and `return insn;` are missing from this
   extraction.  */
1034 emit_addhi3_postreload (dest, src0, src1)
1039 rtx set, clobber, insn;
1041 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1042 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
1043 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1047 /* Called after register allocation to add any instructions needed for
1048 the prologue. Using a prologue insn is favored compared to putting
1049 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1050 since it allows the scheduler to intermix instructions with the
1051 saves of the caller saved registers. In some cases, it might be
1052 necessary to emit a barrier instruction as the last insn to prevent
1055 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1056 so that the debug info generation code can handle them properly. */
/* Expand the function prologue: push argument registers for stdarg
   functions, push call-saved (and, for interrupt functions, call-used)
   registers, allocate locals on the stack, and set up the frame
   pointer.  Each frame-related insn gets RTX_FRAME_RELATED_P and a
   REG_FRAME_RELATED_EXPR note so the dwarf2 unwinder sees a store plus
   an SP adjustment per push.  NOTE(review): declarations of regno /
   insn / dwarf / mem_push_rtx, loop braces and the note's third
   argument are on lines missing from this extraction.  */
1058 xstormy16_expand_prologue ()
1060 struct xstormy16_stack_layout layout;
1064 const int ifun = xstormy16_interrupt_function_p ();
/* (mem (post_inc sp)) — the push addressing form.  */
1066 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1067 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1069 layout = xstormy16_compute_stack_layout ();
1071 if (layout.locals_size >= 32768)
1072 error ("Local variable memory requirements exceed capacity.");
1074 /* Save the argument registers if necessary. */
1075 if (layout.stdarg_save_size)
1076 for (regno = FIRST_ARGUMENT_REGISTER;
1077 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1081 rtx reg = gen_rtx_REG (HImode, regno);
1083 insn = emit_move_insn (mem_push_rtx, reg);
1084 RTX_FRAME_RELATED_P (insn) = 1;
/* Describe the push to dwarf2 as store-to-(sp) + sp += wordsize.  */
1086 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1088 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1089 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1091 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1092 plus_constant (stack_pointer_rtx,
1093 GET_MODE_SIZE (Pmode)));
1094 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1097 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1098 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1101 /* Push each of the registers to save. */
1102 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1103 if (REG_NEEDS_SAVE (regno, ifun))
1106 rtx reg = gen_rtx_REG (HImode, regno);
1108 insn = emit_move_insn (mem_push_rtx, reg);
1109 RTX_FRAME_RELATED_P (insn) = 1;
/* Same dwarf2 annotation as for the stdarg pushes above.  */
1111 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1113 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1114 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1116 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1117 plus_constant (stack_pointer_rtx,
1118 GET_MODE_SIZE (Pmode)));
1119 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1122 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1123 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1126 /* It's just possible that the SP here might be what we need for
1128 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1129 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1131 /* Allocate space for local variables. */
1132 if (layout.locals_size)
1134 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1135 GEN_INT (layout.locals_size));
1136 RTX_FRAME_RELATED_P (insn) = 1;
1139 /* Set up the frame pointer, if required. */
1140 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1142 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1144 if (layout.sp_minus_fp)
1145 emit_addhi3_postreload (hard_frame_pointer_rtx,
1146 hard_frame_pointer_rtx,
1147 GEN_INT (-layout.sp_minus_fp))<SPECIAL_129>
1151 /* Do we need an epilogue at all? */
/* NOTE(review): the function header line is missing from this
   extraction; this is the "do we need an epilogue at all?" predicate —
   true once reload is done and the frame is empty.  Confirm its name
   against the full source.  */
1155 return (reload_completed
1156 && xstormy16_compute_stack_layout ().frame_size == 0);
1159 /* Called after register allocation to add any instructions needed for
1160 the epilogue. Using an epilogue insn is favored compared to putting
1161 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1162 since it allows the scheduler to intermix instructions with the
1163 saves of the caller saved registers. In some cases, it might be
1164 necessary to emit a barrier instruction as the last insn to prevent
/* Expand the function epilogue: free the locals (via the frame pointer
   when it equals the wanted SP, otherwise an SP adjustment), pop saved
   registers in reverse order with dwarf2 SP-adjust notes, pop the
   stdarg save area, then emit the appropriate return insn (interrupt
   vs. normal).  NOTE(review): declarations of regno/dwarf, braces, the
   note's third argument and the `if (ifun)` test before the interrupt
   return are on lines missing from this extraction.  */
1168 xstormy16_expand_epilogue ()
1170 struct xstormy16_stack_layout layout;
1171 rtx mem_pop_rtx, insn;
1173 const int ifun = xstormy16_interrupt_function_p ();
/* (mem (pre_dec sp)) — the pop addressing form.  */
1175 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1176 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1178 layout = xstormy16_compute_stack_layout ();
1180 /* Pop the stack for the locals. */
1181 if (layout.locals_size)
1183 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1184 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1187 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1188 GEN_INT (- layout.locals_size));
1189 RTX_FRAME_RELATED_P (insn) = 1;
1193 /* Restore any call-saved registers. */
1194 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1195 if (REG_NEEDS_SAVE (regno, ifun))
1199 insn = emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1200 RTX_FRAME_RELATED_P (insn) = 1;
/* Tell the unwinder the pop shrank the stack by one word.  */
1201 dwarf = gen_rtx_SET (Pmode, stack_pointer_rtx,
1202 plus_constant (stack_pointer_rtx,
1203 -GET_MODE_SIZE (Pmode)));
1204 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1209 /* Pop the stack for the stdarg save area. */
1210 if (layout.stdarg_save_size)
1212 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1213 GEN_INT (- layout.stdarg_save_size));
1214 RTX_FRAME_RELATED_P (insn) = 1;
1219 emit_jump_insn (gen_return_internal_interrupt ());
1221 emit_jump_insn (gen_return_internal ());
/* Implement EPILOGUE_USES: after reload, a call-used register is "used"
   by the epilogue exactly when the prologue saved it (interrupt
   functions save more — see REG_NEEDS_SAVE).  NOTE(review): the
   `return 0;` fall-through and closing brace are on lines missing from
   this extraction.  */
1225 xstormy16_epilogue_uses (regno)
1228 if (reload_completed && call_used_regs[regno])
1230 const int ifun = xstormy16_interrupt_function_p ();
1231 return REG_NEEDS_SAVE (regno, ifun);
/* Profiling is not implemented for this target; emit a "sorry"
   diagnostic rather than silently mis-compiling -p/-pg code.  */
1237 xstormy16_function_profiler ()
1239 sorry ("function_profiler support");
1243 /* Return an updated summarizer variable CUM to advance past an
1244 argument in the argument list. The values MODE, TYPE and NAMED
1245 describe that argument. Once this is done, the variable CUM is
1246 suitable for analyzing the *following* argument with
1247 `FUNCTION_ARG', etc.
1249 This function need not do anything if the argument in question was
1250 passed on the stack. The compiler knows how to track the amount of
1251 stack space used for arguments without any special help. However,
1252 it makes life easier for xstormy16_build_va_list if it does update
/* Advance CUM past one argument of mode MODE and type TYPE (NAMED is
   unused).  CUM counts argument words consumed so far; an argument that
   would straddle the register/stack boundary is pushed wholly onto the
   stack, which is why CUM jumps to NUM_ARGUMENT_REGISTERS first.
   NOTE(review): lines are elided in this copy (braces, the comment
   terminator for "passed on the ..." and the return, if any, are not
   visible).  */
1255 xstormy16_function_arg_advance (cum, mode, type, named)
1256 CUMULATIVE_ARGS cum;
1257 enum machine_mode mode;
1259 int named ATTRIBUTE_UNUSED;
1261 /* If an argument would otherwise be passed partially in registers,
1262 and partially on the stack, the whole of it is passed on the
1264 if (cum < NUM_ARGUMENT_REGISTERS
1265 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1266 cum = NUM_ARGUMENT_REGISTERS;
1268 cum += XSTORMY16_WORD_SIZE (type, mode);
/* Implement FUNCTION_ARG: return the register in which to pass an
   argument of mode MODE / type TYPE given CUM words already used, or
   (apparently, on the elided paths) no register when the argument must
   go on the stack or would not fit in the remaining argument
   registers.  The `cum + 2' offset suggests argument registers start
   two hard registers in (FIRST_ARGUMENT_REGISTER == r2?) -- TODO
   confirm against the register definitions.  */
1274 xstormy16_function_arg (cum, mode, type, named)
1275 CUMULATIVE_ARGS cum;
1276 enum machine_mode mode;
1278 int named ATTRIBUTE_UNUSED;
1280 if (mode == VOIDmode)
1282 if (MUST_PASS_IN_STACK (mode, type)
1283 || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1285 return gen_rtx_REG (mode, cum + 2);
1288 /* Do any needed setup for a variadic function. CUM has not been updated
1289 for the last named argument which has type TYPE and mode MODE. */
/* Implement SETUP_INCOMING_VARARGS.  All parameters are marked unused:
   this target does nothing here (no registers are spilled before the
   variable portion; the va_list machinery below finds arguments via
   the stack and the `count' field instead).  */
1291 xstormy16_setup_incoming_varargs (cum, int_mode, type, pretend_size)
1292 CUMULATIVE_ARGS cum ATTRIBUTE_UNUSED;
1293 int int_mode ATTRIBUTE_UNUSED;
1294 tree type ATTRIBUTE_UNUSED;
1295 int * pretend_size ATTRIBUTE_UNUSED;
1299 /* Build the va_list type.
1301 For this chip, va_list is a record containing a counter and a pointer.
1302 The counter is of type 'int' and indicates how many bytes
1303 have been used to date. The pointer indicates the stack position
1304 for arguments that have not been passed in registers.
1305 To keep the layout nice, the pointer is first in the structure. */
/* Implement BUILD_VA_LIST_TYPE: build the record type
      struct __va_list_tag { <base pointer>; unsigned count; };
   `base' (f_1) is first so the layout stays nice (see comment above);
   `count' (f_2) tracks how many bytes of arguments have been consumed.
   NOTE(review): the type of the `base' field and the return statement
   are on lines elided from this copy.  */
1308 xstormy16_build_va_list ()
1310 tree f_1, f_2, record, type_decl;
1312 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1313 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
1315 f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
1317 f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
1318 unsigned_type_node);
1320 DECL_FIELD_CONTEXT (f_1) = record;
1321 DECL_FIELD_CONTEXT (f_2) = record;
1323 TREE_CHAIN (record) = type_decl;
1324 TYPE_NAME (record) = type_decl;
1325 TYPE_FIELDS (record) = f_1;
1326 TREE_CHAIN (f_1) = f_2;
1328 layout_type (record);
1333 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1334 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1335 variable to initialize. NEXTARG is the machine independent notion of the
1336 'next' argument after the variable arguments. */
/* Expand va_start for VALIST.  Rejects use inside interrupt functions,
   then initializes the two va_list fields:
     base  = virtual_incoming_args_rtx + INCOMING_FRAME_SP_OFFSET
     count = current_function_args_info * UNITS_PER_WORD
   i.e. the stack position of the incoming arguments and the number of
   argument bytes already consumed by named parameters.  */
1338 xstormy16_expand_builtin_va_start (valist, nextarg)
1340 rtx nextarg ATTRIBUTE_UNUSED;
1342 tree f_base, f_count;
1346 if (xstormy16_interrupt_function_p ())
1347 error ("cannot use va_start in interrupt function");
1349 f_base = TYPE_FIELDS (va_list_type_node);
1350 f_count = TREE_CHAIN (f_base);
1352 base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base);
1353 count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count);
1355 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1356 t = build (PLUS_EXPR, TREE_TYPE (base), t,
1357 build_int_2 (INCOMING_FRAME_SP_OFFSET, 0));
1358 t = build (MODIFY_EXPR, TREE_TYPE (base), base, t);
1359 TREE_SIDE_EFFECTS (t) = 1;
1360 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1362 t = build (MODIFY_EXPR, TREE_TYPE (count), count,
1363 build_int_2 (current_function_args_info * UNITS_PER_WORD, 0));
1364 TREE_SIDE_EFFECTS (t) = 1;
1365 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1368 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1369 of type va_list as a tree, TYPE is the type passed to va_arg.
1370 Note: This algorithm is documented in stormy-abi. */
/* Expand va_arg for VALIST / TYPE (algorithm documented in stormy-abi,
   per the comment above).  Overall shape, as far as visible:
     1. If count + size still fits in the register-save area, the
        argument lives at base + count -- take that address and jump to
        lab_gotaddr.
     2. Otherwise (lab_fromstack) the argument is on the stack; for
        arguments that can never be in registers (large, or
        MUST_PASS_IN_STACK), first bump `count' up past the whole
        register area, then compute the stack address as
        base - (count + size - reg_area + INCOMING_FRAME_SP_OFFSET).
     3. In both cases, post-increment `count' by the rounded size.
   NOTE(review): braces and a few statements (e.g. the upper bound at
   1427 and the final return of the address) are elided in this copy.  */
1373 xstormy16_expand_builtin_va_arg (valist, type)
1377 tree f_base, f_count;
1379 rtx count_rtx, addr_rtx, r;
1380 rtx lab_gotaddr, lab_fromstack;
1382 int size, size_of_reg_args, must_stack;
1383 tree size_tree, count_plus_size;
1384 rtx count_plus_size_rtx;
1386 f_base = TYPE_FIELDS (va_list_type_node);
1387 f_count = TREE_CHAIN (f_base);
1389 base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base);
1390 count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count);
1392 must_stack = MUST_PASS_IN_STACK (TYPE_MODE (type), type);
1393 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1395 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1397 count_rtx = expand_expr (count, NULL_RTX, HImode, EXPAND_NORMAL);
1398 lab_gotaddr = gen_label_rtx ();
1399 lab_fromstack = gen_label_rtx ();
1400 addr_rtx = gen_reg_rtx (Pmode);
1404 count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree);
1405 count_plus_size_rtx = expand_expr (count_plus_size, NULL_RTX, HImode, EXPAND_NORMAL);
1406 emit_cmp_and_jump_insns (count_plus_size_rtx, GEN_INT (size_of_reg_args),
1407 GTU, const1_rtx, HImode, 1, lab_fromstack);
/* Register case: address is simply base + count.  */
1409 t = build (PLUS_EXPR, ptr_type_node, base, count);
1410 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
1412 emit_move_insn (addr_rtx, r);
1414 emit_jump_insn (gen_jump (lab_gotaddr));
1416 emit_label (lab_fromstack);
1419 /* Arguments larger than a word might need to skip over some
1420 registers, since arguments are either passed entirely in
1421 registers or entirely on the stack. */
1422 size = PUSH_ROUNDING (int_size_in_bytes (type));
1423 if (size > 2 || size < 0 || must_stack)
1425 rtx lab_notransition = gen_label_rtx ();
1426 emit_cmp_and_jump_insns (count_rtx, GEN_INT (NUM_ARGUMENT_REGISTERS
1428 GEU, const1_rtx, HImode, 1, lab_notransition);
/* Transition from register area to stack: consume the rest of the
   register-save area in one step.  */
1430 t = build (MODIFY_EXPR, TREE_TYPE (count), count,
1431 build_int_2 (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD, 0));
1432 TREE_SIDE_EFFECTS (t) = 1;
1433 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1435 emit_label (lab_notransition);
1438 t = build (PLUS_EXPR, sizetype, size_tree,
1439 build_int_2 ((- NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1440 + INCOMING_FRAME_SP_OFFSET),
1442 t = build (PLUS_EXPR, TREE_TYPE (count), count, fold (t));
1443 t = build (MINUS_EXPR, TREE_TYPE (base), base, t);
1444 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
1446 emit_move_insn (addr_rtx, r);
1448 emit_label (lab_gotaddr);
/* Common tail: count += size.  */
1450 count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree);
1451 t = build (MODIFY_EXPR, TREE_TYPE (count), count, count_plus_size);
1452 TREE_SIDE_EFFECTS (t) = 1;
1453 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1458 /* Initialize the variable parts of a trampoline. ADDR is an RTX for
1459 the address of the trampoline; FNADDR is an RTX for the address of
1460 the nested function; STATIC_CHAIN is an RTX for the static chain
1461 value that should be passed to the function when it is called. */
/* Fill in a trampoline at ADDR (see comment above for the contract).
   Writes four HImode words through reg_addr_mem, advancing reg_addr by
   2 each time:
     word 0: 0x3130 | STATIC_CHAIN_REGNUM   -- presumably the encoding
             of a `mov <chain-reg>,#imm' style instruction; TODO confirm
             against the xstormy16 opcode tables
     word 1: the static chain value itself
     word 2: 0x0200 | (low byte of FNADDR)  -- again apparently an
             instruction encoding carrying the target's low byte
     word 3: FNADDR >> 8
   All arithmetic is done in fresh pseudos so this is usable before
   register allocation.  */
1463 xstormy16_initialize_trampoline (addr, fnaddr, static_chain)
1468 rtx reg_addr = gen_reg_rtx (Pmode);
1469 rtx temp = gen_reg_rtx (HImode);
1470 rtx reg_fnaddr = gen_reg_rtx (HImode);
1473 reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);
1475 emit_move_insn (reg_addr, addr);
1476 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1477 emit_move_insn (reg_addr_mem, temp);
1478 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1479 emit_move_insn (temp, static_chain);
1480 emit_move_insn (reg_addr_mem, temp);
1481 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1482 emit_move_insn (reg_fnaddr, fnaddr);
1483 emit_move_insn (temp, reg_fnaddr);
1484 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1485 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1486 emit_move_insn (reg_addr_mem, temp);
1487 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1488 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1489 emit_move_insn (reg_addr_mem, reg_fnaddr);
1492 /* Create an RTX representing the place where a function returns a
1493 value of data type VALTYPE. VALTYPE is a tree node representing a
1494 data type. Write `TYPE_MODE (VALTYPE)' to get the machine mode
1495 used to represent that type. On many machines, only the mode is
1496 relevant. (Actually, on most machines, scalar values are returned
1497 in the same place regardless of mode).
1499 If `PROMOTE_FUNCTION_RETURN' is defined, you must apply the same promotion
1500 rules specified in `PROMOTE_MODE' if VALTYPE is a scalar type.
1502 If the precise function being called is known, FUNC is a tree node
1503 (`FUNCTION_DECL') for it; otherwise, FUNC is a null pointer. This makes it
1504 possible to use a different value-returning convention for specific
1505 functions when all their calls are known.
1507 `FUNCTION_VALUE' is not used for return values with aggregate data types,
1508 because these are returned in another way. See `STRUCT_VALUE_REGNUM' and
/* Implement FUNCTION_VALUE (contract in the comment above): return the
   RTX for a value of type VALTYPE, promoted per PROMOTE_MODE, always
   in RETURN_VALUE_REGNUM.  FUNC is unused -- no per-function return
   conventions on this target.  */
1511 xstormy16_function_value (valtype, func)
1513 tree func ATTRIBUTE_UNUSED;
1515 enum machine_mode mode;
1516 mode = TYPE_MODE (valtype);
1517 PROMOTE_MODE (mode, 0, valtype);
1518 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1521 /* A C compound statement that outputs the assembler code for a thunk function,
1522 used to implement C++ virtual function calls with multiple inheritance. The
1523 thunk acts as a wrapper around a virtual function, adjusting the implicit
1524 object parameter before handing control off to the real function.
1526 First, emit code to add the integer DELTA to the location that contains the
1527 incoming first argument. Assume that this argument contains a pointer, and
1528 is the one used to pass the `this' pointer in C++. This is the incoming
1529 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1530 addition must preserve the values of all other incoming arguments.
1532 After the addition, emit code to jump to FUNCTION, which is a
1533 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1534 the return address. Hence returning from FUNCTION will return to whoever
1535 called the current `thunk'.
1537 The effect must be as if @var{function} had been called directly
1538 with the adjusted first argument. This macro is responsible for
1539 emitting all of the code for a thunk function;
1540 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1543 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1544 extracted from it.) It might possibly be useful on some targets, but
/* Emit the assembly for an MI thunk (contract in the comment above):
   add DELTA (masked to 16 bits) to the register holding `this' -- which
   is the *second* argument register when the callee returns an
   aggregate through a hidden pointer -- then tail-jump to FUNCTION with
   `jmpf'.  VCALL_OFFSET is unused (see
   TARGET_ASM_CAN_OUTPUT_MI_THUNK = default_can_output_mi_thunk_no_vcall
   near the end of the file).
   NOTE(review): the regnum++ under the aggregate_value_p test and the
   trailing newline output appear to be on elided lines.  */
1548 xstormy16_asm_output_mi_thunk (file, thunk_fndecl, delta,
1549 vcall_offset, function)
1551 tree thunk_fndecl ATTRIBUTE_UNUSED;
1552 HOST_WIDE_INT delta;
1553 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
1556 int regnum = FIRST_ARGUMENT_REGISTER;
1558 /* There might be a hidden first argument for a returned structure. */
1559 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
1562 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1563 fputs ("\tjmpf ", file);
1564 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1568 /* Output constructors and destructors. Just like
1569 default_named_section_asm_out_* but don't set the sections writable. */
1570 #undef TARGET_ASM_CONSTRUCTOR
1571 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1572 #undef TARGET_ASM_DESTRUCTOR
1573 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
/* TARGET_ASM_DESTRUCTOR hook: emit SYMBOL into a .dtors section (a
   priority-suffixed `.dtors.NNNNN' section for non-default PRIORITY),
   without marking the section writable -- that is the whole point of
   overriding the default (see comment above the #defines).  */
1576 xstormy16_asm_out_destructor (symbol, priority)
1580 const char *section = ".dtors";
1583 /* ??? This only works reliably with the GNU linker. */
1584 if (priority != DEFAULT_INIT_PRIORITY)
1586 sprintf (buf, ".dtors.%.5u",
1587 /* Invert the numbering so the linker puts us in the proper
1588 order; constructors are run from right to left, and the
1589 linker sorts in increasing order. */
1590 MAX_INIT_PRIORITY - priority);
1594 named_section_flags (section, 0);
1595 assemble_align (POINTER_SIZE);
1596 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* TARGET_ASM_CONSTRUCTOR hook: identical to
   xstormy16_asm_out_destructor above but targets .ctors.  Kept in
   lockstep with that function -- change both together.  */
1600 xstormy16_asm_out_constructor (symbol, priority)
1604 const char *section = ".ctors";
1607 /* ??? This only works reliably with the GNU linker. */
1608 if (priority != DEFAULT_INIT_PRIORITY)
1610 sprintf (buf, ".ctors.%.5u",
1611 /* Invert the numbering so the linker puts us in the proper
1612 order; constructors are run from right to left, and the
1613 linker sorts in increasing order. */
1614 MAX_INIT_PRIORITY - priority);
1618 named_section_flags (section, 0);
1619 assemble_align (POINTER_SIZE);
1620 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1623 /* Print a memory address as an operand to reference that memory location. */
/* Print ADDRESS to FILE in assembler syntax.  Handles, in order:
   plain CONST_INT (masked to 16 bits), other constants/labels, then a
   register base -- optionally wrapped in PLUS const_int, PRE_DEC or
   POST_INC -- followed by `,offset'.  Non-matching forms hit
   error paths that are on elided lines (note GET_CODE != REG at 1664
   with no visible body).  */
1625 xstormy16_print_operand_address (file, address)
1629 HOST_WIDE_INT offset;
1630 int pre_dec, post_inc;
1632 /* There are a few easy cases. */
1633 if (GET_CODE (address) == CONST_INT)
1635 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1639 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1641 output_addr_const (file, address);
1645 /* Otherwise, it's hopefully something of the form
1646 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))
1649 if (GET_CODE (address) == PLUS)
1651 if (GET_CODE (XEXP (address, 1)) != CONST_INT)
1653 offset = INTVAL (XEXP (address, 1));
1654 address = XEXP (address, 0);
1659 pre_dec = (GET_CODE (address) == PRE_DEC);
1660 post_inc = (GET_CODE (address) == POST_INC);
1661 if (pre_dec || post_inc)
1662 address = XEXP (address, 0);
1664 if (GET_CODE (address) != REG)
1670 fputs (reg_names [REGNO (address)], file);
1674 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1678 /* Print an operand to an assembler instruction. */
/* Print operand X to FILE, modified by letter CODE.  Visible codes:
     'B'     -- bit number of the single set (or single clear) bit of a
                CONST_INT, printed as an immediate; errors otherwise.
     (fptr)  -- print a SYMBOL_REF/LABEL_REF without @fptr() wrapping
                (the dispatching case label is elided in this copy).
     'o'/'O' -- immediate operand minus one ('O' negates first).
   Unknown codes fall through to output_operand_lossage; plain operands
   are printed by the GET_CODE switch at the bottom (REG, MEM, and
   constants with the IMMEDIATE_PREFIX).
   NOTE(review): the switch(code) header, several case labels and
   break/return lines are elided here.  */
1680 xstormy16_print_operand (file, x, code)
1688 /* There is either one bit set, or one bit clear, in X.
1689 Print it preceded by '#'. */
1691 HOST_WIDE_INT xx = 1;
1694 if (GET_CODE (x) == CONST_INT)
1697 output_operand_lossage ("`B' operand is not constant");
1699 l = exact_log2 (xx);
1701 l = exact_log2 (~xx);
1703 output_operand_lossage ("`B' operand has multiple bits set");
1705 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1710 /* Print the symbol without a surrounding @fptr(). */
1711 if (GET_CODE (x) == SYMBOL_REF)
1712 assemble_name (file, XSTR (x, 0));
1713 else if (GET_CODE (x) == LABEL_REF)
1714 output_asm_label (x);
1716 xstormy16_print_operand_address (file, x);
1721 /* Print the immediate operand less one, preceded by '#'.
1722 For 'O', negate it first. */
1724 HOST_WIDE_INT xx = 0;
1726 if (GET_CODE (x) == CONST_INT)
1729 output_operand_lossage ("`o' operand is not constant");
1734 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1739 /* Handled below. */
1743 output_operand_lossage ("xstormy16_print_operand: unknown code");
1747 switch (GET_CODE (x))
1750 fputs (reg_names [REGNO (x)], file);
1754 xstormy16_print_operand_address (file, XEXP (x, 0));
1758 /* Some kind of constant or label; an immediate operand,
1759 so prefix it with '#' for the assembler. */
1760 fputs (IMMEDIATE_PREFIX, file);
1761 output_addr_const (file, x);
1769 /* Expander for the `casesi' pattern.
1770 INDEX is the index of the switch statement.
1771 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1772 to the first table entry.
1773 RANGE is the number of table entries.
1774 TABLE is an ADDR_VEC that is the jump table.
1775 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1776 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
/* Expand `casesi' (operands documented in the comment above):
   subtract LOWER_BOUND from INDEX, branch to DEFAULT_LABEL when out of
   RANGE, scale the HImode index by 4 (each table entry is a 4-byte
   `jmpf' -- see xstormy16_output_addr_vec) and dispatch via
   tablejump_pcrel.  Tables of >= 8192 entries are rejected with
   sorry() because the `br' reach is limited.  */
1780 xstormy16_expand_casesi (index, lower_bound, range, table, default_label)
1787 HOST_WIDE_INT range_i = INTVAL (range);
1790 /* This code uses 'br', so it can deal only with tables of size up to
1792 if (range_i >= 8192)
1793 sorry ("switch statement of size %lu entries too large",
1794 (unsigned long) range_i);
1796 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1798 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1800 int_index = gen_lowpart_common (HImode, index);
1801 emit_insn (gen_ashlhi3 (int_index, int_index, GEN_INT (2)));
1802 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1805 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1806 instructions, without label or alignment or any other special
1807 constructs. We know that the previous instruction will be the
1808 `tablejump_pcrel' output above.
1810 TODO: it might be nice to output 'br' instructions if they could
/* Output jump TABLE as a bare sequence of `jmpf <label>' lines (see the
   comment above: no label, no alignment; it directly follows the
   tablejump_pcrel).  LABEL is unused.  */
1814 xstormy16_output_addr_vec (file, label, table)
1816 rtx label ATTRIBUTE_UNUSED;
1821 function_section (current_function_decl);
1823 vlen = XVECLEN (table, 0);
1824 for (idx = 0; idx < vlen; idx++)
1826 fputs ("\tjmpf ", file);
1827 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1833 /* Expander for the `call' patterns.
1834 RETVAL is the RTX for the value the call returns, or NULL for a call
1835 that returns no value.
1836 DEST is the RTX for the address being called (normally a MEM); a
1837 non-constant, non-register address is forced into a register.
1838 COUNTER is an additional operand for the call pattern.
1839 (NOTE: the previous text here described the `casesi' operands and
1840 appeared to be a copy-paste from that expander's comment.)
/* Expand a call: strip the MEM wrapper from DEST, force a non-constant
   non-register address into a register, build the CALL rtx (wrapped in
   a SET when RETVAL is live), and emit it as a PARALLEL with a USE of a
   zeroed scratch HImode register for the indirect case -- presumably a
   requirement of the call patterns; TODO confirm against stormy16.md.
   NOTE(review): several lines (the COUNTER operand's use, abort paths,
   braces) are elided in this copy.  */
1844 xstormy16_expand_call (retval, dest, counter)
1850 enum machine_mode mode;
1852 if (GET_CODE (dest) != MEM)
1854 dest = XEXP (dest, 0);
1856 if (! CONSTANT_P (dest)
1857 && GET_CODE (dest) != REG)
1858 dest = force_reg (Pmode, dest);
1863 mode = GET_MODE (retval);
1865 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1868 call = gen_rtx_SET (VOIDmode, retval, call);
1870 if (! CONSTANT_P (dest))
1872 temp = gen_reg_rtx (HImode);
1873 emit_move_insn (temp, const0_rtx);
1878 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1879 gen_rtx_USE (VOIDmode, temp)));
1880 emit_call_insn (call);
1883 /* Expanders for multiword computational operations. */
1885 /* Expander for arithmetic operations; emit insns to compute
1887 (set DEST (CODE:MODE SRC0 SRC1))
1889 using CARRY as a temporary. When CODE is COMPARE, a branch
1890 template is generated (this saves duplicating code in
1891 xstormy16_split_cbranch). */
/* Emit insns for (set DEST (CODE:MODE SRC0 SRC1)), word by word, using
   CARRY as the inter-word carry register (contract in the comment
   above).  Per word i:
     - add/sub: use the carry-in/carry-out patterns addchi4/addchi5 and
       subchi4/subchi5; a zero CONST_INT word can be skipped (the
       visible INTVAL == 0 tests).
     - COMPARE on the last word builds a PARALLEL of branch + subtract
       + carry clobber so xstormy16_split_cbranch need not duplicate
       this (see comment above).
     - AND/IOR/XOR: skip identity words (INTVAL == -(code == AND), i.e.
       -1 for AND, 0 for IOR/XOR); otherwise a plain SET, with a NOT
       special case visible at the end.
   NOTE(review): the enclosing switch, several conditions and the final
   nop emission referenced by the trailing comment are elided here.  */
1894 xstormy16_expand_arith (mode, code, dest, src0, src1, carry)
1895 enum machine_mode mode;
1902 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1907 emit_move_insn (src0, const0_rtx);
1909 for (i = 0; i < num_words; i++)
1911 rtx w_src0, w_src1, w_dest;
1914 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1915 i * UNITS_PER_WORD);
1916 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1917 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
1923 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1927 insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
1929 insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
1935 if (code == COMPARE && i == num_words - 1)
1937 rtx branch, sub, clobber, sub_1;
1939 sub_1 = gen_rtx_MINUS (HImode, w_src0,
1940 gen_rtx_ZERO_EXTEND (HImode, carry));
1941 sub = gen_rtx_SET (VOIDmode, w_dest,
1942 gen_rtx_MINUS (HImode, sub_1, w_src1));
1943 clobber = gen_rtx_CLOBBER (VOIDmode, carry);
1944 branch = gen_rtx_SET (VOIDmode, pc_rtx,
1945 gen_rtx_IF_THEN_ELSE (VOIDmode,
1951 insn = gen_rtx_PARALLEL (VOIDmode,
1952 gen_rtvec (3, branch, sub, clobber));
1956 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1959 insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
1961 insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
1967 if (GET_CODE (w_src1) == CONST_INT
1968 && INTVAL (w_src1) == -(code == AND))
1971 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx (code, mode,
1976 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
1987 /* If we emit nothing, try_split() will think we failed. So emit
1988 something that does nothing and can be optimized away. */
1993 /* Return 1 if OP is a shift operator. */
/* Predicate: return 1 iff OP is a shift rtx (ASHIFT, LSHIFTRT, or --
   on the elided middle line, presumably ASHIFTRT).  MODE is ignored. */
1996 shift_operator (op, mode)
1998 enum machine_mode mode ATTRIBUTE_UNUSED;
2000 enum rtx_code code = GET_CODE (op);
2002 return (code == ASHIFT
2004 || code == LSHIFTRT);
2007 /* The shift operations are split at output time for constant values;
2008 variable-width shifts get handed off to a library routine.
2010 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2011 SIZE_R will be a CONST_INT, X will be a hard register. */
/* Build the assembler string for (set X (CODE:MODE X SIZE_R)) where X
   is a hard register pair (r0 = low word, r1 = high word) and SIZE_R
   is a CONST_INT (contract in the comment above).  Three regimes are
   visible:
     size == 1   -- single shift + rotate-through-carry pair;
     size >= 16  -- move one half across and fill/extend the other,
                    plus a residual shift for size > 16;
     otherwise   -- save bits crossing the word boundary in TEMP, shift
                    both halves, and OR the saved bits back in.
   The string is assembled into a buffer `r' declared on an elided
   line; invalid operands hit abort paths also elided here.  */
2014 xstormy16_output_shift (mode, code, x, size_r, temp)
2015 enum machine_mode mode;
2022 const char *r0, *r1, *rt;
2025 if (GET_CODE (size_r) != CONST_INT
2026 || GET_CODE (x) != REG
2029 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2034 r0 = reg_names [REGNO (x)];
2035 r1 = reg_names [REGNO (x) + 1];
2037 /* For shifts of size 1, we can use the rotate instructions. */
2043 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2046 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2049 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2057 /* For large shifts, there are easy special cases. */
2063 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2066 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2069 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2081 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2082 r1, r0, r0, r1, (int) size - 16);
2085 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2086 r0, r1, r1, r0, (int) size - 16);
2089 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2090 r0, r1, r1, r0, (int) size - 16);
2098 /* For the rest, we have to do more work. In particular, we
2099 need a temporary. */
2100 rt = reg_names [REGNO (temp)];
2105 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2106 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16-size),
2111 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2112 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2117 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2118 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2127 /* Attribute handling. */
2129 /* Return nonzero if the function is an interrupt function. */
/* Return nonzero when the current function carries the "interrupt"
   type attribute.  The elided early-out handles being called before
   current_function_decl exists (see the FIXME comment inside).  */
2131 xstormy16_interrupt_function_p ()
2135 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2136 any functions are declared, which is demonstrably wrong, but
2137 it is worked around here. FIXME. */
2141 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2142 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2145 #undef TARGET_ATTRIBUTE_TABLE
2146 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
/* Machine attribute table: a single "interrupt" attribute taking no
   arguments, required on a function type (type_req and fn_type_req are
   true), validated by xstormy16_handle_interrupt_attribute below.
   NULL sentinel terminates the table.  */
2147 static tree xstormy16_handle_interrupt_attribute PARAMS ((tree *, tree, tree, int, bool *));
2148 static const struct attribute_spec xstormy16_attribute_table[] =
2150 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2151 { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
2152 { NULL, 0, 0, false, false, false, NULL }
2155 /* Handle an "interrupt" attribute;
2156 arguments as in struct attribute_spec.handler. */
/* Attribute handler for "interrupt" (arguments as in struct
   attribute_spec.handler, per the comment above): warn and set
   *no_add_attrs when NODE is not a function type; otherwise accept the
   attribute unchanged.  */
2158 xstormy16_handle_interrupt_attribute (node, name, args, flags, no_add_attrs)
2161 tree args ATTRIBUTE_UNUSED;
2162 int flags ATTRIBUTE_UNUSED;
2165 if (TREE_CODE (*node) != FUNCTION_TYPE)
2167 warning ("`%s' attribute only applies to functions",
2168 IDENTIFIER_POINTER (name));
2169 *no_add_attrs = true;
2175 #undef TARGET_INIT_BUILTINS
2176 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2177 #undef TARGET_EXPAND_BUILTIN
2178 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
/* Table describing the target builtins (struct header is on elided
   lines; visible fields are the operand-map string and the signature
   string).  Each entry maps a builtin name to an insn code plus:
     arg_ops  -- per insn operand: 'r' the return value, 't' a scratch,
                 '0'..'9' the Nth builtin argument;
     arg_types -- return then argument types: s/l = short/long,
                 upper case = unsigned.
   Note div and mod share one insn (sdivlh/udivlh produce both results;
   only the r/t roles differ).  NULL-name sentinel presumably follows
   on an elided line.  */
2183 const char *arg_ops; /* 0..9, t for temp register, r for return value */
2184 const char *arg_types; /* s=short,l=long, upper case for unsigned */
2186 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2187 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2188 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2189 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
/* TARGET_INIT_BUILTINS hook: register each s16builtins entry with the
   front end.  The argument list is built back-to-front from
   arg_types (the loop decodes s/S/l/L into the matching type node; the
   return type handling for index 0 is on elided lines).  The builtin's
   function code is its table index i, later used by
   xstormy16_expand_builtin.  */
2194 xstormy16_init_builtins ()
2196 tree args, ret_type, arg;
2199 ret_type = void_type_node;
2201 for (i=0; s16builtins[i].name; i++)
2203 args = void_list_node;
2204 for (a=strlen (s16builtins[i].arg_types)-1; a>=0; a--)
2206 switch (s16builtins[i].arg_types[a])
2208 case 's': arg = short_integer_type_node; break;
2209 case 'S': arg = short_unsigned_type_node; break;
2210 case 'l': arg = long_integer_type_node; break;
2211 case 'L': arg = long_unsigned_type_node; break;
2217 args = tree_cons (NULL_TREE, arg, args);
2219 builtin_function (s16builtins[i].name,
2220 build_function_type (ret_type, args),
2221 i, BUILT_IN_MD, NULL, NULL);
/* TARGET_EXPAND_BUILTIN hook: expand a call EXP to one of the
   s16builtins.  Recovers the table index from DECL_FUNCTION_CODE,
   expands up to 10 argument rtxes, then builds the insn operand vector
   op[] by decoding arg_ops ('r' -> TARGET or a fresh pseudo, 't' ->
   fresh scratch, digit -> that argument).  Operands failing the insn
   predicate are replaced: output operands ('+'/'=' constraint) get a
   fresh register and are copied back afterwards via copyto[]; inputs
   are copied into a register.  Finally GEN_FCN emits the pattern and
   the retval rtx is returned (the emit and return lines are elided in
   this copy).  */
2226 xstormy16_expand_builtin(exp, target, subtarget, mode, ignore)
2229 rtx subtarget ATTRIBUTE_UNUSED;
2230 enum machine_mode mode ATTRIBUTE_UNUSED;
2231 int ignore ATTRIBUTE_UNUSED;
2233 rtx op[10], args[10], pat, copyto[10], retval = 0;
2234 tree fndecl, argtree;
2237 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2238 argtree = TREE_OPERAND (exp, 1);
2239 i = DECL_FUNCTION_CODE (fndecl);
2240 code = s16builtins[i].md_code;
2242 for (a = 0; a < 10 && argtree; a++)
2244 args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
2245 argtree = TREE_CHAIN (argtree);
2248 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2250 char ao = s16builtins[i].arg_ops[o];
2251 char c = insn_data[code].operand[o].constraint[0];
2256 omode = insn_data[code].operand[o].mode;
2258 op[o] = target ? target : gen_reg_rtx (omode);
2260 op[o] = gen_reg_rtx (omode);
2262 op[o] = args[(int) hex_value (ao)];
2264 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2266 if (c == '+' || c == '=')
2269 op[o] = gen_reg_rtx (omode);
2272 op[o] = copy_to_mode_reg (omode, op[o]);
2279 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2280 op[5], op[6], op[7], op[8], op[9]);
2283 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2286 emit_move_insn (copyto[o], op[o]);
2287 if (op[o] == retval)
2295 #undef TARGET_ASM_ALIGNED_HI_OP
2296 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2297 #undef TARGET_ASM_ALIGNED_SI_OP
2298 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2300 #undef TARGET_ASM_OUTPUT_MI_THUNK
2301 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2302 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2303 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2305 #undef TARGET_RTX_COSTS
2306 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2307 #undef TARGET_ADDRESS_COST
2308 #define TARGET_ADDRESS_COST xstormy16_address_cost
/* Instantiate the target hook vector from the TARGET_* macros defined
   throughout this file (thunks, rtx costs, constructors, builtins...).  */
2310 struct gcc_target targetm = TARGET_INITIALIZER;