1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
46 #include "target-def.h"
48 #include "langhooks.h"
49 #include "tree-gimple.h"
/* Forward declarations for the file-local helpers and target-hook
   implementations defined later in this file.  */
51 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
52 static void xstormy16_asm_out_constructor (rtx, int);
53 static void xstormy16_asm_out_destructor (rtx, int);
54 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
57 static void xstormy16_init_builtins (void);
58 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
59 static bool xstormy16_rtx_costs (rtx, int, int, int *);
60 static int xstormy16_address_cost (rtx);
61 static bool xstormy16_return_in_memory (tree, tree);
63 /* Define the information needed to generate branch and scc insns. This is
64 stored from the compare operation. */
/* Latched comparison operands; read back by xstormy16_emit_cbranch when
   the branch is actually emitted.  */
65 struct rtx_def * xstormy16_compare_op0;
66 struct rtx_def * xstormy16_compare_op1;
68 /* Return 1 if this is a LT, GE, LTU, or GEU operator. */
71 xstormy16_ineqsi_operator (register rtx op, enum machine_mode mode)
73 enum rtx_code code = GET_CODE (op);
75 return ((mode == VOIDmode || GET_MODE (op) == mode)
76 && (code == LT || code == GE || code == LTU || code == GEU));
79 /* Return 1 if this is an EQ or NE operator. */
82 equality_operator (register rtx op, enum machine_mode mode)
84 return ((mode == VOIDmode || GET_MODE (op) == mode)
85 && (GET_CODE (op) == EQ || GET_CODE (op) == NE));
88 /* Return 1 if this is a comparison operator but not an EQ or NE operator. */
91 inequality_operator (register rtx op, enum machine_mode mode)
93 return comparison_operator (op, mode) && ! equality_operator (op, mode);
96 /* Compute a (partial) cost for rtx X. Return true if the complete
97 cost has been computed, and false if subexpressions should be
98 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): the enclosing switch over CODE and its case labels are
   not visible here; the groupings below appear to be the CONST_INT case
   followed by more expensive operations -- confirm against the full
   function before relying on this.  */
101 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
/* Small non-negative constants are cheapest: 0..15 costs half an insn,
   0..255 one insn, anything else two.  */
107 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
108 *total = COSTS_N_INSNS (1) / 2;
109 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
110 *total = COSTS_N_INSNS (1);
112 *total = COSTS_N_INSNS (2);
119 *total = COSTS_N_INSNS(2);
/* Very expensive operations (presumably multiply/divide sequences --
   TODO confirm which codes these belong to).  */
123 *total = COSTS_N_INSNS (35 + 6);
126 *total = COSTS_N_INSNS (51 - 6);
/* Address-cost hook: absolute constant addresses are cheapest (2),
   base+offset costs 7; the cost of the remaining alternative is not
   visible in this fragment.  */
135 xstormy16_address_cost (rtx x)
137 return (GET_CODE (x) == CONST_INT ? 2
138 : GET_CODE (x) == PLUS ? 7
142 /* Branches are handled as follows:
144 1. HImode compare-and-branches. The machine supports these
145 natively, so the appropriate pattern is emitted directly.
147 2. SImode EQ and NE. These are emitted as pairs of HImode
148 compare-and-branches.
150 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
151 of a SImode subtract followed by a branch (not a compare-and-branch),
157 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
165 /* Emit a branch of kind CODE to location LOC. */
168 xstormy16_emit_cbranch (enum rtx_code code, rtx loc)
170 rtx op0 = xstormy16_compare_op0;
171 rtx op1 = xstormy16_compare_op1;
172 rtx condition_rtx, loc_ref, branch, cy_clobber;
174 enum machine_mode mode;
176 mode = GET_MODE (op0);
177 if (mode != HImode && mode != SImode)
181 && (code == GT || code == LE || code == GTU || code == LEU))
183 int unsigned_p = (code == GTU || code == LEU);
184 int gt_p = (code == GT || code == GTU);
188 lab = gen_label_rtx ();
189 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
190 /* This should be generated as a comparison against the temporary
191 created by the previous insn, but reload can't handle that. */
192 xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
197 else if (mode == SImode
198 && (code == NE || code == EQ)
199 && op1 != const0_rtx)
202 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
206 lab = gen_label_rtx ();
208 for (i = 0; i < num_words - 1; i++)
210 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
212 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
214 xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
216 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
218 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
220 xstormy16_emit_cbranch (code, loc);
227 /* We can't allow reload to try to generate any reload after a branch,
228 so when some register must match we must make the temporary ourselves. */
232 tmp = gen_reg_rtx (mode);
233 emit_move_insn (tmp, op0);
237 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
238 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
239 branch = gen_rtx_SET (VOIDmode, pc_rtx,
240 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
243 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));
246 vec = gen_rtvec (2, branch, cy_clobber);
247 else if (code == NE || code == EQ)
248 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
253 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
255 sub = gen_rtx_CLOBBER (SImode, op0);
257 vec = gen_rtvec (3, branch, sub, cy_clobber);
260 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
263 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
264 the arithmetic operation. Most of the work is done by
265 xstormy16_expand_arith. */
268 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
271 rtx op0 = XEXP (comparison, 0);
272 rtx op1 = XEXP (comparison, 1);
/* Emit the compare as an arithmetic sequence ...  */
277 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
/* ... then walk to the last insn the expander emitted ...  */
285 while (NEXT_INSN (last_insn) != NULL_RTX)
286 last_insn = NEXT_INSN (last_insn);
/* ... and patch it in place so its condition carries COMPARISON's code
   and its target is LABEL.  */
288 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
289 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
290 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
295 /* Return the string to output a conditional branch to LABEL, which is
296 the operand number of the label.
298 OP is the conditional expression, or NULL for branch-always.
300 REVERSED is nonzero if we should reverse the sense of the comparison.
/* Assemble a HImode conditional (or unconditional, OP == NULL) branch
   into a static buffer and return it.  */
305 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
307 static char string[64];
/* The insn length attribute tells us whether the target is out of
   range of a short branch.  */
308 int need_longbranch = (op != NULL_RTX
309 ? get_attr_length (insn) == 8
310 : get_attr_length (insn) == 4);
/* A long branch is a reversed short branch around a jmpf, so the
   condition flips once more in that case.  */
311 int really_reversed = reversed ^ need_longbranch;
313 const char *template;
314 const char *operands;
/* Branch-always (OP == NULL).  */
323 sprintf (string, "%s %s", ccode, label);
327 code = GET_CODE (op);
/* Canonicalize so the register operand is first.  */
329 if (GET_CODE (XEXP (op, 0)) != REG)
331 code = swap_condition (code);
337 /* Work out which way this really branches. */
339 code = reverse_condition (code);
/* Map the rtx comparison code to the condition-code mnemonic suffix.  */
343 case EQ: ccode = "z"; break;
344 case NE: ccode = "nz"; break;
345 case GE: ccode = "ge"; break;
346 case LT: ccode = "lt"; break;
347 case GT: ccode = "gt"; break;
348 case LE: ccode = "le"; break;
349 case GEU: ccode = "nc"; break;
350 case LTU: ccode = "c"; break;
351 case GTU: ccode = "hi"; break;
352 case LEU: ccode = "ls"; break;
/* Long form: short branch over a far jump.  */
359 template = "b%s %s,.+8 | jmpf %s";
361 template = "b%s %s,%s";
362 sprintf (string, template, ccode, operands, label);
367 /* Return the string to output a conditional branch to LABEL, which is
368 the operand number of the label, but suitable for the tail of a
371 OP is the conditional expression (OP is never NULL_RTX).
373 REVERSED is nonzero if we should reverse the sense of the comparison.
/* Assemble the tail of an SImode conditional branch: a preparatory
   instruction (OR of the halves, or SBC of the high words) followed by
   the branch itself.  */
378 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
380 static char string[64];
381 int need_longbranch = get_attr_length (insn) >= 8;
/* Long branches are emitted reversed around a jmpf, flipping the
   condition once more.  */
382 int really_reversed = reversed ^ need_longbranch;
384 const char *template;
388 code = GET_CODE (op);
390 /* Work out which way this really branches. */
392 code = reverse_condition (code);
396 case EQ: ccode = "z"; break;
397 case NE: ccode = "nz"; break;
398 case GE: ccode = "ge"; break;
399 case LT: ccode = "lt"; break;
400 case GEU: ccode = "nc"; break;
401 case LTU: ccode = "c"; break;
403 /* The missing codes above should never be generated. */
/* EQ/NE: OR the two halves of the register pair together so the Z flag
   reflects the whole SImode value.  */
414 if (GET_CODE (XEXP (op, 0)) != REG)
417 regnum = REGNO (XEXP (op, 0));
418 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
/* Ordering compares finish with a subtract-with-carry on the high
   words.  */
422 case GE: case LT: case GEU: case LTU:
423 strcpy (prevop, "sbc %2,%3");
/* Long form: short branch over a far jump.  */
431 template = "%s | b%s .+6 | jmpf %s";
433 template = "%s | b%s %s";
434 sprintf (string, template, prevop, ccode, label);
439 /* Many machines have some registers that cannot be copied directly to or from
440 memory or even from other types of registers. An example is the `MQ'
441 register, which on most machines, can only be copied to or from general
442 registers, but not memory. Some machines allow copying all registers to and
443 from memory, but require a scratch register for stores to some memory
444 locations (e.g., those with symbolic address on the RT, and those with
445 certain symbolic address on the SPARC when compiling PIC). In some cases,
446 both an intermediate and a scratch register are required.
448 You should define these macros to indicate to the reload phase that it may
449 need to allocate at least one register for a reload in addition to the
450 register to contain the data. Specifically, if copying X to a register
451 CLASS in MODE requires an intermediate register, you should define
452 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
453 whose registers can be used as intermediate registers or scratch registers.
455 If copying a register CLASS in MODE to X requires an intermediate or scratch
456 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
457 largest register class required. If the requirements for input and output
458 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
459 instead of defining both macros identically.
461 The values returned by these macros are often `GENERAL_REGS'. Return
462 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
463 to or from a register of CLASS in MODE without requiring a scratch register.
464 Do not define this macro if it would always return `NO_REGS'.
466 If a scratch register is required (either with or without an intermediate
467 register), you should define patterns for `reload_inM' or `reload_outM', as
468 required.. These patterns, which will normally be implemented with a
469 `define_expand', should be similar to the `movM' patterns, except that
470 operand 2 is the scratch register.
472 Define constraints for the reload register and scratch register that contain
473 a single register class. If the original reload register (whose class is
474 CLASS) can meet the constraint given in the pattern, the value returned by
475 these macros is used for the class of the scratch register. Otherwise, two
476 additional reload registers are required. Their classes are obtained from
477 the constraints in the insn pattern.
479 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
480 either be in a hard register or in memory. Use `true_regnum' to find out;
481 it will return -1 if the pseudo is in memory and the hard register number if
484 These macros should not be used in the case where a particular class of
485 registers can only be copied to memory and not to another class of
486 registers. In that case, secondary reload registers are not needed and
487 would not be helpful. Instead, a stack location must be used to perform the
488 copy and the `movM' pattern should use memory as an intermediate storage.
489 This case often occurs between floating-point and general registers. */
/* SECONDARY_RELOAD_CLASS worker; see the long comment above for the
   general contract.  */
492 xstormy16_secondary_reload_class (enum reg_class class,
493 enum machine_mode mode,
496 /* This chip has the interesting property that only the first eight
497 registers can be moved to/from memory. */
/* So if X is (or may end up) in memory and CLASS is not contained in
   EIGHT_REGS, an EIGHT_REGS intermediate is required.  */
498 if ((GET_CODE (x) == MEM
499 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
500 && (true_regnum (x) == -1
501 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
502 && ! reg_class_subset_p (class, EIGHT_REGS))
505 /* When reloading a PLUS, the carry register will be required
506 unless the inc or dec instructions can be used. */
507 if (xstormy16_carry_plus_operand (x, mode))
513 /* Recognize a PLUS that needs the carry register. */
515 xstormy16_carry_plus_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
517 return (GET_CODE (x) == PLUS
518 && GET_CODE (XEXP (x, 1)) == CONST_INT
519 && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4));
522 /* Detect and error out on out-of-range constants for movhi. */
524 xs_hi_general_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
526 if ((GET_CODE (x) == CONST_INT)
527 && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
528 error ("Constant halfword load operand out of range.");
529 return general_operand (x, mode);
532 /* Detect and error out on out-of-range constants for addhi and subhi. */
534 xs_hi_nonmemory_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
536 if ((GET_CODE (x) == CONST_INT)
537 && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
538 error ("Constant arithmetic operand out of range.");
539 return nonmemory_operand (x, mode);
/* PREFERRED_RELOAD_CLASS worker: memory can only be transferred through
   the first eight registers, so GENERAL_REGS is narrowed for memory
   operands (presumably to EIGHT_REGS -- the return value is elided
   here; confirm against the full function).  */
543 xstormy16_preferred_reload_class (rtx x, enum reg_class class)
545 if (class == GENERAL_REGS
546 && GET_CODE (x) == MEM)
/* Nonzero if X is a CONST_INT such that X + OFFSET lies in the signed
   12-bit displacement range [-2048, 2047] (tested with one unsigned
   compare).  */
552 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
553 (GET_CODE (X) == CONST_INT \
554 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
/* Nonzero if X + OFFSET is a usable absolute address: nonnegative,
   below 0x8000, and within either the lowest 0x100 or the highest
   0x100 bytes of that range.  */
556 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
557 (GET_CODE (X) == CONST_INT \
558 && INTVAL (X) + (OFFSET) >= 0 \
559 && INTVAL (X) + (OFFSET) < 0x8000 \
560 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
/* Legitimate-address worker: accept small absolute addresses,
   base + 12-bit offset, pre-modify/post-inc/pre-dec forms, and plain
   base registers.  */
563 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
566 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
569 if (GET_CODE (x) == PLUS
570 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
573 if ((GET_CODE (x) == PRE_MODIFY
574 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
575 || GET_CODE (x) == POST_INC
576 || GET_CODE (x) == PRE_DEC)
/* Base register: under STRICT checking, only hard registers pass.  */
579 if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
580 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
586 /* Return nonzero if memory address X (an RTX) can have different
587 meanings depending on the machine mode of the memory reference it
588 is used for or if the address is valid for some modes but not
591 Autoincrement and autodecrement addresses typically have mode-dependent
592 effects because the amount of the increment or decrement is the size of the
593 operand being addressed. Some machines have other mode-dependent addresses.
594 Many RISC machines have no mode-dependent addresses.
596 You may assume that ADDR is a valid address for the machine.
598 On this chip, this is true if the address is valid with an offset
599 of 0 but not of 6, because in that case it cannot be used as an
600 address for DImode or DFmode, or if the address is a post-increment
601 or pre-decrement address. */
/* Mode-dependent-address worker; the policy is described in the
   comment immediately above.  */
603 xstormy16_mode_dependent_address_p (rtx x)
/* Absolute address valid at offset 0 but not at offset 6: unusable for
   DImode/DFmode accesses.  */
605 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
606 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
/* Likewise for base + displacement.  */
609 if (GET_CODE (x) == PLUS
610 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
611 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
614 if (GET_CODE (x) == PLUS)
/* Auto-modification addresses are inherently mode-dependent.  */
617 if (GET_CODE (x) == POST_INC
618 || GET_CODE (x) == PRE_DEC)
624 /* A C expression that defines the optional machine-dependent constraint
625 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
626 types of operands, usually memory references, for the target machine.
627 Normally this macro will not be defined. If it is required for a particular
628 target machine, it should return 1 if VALUE corresponds to the operand type
629 represented by the constraint letter C. If C is not defined as an extra
630 constraint, the value returned should be 0 regardless of VALUE. */
/* EXTRA_CONSTRAINT worker: dispatch on the constraint letter C.  */
632 xstormy16_extra_constraint_p (rtx x, int c)
636 /* 'Q' is for pushes. */
638 return (GET_CODE (x) == MEM
639 && GET_CODE (XEXP (x, 0)) == POST_INC
640 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
642 /* 'R' is for pops. */
644 return (GET_CODE (x) == MEM
645 && GET_CODE (XEXP (x, 0)) == PRE_DEC
646 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
648 /* 'S' is for immediate memory addresses. */
650 return (GET_CODE (x) == MEM
651 && GET_CODE (XEXP (x, 0)) == CONST_INT
652 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
656 /* Not implemented yet. */
659 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
660 for allocating a scratch register for 32-bit shifts. */
662 return (GET_CODE (x) == CONST_INT
663 && (INTVAL (x) < 2 || INTVAL (x) > 15));
665 /* 'Z' is for CONST_INT value zero. This is for adding zero to
666 a register in addhi3, which would otherwise require a carry. */
668 return (GET_CODE (x) == CONST_INT
669 && (INTVAL (x) == 0));
677 short_memory_operand (rtx x, enum machine_mode mode)
679 if (! memory_operand (x, mode))
681 return (GET_CODE (XEXP (x, 0)) != PLUS);
685 nonimmediate_nonstack_operand (rtx op, enum machine_mode mode)
687 /* 'Q' is for pushes, 'R' for pops. */
688 return (nonimmediate_operand (op, mode)
689 && ! xstormy16_extra_constraint_p (op, 'Q')
690 && ! xstormy16_extra_constraint_p (op, 'R'));
693 /* Splitter for the 'move' patterns, for modes not directly implemented
694 by hardware. Emit insns to copy a value of mode MODE from SRC to
697 This function is only called when reload_completed.
/* Split a multi-word move of mode MODE from SRC to DEST into a series
   of word_mode moves; see the header comment above.  Only valid after
   reload.  */
701 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
703 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
704 int direction, end, i;
705 int src_modifies = 0;
706 int dest_modifies = 0;
707 int src_volatile = 0;
708 int dest_volatile = 0;
/* Set when the memory address auto-modifies a register, so each piece
   gets a REG_INC note.  */
710 rtx auto_inc_reg_rtx = NULL_RTX;
712 /* Check initial conditions. */
713 if (! reload_completed
714 || mode == QImode || mode == HImode
715 || ! nonimmediate_operand (dest, mode)
716 || ! general_operand (src, mode))
719 /* This case is not supported below, and shouldn't be generated. */
720 if (GET_CODE (dest) == MEM
721 && GET_CODE (src) == MEM)
724 /* This case is very very bad after reload, so trap it now. */
725 if (GET_CODE (dest) == SUBREG
726 || GET_CODE (src) == SUBREG)
729 /* The general idea is to copy by words, offsetting the source and
730 destination. Normally the least-significant word will be copied
731 first, but for pre-dec operations it's better to copy the
732 most-significant word first. Only one operand can be a pre-dec
735 It's also possible that the copy overlaps so that the direction
/* Record the side effects/volatility of a MEM destination, and strip
   the volatile flag from the working copy.  */
739 if (GET_CODE (dest) == MEM)
741 mem_operand = XEXP (dest, 0);
742 dest_modifies = side_effects_p (mem_operand);
743 if (auto_inc_p (mem_operand))
744 auto_inc_reg_rtx = XEXP (mem_operand, 0);
745 dest_volatile = MEM_VOLATILE_P (dest);
748 dest = copy_rtx (dest);
749 MEM_VOLATILE_P (dest) = 0;
/* Likewise for a MEM source.  */
752 else if (GET_CODE (src) == MEM)
754 mem_operand = XEXP (src, 0);
755 src_modifies = side_effects_p (mem_operand);
756 if (auto_inc_p (mem_operand))
757 auto_inc_reg_rtx = XEXP (mem_operand, 0);
758 src_volatile = MEM_VOLATILE_P (src);
761 src = copy_rtx (src);
762 MEM_VOLATILE_P (src) = 0;
766 mem_operand = NULL_RTX;
/* Choose the copy direction: register-register overlap, pre-dec
   addressing, and memory/register overlap each force high-to-low.  */
768 if (mem_operand == NULL_RTX)
770 if (GET_CODE (src) == REG
771 && GET_CODE (dest) == REG
772 && reg_overlap_mentioned_p (dest, src)
773 && REGNO (dest) > REGNO (src))
776 else if (GET_CODE (mem_operand) == PRE_DEC
777 || (GET_CODE (mem_operand) == PLUS
778 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
780 else if (GET_CODE (src) == MEM
781 && reg_overlap_mentioned_p (dest, src))
784 if (GET_CODE (dest) != REG)
786 regno = REGNO (dest);
788 if (! refers_to_regno_p (regno, regno + num_words, mem_operand, 0))
791 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
793 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
797 /* This means something like
798 (set (reg:DI r0) (mem:DI (reg:HI r1)))
799 which we'd need to support by doing the set of the second word
/* Emit one word_mode move per word, in the chosen direction.  */
804 end = direction < 0 ? -1 : num_words;
805 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
807 rtx w_src, w_dest, insn;
810 w_src = gen_rtx_MEM (word_mode, mem_operand);
812 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
814 MEM_VOLATILE_P (w_src) = 1;
816 w_dest = gen_rtx_MEM (word_mode, mem_operand);
818 w_dest = simplify_gen_subreg (word_mode, dest, mode,
821 MEM_VOLATILE_P (w_dest) = 1;
823 /* The simplify_subreg calls must always be able to simplify. */
824 if (GET_CODE (w_src) == SUBREG
825 || GET_CODE (w_dest) == SUBREG)
828 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
829 if (auto_inc_reg_rtx)
830 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
836 /* Expander for the 'move' patterns. Emit insns to copy a value of
837 mode MODE from SRC to DEST. */
/* Expander for the 'move' patterns: emit insns to copy SRC to DEST in
   mode MODE, lowering PRE_MODIFY addresses and out-of-range immediate
   stores first.  */
840 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
/* A PRE_MODIFY destination address is lowered to an explicit update of
   the base register (clobbering the carry, register 16) followed by a
   plain MEM access.  */
842 if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
844 rtx pmv = XEXP (dest, 0);
845 rtx dest_reg = XEXP (pmv, 0);
846 rtx dest_mod = XEXP (pmv, 1);
847 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
848 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
850 dest = gen_rtx_MEM (mode, dest_reg);
851 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
/* Likewise for a PRE_MODIFY source address.  */
853 else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
855 rtx pmv = XEXP (src, 0);
856 rtx src_reg = XEXP (pmv, 0);
857 rtx src_mod = XEXP (pmv, 1);
858 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
859 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
861 src = gen_rtx_MEM (mode, src_reg);
862 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
865 /* There are only limited immediate-to-memory move instructions. */
866 if (! reload_in_progress
867 && ! reload_completed
868 && GET_CODE (dest) == MEM
869 && (GET_CODE (XEXP (dest, 0)) != CONST_INT
870 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
871 && GET_CODE (src) != REG
872 && GET_CODE (src) != SUBREG)
873 src = copy_to_mode_reg (mode, src);
875 /* Don't emit something we would immediately split. */
877 && mode != HImode && mode != QImode)
879 xstormy16_split_move (mode, dest, src);
883 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
889 The stack is laid out as follows:
893 Register save area (up to 4 words)
894 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
896 AP-> Return address (two words)
897 9th procedure parameter word
898 10th procedure parameter word
900 last procedure parameter word
902 The frame pointer location is tuned to make it most likely that all
903 parameters and local variables can be accessed using a load-indexed
906 /* A structure to describe the layout. */
/* Filled in by xstormy16_compute_stack_layout; all sizes and offsets
   are in bytes.  */
907 struct xstormy16_stack_layout
909 /* Size of the topmost three items on the stack. */
911 int register_save_size;
912 int stdarg_save_size;
913 /* Sum of the above items. */
915 /* Various offsets. */
916 int first_local_minus_ap;
921 /* Does REGNO need to be saved? */
/* True for a live call-saved register, or -- in an interrupt function
   (IFUN nonzero) -- for a non-fixed call-used register other than the
   carry that is live or may be clobbered by calls in a non-leaf
   function.  */
922 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
923 ((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM]) \
924 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
925 && (REGNO_REG_CLASS (REGNUM) != CARRY_REGS) \
926 && (regs_ever_live[REGNUM] || ! current_function_is_leaf)))
928 /* Compute the stack layout. */
929 struct xstormy16_stack_layout
930 xstormy16_compute_stack_layout (void)
932 struct xstormy16_stack_layout layout;
934 const int ifun = xstormy16_interrupt_function_p ();
936 layout.locals_size = get_frame_size ();
/* One word per register that must be saved.  */
938 layout.register_save_size = 0;
939 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
940 if (REG_NEEDS_SAVE (regno, ifun))
941 layout.register_save_size += UNITS_PER_WORD;
/* stdarg functions also dump the argument registers on the stack.  */
943 if (current_function_stdarg)
944 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
946 layout.stdarg_save_size = 0;
948 layout.frame_size = (layout.locals_size
949 + layout.register_save_size
950 + layout.stdarg_save_size);
/* Pick fp_minus_ap so that, when possible, everything is reachable
   from the frame pointer with offsets within the 2048-byte indexed
   range.  */
952 if (current_function_args_size <= 2048 && current_function_args_size != -1)
954 if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
955 + current_function_args_size <= 2048)
956 layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
958 layout.fp_minus_ap = 2048 - current_function_args_size;
961 layout.fp_minus_ap = (layout.stdarg_save_size
962 + layout.register_save_size
963 + INCOMING_FRAME_SP_OFFSET);
964 layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
965 - layout.fp_minus_ap);
966 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
970 /* Determine how all the special registers get eliminated. */
/* Returns the offset to add when replacing register FROM with register
   TO, derived from the computed stack layout.  */
972 xstormy16_initial_elimination_offset (int from, int to)
974 struct xstormy16_stack_layout layout;
977 layout = xstormy16_compute_stack_layout ();
979 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
980 result = layout.sp_minus_fp - layout.locals_size;
981 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
982 result = -layout.locals_size;
983 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
984 result = -layout.fp_minus_ap;
985 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
986 result = -(layout.sp_minus_fp + layout.fp_minus_ap);
994 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
996 rtx set, clobber, insn;
998 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
999 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
1000 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1004 /* Called after register allocation to add any instructions needed for
1005 the prologue. Using a prologue insn is favored compared to putting
1006 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1007 since it allows the scheduler to intermix instructions with the
1008 saves of the caller saved registers. In some cases, it might be
1009 necessary to emit a barrier instruction as the last insn to prevent
1012 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1013 so that the debug info generation code can handle them properly. */
/* Emit the function prologue: save argument registers for stdarg, push
   call-saved registers, allocate locals, and set up the frame pointer.
   Every emitted insn is marked frame-related for unwind info.  */
1015 xstormy16_expand_prologue (void)
1017 struct xstormy16_stack_layout layout;
1021 const int ifun = xstormy16_interrupt_function_p ();
/* A push on this target is a store through (post_inc sp).  */
1023 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1024 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1026 layout = xstormy16_compute_stack_layout ();
1028 if (layout.locals_size >= 32768)
/* NOTE(review): diagnostic text does not follow GCC conventions
   (should start lowercase, no trailing period).  */
1029 error ("Local variable memory requirements exceed capacity.");
1031 /* Save the argument registers if necessary. */
1032 if (layout.stdarg_save_size)
1033 for (regno = FIRST_ARGUMENT_REGISTER;
1034 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1038 rtx reg = gen_rtx_REG (HImode, regno);
1040 insn = emit_move_insn (mem_push_rtx, reg);
1041 RTX_FRAME_RELATED_P (insn) = 1;
/* Describe the push to the unwinder as a store plus an SP bump.  */
1043 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1045 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1046 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1048 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1049 plus_constant (stack_pointer_rtx,
1050 GET_MODE_SIZE (Pmode)));
1051 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1054 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1055 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1058 /* Push each of the registers to save. */
1059 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1060 if (REG_NEEDS_SAVE (regno, ifun))
1063 rtx reg = gen_rtx_REG (HImode, regno);
1065 insn = emit_move_insn (mem_push_rtx, reg);
1066 RTX_FRAME_RELATED_P (insn) = 1;
/* Same unwinder annotation as for the stdarg pushes above.  */
1068 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1070 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1071 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1073 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1074 plus_constant (stack_pointer_rtx,
1075 GET_MODE_SIZE (Pmode)));
1076 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1079 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1080 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1083 /* It's just possible that the SP here might be what we need for
1085 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1086 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1088 /* Allocate space for local variables. */
1089 if (layout.locals_size)
1091 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1092 GEN_INT (layout.locals_size));
1093 RTX_FRAME_RELATED_P (insn) = 1;
1096 /* Set up the frame pointer, if required. */
1097 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1099 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1101 if (layout.sp_minus_fp)
1102 emit_addhi3_postreload (hard_frame_pointer_rtx,
1103 hard_frame_pointer_rtx,
1104 GEN_INT (-layout.sp_minus_fp));
1108 /* Do we need an epilogue at all? */
1110 direct_return (void)
1112 return (reload_completed
1113 && xstormy16_compute_stack_layout ().frame_size == 0);
1116 /* Called after register allocation to add any instructions needed for
1117 the epilogue. Using an epilogue insn is favored compared to putting
1118 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1119 since it allows the scheduler to intermix instructions with the
1120 saves of the caller saved registers. In some cases, it might be
1121 necessary to emit a barrier instruction as the last insn to prevent
/* Emit the function epilogue: deallocate locals, restore saved
   registers in reverse order, pop the stdarg save area, and emit the
   appropriate return insn.  */
1125 xstormy16_expand_epilogue (void)
1127 struct xstormy16_stack_layout layout;
1128 rtx mem_pop_rtx, insn;
1130 const int ifun = xstormy16_interrupt_function_p ();
/* A pop on this target is a load through (pre_dec sp).  */
1132 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1133 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1135 layout = xstormy16_compute_stack_layout ();
1137 /* Pop the stack for the locals. */
1138 if (layout.locals_size)
/* Reuse the frame pointer as the new SP when it already points at the
   right place; otherwise subtract explicitly.  */
1140 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1141 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1144 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1145 GEN_INT (- layout.locals_size));
1146 RTX_FRAME_RELATED_P (insn) = 1;
1150 /* Restore any call-saved registers. */
1151 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1152 if (REG_NEEDS_SAVE (regno, ifun))
1156 insn = emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1157 RTX_FRAME_RELATED_P (insn) = 1;
/* Tell the unwinder the SP moved down by one word.  */
1158 dwarf = gen_rtx_SET (Pmode, stack_pointer_rtx,
1159 plus_constant (stack_pointer_rtx,
1160 -GET_MODE_SIZE (Pmode)));
1161 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1166 /* Pop the stack for the stdarg save area. */
1167 if (layout.stdarg_save_size)
1169 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1170 GEN_INT (- layout.stdarg_save_size));
1171 RTX_FRAME_RELATED_P (insn) = 1;
/* Interrupt functions return with a distinct insn.  */
1176 emit_jump_insn (gen_return_internal_interrupt ());
1178 emit_jump_insn (gen_return_internal ());
/* EPILOGUE_USES worker: after reload, a call-used register counts as
   used by the epilogue exactly when the prologue had to save it (the
   epilogue restores it).  */
1182 xstormy16_epilogue_uses (int regno)
1184 if (reload_completed && call_used_regs[regno])
1186 const int ifun = xstormy16_interrupt_function_p ();
1187 return REG_NEEDS_SAVE (regno, ifun);
/* FUNCTION_PROFILER worker: profiling is not implemented for this
   target, so issue a "sorry" diagnostic.  */
1193 xstormy16_function_profiler (void)
1195 sorry ("function_profiler support");
1199 /* Return an updated summarizer variable CUM to advance past an
1200 argument in the argument list. The values MODE, TYPE and NAMED
1201 describe that argument. Once this is done, the variable CUM is
1202 suitable for analyzing the *following* argument with
1203 `FUNCTION_ARG', etc.
1205 This function need not do anything if the argument in question was
1206 passed on the stack. The compiler knows how to track the amount of
1207 stack space used for arguments without any special help. However,
1208 it makes life easier for xstormy16_build_va_list if it does update
1211 xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
1212 tree type, int named ATTRIBUTE_UNUSED)
1214 /* If an argument would otherwise be passed partially in registers,
1215 and partially on the stack, the whole of it is passed on the
/* CUM counts argument words already consumed; saturate it to the
   register count so the partial-in-registers case never arises.  */
1217 if (cum < NUM_ARGUMENT_REGISTERS
1218 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1219 cum = NUM_ARGUMENT_REGISTERS;
1221 cum += XSTORMY16_WORD_SIZE (type, mode);
/* Worker for FUNCTION_ARG: return the register in which to pass this
   argument, or NULL (elided here) when it goes on the stack.  VOIDmode
   marks the end-of-arguments sentinel.  Argument registers start at
   register 2, indexed by the word count CUM.  */
1227 xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
1228 tree type, int named ATTRIBUTE_UNUSED)
1230 if (mode == VOIDmode)
/* Stack-only if the ABI forces it, or the argument would not fit
   entirely in the remaining argument registers.  */
1232 if (targetm.calls.must_pass_in_stack (mode, type)
1233 || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1235 return gen_rtx_REG (mode, cum + 2);
1238 /* Build the va_list type.
1240 For this chip, va_list is a record containing a counter and a pointer.
1241 The counter is of type 'int' and indicates how many bytes
1242 have been used to date. The pointer indicates the stack position
1243 for arguments that have not been passed in registers.
1244 To keep the layout nice, the pointer is first in the structure. */
1247 xstormy16_build_builtin_va_list (void)
1249 tree f_1, f_2, record, type_decl;
1251 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1252 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* f_1 is the "base" pointer field, f_2 the "count" field; base comes
   first in the record (see comment above).  */
1254 f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
1256 f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
1257 unsigned_type_node);
1259 DECL_FIELD_CONTEXT (f_1) = record;
1260 DECL_FIELD_CONTEXT (f_2) = record;
1262 TREE_CHAIN (record) = type_decl;
1263 TYPE_NAME (record) = type_decl;
/* Chain the fields onto the record and compute its layout.  */
1264 TYPE_FIELDS (record) = f_1;
1265 TREE_CHAIN (f_1) = f_2;
1267 layout_type (record);
1272 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1273 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1274 variable to initialize. NEXTARG is the machine independent notion of the
1275 'next' argument after the variable arguments. */
1277 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1279 tree f_base, f_count;
/* va_start is unsupported in interrupt handlers on this target.  */
1283 if (xstormy16_interrupt_function_p ())
1284 error ("cannot use va_start in interrupt function");
/* The two fields of the va_list record, in declaration order:
   "base" first, then "count" (see xstormy16_build_builtin_va_list).  */
1286 f_base = TYPE_FIELDS (va_list_type_node);
1287 f_count = TREE_CHAIN (f_base);
1289 base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1290 count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
/* base = virtual_incoming_args + INCOMING_FRAME_SP_OFFSET.  */
1293 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1294 t = build (PLUS_EXPR, TREE_TYPE (base), t,
1295 build_int_2 (INCOMING_FRAME_SP_OFFSET, 0));
1296 t = build (MODIFY_EXPR, TREE_TYPE (base), base, t);
1297 TREE_SIDE_EFFECTS (t) = 1;
1298 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* count = number of bytes of named arguments already consumed.  */
1300 t = build (MODIFY_EXPR, TREE_TYPE (count), count,
1301 build_int_2 (current_function_args_info * UNITS_PER_WORD, 0));
1302 TREE_SIDE_EFFECTS (t) = 1;
1303 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1306 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1307 of type va_list as a tree, TYPE is the type passed to va_arg.
1308 Note: This algorithm is documented in stormy-abi. */
1311 xstormy16_expand_builtin_va_arg (tree valist, tree type, tree *pre_p,
1312 tree *post_p ATTRIBUTE_UNUSED)
1314 tree f_base, f_count;
1316 tree count_tmp, addr, t;
1317 tree lab_gotaddr, lab_fromstack;
1318 int size, size_of_reg_args, must_stack;
1321 f_base = TYPE_FIELDS (va_list_type_node);
1322 f_count = TREE_CHAIN (f_base);
1324 base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1325 count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
/* Argument size rounded up to a whole number of words.  */
1328 must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
1329 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1330 gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
1332 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1334 count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
1335 lab_gotaddr = create_artificial_label ();
1336 lab_fromstack = create_artificial_label ();
1337 addr = create_tmp_var (ptr_type_node, NULL);
/* If count + size would run past the register-save area, the argument
   came from the stack: jump to lab_fromstack.  */
1343 t = fold_convert (TREE_TYPE (count), size_tree);
1344 t = build (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1345 r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
1346 t = build (GT_EXPR, boolean_type_node, t, r);
1347 t = build (COND_EXPR, void_type_node, t,
1348 build (GOTO_EXPR, void_type_node, lab_fromstack),
1350 gimplify_and_add (t, pre_p);
/* Register-save-area case: addr = base + count.  */
1352 t = fold_convert (ptr_type_node, count_tmp);
1353 t = build (PLUS_EXPR, ptr_type_node, base, t);
1354 t = build (MODIFY_EXPR, void_type_node, addr, t);
1355 gimplify_and_add (t, pre_p);
1357 t = build (GOTO_EXPR, void_type_node, lab_gotaddr);
1358 gimplify_and_add (t, pre_p);
1360 t = build (LABEL_EXPR, void_type_node, lab_fromstack);
1361 gimplify_and_add (t, pre_p);
1364 /* Arguments larger than a word might need to skip over some
1365 registers, since arguments are either passed entirely in
1366 registers or entirely on the stack. */
1367 size = PUSH_ROUNDING (int_size_in_bytes (type));
1368 if (size > 2 || size < 0 || must_stack)
/* count_tmp = max (count_tmp, size_of_reg_args): skip the rest of
   the register-save area before indexing the stack arguments.  */
1372 r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
1373 u = build (MODIFY_EXPR, void_type_node, count_tmp, r);
1375 t = fold_convert (TREE_TYPE (count), r);
1376 t = build (GE_EXPR, boolean_type_node, count_tmp, t);
1377 t = build (COND_EXPR, void_type_node, t, NULL, u);
1378 gimplify_and_add (t, pre_p);
/* Stack case: addr = base - (count_tmp - (reg area - SP offset) + size).
   Layout details are in the stormy-abi document referenced above.  */
1381 t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1382 - INCOMING_FRAME_SP_OFFSET);
1383 t = fold_convert (TREE_TYPE (count), t);
1384 t = build (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
1385 t = build (PLUS_EXPR, TREE_TYPE (count), t,
1386 fold_convert (TREE_TYPE (count), size_tree));
1387 t = fold_convert (TREE_TYPE (base), fold (t));
1388 t = build (MINUS_EXPR, TREE_TYPE (base), base, t);
1389 t = build (MODIFY_EXPR, void_type_node, addr, t);
1390 gimplify_and_add (t, pre_p);
1392 t = build (LABEL_EXPR, void_type_node, lab_gotaddr);
1393 gimplify_and_add (t, pre_p);
/* Write back the advanced count into the va_list.  */
1395 t = fold_convert (TREE_TYPE (count), size_tree);
1396 t = build (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1397 t = build (MODIFY_EXPR, TREE_TYPE (count), count, t);
1398 gimplify_and_add (t, pre_p);
/* Return *(TYPE *) addr.  */
1400 addr = fold_convert (build_pointer_type (type), addr);
1401 return build_fold_indirect_ref (addr);
1404 /* Initialize the variable parts of a trampoline. ADDR is an RTX for
1405 the address of the trampoline; FNADDR is an RTX for the address of
1406 the nested function; STATIC_CHAIN is an RTX for the static chain
1407 value that should be passed to the function when it is called. */
1409 xstormy16_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
1411 rtx reg_addr = gen_reg_rtx (Pmode);
1412 rtx temp = gen_reg_rtx (HImode);
1413 rtx reg_fnaddr = gen_reg_rtx (HImode);
/* reg_addr_mem is a word-sized store through reg_addr; the code below
   writes four HImode words, bumping reg_addr by 2 between stores.  */
1416 reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);
1418 emit_move_insn (reg_addr, addr);
/* Word 0: instruction encoding 0x3130 | STATIC_CHAIN_REGNUM —
   presumably a "load static chain register" opcode; see the target's
   trampoline template (TODO confirm against stormy16 ISA docs).  */
1419 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1420 emit_move_insn (reg_addr_mem, temp);
1421 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Word 1: the static chain value itself.  */
1422 emit_move_insn (temp, static_chain);
1423 emit_move_insn (reg_addr_mem, temp);
1424 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Word 2: low byte of FNADDR merged with opcode bits 0x0200.  */
1425 emit_move_insn (reg_fnaddr, fnaddr);
1426 emit_move_insn (temp, reg_fnaddr);
1427 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1428 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1429 emit_move_insn (reg_addr_mem, temp);
1430 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Word 3: the remaining high bits of FNADDR.  */
1431 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1432 emit_move_insn (reg_addr_mem, reg_fnaddr);
1435 /* Worker function for FUNCTION_VALUE. */
/* Return the RTX for a function's return value: the (possibly promoted)
   mode of VALTYPE in the fixed return-value register.  */
1438 xstormy16_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
1440 enum machine_mode mode;
1441 mode = TYPE_MODE (valtype);
1442 PROMOTE_MODE (mode, 0, valtype);
1443 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1446 /* A C compound statement that outputs the assembler code for a thunk function,
1447 used to implement C++ virtual function calls with multiple inheritance. The
1448 thunk acts as a wrapper around a virtual function, adjusting the implicit
1449 object parameter before handing control off to the real function.
1451 First, emit code to add the integer DELTA to the location that contains the
1452 incoming first argument. Assume that this argument contains a pointer, and
1453 is the one used to pass the `this' pointer in C++. This is the incoming
1454 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1455 addition must preserve the values of all other incoming arguments.
1457 After the addition, emit code to jump to FUNCTION, which is a
1458 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1459 the return address. Hence returning from FUNCTION will return to whoever
1460 called the current `thunk'.
1462 The effect must be as if @var{function} had been called directly
1463 with the adjusted first argument. This macro is responsible for
1464 emitting all of the code for a thunk function;
1465 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1468 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1469 extracted from it.) It might possibly be useful on some targets, but
1473 xstormy16_asm_output_mi_thunk (FILE *file,
1474 tree thunk_fndecl ATTRIBUTE_UNUSED,
1475 HOST_WIDE_INT delta,
1476 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
/* `this' is normally in the first argument register, unless a hidden
   struct-return pointer occupies it (handled just below).  */
1479 int regnum = FIRST_ARGUMENT_REGISTER;
1481 /* There might be a hidden first argument for a returned structure. */
1482 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
/* add <this-reg>,#delta  then a far jump to the target function.  */
1485 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1486 fputs ("\tjmpf ", file);
1487 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1491 /* Output constructors and destructors. Just like
1492 default_named_section_asm_out_* but don't set the sections writable. */
1493 #undef TARGET_ASM_CONSTRUCTOR
1494 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1495 #undef TARGET_ASM_DESTRUCTOR
1496 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
/* Emit SYMBOL into a .dtors (or priority-suffixed .dtors.NNNNN)
   section as a pointer-sized entry.  */
1499 xstormy16_asm_out_destructor (rtx symbol, int priority)
1501 const char *section = ".dtors";
1504 /* ??? This only works reliably with the GNU linker. */
1505 if (priority != DEFAULT_INIT_PRIORITY)
1507 sprintf (buf, ".dtors.%.5u",
1508 /* Invert the numbering so the linker puts us in the proper
1509 order; constructors are run from right to left, and the
1510 linker sorts in increasing order. */
1511 MAX_INIT_PRIORITY - priority);
/* Flags 0: section is deliberately NOT writable (see comment above).  */
1515 named_section_flags (section, 0);
1516 assemble_align (POINTER_SIZE);
1517 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Emit SYMBOL into a .ctors (or priority-suffixed .ctors.NNNNN)
   section; mirror image of xstormy16_asm_out_destructor.  */
1521 xstormy16_asm_out_constructor (rtx symbol, int priority)
1523 const char *section = ".ctors";
1526 /* ??? This only works reliably with the GNU linker. */
1527 if (priority != DEFAULT_INIT_PRIORITY)
1529 sprintf (buf, ".ctors.%.5u",
1530 /* Invert the numbering so the linker puts us in the proper
1531 order; constructors are run from right to left, and the
1532 linker sorts in increasing order. */
1533 MAX_INIT_PRIORITY - priority);
/* Flags 0: section is deliberately NOT writable.  */
1537 named_section_flags (section, 0);
1538 assemble_align (POINTER_SIZE);
1539 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1542 /* Print a memory address as an operand to reference that memory location. */
1544 xstormy16_print_operand_address (FILE *file, rtx address)
1546 HOST_WIDE_INT offset;
1547 int pre_dec, post_inc;
1549 /* There are a few easy cases. */
/* A bare constant address: print it masked to 16 bits.  */
1550 if (GET_CODE (address) == CONST_INT)
1552 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1556 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1558 output_addr_const (file, address);
1562 /* Otherwise, it's hopefully something of the form
1563 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))
/* Peel off a constant offset from a PLUS, leaving the base.  */
1566 if (GET_CODE (address) == PLUS)
1568 if (GET_CODE (XEXP (address, 1)) != CONST_INT)
1570 offset = INTVAL (XEXP (address, 1));
1571 address = XEXP (address, 0);
/* Strip an auto-modify wrapper; the flags drive the '-'/'+' syntax
   (printed in lines elided from this extract).  */
1576 pre_dec = (GET_CODE (address) == PRE_DEC);
1577 post_inc = (GET_CODE (address) == POST_INC);
1578 if (pre_dec || post_inc)
1579 address = XEXP (address, 0);
1581 if (GET_CODE (address) != REG)
1587 fputs (reg_names [REGNO (address)], file);
1591 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1595 /* Print an operand to an assembler instruction. */
/* CODE is the single-letter operand modifier from the insn template
   ('B'/'b', 'C', 'o'/'O', etc. — cases partly elided in this extract).  */
1597 xstormy16_print_operand (FILE *file, rtx x, int code)
1602 /* There is either one bit set, or one bit clear, in X.
1603 Print it preceded by '#'. */
1605 HOST_WIDE_INT xx = 1;
1608 if (GET_CODE (x) == CONST_INT)
1611 output_operand_lossage ("`B' operand is not constant");
/* Try the set-bit interpretation first, then the clear-bit one.  */
1613 l = exact_log2 (xx);
1615 l = exact_log2 (~xx);
1617 output_operand_lossage ("`B' operand has multiple bits set");
1619 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1624 /* Print the symbol without a surrounding @fptr(). */
1625 if (GET_CODE (x) == SYMBOL_REF)
1626 assemble_name (file, XSTR (x, 0));
1627 else if (GET_CODE (x) == LABEL_REF)
1628 output_asm_label (x);
1630 xstormy16_print_operand_address (file, x);
1635 /* Print the immediate operand less one, preceded by '#'.
1636 For 'O', negate it first. */
1638 HOST_WIDE_INT xx = 0;
1640 if (GET_CODE (x) == CONST_INT)
1643 output_operand_lossage ("`o' operand is not constant");
1648 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1653 /* Handled below. */
1657 output_operand_lossage ("xstormy16_print_operand: unknown code");
/* No (or handled) modifier: print the operand by its RTX class.  */
1661 switch (GET_CODE (x))
1664 fputs (reg_names [REGNO (x)], file);
1668 xstormy16_print_operand_address (file, XEXP (x, 0));
1672 /* Some kind of constant or label; an immediate operand,
1673 so prefix it with '#' for the assembler. */
1674 fputs (IMMEDIATE_PREFIX, file);
1675 output_addr_const (file, x);
1683 /* Expander for the `casesi' pattern.
1684 INDEX is the index of the switch statement.
1685 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1686 to the first table entry.
1687 RANGE is the number of table entries.
1688 TABLE is an ADDR_VEC that is the jump table.
1689 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1690 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
1694 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1695 rtx table, rtx default_label)
1697 HOST_WIDE_INT range_i = INTVAL (range);
1700 /* This code uses 'br', so it can deal only with tables of size up to
1702 if (range_i >= 8192)
1703 sorry ("switch statement of size %lu entries too large",
1704 (unsigned long) range_i);
/* Rebase INDEX to zero and branch to the default for out-of-range.  */
1706 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1708 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
/* Scale the HImode index by 4 (each table entry is a 4-byte jmpf)
   and dispatch through the PC-relative tablejump pattern.  */
1710 int_index = gen_lowpart_common (HImode, index);
1711 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1712 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1715 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1716 instructions, without label or alignment or any other special
1717 constructs. We know that the previous instruction will be the
1718 `tablejump_pcrel' output above.
1720 TODO: it might be nice to output 'br' instructions if they could
1724 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
/* Keep the table in the current function's text section.  */
1728 function_section (current_function_decl);
1730 vlen = XVECLEN (table, 0);
1731 for (idx = 0; idx < vlen; idx++)
1733 fputs ("\tjmpf ", file);
1734 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1740 /* Expander for the `call' patterns.
1741 INDEX is the index of the switch statement.
1742 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1743 to the first table entry.
1744 RANGE is the number of table entries.
1745 TABLE is an ADDR_VEC that is the jump table.
1746 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1747 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
/* NOTE(review): the comment above looks copied from the casesi expander;
   actual parameters are RETVAL (value destination or NULL), DEST (the
   call target MEM) and COUNTER.  */
1751 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1754 enum machine_mode mode;
1756 if (GET_CODE (dest) != MEM)
1758 dest = XEXP (dest, 0);
/* Non-constant targets must live in a register for the call pattern.  */
1760 if (! CONSTANT_P (dest)
1761 && GET_CODE (dest) != REG)
1762 dest = force_reg (Pmode, dest);
1767 mode = GET_MODE (retval);
1769 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
/* Wrap the CALL in a SET when a return value is wanted.  */
1772 call = gen_rtx_SET (VOIDmode, retval, call);
1774 if (! CONSTANT_P (dest))
1776 temp = gen_reg_rtx (HImode);
1777 emit_move_insn (temp, const0_rtx);
/* Indirect calls carry an extra USE of TEMP in a PARALLEL —
   presumably required by the md's call patterns; confirm there.  */
1782 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1783 gen_rtx_USE (VOIDmode, temp)));
1784 emit_call_insn (call);
1787 /* Expanders for multiword computational operations. */
1789 /* Expander for arithmetic operations; emit insns to compute
1791 (set DEST (CODE:MODE SRC0 SRC1))
1793 using CARRY as a temporary. When CODE is COMPARE, a branch
1794 template is generated (this saves duplicating code in
1795 xstormy16_split_cbranch). */
1798 xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
1799 rtx dest, rtx src0, rtx src1, rtx carry)
1801 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1806 emit_move_insn (src0, const0_rtx);
/* Process the multiword value one machine word at a time, low word
   first, propagating the carry between words.  */
1808 for (i = 0; i < num_words; i++)
1810 rtx w_src0, w_src1, w_dest;
1813 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1814 i * UNITS_PER_WORD);
1815 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1816 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
/* Adding a zero word still needs the carry chain, hence the
   addchi4/addchi5 patterns rather than a plain move.  */
1822 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1826 insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
1828 insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
/* For COMPARE, the last word becomes a combined subtract-and-branch
   PARALLEL so xstormy16_split_cbranch need not rebuild it.  */
1834 if (code == COMPARE && i == num_words - 1)
1836 rtx branch, sub, clobber, sub_1;
1838 sub_1 = gen_rtx_MINUS (HImode, w_src0,
1839 gen_rtx_ZERO_EXTEND (HImode, carry));
1840 sub = gen_rtx_SET (VOIDmode, w_dest,
1841 gen_rtx_MINUS (HImode, sub_1, w_src1));
1842 clobber = gen_rtx_CLOBBER (VOIDmode, carry);
1843 branch = gen_rtx_SET (VOIDmode, pc_rtx,
1844 gen_rtx_IF_THEN_ELSE (VOIDmode,
1850 insn = gen_rtx_PARALLEL (VOIDmode,
1851 gen_rtvec (3, branch, sub, clobber));
1855 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1858 insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
1860 insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
/* Logical ops: the identity constant (-1 for AND, 0 otherwise) makes
   the word operation a no-op or a simple NOT.  */
1866 if (GET_CODE (w_src1) == CONST_INT
1867 && INTVAL (w_src1) == -(code == AND))
1870 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
1875 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
1886 /* If we emit nothing, try_split() will think we failed. So emit
1887 something that does nothing and can be optimized away. */
1895 shift_operator (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1897 enum rtx_code code = GET_CODE (op);
1899 return (code == ASHIFT
1901 || code == LSHIFTRT);
1904 /* The shift operations are split at output time for constant values;
1905 variable-width shifts get handed off to a library routine.
1907 Generate an output string to do (set X (CODE:MODE X SIZE_R))
1908 SIZE_R will be a CONST_INT, X will be a hard register. */
1911 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
1912 rtx x, rtx size_r, rtx temp)
1915 const char *r0, *r1, *rt;
1918 if (GET_CODE (size_r) != CONST_INT
1919 || GET_CODE (x) != REG
/* Reduce the count modulo the mode's bit width.  */
1922 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
/* r0 = low word register, r1 = high word register of the pair.  */
1927 r0 = reg_names [REGNO (x)];
1928 r1 = reg_names [REGNO (x) + 1];
1930 /* For shifts of size 1, we can use the rotate instructions. */
1936 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
1939 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
1942 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
1950 /* For large shifts, there are easy special cases. */
/* Shift by exactly 16: a word move plus zero/sign fill.  */
1956 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
1959 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
1962 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
/* Shift by more than 16: word move, fill, then shift the remainder.  */
1974 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
1975 r1, r0, r0, r1, (int) size - 16);
1978 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
1979 r0, r1, r1, r0, (int) size - 16);
1982 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
1983 r0, r1, r1, r0, (int) size - 16);
1991 /* For the rest, we have to do more work. In particular, we
1992 need a temporary. */
1993 rt = reg_names [REGNO (temp)];
/* General case: save the bits that cross the word boundary in TEMP,
   shift both words, then OR the saved bits back in.  */
1998 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
1999 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16-size),
2004 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2005 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2010 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2011 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2020 /* Attribute handling. */
2022 /* Return nonzero if the function is an interrupt function. */
2024 xstormy16_interrupt_function_p (void)
2028 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2029 any functions are declared, which is demonstrably wrong, but
2030 it is worked around here. FIXME. */
/* Check the function *type* for the "interrupt" attribute (the
   attribute is type-required; see xstormy16_attribute_table).  */
2034 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2035 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2038 #undef TARGET_ATTRIBUTE_TABLE
2039 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2040 static tree xstormy16_handle_interrupt_attribute
2041 (tree *, tree, tree, int, bool *);
/* Machine attributes for this target: only "interrupt", which takes no
   arguments and attaches to function types.  */
2043 static const struct attribute_spec xstormy16_attribute_table[] =
2045 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2046 { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
2047 { NULL, 0, 0, false, false, false, NULL }
2050 /* Handle an "interrupt" attribute;
2051 arguments as in struct attribute_spec.handler. */
2053 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2054 tree args ATTRIBUTE_UNUSED,
2055 int flags ATTRIBUTE_UNUSED,
/* Reject the attribute (with a warning, not an error) when it is not
   applied to a function type, and suppress adding it.  */
2058 if (TREE_CODE (*node) != FUNCTION_TYPE)
2060 warning ("`%s' attribute only applies to functions",
2061 IDENTIFIER_POINTER (name));
2062 *no_add_attrs = true;
2068 #undef TARGET_INIT_BUILTINS
2069 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2070 #undef TARGET_EXPAND_BUILTIN
2071 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
/* Table of machine builtins: div/mod of a long by a short.  sdivlh and
   udivlh each produce both quotient and remainder; the mod variants
   reuse the same insn with the output operands swapped ("rt" vs "tr").  */
2076 const char *arg_ops; /* 0..9, t for temp register, r for return value */
2077 const char *arg_types; /* s=short,l=long, upper case for unsigned */
2079 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2080 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2081 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2082 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
/* Register the s16builtins table with the front end: build each
   builtin's function type from its arg_types string and declare it.  */
2087 xstormy16_init_builtins (void)
2089 tree args, ret_type, arg;
2092 ret_type = void_type_node;
2094 for (i=0; s16builtins[i].name; i++)
2096 args = void_list_node;
/* Walk the type string right-to-left so tree_cons builds the argument
   list in declaration order.  */
2097 for (a=strlen (s16builtins[i].arg_types)-1; a>=0; a--)
2099 switch (s16builtins[i].arg_types[a])
2101 case 's': arg = short_integer_type_node; break;
2102 case 'S': arg = short_unsigned_type_node; break;
2103 case 'l': arg = long_integer_type_node; break;
2104 case 'L': arg = long_unsigned_type_node; break;
2110 args = tree_cons (NULL_TREE, arg, args);
/* The builtin's function code is its index into s16builtins.  */
2112 lang_hooks.builtin_function (s16builtins[i].name,
2113 build_function_type (ret_type, args),
2114 i, BUILT_IN_MD, NULL, NULL);
/* Expand a call to one of the machine builtins declared above.  EXP is
   the CALL_EXPR; map its arguments onto the md pattern's operands as
   described by the builtin's arg_ops string ('0'-'9' = caller argument,
   't' = scratch register, 'r' = return value).  */
2119 xstormy16_expand_builtin(tree exp, rtx target,
2120 rtx subtarget ATTRIBUTE_UNUSED,
2121 enum machine_mode mode ATTRIBUTE_UNUSED,
2122 int ignore ATTRIBUTE_UNUSED)
2124 rtx op[10], args[10], pat, copyto[10], retval = 0;
2125 tree fndecl, argtree;
2128 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2129 argtree = TREE_OPERAND (exp, 1);
/* DECL_FUNCTION_CODE is the index into s16builtins (set at init).  */
2130 i = DECL_FUNCTION_CODE (fndecl);
2131 code = s16builtins[i].md_code;
2133 for (a = 0; a < 10 && argtree; a++)
2135 args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
2136 argtree = TREE_CHAIN (argtree);
/* Build op[] per the arg_ops string, honoring each operand's mode and
   constraint from the insn_data table.  */
2139 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2141 char ao = s16builtins[i].arg_ops[o];
2142 char c = insn_data[code].operand[o].constraint[0];
2147 omode = insn_data[code].operand[o].mode;
2149 op[o] = target ? target : gen_reg_rtx (omode);
2151 op[o] = gen_reg_rtx (omode);
2153 op[o] = args[(int) hex_value (ao)];
/* If the operand fails its predicate, either substitute a fresh output
   register (for '=' / '+' constraints) or copy the input into one.  */
2155 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2157 if (c == '+' || c == '=')
2160 op[o] = gen_reg_rtx (omode);
2163 op[o] = copy_to_mode_reg (omode, op[o]);
2170 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2171 op[5], op[6], op[7], op[8], op[9]);
/* Copy back any operands that were redirected through fresh registers,
   tracking which one holds the return value.  */
2174 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2177 emit_move_insn (copyto[o], op[o]);
2178 if (op[o] == retval)
2185 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* Return in memory when the size is variable (-1) or exceeds what the
   argument registers can carry.  */
2188 xstormy16_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2190 HOST_WIDE_INT size = int_size_in_bytes (type);
2191 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Target hook overrides, then the one definition of the target vector.
   Each #undef/#define pair replaces a default hook with the xstormy16
   implementation defined earlier in this file.  */
2194 #undef TARGET_ASM_ALIGNED_HI_OP
2195 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2196 #undef TARGET_ASM_ALIGNED_SI_OP
2197 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2199 #undef TARGET_ASM_OUTPUT_MI_THUNK
2200 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2201 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2202 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2204 #undef TARGET_RTX_COSTS
2205 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2206 #undef TARGET_ADDRESS_COST
2207 #define TARGET_ADDRESS_COST xstormy16_address_cost
2209 #undef TARGET_BUILD_BUILTIN_VA_LIST
2210 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2211 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2212 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_expand_builtin_va_arg
2214 #undef TARGET_PROMOTE_FUNCTION_ARGS
2215 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
2216 #undef TARGET_PROMOTE_FUNCTION_RETURN
2217 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
2218 #undef TARGET_PROMOTE_PROTOTYPES
2219 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
2221 #undef TARGET_RETURN_IN_MEMORY
2222 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
/* Instantiate the target vector from the macros defined above.  */
2224 struct gcc_target targetm = TARGET_INITIALIZER;