1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
26 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
32 #include "insn-attr.h"
44 #include "target-def.h"
/* Forward declarations for static helpers defined later in this file.
   PARAMS is the old GCC prototype macro for pre-ANSI compatibility.  */
47 static rtx emit_addhi3_postreload PARAMS ((rtx, rtx, rtx));
48 static void xstormy16_asm_out_constructor PARAMS ((rtx, int));
49 static void xstormy16_asm_out_destructor PARAMS ((rtx, int));
51 /* Define the information needed to generate branch and scc insns.  This is
52 stored from the compare operation.  The two operands of the most recent
   compare are stashed here and consumed by xstormy16_emit_cbranch.  */
53 struct rtx_def * xstormy16_compare_op0;
54 struct rtx_def * xstormy16_compare_op1;
56 /* Return 1 if this is a LT, GE, LTU, or GEU operator.  Used as an RTL
   predicate: OP matches when its code is one of the four "inequality"
   comparisons and its mode agrees with MODE (VOIDmode matches any mode).
   NOTE(review): this excerpt elides the return type, OP's declaration and
   the braces of this definition — confirm against the full file.  */
59 xstormy16_ineqsi_operator (op, mode)
61 enum machine_mode mode;
63 enum rtx_code code = GET_CODE (op);
65 return ((mode == VOIDmode || GET_MODE (op) == mode)
66 && (code == LT || code == GE || code == LTU || code == GEU));
69 /* Return 1 if this is an EQ or NE operator.  MODE of VOIDmode matches
   any mode, mirroring xstormy16_ineqsi_operator above.
   NOTE(review): intervening lines are elided in this excerpt.  */
72 equality_operator (op, mode)
74 enum machine_mode mode;
76 return ((mode == VOIDmode || GET_MODE (op) == mode)
77 && (GET_CODE (op) == EQ || GET_CODE (op) == NE));
80 /* Return 1 if this is a comparison operator but not an EQ or NE operator.
   Defined in terms of the generic comparison_operator predicate minus
   equality_operator above.  */
83 inequality_operator (op, mode)
85 enum machine_mode mode;
87 return comparison_operator (op, mode) && ! equality_operator (op, mode);
90 /* Branches are handled as follows:
92 1. HImode compare-and-branches. The machine supports these
93 natively, so the appropriate pattern is emitted directly.
95 2. SImode EQ and NE. These are emitted as pairs of HImode
98 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
99 of a SImode subtract followed by a branch (not a compare-and-branch),
105 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
113 /* Emit a branch of kind CODE to location LOC.  The operands being
   compared come from the file-scope xstormy16_compare_op0/op1 set by the
   compare expander.  Handles HImode natively; SImode comparisons are
   decomposed into word-sized pieces or a subtract-then-branch sequence
   (see the strategy comment above this function).
   NOTE(review): several lines of this body are elided in this excerpt;
   the recursive calls below reuse/overwrite the compare-op globals.  */
116 xstormy16_emit_cbranch (code, loc)
120 rtx op0 = xstormy16_compare_op0;
121 rtx op1 = xstormy16_compare_op1;
122 rtx condition_rtx, loc_ref, branch, cy_clobber;
124 enum machine_mode mode;
126 mode = GET_MODE (op0);
127 if (mode != HImode && mode != SImode)
    /* Case 4: SImode GT/LE/GTU/LEU — rewritten as two branches using the
       strict forms (LT/LTU then NE/EQ).  */
131 && (code == GT || code == LE || code == GTU || code == LEU))
133 int unsigned_p = (code == GTU || code == LEU);
134 int gt_p = (code == GT || code == GTU);
138 lab = gen_label_rtx ();
139 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
140 /* This should be generated as a comparison against the temporary
141 created by the previous insn, but reload can't handle that.  */
142 xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
    /* Case 2: SImode EQ/NE against a nonzero operand — compare word by
       word, branching out as soon as a word differs.  */
147 else if (mode == SImode
148 && (code == NE || code == EQ)
149 && op1 != const0_rtx)
152 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
156 lab = gen_label_rtx ();
158 for (i = 0; i < num_words - 1; i++)
160 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
162 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
164 xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
    /* Final (most-significant) word decides the overall result.  */
166 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
168 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
170 xstormy16_emit_cbranch (code, loc);
177 /* We can't allow reload to try to generate any reload after a branch,
178 so when some register must match we must make the temporary ourselves.  */
182 tmp = gen_reg_rtx (mode);
183 emit_move_insn (tmp, op0);
    /* Build the compare-and-branch PARALLEL: the branch SET plus whatever
       clobbers/side effects the chosen strategy needs.  */
187 condition_rtx = gen_rtx (code, mode, op0, op1);
188 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
189 branch = gen_rtx_SET (VOIDmode, pc_rtx,
190 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
    /* Carry flag is clobbered by the compare on this chip.  */
193 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));
196 vec = gen_rtvec (2, branch, cy_clobber);
197 else if (code == NE || code == EQ)
198 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
    /* Case 3: SImode LT/GE/LTU/GEU — branch on the result of a subtract,
       which either sets op0 or just clobbers it.  */
203 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
205 sub = gen_rtx_CLOBBER (SImode, op0);
207 vec = gen_rtvec (3, branch, sub, cy_clobber);
210 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
213 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
214 the arithmetic operation.  Most of the work is done by
215 xstormy16_expand_arith.  The branch at the end of the generated
   sequence is then patched to use COMPARISON's code and LABEL.
   NOTE(review): body lines are elided in this excerpt; DEST/CARRY are
   forwarded to xstormy16_expand_arith unexamined here.  */
218 xstormy16_split_cbranch (mode, label, comparison, dest, carry)
219 enum machine_mode mode;
225 rtx op0 = XEXP (comparison, 0);
226 rtx op1 = XEXP (comparison, 1);
    /* Emit the COMPARE arithmetic into a sequence, then rewrite the last
       insn of that sequence into the desired conditional branch.  */
231 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
232 seq = gen_sequence ();
234 compare = SET_SRC (XVECEXP (PATTERN (XVECEXP (seq, 0, XVECLEN (seq, 0) - 1)),
236 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
237 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
242 /* Return the string to output a conditional branch to LABEL, which is
243 the operand number of the label.
245 OP is the conditional expression, or NULL for branch-always.
247 REVERSED is non-zero if we should reverse the sense of the comparison.
/* Produce the assembler text for an HImode conditional branch.  OP is the
   comparison (or NULL for branch-always), LABEL is the label operand text,
   REVERSED flips the sense, INSN supplies the length attribute used to
   decide whether a long-branch (branch-around + jmpf) form is needed.
   Returns a pointer to a static buffer — not reentrant.
   NOTE(review): parts of this body are elided in this excerpt.  */
252 xstormy16_output_cbranch_hi (op, label, reversed, insn)
258 static char string[64];
259 int need_longbranch = (op != NULL_RTX
260 ? get_attr_length (insn) == 8
261 : get_attr_length (insn) == 4);
/* A long branch inverts the condition and jumps around a jmpf, so the
   effective reversal is the XOR of the two.  */
262 int really_reversed = reversed ^ need_longbranch;
264 const char *template;
265 const char *operands;
/* Unconditional branch: no condition code to compute.  */
274 sprintf (string, "%s %s", ccode, label);
278 code = GET_CODE (op);
/* Hardware compares want a register first operand; swap if needed.  */
280 if (GET_CODE (XEXP (op, 0)) != REG)
282 code = swap_condition (code);
288 /* Work out which way this really branches.  */
290 code = reverse_condition (code);
/* Map the RTL comparison code to the chip's condition mnemonic.  */
294 case EQ: ccode = "z"; break;
295 case NE: ccode = "nz"; break;
296 case GE: ccode = "ge"; break;
297 case LT: ccode = "lt"; break;
298 case GT: ccode = "gt"; break;
299 case LE: ccode = "le"; break;
300 case GEU: ccode = "nc"; break;
301 case LTU: ccode = "c"; break;
302 case GTU: ccode = "hi"; break;
303 case LEU: ccode = "ls"; break;
/* Long form: branch (inverted) over a far jump to the real target.  */
310 template = "b%s %s,.+8 | jmpf %s";
312 template = "b%s %s,%s";
313 sprintf (string, template, ccode, operands, label);
318 /* Return the string to output a conditional branch to LABEL, which is
319 the operand number of the label, but suitable for the tail of a
322 OP is the conditional expression (OP is never NULL_RTX).
324 REVERSED is non-zero if we should reverse the sense of the comparison.
/* Produce the assembler text for the branch that ends a split SImode
   compare sequence.  OP is never NULL here; a preparatory instruction
   (an `or' for EQ/NE, an `sbc' for the ordered codes) is emitted in
   front of the branch.  Returns a pointer to a static buffer.
   NOTE(review): parts of this body are elided in this excerpt.  */
329 xstormy16_output_cbranch_si (op, label, reversed, insn)
335 static char string[64];
336 int need_longbranch = get_attr_length (insn) >= 8;
/* As in the HI variant, a long branch inverts the condition.  */
337 int really_reversed = reversed ^ need_longbranch;
339 const char *template;
343 code = GET_CODE (op);
345 /* Work out which way this really branches.  */
347 code = reverse_condition (code);
351 case EQ: ccode = "z"; break;
352 case NE: ccode = "nz"; break;
353 case GE: ccode = "ge"; break;
354 case LT: ccode = "lt"; break;
355 case GEU: ccode = "nc"; break;
356 case LTU: ccode = "c"; break;
358 /* The missing codes above should never be generated.  */
369 if (GET_CODE (XEXP (op, 0)) != REG)
/* EQ/NE: or the two halves together so the Z flag reflects the full
   32-bit value.  */
372 regnum = REGNO (XEXP (op, 0));
373 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
377 case GE: case LT: case GEU: case LTU:
/* Ordered codes: finish the 32-bit subtract with subtract-with-carry
   on the high words so the flags are correct.  */
378 strcpy (prevop, "sbc %2,%3");
386 template = "%s | b%s .+6 | jmpf %s";
388 template = "%s | b%s %s";
389 sprintf (string, template, prevop, ccode, label);
394 /* Many machines have some registers that cannot be copied directly to or from
395 memory or even from other types of registers. An example is the `MQ'
396 register, which on most machines, can only be copied to or from general
397 registers, but not memory. Some machines allow copying all registers to and
398 from memory, but require a scratch register for stores to some memory
399 locations (e.g., those with symbolic address on the RT, and those with
400 certain symbolic address on the Sparc when compiling PIC). In some cases,
401 both an intermediate and a scratch register are required.
403 You should define these macros to indicate to the reload phase that it may
404 need to allocate at least one register for a reload in addition to the
405 register to contain the data. Specifically, if copying X to a register
406 CLASS in MODE requires an intermediate register, you should define
407 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
408 whose registers can be used as intermediate registers or scratch registers.
410 If copying a register CLASS in MODE to X requires an intermediate or scratch
411 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
412 largest register class required. If the requirements for input and output
413 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
414 instead of defining both macros identically.
416 The values returned by these macros are often `GENERAL_REGS'. Return
417 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
418 to or from a register of CLASS in MODE without requiring a scratch register.
419 Do not define this macro if it would always return `NO_REGS'.
421 If a scratch register is required (either with or without an intermediate
422 register), you should define patterns for `reload_inM' or `reload_outM', as
423 required.  These patterns, which will normally be implemented with a
424 `define_expand', should be similar to the `movM' patterns, except that
425 operand 2 is the scratch register.
427 Define constraints for the reload register and scratch register that contain
428 a single register class. If the original reload register (whose class is
429 CLASS) can meet the constraint given in the pattern, the value returned by
430 these macros is used for the class of the scratch register. Otherwise, two
431 additional reload registers are required. Their classes are obtained from
432 the constraints in the insn pattern.
434 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
435 either be in a hard register or in memory. Use `true_regnum' to find out;
436 it will return -1 if the pseudo is in memory and the hard register number if
439 These macros should not be used in the case where a particular class of
440 registers can only be copied to memory and not to another class of
441 registers. In that case, secondary reload registers are not needed and
442 would not be helpful. Instead, a stack location must be used to perform the
443 copy and the `movM' pattern should use memory as an intermediate storage.
444 This case often occurs between floating-point and general registers. */
/* Implementation of SECONDARY_RELOAD_CLASS (see the long comment above).
   Returns the class of scratch register needed to copy X to/from a
   register of CLASS in MODE, or NO_REGS if none is needed.
   NOTE(review): the return statements' targets are elided in this
   excerpt — confirm the returned classes against the full file.  */
447 xstormy16_secondary_reload_class (class, mode, x)
448 enum reg_class class;
449 enum machine_mode mode;
452 /* This chip has the interesting property that only the first eight
453 registers can be moved to/from memory.  */
454 if ((GET_CODE (x) == MEM
455 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
456 && (true_regnum (x) == -1
457 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
458 && ! reg_class_subset_p (class, EIGHT_REGS))
461 /* When reloading a PLUS, the carry register will be required
462 unless the inc or dec instructions can be used.  */
463 if (xstormy16_carry_plus_operand (x, mode))
469 /* Recognise a PLUS that needs the carry register. */
/* Recognise a PLUS that needs the carry register: any constant addend
   outside [-4, 4], since only small increments/decrements can be done
   with inc/dec and so avoid the carry.  */
471 xstormy16_carry_plus_operand (x, mode)
473 enum machine_mode mode ATTRIBUTE_UNUSED;
475 return (GET_CODE (x) == PLUS
476 && GET_CODE (XEXP (x, 1)) == CONST_INT
477 && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4));
/* Implementation of PREFERRED_RELOAD_CLASS.  Narrows GENERAL_REGS for
   memory operands (presumably to EIGHT_REGS, since only r0-r7 can access
   memory — the returned class is elided in this excerpt; confirm).  */
482 xstormy16_preferred_reload_class (x, class)
483 enum reg_class class;
486 if (class == GENERAL_REGS
487 && GET_CODE (x) == MEM)
/* X + OFFSET is a valid register-relative displacement: a CONST_INT in
   the signed 12-bit range [-2048, 2047].  */
493 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
494 (GET_CODE (X) == CONST_INT \
495 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
/* X + OFFSET is a valid absolute address: the low 0x100 bytes or the
   window [0x7F00, 0x8000) — the ranges the chip can address directly.  */
497 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
498 (GET_CODE (X) == CONST_INT \
499 && INTVAL (X) + (OFFSET) >= 0 \
500 && INTVAL (X) + (OFFSET) < 0x8000 \
501 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
/* Implementation of GO_IF_LEGITIMATE_ADDRESS: accept absolute constants,
   base-register plus small displacement, post-increment/pre-decrement,
   and plain base registers (pseudos allowed only when !STRICT).
   NOTE(review): the success/failure returns are elided in this excerpt.  */
504 xstormy16_legitimate_address_p (mode, x, strict)
505 enum machine_mode mode ATTRIBUTE_UNUSED;
509 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
512 if (GET_CODE (x) == PLUS
513 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
516 if (GET_CODE (x) == POST_INC
517 || GET_CODE (x) == PRE_DEC)
520 if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
521 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
527 /* Return nonzero if memory address X (an RTX) can have different
528 meanings depending on the machine mode of the memory reference it
529 is used for or if the address is valid for some modes but not
532 Autoincrement and autodecrement addresses typically have mode-dependent
533 effects because the amount of the increment or decrement is the size of the
534 operand being addressed. Some machines have other mode-dependent addresses.
535 Many RISC machines have no mode-dependent addresses.
537 You may assume that ADDR is a valid address for the machine.
539 On this chip, this is true if the address is valid with an offset
540 of 0 but not of 6, because in that case it cannot be used as an
541 address for DImode or DFmode, or if the address is a post-increment
542 or pre-decrement address. */
/* Implementation of the mode-dependent-address predicate described in the
   comment above: an address is mode-dependent if it is valid at offset 0
   but not at offset 6 (so wide modes like DImode/DFmode cannot use it),
   or if it autoincrements/autodecrements.
   NOTE(review): the return statements are elided in this excerpt.  */
544 xstormy16_mode_dependent_address_p (x)
547 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
548 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
551 if (GET_CODE (x) == PLUS
552 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
553 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
556 if (GET_CODE (x) == PLUS)
559 if (GET_CODE (x) == POST_INC
560 || GET_CODE (x) == PRE_DEC)
566 /* A C expression that defines the optional machine-dependent constraint
567 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
568 types of operands, usually memory references, for the target machine.
569 Normally this macro will not be defined. If it is required for a particular
570 target machine, it should return 1 if VALUE corresponds to the operand type
571 represented by the constraint letter C. If C is not defined as an extra
572 constraint, the value returned should be 0 regardless of VALUE. */
/* Implementation of EXTRA_CONSTRAINT (see the comment above): decide
   whether X satisfies the machine-specific constraint letter C.
   NOTE(review): the switch statement and some cases are elided in this
   excerpt.  */
574 xstormy16_extra_constraint_p (x, c)
580 /* 'Q' is for pushes: a post-increment store through the stack pointer.  */
582 return (GET_CODE (x) == MEM
583 && GET_CODE (XEXP (x, 0)) == POST_INC
584 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
586 /* 'R' is for pops: a pre-decrement load through the stack pointer.  */
588 return (GET_CODE (x) == MEM
589 && GET_CODE (XEXP (x, 0)) == PRE_DEC
590 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
592 /* 'S' is for immediate memory addresses.  */
594 return (GET_CODE (x) == MEM
595 && GET_CODE (XEXP (x, 0)) == CONST_INT
596 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
600 /* Not implemented yet.  */
603 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
604 for allocating a scratch register for 32-bit shifts.  */
606 return (GET_CODE (x) == CONST_INT
607 && (INTVAL (x) < 2 || INTVAL (x) > 15));
/* Predicate: a memory operand whose address is not a base+offset PLUS,
   i.e. one that can be encoded in the short addressing forms.  */
615 short_memory_operand (x, mode)
617 enum machine_mode mode;
619 if (! memory_operand (x, mode))
621 return (GET_CODE (XEXP (x, 0)) != PLUS);
624 /* Splitter for the 'move' patterns, for modes not directly implemented
625 by hardware. Emit insns to copy a value of mode MODE from SRC to
628 This function is only called when reload_completed.
/* Split a multi-word move of mode MODE from SRC to DEST into word-sized
   moves, choosing a copy direction that is safe for overlapping
   register copies and for autoincrement/autodecrement memory operands.
   Only called after reload (see checks below).
   NOTE(review): a number of lines of this body are elided in this
   excerpt — in particular the abort()s and direction assignments.  */
632 xstormy16_split_move (mode, dest, src)
633 enum machine_mode mode;
637 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
638 int direction, end, i;
639 int src_modifies = 0;
640 int dest_modifies = 0;
641 int src_volatile = 0;
642 int dest_volatile = 0;
644 rtx auto_inc_reg_rtx = NULL_RTX;
646 /* Check initial conditions.  */
647 if (! reload_completed
648 || mode == QImode || mode == HImode
649 || ! nonimmediate_operand (dest, mode)
650 || ! general_operand (src, mode))
653 /* This case is not supported below, and shouldn't be generated.  */
654 if (GET_CODE (dest) == MEM
655 && GET_CODE (src) == MEM)
658 /* This case is very very bad after reload, so trap it now.  */
659 if (GET_CODE (dest) == SUBREG
660 || GET_CODE (src) == SUBREG)
663 /* The general idea is to copy by words, offsetting the source and
664 destination.  Normally the least-significant word will be copied
665 first, but for pre-dec operations it's better to copy the
666 most-significant word first.  Only one operand can be a pre-dec
669 It's also possible that the copy overlaps so that the direction
673 if (GET_CODE (dest) == MEM)
675 mem_operand = XEXP (dest, 0);
676 dest_modifies = side_effects_p (mem_operand);
677 if (auto_inc_p (mem_operand))
678 auto_inc_reg_rtx = XEXP (mem_operand, 0);
679 dest_volatile = MEM_VOLATILE_P (dest);
    /* Strip volatility from the copy we will slice into words; the
       REG_INC note below preserves the autoinc bookkeeping.  */
682 dest = copy_rtx (dest);
683 MEM_VOLATILE_P (dest) = 0;
686 else if (GET_CODE (src) == MEM)
688 mem_operand = XEXP (src, 0);
689 src_modifies = side_effects_p (mem_operand);
690 if (auto_inc_p (mem_operand))
691 auto_inc_reg_rtx = XEXP (mem_operand, 0);
692 src_volatile = MEM_VOLATILE_P (src);
695 src = copy_rtx (src);
696 MEM_VOLATILE_P (src) = 0;
700 mem_operand = NULL_RTX;
/* Decide the copy direction so no word is clobbered before it is read.  */
702 if (mem_operand == NULL_RTX)
704 if (GET_CODE (src) == REG
705 && GET_CODE (dest) == REG
706 && reg_overlap_mentioned_p (dest, src)
707 && REGNO (dest) > REGNO (src))
710 else if (GET_CODE (mem_operand) == PRE_DEC
711 || (GET_CODE (mem_operand) == PLUS
712 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
714 else if (GET_CODE (src) == MEM
715 && reg_overlap_mentioned_p (dest, src))
718 if (GET_CODE (dest) != REG)
720 regno = REGNO (dest);
722 if (! refers_to_regno_p (regno, regno + num_words, mem_operand, 0))
725 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
727 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
731 /* This means something like
732 (set (reg:DI r0) (mem:DI (reg:HI r1)))
733 which we'd need to support by doing the set of the second word
/* Emit one word-sized SET per word, in the chosen direction.  */
738 end = direction < 0 ? -1 : num_words;
739 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
741 rtx w_src, w_dest, insn;
744 w_src = gen_rtx_MEM (word_mode, mem_operand);
746 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
748 MEM_VOLATILE_P (w_src) = 1;
750 w_dest = gen_rtx_MEM (word_mode, mem_operand);
752 w_dest = simplify_gen_subreg (word_mode, dest, mode,
755 MEM_VOLATILE_P (w_dest) = 1;
757 /* The simplify_subreg calls must always be able to simplify.  */
758 if (GET_CODE (w_src) == SUBREG
759 || GET_CODE (w_dest) == SUBREG)
762 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
/* Record the register autoincrement so later passes know about it.  */
763 if (auto_inc_reg_rtx)
764 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
770 /* Expander for the 'move' patterns. Emit insns to copy a value of
771 mode MODE from SRC to DEST. */
/* Expander for the 'move' patterns: emit insns to copy a value of mode
   MODE from SRC to DEST, legitimizing immediate-to-memory moves and
   splitting multi-word moves immediately rather than emitting something
   the splitter would have to take apart later.  */
774 xstormy16_expand_move (mode, dest, src)
775 enum machine_mode mode;
779 /* There are only limited immediate-to-memory move instructions.  */
780 if (! reload_in_progress
781 && ! reload_completed
782 && GET_CODE (dest) == MEM
783 && (GET_CODE (XEXP (dest, 0)) != CONST_INT
784 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
785 && GET_CODE (src) != REG
786 && GET_CODE (src) != SUBREG)
787 src = copy_to_mode_reg (mode, src);
789 /* Don't emit something we would immediately split.  */
791 && mode != HImode && mode != QImode)
793 xstormy16_split_move (mode, dest, src);
797 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
803 The stack is laid out as follows:
807 Register save area (up to 4 words)
808 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
810 AP-> Return address (two words)
811 9th procedure parameter word
812 10th procedure parameter word
814 last procedure parameter word
816 The frame pointer location is tuned to make it most likely that all
817 parameters and local variables can be accessed using a load-indexed
820 /* A structure to describe the layout. */
821 struct xstormy16_stack_layout
823 /* Size of the topmost three items on the stack.  */
825 int register_save_size;
826 int stdarg_save_size;
827 /* Sum of the above items.  */
829 /* Various offsets.  */
830 int first_local_minus_ap;
835 /* Does REGNO need to be saved?  Yes if it is live and call-saved, or —
   in an interrupt function (IFUN) — if it is a non-fixed call-used
   register that is live or might be clobbered by a callee.  */
836 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
837 ((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM]) \
838 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
839 && (regs_ever_live[REGNUM] || ! current_function_is_leaf)))
841 /* Compute the stack layout. */
/* Compute the stack layout for the current function: sizes of the
   register-save, stdarg-save and locals areas, and the offsets relating
   the frame pointer, stack pointer and argument pointer.  The FP is
   placed so that as much as possible is reachable with the 12-bit
   load-indexed displacement (hence the 2048 checks).  */
842 struct xstormy16_stack_layout
843 xstormy16_compute_stack_layout ()
845 struct xstormy16_stack_layout layout;
847 const int ifun = xstormy16_interrupt_function_p ();
849 layout.locals_size = get_frame_size ();
851 layout.register_save_size = 0;
852 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
853 if (REG_NEEDS_SAVE (regno, ifun))
854 layout.register_save_size += UNITS_PER_WORD;
856 if (current_function_varargs || current_function_stdarg)
857 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
859 layout.stdarg_save_size = 0;
861 layout.frame_size = (layout.locals_size
862 + layout.register_save_size
863 + layout.stdarg_save_size);
865 if (current_function_args_size <= 2048 && current_function_args_size != -1)
867 if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
868 + current_function_args_size <= 2048)
869 layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
871 layout.fp_minus_ap = 2048 - current_function_args_size;
874 layout.fp_minus_ap = (layout.stdarg_save_size
875 + layout.register_save_size
876 + INCOMING_FRAME_SP_OFFSET);
877 layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
878 - layout.fp_minus_ap);
879 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
883 /* Determine how all the special registers get eliminated. */
885 /* Determine how all the special registers get eliminated: return the
   offset to add when replacing register FROM by register TO, derived
   from the computed stack layout.  */
885 xstormy16_initial_elimination_offset (from, to)
888 struct xstormy16_stack_layout layout;
891 layout = xstormy16_compute_stack_layout ();
893 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
894 result = layout.sp_minus_fp - layout.locals_size;
895 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
896 result = -layout.locals_size;
897 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
898 result = -layout.fp_minus_ap;
899 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
900 result = -(layout.sp_minus_fp + layout.fp_minus_ap);
/* Emit DEST = SRC0 + SRC1 in HImode with the explicit carry-register
   clobber (hard reg 16, BImode) that the post-reload addhi3 pattern
   requires; return the emitted insn.  */
908 emit_addhi3_postreload (dest, src0, src1)
913 rtx set, clobber, insn;
915 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
916 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
917 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
921 /* Called after register allocation to add any instructions needed for
922 the prologue. Using a prologue insn is favored compared to putting
923 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
924 since it allows the scheduler to intermix instructions with the
925 saves of the caller saved registers. In some cases, it might be
926 necessary to emit a barrier instruction as the last insn to prevent
929 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
930 so that the debug info generation code can handle them properly. */
/* Emit the prologue: save argument registers for stdarg, push the
   call-saved registers, allocate locals, and set up the frame pointer.
   All frame-related insns are flagged for the unwind/debug machinery.
   NOTE(review): some lines of this body are elided in this excerpt.  */
932 xstormy16_expand_prologue ()
934 struct xstormy16_stack_layout layout;
938 rtx mem_fake_push_rtx;
939 const int ifun = xstormy16_interrupt_function_p ();
/* A real push is a post-increment store through SP; the "fake" pre-inc
   form is presumably used for the REG_FRAME_RELATED_EXPR notes —
   confirm against the elided lines.  */
941 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
942 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
943 mem_fake_push_rtx = gen_rtx_PRE_INC (Pmode, stack_pointer_rtx);
944 mem_fake_push_rtx = gen_rtx_MEM (HImode, mem_fake_push_rtx);
946 layout = xstormy16_compute_stack_layout ();
948 /* Save the argument registers if necessary.  */
949 if (layout.stdarg_save_size)
950 for (regno = FIRST_ARGUMENT_REGISTER;
951 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
954 rtx reg = gen_rtx_REG (HImode, regno);
955 insn = emit_move_insn (mem_push_rtx, reg);
956 RTX_FRAME_RELATED_P (insn) = 1;
957 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
958 gen_rtx_SET (VOIDmode,
964 /* Push each of the registers to save.  */
965 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
966 if (REG_NEEDS_SAVE (regno, ifun))
968 rtx reg = gen_rtx_REG (HImode, regno);
969 insn = emit_move_insn (mem_push_rtx, reg);
970 RTX_FRAME_RELATED_P (insn) = 1;
971 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
972 gen_rtx_SET (VOIDmode,
978 /* It's just possible that the SP here might be what we need for
   the frame pointer; grab it before allocating locals if so.  */
980 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
982 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
983 RTX_FRAME_RELATED_P (insn) = 1;
986 /* Allocate space for local variables.  */
987 if (layout.locals_size)
989 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
990 GEN_INT (layout.locals_size));
991 RTX_FRAME_RELATED_P (insn) = 1;
994 /* Set up the frame pointer, if required.  */
995 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
997 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
998 RTX_FRAME_RELATED_P (insn) = 1;
999 if (layout.sp_minus_fp)
1001 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1002 hard_frame_pointer_rtx,
1003 GEN_INT (-layout.sp_minus_fp));
1004 RTX_FRAME_RELATED_P (insn) = 1;
1009 /* Do we need an epilogue at all?  True (no epilogue needed) when reload
   has run and the frame is empty.
   NOTE(review): the function header for this definition is elided in
   this excerpt — confirm its name and return type in the full file.  */
1013 return (reload_completed
1014 && xstormy16_compute_stack_layout ().frame_size == 0);
1017 /* Called after register allocation to add any instructions needed for
1018 the epilogue. Using a epilogue insn is favored compared to putting
1019 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1020 since it allows the scheduler to intermix instructions with the
1021 saves of the caller saved registers. In some cases, it might be
1022 necessary to emit a barrier instruction as the last insn to prevent
/* Emit the epilogue: deallocate locals, pop the call-saved registers in
   reverse order, drop the stdarg save area, and emit the appropriate
   return (interrupt vs. normal).
   NOTE(review): some lines of this body are elided in this excerpt.  */
1026 xstormy16_expand_epilogue ()
1028 struct xstormy16_stack_layout layout;
1031 const int ifun = xstormy16_interrupt_function_p ();
/* A pop is a pre-decrement load through the stack pointer.  */
1033 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1034 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1036 layout = xstormy16_compute_stack_layout ();
1038 /* Pop the stack for the locals.  */
1039 if (layout.locals_size)
1041 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1042 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1044 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1045 GEN_INT (- layout.locals_size));
1048 /* Restore any call-saved registers, in the reverse of push order.  */
1049 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1050 if (REG_NEEDS_SAVE (regno, ifun))
1051 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1053 /* Pop the stack for the stdarg save area.  */
1054 if (layout.stdarg_save_size)
1055 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1056 GEN_INT (- layout.stdarg_save_size));
/* Interrupt functions need the special iret-style return.  */
1060 emit_jump_insn (gen_return_internal_interrupt ());
1062 emit_jump_insn (gen_return_internal ());
/* Implementation of EPILOGUE_USES: after reload, a call-used register is
   considered used by the epilogue exactly when the prologue saved it
   (interrupt functions save call-used registers too).  */
1066 xstormy16_epilogue_uses (regno)
1069 if (reload_completed && call_used_regs[regno])
1071 const int ifun = xstormy16_interrupt_function_p ();
1072 return REG_NEEDS_SAVE (regno, ifun);
1077 /* Return an updated summarizer variable CUM to advance past an
1078 argument in the argument list. The values MODE, TYPE and NAMED
1079 describe that argument. Once this is done, the variable CUM is
1080 suitable for analyzing the *following* argument with
1081 `FUNCTION_ARG', etc.
1083 This function need not do anything if the argument in question was
1084 passed on the stack. The compiler knows how to track the amount of
1085 stack space used for arguments without any special help. However,
1086 it makes life easier for xstormy16_build_va_list if it does update
/* Advance CUM past an argument of the given MODE/TYPE (see the comment
   above).  An argument that would straddle the register/stack boundary
   is pushed entirely onto the stack, so CUM is bumped to the register
   limit first.  */
1089 xstormy16_function_arg_advance (cum, mode, type, named)
1090 CUMULATIVE_ARGS cum;
1091 enum machine_mode mode;
1093 int named ATTRIBUTE_UNUSED;
1095 /* If an argument would otherwise be passed partially in registers,
1096 and partially on the stack, the whole of it is passed on the
   stack instead.  */
1098 if (cum < NUM_ARGUMENT_REGISTERS
1099 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1100 cum = NUM_ARGUMENT_REGISTERS;
1102 cum += XSTORMY16_WORD_SIZE (type, mode);
1107 /* Do any needed setup for a variadic function. CUM has not been updated
1108 for the last named argument which has type TYPE and mode MODE. */
/* Do any needed setup for a variadic function.  CUM has not been updated
   for the last named argument which has type TYPE and mode MODE.
   Nothing is needed here — the prologue saves the argument registers
   itself (see xstormy16_expand_prologue).  */
1110 xstormy16_setup_incoming_varargs (cum, int_mode, type, pretend_size)
1111 CUMULATIVE_ARGS cum ATTRIBUTE_UNUSED;
1112 int int_mode ATTRIBUTE_UNUSED;
1113 tree type ATTRIBUTE_UNUSED;
1114 int * pretend_size ATTRIBUTE_UNUSED;
1118 /* Build the va_list type.
1120 For this chip, va_list is a record containing a counter and a pointer.
1121 The counter is of type 'int' and indicates how many bytes
1122 have been used to date. The pointer indicates the stack position
1123 for arguments that have not been passed in registers.
1124 To keep the layout nice, the pointer is first in the structure. */
/* Build the va_list record type described above: a `base' pointer field
   followed by an unsigned `count' of bytes consumed so far.  */
1127 xstormy16_build_va_list ()
1129 tree f_1, f_2, record, type_decl;
1131 record = make_lang_type (RECORD_TYPE);
1132 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
1134 f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
1136 f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
1137 unsigned_type_node);
1139 DECL_FIELD_CONTEXT (f_1) = record;
1140 DECL_FIELD_CONTEXT (f_2) = record;
1142 TREE_CHAIN (record) = type_decl;
1143 TYPE_NAME (record) = type_decl;
1144 TYPE_FIELDS (record) = f_1;
1145 TREE_CHAIN (f_1) = f_2;
1147 layout_type (record);
1152 /* Implement the stdarg/varargs va_start macro. STDARG_P is non-zero if this
1153 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1154 variable to initialize. NEXTARG is the machine independent notion of the
1155 'next' argument after the variable arguments. */
/* Implement va_start: point VALIST's `base' at the incoming argument
   area and set `count' to the bytes already consumed by named register
   arguments.  Rejected in interrupt functions, which have no normal
   argument save area.  */
1157 xstormy16_expand_builtin_va_start (stdarg_p, valist, nextarg)
1158 int stdarg_p ATTRIBUTE_UNUSED;
1160 rtx nextarg ATTRIBUTE_UNUSED;
1162 tree f_base, f_count;
1166 if (xstormy16_interrupt_function_p ())
1167 error ("cannot use va_start in interrupt function");
1169 f_base = TYPE_FIELDS (va_list_type_node);
1170 f_count = TREE_CHAIN (f_base);
1172 base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base);
1173 count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count);
/* base = virtual incoming args + INCOMING_FRAME_SP_OFFSET.  */
1175 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1176 t = build (PLUS_EXPR, TREE_TYPE (base), t,
1177 build_int_2 (INCOMING_FRAME_SP_OFFSET, 0));
1178 t = build (MODIFY_EXPR, TREE_TYPE (base), base, t);
1179 TREE_SIDE_EFFECTS (t) = 1;
1180 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* count = bytes of argument registers already consumed.  */
1182 t = build (MODIFY_EXPR, TREE_TYPE (count), count,
1183 build_int_2 (current_function_args_info * UNITS_PER_WORD, 0));
1184 TREE_SIDE_EFFECTS (t) = 1;
1185 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1188 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1189 of type va_list as a tree, TYPE is the type passed to va_arg.
1190 Note: This algorithm is documented in stormy-abi. */
/* Expand the va_arg builtin: compute the address of the next variadic
   argument of TYPE and advance VALIST past it.  Arguments wholly within
   the first NUM_ARGUMENT_REGISTERS words come from the register-save
   area at `base + count'; later ones come from the stack, addressed
   downward from `base'.
   NOTE(review): the return type, braces, and some declarations/lines
   of this definition appear missing in this view.  */
1193 xstormy16_expand_builtin_va_arg (valist, type)
1197 tree f_base, f_count;
1199 rtx count_rtx, addr_rtx, r;
1200 rtx lab_gotaddr, lab_fromstack;
1202 int size, size_of_reg_args;
1203 tree size_tree, count_plus_size;
1204 rtx count_plus_size_rtx;
1206 f_base = TYPE_FIELDS (va_list_type_node);
1207 f_count = TREE_CHAIN (f_base);
1209 base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base);
1210 count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count);
/* SIZE is the pushed size in bytes; SIZE_TREE is that rounded up to a
   whole number of words, as a tree for arithmetic on COUNT.  */
1212 size = PUSH_ROUNDING (int_size_in_bytes (type));
1213 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1215 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1217 count_rtx = expand_expr (count, NULL_RTX, HImode, EXPAND_NORMAL);
1218 lab_gotaddr = gen_label_rtx ();
1219 lab_fromstack = gen_label_rtx ();
1220 addr_rtx = gen_reg_rtx (Pmode);
/* If count + size would overflow the register-save area, the argument
   lives on the stack; branch to lab_fromstack.  */
1222 count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree);
1223 count_plus_size_rtx = expand_expr (count_plus_size, NULL_RTX, HImode, EXPAND_NORMAL);
1224 emit_cmp_and_jump_insns (count_plus_size_rtx, GEN_INT (size_of_reg_args),
1225 GTU, const1_rtx, HImode, 1, lab_fromstack);
/* Register case: address is simply base + count.  */
1227 t = build (PLUS_EXPR, ptr_type_node, base, count);
1228 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
1230 emit_move_insn (addr_rtx, r);
1232 emit_jump_insn (gen_jump (lab_gotaddr));
1234 emit_label (lab_fromstack);
1236 /* Arguments larger than a word might need to skip over some
1237 registers, since arguments are either passed entirely in
1238 registers or entirely on the stack. */
1239 if (size > 2 || size < 0)
/* First such argument: bump count to the end of the register area so
   subsequent va_arg calls index the stack consistently.  */
1241 rtx lab_notransition = gen_label_rtx ();
1242 emit_cmp_and_jump_insns (count_rtx, GEN_INT (NUM_ARGUMENT_REGISTERS
1244 GEU, const1_rtx, HImode, 1, lab_notransition);
1246 t = build (MODIFY_EXPR, TREE_TYPE (count), count,
1247 build_int_2 (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD, 0));
1248 TREE_SIDE_EFFECTS (t) = 1;
1249 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1251 emit_label (lab_notransition);
/* Stack case: address = base - (count + size - reg area + offset);
   stack arguments are addressed downward from base.  */
1254 t = build (PLUS_EXPR, sizetype, size_tree,
1255 build_int_2 ((- NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1256 + INCOMING_FRAME_SP_OFFSET),
1258 t = build (PLUS_EXPR, TREE_TYPE (count), count, fold (t));
1259 t = build (MINUS_EXPR, TREE_TYPE (base), base, t);
1260 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
1262 emit_move_insn (addr_rtx, r);
1264 emit_label (lab_gotaddr);
/* Either way, advance count past this argument.  */
1266 count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree);
1267 t = build (MODIFY_EXPR, TREE_TYPE (count), count, count_plus_size);
1268 TREE_SIDE_EFFECTS (t) = 1;
1269 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1274 /* Initialize the variable parts of a trampoline. ADDR is an RTX for
1275 the address of the trampoline; FNADDR is an RTX for the address of
1276 the nested function; STATIC_CHAIN is an RTX for the static chain
1277 value that should be passed to the function when it is called. */
/* Fill in the variable parts of a trampoline at ADDR.  The emitted code
   stores, word by word through REG_ADDR_MEM: a `mov Rx,#' opcode word
   (0x3130 | static-chain regno), the STATIC_CHAIN value, then a `jmpf'
   built from the low and high bytes of FNADDR.
   NOTE(review): the return type, argument declarations, braces and the
   declaration of reg_addr_mem appear missing in this view.  */
1279 xstormy16_initialize_trampoline (addr, fnaddr, static_chain)
1284 rtx reg_addr = gen_reg_rtx (Pmode);
1285 rtx temp = gen_reg_rtx (HImode);
1286 rtx reg_fnaddr = gen_reg_rtx (HImode);
/* All stores below go through this HImode mem at reg_addr, which is
   advanced by 2 after each word.  */
1289 reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);
1291 emit_move_insn (reg_addr, addr);
/* Word 0: instruction word loading the static chain register.  */
1292 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1293 emit_move_insn (reg_addr_mem, temp);
1294 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Word 1: the static chain value itself.  */
1295 emit_move_insn (temp, static_chain);
1296 emit_move_insn (reg_addr_mem, temp);
1297 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Word 2: low byte of fnaddr combined with opcode bits 0x0200.  */
1298 emit_move_insn (reg_fnaddr, fnaddr);
1299 emit_move_insn (temp, reg_fnaddr);
1300 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1301 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1302 emit_move_insn (reg_addr_mem, temp);
1303 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Word 3: remaining high bits of fnaddr.  */
1304 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1305 emit_move_insn (reg_addr_mem, reg_fnaddr);
1308 /* Create an RTX representing the place where a function returns a
1309 value of data type VALTYPE. VALTYPE is a tree node representing a
1310 data type. Write `TYPE_MODE (VALTYPE)' to get the machine mode
1311 used to represent that type. On many machines, only the mode is
1312 relevant. (Actually, on most machines, scalar values are returned
1313 in the same place regardless of mode).
1315 If `PROMOTE_FUNCTION_RETURN' is defined, you must apply the same promotion
1316 rules specified in `PROMOTE_MODE' if VALTYPE is a scalar type.
1318 If the precise function being called is known, FUNC is a tree node
1319 (`FUNCTION_DECL') for it; otherwise, FUNC is a null pointer. This makes it
1320 possible to use a different value-returning convention for specific
1321 functions when all their calls are known.
1323 `FUNCTION_VALUE' is not used for return values with aggregate data types,
1324 because these are returned in another way. See `STRUCT_VALUE_REGNUM' and
/* Implement FUNCTION_VALUE: return the RTX for where a function result
   of type VALTYPE is returned.  The mode is promoted per PROMOTE_MODE
   and the value always lives in RETURN_VALUE_REGNUM.  FUNC is unused.
   NOTE(review): the return type, argument declaration for VALTYPE and
   braces appear missing in this view.  */
1327 xstormy16_function_value (valtype, func)
1329 tree func ATTRIBUTE_UNUSED;
1331 enum machine_mode mode;
1332 mode = TYPE_MODE (valtype);
1333 PROMOTE_MODE (mode, 0, valtype);
1334 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1337 /* A C compound statement that outputs the assembler code for a thunk function,
1338 used to implement C++ virtual function calls with multiple inheritance. The
1339 thunk acts as a wrapper around a virtual function, adjusting the implicit
1340 object parameter before handing control off to the real function.
1342 First, emit code to add the integer DELTA to the location that contains the
1343 incoming first argument. Assume that this argument contains a pointer, and
1344 is the one used to pass the `this' pointer in C++. This is the incoming
1345 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1346 addition must preserve the values of all other incoming arguments.
1348 After the addition, emit code to jump to FUNCTION, which is a
1349 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1350 the return address. Hence returning from FUNCTION will return to whoever
1351 called the current `thunk'.
1353 The effect must be as if @var{function} had been called directly
1354 with the adjusted first argument. This macro is responsible for
1355 emitting all of the code for a thunk function;
1356 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1359 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1360 extracted from it.) It might possibly be useful on some targets, but
/* Emit assembly for an MI thunk: add DELTA to the `this' argument
   register, then tail-jump (`jmpf') to FUNCTION.  If FUNCTION returns
   an aggregate, `this' is in the second argument register instead of
   the first.
   NOTE(review): the return type, some argument declarations, braces
   and the regnum increment in the aggregate case appear missing in
   this view.  */
1364 xstormy16_asm_output_mi_thunk (file, thunk_fndecl, delta, function)
1366 tree thunk_fndecl ATTRIBUTE_UNUSED;
1370 int regnum = FIRST_ARGUMENT_REGISTER;
1372 /* There might be a hidden first argument for a returned structure. */
1373 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
/* Mask to 16 bits so the immediate fits the `add' encoding.  */
1376 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (delta) & 0xFFFF);
1377 fputs ("\tjmpf ", file);
1378 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1382 /* Mark functions with SYMBOL_REF_FLAG. */
/* Implement ENCODE_SECTION_INFO: tag function symbols with
   SYMBOL_REF_FLAG so later code can distinguish function addresses
   (which need @fptr handling) from data addresses.
   NOTE(review): the return type, DECL argument declaration and braces
   appear missing in this view.  */
1385 xstormy16_encode_section_info (decl)
1388 if (TREE_CODE (decl) == FUNCTION_DECL)
1389 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
1392 /* Output constructors and destructors. Just like
1393 default_named_section_asm_out_* but don't set the sections writable. */
1394 #undef TARGET_ASM_CONSTRUCTOR
1395 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1396 #undef TARGET_ASM_DESTRUCTOR
1397 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
/* Output a pointer to destructor SYMBOL into the .dtors section (or a
   priority-suffixed .dtors.NNNNN section), like
   default_named_section_asm_out_destructor but without marking the
   section writable.
   NOTE(review): the return type, argument declarations, braces and the
   `section = buf' assignment appear missing in this view.  */
1400 xstormy16_asm_out_destructor (symbol, priority)
1404 const char *section = ".dtors";
1407 /* ??? This only works reliably with the GNU linker. */
1408 if (priority != DEFAULT_INIT_PRIORITY)
1410 sprintf (buf, ".dtors.%.5u",
1411 /* Invert the numbering so the linker puts us in the proper
1412 order; constructors are run from right to left, and the
1413 linker sorts in increasing order. */
1414 MAX_INIT_PRIORITY - priority);
/* Flags 0: section is NOT marked writable, unlike the default hook.  */
1418 named_section_flags (section, 0);
1419 assemble_align (POINTER_SIZE);
1420 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Output a pointer to constructor SYMBOL into the .ctors section (or a
   priority-suffixed .ctors.NNNNN section); mirror image of
   xstormy16_asm_out_destructor above, likewise leaving the section
   read-only.
   NOTE(review): the return type, argument declarations, braces and the
   `section = buf' assignment appear missing in this view.  */
1424 xstormy16_asm_out_constructor (symbol, priority)
1428 const char *section = ".ctors";
1431 /* ??? This only works reliably with the GNU linker. */
1432 if (priority != DEFAULT_INIT_PRIORITY)
1434 sprintf (buf, ".ctors.%.5u",
1435 /* Invert the numbering so the linker puts us in the proper
1436 order; constructors are run from right to left, and the
1437 linker sorts in increasing order. */
1438 MAX_INIT_PRIORITY - priority);
/* Flags 0: section is NOT marked writable, unlike the default hook.  */
1442 named_section_flags (section, 0);
1443 assemble_align (POINTER_SIZE);
1444 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1447 /* Print a memory address as an operand to reference that memory location. */
/* Implement PRINT_OPERAND_ADDRESS: print ADDRESS to FILE as an
   assembler memory reference.  Handles bare constants, labels, and
   register addresses with optional offset and pre-decrement /
   post-increment modifiers.
   NOTE(review): the return type, argument declarations, braces, and
   the lines printing the pre/post modifier punctuation and the offset
   separator appear missing in this view.  */
1449 xstormy16_print_operand_address (file, address)
1453 HOST_WIDE_INT offset;
1454 int pre_dec, post_inc;
1456 /* There are a few easy cases. */
1457 if (GET_CODE (address) == CONST_INT)
/* Absolute address: print as a 16-bit unsigned value.  */
1459 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1463 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1465 output_addr_const (file, address);
1469 /* Otherwise, it's hopefully something of the form
1470 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))
1473 if (GET_CODE (address) == PLUS)
/* Peel off the constant offset, leaving the register part.  */
1475 if (GET_CODE (XEXP (address, 1)) != CONST_INT)
1477 offset = INTVAL (XEXP (address, 1));
1478 address = XEXP (address, 0);
/* Note side-effect addressing modes.  */
1483 pre_dec = (GET_CODE (address) == PRE_DEC);
1484 post_inc = (GET_CODE (address) == POST_INC);
1485 if (pre_dec || post_inc)
1486 address = XEXP (address, 0);
1488 if (GET_CODE (address) != REG)
1494 fputs (reg_names [REGNO (address)], file);
1500 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
1505 /* Print an operand to an assembler instruction. */
/* Implement PRINT_OPERAND: print operand X to FILE under modifier
   letter CODE.  Visible modifiers: `B'/`b' (bit number of the single
   set or clear bit in a constant), `C' (symbol/label without @fptr),
   `o'/`O' (immediate less one, `O' negated first); the default path
   prints registers, memory, and `#'-prefixed constants.
   NOTE(review): the return type, argument declarations, braces, the
   switch heads, several case labels and some assignments appear
   missing in this view.  */
1507 xstormy16_print_operand (file, x, code)
1515 /* There is either one bit set, or one bit clear, in X.
1516 Print it preceded by '#'. */
1518 HOST_WIDE_INT xx = 1;
1521 if (GET_CODE (x) == CONST_INT)
1524 output_operand_lossage ("`B' operand is not constant");
/* Try the set-bit interpretation first, then the clear-bit one.  */
1526 l = exact_log2 (xx);
1528 l = exact_log2 (~xx);
1530 output_operand_lossage ("`B' operand has multiple bits set");
1532 fputs (IMMEDIATE_PREFIX, file);
1533 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1538 /* Print the symbol without a surrounding @fptr(). */
1539 if (GET_CODE (x) == SYMBOL_REF)
1540 assemble_name (file, XSTR (x, 0));
1541 else if (GET_CODE (x) == LABEL_REF)
1542 output_asm_label (x);
1544 xstormy16_print_operand_address (file, x);
1549 /* Print the immediate operand less one, preceded by '#'.
1550 For 'O', negate it first. */
1552 HOST_WIDE_INT xx = 0;
1554 if (GET_CODE (x) == CONST_INT)
1557 output_operand_lossage ("`o' operand is not constant");
1562 fputs (IMMEDIATE_PREFIX, file);
1563 fprintf (file, HOST_WIDE_INT_PRINT_DEC, xx - 1);
1568 /* Handled below. */
1572 output_operand_lossage ("xstormy16_print_operand: unknown code");
/* No modifier (or fall-through): print by RTX code.  */
1576 switch (GET_CODE (x))
1579 fputs (reg_names [REGNO (x)], file);
1583 xstormy16_print_operand_address (file, XEXP (x, 0));
1587 /* Some kind of constant or label; an immediate operand,
1588 so prefix it with '#' for the assembler. */
1589 fputs (IMMEDIATE_PREFIX, file);
1590 output_addr_const (file, x);
1598 /* Expander for the `casesi' pattern.
1599 INDEX is the index of the switch statement.
1600 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1601 to the first table entry.
1602 RANGE is the number of table entries.
1603 TABLE is an ADDR_VEC that is the jump table.
1604 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1605 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
/* Expand the `casesi' pattern: emit a bounds check against RANGE
   (branching to DEFAULT_LABEL when out of range), scale the
   zero-based index by 4 (each table entry is a `jmpf'), and emit a
   pc-relative table jump into TABLE.
   NOTE(review): the return type, argument declarations, braces and
   the int_index declaration appear missing in this view.  */
1609 xstormy16_expand_casesi (index, lower_bound, range, table, default_label)
1616 HOST_WIDE_INT range_i = INTVAL (range);
1619 /* This code uses 'br', so it can deal only with tables of size up to
/* `br' has limited reach; bail out past 8192 entries.  */
1621 if (range_i >= 8192)
1622 sorry ("switch statement of size %lu entries too large",
1623 (unsigned long) range_i);
/* Rebase the index to zero so one unsigned compare does both bounds.  */
1625 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1627 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1629 int_index = gen_lowpart_common (HImode, index);
/* Each table entry is 4 bytes (a jmpf), hence shift left by 2.  */
1630 emit_insn (gen_ashlhi3 (int_index, int_index, GEN_INT (2)));
1631 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1634 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1635 instructions, without label or alignment or any other special
1636 constructs. We know that the previous instruction will be the
1637 `tablejump_pcrel' output above.
1639 TODO: it might be nice to output 'br' instructions if they could
/* Output an ADDR_VEC jump table as a plain sequence of `jmpf' lines,
   one per entry, with no label or alignment; it follows immediately
   after the `tablejump_pcrel' insn emitted by xstormy16_expand_casesi.
   NOTE(review): the return type, argument declarations, braces, the
   idx/vlen declarations and the trailing newline output appear missing
   in this view.  */
1643 xstormy16_output_addr_vec (file, label, table)
1645 rtx label ATTRIBUTE_UNUSED;
/* Switch back to the function's section for the table.  */
1650 function_section (current_function_decl);
1652 vlen = XVECLEN (table, 0);
1653 for (idx = 0; idx < vlen; idx++)
1655 fputs ("\tjmpf ", file);
1656 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1662 /* Expander for the `call' patterns.
1663 INDEX is the index of the switch statement.
1664 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1665 to the first table entry.
1666 RANGE is the number of table entries.
1667 TABLE is an ADDR_VEC that is the jump table.
1668 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1669 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
/* Expander for the `call' patterns.  DEST is the (MEM ...) call target;
   RETVAL, if non-null, receives the result.  Non-constant targets are
   forced into a register, and the emitted CALL is wrapped in a
   PARALLEL with a USE of a zeroed temp register.
   NOTE(review): the return type, argument declarations, braces and
   some intermediate lines (e.g. the abort on non-MEM, the second CALL
   operand) appear missing in this view.  */
1673 xstormy16_expand_call (retval, dest, counter)
1679 enum machine_mode mode;
1681 if (GET_CODE (dest) != MEM)
1683 dest = XEXP (dest, 0);
/* Indirect calls must go through a register.  */
1685 if (! CONSTANT_P (dest)
1686 && GET_CODE (dest) != REG)
1687 dest = force_reg (Pmode, dest);
1692 mode = GET_MODE (retval);
1694 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
/* With a return value, the call becomes (set retval (call ...)).  */
1697 call = gen_rtx_SET (VOIDmode, retval, call);
1699 if (! CONSTANT_P (dest))
1701 temp = gen_reg_rtx (HImode);
1702 emit_move_insn (temp, const0_rtx);
1707 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1708 gen_rtx_USE (VOIDmode, temp)));
1709 emit_call_insn (call);
1712 /* Expanders for multiword computational operations. */
1714 /* Expander for arithmetic operations; emit insns to compute
1716 (set DEST (CODE:MODE SRC0 SRC1))
1718 using CARRY as a temporary. When CODE is COMPARE, a branch
1719 template is generated (this saves duplicating code in
1720 xstormy16_split_cbranch). */
/* Emit insns to compute (set DEST (CODE:MODE SRC0 SRC1)) one word at a
   time, threading the carry through CARRY for add/subtract chains.
   When CODE is COMPARE the final word instead emits a conditional
   branch PARALLEL (see xstormy16_split_cbranch).
   NOTE(review): the return type, several argument declarations,
   braces, the loop's case labels/switch head and various intermediate
   lines appear missing in this view.  */
1723 xstormy16_expand_arith (mode, code, dest, src0, src1, carry)
1724 enum machine_mode mode;
1731 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
/* NOTE(review): despite the name, this register is initialized from
   src0, not zero — presumably a scratch copy; confirm against the
   full source.  */
1737 rtx zero_reg = gen_reg_rtx (word_mode);
1738 emit_move_insn (zero_reg, src0);
/* Process the operand one machine word at a time, low word first.  */
1742 for (i = 0; i < num_words; i++)
1744 rtx w_src0, w_src1, w_dest;
/* Extract word I of each operand.  */
1750 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1751 i * UNITS_PER_WORD);
1752 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1753 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
/* Adding a zero word still must propagate the carry.  */
1759 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1763 insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
1765 insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
/* For COMPARE, the last word becomes a compare-and-branch PARALLEL
   rather than a plain subtract.  */
1771 if (code == COMPARE && i == num_words - 1)
1773 rtx branch, sub, clobber, sub_1;
1775 sub_1 = gen_rtx_MINUS (HImode, w_src0,
1776 gen_rtx_ZERO_EXTEND (HImode, carry));
1777 sub = gen_rtx_SET (VOIDmode, w_dest,
1778 gen_rtx_MINUS (HImode, sub_1, w_src1));
1779 clobber = gen_rtx_CLOBBER (VOIDmode, carry);
1780 branch = gen_rtx_SET (VOIDmode, pc_rtx,
1781 gen_rtx_IF_THEN_ELSE (VOIDmode,
1787 insn = gen_rtx_PARALLEL (VOIDmode,
1788 gen_rtvec (3, branch, sub, clobber));
1792 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1795 insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
1797 insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
/* Logical ops: skip words where the constant operand is the identity
   (-1 for AND, 0 otherwise).  */
1803 if (GET_CODE (w_src1) == CONST_INT
1804 && INTVAL (w_src1) == -(code == AND))
1807 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx (code, mode,
1812 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
1824 /* Return 1 if OP is a shift operator. */
/* Predicate: return 1 if OP is a shift operator (ASHIFT, LSHIFTRT and
   — presumably, given xstormy16_output_shift handles it — ASHIFTRT;
   that alternative appears missing in this view).  MODE is ignored.  */
1827 shift_operator (op, mode)
1829 enum machine_mode mode ATTRIBUTE_UNUSED;
1831 enum rtx_code code = GET_CODE (op);
1833 return (code == ASHIFT
1835 || code == LSHIFTRT);
1838 /* The shift operations are split at output time for constant values;
1839 variable-width shifts get handed off to a library routine.
1841 Generate an output string to do (set X (CODE:MODE X SIZE_R))
1842 SIZE_R will be a CONST_INT, X will be a hard register. */
/* Generate the output template string for (set X (CODE:MODE X SIZE_R)),
   where SIZE_R is a CONST_INT shift count and X a hard register pair
   (r0 = low word, r1 = high word).  Shifts by 1 use rotate-through-
   carry; shifts of 16+ use word moves; the rest combine shifts with a
   temporary TEMP register.
   NOTE(review): the return type, several declarations (r, size), the
   switch heads/case labels, abort paths and return statements appear
   missing in this view.  */
1845 xstormy16_output_shift (mode, code, x, size_r, temp)
1846 enum machine_mode mode;
1853 const char *r0, *r1, *rt;
/* Sanity-check operand kinds before touching REGNO/INTVAL.  */
1856 if (GET_CODE (size_r) != CONST_INT
1857 || GET_CODE (x) != REG
/* Reduce the count modulo the operand width.  */
1860 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
1865 r0 = reg_names [REGNO (x)];
1866 r1 = reg_names [REGNO (x) + 1];
1868 /* For shifts of size 1, we can use the rotate instructions. */
1874 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
1877 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
1880 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
1888 /* For large shifts, there are easy special cases. */
/* Shift by exactly 16: pure word move plus fill.  */
1894 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
1897 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
1900 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
/* Shift by more than 16: word move then shift the remainder.  */
1912 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
1913 r1, r0, r0, r1, (int) size - 16);
1916 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
1917 r0, r1, r1, r0, (int) size - 16);
1920 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
1921 r0, r1, r1, r0, (int) size - 16);
1929 /* For the rest, we have to do more work. In particular, we
1930 need a temporary. */
1931 rt = reg_names [REGNO (temp)];
/* General case: save the bits that cross the word boundary in RT,
   shift both words, then OR the saved bits back in.  */
1936 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
1937 rt, r0, r0, (int) size, r1, (int) size, rt, (int) 16-size,
1942 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
1943 rt, r1, r1, (int) size, r0, (int) size, rt, (int) 16-size,
1948 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
1949 rt, r1, r1, (int) size, r0, (int) size, rt, (int) 16-size,
1958 /* Attribute handling. */
1960 /* Return nonzero if the function is an interrupt function. */
/* Return nonzero if the current function carries the "interrupt"
   type attribute.
   NOTE(review): the return type, braces, the `attributes' declaration
   and the early-return guard for a null current_function_decl appear
   missing in this view.  */
1962 xstormy16_interrupt_function_p ()
1966 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
1967 any functions are declared, which is demonstrably wrong, but
1968 it is worked around here. FIXME. */
1972 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1973 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
/* Register this port's attribute table with the target hook and define
   the table itself: a single "interrupt" type attribute (no arguments,
   applies to function types) handled below, plus the NULL sentinel.  */
1976 #undef TARGET_ATTRIBUTE_TABLE
1977 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
1978 static tree xstormy16_handle_interrupt_attribute PARAMS ((tree *, tree, tree, int, bool *));
1979 static const struct attribute_spec xstormy16_attribute_table[] =
1981 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
1982 { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
1983 { NULL, 0, 0, false, false, false, NULL }
1986 /* Handle an "interrupt" attribute;
1987 arguments as in struct attribute_spec.handler. */
/* Handler for the "interrupt" attribute (arguments as in struct
   attribute_spec.handler): warn and drop the attribute when applied to
   anything other than a function type.
   NOTE(review): the return type, some argument declarations, braces
   and the NULL_TREE return appear missing in this view.  */
1989 xstormy16_handle_interrupt_attribute (node, name, args, flags, no_add_attrs)
1992 tree args ATTRIBUTE_UNUSED;
1993 int flags ATTRIBUTE_UNUSED;
1996 if (TREE_CODE (*node) != FUNCTION_TYPE)
1998 warning ("`%s' attribute only applies to functions",
1999 IDENTIFIER_POINTER (name));
2000 *no_add_attrs = true;
/* Instantiate the target hook vector from the TARGET_* macros defined
   above (and by target-def.h defaults).  */
2006 struct gcc_target targetm = TARGET_INITIALIZER;