1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
45 #include "target-def.h"
/* Forward declarations of static helpers defined later in this file.  */
48 static rtx emit_addhi3_postreload PARAMS ((rtx, rtx, rtx));
49 static void xstormy16_asm_out_constructor PARAMS ((rtx, int));
50 static void xstormy16_asm_out_destructor PARAMS ((rtx, int));
52 /* Define the information needed to generate branch and scc insns. This is
53 stored from the compare operation. */
/* Set by the compare expanders and consumed by xstormy16_emit_cbranch.  */
54 struct rtx_def * xstormy16_compare_op0;
55 struct rtx_def * xstormy16_compare_op1;
57 /* Return 1 if this is a LT, GE, LTU, or GEU operator. */
60 xstormy16_ineqsi_operator (op, mode)
62 enum machine_mode mode;
64 enum rtx_code code = GET_CODE (op);
66 return ((mode == VOIDmode || GET_MODE (op) == mode)
67 && (code == LT || code == GE || code == LTU || code == GEU));
70 /* Return 1 if this is an EQ or NE operator. */
73 equality_operator (op, mode)
75 enum machine_mode mode;
77 return ((mode == VOIDmode || GET_MODE (op) == mode)
78 && (GET_CODE (op) == EQ || GET_CODE (op) == NE));
81 /* Return 1 if this is a comparison operator but not an EQ or NE operator. */
84 inequality_operator (op, mode)
86 enum machine_mode mode;
88 return comparison_operator (op, mode) && ! equality_operator (op, mode);
91 /* Branches are handled as follows:
93 1. HImode compare-and-branches. The machine supports these
94 natively, so the appropriate pattern is emitted directly.
96 2. SImode EQ and NE. These are emitted as pairs of HImode
99 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
100 of a SImode subtract followed by a branch (not a compare-and-branch),
106 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
114 /* Emit a branch of kind CODE to location LOC. */
/* Emit a conditional branch of kind CODE to label LOC, comparing the
   operands previously stashed in xstormy16_compare_op0/op1.  HImode
   compare-and-branch is emitted directly; SImode GT/LE/GTU/LEU and
   SImode EQ/NE against a nonzero operand recurse word-by-word as
   described in the comment above.
   NOTE(review): several source lines of this function were lost in
   extraction (see gaps in the embedded numbering); consult the full
   file before modifying.  */
117 xstormy16_emit_cbranch (code, loc)
121 rtx op0 = xstormy16_compare_op0;
122 rtx op1 = xstormy16_compare_op1;
123 rtx condition_rtx, loc_ref, branch, cy_clobber;
125 enum machine_mode mode;
127 mode = GET_MODE (op0);
128 if (mode != HImode && mode != SImode)
132 && (code == GT || code == LE || code == GTU || code == LEU))
/* Case 4 of the comment above: split GT/LE/GTU/LEU into an LT-style
   branch followed by an EQ/NE-style branch.  */
134 int unsigned_p = (code == GTU || code == LEU);
135 int gt_p = (code == GT || code == GTU);
139 lab = gen_label_rtx ();
140 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
141 /* This should be generated as a comparison against the temporary
142 created by the previous insn, but reload can't handle that. */
143 xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
148 else if (mode == SImode
149 && (code == NE || code == EQ)
150 && op1 != const0_rtx)
/* Case 2: SImode EQ/NE against a nonzero value, emitted as per-word
   HImode compare-and-branches.  */
153 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
157 lab = gen_label_rtx ();
/* Compare all but the last word; a mismatch decides NE immediately
   (or skips to LAB for EQ).  */
159 for (i = 0; i < num_words - 1; i++)
161 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
163 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
165 xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
/* The last word uses the original condition code.  */
167 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
169 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
171 xstormy16_emit_cbranch (code, loc);
178 /* We can't allow reload to try to generate any reload after a branch,
179 so when some register must match we must make the temporary ourselves. */
183 tmp = gen_reg_rtx (mode);
184 emit_move_insn (tmp, op0);
/* Build the branch pattern: (set pc (if_then_else cond ...)) plus
   whatever clobbers the chosen strategy requires.  */
188 condition_rtx = gen_rtx (code, mode, op0, op1);
189 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
190 branch = gen_rtx_SET (VOIDmode, pc_rtx,
191 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
/* The carry bit (BImode scratch) is clobbered by the compare.  */
194 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));
197 vec = gen_rtvec (2, branch, cy_clobber);
198 else if (code == NE || code == EQ)
199 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
/* Case 3: SImode ordering branch — a real subtract whose result (or a
   clobber of op0) travels with the branch in one PARALLEL.  */
204 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
206 sub = gen_rtx_CLOBBER (SImode, op0);
208 vec = gen_rtvec (3, branch, sub, cy_clobber);
211 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
214 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
215 the arithmetic operation. Most of the work is done by
216 xstormy16_expand_arith. */
/* Split a SImode GT/LE/GTU/LEU conditional branch: expand the COMPARE
   arithmetic via xstormy16_expand_arith, then patch the final insn of
   the generated sequence so its condition uses COMPARISON's code and
   its target is LABEL.  */
219 xstormy16_split_cbranch (mode, label, comparison, dest, carry)
220 enum machine_mode mode;
226 rtx op0 = XEXP (comparison, 0);
227 rtx op1 = XEXP (comparison, 1);
232 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
233 seq = gen_sequence ();
/* Locate the SET_SRC (an if_then_else) of the last insn in SEQ and
   rewrite its condition code and label in place.  */
235 compare = SET_SRC (XVECEXP (PATTERN (XVECEXP (seq, 0, XVECLEN (seq, 0) - 1)),
237 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
238 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
243 /* Return the string to output a conditional branch to LABEL, which is
244 the operand number of the label.
246 OP is the conditional expression, or NULL for branch-always.
248 REVERSED is non-zero if we should reverse the sense of the comparison.
/* Return the assembler string for an HImode conditional branch.
   OP is the condition (NULL for branch-always), LABEL the operand
   text for the target, REVERSED non-zero to invert the sense, and
   INSN the jump insn (used for its length attribute).  A long branch
   is synthesized as a reversed short branch over a 'jmpf'.  */
253 xstormy16_output_cbranch_hi (op, label, reversed, insn)
259 static char string[64];
/* With a condition, length 8 means the short form won't reach; for
   branch-always, length 4 plays the same role.  */
260 int need_longbranch = (op != NULL_RTX
261 ? get_attr_length (insn) == 8
262 : get_attr_length (insn) == 4);
/* A long branch inverts the condition so the short branch skips it.  */
263 int really_reversed = reversed ^ need_longbranch;
265 const char *template;
266 const char *operands;
275 sprintf (string, "%s %s", ccode, label);
279 code = GET_CODE (op);
/* Hardware compares want a register first; swap operands (and hence
   the condition) if operand 0 isn't one.  */
281 if (GET_CODE (XEXP (op, 0)) != REG)
283 code = swap_condition (code);
289 /* Work out which way this really branches. */
291 code = reverse_condition (code);
/* Map the RTL condition code to the machine's branch suffix.  */
295 case EQ: ccode = "z"; break;
296 case NE: ccode = "nz"; break;
297 case GE: ccode = "ge"; break;
298 case LT: ccode = "lt"; break;
299 case GT: ccode = "gt"; break;
300 case LE: ccode = "le"; break;
301 case GEU: ccode = "nc"; break;
302 case LTU: ccode = "c"; break;
303 case GTU: ccode = "hi"; break;
304 case LEU: ccode = "ls"; break;
/* Long form: short branch skips the following far jump.  */
311 template = "b%s %s,.+8 | jmpf %s";
313 template = "b%s %s,%s";
314 sprintf (string, template, ccode, operands, label);
319 /* Return the string to output a conditional branch to LABEL, which is
320 the operand number of the label, but suitable for the tail of a
323 OP is the conditional expression (OP is never NULL_RTX).
325 REVERSED is non-zero if we should reverse the sense of the comparison.
/* Return the assembler string for the tail of a split SImode branch.
   OP is the condition (never NULL), LABEL the target operand text,
   REVERSED non-zero to invert the sense, INSN the jump insn.  Emits a
   preparatory instruction (an 'or' of the two result halves for EQ/NE,
   or the final 'sbc' for ordering compares) followed by the branch.  */
330 xstormy16_output_cbranch_si (op, label, reversed, insn)
336 static char string[64];
337 int need_longbranch = get_attr_length (insn) >= 8;
/* A long branch inverts the condition so the short branch skips it.  */
338 int really_reversed = reversed ^ need_longbranch;
340 const char *template;
344 code = GET_CODE (op);
346 /* Work out which way this really branches. */
348 code = reverse_condition (code);
/* Only the codes reachable after splitting are handled here.  */
352 case EQ: ccode = "z"; break;
353 case NE: ccode = "nz"; break;
354 case GE: ccode = "ge"; break;
355 case LT: ccode = "lt"; break;
356 case GEU: ccode = "nc"; break;
357 case LTU: ccode = "c"; break;
359 /* The missing codes above should never be generated. */
/* EQ/NE: OR the low and high halves of the register pair so the Z
   flag reflects the whole 32-bit value.  */
370 if (GET_CODE (XEXP (op, 0)) != REG)
373 regnum = REGNO (XEXP (op, 0));
374 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
/* Ordering compares: finish the 32-bit subtract with carry.  */
378 case GE: case LT: case GEU: case LTU:
379 strcpy (prevop, "sbc %2,%3");
/* Long form: short branch skips the following far jump.  */
387 template = "%s | b%s .+6 | jmpf %s";
389 template = "%s | b%s %s";
390 sprintf (string, template, prevop, ccode, label);
395 /* Many machines have some registers that cannot be copied directly to or from
396 memory or even from other types of registers. An example is the `MQ'
397 register, which on most machines, can only be copied to or from general
398 registers, but not memory. Some machines allow copying all registers to and
399 from memory, but require a scratch register for stores to some memory
400 locations (e.g., those with symbolic address on the RT, and those with
401 certain symbolic address on the Sparc when compiling PIC). In some cases,
402 both an intermediate and a scratch register are required.
404 You should define these macros to indicate to the reload phase that it may
405 need to allocate at least one register for a reload in addition to the
406 register to contain the data. Specifically, if copying X to a register
407 CLASS in MODE requires an intermediate register, you should define
408 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
409 whose registers can be used as intermediate registers or scratch registers.
411 If copying a register CLASS in MODE to X requires an intermediate or scratch
412 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
413 largest register class required. If the requirements for input and output
414 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
415 instead of defining both macros identically.
417 The values returned by these macros are often `GENERAL_REGS'. Return
418 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
419 to or from a register of CLASS in MODE without requiring a scratch register.
420 Do not define this macro if it would always return `NO_REGS'.
422 If a scratch register is required (either with or without an intermediate
423 register), you should define patterns for `reload_inM' or `reload_outM', as
424 required.. These patterns, which will normally be implemented with a
425 `define_expand', should be similar to the `movM' patterns, except that
426 operand 2 is the scratch register.
428 Define constraints for the reload register and scratch register that contain
429 a single register class. If the original reload register (whose class is
430 CLASS) can meet the constraint given in the pattern, the value returned by
431 these macros is used for the class of the scratch register. Otherwise, two
432 additional reload registers are required. Their classes are obtained from
433 the constraints in the insn pattern.
435 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
436 either be in a hard register or in memory. Use `true_regnum' to find out;
437 it will return -1 if the pseudo is in memory and the hard register number if
440 These macros should not be used in the case where a particular class of
441 registers can only be copied to memory and not to another class of
442 registers. In that case, secondary reload registers are not needed and
443 would not be helpful. Instead, a stack location must be used to perform the
444 copy and the `movM' pattern should use memory as an intermediate storage.
445 This case often occurs between floating-point and general registers. */
/* Implement SECONDARY_RELOAD_CLASS (see the long commentary above):
   return the class of scratch register needed, if any, to move X into
   a register of CLASS in MODE.  */
448 xstormy16_secondary_reload_class (class, mode, x)
449 enum reg_class class;
450 enum machine_mode mode;
453 /* This chip has the interesting property that only the first eight
454 registers can be moved to/from memory. */
/* MEMs, and pseudos that may end up in memory or haven't been
   assigned a hard register, need an EIGHT_REGS intermediary when the
   destination class isn't already within EIGHT_REGS.  */
455 if ((GET_CODE (x) == MEM
456 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
457 && (true_regnum (x) == -1
458 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
459 && ! reg_class_subset_p (class, EIGHT_REGS))
462 /* When reloading a PLUS, the carry register will be required
463 unless the inc or dec instructions can be used. */
464 if (xstormy16_carry_plus_operand (x, mode))
470 /* Recognise a PLUS that needs the carry register. */
472 xstormy16_carry_plus_operand (x, mode)
474 enum machine_mode mode ATTRIBUTE_UNUSED;
476 return (GET_CODE (x) == PLUS
477 && GET_CODE (XEXP (x, 1)) == CONST_INT
478 && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4));
483 xstormy16_preferred_reload_class (x, class)
484 enum reg_class class;
487 if (class == GENERAL_REGS
488 && GET_CODE (x) == MEM)
/* True if CONST_INT X plus OFFSET fits the signed 12-bit displacement
   range [-2048, 2047] of register-indexed addressing.  */
494 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
495 (GET_CODE (X) == CONST_INT \
496 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
/* True if CONST_INT X plus OFFSET is a valid absolute address: within
   the low 32K and inside the directly-addressable windows at either
   end (below 0x100 or at/above 0x7F00).  */
498 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
499 (GET_CODE (X) == CONST_INT \
500 && INTVAL (X) + (OFFSET) >= 0 \
501 && INTVAL (X) + (OFFSET) < 0x8000 \
502 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
505 xstormy16_legitimate_address_p (mode, x, strict)
506 enum machine_mode mode ATTRIBUTE_UNUSED;
510 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
513 if (GET_CODE (x) == PLUS
514 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
517 if (GET_CODE (x) == POST_INC
518 || GET_CODE (x) == PRE_DEC)
521 if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
522 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
528 /* Return nonzero if memory address X (an RTX) can have different
529 meanings depending on the machine mode of the memory reference it
530 is used for or if the address is valid for some modes but not
533 Autoincrement and autodecrement addresses typically have mode-dependent
534 effects because the amount of the increment or decrement is the size of the
535 operand being addressed. Some machines have other mode-dependent addresses.
536 Many RISC machines have no mode-dependent addresses.
538 You may assume that ADDR is a valid address for the machine.
540 On this chip, this is true if the address is valid with an offset
541 of 0 but not of 6, because in that case it cannot be used as an
542 address for DImode or DFmode, or if the address is a post-increment
543 or pre-decrement address. */
545 xstormy16_mode_dependent_address_p (x)
548 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
549 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
552 if (GET_CODE (x) == PLUS
553 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
554 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
557 if (GET_CODE (x) == PLUS)
560 if (GET_CODE (x) == POST_INC
561 || GET_CODE (x) == PRE_DEC)
567 /* A C expression that defines the optional machine-dependent constraint
568 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
569 types of operands, usually memory references, for the target machine.
570 Normally this macro will not be defined. If it is required for a particular
571 target machine, it should return 1 if VALUE corresponds to the operand type
572 represented by the constraint letter C. If C is not defined as an extra
573 constraint, the value returned should be 0 regardless of VALUE. */
/* Implement EXTRA_CONSTRAINT (see the comment above): return 1 if X
   matches the machine-specific constraint letter C.  */
575 xstormy16_extra_constraint_p (x, c)
581 /* 'Q' is for pushes. */
583 return (GET_CODE (x) == MEM
584 && GET_CODE (XEXP (x, 0)) == POST_INC
585 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
587 /* 'R' is for pops. */
589 return (GET_CODE (x) == MEM
590 && GET_CODE (XEXP (x, 0)) == PRE_DEC
591 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
593 /* 'S' is for immediate memory addresses. */
595 return (GET_CODE (x) == MEM
596 && GET_CODE (XEXP (x, 0)) == CONST_INT
597 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
601 /* Not implemented yet. */
604 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
605 for allocating a scratch register for 32-bit shifts. */
607 return (GET_CODE (x) == CONST_INT
608 && (INTVAL (x) < 2 || INTVAL (x) > 15));
616 short_memory_operand (x, mode)
618 enum machine_mode mode;
620 if (! memory_operand (x, mode))
622 return (GET_CODE (XEXP (x, 0)) != PLUS);
626 nonimmediate_nonstack_operand (op, mode)
628 enum machine_mode mode;
630 /* 'Q' is for pushes, 'R' for pops. */
631 return (nonimmediate_operand (op, mode)
632 && ! xstormy16_extra_constraint_p (op, 'Q')
633 && ! xstormy16_extra_constraint_p (op, 'R'));
636 /* Splitter for the 'move' patterns, for modes not directly implemeted
637 by hardware. Emit insns to copy a value of mode MODE from SRC to
640 This function is only called when reload_completed.
/* Split a move of mode MODE (wider than HImode) from SRC to DEST into
   word-sized moves, choosing a copy direction that is safe for
   overlapping register pairs and autoincrement addresses.  Only called
   after reload.
   NOTE(review): several source lines of this function were lost in
   extraction (see gaps in the embedded numbering); consult the full
   file before modifying.  */
644 xstormy16_split_move (mode, dest, src)
645 enum machine_mode mode;
649 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
650 int direction, end, i;
651 int src_modifies = 0;
652 int dest_modifies = 0;
653 int src_volatile = 0;
654 int dest_volatile = 0;
656 rtx auto_inc_reg_rtx = NULL_RTX;
658 /* Check initial conditions. */
659 if (! reload_completed
660 || mode == QImode || mode == HImode
661 || ! nonimmediate_operand (dest, mode)
662 || ! general_operand (src, mode))
665 /* This case is not supported below, and shouldn't be generated. */
666 if (GET_CODE (dest) == MEM
667 && GET_CODE (src) == MEM)
670 /* This case is very very bad after reload, so trap it now. */
671 if (GET_CODE (dest) == SUBREG
672 || GET_CODE (src) == SUBREG)
675 /* The general idea is to copy by words, offsetting the source and
676 destination. Normally the least-significant word will be copied
677 first, but for pre-dec operations it's better to copy the
678 most-significant word first. Only one operand can be a pre-dec
681 It's also possible that the copy overlaps so that the direction
685 if (GET_CODE (dest) == MEM)
/* Record side effects / volatility of the MEM side, then strip the
   volatile flag from a working copy so the per-word moves are plain.  */
687 mem_operand = XEXP (dest, 0);
688 dest_modifies = side_effects_p (mem_operand);
689 if (auto_inc_p (mem_operand))
690 auto_inc_reg_rtx = XEXP (mem_operand, 0);
691 dest_volatile = MEM_VOLATILE_P (dest);
694 dest = copy_rtx (dest);
695 MEM_VOLATILE_P (dest) = 0;
698 else if (GET_CODE (src) == MEM)
700 mem_operand = XEXP (src, 0);
701 src_modifies = side_effects_p (mem_operand);
702 if (auto_inc_p (mem_operand))
703 auto_inc_reg_rtx = XEXP (mem_operand, 0);
704 src_volatile = MEM_VOLATILE_P (src);
707 src = copy_rtx (src);
708 MEM_VOLATILE_P (src) = 0;
712 mem_operand = NULL_RTX;
/* Choose the copy direction.  */
714 if (mem_operand == NULL_RTX)
/* Register-to-register: copy high-to-low when the pairs overlap with
   DEST above SRC, so no word is clobbered before it is read.  */
716 if (GET_CODE (src) == REG
717 && GET_CODE (dest) == REG
718 && reg_overlap_mentioned_p (dest, src)
719 && REGNO (dest) > REGNO (src))
/* Pre-decrement addresses want the most-significant word first.  */
722 else if (GET_CODE (mem_operand) == PRE_DEC
723 || (GET_CODE (mem_operand) == PLUS
724 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
/* Load whose address registers overlap the destination.  */
726 else if (GET_CODE (src) == MEM
727 && reg_overlap_mentioned_p (dest, src))
730 if (GET_CODE (dest) != REG)
732 regno = REGNO (dest);
734 if (! refers_to_regno_p (regno, regno + num_words, mem_operand, 0))
/* If the address uses the low word of DEST, copy high-to-low so the
   address stays valid until last; if it uses the high word, low-to-high.  */
737 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
739 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
743 /* This means something like
744 (set (reg:DI r0) (mem:DI (reg:HI r1)))
745 which we'd need to support by doing the set of the second word
750 end = direction < 0 ? -1 : num_words;
751 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
753 rtx w_src, w_dest, insn;
/* Build the word-sized source and destination for this iteration,
   re-applying any volatile flag recorded above.  */
756 w_src = gen_rtx_MEM (word_mode, mem_operand);
758 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
760 MEM_VOLATILE_P (w_src) = 1;
762 w_dest = gen_rtx_MEM (word_mode, mem_operand);
764 w_dest = simplify_gen_subreg (word_mode, dest, mode,
767 MEM_VOLATILE_P (w_dest) = 1;
769 /* The simplify_subreg calls must always be able to simplify. */
770 if (GET_CODE (w_src) == SUBREG
771 || GET_CODE (w_dest) == SUBREG)
/* Emit the word move, noting any autoincrement register with a
   REG_INC note so flow analysis sees the side effect.  */
774 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
775 if (auto_inc_reg_rtx)
776 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
782 /* Expander for the 'move' patterns. Emit insns to copy a value of
783 mode MODE from SRC to DEST. */
/* Expander for the 'move' patterns: emit insns to copy a value of
   mode MODE from SRC to DEST, legitimizing immediate-to-memory stores
   and splitting wide moves eagerly.  */
786 xstormy16_expand_move (mode, dest, src)
787 enum machine_mode mode;
791 /* There are only limited immediate-to-memory move instructions. */
/* Force SRC into a register when storing a non-register to a memory
   location that can't take an immediate store (not during reload,
   which may not create new pseudos).  */
792 if (! reload_in_progress
793 && ! reload_completed
794 && GET_CODE (dest) == MEM
795 && (GET_CODE (XEXP (dest, 0)) != CONST_INT
796 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
797 && GET_CODE (src) != REG
798 && GET_CODE (src) != SUBREG)
799 src = copy_to_mode_reg (mode, src);
801 /* Don't emit something we would immediately split. */
803 && mode != HImode && mode != QImode)
805 xstormy16_split_move (mode, dest, src);
809 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
815 The stack is laid out as follows:
819 Register save area (up to 4 words)
820 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
822 AP-> Return address (two words)
823 9th procedure parameter word
824 10th procedure parameter word
826 last procedure parameter word
828 The frame pointer location is tuned to make it most likely that all
829 parameters and local variables can be accessed using a load-indexed
832 /* A structure to describe the layout. */
/* All sizes and offsets are in bytes.  Filled in by
   xstormy16_compute_stack_layout below.  */
833 struct xstormy16_stack_layout
835 /* Size of the topmost three items on the stack. */
837 int register_save_size;
838 int stdarg_save_size;
839 /* Sum of the above items. */
841 /* Various offsets. */
842 int first_local_minus_ap;
847 /* Does REGNO need to be saved? */
/* A register is saved if it is call-saved and used, or — in an
   interrupt function (IFUN) — if it is any non-fixed call-used
   register that is live or might be clobbered by a call from a
   non-leaf body.  */
848 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
849 ((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM]) \
850 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
851 && (regs_ever_live[REGNUM] || ! current_function_is_leaf)))
853 /* Compute the stack layout. */
/* Return a filled-in xstormy16_stack_layout describing this function's
   frame: locals, saved registers, the stdarg register-save area, and
   the derived FP/AP/SP offsets.  */
854 struct xstormy16_stack_layout
855 xstormy16_compute_stack_layout ()
857 struct xstormy16_stack_layout layout;
859 const int ifun = xstormy16_interrupt_function_p ();
861 layout.locals_size = get_frame_size ();
/* One word per register that REG_NEEDS_SAVE selects.  */
863 layout.register_save_size = 0;
864 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
865 if (REG_NEEDS_SAVE (regno, ifun))
866 layout.register_save_size += UNITS_PER_WORD;
/* Varargs/stdarg functions save all argument registers.  */
868 if (current_function_varargs || current_function_stdarg)
869 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
871 layout.stdarg_save_size = 0;
873 layout.frame_size = (layout.locals_size
874 + layout.register_save_size
875 + layout.stdarg_save_size);
/* Place the frame pointer so that, when possible, both incoming
   arguments and locals fit the 12-bit (2048-byte) indexed
   displacement range.  */
877 if (current_function_args_size <= 2048 && current_function_args_size != -1)
879 if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
880 + current_function_args_size <= 2048)
881 layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
883 layout.fp_minus_ap = 2048 - current_function_args_size;
886 layout.fp_minus_ap = (layout.stdarg_save_size
887 + layout.register_save_size
888 + INCOMING_FRAME_SP_OFFSET);
889 layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
890 - layout.fp_minus_ap);
891 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
895 /* Determine how all the special registers get eliminated. */
/* Implement INITIAL_ELIMINATION_OFFSET: return the byte offset between
   register FROM and register TO, derived from the computed stack
   layout.  */
897 xstormy16_initial_elimination_offset (from, to)
900 struct xstormy16_stack_layout layout;
903 layout = xstormy16_compute_stack_layout ();
905 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
906 result = layout.sp_minus_fp - layout.locals_size;
907 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
908 result = -layout.locals_size;
909 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
910 result = -layout.fp_minus_ap;
911 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
912 result = -(layout.sp_minus_fp + layout.fp_minus_ap);
920 emit_addhi3_postreload (dest, src0, src1)
925 rtx set, clobber, insn;
927 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
928 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
929 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
933 /* Called after register allocation to add any instructions needed for
934 the prologue. Using a prologue insn is favored compared to putting
935 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
936 since it allows the scheduler to intermix instructions with the
937 saves of the caller saved registers. In some cases, it might be
938 necessary to emit a barrier instruction as the last insn to prevent
941 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
942 so that the debug info generation code can handle them properly. */
/* Emit the function prologue: save argument registers for stdarg,
   push call-saved registers, allocate locals, and set up the frame
   pointer.  All frame insns are marked RTX_FRAME_RELATED_P for debug
   info.
   NOTE(review): some source lines were lost in extraction (see gaps in
   the embedded numbering); consult the full file before modifying.  */
944 xstormy16_expand_prologue ()
946 struct xstormy16_stack_layout layout;
950 rtx mem_fake_push_rtx;
951 const int ifun = xstormy16_interrupt_function_p ();
/* A push on this machine is a post-increment store through SP; the
   "fake" pre-increment form is used only in REG_FRAME_RELATED_EXPR
   notes.  */
953 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
954 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
955 mem_fake_push_rtx = gen_rtx_PRE_INC (Pmode, stack_pointer_rtx);
956 mem_fake_push_rtx = gen_rtx_MEM (HImode, mem_fake_push_rtx);
958 layout = xstormy16_compute_stack_layout ();
960 /* Save the argument registers if necessary. */
961 if (layout.stdarg_save_size)
962 for (regno = FIRST_ARGUMENT_REGISTER;
963 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
966 rtx reg = gen_rtx_REG (HImode, regno);
967 insn = emit_move_insn (mem_push_rtx, reg);
968 RTX_FRAME_RELATED_P (insn) = 1;
969 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
970 gen_rtx_SET (VOIDmode,
976 /* Push each of the registers to save. */
977 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
978 if (REG_NEEDS_SAVE (regno, ifun))
980 rtx reg = gen_rtx_REG (HImode, regno);
981 insn = emit_move_insn (mem_push_rtx, reg);
982 RTX_FRAME_RELATED_P (insn) = 1;
983 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
984 gen_rtx_SET (VOIDmode,
990 /* It's just possible that the SP here might be what we need for
/* If FP == SP before allocating locals, copy it now and skip the
   adjustment below.  */
992 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
994 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
995 RTX_FRAME_RELATED_P (insn) = 1;
998 /* Allocate space for local variables. */
999 if (layout.locals_size)
1001 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1002 GEN_INT (layout.locals_size));
1003 RTX_FRAME_RELATED_P (insn) = 1;
1006 /* Set up the frame pointer, if required. */
1007 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1009 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1010 RTX_FRAME_RELATED_P (insn) = 1;
1011 if (layout.sp_minus_fp)
1013 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1014 hard_frame_pointer_rtx,
1015 GEN_INT (-layout.sp_minus_fp));
1016 RTX_FRAME_RELATED_P (insn) = 1;
1021 /* Do we need an epilogue at all? */
1025 return (reload_completed
1026 && xstormy16_compute_stack_layout ().frame_size == 0);
1029 /* Called after register allocation to add any instructions needed for
1030 the epilogue. Using an epilogue insn is favored compared to putting
1031 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1032 since it allows the scheduler to intermix instructions with the
1033 saves of the caller saved registers. In some cases, it might be
1034 necessary to emit a barrier instruction as the last insn to prevent
/* Emit the function epilogue: deallocate locals, pop call-saved
   registers in reverse order, drop the stdarg save area, and emit the
   appropriate (interrupt or normal) return.  */
1038 xstormy16_expand_epilogue ()
1040 struct xstormy16_stack_layout layout;
1043 const int ifun = xstormy16_interrupt_function_p ();
/* A pop on this machine is a pre-decrement load through SP.  */
1045 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1046 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1048 layout = xstormy16_compute_stack_layout ();
1050 /* Pop the stack for the locals. */
1051 if (layout.locals_size)
/* Restoring SP from FP is cheaper when they coincide.  */
1053 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1054 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1056 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1057 GEN_INT (- layout.locals_size));
1060 /* Restore any call-saved registers. */
/* Reverse order of the prologue pushes.  */
1061 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1062 if (REG_NEEDS_SAVE (regno, ifun))
1063 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1065 /* Pop the stack for the stdarg save area. */
1066 if (layout.stdarg_save_size)
1067 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1068 GEN_INT (- layout.stdarg_save_size));
/* Interrupt functions return with a different instruction.  */
1072 emit_jump_insn (gen_return_internal_interrupt ());
1074 emit_jump_insn (gen_return_internal ());
1078 xstormy16_epilogue_uses (regno)
1081 if (reload_completed && call_used_regs[regno])
1083 const int ifun = xstormy16_interrupt_function_p ();
1084 return REG_NEEDS_SAVE (regno, ifun);
1089 /* Return an updated summarizer variable CUM to advance past an
1090 argument in the argument list. The values MODE, TYPE and NAMED
1091 describe that argument. Once this is done, the variable CUM is
1092 suitable for analyzing the *following* argument with
1093 `FUNCTION_ARG', etc.
1095 This function need not do anything if the argument in question was
1096 passed on the stack. The compiler knows how to track the amount of
1097 stack space used for arguments without any special help. However,
1098 it makes life easier for xstormy16_build_va_list if it does update
1101 xstormy16_function_arg_advance (cum, mode, type, named)
1102 CUMULATIVE_ARGS cum;
1103 enum machine_mode mode;
1105 int named ATTRIBUTE_UNUSED;
1107 /* If an argument would otherwise be passed partially in registers,
1108 and partially on the stack, the whole of it is passed on the
1110 if (cum < NUM_ARGUMENT_REGISTERS
1111 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1112 cum = NUM_ARGUMENT_REGISTERS;
1114 cum += XSTORMY16_WORD_SIZE (type, mode);
1119 /* Do any needed setup for a variadic function. CUM has not been updated
1120 for the last named argument which has type TYPE and mode MODE. */
1122 xstormy16_setup_incoming_varargs (cum, int_mode, type, pretend_size)
1123 CUMULATIVE_ARGS cum ATTRIBUTE_UNUSED;
1124 int int_mode ATTRIBUTE_UNUSED;
1125 tree type ATTRIBUTE_UNUSED;
1126 int * pretend_size ATTRIBUTE_UNUSED;
1130 /* Build the va_list type.
1132 For this chip, va_list is a record containing a counter and a pointer.
1133 The counter is of type 'int' and indicates how many bytes
1134 have been used to date. The pointer indicates the stack position
1135 for arguments that have not been passed in registers.
1136 To keep the layout nice, the pointer is first in the structure. */
/* Build the va_list record type described in the comment above: a
   'base' pointer field followed by an unsigned 'count' field, wrapped
   in a RECORD_TYPE named __va_list_tag.  */
1139 xstormy16_build_va_list ()
1141 tree f_1, f_2, record, type_decl;
1143 record = make_lang_type (RECORD_TYPE);
1144 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* Pointer first so the layout stays nice (see comment above).  */
1146 f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
1148 f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
1149 unsigned_type_node);
1151 DECL_FIELD_CONTEXT (f_1) = record;
1152 DECL_FIELD_CONTEXT (f_2) = record;
/* Chain the fields into the record and lay it out.  */
1154 TREE_CHAIN (record) = type_decl;
1155 TYPE_NAME (record) = type_decl;
1156 TYPE_FIELDS (record) = f_1;
1157 TREE_CHAIN (f_1) = f_2;
1159 layout_type (record);
1164 /* Implement the stdarg/varargs va_start macro. STDARG_P is non-zero if this
1165 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1166 variable to initialize. NEXTARG is the machine independent notion of the
1167 'next' argument after the variable arguments. */
/* Expand va_start: set VALIST's 'base' field to the incoming argument
   pointer (adjusted by INCOMING_FRAME_SP_OFFSET) and its 'count' field
   to the number of argument bytes already consumed by named
   parameters.  */
1169 xstormy16_expand_builtin_va_start (stdarg_p, valist, nextarg)
1170 int stdarg_p ATTRIBUTE_UNUSED;
1172 rtx nextarg ATTRIBUTE_UNUSED;
1174 tree f_base, f_count;
/* Interrupt functions have a nonstandard frame; varargs can't work.  */
1178 if (xstormy16_interrupt_function_p ())
1179 error ("cannot use va_start in interrupt function");
1181 f_base = TYPE_FIELDS (va_list_type_node);
1182 f_count = TREE_CHAIN (f_base);
1184 base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base);
1185 count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count);
/* base = __builtin_incoming_args + INCOMING_FRAME_SP_OFFSET  */
1187 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1188 t = build (PLUS_EXPR, TREE_TYPE (base), t,
1189 build_int_2 (INCOMING_FRAME_SP_OFFSET, 0));
1190 t = build (MODIFY_EXPR, TREE_TYPE (base), base, t);
1191 TREE_SIDE_EFFECTS (t) = 1;
1192 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* count = bytes of named arguments already passed.  */
1194 t = build (MODIFY_EXPR, TREE_TYPE (count), count,
1195 build_int_2 (current_function_args_info * UNITS_PER_WORD, 0));
1196 TREE_SIDE_EFFECTS (t) = 1;
1197 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1200 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1201 of type va_list as a tree, TYPE is the type passed to va_arg.
1202 Note: This algorithm is documented in stormy-abi. */
/* NOTE(review): listing incomplete — original lines 1203-1204, 1206-1208,
   1210, 1213, 1217, 1220, 1223, 1226, 1228, 1233, 1238, 1241, 1243, 1245,
   1247, 1252, 1255, 1257, 1262, 1264-1265, 1269, 1273, 1275, 1277, and
   1282-1284 are missing (return type, parameter declarations, braces, the
   `tree base, count, t;' locals, aborts/returns, closing brace, and the
   second arguments of some multi-line calls).  Confirm before editing.  */
1205 xstormy16_expand_builtin_va_arg (valist, type)
1209 tree f_base, f_count;
1211 rtx count_rtx, addr_rtx, r;
1212 rtx lab_gotaddr, lab_fromstack;
1214 int size, size_of_reg_args;
1215 tree size_tree, count_plus_size;
1216 rtx count_plus_size_rtx;
/* Fields of the va_list record: `base' then `count'.  */
1218 f_base = TYPE_FIELDS (va_list_type_node);
1219 f_count = TREE_CHAIN (f_base);
1221 base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base);
1222 count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count);
/* size = pushed size of the argument; size_tree = the same rounded up
   to a whole number of words.  */
1224 size = PUSH_ROUNDING (int_size_in_bytes (type));
1225 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1227 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1229 count_rtx = expand_expr (count, NULL_RTX, HImode, EXPAND_NORMAL);
1230 lab_gotaddr = gen_label_rtx ();
1231 lab_fromstack = gen_label_rtx ();
1232 addr_rtx = gen_reg_rtx (Pmode);
/* If count + size would overflow the register-save area, the argument
   lives on the stack: branch to lab_fromstack.  */
1234 count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree);
1235 count_plus_size_rtx = expand_expr (count_plus_size, NULL_RTX, HImode, EXPAND_NORMAL);
1236 emit_cmp_and_jump_insns (count_plus_size_rtx, GEN_INT (size_of_reg_args),
1237 GTU, const1_rtx, HImode, 1, lab_fromstack);
/* Register case: address is simply base + count.  */
1239 t = build (PLUS_EXPR, ptr_type_node, base, count);
1240 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
1242 emit_move_insn (addr_rtx, r);
1244 emit_jump_insn (gen_jump (lab_gotaddr));
1246 emit_label (lab_fromstack);
1248 /* Arguments larger than a word might need to skip over some
1249 registers, since arguments are either passed entirely in
1250 registers or entirely on the stack. */
/* NOTE(review): size < 0 presumably flags variable-sized types — confirm
   against int_size_in_bytes' contract.  */
1251 if (size > 2 || size < 0)
1253 rtx lab_notransition = gen_label_rtx ();
1254 emit_cmp_and_jump_insns (count_rtx, GEN_INT (NUM_ARGUMENT_REGISTERS
1256 GEU, const1_rtx, HImode, 1, lab_notransition);
/* We crossed the register/stack boundary: consume all remaining
   argument registers so the address arithmetic below is right.  */
1258 t = build (MODIFY_EXPR, TREE_TYPE (count), count,
1259 build_int_2 (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD, 0));
1260 TREE_SIDE_EFFECTS (t) = 1;
1261 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1263 emit_label (lab_notransition);
/* Stack case: address = base - (size + count - reg-area size
   + INCOMING_FRAME_SP_OFFSET ...), per the stormy-abi algorithm.  */
1266 t = build (PLUS_EXPR, sizetype, size_tree,
1267 build_int_2 ((- NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1268 + INCOMING_FRAME_SP_OFFSET),
1270 t = build (PLUS_EXPR, TREE_TYPE (count), count, fold (t));
1271 t = build (MINUS_EXPR, TREE_TYPE (base), base, t);
1272 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
1274 emit_move_insn (addr_rtx, r);
1276 emit_label (lab_gotaddr);
/* Finally advance count past this argument for the next va_arg.  */
1278 count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree);
1279 t = build (MODIFY_EXPR, TREE_TYPE (count), count, count_plus_size);
1280 TREE_SIDE_EFFECTS (t) = 1;
1281 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1286 /* Initialize the variable parts of a trampoline. ADDR is an RTX for
1287 the address of the trampoline; FNADDR is an RTX for the address of
1288 the nested function; STATIC_CHAIN is an RTX for the static chain
1289 value that should be passed to the function when it is called. */
/* NOTE(review): listing incomplete — original lines 1290, 1292-1295, 1299,
   1300, 1302 and 1318 are missing (return type, parameter declarations,
   opening brace, the `rtx reg_addr_mem;' declaration, and the closing
   brace).  */
1291 xstormy16_initialize_trampoline (addr, fnaddr, static_chain)
1296 rtx reg_addr = gen_reg_rtx (Pmode);
1297 rtx temp = gen_reg_rtx (HImode);
1298 rtx reg_fnaddr = gen_reg_rtx (HImode);
/* reg_addr walks through the trampoline a word at a time; every store
   below goes through this MEM and is followed by `add reg_addr, 2'.  */
1301 reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);
1303 emit_move_insn (reg_addr, addr);
/* Word 0: 0x3130 | STATIC_CHAIN_REGNUM — presumably the encoding of a
   `mov <static-chain-reg>,#imm16' instruction; confirm against the
   Xstormy16 ISA manual.  */
1304 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1305 emit_move_insn (reg_addr_mem, temp);
1306 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Word 1: the static chain value itself (the immediate of the mov).  */
1307 emit_move_insn (temp, static_chain);
1308 emit_move_insn (reg_addr_mem, temp);
1309 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
/* Words 2-3: a jump to FNADDR.  Word 2 combines the low byte of the
   function address with opcode bits 0x0200; word 3 holds the remaining
   high bits (fnaddr >> 8).  */
1310 emit_move_insn (reg_fnaddr, fnaddr);
1311 emit_move_insn (temp, reg_fnaddr);
1312 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1313 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1314 emit_move_insn (reg_addr_mem, temp);
1315 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1316 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1317 emit_move_insn (reg_addr_mem, reg_fnaddr);
1320 /* Create an RTX representing the place where a function returns a
1321 value of data type VALTYPE. VALTYPE is a tree node representing a
1322 data type. Write `TYPE_MODE (VALTYPE)' to get the machine mode
1323 used to represent that type. On many machines, only the mode is
1324 relevant. (Actually, on most machines, scalar values are returned
1325 in the same place regardless of mode).
1327 If `PROMOTE_FUNCTION_RETURN' is defined, you must apply the same promotion
1328 rules specified in `PROMOTE_MODE' if VALTYPE is a scalar type.
1330 If the precise function being called is known, FUNC is a tree node
1331 (`FUNCTION_DECL') for it; otherwise, FUNC is a null pointer. This makes it
1332 possible to use a different value-returning convention for specific
1333 functions when all their calls are known.
1335 `FUNCTION_VALUE' is not used for return vales with aggregate data types,
1336 because these are returned in another way. See `STRUCT_VALUE_REGNUM' and
/* NOTE(review): original lines 1337-1338, 1340, 1342 and 1347-1348 are
   missing from this listing (end of the comment, the `tree valtype;'
   declaration, and the braces).  */
1339 xstormy16_function_value (valtype, func)
1341 tree func ATTRIBUTE_UNUSED;
1343 enum machine_mode mode;
1344 mode = TYPE_MODE (valtype);
/* Apply the target's promotion rules, then return the value in the
   fixed return-value register.  */
1345 PROMOTE_MODE (mode, 0, valtype);
1346 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1349 /* A C compound statement that outputs the assembler code for a thunk function,
1350 used to implement C++ virtual function calls with multiple inheritance. The
1351 thunk acts as a wrapper around a virtual function, adjusting the implicit
1352 object parameter before handing control off to the real function.
1354 First, emit code to add the integer DELTA to the location that contains the
1355 incoming first argument. Assume that this argument contains a pointer, and
1356 is the one used to pass the `this' pointer in C++. This is the incoming
1357 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1358 addition must preserve the values of all other incoming arguments.
1360 After the addition, emit code to jump to FUNCTION, which is a
1361 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1362 the return address. Hence returning from FUNCTION will return to whoever
1363 called the current `thunk'.
1365 The effect must be as if @var{function} had been called directly
1366 with the adjusted first argument. This macro is responsible for
1367 emitting all of the code for a thunk function;
1368 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1371 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1372 extracted from it.) It might possibly be useful on some targets, but
/* NOTE(review): listing incomplete — original lines 1369-1370, 1373-1375,
   1377, 1379-1381, 1383, 1386-1387 and 1391-1392 are missing (the FILE/
   delta/function parameter declarations, braces, the body of the
   aggregate_value_p conditional — presumably `regnum += 1;' to skip the
   hidden struct-return pointer — and the trailing newline output).  */
1376 xstormy16_asm_output_mi_thunk (file, thunk_fndecl, delta, function)
1378 tree thunk_fndecl ATTRIBUTE_UNUSED;
1382 int regnum = FIRST_ARGUMENT_REGISTER;
1384 /* There might be a hidden first argument for a returned structure. */
1385 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
/* Add DELTA (masked to 16 bits) to the register holding `this', then
   tail-jump to the real function via jmpf.  */
1388 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (delta) & 0xFFFF);
1389 fputs ("\tjmpf ", file);
1390 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1394 /* Mark functions with SYMBOL_REF_FLAG. */
/* NOTE(review): original lines 1395-1396, 1398-1399 and 1402 are missing
   (return type, the `tree decl;' declaration and the braces).  Setting
   SYMBOL_REF_FLAG on a FUNCTION_DECL's symbol lets later code distinguish
   function symbols from data symbols.  */
1397 xstormy16_encode_section_info (decl)
1400 if (TREE_CODE (decl) == FUNCTION_DECL)
1401 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
1404 /* Output constructors and destructors. Just like
1405 default_named_section_asm_out_* but don't set the sections writable. */
1406 #undef TARGET_ASM_CONSTRUCTOR
1407 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1408 #undef TARGET_ASM_DESTRUCTOR
1409 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
/* Emit SYMBOL into a .dtors (or priority-suffixed .dtors.NNNNN) section,
   read-only, so the linker collects it into the destructor list.
   NOTE(review): original lines 1410-1411, 1413-1415, 1417-1418, 1421 and
   1427-1429 are missing (static return type, parameter declarations, the
   `char buf[...]' declaration, the braces, and — presumably — a
   `section = buf;' after the sprintf; confirm against the full source.  */
1412 xstormy16_asm_out_destructor (symbol, priority)
1416 const char *section = ".dtors";
1419 /* ??? This only works reliably with the GNU linker. */
1420 if (priority != DEFAULT_INIT_PRIORITY)
1422 sprintf (buf, ".dtors.%.5u",
1423 /* Invert the numbering so the linker puts us in the proper
1424 order; constructors are run from right to left, and the
1425 linker sorts in increasing order. */
1426 MAX_INIT_PRIORITY - priority);
/* Flags 0 => not SECTION_WRITE: this is the whole point of overriding
   the default named-section constructor/destructor output.  */
1430 named_section_flags (section, 0);
1431 assemble_align (POINTER_SIZE);
1432 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Emit SYMBOL into a .ctors (or priority-suffixed .ctors.NNNNN) section,
   read-only — the constructor-list counterpart of the destructor routine
   above.
   NOTE(review): original lines 1434-1435, 1437-1439, 1441-1442, 1445 and
   1451-1453 are missing (static return type, parameter declarations, the
   buffer declaration, braces, and — presumably — `section = buf;' after
   the sprintf; confirm against the full source.  */
1436 xstormy16_asm_out_constructor (symbol, priority)
1440 const char *section = ".ctors";
1443 /* ??? This only works reliably with the GNU linker. */
1444 if (priority != DEFAULT_INIT_PRIORITY)
1446 sprintf (buf, ".ctors.%.5u",
1447 /* Invert the numbering so the linker puts us in the proper
1448 order; constructors are run from right to left, and the
1449 linker sorts in increasing order. */
1450 MAX_INIT_PRIORITY - priority);
/* Flags 0 => section is not writable.  */
1454 named_section_flags (section, 0);
1455 assemble_align (POINTER_SIZE);
1456 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1459 /* Print a memory address as an operand to reference that memory location. */
/* NOTE(review): listing incomplete — original lines 1460, 1462-1464, 1467,
   1470, 1472-1474, 1476, 1478-1480, 1483-1484, 1486, 1488, 1491-1494, 1499,
   1501-1505 and 1507-1511 are missing (return type, the FILE*/rtx parameter
   declarations, braces, `return;' statements after the easy cases, the
   abort() calls on malformed addresses, and the pre-dec/post-inc
   punctuation output).  Do not assume fall-through between the cases
   below; the missing lines almost certainly contained early returns.  */
1461 xstormy16_print_operand_address (file, address)
1465 HOST_WIDE_INT offset;
1466 int pre_dec, post_inc;
1468 /* There are a few easy cases. */
/* Plain constant: print it masked to 16 bits (HImode addresses).  */
1469 if (GET_CODE (address) == CONST_INT)
1471 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1475 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1477 output_addr_const (file, address);
1481 /* Otherwise, it's hopefully something of the form
1482 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))
/* Peel a constant offset off a PLUS; the inner part must then be a
   (possibly pre-dec/post-inc wrapped) register.  */
1485 if (GET_CODE (address) == PLUS)
1487 if (GET_CODE (XEXP (address, 1)) != CONST_INT)
1489 offset = INTVAL (XEXP (address, 1));
1490 address = XEXP (address, 0);
1495 pre_dec = (GET_CODE (address) == PRE_DEC);
1496 post_inc = (GET_CODE (address) == POST_INC);
1497 if (pre_dec || post_inc)
1498 address = XEXP (address, 0);
1500 if (GET_CODE (address) != REG)
1506 fputs (reg_names [REGNO (address)], file);
1512 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
1517 /* Print an operand to an assembler instruction. */
/* NOTE(review): listing heavily truncated — original lines 1518, 1520-1526,
   1529, 1531-1532, 1534-1535, 1537, 1539, 1541, 1543, 1546-1549, 1555,
   1557-1560, 1563, 1565, 1567-1568, 1570-1573, 1576-1579, 1581-1583,
   1585-1587, 1589-1590, 1592-1594, 1596-1598 and 1603-1608 are missing
   (the function header and parameters, the `switch (code)' dispatch, the
   assignments into `xx', the `break;'/`return;' statements, and the
   closing braces).  The fragments below are the bodies of the individual
   operand-code cases ('B'/'b', 'C', 'o'/'O', and the default constant/
   register/memory handling).  */
1519 xstormy16_print_operand (file, x, code)
1527 /* There is either one bit set, or one bit clear, in X.
1528 Print it preceded by '#'. */
1530 HOST_WIDE_INT xx = 1;
1533 if (GET_CODE (x) == CONST_INT)
1536 output_operand_lossage ("`B' operand is not constant");
/* Try the set-bit interpretation first, then the clear-bit one.  */
1538 l = exact_log2 (xx);
1540 l = exact_log2 (~xx);
1542 output_operand_lossage ("`B' operand has multiple bits set");
1544 fputs (IMMEDIATE_PREFIX, file);
1545 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1550 /* Print the symbol without a surrounding @fptr(). */
1551 if (GET_CODE (x) == SYMBOL_REF)
1552 assemble_name (file, XSTR (x, 0));
1553 else if (GET_CODE (x) == LABEL_REF)
1554 output_asm_label (x);
1556 xstormy16_print_operand_address (file, x);
1561 /* Print the immediate operand less one, preceded by '#'.
1562 For 'O', negate it first. */
1564 HOST_WIDE_INT xx = 0;
1566 if (GET_CODE (x) == CONST_INT)
1569 output_operand_lossage ("`o' operand is not constant");
1574 fputs (IMMEDIATE_PREFIX, file);
1575 fprintf (file, HOST_WIDE_INT_PRINT_DEC, xx - 1);
1580 /* Handled below. */
1584 output_operand_lossage ("xstormy16_print_operand: unknown code");
/* No (or handled) operand code: print X itself by its RTX class.  */
1588 switch (GET_CODE (x))
1591 fputs (reg_names [REGNO (x)], file);
1595 xstormy16_print_operand_address (file, XEXP (x, 0));
1599 /* Some kind of constant or label; an immediate operand,
1600 so prefix it with '#' for the assembler. */
1601 fputs (IMMEDIATE_PREFIX, file);
1602 output_addr_const (file, x);
1610 /* Expander for the `casesi' pattern.
1611 INDEX is the index of the switch statement.
1612 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1613 to the first table entry.
1614 RANGE is the number of table entries.
1615 TABLE is an ADDR_VEC that is the jump table.
1616 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1617 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
/* NOTE(review): original lines 1618-1620, 1622-1627, 1629-1630, 1632, 1636,
   1638, 1640 and 1644 are missing (return type, the five parameter
   declarations, braces, the `rtx int_index;' declaration, and trailing
   arguments of the expand_binop/emit_cmp_and_jump_insns calls —
   presumably OPTAB_LIB_WIDEN and default_label respectively).  */
1621 xstormy16_expand_casesi (index, lower_bound, range, table, default_label)
1628 HOST_WIDE_INT range_i = INTVAL (range);
1631 /* This code uses 'br', so it can deal only with tables of size up to
1633 if (range_i >= 8192)
1634 sorry ("switch statement of size %lu entries too large",
1635 (unsigned long) range_i);
/* Normalize index to 0..range, bounds-check (unsigned compare catches
   both below-lower and above-upper in one branch), then scale by 4
   (each table entry is a 4-byte jmpf) and dispatch pc-relative.  */
1637 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1639 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1641 int_index = gen_lowpart_common (HImode, index);
1642 emit_insn (gen_ashlhi3 (int_index, int_index, GEN_INT (2)));
1643 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1646 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1647 instructions, without label or alignment or any other special
1648 constructs. We know that the previous instruction will be the
1649 `tablejump_pcrel' output above.
1651 TODO: it might be nice to output 'br' instructions if they could
/* NOTE(review): original lines 1650, 1652-1654, 1656, 1658-1661, 1663,
   1666, 1669-1673 are missing (end of the comment, return type, FILE*
   and table parameter declarations, the `int vlen, idx;' declarations,
   braces, and the per-entry trailing newline/closing braces).  */
1655 xstormy16_output_addr_vec (file, label, table)
1657 rtx label ATTRIBUTE_UNUSED;
/* Switch back to the function's section before emitting the table.  */
1662 function_section (current_function_decl);
1664 vlen = XVECLEN (table, 0);
/* One `jmpf <label>' per table entry; matches the entry size assumed
   by the `<< 2' scaling in xstormy16_expand_casesi.  */
1665 for (idx = 0; idx < vlen; idx++)
1667 fputs ("\tjmpf ", file);
1668 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1674 /* Expander for the `call' patterns.
1675 INDEX is the index of the switch statement.
1676 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1677 to the first table entry.
1678 RANGE is the number of table entries.
1679 TABLE is an ADDR_VEC that is the jump table.
1680 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1681 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
/* NOTE(review): the comment above does not describe this function's actual
   parameters (RETVAL, DEST, COUNTER) — it appears to have been copied from
   the casesi expander.  Also missing from the listing: original lines
   1682-1684, 1686-1690, 1692, 1694, 1696, 1700-1703, 1705, 1707-1708,
   1710, 1712 and 1715-1718 (return type, parameter declarations, braces,
   the `rtx call, temp;' declarations, abort on non-MEM dest, the
   no-return-value gen_rtx_CALL branch, and the temp-as-@fptr handling).  */
1685 xstormy16_expand_call (retval, dest, counter)
1691 enum machine_mode mode;
/* DEST arrives as a MEM wrapping the call target; unwrap it and force
   non-constant targets into a register.  */
1693 if (GET_CODE (dest) != MEM)
1695 dest = XEXP (dest, 0);
1697 if (! CONSTANT_P (dest)
1698 && GET_CODE (dest) != REG)
1699 dest = force_reg (Pmode, dest);
1704 mode = GET_MODE (retval);
1706 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1709 call = gen_rtx_SET (VOIDmode, retval, call);
/* Indirect calls additionally USE a zeroed temp register (missing
   lines presumably attach it as the call's second operand).  */
1711 if (! CONSTANT_P (dest))
1713 temp = gen_reg_rtx (HImode);
1714 emit_move_insn (temp, const0_rtx);
1719 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1720 gen_rtx_USE (VOIDmode, temp)));
1721 emit_call_insn (call);
1724 /* Expanders for multiword computational operations. */
1726 /* Expander for arithmetic operations; emit insns to compute
1728 (set DEST (CODE:MODE SRC0 SRC1))
1730 using CARRY as a temporary. When CODE is COMPARE, a branch
1731 template is generated (this saves duplicating code in
1732 xstormy16_split_cbranch). */
/* NOTE(review): listing heavily truncated — original lines 1733-1734,
   1737-1742, 1744-1748, 1751-1753, 1755, 1757-1761, 1766-1770, 1772-1774,
   1776, 1778-1782, 1784, 1786, 1794-1798, 1801-1803, 1805-1806, 1808,
   1810-1814, 1817-1818, 1820-1823, 1825-1835 are missing (parameter
   declarations, braces, the loop-local `rtx insn;', the COMPARE setup
   that copies SRC0, the switch over CODE with its PLUS/MINUS/AND/IOR/XOR/
   NOT case labels, and the emit_insn of each generated pattern).  The
   fragments below are the word-by-word expansion: each iteration takes
   word-sized subregs of the operands and emits an add/subtract-with-carry
   or bitwise insn, threading CARRY between words.  */
1735 xstormy16_expand_arith (mode, code, dest, src0, src1, carry)
1736 enum machine_mode mode;
1743 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1749 rtx zero_reg = gen_reg_rtx (word_mode);
1750 emit_move_insn (zero_reg, src0);
1754 for (i = 0; i < num_words; i++)
1756 rtx w_src0, w_src1, w_dest;
/* Word I of each operand, taken as a word_mode subreg.  */
1762 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1763 i * UNITS_PER_WORD);
1764 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1765 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
/* Adding a zero word can be skipped except for the carry plumbing.  */
1771 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1775 insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
1777 insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
/* For COMPARE, the final word's subtract is fused with the branch so
   xstormy16_split_cbranch doesn't duplicate this logic.  */
1783 if (code == COMPARE && i == num_words - 1)
1785 rtx branch, sub, clobber, sub_1;
1787 sub_1 = gen_rtx_MINUS (HImode, w_src0,
1788 gen_rtx_ZERO_EXTEND (HImode, carry));
1789 sub = gen_rtx_SET (VOIDmode, w_dest,
1790 gen_rtx_MINUS (HImode, sub_1, w_src1));
1791 clobber = gen_rtx_CLOBBER (VOIDmode, carry);
1792 branch = gen_rtx_SET (VOIDmode, pc_rtx,
1793 gen_rtx_IF_THEN_ELSE (VOIDmode,
1799 insn = gen_rtx_PARALLEL (VOIDmode,
1800 gen_rtvec (3, branch, sub, clobber));
1804 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1807 insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
1809 insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
/* Bitwise ops with the identity constant (0 for IOR/XOR, -1 for AND)
   need no insn for this word.  */
1815 if (GET_CODE (w_src1) == CONST_INT
1816 && INTVAL (w_src1) == -(code == AND))
1819 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx (code, mode,
1824 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
1836 /* Return 1 if OP is a shift operator. */
/* NOTE(review): original lines 1837-1838, 1840, 1842, 1844, 1846 and
   1848-1849 are missing (return type, the `rtx op;' declaration, braces,
   and one alternative between ASHIFT and LSHIFTRT — presumably
   `|| code == ASHIFTRT'; confirm against the full source).  */
1839 shift_operator (op, mode)
1841 enum machine_mode mode ATTRIBUTE_UNUSED;
1843 enum rtx_code code = GET_CODE (op);
1845 return (code == ASHIFT
1847 || code == LSHIFTRT);
1850 /* The shift operations are split at output time for constant values;
1851 variable-width shifts get handed off to a library routine.
1853 Generate an output string to do (set X (CODE:MODE X SIZE_R))
1854 SIZE_R will be a CONST_INT, X will be a hard register. */
/* NOTE(review): listing heavily truncated — original lines 1852, 1855-1856,
   1859-1864, 1866-1867, 1870-1871, 1873-1876, 1879, 1881-1885, 1887-1888,
   1890-1891, 1893-1899, 1901-1905, 1907-1908, 1910-1911, 1913-1923,
   1926-1927, 1930-1931, 1934-1940, 1944-1947, 1950-1953, 1956-1959 and
   1962-1969 are missing (parameter declarations, the static result
   buffer `r', the abort() on bad operands, the size==0 / size==16 special
   cases, the switch statements over CODE, and all `return r;' paths).
   The sprintf fragments below are the per-case multi-insn templates for
   SImode shifts on a 16-bit machine (low word R0, high word R1).  */
1857 xstormy16_output_shift (mode, code, x, size_r, temp)
1858 enum machine_mode mode;
1865 const char *r0, *r1, *rt;
1868 if (GET_CODE (size_r) != CONST_INT
1869 || GET_CODE (x) != REG
/* Reduce the count modulo the operand width.  */
1872 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
1877 r0 = reg_names [REGNO (x)];
1878 r1 = reg_names [REGNO (x) + 1];
1880 /* For shifts of size 1, we can use the rotate instructions. */
/* shl/asr/shr on one word, rotating the shifted-out bit into the other
   word through carry (rlc/rrc).  */
1886 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
1889 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
1892 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
1900 /* For large shifts, there are easy special cases. */
/* Exactly 16: pure word move plus zero- or sign-fill.  */
1906 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
1909 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
1912 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
/* More than 16: word move, fill, then shift the rest within one word.  */
1924 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
1925 r1, r0, r0, r1, (int) size - 16);
1928 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
1929 r0, r1, r1, r0, (int) size - 16);
1932 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
1933 r0, r1, r1, r0, (int) size - 16);
1941 /* For the rest, we have to do more work. In particular, we
1942 need a temporary. */
1943 rt = reg_names [REGNO (temp)];
/* General 2..15-bit case: save the bits that cross the word boundary
   in TEMP, shift both words, then OR the saved bits back in.  */
1948 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
1949 rt, r0, r0, (int) size, r1, (int) size, rt, (int) 16-size,
1954 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
1955 rt, r1, r1, (int) size, r0, (int) size, rt, (int) 16-size,
1960 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
1961 rt, r1, r1, (int) size, r0, (int) size, rt, (int) 16-size,
1970 /* Attribute handling. */
1972 /* Return nonzero if the function is an interrupt function. */
/* NOTE(review): original lines 1971, 1973, 1975-1977 and 1981-1983 are
   missing (return type, opening brace, the `tree attributes;'
   declaration, and — per the comment below — an early return for the
   case where current_function_decl is not yet set).  */
1974 xstormy16_interrupt_function_p ()
1978 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
1979 any functions are declared, which is demonstrably wrong, but
1980 it is worked around here. FIXME. */
/* A function is an interrupt handler iff its type carries the
   "interrupt" attribute (see xstormy16_attribute_table below).  */
1984 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1985 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
/* Register this target's attribute table with the target hook machinery,
   declare the handler, and define the table: a single "interrupt"
   attribute taking no arguments, applicable to function types only.
   NOTE(review): original lines 1986-1987, 1992 and 1996-1997 are missing
   (presumably the opening/closing braces of the initializer).  */
1988 #undef TARGET_ATTRIBUTE_TABLE
1989 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
1990 static tree xstormy16_handle_interrupt_attribute PARAMS ((tree *, tree, tree, int, bool *));
1991 static const struct attribute_spec xstormy16_attribute_table[] =
1993 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
1994 { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
1995 { NULL, 0, 0, false, false, false, NULL }
1998 /* Handle an "interrupt" attribute;
1999 arguments as in struct attribute_spec.handler. */
/* NOTE(review): original lines 2000, 2002-2003, 2006-2007, 2009 and
   2013-2017 are missing (static tree return type, the `tree *node' and
   `tree name' parameter declarations, `bool *no_add_attrs;', braces, and
   the `return NULL_TREE;' tail).  */
2001 xstormy16_handle_interrupt_attribute (node, name, args, flags, no_add_attrs)
2004 tree args ATTRIBUTE_UNUSED;
2005 int flags ATTRIBUTE_UNUSED;
/* The attribute is only meaningful on function types; warn and drop it
   anywhere else.  */
2008 if (TREE_CODE (*node) != FUNCTION_TYPE)
2010 warning ("`%s' attribute only applies to functions",
2011 IDENTIFIER_POINTER (name));
2012 *no_add_attrs = true;
/* Assembler pseudo-ops for aligned 16- and 32-bit data on this target,
   then instantiate the target hook vector with all the TARGET_* macros
   defined above baked in.  */
2018 #undef TARGET_ASM_ALIGNED_HI_OP
2019 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2020 #undef TARGET_ASM_ALIGNED_SI_OP
2021 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2023 struct gcc_target targetm = TARGET_INITIALIZER;