1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
36 #include "diagnostic-core.h"
44 #include "target-def.h"
46 #include "langhooks.h"
/* Forward declarations for static helpers and target-hook
   implementations defined later in this file (listing is partial).  */
52 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
53 static void xstormy16_asm_out_constructor (rtx, int);
54 static void xstormy16_asm_out_destructor (rtx, int);
55 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
58 static void xstormy16_init_builtins (void);
59 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
60 static bool xstormy16_rtx_costs (rtx, int, int, int, int *, bool);
61 static int xstormy16_address_cost (rtx, bool);
62 static bool xstormy16_return_in_memory (const_tree, const_tree);
/* GC-rooted section pointer; presumably backs placement of "below 100"
   (low-address) objects -- confirm against the section-setup code.  */
64 static GTY(()) section *bss100_section;
66 /* Compute a (partial) cost for rtx X. Return true if the complete
67 cost has been computed, and false if subexpressions should be
68 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): the switch framing and several case labels are elided
   in this listing; comments below cover only the visible arms.  */
71 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
72 int opno ATTRIBUTE_UNUSED, int *total,
73 bool speed ATTRIBUTE_UNUSED)
/* Constants 0..15: half an insn -- they fit a short immediate field.  */
78 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
79 *total = COSTS_N_INSNS (1) / 2;
/* Constants 0..255: one insn.  */
80 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
81 *total = COSTS_N_INSNS (1);
/* Any other constant needs two insns to materialize.  */
83 *total = COSTS_N_INSNS (2);
90 *total = COSTS_N_INSNS (2);
/* Expensive arithmetic (the operation codes are elided here).  */
94 *total = COSTS_N_INSNS (35 + 6);
97 *total = COSTS_N_INSNS (51 - 6);
/* Worker for TARGET_ADDRESS_COST: constant addresses are cheapest (2),
   base+offset costs 7; the final fallback arm is elided in this view.  */
106 xstormy16_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
108 return (CONST_INT_P (x) ? 2
109 : GET_CODE (x) == PLUS ? 7
113 /* Worker function for TARGET_MEMORY_MOVE_COST. */
/* Flat base cost of 5 plus any secondary-reload cost for RCLASS.  */
116 xstormy16_memory_move_cost (enum machine_mode mode, reg_class_t rclass,
119 return (5 + memory_move_secondary_cost (mode, rclass, in));
122 /* Branches are handled as follows:
124 1. HImode compare-and-branches. The machine supports these
125 natively, so the appropriate pattern is emitted directly.
127 2. SImode EQ and NE. These are emitted as pairs of HImode
128 compare-and-branches.
130 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
131 of a SImode subtract followed by a branch (not a compare-and-branch),
137 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
144 /* Emit a branch of kind CODE to location LOC. */
/* OP0/OP1 are the comparison operands (HImode or SImode).  SImode
   GT/LE/GTU/LEU and SImode EQ/NE against nonzero are lowered
   recursively into HImode compare-and-branches; see the strategy
   comment earlier in this file.  NOTE(review): some lines of this
   function are elided in this listing.  */
147 xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
149 rtx condition_rtx, loc_ref, branch, cy_clobber;
151 enum machine_mode mode;
153 mode = GET_MODE (op0);
154 gcc_assert (mode == HImode || mode == SImode);
/* Case 4 of the strategy comment: GT/LE/GTU/LEU via two branches.  */
157 && (code == GT || code == LE || code == GTU || code == LEU))
159 int unsigned_p = (code == GTU || code == LEU);
160 int gt_p = (code == GT || code == GTU);
164 lab = gen_label_rtx ();
165 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
166 /* This should be generated as a comparison against the temporary
167 created by the previous insn, but reload can't handle that. */
168 xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
/* Case 2: SImode EQ/NE as word-by-word HImode compare-and-branches.  */
173 else if (mode == SImode
174 && (code == NE || code == EQ)
175 && op1 != const0_rtx)
177 rtx op0_word, op1_word;
179 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
183 lab = gen_label_rtx ();
/* Compare all but the last word; on inequality, jump early.  */
185 for (i = 0; i < num_words - 1; i++)
187 op0_word = simplify_gen_subreg (word_mode, op0, mode,
189 op1_word = simplify_gen_subreg (word_mode, op1, mode,
191 xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
/* The final word decides the branch with the original code.  */
193 op0_word = simplify_gen_subreg (word_mode, op0, mode,
195 op1_word = simplify_gen_subreg (word_mode, op1, mode,
197 xstormy16_emit_cbranch (code, op0_word, op1_word, loc);
204 /* We can't allow reload to try to generate any reload after a branch,
205 so when some register must match we must make the temporary ourselves. */
209 tmp = gen_reg_rtx (mode);
210 emit_move_insn (tmp, op0);
/* Build the branch RTL: (set pc (if_then_else cond (label) pc)).  */
214 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
215 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
216 branch = gen_rtx_SET (VOIDmode, pc_rtx,
217 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
/* All forms clobber the carry flag.  */
220 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
223 vec = gen_rtvec (2, branch, cy_clobber);
224 else if (code == NE || code == EQ)
225 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
/* Case 3: SImode LT/GE/LTU/GEU carries a subtract (or clobber) of op0.  */
230 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
232 sub = gen_rtx_CLOBBER (SImode, op0);
234 vec = gen_rtvec (3, branch, sub, cy_clobber);
237 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
240 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
241 the arithmetic operation. Most of the work is done by
242 xstormy16_expand_arith. */
245 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
248 rtx op0 = XEXP (comparison, 0);
249 rtx op1 = XEXP (comparison, 1);
/* Emit the COMPARE sequence, then patch its last insn into a branch.  */
254 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
258 gcc_assert (INSN_P (seq));
/* Walk to the final insn of the emitted sequence.  */
261 while (NEXT_INSN (last_insn) != NULL_RTX)
262 last_insn = NEXT_INSN (last_insn);
/* Rewrite its condition code and jump target in place.  */
264 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
265 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
266 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
271 /* Return the string to output a conditional branch to LABEL, which is
272 the operand number of the label.
274 OP is the conditional expression, or NULL for branch-always.
276 REVERSED is nonzero if we should reverse the sense of the comparison.
/* Returns a pointer into a static buffer -- not reentrant.  */
281 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
283 static char string[64];
/* Long branches are detected via the insn length attribute.  */
284 int need_longbranch = (op != NULL_RTX
285 ? get_attr_length (insn) == 8
286 : get_attr_length (insn) == 4);
/* A long branch is emitted as the reversed short branch around a jmpf,
   so the sense flips once more.  */
287 int really_reversed = reversed ^ need_longbranch;
290 const char *operands;
299 sprintf (string, "%s %s", ccode, label);
303 code = GET_CODE (op);
/* Canonicalize so the register operand comes first.  */
305 if (! REG_P (XEXP (op, 0)))
307 code = swap_condition (code);
313 /* Work out which way this really branches. */
315 code = reverse_condition (code);
/* Map rtx comparison codes to xstormy16 condition mnemonics.  */
319 case EQ: ccode = "z"; break;
320 case NE: ccode = "nz"; break;
321 case GE: ccode = "ge"; break;
322 case LT: ccode = "lt"; break;
323 case GT: ccode = "gt"; break;
324 case LE: ccode = "le"; break;
325 case GEU: ccode = "nc"; break;
326 case LTU: ccode = "c"; break;
327 case GTU: ccode = "hi"; break;
328 case LEU: ccode = "ls"; break;
/* Long form: short branch over a far jump to the real target.  */
335 templ = "b%s %s,.+8 | jmpf %s";
338 sprintf (string, templ, ccode, operands, label);
343 /* Return the string to output a conditional branch to LABEL, which is
344 the operand number of the label, but suitable for the tail of a
347 OP is the conditional expression (OP is never NULL_RTX).
349 REVERSED is nonzero if we should reverse the sense of the comparison.
/* SImode variant: emits a preparatory insn (OR of the two halves for
   EQ/NE, a subtract-with-carry for the ordered codes) followed by the
   branch.  Returns a pointer into a static buffer -- not reentrant.  */
354 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
356 static char string[64];
357 int need_longbranch = get_attr_length (insn) >= 8;
/* Long branches invert the tested sense (branch around a far jump).  */
358 int really_reversed = reversed ^ need_longbranch;
364 code = GET_CODE (op);
366 /* Work out which way this really branches. */
368 code = reverse_condition (code);
372 case EQ: ccode = "z"; break;
373 case NE: ccode = "nz"; break;
374 case GE: ccode = "ge"; break;
375 case LT: ccode = "lt"; break;
376 case GEU: ccode = "nc"; break;
377 case LTU: ccode = "c"; break;
379 /* The missing codes above should never be generated. */
/* EQ/NE: OR the high word into the low word so Z reflects all 32 bits.  */
390 gcc_assert (REG_P (XEXP (op, 0)));
392 regnum = REGNO (XEXP (op, 0));
393 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
/* Ordered codes: finish the 32-bit compare with a subtract-with-carry.  */
397 case GE: case LT: case GEU: case LTU:
398 strcpy (prevop, "sbc %2,%3");
406 templ = "%s | b%s .+6 | jmpf %s";
408 templ = "%s | b%s %s";
409 sprintf (string, templ, prevop, ccode, label);
414 /* Many machines have some registers that cannot be copied directly to or from
415 memory or even from other types of registers. An example is the `MQ'
416 register, which on most machines, can only be copied to or from general
417 registers, but not memory. Some machines allow copying all registers to and
418 from memory, but require a scratch register for stores to some memory
419 locations (e.g., those with symbolic address on the RT, and those with
420 certain symbolic address on the SPARC when compiling PIC). In some cases,
421 both an intermediate and a scratch register are required.
423 You should define these macros to indicate to the reload phase that it may
424 need to allocate at least one register for a reload in addition to the
425 register to contain the data. Specifically, if copying X to a register
426 RCLASS in MODE requires an intermediate register, you should define
427 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
428 whose registers can be used as intermediate registers or scratch registers.
430 If copying a register RCLASS in MODE to X requires an intermediate or scratch
431 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
432 largest register class required. If the requirements for input and output
433 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
434 instead of defining both macros identically.
436 The values returned by these macros are often `GENERAL_REGS'. Return
437 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
438 to or from a register of RCLASS in MODE without requiring a scratch register.
439 Do not define this macro if it would always return `NO_REGS'.
441 If a scratch register is required (either with or without an intermediate
442 register), you should define patterns for `reload_inM' or `reload_outM', as
443 required.. These patterns, which will normally be implemented with a
444 `define_expand', should be similar to the `movM' patterns, except that
445 operand 2 is the scratch register.
447 Define constraints for the reload register and scratch register that contain
448 a single register class. If the original reload register (whose class is
449 RCLASS) can meet the constraint given in the pattern, the value returned by
450 these macros is used for the class of the scratch register. Otherwise, two
451 additional reload registers are required. Their classes are obtained from
452 the constraints in the insn pattern.
454 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
455 either be in a hard register or in memory. Use `true_regnum' to find out;
456 it will return -1 if the pseudo is in memory and the hard register number if
459 These macros should not be used in the case where a particular class of
460 registers can only be copied to memory and not to another class of
461 registers. In that case, secondary reload registers are not needed and
462 would not be helpful. Instead, a stack location must be used to perform the
463 copy and the `movM' pattern should use memory as an intermediate storage.
464 This case often occurs between floating-point and general registers. */
/* Secondary reload class: memory (and spilled pseudos) can only be
   moved through the first eight registers, so anything outside
   EIGHT_REGS needs an intermediate.  See the long comment above.  */
467 xstormy16_secondary_reload_class (enum reg_class rclass,
468 enum machine_mode mode ATTRIBUTE_UNUSED,
471 /* This chip has the interesting property that only the first eight
472 registers can be moved to/from memory. */
/* true_regnum == -1 means the pseudo lives in memory.  */
474 || ((GET_CODE (x) == SUBREG || REG_P (x))
475 && (true_regnum (x) == -1
476 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
477 && ! reg_class_subset_p (rclass, EIGHT_REGS))
483 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
484 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
/* Narrow GENERAL_REGS to a memory-capable class when X is a MEM
   (the returned class is elided in this listing).  */
487 xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
489 if (rclass == GENERAL_REGS && MEM_P (x))
495 /* Predicate for symbols and addresses that reflect special 8-bit
/* True for symbols tagged below-100 and for constant addresses in the
   short-addressable windows 0x0000-0x00ff and 0x7f00-0x7fff.  */
499 xstormy16_below100_symbol (rtx x,
500 enum machine_mode mode ATTRIBUTE_UNUSED)
/* Strip a CONST wrapper and any (symbol + const_int) offset.  */
502 if (GET_CODE (x) == CONST)
504 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
507 if (GET_CODE (x) == SYMBOL_REF)
508 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
512 HOST_WIDE_INT i = INTVAL (x);
514 if ((i >= 0x0000 && i <= 0x00ff)
515 || (i >= 0x7f00 && i <= 0x7fff))
521 /* Likewise, but only for non-volatile MEMs, for patterns where the
522 MEM will get split into smaller sized accesses. */
/* Volatile accesses must not be split, so reject them up front.  */
525 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
527 if (MEM_P (x) && MEM_VOLATILE_P (x))
529 return xstormy16_below100_operand (x, mode);
532 /* Expand an 8-bit IOR. This either detects the one case we can
533 actually do, or uses a 16-bit IOR. */
/* operands[0..2] are dest, source, and value (QImode).  */
536 xstormy16_expand_iorqi3 (rtx *operands)
538 rtx in, out, outsub, val;
/* Fast path: setting a single bit has a native QImode pattern.  */
544 if (xstormy16_onebit_set_operand (val, QImode))
546 if (!xstormy16_below100_or_register (in, QImode))
547 in = copy_to_mode_reg (QImode, in);
548 if (!xstormy16_below100_or_register (out, QImode))
549 out = gen_reg_rtx (QImode);
550 emit_insn (gen_iorqi3_internal (out, in, val));
551 if (out != operands[0])
552 emit_move_insn (operands[0], out);
/* General case: widen everything to HImode and use iorhi3.  */
557 in = copy_to_mode_reg (QImode, in);
559 if (! REG_P (val) && ! CONST_INT_P (val))
560 val = copy_to_mode_reg (QImode, val);
563 out = gen_reg_rtx (QImode);
565 in = simplify_gen_subreg (HImode, in, QImode, 0);
566 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
568 if (! CONST_INT_P (val))
569 val = simplify_gen_subreg (HImode, val, QImode, 0);
571 emit_insn (gen_iorhi3 (outsub, in, val));
573 if (out != operands[0])
574 emit_move_insn (operands[0], out);
577 /* Expand an 8-bit AND. This either detects the one case we can
578 actually do, or uses a 16-bit AND. */
/* Mirror of xstormy16_expand_iorqi3, for the single-bit-clear case.  */
581 xstormy16_expand_andqi3 (rtx *operands)
583 rtx in, out, outsub, val;
/* Fast path: clearing a single bit has a native QImode pattern.  */
589 if (xstormy16_onebit_clr_operand (val, QImode))
591 if (!xstormy16_below100_or_register (in, QImode))
592 in = copy_to_mode_reg (QImode, in);
593 if (!xstormy16_below100_or_register (out, QImode))
594 out = gen_reg_rtx (QImode);
595 emit_insn (gen_andqi3_internal (out, in, val));
596 if (out != operands[0])
597 emit_move_insn (operands[0], out);
/* General case: widen everything to HImode and use andhi3.  */
602 in = copy_to_mode_reg (QImode, in);
604 if (! REG_P (val) && ! CONST_INT_P (val))
605 val = copy_to_mode_reg (QImode, val);
608 out = gen_reg_rtx (QImode);
610 in = simplify_gen_subreg (HImode, in, QImode, 0);
611 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
613 if (! CONST_INT_P (val))
614 val = simplify_gen_subreg (HImode, val, QImode, 0);
616 emit_insn (gen_andhi3 (outsub, in, val));
618 if (out != operands[0])
619 emit_move_insn (operands[0], out);
/* True if X plus OFFSET is a signed 12-bit displacement (-2048..2047).  */
622 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
624 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
/* True if X plus OFFSET is an absolute address the chip can encode:
   within 0..0x7fff and inside the 0x0000-0x00ff or 0x7f00-0x7fff windows.  */
626 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
628 && INTVAL (X) + (OFFSET) >= 0 \
629 && INTVAL (X) + (OFFSET) < 0x8000 \
630 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
/* TARGET_LEGITIMATE_ADDRESS_P: accept encodable constants, reg+imm12,
   pre-modify/post-inc/pre-dec forms, plain base registers, and
   below-100 symbols.  NOTE(review): some lines are elided here.  */
633 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
636 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
639 if (GET_CODE (x) == PLUS
640 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
643 /* PR 31232: Do not allow INT+INT as an address. */
648 if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
649 || GET_CODE (x) == POST_INC
650 || GET_CODE (x) == PRE_DEC)
/* Under strict checking, only hard registers are acceptable bases.  */
654 && REGNO_OK_FOR_BASE_P (REGNO (x))
655 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
658 if (xstormy16_below100_symbol (x, mode))
664 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
666 On this chip, this is true if the address is valid with an offset
667 of 0 but not of 6, because in that case it cannot be used as an
668 address for DImode or DFmode, or if the address is a post-increment
669 or pre-decrement address. */
672 xstormy16_mode_dependent_address_p (const_rtx x)
/* Offset 6 probes the last word of an 8-byte (DImode/DFmode) access.  */
674 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
675 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6)
678 if (GET_CODE (x) == PLUS
679 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
680 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
683 /* Auto-increment addresses are now treated generically in recog.c. */
/* Predicate: a memory operand whose address is NOT reg+offset,
   i.e. one that fits the short addressing forms.  */
688 short_memory_operand (rtx x, enum machine_mode mode)
690 if (! memory_operand (x, mode))
692 return (GET_CODE (XEXP (x, 0)) != PLUS);
695 /* Splitter for the 'move' patterns, for modes not directly implemented
696 by hardware. Emit insns to copy a value of mode MODE from SRC to
699 This function is only called when reload_completed. */
702 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
704 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
/* direction is +1 for low-word-first, -1 for high-word-first.  */
705 int direction, end, i;
706 int src_modifies = 0;
707 int dest_modifies = 0;
708 int src_volatile = 0;
709 int dest_volatile = 0;
711 rtx auto_inc_reg_rtx = NULL_RTX;
713 /* Check initial conditions. */
714 gcc_assert (reload_completed
715 && mode != QImode && mode != HImode
716 && nonimmediate_operand (dest, mode)
717 && general_operand (src, mode));
719 /* This case is not supported below, and shouldn't be generated. */
720 gcc_assert (! MEM_P (dest) || ! MEM_P (src));
722 /* This case is very very bad after reload, so trap it now. */
723 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
725 /* The general idea is to copy by words, offsetting the source and
726 destination. Normally the least-significant word will be copied
727 first, but for pre-dec operations it's better to copy the
728 most-significant word first. Only one operand can be a pre-dec
731 It's also possible that the copy overlaps so that the direction
/* Record side effects / volatility of the MEM operand (if any) and
   strip MEM_VOLATILE_P so the per-word MEMs can be rebuilt below.  */
737 mem_operand = XEXP (dest, 0);
738 dest_modifies = side_effects_p (mem_operand);
739 if (auto_inc_p (mem_operand))
740 auto_inc_reg_rtx = XEXP (mem_operand, 0);
741 dest_volatile = MEM_VOLATILE_P (dest);
744 dest = copy_rtx (dest);
745 MEM_VOLATILE_P (dest) = 0;
748 else if (MEM_P (src))
750 mem_operand = XEXP (src, 0);
751 src_modifies = side_effects_p (mem_operand);
752 if (auto_inc_p (mem_operand))
753 auto_inc_reg_rtx = XEXP (mem_operand, 0);
754 src_volatile = MEM_VOLATILE_P (src);
757 src = copy_rtx (src);
758 MEM_VOLATILE_P (src) = 0;
762 mem_operand = NULL_RTX;
/* Register-to-register: copy high-first when dest overlaps above src.  */
764 if (mem_operand == NULL_RTX)
768 && reg_overlap_mentioned_p (dest, src)
769 && REGNO (dest) > REGNO (src))
/* Pre-decrement addressing wants the high word first.  */
772 else if (GET_CODE (mem_operand) == PRE_DEC
773 || (GET_CODE (mem_operand) == PLUS
774 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
/* Load where dest registers overlap the address registers.  */
776 else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
780 gcc_assert (REG_P (dest));
781 regno = REGNO (dest);
783 gcc_assert (refers_to_regno_p (regno, regno + num_words,
786 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
788 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
792 /* This means something like
793 (set (reg:DI r0) (mem:DI (reg:HI r1)))
794 which we'd need to support by doing the set of the second word
/* Emit one word_mode move per word, in the chosen direction.  */
799 end = direction < 0 ? -1 : num_words;
800 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
802 rtx w_src, w_dest, insn;
805 w_src = gen_rtx_MEM (word_mode, mem_operand);
807 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
809 MEM_VOLATILE_P (w_src) = 1;
811 w_dest = gen_rtx_MEM (word_mode, mem_operand);
813 w_dest = simplify_gen_subreg (word_mode, dest, mode,
816 MEM_VOLATILE_P (w_dest) = 1;
818 /* The simplify_subreg calls must always be able to simplify. */
819 gcc_assert (GET_CODE (w_src) != SUBREG
820 && GET_CODE (w_dest) != SUBREG);
822 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
/* Preserve the REG_INC note for the auto-increment register.  */
823 if (auto_inc_reg_rtx)
824 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
830 /* Expander for the 'move' patterns. Emit insns to copy a value of
831 mode MODE from SRC to DEST. */
834 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
/* Lower a PRE_MODIFY destination address: update the base register
   explicitly (clobbering carry), then use a plain MEM.  */
836 if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
838 rtx pmv = XEXP (dest, 0);
839 rtx dest_reg = XEXP (pmv, 0);
840 rtx dest_mod = XEXP (pmv, 1);
841 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
842 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
844 dest = gen_rtx_MEM (mode, dest_reg);
845 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
/* Same lowering for a PRE_MODIFY source address.  */
847 else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
849 rtx pmv = XEXP (src, 0);
850 rtx src_reg = XEXP (pmv, 0);
851 rtx src_mod = XEXP (pmv, 1);
852 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
853 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
855 src = gen_rtx_MEM (mode, src_reg);
856 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
859 /* There are only limited immediate-to-memory move instructions. */
860 if (! reload_in_progress
861 && ! reload_completed
863 && (! CONST_INT_P (XEXP (dest, 0))
864 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
865 && ! xstormy16_below100_operand (dest, mode)
867 && GET_CODE (src) != SUBREG)
868 src = copy_to_mode_reg (mode, src)
870 /* Don't emit something we would immediately split. */
872 && mode != HImode && mode != QImode)
874 xstormy16_split_move (mode, dest, src);
878 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
883 The stack is laid out as follows:
887 Register save area (up to 4 words)
888 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
890 AP-> Return address (two words)
891 9th procedure parameter word
892 10th procedure parameter word
894 last procedure parameter word
896 The frame pointer location is tuned to make it most likely that all
897 parameters and local variables can be accessed using a load-indexed
900 /* A structure to describe the layout. */
901 struct xstormy16_stack_layout
903 /* Size of the topmost three items on the stack. */
905 int register_save_size;
906 int stdarg_save_size;
907 /* Sum of the above items. */
909 /* Various offsets. */
910 int first_local_minus_ap;
915 /* Does REGNO need to be saved? */
/* A register is saved if it is live and call-saved, or -- in an
   interrupt function (IFUN) -- if it is a non-fixed call-used register
   (other than carry) that is live or the function is not a leaf.  */
916 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
917 ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
918 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
919 && (REGNUM != CARRY_REGNUM) \
920 && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
922 /* Compute the stack layout. */
924 struct xstormy16_stack_layout
925 xstormy16_compute_stack_layout (void)
927 struct xstormy16_stack_layout layout;
929 const int ifun = xstormy16_interrupt_function_p ();
931 layout.locals_size = get_frame_size ();
/* One word per register that REG_NEEDS_SAVE says we must preserve.  */
933 layout.register_save_size = 0;
934 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
935 if (REG_NEEDS_SAVE (regno, ifun))
936 layout.register_save_size += UNITS_PER_WORD;
/* Stdarg functions also dump all argument registers to the stack.  */
939 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
941 layout.stdarg_save_size = 0;
943 layout.frame_size = (layout.locals_size
944 + layout.register_save_size
945 + layout.stdarg_save_size);
/* Place the frame pointer so load-indexed (12-bit offset) addressing
   can reach as much of the frame and arguments as possible.  */
947 if (crtl->args.size <= 2048 && crtl->args.size != -1)
949 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
950 + crtl->args.size <= 2048)
951 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
953 layout.fp_minus_ap = 2048 - crtl->args.size;
956 layout.fp_minus_ap = (layout.stdarg_save_size
957 + layout.register_save_size
958 - INCOMING_FRAME_SP_OFFSET);
959 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
960 - layout.fp_minus_ap);
961 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
965 /* Worker function for TARGET_CAN_ELIMINATE. */
/* AP->SP elimination is only possible when no frame pointer is needed;
   the fall-through result for other pairs is elided in this listing.  */
968 xstormy16_can_eliminate (const int from, const int to)
970 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
971 ? ! frame_pointer_needed
975 /* Determine how all the special registers get eliminated. */
/* Returns the offset to add when replacing FROM with TO, derived from
   the computed stack layout.  */
978 xstormy16_initial_elimination_offset (int from, int to)
980 struct xstormy16_stack_layout layout;
983 layout = xstormy16_compute_stack_layout ();
985 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
986 result = layout.sp_minus_fp - layout.locals_size;
987 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
988 result = - layout.locals_size;
989 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
990 result = - layout.fp_minus_ap;
991 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
992 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
/* Emit DEST = SRC0 + SRC1 (HImode) with the explicit carry clobber the
   post-reload add pattern requires; returns the emitted insn.  */
1000 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1002 rtx set, clobber, insn;
1004 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1005 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1006 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1010 /* Called after register allocation to add any instructions needed for
1011 the prologue. Using a prologue insn is favored compared to putting
1012 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1013 since it allows the scheduler to intermix instructions with the
1014 saves of the caller saved registers. In some cases, it might be
1015 necessary to emit a barrier instruction as the last insn to prevent
1018 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1019 so that the debug info generation code can handle them properly. */
1022 xstormy16_expand_prologue (void)
1024 struct xstormy16_stack_layout layout;
1028 const int ifun = xstormy16_interrupt_function_p ();
/* Pushes are stores through a post-incremented stack pointer.  */
1030 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1031 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1033 layout = xstormy16_compute_stack_layout ();
1035 if (flag_stack_usage_info)
1036 current_function_static_stack_size = layout.frame_size;
1038 if (layout.locals_size >= 32768)
1039 error ("local variable memory requirements exceed capacity");
1041 /* Save the argument registers if necessary. */
1042 if (layout.stdarg_save_size)
1043 for (regno = FIRST_ARGUMENT_REGISTER;
1044 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1048 rtx reg = gen_rtx_REG (HImode, regno);
1050 insn = emit_move_insn (mem_push_rtx, reg);
1051 RTX_FRAME_RELATED_P (insn) = 1;
/* Dwarf cannot represent the post-inc push directly; attach an
   equivalent two-part REG_FRAME_RELATED_EXPR (store, then SP bump).  */
1053 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1055 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1056 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1058 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1059 plus_constant (stack_pointer_rtx,
1060 GET_MODE_SIZE (Pmode)));
1061 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1062 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1063 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1066 /* Push each of the registers to save. */
1067 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1068 if (REG_NEEDS_SAVE (regno, ifun))
1071 rtx reg = gen_rtx_REG (HImode, regno);
1073 insn = emit_move_insn (mem_push_rtx, reg);
1074 RTX_FRAME_RELATED_P (insn) = 1;
/* Same dwarf annotation as for the stdarg pushes above.  */
1076 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1078 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1079 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1081 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1082 plus_constant (stack_pointer_rtx,
1083 GET_MODE_SIZE (Pmode)));
1084 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1085 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1086 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1089 /* It's just possible that the SP here might be what we need for
/* If FP == SP-before-locals, set it now and skip the adjust below.  */
1091 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1093 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1094 RTX_FRAME_RELATED_P (insn) = 1;
1097 /* Allocate space for local variables. */
1098 if (layout.locals_size)
1100 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1101 GEN_INT (layout.locals_size));
1102 RTX_FRAME_RELATED_P (insn) = 1;
1105 /* Set up the frame pointer, if required. */
1106 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1108 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1109 RTX_FRAME_RELATED_P (insn) = 1;
1111 if (layout.sp_minus_fp)
1113 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1114 hard_frame_pointer_rtx,
1115 GEN_INT (- layout.sp_minus_fp));
1116 RTX_FRAME_RELATED_P (insn) = 1;
1121 /* Do we need an epilogue at all? */
/* True only after reload, for non-interrupt functions with an empty
   frame -- such functions can simply fall into a bare return.  */
1124 direct_return (void)
1126 return (reload_completed
1127 && xstormy16_compute_stack_layout ().frame_size == 0
1128 && ! xstormy16_interrupt_function_p ());
1131 /* Called after register allocation to add any instructions needed for
1132 the epilogue. Using an epilogue insn is favored compared to putting
1133 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1134 since it allows the scheduler to intermix instructions with the
1135 saves of the caller saved registers. In some cases, it might be
1136 necessary to emit a barrier instruction as the last insn to prevent
1140 xstormy16_expand_epilogue (void)
1142 struct xstormy16_stack_layout layout;
1145 const int ifun = xstormy16_interrupt_function_p ();
/* Pops are loads through a pre-decremented stack pointer.  */
1147 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1148 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1150 layout = xstormy16_compute_stack_layout ();
1152 /* Pop the stack for the locals. */
1153 if (layout.locals_size)
/* Restoring SP from FP is cheaper when FP already points there.  */
1155 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1156 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1158 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1159 GEN_INT (- layout.locals_size));
1162 /* Restore any call-saved registers. */
/* Pop in reverse order of the prologue's pushes.  */
1163 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1164 if (REG_NEEDS_SAVE (regno, ifun))
1165 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1167 /* Pop the stack for the stdarg save area. */
1168 if (layout.stdarg_save_size)
1169 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1170 GEN_INT (- layout.stdarg_save_size));
/* Interrupt functions need the special return-from-interrupt insn.  */
1174 emit_jump_insn (gen_return_internal_interrupt ());
1176 emit_jump_insn (gen_return_internal ());
/* EPILOGUE_USES worker: after reload, call-used registers restored by
   an interrupt epilogue must be considered live at function exit.  */
1180 xstormy16_epilogue_uses (int regno)
1182 if (reload_completed && call_used_regs[regno])
1184 const int ifun = xstormy16_interrupt_function_p ();
1185 return REG_NEEDS_SAVE (regno, ifun);
/* Profiling (-p/-pg) is not implemented for this target.  */
1191 xstormy16_function_profiler (void)
1193 sorry ("function_profiler support");
1196 /* Update CUM to advance past an argument in the argument list. The
1197 values MODE, TYPE and NAMED describe that argument. Once this is
1198 done, the variable CUM is suitable for analyzing the *following*
1199 argument with `TARGET_FUNCTION_ARG', etc.
1201 This function need not do anything if the argument in question was
1202 passed on the stack. The compiler knows how to track the amount of
1203 stack space used for arguments without any special help. However,
1204 it makes life easier for xstormy16_build_va_list if it does update
1208 xstormy16_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1209 const_tree type, bool named ATTRIBUTE_UNUSED)
1211 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1213 /* If an argument would otherwise be passed partially in registers,
1214 and partially on the stack, the whole of it is passed on the
1216 if (*cum < NUM_ARGUMENT_REGISTERS
1217 && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1218 *cum = NUM_ARGUMENT_REGISTERS;
/* Advance by the argument's size in words.  */
1220 *cum += XSTORMY16_WORD_SIZE (type, mode);
/* TARGET_FUNCTION_ARG worker: return the register for this argument,
   or pass on the stack when it must or would overflow the register
   file.  (The stack-passing return is elided in this listing.)  */
1224 xstormy16_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1225 const_tree type, bool named ATTRIBUTE_UNUSED)
1227 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* VOIDmode marks the end-of-arguments sentinel.  */
1229 if (mode == VOIDmode)
1231 if (targetm.calls.must_pass_in_stack (mode, type)
1232 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1234 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
1237 /* Build the va_list type.
1239 For this chip, va_list is a record containing a counter and a pointer.
1240 The counter is of type 'int' and indicates how many bytes
1241 have been used to date. The pointer indicates the stack position
1242 for arguments that have not been passed in registers.
1243 To keep the layout nice, the pointer is first in the structure. */
1246 xstormy16_build_builtin_va_list (void)
1248 tree f_1, f_2, record, type_decl;
/* Create the RECORD_TYPE and a TYPE_DECL naming it __va_list_tag.  */
1250 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1251 type_decl = build_decl (BUILTINS_LOCATION,
1252 TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* Field "base": pointer to the stack overflow area (type arg dropped
   by extraction — upstream it is char_ptr_type_node; confirm).  */
1254 f_1 = build_decl (BUILTINS_LOCATION,
1255 FIELD_DECL, get_identifier ("base"),
/* Field "count": bytes of arguments consumed so far.  */
1257 f_2 = build_decl (BUILTINS_LOCATION,
1258 FIELD_DECL, get_identifier ("count"),
1259 unsigned_type_node);
1261 DECL_FIELD_CONTEXT (f_1) = record;
1262 DECL_FIELD_CONTEXT (f_2) = record;
/* Wire the fields into the record and lay it out.  */
1264 TYPE_STUB_DECL (record) = type_decl;
1265 TYPE_NAME (record) = type_decl;
1266 TYPE_FIELDS (record) = f_1;
1267 DECL_CHAIN (f_1) = f_2;
1269 layout_type (record);
1274 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1275 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1276 variable to initialize. NEXTARG is the machine independent notion of the
1277 'next' argument after the variable arguments. */
1280 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1282 tree f_base, f_count;
/* Interrupt functions have a different incoming-frame layout, so
   va_start cannot work in them.  */
1286 if (xstormy16_interrupt_function_p ())
1287 error ("cannot use va_start in interrupt function");
1289 f_base = TYPE_FIELDS (va_list_type_node);
1290 f_count = DECL_CHAIN (f_base);
/* Build COMPONENT_REFs for valist.base and valist.count.  */
1292 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1293 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
/* base = virtual_incoming_args - INCOMING_FRAME_SP_OFFSET.  */
1296 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1297 u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
1298 u = fold_convert (TREE_TYPE (count), u);
1299 t = fold_build_pointer_plus (t, u);
1300 t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
1301 TREE_SIDE_EFFECTS (t) = 1;
1302 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* count = bytes of named arguments already passed in registers.  */
1304 t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
1305 build_int_cst (NULL_TREE,
1306 crtl->args.info * UNITS_PER_WORD));
1307 TREE_SIDE_EFFECTS (t) = 1;
1308 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1311 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1312 of type va_list as a tree, TYPE is the type passed to va_arg.
1313 Note: This algorithm is documented in stormy-abi. */
1316 xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
1317 gimple_seq *post_p ATTRIBUTE_UNUSED)
1319 tree f_base, f_count;
1321 tree count_tmp, addr, t;
1322 tree lab_gotaddr, lab_fromstack;
1323 int size, size_of_reg_args, must_stack;
1326 f_base = TYPE_FIELDS (va_list_type_node);
1327 f_count = DECL_CHAIN (f_base);
1329 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1330 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1333 if (TYPE_MODE (type) == VOIDmode)
/* Decide whether this type can ever be in registers, and compute its
   word-rounded size as a gimple value.  */
1336 must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
1337 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1338 gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
1340 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1342 count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
1343 lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
1344 lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
1345 addr = create_tmp_var (ptr_type_node, NULL);
/* If count + size would overflow the register-save area, the argument
   lives on the stack: branch to lab_fromstack.  */
1351 t = fold_convert (TREE_TYPE (count), size_tree);
1352 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1353 r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
1354 t = build2 (GT_EXPR, boolean_type_node, t, r);
1355 t = build3 (COND_EXPR, void_type_node, t,
1356 build1 (GOTO_EXPR, void_type_node, lab_fromstack),
1358 gimplify_and_add (t, pre_p);
/* Register case: addr = base + count.  */
1360 t = fold_build_pointer_plus (base, count_tmp);
1361 gimplify_assign (addr, t, pre_p);
1363 t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
1364 gimplify_and_add (t, pre_p);
1366 t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
1367 gimplify_and_add (t, pre_p);
1370 /* Arguments larger than a word might need to skip over some
1371 registers, since arguments are either passed entirely in
1372 registers or entirely on the stack. */
1373 size = PUSH_ROUNDING (int_size_in_bytes (type));
1374 if (size > 2 || size < 0 || must_stack)
/* Skip to the end of the register-save area if we have not already
   passed it (count_tmp = max (count_tmp, reg area size)).  */
1378 r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
1379 u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);
1381 t = fold_convert (TREE_TYPE (count), r);
1382 t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
1383 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
1384 gimplify_and_add (t, pre_p);
/* Stack case: addr = base - (count_tmp - reg_area - SP offset + size)
   — i.e. index backwards from base into the caller's stack area.
   NOTE(review): relies on the stormy-abi stack layout; see that doc.  */
1387 t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1388 + INCOMING_FRAME_SP_OFFSET);
1389 t = fold_convert (TREE_TYPE (count), t);
1390 t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
1391 t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
1392 fold_convert (TREE_TYPE (count), size_tree));
1393 t = fold_convert (TREE_TYPE (t), fold (t));
1394 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1395 t = fold_build_pointer_plus (base, t);
1396 gimplify_assign (addr, t, pre_p);
1398 t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
1399 gimplify_and_add (t, pre_p);
/* Advance count past this argument for the next va_arg.  */
1401 t = fold_convert (TREE_TYPE (count), size_tree);
1402 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1403 gimplify_assign (count, t, pre_p);
/* Dereference the computed address as a TYPE value.  */
1405 addr = fold_convert (build_pointer_type (type), addr);
1406 return build_va_arg_indirect_ref (addr);
1409 /* Worker function for TARGET_TRAMPOLINE_INIT.
   Writes a 4-halfword trampoline at M_TRAMP:
     mov rSTATIC_CHAIN,#<static_chain>   (0x3130 | regnum, then the value)
     jmpf <fndecl>                        (low byte | 0x0200, then high byte)
   reg_addr walks through the trampoline two bytes at a time.  */
1412 xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
1414 rtx temp = gen_reg_rtx (HImode);
1415 rtx reg_fnaddr = gen_reg_rtx (HImode);
1416 rtx reg_addr, reg_addr_mem;
1418 reg_addr = copy_to_reg (XEXP (m_tramp, 0));
1419 reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);
/* Halfword 0: the mov opcode targeting the static-chain register.  */
1421 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1422 emit_move_insn (reg_addr_mem, temp);
1423 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1424 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
/* Halfword 1: the static chain value itself.  */
1426 emit_move_insn (temp, static_chain);
1427 emit_move_insn (reg_addr_mem, temp);
1428 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1429 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
/* Halfword 2: jmpf opcode combined with the target's low address byte.  */
1431 emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
1432 emit_move_insn (temp, reg_fnaddr);
1433 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1434 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1435 emit_move_insn (reg_addr_mem, temp);
1436 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1437 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
/* Halfword 3: the remaining high bits of the function address.  */
1439 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1440 emit_move_insn (reg_addr_mem, reg_fnaddr);
1443 /* Worker function for TARGET_FUNCTION_VALUE.
   Return the RTL for a value of VALTYPE returned by a function: always
   in RETURN_VALUE_REGNUM, after applying the target's PROMOTE_MODE.  */
1446 xstormy16_function_value (const_tree valtype,
1447 const_tree func ATTRIBUTE_UNUSED,
1448 bool outgoing ATTRIBUTE_UNUSED)
1450 enum machine_mode mode;
1451 mode = TYPE_MODE (valtype);
1452 PROMOTE_MODE (mode, 0, valtype);
1453 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1456 /* Worker function for TARGET_LIBCALL_VALUE.
   Library calls return in the same register as normal functions.  */
1459 xstormy16_libcall_value (enum machine_mode mode,
1460 const_rtx fun ATTRIBUTE_UNUSED)
1462 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1465 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
   Only RETURN_VALUE_REGNUM ever holds a function result.  */
1468 xstormy16_function_value_regno_p (const unsigned int regno)
1470 return (regno == RETURN_VALUE_REGNUM);
1473 /* A C compound statement that outputs the assembler code for a thunk function,
1474 used to implement C++ virtual function calls with multiple inheritance. The
1475 thunk acts as a wrapper around a virtual function, adjusting the implicit
1476 object parameter before handing control off to the real function.
1478 First, emit code to add the integer DELTA to the location that contains the
1479 incoming first argument. Assume that this argument contains a pointer, and
1480 is the one used to pass the `this' pointer in C++. This is the incoming
1481 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1482 addition must preserve the values of all other incoming arguments.
1484 After the addition, emit code to jump to FUNCTION, which is a
1485 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1486 the return address. Hence returning from FUNCTION will return to whoever
1487 called the current `thunk'.
1489 The effect must be as if @var{function} had been called directly
1490 with the adjusted first argument. This macro is responsible for
1491 emitting all of the code for a thunk function;
1492 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1495 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1496 extracted from it.) It might possibly be useful on some targets, but
1500 xstormy16_asm_output_mi_thunk (FILE *file,
1501 tree thunk_fndecl ATTRIBUTE_UNUSED,
1502 HOST_WIDE_INT delta,
1503 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1506 int regnum = FIRST_ARGUMENT_REGISTER;
1508 /* There might be a hidden first argument for a returned structure. */
1509 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
/* Add DELTA (truncated to 16 bits) to the `this' register, then
   tail-jump (jmpf, not call) to the real function.  */
1512 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1513 fputs ("\tjmpf ", file);
1514 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1518 /* The purpose of this function is to override the default behavior of
1519 BSS objects. Normally, they go into .bss or .sbss via ".common"
1520 directives, but we need to override that and put them in
1521 .bss_below100. We can't just use a section override (like we do
1522 for .data_below100), because that makes them initialized rather
1523 than uninitialized. */
1526 xstormy16_asm_output_aligned_common (FILE *stream,
1533 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
/* Below-100 objects go into the special .bss_below100 section and are
   emitted as explicit .space, not as a .comm directive.  */
1538 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1539 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1544 switch_to_section (bss100_section);
1552 name2 = default_strip_name_encoding (name);
1554 fprintf (stream, "\t.globl\t%s\n", name2);
1556 fprintf (stream, "\t.p2align %d\n", p2align);
1557 fprintf (stream, "\t.type\t%s, @object\n", name2);
1558 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1559 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
/* Ordinary case: emit .local (for non-public symbols — the guarding
   condition was lost to extraction; confirm against upstream) and
   a standard .comm directive.  */
1565 fprintf (stream, "\t.local\t");
1566 assemble_name (stream, name);
1567 fprintf (stream, "\n");
1569 fprintf (stream, "\t.comm\t");
1570 assemble_name (stream, name);
1571 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1574 /* Implement TARGET_ASM_INIT_SECTIONS.
   Create the .bss_below100 section object used for below-100 BSS data.  */
1577 xstormy16_asm_init_sections (void)
1580 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
1581 output_section_asm_op,
1582 "\t.section \".bss_below100\",\"aw\",@nobits");
1585 /* Mark symbols with the "below100" attribute so that we can use the
1586 special addressing modes for them. */
1589 xstormy16_encode_section_info (tree decl, rtx r, int first)
1591 default_encode_section_info (decl, r, first);
/* Either spelling of the attribute marks the symbol.  */
1593 if (TREE_CODE (decl) == VAR_DECL
1594 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1595 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1597 rtx symbol = XEXP (r, 0);
1599 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
/* The flag is later tested by the addressing-mode code and by
   xstormy16_asm_output_aligned_common.  */
1600 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
/* Install the target hooks for emitting constructor/destructor entries.  */
1604 #undef TARGET_ASM_CONSTRUCTOR
1605 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1606 #undef TARGET_ASM_DESTRUCTOR
1607 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1609 /* Output constructors and destructors. Just like
1610 default_named_section_asm_out_* but don't set the sections writable. */
1613 xstormy16_asm_out_destructor (rtx symbol, int priority)
1615 const char *section = ".dtors";
1618 /* ??? This only works reliably with the GNU linker. */
1619 if (priority != DEFAULT_INIT_PRIORITY)
/* Non-default priorities go into ".dtors.NNNNN" subsections.  */
1621 sprintf (buf, ".dtors.%.5u",
1622 /* Invert the numbering so the linker puts us in the proper
1623 order; constructors are run from right to left, and the
1624 linker sorts in increasing order. */
1625 MAX_INIT_PRIORITY - priority);
/* Note: section flags 0 — deliberately NOT SECTION_WRITE.  */
1629 switch_to_section (get_section (section, 0, NULL));
1630 assemble_align (POINTER_SIZE);
1631 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Emit a constructor-table entry for SYMBOL; mirrors
   xstormy16_asm_out_destructor but uses the .ctors section.  */
1635 xstormy16_asm_out_constructor (rtx symbol, int priority)
1637 const char *section = ".ctors";
1640 /* ??? This only works reliably with the GNU linker. */
1641 if (priority != DEFAULT_INIT_PRIORITY)
1643 sprintf (buf, ".ctors.%.5u",
1644 /* Invert the numbering so the linker puts us in the proper
1645 order; constructors are run from right to left, and the
1646 linker sorts in increasing order. */
1647 MAX_INIT_PRIORITY - priority);
/* Section flags 0: keep the table read-only.  */
1651 switch_to_section (get_section (section, 0, NULL));
1652 assemble_align (POINTER_SIZE);
1653 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1656 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1658 Print a memory address as an operand to reference that memory location. */
1661 xstormy16_print_operand_address (FILE *file, rtx address)
1663 HOST_WIDE_INT offset;
1664 int pre_dec, post_inc;
1666 /* There are a few easy cases. */
1667 if (CONST_INT_P (address))
/* Absolute addresses are printed masked to 16 bits.  */
1669 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1673 if (CONSTANT_P (address) || LABEL_P (address))
1675 output_addr_const (file, address);
1679 /* Otherwise, it's hopefully something of the form
1680 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1681 if (GET_CODE (address) == PLUS)
1683 gcc_assert (CONST_INT_P (XEXP (address, 1)));
1684 offset = INTVAL (XEXP (address, 1));
1685 address = XEXP (address, 0);
/* Strip any pre-decrement / post-increment wrapper and remember it
   (the code that prints "-(" / ")+" was lost to extraction).  */
1690 pre_dec = (GET_CODE (address) == PRE_DEC);
1691 post_inc = (GET_CODE (address) == POST_INC);
1692 if (pre_dec || post_inc)
1693 address = XEXP (address, 0);
1695 gcc_assert (REG_P (address));
1700 fputs (reg_names [REGNO (address)], file);
/* Print the ",offset" part when one was present.  */
1704 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1708 /* Worker function for TARGET_PRINT_OPERAND.
1710 Print an operand to an assembler instruction. */
1713 xstormy16_print_operand (FILE *file, rtx x, int code)
/* The switch over CODE was partially lost to extraction; the visible
   cases handle 'B'/'b' (bit masks), 'C' (bare symbol), 'o'/'O'
   (immediate +/- one) and 'h' (shift mask) — confirm against upstream.  */
1718 /* There is either one bit set, or one bit clear, in X.
1719 Print it preceded by '#'. */
1721 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1722 HOST_WIDE_INT xx = 1;
1725 if (CONST_INT_P (x))
1728 output_operand_lossage ("'B' operand is not constant");
1730 /* GCC sign-extends masks with the MSB set, so we have to
1731 detect all the cases that differ only in sign extension
1732 beyond the bits we care about. Normally, the predicates
1733 and constraints ensure that we have the right values. This
1734 works correctly for valid masks. */
1735 if (bits_set[xx & 7] <= 1)
1737 /* Remove sign extension bits. */
1738 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1740 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
/* One bit set: print its index.  */
1742 l = exact_log2 (xx);
1746 /* Add sign extension bits. */
1747 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1748 xx |= ~(HOST_WIDE_INT)0xff;
1749 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1750 xx |= ~(HOST_WIDE_INT)0xffff;
/* One bit clear: print the index of the clear bit.  */
1751 l = exact_log2 (~xx);
1755 output_operand_lossage ("'B' operand has multiple bits set");
1757 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1762 /* Print the symbol without a surrounding @fptr(). */
1763 if (GET_CODE (x) == SYMBOL_REF)
1764 assemble_name (file, XSTR (x, 0));
1765 else if (LABEL_P (x))
1766 output_asm_label (x);
1768 xstormy16_print_operand_address (file, x);
1773 /* Print the immediate operand less one, preceded by '#'.
1774 For 'O', negate it first. */
1776 HOST_WIDE_INT xx = 0;
1778 if (CONST_INT_P (x))
1781 output_operand_lossage ("'o' operand is not constant");
1786 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1791 /* Print the shift mask for bp/bn. */
1793 HOST_WIDE_INT xx = 1;
1796 if (CONST_INT_P (x))
1799 output_operand_lossage ("'B' operand is not constant");
1803 fputs (IMMEDIATE_PREFIX, file);
1804 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1809 /* Handled below. */
1813 output_operand_lossage ("xstormy16_print_operand: unknown code");
/* No (or handled) code letter: print X itself.  */
1817 switch (GET_CODE (x))
1820 fputs (reg_names [REGNO (x)], file);
1824 xstormy16_print_operand_address (file, XEXP (x, 0));
1828 /* Some kind of constant or label; an immediate operand,
1829 so prefix it with '#' for the assembler. */
1830 fputs (IMMEDIATE_PREFIX, file);
1831 output_addr_const (file, x);
1838 /* Expander for the `casesi' pattern.
1839 INDEX is the index of the switch statement.
1840 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1841 to the first table entry.
1842 RANGE is the number of table entries.
1843 TABLE is an ADDR_VEC that is the jump table.
1844 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1845 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1848 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1849 rtx table, rtx default_label)
1851 HOST_WIDE_INT range_i = INTVAL (range);
1854 /* This code uses 'br', so it can deal only with tables of size up to
1856 if (range_i >= 8192)
1857 sorry ("switch statement of size %lu entries too large",
1858 (unsigned long) range_i);
/* Normalize: index -= lower_bound, then branch to default if the
   (unsigned) result exceeds RANGE.  */
1860 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1862 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
/* Scale by 4 (each jmpf table entry is 4 bytes) and dispatch.  */
1864 int_index = gen_lowpart_common (HImode, index);
1865 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1866 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1869 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1870 instructions, without label or alignment or any other special
1871 constructs. We know that the previous instruction will be the
1872 `tablejump_pcrel' output above.
1874 TODO: it might be nice to output 'br' instructions if they could
1878 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1882 switch_to_section (current_function_section ());
/* One jmpf per table entry.  */
1884 vlen = XVECLEN (table, 0);
1885 for (idx = 0; idx < vlen; idx++)
1887 fputs ("\tjmpf ", file);
1888 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1893 /* Expander for the `call' patterns.
1894 RETVAL is the RTL for the return register or NULL for void functions.
1895 DEST is the function to call, expressed as a MEM.
1896 COUNTER is ignored. */
1899 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1902 enum machine_mode mode;
1904 gcc_assert (MEM_P (dest));
1905 dest = XEXP (dest, 0);
/* Indirect calls must go through a register.  */
1907 if (! CONSTANT_P (dest) && ! REG_P (dest))
1908 dest = force_reg (Pmode, dest);
1913 mode = GET_MODE (retval);
/* Build (call ...) and, if there is a return value, wrap it in a SET.  */
1915 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1918 call = gen_rtx_SET (VOIDmode, retval, call);
1920 if (! CONSTANT_P (dest))
/* The indirect-call pattern also uses a zeroed temp register
   (presumably the high address bits — confirm with stormy16.md).  */
1922 temp = gen_reg_rtx (HImode);
1923 emit_move_insn (temp, const0_rtx);
1928 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1929 gen_rtx_USE (VOIDmode, temp)));
1930 emit_call_insn (call);
1933 /* Expanders for multiword computational operations. */
1935 /* Expander for arithmetic operations; emit insns to compute
1937 (set DEST (CODE:MODE SRC0 SRC1))
1939 When CODE is COMPARE, a branch template is generated
1940 (this saves duplicating code in xstormy16_split_cbranch). */
1943 xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
1944 rtx dest, rtx src0, rtx src1)
/* Operates word-by-word (HImode) over the multiword MODE, chaining
   the carry between words via the add/sub-with-carry patterns.  */
1946 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1951 emit_move_insn (src0, const0_rtx);
1953 for (i = 0; i < num_words; i++)
1955 rtx w_src0, w_src1, w_dest;
/* Extract the i'th word of each operand.  */
1958 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1959 i * UNITS_PER_WORD);
1960 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1961 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
/* Addition: skip words where adding a literal zero would be a no-op
   (the surrounding condition was lost to extraction; confirm),
   otherwise use add-with-carry patterns.  */
1967 && CONST_INT_P (w_src1)
1968 && INTVAL (w_src1) == 0)
1972 insn = gen_addchi4 (w_dest, w_src0, w_src1);
1974 insn = gen_addchi5 (w_dest, w_src0, w_src1);
/* For COMPARE, the final word becomes a combined subtract-and-branch
   parallel so xstormy16_split_cbranch need not rebuild it.  */
1980 if (code == COMPARE && i == num_words - 1)
1982 rtx branch, sub, clobber, sub_1;
1984 sub_1 = gen_rtx_MINUS (HImode, w_src0,
1985 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
1986 sub = gen_rtx_SET (VOIDmode, w_dest,
1987 gen_rtx_MINUS (HImode, sub_1, w_src1));
1988 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1989 branch = gen_rtx_SET (VOIDmode, pc_rtx,
1990 gen_rtx_IF_THEN_ELSE (VOIDmode,
1996 insn = gen_rtx_PARALLEL (VOIDmode,
1997 gen_rtvec (3, branch, sub, clobber));
/* Subtraction: same zero-word shortcut, then sub-with-carry.  */
2001 && CONST_INT_P (w_src1)
2002 && INTVAL (w_src1) == 0)
2005 insn = gen_subchi4 (w_dest, w_src0, w_src1);
2007 insn = gen_subchi5 (w_dest, w_src0, w_src1);
/* Logical ops: a word of all-ones for AND (or all-zeros for IOR/XOR)
   is an identity, hence -(code == AND) below.  */
2013 if (CONST_INT_P (w_src1)
2014 && INTVAL (w_src1) == -(code == AND))
2017 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
/* NOT is a plain one-operand set.  */
2022 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
2033 /* If we emit nothing, try_split() will think we failed. So emit
2034 something that does nothing and can be optimized away. */
2039 /* The shift operations are split at output time for constant values;
2040 variable-width shifts get handed off to a library routine.
2042 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2043 SIZE_R will be a CONST_INT, X will be a hard register. */
2046 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2047 rtx x, rtx size_r, rtx temp)
2050 const char *r0, *r1, *rt;
2053 gcc_assert (CONST_INT_P (size_r)
/* Reduce the count modulo the mode's bit width.  */
2057 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
/* r0 = low word, r1 = high word of the SImode register pair.  */
2062 r0 = reg_names [REGNO (x)];
2063 r1 = reg_names [REGNO (x) + 1];
2065 /* For shifts of size 1, we can use the rotate instructions. */
2071 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2074 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2077 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2085 /* For large shifts, there are easy special cases. */
/* size == 16: a pure word move plus fill (0 or sign).  */
2091 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2094 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2097 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
/* size > 16: word move/fill followed by the residual shift.  */
2109 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2110 r1, r0, r0, r1, (int) size - 16);
2113 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2114 r0, r1, r1, r0, (int) size - 16);
2117 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2118 r0, r1, r1, r0, (int) size - 16);
2126 /* For the rest, we have to do more work. In particular, we
2127 need a temporary. */
2128 rt = reg_names [REGNO (temp)];
/* General 1 < size < 16 case: shift both words, recover the bits that
   cross the word boundary through the temp, then OR them back in.  */
2133 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2134 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2139 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2140 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2145 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2146 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2155 /* Attribute handling. */
2157 /* Return nonzero if the function is an interrupt function. */
2160 xstormy16_interrupt_function_p (void)
2164 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2165 any functions are declared, which is demonstrably wrong, but
2166 it is worked around here. FIXME. */
/* Check the current function's type for the "interrupt" attribute.  */
2170 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2171 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
/* Machine-specific attributes: "interrupt" on function types, and the
   "below100"/"BELOW100" pair on variables for the short addressing mode.  */
2174 #undef TARGET_ATTRIBUTE_TABLE
2175 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2177 static tree xstormy16_handle_interrupt_attribute
2178 (tree *, tree, tree, int, bool *);
2179 static tree xstormy16_handle_below100_attribute
2180 (tree *, tree, tree, int, bool *);
2182 static const struct attribute_spec xstormy16_attribute_table[] =
2184 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2185 affects_type_identity. */
2186 { "interrupt", 0, 0, false, true, true,
2187 xstormy16_handle_interrupt_attribute , false },
2188 { "BELOW100", 0, 0, false, false, false,
2189 xstormy16_handle_below100_attribute, false },
2190 { "below100", 0, 0, false, false, false,
2191 xstormy16_handle_below100_attribute, false },
/* Terminator entry.  */
2192 { NULL, 0, 0, false, false, false, NULL, false }
2195 /* Handle an "interrupt" attribute;
2196 arguments as in struct attribute_spec.handler. */
2199 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2200 tree args ATTRIBUTE_UNUSED,
2201 int flags ATTRIBUTE_UNUSED,
/* Only valid on function types; otherwise warn and discard.  */
2204 if (TREE_CODE (*node) != FUNCTION_TYPE)
2206 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2208 *no_add_attrs = true;
2214 /* Handle an "below" attribute;
2215 arguments as in struct attribute_spec.handler. */
2218 xstormy16_handle_below100_attribute (tree *node,
2219 tree name ATTRIBUTE_UNUSED,
2220 tree args ATTRIBUTE_UNUSED,
2221 int flags ATTRIBUTE_UNUSED,
/* Valid on variables, pointer types and type declarations only.  */
2224 if (TREE_CODE (*node) != VAR_DECL
2225 && TREE_CODE (*node) != POINTER_TYPE
2226 && TREE_CODE (*node) != TYPE_DECL)
2228 warning (OPT_Wattributes,
2229 "%<__BELOW100__%> attribute only applies to variables");
2230 *no_add_attrs = true;
2232 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
/* Automatic (stack) variables cannot live in the below-100 area.  */
2234 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2236 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2237 "with auto storage class");
2238 *no_add_attrs = true;
/* Machine builtins: 32/16 division and modulus helpers.  Each entry maps
   a builtin name to an insn code plus operand/type encodings consumed by
   xstormy16_init_builtins and xstormy16_expand_builtin.  */
2245 #undef TARGET_INIT_BUILTINS
2246 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2247 #undef TARGET_EXPAND_BUILTIN
2248 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2254 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2255 const char * arg_types; /* s=short,l=long, upper case for unsigned. */
/* Note: div and mod share an insn; they differ only in which of the
   two result registers ('r' vs 't') is the returned value.  */
2259 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2260 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2261 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2262 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2263 { NULL, 0, NULL, NULL }
/* Register every entry of s16builtins with the front end, building each
   function type from its arg_types string (first char = return type).  */
2267 xstormy16_init_builtins (void)
2269 tree args[2], ret_type, arg = NULL_TREE, ftype;
2272 ret_type = void_type_node;
2274 for (i = 0; s16builtins[i].name; i++)
/* arg_types[0] is the return type; the rest are parameters.  */
2276 n_args = strlen (s16builtins[i].arg_types) - 1;
2278 gcc_assert (n_args <= (int) ARRAY_SIZE (args));
2280 for (a = n_args - 1; a >= 0; a--)
2281 args[a] = NULL_TREE;
/* Walk backwards so index 0 (the return type) is decoded last.  */
2283 for (a = n_args; a >= 0; a--)
2285 switch (s16builtins[i].arg_types[a])
2287 case 's': arg = short_integer_type_node; break;
2288 case 'S': arg = short_unsigned_type_node; break;
2289 case 'l': arg = long_integer_type_node; break;
2290 case 'L': arg = long_unsigned_type_node; break;
2291 default: gcc_unreachable ();
2298 ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
/* The builtin's function code is its table index I.  */
2299 add_builtin_function (s16builtins[i].name, ftype,
2300 i, BUILT_IN_MD, NULL, NULL_TREE);
/* Expand a call to one of the s16builtins.  Decodes the arg_ops string:
   'r' = result operand (may reuse TARGET), 't' = scratch register,
   digits = actual call arguments, honoring each insn operand's
   predicate and constraint.  */
2305 xstormy16_expand_builtin (tree exp, rtx target,
2306 rtx subtarget ATTRIBUTE_UNUSED,
2307 enum machine_mode mode ATTRIBUTE_UNUSED,
2308 int ignore ATTRIBUTE_UNUSED)
2310 rtx op[10], args[10], pat, copyto[10], retval = 0;
2311 tree fndecl, argtree;
2314 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2315 argtree = TREE_OPERAND (exp, 1);
/* DECL_FUNCTION_CODE is the index assigned in init_builtins.  */
2316 i = DECL_FUNCTION_CODE (fndecl);
2317 code = s16builtins[i].md_code;
2319 for (a = 0; a < 10 && argtree; a++)
2321 args[a] = expand_normal (TREE_VALUE (argtree));
2322 argtree = TREE_CHAIN (argtree);
2325 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2327 char ao = s16builtins[i].arg_ops[o];
2328 char c = insn_data[code].operand[o].constraint[0];
2329 enum machine_mode omode;
2333 omode = (enum machine_mode) insn_data[code].operand[o].mode;
/* 'r': use TARGET if provided, else a fresh pseudo.  */
2335 op[o] = target ? target : gen_reg_rtx (omode);
/* 't': always a fresh scratch.  */
2337 op[o] = gen_reg_rtx (omode);
/* Digit: the corresponding expanded call argument.  */
2339 op[o] = args[(int) hex_value (ao)];
/* If the operand fails the insn's predicate, copy through a
   register; output operands get copied back afterwards.  */
2341 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2343 if (c == '+' || c == '=')
2346 op[o] = gen_reg_rtx (omode);
2349 op[o] = copy_to_mode_reg (omode, op[o]);
2356 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2357 op[5], op[6], op[7], op[8], op[9]);
/* Copy back any operands that needed relocation, tracking RETVAL.  */
2360 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2363 emit_move_insn (copyto[o], op[o]);
2364 if (op[o] == retval)
2371 /* Look for combinations of insns that can be converted to BN or BP
2372 opcodes. This is, unfortunately, too complex to do with MD
2376 combine_bnp (rtx insn)
2378 int insn_code, regno, need_extend;
2380 rtx cond, reg, and_insn, load, qireg, mem;
2381 enum machine_mode load_mode = QImode;
2382 enum machine_mode and_mode = QImode;
2383 rtx shift = NULL_RTX;
/* Only conditional branches are candidates.  */
2385 insn_code = recog_memoized (insn);
2386 if (insn_code != CODE_FOR_cbranchhi
2387 && insn_code != CODE_FOR_cbranchhi_neg)
/* Dig the comparison out of the branch pattern.  */
2390 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2391 cond = XEXP (cond, 1); /* if */
2392 cond = XEXP (cond, 0); /* cond */
2393 switch (GET_CODE (cond))
/* The compared register must be tested against zero and die here;
   otherwise rewriting the condition is unsafe.  */
2407 reg = XEXP (cond, 0);
2410 regno = REGNO (reg);
2411 if (XEXP (cond, 1) != const0_rtx)
2413 if (! find_regno_note (insn, REG_DEAD, regno))
2415 qireg = gen_rtx_REG (QImode, regno);
2419 /* LT and GE conditionals should have a sign extend before
/* Scan backwards for the extendqihi2 (sign test of the top bit) or a
   movhi that set the register; bail out on any other use of REG or
   anything that is not a plain insn/note.  */
2421 for (and_insn = prev_real_insn (insn);
2422 and_insn != NULL_RTX;
2423 and_insn = prev_real_insn (and_insn))
2425 int and_code = recog_memoized (and_insn);
2427 if (and_code == CODE_FOR_extendqihi2
2428 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2429 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
2432 if (and_code == CODE_FOR_movhi_internal
2433 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
2435 /* This is for testing bit 15. */
2440 if (reg_mentioned_p (reg, and_insn))
2443 if (GET_CODE (and_insn) != NOTE
2444 && GET_CODE (and_insn) != INSN)
2450 /* EQ and NE conditionals have an AND before them. */
2451 for (and_insn = prev_real_insn (insn);
2452 and_insn != NULL_RTX;
2453 and_insn = prev_real_insn (and_insn))
2455 if (recog_memoized (and_insn) == CODE_FOR_andhi3
2456 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2457 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
2460 if (reg_mentioned_p (reg, and_insn))
2463 if (GET_CODE (and_insn) != NOTE
2464 && GET_CODE (and_insn) != INSN)
2470 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2471 followed by an AND like this:
2473 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2474 (clobber (reg:BI carry))]
2476 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2478 Attempt to detect this here. */
2479 for (shift = prev_real_insn (and_insn); shift;
2480 shift = prev_real_insn (shift))
2482 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2483 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2484 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2487 if (reg_mentioned_p (reg, shift)
2488 || (GET_CODE (shift) != NOTE
2489 && GET_CODE (shift) != INSN))
2498 if (and_insn == NULL_RTX)
/* Now find the load that produced the tested value: a below-100
   HImode or QImode move, or a zero-extend of a below-100 QImode mem.
   Volatile memory must not be re-accessed, so it is excluded.  */
2501 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
2503 load = prev_real_insn (load))
2505 int load_code = recog_memoized (load);
2507 if (load_code == CODE_FOR_movhi_internal
2508 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2509 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2510 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
2516 if (load_code == CODE_FOR_movqi_internal
2517 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2518 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
2524 if (load_code == CODE_FOR_zero_extendqihi2
2525 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2526 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2533 if (reg_mentioned_p (reg, load))
2536 if (GET_CODE (load) != NOTE
2537 && GET_CODE (load) != INSN)
2543 mem = SET_SRC (PATTERN (load));
/* Default mask: the sign bit of the loaded mode.  */
2547 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2549 /* If the mem includes a zero-extend operation and we are
2550 going to generate a sign-extend operation then move the
2551 mem inside the zero-extend. */
2552 if (GET_CODE (mem) == ZERO_EXTEND)
2553 mem = XEXP (mem, 0);
/* For EQ/NE the AND's constant supplies the single tested bit;
   a preceding shift moves that bit up accordingly.  */
2557 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
2561 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));
2564 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
/* Narrow an HImode access to the QImode byte containing the bit.  */
2567 if (load_mode == HImode)
2569 rtx addr = XEXP (mem, 0);
2571 if (! (mask & 0xff))
2573 addr = plus_constant (addr, 1);
2576 mem = gen_rtx_MEM (QImode, addr);
/* Rewrite the branch condition to test memory directly, then force
   re-recognition and delete the now-dead feeder insns.  */
2580 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2582 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2584 INSN_CODE (insn) = -1;
2587 if (and_insn != insn)
2588 delete_insn (and_insn);
2590 if (shift != NULL_RTX)
2591 delete_insn (shift);
2595 xstormy16_reorg (void)
2599 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2601 if (! JUMP_P (insn))
2607 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2610 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2612 const HOST_WIDE_INT size = int_size_in_bytes (type);
2613 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Initializer macros for the targetm structure defined at the end of
   this file.  Each hook is #undef'd first so that redefinition is safe
   regardless of what target-def.h provided as a default.  */

/* Assembler output directives.  */
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* Select_section doesn't handle .bss_below100.  */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

/* Thunk emission.  */
#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* Operand printing.  */
#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND xstormy16_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address

/* Cost models.  */
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

/* Varargs support.  */
#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef  TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

/* Calling conventions.  */
#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG xstormy16_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE xstormy16_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE xstormy16_libcall_value
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p

/* Late RTL passes.  */
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

/* Reload.  */
#undef  TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
#undef  TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class

/* Addressing.  */
#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init
2690 xstormy16_option_override (void)
2692 if (flag_exceptions)
2693 flag_omit_frame_pointer = 0;
2696 #undef TARGET_OPTION_OVERRIDE
2697 #define TARGET_OPTION_OVERRIDE xstormy16_option_override
2699 struct gcc_target targetm = TARGET_INITIALIZER;
2701 #include "gt-stormy16.h"