1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
41 #include "target-def.h"
46 #include "integrate.h"
47 #include "bfin-protos.h"
/* File-scope state for the Blackfin back end: pending compare operands,
   the CC and RETS register RTXes, printable register-name tables, and
   the list of argument-passing registers.  */
51 /* Test and compare insns in bfin.md store the information needed to
52 generate branch and scc insns here. */
53 rtx bfin_compare_op0, bfin_compare_op1;
55 /* RTX for condition code flag register and RETS register */
56 extern GTY(()) rtx bfin_cc_rtx;
57 extern GTY(()) rtx bfin_rets_rtx;
58 rtx bfin_cc_rtx, bfin_rets_rtx;
/* Number of usable argument registers; computed in output_file_start
   by scanning arg_regs below.  */
60 int max_arg_registers = 0;
62 /* Arrays used when emitting register names. */
63 const char *short_reg_names[] = SHORT_REGISTER_NAMES;
64 const char *high_reg_names[] = HIGH_REGISTER_NAMES;
65 const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
66 const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
/* Argument registers, terminated by a negative entry (see the scan in
   output_file_start).  */
68 static int arg_regs[] = FUNCTION_ARG_REGISTERS;
70 /* Nonzero if -mshared-library-id was given. */
71 static int bfin_lib_id_given;
/* Mark symbol NAME as global in the assembly output on STREAM by
   emitting a ".global" directive followed by the assembler name.  */
74 bfin_globalize_label (FILE *stream, const char *name)
76 fputs (".global ", stream);
77 assemble_name (stream, name);
/* Emit the initial ".file" directive into the assembly output, and
   count the entries of arg_regs (terminated by a negative value) to
   initialize max_arg_registers.  */
83 output_file_start (void)
85 FILE *file = asm_out_file;
88 fprintf (file, ".file \"%s\";\n", input_filename);
90 for (i = 0; arg_regs[i] >= 0; i++)
92 max_arg_registers = i; /* how many arg reg used */
95 /* Called early in the compilation to conditionally modify
96 fixed_regs/call_used_regs. */
/* Also builds the cached RTXes for the CC flag (BImode) and the RETS
   return-address register used throughout this file.  */
99 conditional_register_usage (void)
101 /* initialize condition code flag register rtx */
102 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
103 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
106 /* Examine machine-dependent attributes of function type FUNTYPE and return its
107 type. See the definition of E_FUNKIND. */
/* Classification is by the presence of the "interrupt_handler",
   "exception_handler" or "nmi_handler" type attributes; the fallback
   (SUBROUTINE, presumably) is in an elided line below.  */
109 static e_funkind funkind (tree funtype)
111 tree attrs = TYPE_ATTRIBUTES (funtype);
112 if (lookup_attribute ("interrupt_handler", attrs))
113 return INTERRUPT_HANDLER;
114 else if (lookup_attribute ("exception_handler", attrs))
115 return EXCPT_HANDLER;
116 else if (lookup_attribute ("nmi_handler", attrs))
122 /* Stack frame layout. */
124 /* Compute the number of DREGS to save with a push_multiple operation.
125 This could include registers that aren't modified in the function,
126 since push_multiple only takes a range of registers. */
129 n_dregs_to_save (void)
/* Find the lowest-numbered D register that must be saved; because
   push_multiple saves a contiguous range ending at R7, the count is
   REG_R7 - i + 1.  */
133 for (i = REG_R0; i <= REG_R7; i++)
135 if (regs_ever_live[i] && ! call_used_regs[i])
136 return REG_R7 - i + 1;
/* For eh_return, also cover the EH data registers (list terminated by
   INVALID_REGNUM).  */
138 if (current_function_calls_eh_return)
143 unsigned test = EH_RETURN_DATA_REGNO (j);
144 if (test == INVALID_REGNUM)
147 return REG_R7 - i + 1;
155 /* Like n_dregs_to_save, but compute number of PREGS to save. */
158 n_pregs_to_save (void)
/* The PIC register (PIC_OFFSET_TABLE_REGNUM) must also be preserved
   when the function uses the GOT pointer, or in non-leaf functions
   under -mid-shared-library.  */
162 for (i = REG_P0; i <= REG_P5; i++)
163 if ((regs_ever_live[i] && ! call_used_regs[i])
164 || (i == PIC_OFFSET_TABLE_REGNUM
165 && (current_function_uses_pic_offset_table
166 || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
167 return REG_P5 - i + 1;
171 /* Determine if we are going to save the frame pointer in the prologue. */
174 must_save_fp_p (void)
176 return (frame_pointer_needed || regs_ever_live[REG_FP]);
/* Return nonzero if the current function needs a full stack frame
   (a LINK insn): always for eh_return, otherwise iff the frame
   pointer is needed.  */
180 stack_frame_needed_p (void)
182 /* EH return puts a new return address into the frame using an
183 address relative to the frame pointer. */
184 if (current_function_calls_eh_return)
186 return frame_pointer_needed;
189 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
190 must save all registers; this is used for interrupt handlers.
191 SPREG contains (reg:SI REG_SP). */
194 expand_prologue_reg_save (rtx spreg, int saveall)
196 int ndregs = saveall ? 8 : n_dregs_to_save ();
197 int npregs = saveall ? 6 : n_pregs_to_save ();
/* Ranges to push end at R7 resp. P5, so the first regno saved is
   one-past-the-top minus the count.  */
198 int dregno = REG_R7 + 1 - ndregs;
199 int pregno = REG_P5 + 1 - npregs;
200 int total = ndregs + npregs;
/* Build one PARALLEL: element 0 is an UNSPEC_PUSH_MULTIPLE marker
   carrying the SP delta, elements 1..total are the register stores,
   and the final element is the SP adjustment itself.  */
207 val = GEN_INT (-total * 4);
208 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
209 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
210 UNSPEC_PUSH_MULTIPLE);
211 XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
212 gen_rtx_PLUS (Pmode, spreg,
214 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
215 for (i = 0; i < total; i++)
217 rtx memref = gen_rtx_MEM (word_mode,
218 gen_rtx_PLUS (Pmode, spreg,
219 GEN_INT (- i * 4 - 4)));
/* Two SETs are built here — presumably one branch for D registers and
   one for P registers (the selecting condition is elided).  */
223 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
229 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
233 XVECEXP (pat, 0, i + 1) = subpat;
234 RTX_FRAME_RELATED_P (subpat) = 1;
/* Mark the whole insn frame-related for correct DWARF CFI.  */
236 insn = emit_insn (pat);
237 RTX_FRAME_RELATED_P (insn) = 1;
240 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
241 must save all registers; this is used for interrupt handlers.
242 SPREG contains (reg:SI REG_SP). */
245 expand_epilogue_reg_restore (rtx spreg, int saveall)
246 int ndregs = saveall ? 8 : n_dregs_to_save ();
248 int npregs = saveall ? 6 : n_pregs_to_save ();
249 int total = ndregs + npregs;
/* Mirror of expand_prologue_reg_save: element 0 of the PARALLEL is the
   SP increment, elements 1..total load the registers back.  */
256 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
257 XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
258 gen_rtx_PLUS (Pmode, spreg,
259 GEN_INT (total * 4)));
266 for (i = 0; i < total; i++)
/* First restored slot is at SP itself; later ones use positive
   offsets (the regno selection logic is elided).  */
269 ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
271 rtx memref = gen_rtx_MEM (word_mode, addr);
274 XVECEXP (pat, 0, i + 1)
275 = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
284 insn = emit_insn (pat);
285 RTX_FRAME_RELATED_P (insn) = 1;
288 /* Perform any needed actions needed for a function that is receiving a
289 variable number of arguments.
293 MODE and TYPE are the mode and type of the current parameter.
295 PRETEND_SIZE is a variable that should be set to the amount of stack
296 that must be pushed by the prolog to pretend that our caller pushed
299 Normally, this macro will push all remaining incoming registers on the
300 stack and set PRETEND_SIZE to the length of the registers pushed.
303 - VDSP C compiler manual (our ABI) says that a variable args function
304 should save the R0, R1 and R2 registers in the stack.
305 - The caller will always leave space on the stack for the
306 arguments that are passed in registers, so we dont have
307 to leave any extra space.
308 - now, the vastart pointer can access all arguments from the stack. */
311 setup_incoming_varargs (CUMULATIVE_ARGS *cum,
312 enum machine_mode mode ATTRIBUTE_UNUSED,
313 tree type ATTRIBUTE_UNUSED, int *pretend_size,
322 /* The move for named arguments will be generated automatically by the
323 compiler. We need to generate the move rtx for the unnamed arguments
324 if they are in the first 3 words. We assume at least 1 named argument
325 exists, so we never generate [ARGP] = R0 here. */
/* Store each remaining argument register into its caller-allocated
   slot relative to the argument pointer.  */
327 for (i = cum->words + 1; i < max_arg_registers; i++)
329 mem = gen_rtx_MEM (Pmode,
330 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
331 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
337 /* Value should be nonzero if functions must have frame pointers.
338 Zero means the frame pointer need not be set up (and parms may
339 be accessed via the stack pointer) in functions that seem suitable. */
342 bfin_frame_pointer_required (void)
344 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
/* Interrupt/exception/NMI handlers always get a frame pointer.  */
346 if (fkind != SUBROUTINE)
349 /* We turn on on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
350 so we have to override it for non-leaf functions. */
351 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
357 /* Return the number of registers pushed during the prologue. */
360 n_regs_saved_by_prologue (void)
362 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
363 int n = n_dregs_to_save () + n_pregs_to_save ();
/* A LINK insn saves both FP and RETS, hence the special case; the
   individual must_save_fp_p / non-leaf RETS increments below apply
   when no LINK is used.  */
365 if (stack_frame_needed_p ())
366 /* We use a LINK instruction in this case. */
370 if (must_save_fp_p ())
372 if (! current_function_is_leaf)
/* Interrupt-class functions additionally save ASTAT, optionally the
   nesting return register, and the high registers above P7 (accumulators
   A0/A1 count as two words each).  */
376 if (fkind != SUBROUTINE)
378 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
379 tree all = lookup_attribute ("saveall", attrs);
382 /* Increment once for ASTAT. */
386 if (lookup_attribute ("nesting", attrs))
389 for (i = REG_P7 + 1; i < REG_CC; i++)
392 || (!leaf_function_p () && call_used_regs[i]))
393 n += i == REG_A0 || i == REG_A1 ? 2 : 1;
398 /* Return the offset between two registers, one to be eliminated, and the other
399 its replacement, at the start of a routine. */
402 bfin_initial_elimination_offset (int from, int to)
404 HOST_WIDE_INT offset = 0;
/* ARG_POINTER sits above the prologue register saves.  */
406 if (from == ARG_POINTER_REGNUM)
407 offset = n_regs_saved_by_prologue () * 4;
/* Eliminating to SP also crosses the outgoing-args area (at least
   FIXED_STACK_AREA bytes when any outgoing args exist) and the local
   frame.  */
409 if (to == STACK_POINTER_REGNUM)
411 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
412 offset += current_function_outgoing_args_size;
413 else if (current_function_outgoing_args_size)
414 offset += FIXED_STACK_AREA;
416 offset += get_frame_size ();
422 /* Emit code to load a constant CONSTANT into register REG; setting
423 RTX_FRAME_RELATED_P on all insns we generate. Make sure that the insns
424 we generate need not be split. */
427 frame_related_constant_load (rtx reg, HOST_WIDE_INT constant)
430 rtx cst = GEN_INT (constant);
/* Values loadable in a single move; otherwise emit an explicit
   high/low pair so dwarf2out never sees a splittable insn.  */
432 if (constant >= -32768 && constant < 65536)
433 insn = emit_move_insn (reg, cst);
436 /* We don't call split_load_immediate here, since dwarf2out.c can get
437 confused about some of the more clever sequences it can generate. */
438 insn = emit_insn (gen_movsi_high (reg, cst));
439 RTX_FRAME_RELATED_P (insn) = 1;
440 insn = emit_insn (gen_movsi_low (reg, reg, cst));
442 RTX_FRAME_RELATED_P (insn) = 1;
445 /* Generate efficient code to add a value to the frame pointer. We
446 can use P1 as a scratch register. Set RTX_FRAME_RELATED_P on the
447 generated insns if FRAME is nonzero. */
450 add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
455 /* Choose whether to use a sequence using a temporary register, or
456 a sequence with multiple adds. We can add a signed 7 bit value
457 in one instruction. */
458 if (value > 120 || value < -120)
460 rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
/* Frame-related adjustments load the constant via
   frame_related_constant_load to keep the unwind info simple.  */
464 frame_related_constant_load (tmpreg, value);
467 insn = emit_move_insn (tmpreg, GEN_INT (value));
469 RTX_FRAME_RELATED_P (insn) = 1;
472 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
474 RTX_FRAME_RELATED_P (insn) = 1;
/* Small values: emit one or more short adds (loop structure elided).  */
485 /* We could use -62, but that would leave the stack unaligned, so
489 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
491 RTX_FRAME_RELATED_P (insn) = 1;
497 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
498 is too large, generate a sequence of insns that has the same effect.
499 SPREG contains (reg:SI REG_SP). */
502 emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
504 HOST_WIDE_INT link_size = frame_size;
/* The LINK immediate is limited; 262140 is the largest frame a single
   LINK can allocate (link_size is clamped in an elided line).  */
508 if (link_size > 262140)
511 /* Use a LINK insn with as big a constant as possible, then subtract
512 any remaining size from the SP. */
513 insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
514 RTX_FRAME_RELATED_P (insn) = 1;
/* Mark every SET inside the LINK's PARALLEL as frame-related.  */
516 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
518 rtx set = XVECEXP (PATTERN (insn), 0, i);
519 gcc_assert (GET_CODE (set) == SET);
520 RTX_FRAME_RELATED_P (set) = 1;
523 frame_size -= link_size;
/* Subtract any remainder through a scratch register.  */
527 /* Must use a call-clobbered PREG that isn't the static chain. */
528 rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
530 frame_related_constant_load (tmpreg, -frame_size);
531 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
532 RTX_FRAME_RELATED_P (insn) = 1;
536 /* Return the number of bytes we must reserve for outgoing arguments
537 in the current function's stack frame. */
/* The area is at least FIXED_STACK_AREA bytes whenever the function
   passes any outgoing arguments at all (zero otherwise, presumably —
   the tail of the function is elided).  */
542 if (current_function_outgoing_args_size)
544 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
545 return current_function_outgoing_args_size;
547 return FIXED_STACK_AREA;
552 /* Save RETS and FP, and allocate a stack frame. */
555 do_link (rtx spreg, HOST_WIDE_INT frame_size)
557 frame_size += arg_area_size ();
/* Use a single LINK insn when a full frame is required; otherwise save
   RETS and/or FP with explicit pre-decrement pushes and adjust SP.  */
559 if (stack_frame_needed_p ()
560 || (must_save_fp_p () && ! current_function_is_leaf))
561 emit_link_insn (spreg, frame_size);
564 if (! current_function_is_leaf)
566 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
567 gen_rtx_PRE_DEC (Pmode, spreg)),
569 rtx insn = emit_insn (pat);
570 RTX_FRAME_RELATED_P (insn) = 1;
572 if (must_save_fp_p ())
574 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
575 gen_rtx_PRE_DEC (Pmode, spreg)),
576 gen_rtx_REG (Pmode, REG_FP));
577 rtx insn = emit_insn (pat);
578 RTX_FRAME_RELATED_P (insn) = 1;
580 add_to_sp (spreg, -frame_size, 1);
584 /* Like do_link, but used for epilogues to deallocate the stack frame. */
587 do_unlink (rtx spreg, HOST_WIDE_INT frame_size)
589 frame_size += arg_area_size ();
591 if (stack_frame_needed_p ())
592 emit_insn (gen_unlink ());
/* No UNLINK: pop FP and RETS back with post-increment loads.  The USE
   insns keep flow from deleting the restores as dead.  */
595 rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
597 add_to_sp (spreg, frame_size, 0);
598 if (must_save_fp_p ())
600 rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
601 emit_move_insn (fpreg, postinc);
602 emit_insn (gen_rtx_USE (VOIDmode, fpreg));
604 if (! current_function_is_leaf)
606 emit_move_insn (bfin_rets_rtx, postinc);
607 emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
612 /* Generate a prologue suitable for a function of kind FKIND. This is
613 called for interrupt and exception handler prologues.
614 SPREG contains (reg:SI REG_SP). */
617 expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
620 HOST_WIDE_INT frame_size = get_frame_size ();
621 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
622 rtx predec = gen_rtx_MEM (SImode, predec1);
624 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
625 tree all = lookup_attribute ("saveall", attrs);
626 tree kspisusp = lookup_attribute ("kspisusp", attrs);
/* "kspisusp": kernel SP is in USP, so load it first (guarding condition
   elided).  */
630 insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
631 RTX_FRAME_RELATED_P (insn) = 1;
634 /* We need space on the stack in case we need to save the argument
636 if (fkind == EXCPT_HANDLER)
638 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
639 RTX_FRAME_RELATED_P (insn) = 1;
/* Push ASTAT, then the D/P registers, then the high registers.  */
642 insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
643 RTX_FRAME_RELATED_P (insn) = 1;
645 expand_prologue_reg_save (spreg, all != NULL_TREE);
647 for (i = REG_P7 + 1; i < REG_CC; i++)
650 || (!leaf_function_p () && call_used_regs[i]))
/* Accumulators are pushed as PDImode (two words).  */
652 if (i == REG_A0 || i == REG_A1)
653 insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
654 gen_rtx_REG (PDImode, i));
656 insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
657 RTX_FRAME_RELATED_P (insn) = 1;
/* For re-entrant ("nesting") handlers, save the kind-specific return
   register (RETX for exceptions, RETN for NMI, presumably RETI
   otherwise — the final arm is elided).  */
660 if (lookup_attribute ("nesting", attrs))
662 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
663 : fkind == NMI_HANDLER ? REG_RETN
665 insn = emit_move_insn (predec, srcreg);
666 RTX_FRAME_RELATED_P (insn) = 1;
669 do_link (spreg, frame_size);
/* Exception handlers set up R0 = masked SEQSTAT, R1 = SP, R2 = FP + 8;
   REG_MAYBE_DEAD notes let later passes delete the moves if the handler
   ignores these arguments.  */
671 if (fkind == EXCPT_HANDLER)
673 rtx r0reg = gen_rtx_REG (SImode, REG_R0);
674 rtx r1reg = gen_rtx_REG (SImode, REG_R1);
675 rtx r2reg = gen_rtx_REG (SImode, REG_R2);
678 insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
679 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
681 insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
682 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
684 insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
685 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
687 insn = emit_move_insn (r1reg, spreg);
688 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
690 insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
691 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
693 insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
694 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
699 /* Generate an epilogue suitable for a function of kind FKIND. This is
700 called for interrupt and exception handler epilogues.
701 SPREG contains (reg:SI REG_SP). */
704 expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
707 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
708 rtx postinc = gen_rtx_MEM (SImode, postinc1);
709 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
710 tree all = lookup_attribute ("saveall", attrs);
712 /* A slightly crude technique to stop flow from trying to delete "dead"
714 MEM_VOLATILE_P (postinc) = 1;
/* Restore in exact reverse order of the prologue: frame, nesting
   return register, high regs, D/P regs, ASTAT.  */
716 do_unlink (spreg, get_frame_size ());
718 if (lookup_attribute ("nesting", attrs))
720 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
721 : fkind == NMI_HANDLER ? REG_RETN
723 emit_move_insn (srcreg, postinc);
726 for (i = REG_CC - 1; i > REG_P7; i--)
729 || (!leaf_function_p () && call_used_regs[i]))
731 if (i == REG_A0 || i == REG_A1)
733 rtx mem = gen_rtx_MEM (PDImode, postinc1);
734 MEM_VOLATILE_P (mem) = 1;
735 emit_move_insn (gen_rtx_REG (PDImode, i), mem);
738 emit_move_insn (gen_rtx_REG (SImode, i), postinc);
741 expand_epilogue_reg_restore (spreg, all != NULL_TREE);
743 emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
745 /* Deallocate any space we left on the stack in case we needed to save the
746 argument registers. */
747 if (fkind == EXCPT_HANDLER)
748 emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
/* Kind-specific RTI/RTX/RTN return.  */
750 emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
753 /* Generate RTL for the prologue of the current function. */
756 bfin_expand_prologue (void)
759 HOST_WIDE_INT frame_size = get_frame_size ();
760 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
761 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
/* Interrupt-class functions use the dedicated handler prologue.  */
763 if (fkind != SUBROUTINE)
765 expand_interrupt_handler_prologue (spreg, fkind);
769 expand_prologue_reg_save (spreg, 0);
771 do_link (spreg, frame_size);
/* -mid-shared-library: load P5 with this library's GOT address, either
   at a fixed offset derived from -mshared-library-id or through the
   UNSPEC_LIBRARY_OFFSET symbol.  */
773 if (TARGET_ID_SHARED_LIBRARY
774 && (current_function_uses_pic_offset_table
775 || !current_function_is_leaf))
779 if (bfin_lib_id_given)
780 addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
782 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
783 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
784 UNSPEC_LIBRARY_OFFSET));
785 insn = emit_insn (gen_movsi (pic_offset_table_rtx,
786 gen_rtx_MEM (Pmode, addr)));
787 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
791 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
792 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
793 eh_return pattern. */
796 bfin_expand_epilogue (int need_return, int eh_return)
798 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
799 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
801 if (fkind != SUBROUTINE)
803 expand_interrupt_handler_epilogue (spreg, fkind);
807 do_unlink (spreg, get_frame_size ());
809 expand_epilogue_reg_restore (spreg, 0);
811 /* Omit the return insn if this is for a sibcall. */
/* eh_return adjusts SP by the handler-supplied offset in P2 (the
   guarding condition is elided).  */
816 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
818 emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
821 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
824 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
825 unsigned int new_reg)
827 /* Interrupt functions can only use registers that have already been
828 saved by the prologue, even if they would normally be
831 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
832 && !regs_ever_live[new_reg])
838 /* Return the value of the return address for the frame COUNT steps up
839 from the current frame, after the prologue.
840 We punt for everything but the current frame by returning const0_rtx. */
843 bfin_return_addr_rtx (int count)
/* COUNT == 0: the return address lives in RETS at function entry.  */
848 return get_hard_reg_initial_val (Pmode, REG_RETS);
851 /* Try machine-dependent ways of modifying an illegitimate address X
852 to be legitimate. If we find one, return the new, valid address,
853 otherwise return NULL_RTX.
855 OLDX is the address as it was before break_out_memory_refs was called.
856 In some cases it is useful to look at this to decide what needs to be done.
858 MODE is the mode of the memory reference. */
/* The Blackfin port does no address legitimization here; all parameters
   are unused.  */
861 legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
862 enum machine_mode mode ATTRIBUTE_UNUSED)
867 /* This predicate is used to compute the length of a load/store insn.
868 OP is a MEM rtx, we return nonzero if its addressing mode requires a
869 32 bit instruction. */
872 effective_address_32bit_p (rtx op, enum machine_mode mode)
874 HOST_WIDE_INT offset;
876 mode = GET_MODE (op);
/* Non-PLUS addresses (plain register, post-inc/dec, pre-dec) always
   fit the 16-bit encoding.  */
879 if (GET_CODE (op) != PLUS)
881 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
882 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
886 offset = INTVAL (XEXP (op, 1));
888 /* All byte loads use a 16 bit offset. */
889 if (GET_MODE_SIZE (mode) == 1)
892 if (GET_MODE_SIZE (mode) == 4)
894 /* Frame pointer relative loads can use a negative offset, all others
895 are restricted to a small positive one. */
896 if (XEXP (op, 0) == frame_pointer_rtx)
897 return offset < -128 || offset > 60;
898 return offset < 0 || offset > 60;
901 /* Must be HImode now. */
902 return offset < 0 || offset > 30;
905 /* Return cost of the memory address ADDR.
906 All addressing modes are equally cheap on the Blackfin. */
909 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
914 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
917 print_address_operand (FILE *file, rtx x)
919 switch (GET_CODE (x))
/* PLUS: print base, then index/offset (separator elided).  */
922 output_address (XEXP (x, 0));
924 output_address (XEXP (x, 1));
/* Pre-decrement prints "--" before the register; post-inc/dec print
   the register followed by "++"/"--".  */
928 fprintf (file, "--");
929 output_address (XEXP (x, 0));
932 output_address (XEXP (x, 0));
933 fprintf (file, "++");
936 output_address (XEXP (x, 0));
937 fprintf (file, "--");
941 gcc_assert (GET_CODE (x) != MEM);
942 print_operand (file, x, 0);
947 /* Adding intp DImode support by Tony
/* Print operand X to FILE under modifier CODE.  Modifiers seen in the
   visible fragment: 'j'/'J' print (reversed) condition suffixes; 'd'/'h'
   select the high/low half of a register or constant; 'w'/'x' print
   accumulator .w/.x parts; 'D' D-register pairs; 'H' the second word of
   a DI/DF register; 'T' byte registers; 'X'/'Y' log2 of (inverted)
   masks; 'Z' the LINK-insn frame operand; 'G' adds an @GOT suffix.  */
953 print_operand (FILE *file, rtx x, char code)
955 enum machine_mode mode = GET_MODE (x);
960 switch (GET_CODE (x))
966 fprintf (file, "ne");
975 fprintf (file, "ge");
978 fprintf (file, "le");
987 fprintf (file, "ge");
990 fprintf (file, "le");
993 output_operand_lossage ("invalid %%j value");
997 case 'J': /* reverse logic */
1001 fprintf (file, "ne");
1004 fprintf (file, "e");
1007 fprintf (file, "le");
1010 fprintf (file, "ge");
1013 fprintf (file, "l");
1016 fprintf (file, "g");
1019 fprintf (file, "le");
1022 fprintf (file, "ge");
1025 fprintf (file, "l");
1028 fprintf (file, "g");
1031 output_operand_lossage ("invalid %%J value");
1036 switch (GET_CODE (x))
1041 gcc_assert (REGNO (x) < 32);
1042 fprintf (file, "%s", short_reg_names[REGNO (x)]);
1043 /*fprintf (file, "\n%d\n ", REGNO (x));*/
1046 else if (code == 'd')
1048 gcc_assert (REGNO (x) < 32);
1049 fprintf (file, "%s", high_reg_names[REGNO (x)]);
1052 else if (code == 'w')
1054 gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
1055 fprintf (file, "%s.w", reg_names[REGNO (x)]);
1057 else if (code == 'x')
1059 gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
1060 fprintf (file, "%s.x", reg_names[REGNO (x)]);
1062 else if (code == 'D')
1064 fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
1066 else if (code == 'H')
1068 gcc_assert (mode == DImode || mode == DFmode);
1069 gcc_assert (REG_P (x));
1070 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
1072 else if (code == 'T')
1074 gcc_assert (D_REGNO_P (REGNO (x)));
1075 fprintf (file, "%s", byte_reg_names[REGNO (x)]);
1078 fprintf (file, "%s", reg_names[REGNO (x)]);
1084 print_address_operand (file, x);
1089 /* Moves to half registers with d or h modifiers always use unsigned
/* CONST_INT: rewrite X according to the modifier, then fall through to
   output_addr_const below.  */
1092 x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
1093 else if (code == 'h')
1094 x = GEN_INT (INTVAL (x) & 0xffff);
1095 else if (code == 'X')
1096 x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
1097 else if (code == 'Y')
1098 x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
1099 else if (code == 'Z')
1100 /* Used for LINK insns. */
1101 x = GEN_INT (-8 - INTVAL (x));
1106 output_addr_const (file, x);
1107 if (code == 'G' && flag_pic)
1108 fprintf (file, "@GOT");
1112 output_operand_lossage ("invalid const_double operand");
/* UNSPECs print their PIC/library-offset symbolic forms.  */
1116 switch (XINT (x, 1))
1118 case UNSPEC_MOVE_PIC:
1119 output_addr_const (file, XVECEXP (x, 0, 0));
1120 fprintf (file, "@GOT");
1123 case UNSPEC_LIBRARY_OFFSET:
1124 fprintf (file, "_current_shared_library_p5_offset_");
1133 output_addr_const (file, x);
1138 /* Argument support functions. */
1140 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1141 for a call to a function whose data type is FNTYPE.
1142 For a library call, FNTYPE is 0.
1143 VDSP C Compiler manual, our ABI says that
1144 first 3 words of arguments will use R0, R1 and R2.
1148 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype ATTRIBUTE_UNUSED,
1149 rtx libname ATTRIBUTE_UNUSED)
/* Zero CUM via the static zero_cum template, then point it at the
   argument-register table.  */
1151 static CUMULATIVE_ARGS zero_cum;
1155 /* Set up the number of registers to use for passing arguments. */
1157 cum->nregs = max_arg_registers;
1158 cum->arg_regs = arg_regs;
1163 /* Update the data in CUM to advance over an argument
1164 of mode MODE and data type TYPE.
1165 (TYPE is null for libcalls where that information may not be available.) */
1168 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1169 int named ATTRIBUTE_UNUSED)
1171 int count, bytes, words;
/* Round the argument size up to whole words.  */
1173 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1174 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1176 cum->words += words;
1177 cum->nregs -= words;
/* Out of registers: remaining args go on the stack.  Otherwise step
   arg_regs forward by WORDS (loop body elided).  */
1179 if (cum->nregs <= 0)
1182 cum->arg_regs = NULL;
1186 for (count = 1; count <= words; count++)
1193 /* Define where to put the arguments to a function.
1194 Value is zero to push the argument on the stack,
1195 or a hard register in which to store the argument.
1197 MODE is the argument's machine mode.
1198 TYPE is the data type of the argument (as a tree).
1199 This is null for libcalls where that information may
1201 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1202 the preceding args and about the function being called.
1203 NAMED is nonzero if this argument is a named parameter
1204 (otherwise it is an extra parameter matching an ellipsis). */
1207 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1208 int named ATTRIBUTE_UNUSED)
1211 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
/* Argument fits: hand back the next available argument register.  */
1217 return gen_rtx_REG (mode, *(cum->arg_regs));
1222 /* For an arg passed partly in registers and partly in memory,
1223 this is the number of bytes passed in registers.
1224 For args passed entirely in registers or entirely in memory, zero.
1226 Refer VDSP C Compiler manual, our ABI.
1227 First 3 words are in registers. So, if a an argument is larger
1228 than the registers available, it will span the register and
1232 bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1233 tree type ATTRIBUTE_UNUSED,
1234 bool named ATTRIBUTE_UNUSED)
1237 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1238 int bytes_left = cum->nregs * UNITS_PER_WORD;
/* Entirely in memory -> 0; larger than the remaining registers ->
   bytes_left go in registers (return values partially elided).  */
1243 if (bytes_left == 0)
1245 if (bytes > bytes_left)
1250 /* Variable sized types are passed by reference. */
1253 bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1254 enum machine_mode mode ATTRIBUTE_UNUSED,
1255 tree type, bool named ATTRIBUTE_UNUSED)
/* True exactly when TYPE's size is not a compile-time constant.  */
1257 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1260 /* Decide whether a type should be returned in memory (true)
1261 or in a register (false). This is called by the macro
1262 RETURN_IN_MEMORY. */
1265 bfin_return_in_memory (tree type)
1268 enum machine_mode mode = TYPE_MODE (type);
/* BLKmode values are sized explicitly; the size comparison that makes
   the decision is elided below.  */
1270 if (mode == BLKmode)
1272 size = int_size_in_bytes (type);
1277 /* Register in which address to store a structure value
1278 is passed to a function. */
/* The Blackfin ABI uses P0 for the hidden struct-return pointer.  */
1280 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1281 int incoming ATTRIBUTE_UNUSED)
1283 return gen_rtx_REG (Pmode, REG_P0);
1286 /* Return true when register may be used to pass function parameters. */
1289 function_arg_regno_p (int n)
/* Linear scan of the -1-terminated arg_regs table.  */
1292 for (i = 0; arg_regs[i] != -1; i++)
1293 if (n == arg_regs[i])
1298 /* Returns 1 if OP contains a symbol reference */
1301 symbolic_reference_mentioned_p (rtx op)
1303 register const char *fmt;
/* Direct hit, or recurse over every 'e' (rtx) and 'E' (rtvec) slot
   of OP's format string.  */
1306 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1309 fmt = GET_RTX_FORMAT (GET_CODE (op));
1310 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1316 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1317 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1321 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1328 /* Decide whether we can make a sibling call to a function. DECL is the
1329 declaration of the function being targeted by the call and EXP is the
1330 CALL_EXPR representing the call. */
/* Both parameters are unused; the result (elided) does not depend on
   them.  */
1333 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1334 tree exp ATTRIBUTE_UNUSED)
1339 /* Emit RTL insns to initialize the variable parts of a trampoline at
1340 TRAMP. FNADDR is an RTX for the address of the function's pure
1341 code. CXT is an RTX for the static chain value for the function. */
/* NOTE(review): old-style (K&R) parameter declaration; could be
   modernized to a prototype definition like the rest of the file.  */
1344 initialize_trampoline (tramp, fnaddr, cxt)
1345 rtx tramp, fnaddr, cxt;
1347 rtx t1 = copy_to_reg (fnaddr);
1348 rtx t2 = copy_to_reg (cxt);
/* Patch the function address into the trampoline as two HImode halves
   (low half at offset 2, high half at offset 6), then the static chain
   likewise at offsets 10 and 14.  */
1351 addr = memory_address (Pmode, plus_constant (tramp, 2));
1352 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1353 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1354 addr = memory_address (Pmode, plus_constant (tramp, 6));
1355 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1357 addr = memory_address (Pmode, plus_constant (tramp, 10));
1358 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1359 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1360 addr = memory_address (Pmode, plus_constant (tramp, 14));
1361 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1364 /* Legitimize PIC addresses. If the address is already position-independent,
1365 we return ORIG. Newly generated position-independent addresses go into a
1366 reg. This is REG if nonzero, otherwise we allocate register(s) as
1370 legitimize_pic_address (rtx orig, rtx reg)
/* Symbols and labels: load through the GOT.  Constant-pool references
   take the movsi_high/low_pic path; others index the GOT directly via
   UNSPEC_MOVE_PIC.  */
1375 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
1377 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
1383 gcc_assert (!no_new_pseudos);
1384 reg = gen_reg_rtx (Pmode);
1389 emit_insn (gen_movsi_high_pic (reg, addr));
1390 emit_insn (gen_movsi_low_pic (reg, reg, addr));
1391 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1392 new = gen_rtx_MEM (Pmode, reg);
1396 rtx tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
1398 new = gen_rtx_MEM (Pmode,
1399 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
1402 emit_move_insn (reg, new);
1404 current_function_uses_pic_offset_table = 1;
/* CONST/PLUS: legitimize both halves recursively and recombine,
   forcing large constant offsets into a register.  */
1408 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
1412 if (GET_CODE (addr) == CONST)
1414 addr = XEXP (addr, 0);
1415 gcc_assert (GET_CODE (addr) == PLUS);
1418 if (XEXP (addr, 0) == pic_offset_table_rtx)
1423 gcc_assert (!no_new_pseudos);
1424 reg = gen_reg_rtx (Pmode);
1427 base = legitimize_pic_address (XEXP (addr, 0), reg);
1428 addr = legitimize_pic_address (XEXP (addr, 1),
1429 base == reg ? NULL_RTX : reg);
1431 if (GET_CODE (addr) == CONST_INT)
1433 gcc_assert (! reload_in_progress && ! reload_completed);
1434 addr = force_reg (Pmode, addr);
1437 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
1439 base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
1440 addr = XEXP (addr, 1);
1443 return gen_rtx_PLUS (Pmode, base, addr);
1449 /* Emit insns to move operands[1] into operands[0]. */
1452 emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
/* During reload we must reuse operands[0] as the scratch; otherwise a
   fresh pseudo is safe.  */
1454 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
1456 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1457 operands[1] = force_reg (SImode, operands[1]);
1459 operands[1] = legitimize_pic_address (operands[1], temp);
1462 /* Expand a move operation in mode MODE. The operands are in OPERANDS. */
1465 expand_move (rtx *operands, enum machine_mode mode)
/* Symbolic sources need PIC legitimization when generating PIC code.  */
1467 if (flag_pic && SYMBOLIC_CONST (operands[1]))
1468 emit_pic_move (operands, mode);
1470 /* Don't generate memory->memory or constant->memory moves, go through a
/* Only do this before/outside reload; reload must not create pseudos.  */
1472 else if ((reload_in_progress | reload_completed) == 0
1473 && GET_CODE (operands[0]) == MEM
1474 && GET_CODE (operands[1]) != REG)
1475 operands[1] = force_reg (mode, operands[1]);
1478 /* Split one or more DImode RTL references into pairs of SImode
1479 references. The RTL can be REG, offsettable MEM, integer constant, or
1480 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1481 split and "num" is its length. lo_half and hi_half are output arrays
1482 that parallel "operands". */
1485 split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1489 rtx op = operands[num];
1491 /* simplify_subreg refuse to split volatile memory addresses,
1492 but we still have to handle it. */
1493 if (GET_CODE (op) == MEM)
/* Low word at offset 0, high word at offset 4 (little-endian layout).  */
1495 lo_half[num] = adjust_address (op, SImode, 0);
1496 hi_half[num] = adjust_address (op, SImode, 4);
/* Non-MEM: use simplify_gen_subreg.  VOIDmode constants are treated
   as DImode so the subreg machinery picks the right word.  */
1500 lo_half[num] = simplify_gen_subreg (SImode, op,
1501 GET_MODE (op) == VOIDmode
1502 ? DImode : GET_MODE (op), 0);
1503 hi_half[num] = simplify_gen_subreg (SImode, op,
1504 GET_MODE (op) == VOIDmode
1505 ? DImode : GET_MODE (op), 4);
1510 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
1511 SIBCALL is nonzero if this is a sibling call. */
1514 bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, int sibcall)
1516 rtx use = NULL, call;
1518 /* Static functions and indirect calls don't need the pic register. */
/* Record a USE of the PIC register for non-local symbol calls so the
   register allocator keeps it live across the call.  */
1520 && GET_CODE (XEXP (fnaddr, 0)) == SYMBOL_REF
1521 && ! SYMBOL_REF_LOCAL_P (XEXP (fnaddr, 0))
1522 use_reg (&use, pic_offset_table_rtx);
/* Target not directly callable: move the address into a register.  */
1524 if (! call_insn_operand (XEXP (fnaddr, 0), Pmode))
1526 fnaddr = copy_to_mode_reg (Pmode, XEXP (fnaddr, 0));
1527 fnaddr = gen_rtx_MEM (Pmode, fnaddr);
1529 call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
/* A value-returning call is wrapped in a SET of the return register.  */
1532 call = gen_rtx_SET (VOIDmode, retval, call);
/* Sibling calls additionally carry a RETURN in a PARALLEL.  */
1535 rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (2));
1536 XVECEXP (pat, 0, 0) = call;
1537 XVECEXP (pat, 0, 1) = gen_rtx_RETURN (VOIDmode);
1540 call = emit_call_insn (call);
1542 CALL_INSN_FUNCTION_USAGE (call) = use;
1545 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
1548 hard_regno_mode_ok (int regno, enum machine_mode mode)
1550 /* Allow only dregs to store value of mode HI or QI */
1551 enum reg_class class = REGNO_REG_CLASS (regno);
/* V2HImode values also live only in D registers.  */
1556 if (mode == V2HImode)
1557 return D_REGNO_P (regno);
/* The condition-code class holds only BImode flags.  */
1558 if (class == CCREGS)
1559 return mode == BImode;
/* 40-bit PDImode fits only in the accumulators A0/A1.  */
1560 if (mode == PDImode)
1561 return regno == REG_A0 || regno == REG_A1;
1563 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
/* Everything else: allowed iff the register is in MOST_REGS.  */
1566 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
1569 /* Implements target hook vector_mode_supported_p. */
/* Blackfin supports exactly one vector mode: a pair of HImode halves.  */
1572 bfin_vector_mode_supported_p (enum machine_mode mode)
1574 return mode == V2HImode;
1577 /* Return the cost of moving data from a register in class CLASS1 to
1578 one in class CLASS2. A cost of 2 is the default. */
1581 bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1582 enum reg_class class1, enum reg_class class2)
1584 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
1588 /* There are some stalls involved when moving from a DREG to a different
1589 class reg, and using the value in one of the following instructions.
1590 Attempt to model this by slightly discouraging such moves. */
1591 if (class1 == DREGS && class2 != DREGS)
1597 /* Return the cost of moving data of mode M between a
1598 register and memory. A value of 2 is the default; this cost is
1599 relative to those in `REGISTER_MOVE_COST'.
1601 ??? In theory L1 memory has single-cycle latency. We should add a switch
1602 that tells the compiler whether we expect to use only L1 memory for the
1603 program; it'll make the costs more accurate. */
1606 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1607 enum reg_class class,
1608 int in ATTRIBUTE_UNUSED)
1610 /* Make memory accesses slightly more expensive than any register-register
1611 move. Also, penalize non-DP registers, since they need secondary
1612 reloads to load and store. */
1613 if (! reg_class_subset_p (class, DPREGS))
1619 /* Inform reload about cases where moving X with a mode MODE to a register in
1620 CLASS requires an extra scratch register. Return the class needed for the
1621 scratch register. */
1624 secondary_input_reload_class (enum reg_class class, enum machine_mode mode,
1627 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
1628 in most other cases we can also use PREGS. */
1629 enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
1630 enum reg_class x_class = NO_REGS;
1631 enum rtx_code code = GET_CODE (x);
/* Look through subregs to the underlying register.  */
1634 x = SUBREG_REG (x), code = GET_CODE (x);
1637 int regno = REGNO (x);
/* Map a pseudo to its assigned hard register, if any.  */
1638 if (regno >= FIRST_PSEUDO_REGISTER)
1639 regno = reg_renumber[regno];
1644 x_class = REGNO_REG_CLASS (regno);
1647 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
1648 This happens as a side effect of register elimination, and we need
1649 a scratch register to do it. */
1650 if (fp_plus_const_operand (x, mode))
1652 rtx op2 = XEXP (x, 1);
1653 int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));
1655 if (class == PREGS || class == PREGS_CLOBBERED)
1657 /* If destination is a DREG, we can do this without a scratch register
1658 if the constant is valid for an add instruction. */
1659 if (class == DREGS || class == DPREGS)
1660 return large_constant_p ? PREGS : NO_REGS;
1661 /* Reloading to anything other than a DREG? Use a PREG scratch
1666 /* Data can usually be moved freely between registers of most classes.
1667 AREGS are an exception; they can only move to or from another register
1668 in AREGS or one in DREGS. They can also be assigned the constant 0. */
1669 if (x_class == AREGS)
1670 return class == DREGS || class == AREGS ? NO_REGS : DREGS;
1674 if (x != const0_rtx && x_class != DREGS)
1680 /* CCREGS can only be moved from/to DREGS. */
1681 if (class == CCREGS && x_class != DREGS)
1683 if (x_class == CCREGS && class != DREGS)
1685 /* All registers other than AREGS can load arbitrary constants. The only
1686 case that remains is MEM. */
1688 if (! reg_class_subset_p (class, default_class))
1689 return default_class;
1693 /* Like secondary_input_reload_class; and all we do is call that function. */
/* Output reloads have the same scratch-register requirements as input
   reloads on this target, so simply delegate.  */
1696 secondary_output_reload_class (enum reg_class class, enum machine_mode mode,
1699 return secondary_input_reload_class (class, mode, x);
1702 /* Implement TARGET_HANDLE_OPTION. */
1705 bfin_handle_option (size_t code, const char *arg, int value)
1709 case OPT_mshared_library_id_:
/* Reject out-of-range ids but still record that the option was given;
   override_options later checks it was paired with -mid-shared-library.  */
1710 if (value > MAX_LIBRARY_ID)
1711 error ("-mshared-library-id=%s is not between 0 and %d",
1712 arg, MAX_LIBRARY_ID);
1713 bfin_lib_id_given = 1;
1721 /* Implement the macro OVERRIDE_OPTIONS. */
1724 override_options (void)
1726 if (TARGET_OMIT_LEAF_FRAME_POINTER)
1727 flag_omit_frame_pointer = 1;
1729 /* Library identification */
1730 if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
1731 error ("-mshared-library-id= specified without -mid-shared-library");
1733 if (TARGET_ID_SHARED_LIBRARY)
1734 /* ??? Provide a way to use a bigger GOT. */
/* NOTE(review): scheduling is disabled unconditionally here — presumably
   because bfin_reorg's speculative-load workaround depends on instruction
   order; confirm before re-enabling.  */
1737 flag_schedule_insns = 0;
1740 /* Return the destination address of BRANCH.
1741 We need to use this instead of get_attr_length, because the
1742 cbranch_with_nops pattern conservatively sets its length to 6, and
1743 we still prefer to use shorter sequences. */
1746 branch_dest (rtx branch)
1750 rtx pat = PATTERN (branch);
/* For parallel patterns (e.g. cbranch with clobbers) look at element 0.  */
1751 if (GET_CODE (pat) == PARALLEL)
1752 pat = XVECEXP (pat, 0, 0);
1753 dest = SET_SRC (pat);
/* Conditional jump: the label is the THEN arm of the IF_THEN_ELSE.  */
1754 if (GET_CODE (dest) == IF_THEN_ELSE)
1755 dest = XEXP (dest, 1);
1756 dest = XEXP (dest, 0);
1757 dest_uid = INSN_UID (dest);
1758 return INSN_ADDRESSES (dest_uid);
1761 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
1762 it's a branch that's predicted taken. */
1765 cbranch_predicted_taken_p (rtx insn)
1767 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
1771 int pred_val = INTVAL (XEXP (x, 0));
/* Taken iff the recorded probability is at least 50%.  */
1773 return pred_val >= REG_BR_PROB_BASE / 2;
1779 /* Templates for use by asm_conditional_branch. */
/* Indexed as [idx][len]: idx = (bp << 1) | (BRF or BRT), i.e. rows
   alternate branch-on-false / branch-on-true, with and without the
   (bp) taken-prediction suffix; columns are short / jump.s / jump.l
   forms selected by the branch distance.  */
1781 static const char *ccbranch_templates[][3] = {
1782 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
1783 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
1784 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
1785 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
1788 /* Output INSN, which is a conditional branch instruction with operands
1791 We deal with the various forms of conditional branches that can be generated
1792 by bfin_reorg to prevent the hardware from doing speculative loads, by
1793 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
1794 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
1795 Either of these is only necessary if the branch is short, otherwise the
1796 template we use ends in an unconditional jump which flushes the pipeline
1800 asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
1802 int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
1803 /* Note : offset for instructions like if cc jmp; jump.[sl] offset
1804 is to be taken from start of if cc rather than jump.
1805 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
/* len selects the template column: 0 = short cbranch, 1 = jump.s,
   2 = jump.l.  */
1807 int len = (offset >= -1024 && offset <= 1022 ? 0
1808 : offset >= -4094 && offset <= 4096 ? 1
1810 int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
1811 int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
1812 output_asm_insn (ccbranch_templates[idx][len], operands);
/* NOPs and the (bp) prediction are alternative workarounds; never both.  */
1813 gcc_assert (n_nops == 0 || !bp);
1815 while (n_nops-- > 0)
1816 output_asm_insn ("nop;", NULL);
1819 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
1820 stored in bfin_compare_op0 and bfin_compare_op1 already. */
1823 bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
1825 enum rtx_code code1, code2;
1826 rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
1827 rtx tem = bfin_cc_rtx;
1828 enum rtx_code code = GET_CODE (cmp);
1830 /* If we have a BImode input, then we already have a compare result, and
1831 do not need to emit another comparison. */
1832 if (GET_MODE (op0) == BImode)
1834 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
1835 tem = op0, code2 = code;
1840 /* bfin has these conditions */
/* Condition not directly available: emit the reversed compare into CC
   (elided switch selects code1/code2).  */
1850 code1 = reverse_condition (code);
1854 emit_insn (gen_rtx_SET (BImode, tem,
1855 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
/* Result is a BImode test of the CC flag against zero.  */
1858 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
1861 /* Return nonzero iff C has exactly one bit set if it is interpreted
1862 as a 32 bit constant. */
1865 log2constp (unsigned HOST_WIDE_INT c)
/* Classic power-of-two test: clearing the lowest set bit leaves zero.  */
1868 return c != 0 && (c & (c-1)) == 0;
1871 /* Returns the number of consecutive least significant zeros in the binary
1872 representation of *V.
1873 We modify *V to contain the original value arithmetically shifted right by
1874 the number of zeroes. */
1877 shiftr_zero (HOST_WIDE_INT *v)
1879 unsigned HOST_WIDE_INT tmp = *v;
1880 unsigned HOST_WIDE_INT sgn;
/* Preserve the sign bit so the manual shift below is arithmetic even
   though tmp is unsigned.  */
1886 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
1887 while ((tmp & 0x1) == 0 && n <= 32)
1889 tmp = (tmp >> 1) | sgn;
1896 /* After reload, split the load of an immediate constant. OPERANDS are the
1897 operands of the movsi_insn pattern which we are splitting. We return
1898 nonzero if we emitted a sequence to load the constant, zero if we emitted
1899 nothing because we want to use the splitter's default sequence. */
1902 split_load_immediate (rtx operands[])
1904 HOST_WIDE_INT val = INTVAL (operands[1]);
1906 HOST_WIDE_INT shifted = val;
1907 HOST_WIDE_INT shifted_compl = ~val;
/* Strip trailing zeros from the value and its complement to see whether
   a small constant + shift can synthesize it.  */
1908 int num_zero = shiftr_zero (&shifted);
1909 int num_compl_zero = shiftr_zero (&shifted_compl);
1910 unsigned int regno = REGNO (operands[0]);
1911 enum reg_class class1 = REGNO_REG_CLASS (regno);
1913 /* This case takes care of single-bit set/clear constants, which we could
1914 also implement with BITSET/BITCLR. */
1916 && shifted >= -32768 && shifted < 65536
1917 && (D_REGNO_P (regno)
1918 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2))
1920 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
1921 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
/* Sign-extend the low 16 bits of VAL into tmp.  */
1926 tmp |= -(tmp & 0x8000);
1928 /* If high word has one bit set or clear, try to use a bit operation. */
1929 if (D_REGNO_P (regno))
1931 if (log2constp (val & 0xFFFF0000))
1933 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
1934 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
1937 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
1939 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
1940 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
1944 if (D_REGNO_P (regno))
/* Small low half: load it, then patch the high half in place.  */
1946 if (CONST_7BIT_IMM_P (tmp))
1948 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
1949 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
/* High half all zeros: clear, then load the low half.  */
1953 if ((val & 0xFFFF0000) == 0)
1955 emit_insn (gen_movsi (operands[0], const0_rtx));
1956 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
/* High half all ones: start from -1, then load the low half.  */
1960 if ((val & 0xFFFF0000) == 0xFFFF0000)
1962 emit_insn (gen_movsi (operands[0], constm1_rtx));
1963 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
1968 /* Need DREGs for the remaining case. */
1973 && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
1975 /* If optimizing for size, generate a sequence that has more instructions
/* Build ~val from a small constant + shift, then complement.  */
1977 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
1978 emit_insn (gen_ashlsi3 (operands[0], operands[0],
1979 GEN_INT (num_compl_zero)));
1980 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
1986 /* Return true if the legitimate memory address for a memory operand of mode
1987 MODE. Return false if not. */
1990 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
1992 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
1993 int sz = GET_MODE_SIZE (mode);
/* Displacements are scaled by the access size: shift 0/1/2 for
   byte/half/word accesses.  */
1994 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
1995 /* The usual offsettable_memref machinery doesn't work so well for this
1996 port, so we deal with the problem here. */
/* 8-byte accesses need room for the +4 of the second word, hence the
   slightly smaller mask.  */
1997 unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
1998 return (v & ~(mask << shift)) == 0;
/* Return nonzero if REGNO may serve as an address base register,
   applying the strict or non-strict rules as requested.  */
2002 bfin_valid_reg_p (unsigned int regno, int strict)
2004 return ((strict && REGNO_OK_FOR_BASE_STRICT_P (regno))
2005 || (!strict && REGNO_OK_FOR_BASE_NONSTRICT_P (regno)));
/* Worker for GO_IF_LEGITIMATE_ADDRESS: accept REG, REG+offset (with a
   mode-dependent displacement range), and auto-inc/dec forms.  */
2009 bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2011 switch (GET_CODE (x)) {
2013 if (bfin_valid_reg_p (REGNO (x), strict))
/* PLUS: base register plus either an UNSPEC (PIC offsets) or a
   constant displacement valid for this mode's access size.  */
2017 if (REG_P (XEXP (x, 0))
2018 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict)
2019 && (GET_CODE (XEXP (x, 1)) == UNSPEC
2020 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2021 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
/* Post-increment/decrement on a valid base register.  */
2026 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2027 && REG_P (XEXP (x, 0))
2028 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
/* NOTE(review): this arm additionally requires the base to be SP —
   presumably pre-dec is only valid as a stack push; confirm against
   the elided case label.  */
2031 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2032 && XEXP (x, 0) == stack_pointer_rtx
2033 && REG_P (XEXP (x, 0))
2034 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
/* Implement TARGET_RTX_COSTS.  Constants that fold into the containing
   instruction cost 0; otherwise they cost one insn (cost2).  */
2044 bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
2046 int cost2 = COSTS_N_INSNS (1);
/* CONST_INT: free when it fits the immediate field of OUTER_CODE.  */
2051 if (outer_code == SET || outer_code == PLUS)
2052 *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
2053 else if (outer_code == AND)
2054 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2055 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2056 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2057 else if (outer_code == LEU || outer_code == LTU)
2058 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2059 else if (outer_code == MULT)
2060 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2061 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2063 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2064 || outer_code == LSHIFTRT)
2065 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2066 else if (outer_code == IOR || outer_code == XOR)
2067 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2076 *total = COSTS_N_INSNS (2);
/* PLUS in Pmode: scaled-index (reg * 2 or * 4 + reg) forms are cheap.  */
2080 if (GET_MODE (x) == Pmode)
2082 if (GET_CODE (XEXP (x, 0)) == MULT
2083 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2085 HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
2086 if (val == 2 || val == 4)
2089 *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
2090 *total += rtx_cost (XEXP (x, 1), outer_code);
/* DImode operations take extra insns.  */
2102 if (GET_MODE (x) == DImode)
2109 if (GET_MODE (x) == DImode)
2114 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
2115 *total = COSTS_N_INSNS (3);
/* Implement TARGET_ASM_INTERNAL_LABEL: emit "<prefix>$<num>:" with the
   local-label prefix, using '$' as the separator.  */
2124 bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2126 fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
2129 /* Used for communication between {push,pop}_multiple_operation (which
2130 we use not only as a predicate) and the corresponding output functions. */
2131 static int first_preg_to_save, first_dreg_to_save;
/* Predicate for a multi-register push PARALLEL.  Verifies that element i
   stores a register at SP - i*4, that D registers (R7 downward) come
   before P registers (P5 downward), and that each run is consecutive.
   Side effect: records first_{d,p}reg_to_save for the output routine.  */
2134 push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2136 int lastdreg = 8, lastpreg = 6;
2139 first_preg_to_save = lastpreg;
2140 first_dreg_to_save = lastdreg;
2141 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
2143 rtx t = XVECEXP (op, 0, i);
2147 if (GET_CODE (t) != SET)
2151 dest = SET_DEST (t);
2152 if (GET_CODE (dest) != MEM || ! REG_P (src))
2154 dest = XEXP (dest, 0);
/* Each store must target SP plus a descending multiple of -4.  */
2155 if (GET_CODE (dest) != PLUS
2156 || ! REG_P (XEXP (dest, 0))
2157 || REGNO (XEXP (dest, 0)) != REG_SP
2158 || GET_CODE (XEXP (dest, 1)) != CONST_INT
2159 || INTVAL (XEXP (dest, 1)) != -i * 4)
2162 regno = REGNO (src);
/* group 0: first register seen decides whether we start in dregs
   or go straight to pregs.  */
2165 if (D_REGNO_P (regno))
2168 first_dreg_to_save = lastdreg = regno - REG_R0;
2170 else if (regno >= REG_P0 && regno <= REG_P7)
2173 first_preg_to_save = lastpreg = regno - REG_P0;
/* group 1 (in dregs): either switch to pregs or continue the
   consecutive dreg run.  */
2183 if (regno >= REG_P0 && regno <= REG_P7)
2186 first_preg_to_save = lastpreg = regno - REG_P0;
2188 else if (regno != REG_R0 + lastdreg + 1)
/* group 2 (in pregs): must remain consecutive.  */
2193 else if (group == 2)
2195 if (regno != REG_P0 + lastpreg + 1)
/* Predicate for a multi-register pop PARALLEL — mirror image of
   push_multiple_operation: loads from SP with ascending offsets,
   P registers first (starting below R7), then D registers, each run
   consecutive and descending.  Records first_{d,p}reg_to_save.  */
2204 pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2206 int lastdreg = 8, lastpreg = 6;
2209 for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
2211 rtx t = XVECEXP (op, 0, i);
2215 if (GET_CODE (t) != SET)
2219 dest = SET_DEST (t);
2220 if (GET_CODE (src) != MEM || ! REG_P (dest))
2222 src = XEXP (src, 0);
/* First load comes straight from [SP]; later ones from SP + (i-1)*4.  */
2226 if (! REG_P (src) || REGNO (src) != REG_SP)
2229 else if (GET_CODE (src) != PLUS
2230 || ! REG_P (XEXP (src, 0))
2231 || REGNO (XEXP (src, 0)) != REG_SP
2232 || GET_CODE (XEXP (src, 1)) != CONST_INT
2233 || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
2236 regno = REGNO (dest);
/* R7 marks the switch from the preg group to the dreg group.  */
2239 if (regno == REG_R7)
2244 else if (regno != REG_P0 + lastpreg - 1)
2249 else if (group == 1)
2251 if (regno != REG_R0 + lastdreg - 1)
2257 first_dreg_to_save = lastdreg;
2258 first_preg_to_save = lastpreg;
2262 /* Emit assembly code for one multi-register push described by INSN, with
2263 operands in OPERANDS. */
2266 output_push_multiple (rtx insn, rtx *operands)
2271 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2272 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
/* first_dreg_to_save == 8 means no dregs; first_preg_to_save == 6
   means no pregs; otherwise emit the combined range form.  */
2275 if (first_dreg_to_save == 8)
2276 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2277 else if (first_preg_to_save == 6)
2278 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2280 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2281 first_dreg_to_save, first_preg_to_save);
2283 output_asm_insn (buf, operands);
2286 /* Emit assembly code for one multi-register pop described by INSN, with
2287 operands in OPERANDS. */
2290 output_pop_multiple (rtx insn, rtx *operands)
2295 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2296 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
/* Same sentinel convention as output_push_multiple: 8 = no dregs,
   6 = no pregs.  */
2299 if (first_dreg_to_save == 8)
2300 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2301 else if (first_preg_to_save == 6)
2302 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2304 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2305 first_dreg_to_save, first_preg_to_save);
2307 output_asm_insn (buf, operands);
2310 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2313 single_move_for_strmov (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
/* Go through a scratch register: Blackfin has no mem-to-mem moves.  */
2315 rtx scratch = gen_reg_rtx (mode);
2318 srcmem = adjust_address_nv (src, mode, offset);
2319 dstmem = adjust_address_nv (dst, mode, offset);
2320 emit_move_insn (scratch, srcmem);
2321 emit_move_insn (dstmem, scratch);
2324 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
2325 alignment ALIGN_EXP. Return true if successful, false if we should fall
2326 back on a different method. */
2329 bfin_expand_strmov (rtx dst, rtx src, rtx count_exp, rtx align_exp)
2331 rtx srcreg, destreg, countreg;
2332 HOST_WIDE_INT align = 0;
2333 unsigned HOST_WIDE_INT count = 0;
2335 if (GET_CODE (align_exp) == CONST_INT)
2336 align = INTVAL (align_exp);
2337 if (GET_CODE (count_exp) == CONST_INT)
2339 count = INTVAL (count_exp);
/* Large copies are only inlined when explicitly requested.  */
2341 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
2346 /* If optimizing for size, only do single copies inline. */
2349 if (count == 2 && align < 2)
2351 if (count == 4 && align < 4)
2353 if (count != 1 && count != 2 && count != 4)
2356 if (align < 2 && count != 1)
/* Put both addresses into registers and rewrite the MEMs to use them.  */
2359 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
2360 if (destreg != XEXP (dst, 0))
2361 dst = replace_equiv_address_nv (dst, destreg);
2362 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
2363 if (srcreg != XEXP (src, 0))
2364 src = replace_equiv_address_nv (src, srcreg);
2366 if (count != 0 && align >= 2)
2368 unsigned HOST_WIDE_INT offset = 0;
/* Word-sized portion: one move for exactly 4 bytes, else a rep_movsi
   hardware loop.  The loop count is (words - 1) because the pattern
   presumably performs one iteration implicitly — TODO confirm against
   the rep_movsi pattern in bfin.md.  */
2372 if ((count & ~3) == 4)
2374 single_move_for_strmov (dst, src, SImode, offset);
2377 else if (count & ~3)
2379 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
2380 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2382 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
/* Remaining halfword portion, same single-vs-loop structure.  */
2387 if ((count & ~1) == 2)
2389 single_move_for_strmov (dst, src, HImode, offset);
2392 else if (count & ~1)
2394 HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
2395 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2397 emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
2402 single_move_for_strmov (dst, src, HImode, offset);
/* Trailing odd byte.  */
2407 single_move_for_strmov (dst, src, QImode, offset);
/* Implement TARGET_SCHED_ADJUST_COST: tweak the scheduling cost of the
   dependence LINK between DEP_INSN (producer) and INSN (consumer).  */
2416 bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
2418 enum attr_type insn_type, dep_insn_type;
2419 int dep_insn_code_number;
2421 /* Anti and output dependencies have zero cost. */
2422 if (REG_NOTE_KIND (link) != 0)
2425 dep_insn_code_number = recog_memoized (dep_insn);
2427 /* If we can't recognize the insns, we can't really do anything. */
2428 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
2431 insn_type = get_attr_type (insn);
2432 dep_insn_type = get_attr_type (dep_insn);
/* Model the stall when a value produced by a move or load into a
   non-address register (or from a non-D source) is consumed early.  */
2434 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
2436 rtx pat = PATTERN (dep_insn);
2437 rtx dest = SET_DEST (pat);
2438 rtx src = SET_SRC (pat);
2439 if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
2441 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
2447 /* We use the machine specific reorg pass for emitting CSYNC instructions
2448 after conditional branches as needed.
2450 The Blackfin is unusual in that a code sequence like
2453 may speculatively perform the load even if the condition isn't true. This
2454 happens for a branch that is predicted not taken, because the pipeline
2455 isn't flushed or stalled, so the early stages of the following instructions,
2456 which perform the memory reference, are allowed to execute before the
2457 jump condition is evaluated.
2458 Therefore, we must insert additional instructions in all places where this
2459 could lead to incorrect behavior. The manual recommends CSYNC, while
2460 VDSP seems to use NOPs (even though its corresponding compiler option is
2463 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
2464 When optimizing for size, we turn the branch into a predicted taken one.
2465 This may be slower due to mispredicts, but saves code size. */
2470 rtx insn, last_condjump = NULL_RTX;
2471 int cycles_since_jump = INT_MAX;
/* Linear scan: track the most recent not-taken-predicted conditional
   jump and how many insns have issued since it.  */
2476 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2480 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
2483 pat = PATTERN (insn);
2484 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2485 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2486 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2491 if (any_condjump_p (insn)
2492 && ! cbranch_predicted_taken_p (insn))
2494 last_condjump = insn;
2495 cycles_since_jump = 0;
2498 cycles_since_jump = INT_MAX;
2500 else if (INSN_P (insn))
2502 enum attr_type type = get_attr_type (insn);
2503 if (cycles_since_jump < INT_MAX)
2504 cycles_since_jump++;
/* A load that can trap within 3 cycles of the branch is the hazard:
   rewrite the branch as predicted-taken (for size) or pad with NOPs
   (for speed) by replacing its pattern in place.  */
2506 if (type == TYPE_MCLD && cycles_since_jump < 3)
2510 pat = single_set (insn);
2511 if (may_trap_p (SET_SRC (pat)))
2514 rtx *op = recog_data.operand;
2516 extract_insn (last_condjump);
2518 pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
2521 pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
2522 GEN_INT (3 - cycles_since_jump));
2523 PATTERN (last_condjump) = pat;
2524 INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
2525 cycles_since_jump = INT_MAX;
2532 /* Handle interrupt_handler, exception_handler and nmi_handler function
2533 attributes; arguments as in struct attribute_spec.handler. */
2536 handle_int_attribute (tree *node, tree name,
2537 tree args ATTRIBUTE_UNUSED,
2538 int flags ATTRIBUTE_UNUSED,
/* Attributes may land on a decl or directly on a type; normalize to
   the function type before checking.  */
2542 if (TREE_CODE (x) == FUNCTION_DECL)
2545 if (TREE_CODE (x) != FUNCTION_TYPE)
2547 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2548 IDENTIFIER_POINTER (name));
2549 *no_add_attrs = true;
/* Only one of the handler-kind attributes may appear on a function.  */
2551 else if (funkind (x) != SUBROUTINE)
2552 error ("multiple function type attributes specified");
2557 /* Return 0 if the attributes for two types are incompatible, 1 if they
2558 are compatible, and 2 if they are nearly compatible (which causes a
2559 warning to be generated). */
2562 bfin_comp_type_attributes (tree type1, tree type2)
2564 e_funkind kind1, kind2;
2566 if (TREE_CODE (type1) != FUNCTION_TYPE)
2569 kind1 = funkind (type1);
2570 kind2 = funkind (type2);
2575 /* Check for mismatched modifiers */
/* The `!`s normalize lookup_attribute's pointer result to 0/1 so the
   != compares presence, not addresses.  */
2576 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
2577 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
2580 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
2581 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
2584 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
2585 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
2591 /* Table of valid machine attributes. */
2592 const struct attribute_spec bfin_attribute_table[] =
2594 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
/* The three handler kinds share a validator; the modifier attributes
   (nesting, kspisusp, saveall) need no handler of their own.  */
2595 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute },
2596 { "exception_handler", 0, 0, false, true, true, handle_int_attribute },
2597 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute },
2598 { "nesting", 0, 0, false, true, true, NULL },
2599 { "kspisusp", 0, 0, false, true, true, NULL },
2600 { "saveall", 0, 0, false, true, true, NULL },
2601 { NULL, 0, 0, false, false, false, NULL }
2604 /* Output the assembler code for a thunk function. THUNK_DECL is the
2605 declaration for the thunk function itself, FUNCTION is the decl for
2606 the target function. DELTA is an immediate constant offset to be
2607 added to THIS. If VCALL_OFFSET is nonzero, the word at
2608 *(*this + vcall_offset) should be added to THIS. */
2611 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
2612 tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
2613 HOST_WIDE_INT vcall_offset, tree function)
2616 /* The this parameter is passed as the first argument. */
2617 rtx this = gen_rtx_REG (Pmode, REG_R0);
2619 /* Adjust the this parameter by a fixed constant. */
/* Small deltas fit the 7-bit add immediate; medium ones are split into
   two adds; anything else goes through R3.  */
2623 if (delta >= -64 && delta <= 63)
2625 xops[0] = GEN_INT (delta);
2626 output_asm_insn ("%1 += %0;", xops);
2628 else if (delta >= -128 && delta < -64)
2630 xops[0] = GEN_INT (delta + 64);
2631 output_asm_insn ("%1 += -64; %1 += %0;", xops);
2633 else if (delta > 63 && delta <= 126)
2635 xops[0] = GEN_INT (delta - 63);
2636 output_asm_insn ("%1 += 63; %1 += %0;", xops);
2640 xops[0] = GEN_INT (delta);
2641 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
2645 /* Adjust the this parameter by a value stored in the vtable. */
2648 rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
2649 rtx tmp = gen_rtx_REG (Pmode, REG_R2);
/* Load the vtable pointer (*this) into P2.  */
2653 output_asm_insn ("%2 = r0; %2 = [%2];", xops);
2655 /* Adjust the this parameter. */
2656 xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
/* Offset too big for a direct load: materialize it in P1 first.  */
2657 if (!memory_operand (xops[0], Pmode))
2659 rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
2660 xops[0] = GEN_INT (vcall_offset);
2662 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
2663 xops[0] = gen_rtx_MEM (Pmode, p2tmp);
2666 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
2669 xops[0] = XEXP (DECL_RTL (function), 0);
/* NOTE(review): the `1 ||` disables the PIC-aware path, so the direct
   jump is always used — looks like a deliberate stopgap; confirm.  */
2670 if (1 || !flag_pic || (*targetm.binds_local_p) (function))
2671 output_asm_insn ("jump.l\t%P0", xops);
2674 /* Codes for all the Blackfin builtins. */
/* Convenience wrapper around builtin_function for MD builtins.  */
2682 #define def_builtin(NAME, TYPE, CODE) \
2684 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
2688 /* Set up all builtin functions for this target. */
2690 bfin_init_builtins (void)
2692 tree void_ftype_void
2693 = build_function_type (void_type_node, void_list_node);
2695 /* Add the remaining MMX insns with somewhat more complicated types. */
/* Both sync builtins take no arguments and return void.  */
2696 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
2697 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
2700 /* Expand an expression EXP that calls a built-in function,
2701 with result going to TARGET if that's convenient
2702 (and in mode MODE if that's convenient).
2703 SUBTARGET may be used as the target for computing one of EXP's operands.
2704 IGNORE is nonzero if the value is to be ignored. */
2707 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
2708 rtx subtarget ATTRIBUTE_UNUSED,
2709 enum machine_mode mode ATTRIBUTE_UNUSED,
2710 int ignore ATTRIBUTE_UNUSED)
2712 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2713 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
/* Each builtin maps 1:1 onto an insn pattern; emit it directly.  */
2717 case BFIN_BUILTIN_CSYNC:
2718 emit_insn (gen_csync ());
2720 case BFIN_BUILTIN_SSYNC:
2721 emit_insn (gen_ssync ());
/* Target hook vector initialization: each #undef/#define pair overrides
   a default from target-def.h before TARGET_INITIALIZER snapshots the
   whole set into targetm at the end of the file.  */
2729 #undef TARGET_INIT_BUILTINS
2730 #define TARGET_INIT_BUILTINS bfin_init_builtins
2732 #undef TARGET_EXPAND_BUILTIN
2733 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
2735 #undef TARGET_ASM_GLOBALIZE_LABEL
2736 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
2738 #undef TARGET_ASM_FILE_START
2739 #define TARGET_ASM_FILE_START output_file_start
2741 #undef TARGET_ATTRIBUTE_TABLE
2742 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
2744 #undef TARGET_COMP_TYPE_ATTRIBUTES
2745 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
2747 #undef TARGET_RTX_COSTS
2748 #define TARGET_RTX_COSTS bfin_rtx_costs
2750 #undef TARGET_ADDRESS_COST
2751 #define TARGET_ADDRESS_COST bfin_address_cost
2753 #undef TARGET_ASM_INTERNAL_LABEL
2754 #define TARGET_ASM_INTERNAL_LABEL bfin_internal_label
2756 #undef TARGET_MACHINE_DEPENDENT_REORG
2757 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
2759 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
2760 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
2762 #undef TARGET_ASM_OUTPUT_MI_THUNK
2763 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
2764 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2765 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
2767 #undef TARGET_SCHED_ADJUST_COST
2768 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
2770 #undef TARGET_PROMOTE_PROTOTYPES
2771 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
2772 #undef TARGET_PROMOTE_FUNCTION_ARGS
2773 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
2774 #undef TARGET_PROMOTE_FUNCTION_RETURN
2775 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
2777 #undef TARGET_ARG_PARTIAL_BYTES
2778 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
2780 #undef TARGET_PASS_BY_REFERENCE
2781 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
2783 #undef TARGET_SETUP_INCOMING_VARARGS
2784 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
2786 #undef TARGET_STRUCT_VALUE_RTX
2787 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
2789 #undef TARGET_VECTOR_MODE_SUPPORTED_P
2790 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
2792 #undef TARGET_HANDLE_OPTION
2793 #define TARGET_HANDLE_OPTION bfin_handle_option
/* The one and only definition of the target hook vector.  */
2795 struct gcc_target targetm = TARGET_INITIALIZER;