1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "insn-codes.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
42 #include "target-def.h"
47 #include "integrate.h"
48 #include "bfin-protos.h"
/* Test and compare insns in bfin.md store the information needed to
   generate branch and scc insns here.  */
rtx bfin_compare_op0, bfin_compare_op1;

/* RTX for condition code flag register and RETS register.  The extern
   declarations carry the GTY(()) marker so the GC roots them; the
   objects themselves are defined on the following line.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

/* Number of registers available for argument passing; computed in
   output_file_start by scanning the arg_regs table below.  */
int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

/* Registers used for passing function arguments; the table is scanned
   until a negative sentinel is found (see output_file_start and
   function_arg_regno_p).  */
static int arg_regs[] = FUNCTION_ARG_REGISTERS;

/* Nonzero if -mshared-library-id was given.  */
static int bfin_lib_id_given;
/* Globalize-label hook: emit a ".global NAME" directive to STREAM so
   the assembler exports NAME.  */
bfin_globalize_label (FILE *stream, const char *name)
  fputs (".global ", stream);
  assemble_name (stream, name);
/* Emit the ".file" directive at the start of the assembly output, and
   compute max_arg_registers by scanning the arg_regs table.  */
output_file_start (void)
  FILE *file = asm_out_file;
  fprintf (file, ".file \"%s\";\n", input_filename);
  /* Count entries until the negative sentinel in arg_regs.  */
  for (i = 0; arg_regs[i] >= 0; i++)
  max_arg_registers = i;	/* how many arg regs are used  */
/* Called early in the compilation to conditionally modify
   fixed_regs/call_used_regs.  Also builds the RTXes for the CC flag
   and RETS registers, which are needed throughout the back end.  */
conditional_register_usage (void)
  /* Initialize condition code flag register rtx and the RETS rtx.  */
  bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
  bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
/* Examine machine-dependent attributes of function type FUNTYPE and return
   its kind.  See the definition of E_FUNKIND.  Attributes are checked in
   priority order: interrupt_handler, exception_handler, nmi_handler.  */
static e_funkind funkind (tree funtype)
  tree attrs = TYPE_ATTRIBUTES (funtype);
  if (lookup_attribute ("interrupt_handler", attrs))
    return INTERRUPT_HANDLER;
  else if (lookup_attribute ("exception_handler", attrs))
    return EXCPT_HANDLER;
  else if (lookup_attribute ("nmi_handler", attrs))
/* Stack frame layout. */

/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.  */
n_dregs_to_save (void)
  /* Find the lowest-numbered callee-saved D register that is live; the
     push_multiple range then runs from there up to R7.  */
  for (i = REG_R0; i <= REG_R7; i++)
    if (regs_ever_live[i] && ! call_used_regs[i])
      return REG_R7 - i + 1;
  /* For eh_return the EH data registers must also be covered by the
     saved range.  */
  if (current_function_calls_eh_return)
      unsigned test = EH_RETURN_DATA_REGNO (j);
      if (test == INVALID_REGNUM)
      return REG_R7 - i + 1;
/* Like n_dregs_to_save, but compute number of PREGS to save.  The PIC
   offset table register is treated as needing a save when the function
   uses the PIC pointer, or when compiling for an ID-based shared
   library and the function is not a leaf.  */
n_pregs_to_save (void)
  for (i = REG_P0; i <= REG_P5; i++)
    if ((regs_ever_live[i] && ! call_used_regs[i])
	|| (i == PIC_OFFSET_TABLE_REGNUM
	    && (current_function_uses_pic_offset_table
		|| (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
      return REG_P5 - i + 1;
/* Determine if we are going to save the frame pointer in the prologue:
   either reload decided we need one, or FP is live in the function.  */
must_save_fp_p (void)
  return (frame_pointer_needed || regs_ever_live[REG_FP]);
/* Return whether the current function needs a full stack frame
   (i.e. whether a LINK insn will be emitted).  */
stack_frame_needed_p (void)
  /* EH return puts a new return address into the frame using an
     address relative to the frame pointer.  */
  if (current_function_calls_eh_return)
  return frame_pointer_needed;
/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  */
expand_prologue_reg_save (rtx spreg, int saveall)
  int ndregs = saveall ? 8 : n_dregs_to_save ();
  int npregs = saveall ? 6 : n_pregs_to_save ();
  /* Saved registers form contiguous ranges ending at R7 and P5.  */
  int dregno = REG_R7 + 1 - ndregs;
  int pregno = REG_P5 + 1 - npregs;
  int total = ndregs + npregs;

  val = GEN_INT (-total * 4);
  /* Build a PARALLEL describing the multiple push: element 0 is an
     UNSPEC identifying the pattern, elements 1..total store the
     individual registers, and the final element adjusts SP.  */
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
  XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
					UNSPEC_PUSH_MULTIPLE);
  XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
					     gen_rtx_PLUS (Pmode, spreg,
  RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
  for (i = 0; i < total; i++)
      /* Stores go downward from SP; slot i is at SP - 4*(i+1).  */
      rtx memref = gen_rtx_MEM (word_mode,
				gen_rtx_PLUS (Pmode, spreg,
					      GEN_INT (- i * 4 - 4)));
      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
      XVECEXP (pat, 0, i + 1) = subpat;
      /* Mark each store frame-related so dwarf2out records the save.  */
      RTX_FRAME_RELATED_P (subpat) = 1;
  insn = emit_insn (pat);
  RTX_FRAME_RELATED_P (insn) = 1;
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  */
expand_epilogue_reg_restore (rtx spreg, int saveall)
  int ndregs = saveall ? 8 : n_dregs_to_save ();
  int npregs = saveall ? 6 : n_pregs_to_save ();
  int total = ndregs + npregs;

  /* Element 0 of the PARALLEL pops the whole save area off the stack;
     the remaining elements reload the individual registers.  */
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
  XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
				     gen_rtx_PLUS (Pmode, spreg,
						   GEN_INT (total * 4)));
  for (i = 0; i < total; i++)
      /* Loads use positive offsets from SP, mirroring the prologue's
	 downward stores.  */
	? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
      rtx memref = gen_rtx_MEM (word_mode, addr);
      XVECEXP (pat, 0, i + 1)
	= gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
  insn = emit_insn (pat);
  RTX_FRAME_RELATED_P (insn) = 1;
/* Perform any needed actions needed for a function that is receiving a
   variable number of arguments.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   - VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we don't have
     to leave any extra space.
   - Now, the va_start pointer can access all arguments from the stack.  */
setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			tree type ATTRIBUTE_UNUSED, int *pretend_size,
  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */
  for (i = cum->words + 1; i < max_arg_registers; i++)
      /* Spill each remaining argument register into its caller-provided
	 stack slot so va_arg can find it.  */
      mem = gen_rtx_MEM (Pmode,
			 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
/* Value should be nonzero if functions must have frame pointers.
   Zero means the frame pointer need not be set up (and parms may
   be accessed via the stack pointer) in functions that seem suitable.  */
bfin_frame_pointer_required (void)
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  /* Interrupt/exception/NMI handlers are special-cased here.  */
  if (fkind != SUBROUTINE)
  /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
     so we have to override it for non-leaf functions.  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
/* Return the number of registers pushed during the prologue.  Used by
   bfin_initial_elimination_offset to compute the argument pointer's
   distance from the stack pointer.  */
n_regs_saved_by_prologue (void)
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  int n = n_dregs_to_save () + n_pregs_to_save ();

  if (stack_frame_needed_p ())
    /* We use a LINK instruction in this case.  */

  if (must_save_fp_p ())
  if (! current_function_is_leaf)

  if (fkind != SUBROUTINE)
      tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
      tree all = lookup_attribute ("saveall", attrs);
      /* Increment once for ASTAT.  */
      if (lookup_attribute ("nesting", attrs))
      /* Registers above P7 that get saved; the accumulators A0/A1 take
	 two words each.  */
      for (i = REG_P7 + 1; i < REG_CC; i++)
	  || (!leaf_function_p () && call_used_regs[i]))
	n += i == REG_A0 || i == REG_A1 ? 2 : 1;
/* Return the offset between two registers, one to be eliminated, and the other
   its replacement, at the start of a routine.  FROM/TO are the register
   numbers being eliminated to/from.  */
bfin_initial_elimination_offset (int from, int to)
  HOST_WIDE_INT offset = 0;

  /* The argument pointer sits above all prologue register saves.  */
  if (from == ARG_POINTER_REGNUM)
    offset = n_regs_saved_by_prologue () * 4;

  if (to == STACK_POINTER_REGNUM)
      /* At least FIXED_STACK_AREA bytes are reserved whenever there are
	 any outgoing arguments.  */
      if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
	offset += current_function_outgoing_args_size;
      else if (current_function_outgoing_args_size)
	offset += FIXED_STACK_AREA;
      offset += get_frame_size ();
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate.  Make sure that the insns
   we generate need not be split.  */
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant)
  rtx cst = GEN_INT (constant);

  /* Constants in [-32768, 65536) can be loaded with a single move.  */
  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
      /* We don't call split_load_immediate here, since dwarf2out.c can get
	 confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
  RTX_FRAME_RELATED_P (insn) = 1;
/* Generate efficient code to add a value to the frame pointer.  We
   can use P1 as a scratch register.  Set RTX_FRAME_RELATED_P on the
   generated insns if FRAME is nonzero.  */
add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7 bit value
     in one instruction.  */
  if (value > 120 || value < -120)
      rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
      /* For frame-related adjustments load the constant with
	 frame_related_constant_load so unwind info stays correct.  */
	frame_related_constant_load (tmpreg, value);
	  insn = emit_move_insn (tmpreg, GEN_INT (value));
	    RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
	RTX_FRAME_RELATED_P (insn) = 1;
      /* We could use -62, but that would leave the stack unaligned, so
	 use a smaller step that keeps alignment.  */
	  insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
	    RTX_FRAME_RELATED_P (insn) = 1;
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
  HOST_WIDE_INT link_size = frame_size;

  /* 262140 is the largest frame a single LINK can allocate here; clamp
     and make up the difference afterwards.  */
  if (link_size > 262140)

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;
  /* Mark every SET inside the LINK's PARALLEL frame-related so the
     unwinder sees the FP/RETS saves.  */
  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;

  frame_size -= link_size;

      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
/* Return the number of bytes we must reserve for outgoing arguments
   in the current function's stack frame.  A minimum of FIXED_STACK_AREA
   bytes is reserved whenever there are any outgoing arguments.  */
  if (current_function_outgoing_args_size)
      if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
	return current_function_outgoing_args_size;
      return FIXED_STACK_AREA;
/* Save RETS and FP, and allocate a stack frame of FRAME_SIZE bytes
   (plus the outgoing-argument area).  SPREG is (reg:SI REG_SP).
   Uses a LINK insn when a full frame is needed; otherwise pushes RETS
   and/or FP individually and adjusts SP directly.  */
do_link (rtx spreg, HOST_WIDE_INT frame_size)
  frame_size += arg_area_size ();

  if (stack_frame_needed_p ()
      || (must_save_fp_p () && ! current_function_is_leaf))
    emit_link_insn (spreg, frame_size);

      /* Non-leaf functions must save RETS even without a full frame.  */
      if (! current_function_is_leaf)
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
      if (must_save_fp_p ())
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       gen_rtx_REG (Pmode, REG_FP));
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
      add_to_sp (spreg, -frame_size, 1);
/* Like do_link, but used for epilogues to deallocate the stack frame.  */
do_unlink (rtx spreg, HOST_WIDE_INT frame_size)
  frame_size += arg_area_size ();

  if (stack_frame_needed_p ())
    emit_insn (gen_unlink ());

      /* No full frame: pop FP and RETS individually, mirroring do_link.
	 The USE insns keep the restored values from looking dead.  */
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      add_to_sp (spreg, frame_size, 0);
      if (must_save_fp_p ())
	  rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
	  emit_move_insn (fpreg, postinc);
	  emit_insn (gen_rtx_USE (VOIDmode, fpreg));
      if (! current_function_is_leaf)
	  emit_move_insn (bfin_rets_rtx, postinc);
	  emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  */
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree all = lookup_attribute ("saveall", attrs);
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  /* NOTE(review): this load of SP from USP appears to be tied to the
     kspisusp attribute checked above — confirm against full source.  */
  insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;

  /* ASTAT is saved first, before the general registers.  */
  insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
  RTX_FRAME_RELATED_P (insn) = 1;

  expand_prologue_reg_save (spreg, all != NULL_TREE);

  /* Save the registers above P7 that are live or call-clobbered; the
     accumulators A0/A1 are pushed in PDImode (two words).  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
      || (!leaf_function_p () && call_used_regs[i]))
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;

  /* For nested handlers, also save the return-address register that
     matches the handler kind (RETX/RETN/RETI).  */
  if (lookup_attribute ("nesting", attrs))
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
					: fkind == NMI_HANDLER ? REG_RETN
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;

  do_link (spreg, frame_size);

  if (fkind == EXCPT_HANDLER)
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);

      /* R0 = SEQSTAT with its low 26 bits cleared (shift right, then
	 left, by 26).  The REG_MAYBE_DEAD notes tell flow not to
	 complain if these values end up unused.  */
      insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
      insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
      insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
      /* R1 = stack pointer, R2 = frame pointer + 8.  */
      insn = emit_move_insn (r1reg, spreg);
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
      insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
      insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  Restores are done in the reverse
   order of expand_interrupt_handler_prologue's saves.  */
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree all = lookup_attribute ("saveall", attrs);

  /* A slightly crude technique to stop flow from trying to delete "dead"
     restores: mark the popping MEM volatile.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size ());

  /* For nested handlers, restore the matching return-address register
     (RETX/RETN/RETI).  */
  if (lookup_attribute ("nesting", attrs))
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
					: fkind == NMI_HANDLER ? REG_RETN
      emit_move_insn (srcreg, postinc);

  /* Pop the registers above P7, accumulators in PDImode.  */
  for (i = REG_CC - 1; i > REG_P7; i--)
      || (!leaf_function_p () && call_used_regs[i]))
	if (i == REG_A0 || i == REG_A1)
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);

  expand_epilogue_reg_restore (spreg, all != NULL_TREE);

  /* ASTAT was pushed first, so it is popped last.  */
  emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
/* Generate RTL for the prologue of the current function.  Dispatches to
   expand_interrupt_handler_prologue for non-subroutine kinds; otherwise
   saves registers, allocates the frame, and (for ID shared libraries)
   loads the PIC offset table register.  */
bfin_expand_prologue (void)
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  if (fkind != SUBROUTINE)
      expand_interrupt_handler_prologue (spreg, fkind);

  expand_prologue_reg_save (spreg, 0);

  do_link (spreg, frame_size);

  if (TARGET_ID_SHARED_LIBRARY
      && (current_function_uses_pic_offset_table
	  || !current_function_is_leaf))
      /* With -mshared-library-id the library's GOT slot is at a fixed
	 negative offset; otherwise it is found via an UNSPEC resolved
	 by the linker.  */
      if (bfin_lib_id_given)
	addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
	addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
			     gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
					     UNSPEC_LIBRARY_OFFSET));
      insn = emit_insn (gen_movsi (pic_offset_table_rtx,
				   gen_rtx_MEM (Pmode, addr)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
   eh_return pattern.  */
bfin_expand_epilogue (int need_return, int eh_return)
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  if (fkind != SUBROUTINE)
      expand_interrupt_handler_epilogue (spreg, fkind);

  do_unlink (spreg, get_frame_size ());

  expand_epilogue_reg_restore (spreg, 0);

  /* Omit the return insn if this is for a sibcall.  */
    /* NOTE(review): P2 here looks like the EH_RETURN stack-adjust
       register — confirm against the target's EH macros.  */
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */
bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
			   unsigned int new_reg)
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */
  if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
      && !regs_ever_live[new_reg])
/* Return the value of the return address for the frame COUNT steps up
   from the current frame, after the prologue.
   We punt for everything but the current frame by returning const0_rtx.  */
bfin_return_addr_rtx (int count)
  /* The current frame's return address lives in RETS at entry.  */
  return get_hard_reg_initial_val (Pmode, REG_RETS);
/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address,
   otherwise return NULL_RTX.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the memory reference.  No transformation is
   performed for Blackfin.  */
legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
   32 bit instruction.  */
effective_address_32bit_p (rtx op, enum machine_mode mode)
  HOST_WIDE_INT offset;

  mode = GET_MODE (op);

  /* Only reg+const addresses can need the long form; everything else
     (plain register, autoinc/autodec) fits in 16 bits.  */
  if (GET_CODE (op) != PLUS)
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
		  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16 bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)

  if (GET_MODE_SIZE (mode) == 4)
      /* Frame pointer relative loads can use a negative offset, all others
	 are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
	return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
/* Return cost of the memory address ADDR.
   All addressing modes are equally cheap on the Blackfin.  */
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
/* Subroutine of print_operand; used to print a memory reference X to FILE.
   Emits Blackfin syntax: "--" before the address for predecrement, "++"
   or "--" after it for postincrement/postdecrement.  */
print_address_operand (FILE *file, rtx x)
  switch (GET_CODE (x))
      output_address (XEXP (x, 0));
      output_address (XEXP (x, 1));

      fprintf (file, "--");
      output_address (XEXP (x, 0));
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      output_address (XEXP (x, 0));
      fprintf (file, "--");

      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
/* Adding intp DImode support by Tony

   Print operand X to FILE, using modifier CODE.  Codes handled here
   include:
     'j'/'J' - condition code (J reverses the logic);
     'd'/'h' - high/low half of a D register or constant;
     'w'/'x' - .w/.x part of an accumulator (A0/A1 only);
     'D'     - D register pair name;
     'H'     - second word of a DImode/DFmode register pair;
     'T'     - byte register name;
     'X'/'Y' - log2 of a mask / inverted mask constant;
     'Z'     - LINK insn frame operand (-8 - value);
     'G'     - symbol with @GOT suffix under -fpic.  */
print_operand (FILE *file, rtx x, char code)
  enum machine_mode mode = GET_MODE (x);

  switch (GET_CODE (x))
      fprintf (file, "ne");
      fprintf (file, "ge");
      fprintf (file, "le");
      fprintf (file, "ge");
      fprintf (file, "le");
      output_operand_lossage ("invalid %%j value");

    case 'J':					 /* reverse logic */
      fprintf (file, "ne");
      fprintf (file, "e");
      fprintf (file, "le");
      fprintf (file, "ge");
      fprintf (file, "l");
      fprintf (file, "g");
      fprintf (file, "le");
      fprintf (file, "ge");
      fprintf (file, "l");
      fprintf (file, "g");
      output_operand_lossage ("invalid %%J value");

  switch (GET_CODE (x))
      /* Register operands: select the name table by modifier.  */
      gcc_assert (REGNO (x) < 32);
      fprintf (file, "%s", short_reg_names[REGNO (x)]);
      /*fprintf (file, "\n%d\n ", REGNO (x));*/
      else if (code == 'd')
	  gcc_assert (REGNO (x) < 32);
	  fprintf (file, "%s", high_reg_names[REGNO (x)]);
      else if (code == 'w')
	  gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	  fprintf (file, "%s.w", reg_names[REGNO (x)]);
      else if (code == 'x')
	  gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	  fprintf (file, "%s.x", reg_names[REGNO (x)]);
      else if (code == 'D')
	  fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
      else if (code == 'H')
	  /* Second word of a 64-bit value: the next-numbered register.  */
	  gcc_assert (mode == DImode || mode == DFmode);
	  gcc_assert (REG_P (x));
	  fprintf (file, "%s", reg_names[REGNO (x) + 1]);
      else if (code == 'T')
	  gcc_assert (D_REGNO_P (REGNO (x)));
	  fprintf (file, "%s", byte_reg_names[REGNO (x)]);
      fprintf (file, "%s", reg_names[REGNO (x)]);

      print_address_operand (file, x);

      /* Moves to half registers with d or h modifiers always use unsigned
	 constants; extract the requested half-word here.  */
	x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
      else if (code == 'h')
	x = GEN_INT (INTVAL (x) & 0xffff);
      else if (code == 'X')
	x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
      else if (code == 'Y')
	x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
      else if (code == 'Z')
	/* Used for LINK insns.  */
	x = GEN_INT (-8 - INTVAL (x));

      output_addr_const (file, x);
      if (code == 'G' && flag_pic)
	fprintf (file, "@GOT");

      output_operand_lossage ("invalid const_double operand");

      switch (XINT (x, 1))
	case UNSPEC_MOVE_PIC:
	  output_addr_const (file, XVECEXP (x, 0, 0));
	  fprintf (file, "@GOT");

	case UNSPEC_LIBRARY_OFFSET:
	  fprintf (file, "_current_shared_library_p5_offset_");

      output_addr_const (file, x);
/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.
   VDSP C Compiler manual, our ABI says that
   first 3 words of arguments will use R0, R1 and R2.  */
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype ATTRIBUTE_UNUSED,
		      rtx libname ATTRIBUTE_UNUSED)
  static CUMULATIVE_ARGS zero_cum;

  /* Set up the number of registers to use for passing arguments.  */
  cum->nregs = max_arg_registers;
  cum->arg_regs = arg_regs;
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
		      int named ATTRIBUTE_UNUSED)
  int count, bytes, words;

  bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  cum->words += words;
  cum->nregs -= words;

  /* Once all argument registers are consumed, the rest goes on the
     stack.  */
  if (cum->nregs <= 0)
      cum->arg_regs = NULL;
      /* Otherwise step the arg_regs cursor past this argument.  */
      for (count = 1; count <= words; count++)
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
	      int named ATTRIBUTE_UNUSED)
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  /* The current position in arg_regs names the register to use.  */
  return gen_rtx_REG (mode, *(cum->arg_regs));
/* For an arg passed partly in registers and partly in memory,
   this is the number of bytes passed in registers.
   For args passed entirely in registers or entirely in memory, zero.

   Refer VDSP C Compiler manual, our ABI.
   First 3 words are in registers.  So, if an argument is larger
   than the registers available, it will span the register and
   stack.  */
bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			tree type ATTRIBUTE_UNUSED,
			bool named ATTRIBUTE_UNUSED)
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int bytes_left = cum->nregs * UNITS_PER_WORD;

  /* Entirely in memory.  */
  if (bytes_left == 0)
  /* Split: the register portion is what remains available.  */
  if (bytes > bytes_left)
/* Variable sized types are passed by reference.  Returns true when
   TYPE has a size that is not a compile-time constant.  */
bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			tree type, bool named ATTRIBUTE_UNUSED)
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
bfin_return_in_memory (tree type)
  enum machine_mode mode = TYPE_MODE (type);

  /* BLKmode values need their byte size examined explicitly.  */
  if (mode == BLKmode)
  size = int_size_in_bytes (type);
/* Register in which address to store a structure value
   is passed to a function: always P0 on Blackfin.  */
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
  return gen_rtx_REG (Pmode, REG_P0);
/* Return true when register N may be used to pass function parameters;
   scans the arg_regs table up to its -1 sentinel.  */
function_arg_regno_p (int n)
  for (i = 0; arg_regs[i] != -1; i++)
    if (n == arg_regs[i])
/* Returns 1 if OP contains a symbol reference anywhere in its RTL,
   found by recursive walk over subexpressions and vectors.  */
symbolic_reference_mentioned_p (rtx op)
  register const char *fmt;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)

  /* Walk the rtx format string: 'E' entries are vectors, 'e' entries
     are single subexpressions.  */
  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
			      tree exp ATTRIBUTE_UNUSED)
1340 /* Emit RTL insns to initialize the variable parts of a trampoline at
1341 TRAMP. FNADDR is an RTX for the address of the function's pure
1342 code. CXT is an RTX for the static chain value for the function. */
1345 initialize_trampoline (tramp, fnaddr, cxt)
1346 rtx tramp, fnaddr, cxt;
1348 rtx t1 = copy_to_reg (fnaddr);
1349 rtx t2 = copy_to_reg (cxt);
1352 addr = memory_address (Pmode, plus_constant (tramp, 2));
1353 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1354 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1355 addr = memory_address (Pmode, plus_constant (tramp, 6));
1356 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1358 addr = memory_address (Pmode, plus_constant (tramp, 10));
1359 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1360 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1361 addr = memory_address (Pmode, plus_constant (tramp, 14));
1362 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  */
legitimize_pic_address (rtx orig, rtx reg)
  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
      if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))

	  gcc_assert (!no_new_pseudos);
	  reg = gen_reg_rtx (Pmode);

      /* Constant-pool entries: build the address from high/low parts
	 plus the PIC register, then load through it.  */
      emit_insn (gen_movsi_high_pic (reg, addr));
      emit_insn (gen_movsi_low_pic (reg, reg, addr));
      emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
      new = gen_rtx_MEM (Pmode, reg);

	  /* Other symbols are loaded through their GOT slot.  */
	  rtx tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
	  new = gen_rtx_MEM (Pmode,
			     gen_rtx_PLUS (Pmode, pic_offset_table_rtx,

      emit_move_insn (reg, new);
      current_function_uses_pic_offset_table = 1;

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
      if (GET_CODE (addr) == CONST)
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);

      if (XEXP (addr, 0) == pic_offset_table_rtx)

	  gcc_assert (!no_new_pseudos);
	  reg = gen_reg_rtx (Pmode);

      /* Legitimize both halves of the sum, then recombine them.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg);

      if (GET_CODE (addr) == CONST_INT)
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);

      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);

      return gen_rtx_PLUS (Pmode, base, addr);
1450 /* Emit insns to move operands[1] into operands[0]. */
1453 emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1455 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
1457 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1458 operands[1] = force_reg (SImode, operands[1]);
1460 operands[1] = legitimize_pic_address (operands[1], temp);
1463 /* Expand a move operation in mode MODE. The operands are in OPERANDS. */
1466 expand_move (rtx *operands, enum machine_mode mode)
1468 if (flag_pic && SYMBOLIC_CONST (operands[1]))
1469 emit_pic_move (operands, mode);
1471 /* Don't generate memory->memory or constant->memory moves, go through a
1473 else if ((reload_in_progress | reload_completed) == 0
1474 && GET_CODE (operands[0]) == MEM
1475 && GET_CODE (operands[1]) != REG)
1476 operands[1] = force_reg (mode, operands[1]);
1479 /* Split one or more DImode RTL references into pairs of SImode
1480 references. The RTL can be REG, offsettable MEM, integer constant, or
1481 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1482 split and "num" is its length. lo_half and hi_half are output arrays
1483 that parallel "operands". */
/* NOTE(review): the extract elides the loop header around the per-operand
   body below; comments cover only visible code.  */
1486 split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1490 rtx op = operands[num];
1492 /* simplify_subreg refuse to split volatile memory addresses,
1493 but we still have to handle it. */
1494 if (GET_CODE (op) == MEM)
/* Low word at offset 0, high word at offset 4 (little-endian layout).  */
1496 lo_half[num] = adjust_address (op, SImode, 0);
1497 hi_half[num] = adjust_address (op, SImode, 4);
/* Non-MEM case: let simplify_gen_subreg do the split; VOIDmode constants
   are treated as DImode.  */
1501 lo_half[num] = simplify_gen_subreg (SImode, op,
1502 GET_MODE (op) == VOIDmode
1503 ? DImode : GET_MODE (op), 0);
1504 hi_half[num] = simplify_gen_subreg (SImode, op,
1505 GET_MODE (op) == VOIDmode
1506 ? DImode : GET_MODE (op), 4);
1511 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
1512 SIBCALL is nonzero if this is a sibling call. */
1515 bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, int sibcall)
1517 rtx use = NULL, call;
1519 /* Static functions and indirect calls don't need the pic register. */
/* A non-local SYMBOL_REF target under PIC must record a use of the PIC
   register so it stays live across the call (the elided condition
   presumably also tests flag_pic -- TODO confirm).  */
1521 && GET_CODE (XEXP (fnaddr, 0)) == SYMBOL_REF
1522 && ! SYMBOL_REF_LOCAL_P (XEXP (fnaddr, 0))
1523 use_reg (&use, pic_offset_table_rtx);
/* Targets that are not valid call operands are forced into a register.  */
1525 if (! call_insn_operand (XEXP (fnaddr, 0), Pmode))
1527 fnaddr = copy_to_mode_reg (Pmode, XEXP (fnaddr, 0));
1528 fnaddr = gen_rtx_MEM (Pmode, fnaddr);
1530 call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
/* Wrap in a SET when the call produces a value.  */
1533 call = gen_rtx_SET (VOIDmode, retval, call);
/* Sibling calls get a parallel with a RETURN alongside the call.  */
1536 rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (2));
1537 XVECEXP (pat, 0, 0) = call;
1538 XVECEXP (pat, 0, 1) = gen_rtx_RETURN (VOIDmode);
1541 call = emit_call_insn (call);
1543 CALL_INSN_FUNCTION_USAGE (call) = use;
1546 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
1549 hard_regno_mode_ok (int regno, enum machine_mode mode)
1551 /* Allow only dregs to store value of mode HI or QI */
1552 enum reg_class class = REGNO_REG_CLASS (regno);
/* V2HImode vectors fit only in data registers.  */
1557 if (mode == V2HImode)
1558 return D_REGNO_P (regno);
/* The CC register holds only BImode (single-bit) values.  */
1559 if (class == CCREGS)
1560 return mode == BImode;
/* PDImode (40-bit) values live only in the accumulators A0/A1.  */
1561 if (mode == PDImode)
1562 return regno == REG_A0 || regno == REG_A1;
1564 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
1567 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
1570 /* Implements target hook vector_mode_supported_p. */
1573 bfin_vector_mode_supported_p (enum machine_mode mode)
1575 return mode == V2HImode;
1578 /* Return the cost of moving data from a register in class CLASS1 to
1579 one in class CLASS2. A cost of 2 is the default. */
1582 bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1583 enum reg_class class1, enum reg_class class2)
1585 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
1589 /* There are some stalls involved when moving from a DREG to a different
1590 class reg, and using the value in one of the following instructions.
1591 Attempt to model this by slightly discouraging such moves. */
1592 if (class1 == DREGS && class2 != DREGS)
1598 /* Return the cost of moving data of mode M between a
1599 register and memory. A value of 2 is the default; this cost is
1600 relative to those in `REGISTER_MOVE_COST'.
1602 ??? In theory L1 memory has single-cycle latency. We should add a switch
1603 that tells the compiler whether we expect to use only L1 memory for the
1604 program; it'll make the costs more accurate. */
1607 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1608 enum reg_class class,
1609 int in ATTRIBUTE_UNUSED)
1611 /* Make memory accesses slightly more expensive than any register-register
1612 move. Also, penalize non-DP registers, since they need secondary
1613 reloads to load and store. */
1614 if (! reg_class_subset_p (class, DPREGS))
1620 /* Inform reload about cases where moving X with a mode MODE to a register in
1621 CLASS requires an extra scratch register. Return the class needed for the
1622 scratch register. */
1625 secondary_input_reload_class (enum reg_class class, enum machine_mode mode,
1628 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
1629 in most other cases we can also use PREGS. */
1630 enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
1631 enum reg_class x_class = NO_REGS;
1632 enum rtx_code code = GET_CODE (x);
/* Look through subregs to the underlying register, if any.  */
1635 x = SUBREG_REG (x), code = GET_CODE (x);
1638 int regno = REGNO (x);
/* Map a pseudo to its assigned hard register (may be -1 if unassigned).  */
1639 if (regno >= FIRST_PSEUDO_REGISTER)
1640 regno = reg_renumber[regno];
1645 x_class = REGNO_REG_CLASS (regno);
1648 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
1649 This happens as a side effect of register elimination, and we need
1650 a scratch register to do it. */
1651 if (fp_plus_const_operand (x, mode))
1653 rtx op2 = XEXP (x, 1);
1654 int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));
/* PREG destinations can absorb the addition directly (elided branch
   presumably returns NO_REGS -- TODO confirm).  */
1656 if (class == PREGS || class == PREGS_CLOBBERED)
1658 /* If destination is a DREG, we can do this without a scratch register
1659 if the constant is valid for an add instruction. */
1660 if (class == DREGS || class == DPREGS)
1661 return large_constant_p ? PREGS : NO_REGS;
1662 /* Reloading to anything other than a DREG? Use a PREG scratch
1667 /* Data can usually be moved freely between registers of most classes.
1668 AREGS are an exception; they can only move to or from another register
1669 in AREGS or one in DREGS. They can also be assigned the constant 0. */
1670 if (x_class == AREGS)
1671 return class == DREGS || class == AREGS ? NO_REGS : DREGS;
/* Moving into AREGS: only DREGS or the constant zero go in directly.  */
1675 if (x != const0_rtx && x_class != DREGS)
1681 /* CCREGS can only be moved from/to DREGS. */
1682 if (class == CCREGS && x_class != DREGS)
1684 if (x_class == CCREGS && class != DREGS)
1686 /* All registers other than AREGS can load arbitrary constants. The only
1687 case that remains is MEM. */
1689 if (! reg_class_subset_p (class, default_class))
1690 return default_class;
1694 /* Like secondary_input_reload_class; and all we do is call that function. */
1697 secondary_output_reload_class (enum reg_class class, enum machine_mode mode,
1700 return secondary_input_reload_class (class, mode, x);
1703 /* Implement TARGET_HANDLE_OPTION. */
1706 bfin_handle_option (size_t code, const char *arg, int value)
/* -mshared-library-id=N: validate range and remember it was given so
   override_options can cross-check against -mid-shared-library.  */
1710 case OPT_mshared_library_id_:
1711 if (value > MAX_LIBRARY_ID)
1712 error ("-mshared-library-id=%s is not between 0 and %d",
1713 arg, MAX_LIBRARY_ID);
1714 bfin_lib_id_given = 1;
1722 /* Implement the macro OVERRIDE_OPTIONS. */
1725 override_options (void)
1727 if (TARGET_OMIT_LEAF_FRAME_POINTER)
1728 flag_omit_frame_pointer = 1;
1730 /* Library identification */
1731 if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
1732 error ("-mshared-library-id= specified without -mid-shared-library");
1734 if (TARGET_ID_SHARED_LIBRARY)
1735 /* ??? Provide a way to use a bigger GOT. */
/* NOTE(review): scheduling disabled unconditionally here in the visible
   code; the elided context may gate this -- confirm before relying on it.  */
1738 flag_schedule_insns = 0;
1741 /* Return the destination address of BRANCH.
1742 We need to use this instead of get_attr_length, because the
1743 cbranch_with_nops pattern conservatively sets its length to 6, and
1744 we still prefer to use shorter sequences. */
1747 branch_dest (rtx branch)
1751 rtx pat = PATTERN (branch);
/* cbranch patterns may be wrapped in a PARALLEL; the SET is element 0.  */
1752 if (GET_CODE (pat) == PARALLEL)
1753 pat = XVECEXP (pat, 0, 0);
1754 dest = SET_SRC (pat);
/* For a conditional jump, the taken arm holds the label_ref.  */
1755 if (GET_CODE (dest) == IF_THEN_ELSE)
1756 dest = XEXP (dest, 1);
1757 dest = XEXP (dest, 0);
1758 dest_uid = INSN_UID (dest);
1759 return INSN_ADDRESSES (dest_uid);
1762 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
1763 it's a branch that's predicted taken. */
1766 cbranch_predicted_taken_p (rtx insn)
1768 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
1772 int pred_val = INTVAL (XEXP (x, 0));
/* Taken when the recorded probability is at least 50%.  */
1774 return pred_val >= REG_BR_PROB_BASE / 2;
1780 /* Templates for use by asm_conditional_branch. */
/* Rows are indexed by (bp << 1) | BRF/BRT; columns by branch length
   (short / jump.s / jump.l) -- see asm_conditional_branch below.  */
1782 static const char *ccbranch_templates[][3] = {
1783 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
1784 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
1785 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
1786 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
1789 /* Output INSN, which is a conditional branch instruction with operands
1792 We deal with the various forms of conditional branches that can be generated
1793 by bfin_reorg to prevent the hardware from doing speculative loads, by
1794 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
1795 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
1796 Either of these is only necessary if the branch is short, otherwise the
1797 template we use ends in an unconditional jump which flushes the pipeline
1801 asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
1803 int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
1804 /* Note : offset for instructions like if cc jmp; jump.[sl] offset
1805 is to be taken from start of if cc rather than jump.
1806 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
/* Pick the shortest encoding that can reach the target.  */
1808 int len = (offset >= -1024 && offset <= 1022 ? 0
1809 : offset >= -4094 && offset <= 4096 ? 1
1811 int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
1812 int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
1813 output_asm_insn (ccbranch_templates[idx][len], operands);
/* NOPs are only ever requested for the predicted-not-taken form.  */
1814 gcc_assert (n_nops == 0 || !bp);
1816 while (n_nops-- > 0)
1817 output_asm_insn ("nop;", NULL);
1820 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
1821 stored in bfin_compare_op0 and bfin_compare_op1 already. */
1824 bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
1826 enum rtx_code code1, code2;
1827 rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
1828 rtx tem = bfin_cc_rtx;
1829 enum rtx_code code = GET_CODE (cmp);
1831 /* If we have a BImode input, then we already have a compare result, and
1832 do not need to emit another comparison. */
1833 if (GET_MODE (op0) == BImode)
1835 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
1836 tem = op0, code2 = code;
1841 /* bfin has these conditions */
/* Conditions the hardware lacks are handled by reversing the comparison
   (the elided switch presumably sets code1/code2 per case -- TODO confirm).  */
1851 code1 = reverse_condition (code);
/* Materialize the comparison result into the CC register.  */
1855 emit_insn (gen_rtx_SET (BImode, tem,
1856 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
1859 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
1862 /* Return nonzero iff C has exactly one bit set if it is interpreted
1863 as a 32 bit constant. */
1866 log2constp (unsigned HOST_WIDE_INT c)
/* Classic power-of-two test: nonzero and clearing the lowest set bit
   leaves zero.  (The elided line presumably masks C to 32 bits, per the
   comment above -- TODO confirm.)  */
1869 return c != 0 && (c & (c-1)) == 0;
1872 /* Returns the number of consecutive least significant zeros in the binary
1873 representation of *V.
1874 We modify *V to contain the original value arithmetically shifted right by
1875 the number of zeroes. */
1878 shiftr_zero (HOST_WIDE_INT *v)
1880 unsigned HOST_WIDE_INT tmp = *v;
1881 unsigned HOST_WIDE_INT sgn;
/* Preserve the sign bit so the shift below is arithmetic even though TMP
   is unsigned.  */
1887 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
1888 while ((tmp & 0x1) == 0 && n <= 32)
1890 tmp = (tmp >> 1) | sgn;
1897 /* After reload, split the load of an immediate constant. OPERANDS are the
1898 operands of the movsi_insn pattern which we are splitting. We return
1899 nonzero if we emitted a sequence to load the constant, zero if we emitted
1900 nothing because we want to use the splitter's default sequence. */
1903 split_load_immediate (rtx operands[])
1905 HOST_WIDE_INT val = INTVAL (operands[1]);
1907 HOST_WIDE_INT shifted = val;
1908 HOST_WIDE_INT shifted_compl = ~val;
/* shiftr_zero strips trailing zeros in place and returns their count;
   applied to both the value and its complement.  */
1909 int num_zero = shiftr_zero (&shifted);
1910 int num_compl_zero = shiftr_zero (&shifted_compl);
1911 unsigned int regno = REGNO (operands[0]);
1912 enum reg_class class1 = REGNO_REG_CLASS (regno);
1914 /* This case takes care of single-bit set/clear constants, which we could
1915 also implement with BITSET/BITCLR. */
/* Load the stripped value as a 16-bit immediate, then shift it left to
   restore the trailing zeros.  */
1917 && shifted >= -32768 && shifted < 65536
1918 && (D_REGNO_P (regno)
1919 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
1921 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
1922 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
/* Sign-extend the low 16 bits of the constant into TMP.  */
1927 tmp |= -(tmp & 0x8000);
1929 /* If high word has one bit set or clear, try to use a bit operation. */
1930 if (D_REGNO_P (regno))
1932 if (log2constp (val & 0xFFFF0000))
1934 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
1935 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
1938 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
1940 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
1941 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
1945 if (D_REGNO_P (regno))
/* 7-bit low part: small move plus a 16-bit high-half insert.  */
1947 if (CONST_7BIT_IMM_P (tmp))
1949 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
1950 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
/* High word all zeros: clear then set the low half.  */
1954 if ((val & 0xFFFF0000) == 0)
1956 emit_insn (gen_movsi (operands[0], const0_rtx));
1957 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
/* High word all ones: load -1 then set the low half.  */
1961 if ((val & 0xFFFF0000) == 0xFFFF0000)
1963 emit_insn (gen_movsi (operands[0], constm1_rtx));
1964 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
1969 /* Need DREGs for the remaining case. */
/* Complement has few significant bits: load it, shift, then invert.  */
1974 && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
1976 /* If optimizing for size, generate a sequence that has more instructions
1978 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
1979 emit_insn (gen_ashlsi3 (operands[0], operands[0],
1980 GEN_INT (num_compl_zero)));
1981 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
1987 /* Return true if the legitimate memory address for a memory operand of mode
1988 MODE. Return false if not. */
1991 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
1993 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
1994 int sz = GET_MODE_SIZE (mode);
/* Offsets are scaled by the access size: byte/halfword/word -> 0/1/2.  */
1995 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
1996 /* The usual offsettable_memref machinery doesn't work so well for this
1997 port, so we deal with the problem here. */
/* DImode accesses need room for the +4 of the second word, hence the
   slightly smaller mask.  */
1998 unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
1999 return (v & ~(mask << shift)) == 0;
/* True if REGNO may serve as a base register, honoring strictness.  */
2003 bfin_valid_reg_p (unsigned int regno, int strict)
2005 return ((strict && REGNO_OK_FOR_BASE_STRICT_P (regno))
2006 || (!strict && REGNO_OK_FOR_BASE_NONSTRICT_P (regno)));
/* GO_IF_LEGITIMATE_ADDRESS worker: accept REG, REG+const (or REG+unspec
   for PIC), and auto-inc/dec forms for suitable modes.  */
2010 bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2012 switch (GET_CODE (x)) {
2014 if (bfin_valid_reg_p (REGNO (x), strict))
2018 if (REG_P (XEXP (x, 0))
2019 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict)
2020 && (GET_CODE (XEXP (x, 1)) == UNSPEC
2021 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2022 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
2027 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2028 && REG_P (XEXP (x, 0))
2029 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
/* NOTE(review): this arm additionally requires the stack pointer as the
   base -- presumably the pre-modify/decrement case; confirm against the
   elided case labels.  */
2032 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2033 && XEXP (x, 0) == stack_pointer_rtx
2034 && REG_P (XEXP (x, 0))
2035 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
/* TARGET_RTX_COSTS worker: cost constants by whether they fold into the
   surrounding operation for free.  */
2045 bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
2047 int cost2 = COSTS_N_INSNS (1);
2052 if (outer_code == SET || outer_code == PLUS)
2053 *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
2054 else if (outer_code == AND)
2055 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2056 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2057 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2058 else if (outer_code == LEU || outer_code == LTU)
2059 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2060 else if (outer_code == MULT)
2061 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2062 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2064 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2065 || outer_code == LSHIFTRT)
2066 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2067 else if (outer_code == IOR || outer_code == XOR)
2068 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2077 *total = COSTS_N_INSNS (2);
/* PLUS in Pmode: scaled-index additions (x*2, x*4) are free addressing
   arithmetic; cost only the operands.  */
2081 if (GET_MODE (x) == Pmode)
2083 if (GET_CODE (XEXP (x, 0)) == MULT
2084 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2086 HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
2087 if (val == 2 || val == 4)
2090 *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
2091 *total += rtx_cost (XEXP (x, 1), outer_code);
/* DImode operations take multiple insns.  */
2103 if (GET_MODE (x) == DImode)
2110 if (GET_MODE (x) == DImode)
2115 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
2116 *total = COSTS_N_INSNS (3);
/* Emit a local label of the form PREFIX$NUM.  */
2125 bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2127 fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
2130 /* Used for communication between {push,pop}_multiple_operation (which
2131 we use not only as a predicate) and the corresponding output functions. */
2132 static int first_preg_to_save, first_dreg_to_save;
/* Predicate for a multi-register push PARALLEL; also records the first
   D-reg and P-reg saved for output_push_multiple.  */
2135 push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2137 int lastdreg = 8, lastpreg = 6;
2140 first_preg_to_save = lastpreg;
2141 first_dreg_to_save = lastdreg;
/* Element 0 of the PARALLEL is the SP adjustment; registers follow.  */
2142 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
2144 rtx t = XVECEXP (op, 0, i);
2148 if (GET_CODE (t) != SET)
2152 dest = SET_DEST (t);
/* Each element must store a register at SP - i*4.  */
2153 if (GET_CODE (dest) != MEM || ! REG_P (src))
2155 dest = XEXP (dest, 0);
2156 if (GET_CODE (dest) != PLUS
2157 || ! REG_P (XEXP (dest, 0))
2158 || REGNO (XEXP (dest, 0)) != REG_SP
2159 || GET_CODE (XEXP (dest, 1)) != CONST_INT
2160 || INTVAL (XEXP (dest, 1)) != -i * 4)
2163 regno = REGNO (src);
/* Group 0: first register seen decides whether we start in D-regs or
   jump straight to P-regs.  */
2166 if (D_REGNO_P (regno))
2169 first_dreg_to_save = lastdreg = regno - REG_R0;
2171 else if (regno >= REG_P0 && regno <= REG_P7)
2174 first_preg_to_save = lastpreg = regno - REG_P0;
/* Subsequent registers must be consecutive: D-regs may transition once
   into P-regs, and each run must increment by one.  */
2184 if (regno >= REG_P0 && regno <= REG_P7)
2187 first_preg_to_save = lastpreg = regno - REG_P0;
2189 else if (regno != REG_R0 + lastdreg + 1)
2194 else if (group == 2)
2196 if (regno != REG_P0 + lastpreg + 1)
/* Mirror-image predicate for a multi-register pop PARALLEL; loads run
   upward from SP and the consecutive-register checks go downward.  */
2205 pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2207 int lastdreg = 8, lastpreg = 6;
2210 for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
2212 rtx t = XVECEXP (op, 0, i);
2216 if (GET_CODE (t) != SET)
2220 dest = SET_DEST (t);
2221 if (GET_CODE (src) != MEM || ! REG_P (dest))
2223 src = XEXP (src, 0);
/* First load comes from [SP] directly; later ones from SP + (i-1)*4.  */
2227 if (! REG_P (src) || REGNO (src) != REG_SP)
2230 else if (GET_CODE (src) != PLUS
2231 || ! REG_P (XEXP (src, 0))
2232 || REGNO (XEXP (src, 0)) != REG_SP
2233 || GET_CODE (XEXP (src, 1)) != CONST_INT
2234 || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
2237 regno = REGNO (dest)
/* Pops restore in the reverse order, so P-regs (ending at the transition
   to R7) come first, then D-regs counting down.  */
2240 if (regno == REG_R7)
2245 else if (regno != REG_P0 + lastpreg - 1)
2250 else if (group == 1)
2252 if (regno != REG_R0 + lastdreg - 1)
2258 first_dreg_to_save = lastdreg;
2259 first_preg_to_save = lastpreg;
2263 /* Emit assembly code for one multi-register push described by INSN, with
2264 operands in OPERANDS. */
2267 output_push_multiple (rtx insn, rtx *operands)
2272 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2273 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
/* first_dreg_to_save == 8 / first_preg_to_save == 6 are the "none saved"
   sentinels (one past R7 / P5).  */
2276 if (first_dreg_to_save == 8)
2277 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2278 else if (first_preg_to_save == 6)
2279 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2281 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2282 first_dreg_to_save, first_preg_to_save);
2284 output_asm_insn (buf, operands);
2287 /* Emit assembly code for one multi-register pop described by INSN, with
2288 operands in OPERANDS. */
2291 output_pop_multiple (rtx insn, rtx *operands)
2296 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2297 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
2300 if (first_dreg_to_save == 8)
2301 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2302 else if (first_preg_to_save == 6)
2303 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2305 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2306 first_dreg_to_save, first_preg_to_save);
2308 output_asm_insn (buf, operands);
2311 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2314 single_move_for_strmov (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
/* Copy through a scratch register since mem->mem moves are not valid.  */
2316 rtx scratch = gen_reg_rtx (mode);
2319 srcmem = adjust_address_nv (src, mode, offset);
2320 dstmem = adjust_address_nv (dst, mode, offset);
2321 emit_move_insn (scratch, srcmem);
2322 emit_move_insn (dstmem, scratch);
2325 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
2326 alignment ALIGN_EXP. Return true if successful, false if we should fall
2327 back on a different method. */
2330 bfin_expand_strmov (rtx dst, rtx src, rtx count_exp, rtx align_exp)
2332 rtx srcreg, destreg, countreg;
2333 HOST_WIDE_INT align = 0;
2334 unsigned HOST_WIDE_INT count = 0;
/* Only constant alignment/count are handled; otherwise fall back.  */
2336 if (GET_CODE (align_exp) == CONST_INT)
2337 align = INTVAL (align_exp);
2338 if (GET_CODE (count_exp) == CONST_INT)
2340 count = INTVAL (count_exp);
2342 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
2347 /* If optimizing for size, only do single copies inline. */
/* Reject under-aligned multi-byte copies and sizes with no single-insn
   form (elided returns presumably yield false -- TODO confirm).  */
2350 if (count == 2 && align < 2)
2352 if (count == 4 && align < 4)
2354 if (count != 1 && count != 2 && count != 4)
2357 if (align < 2 && count != 1)
/* Force both addresses into registers so we can update them.  */
2360 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
2361 if (destreg != XEXP (dst, 0))
2362 dst = replace_equiv_address_nv (dst, destreg);
2363 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
2364 if (srcreg != XEXP (src, 0))
2365 src = replace_equiv_address_nv (src, srcreg);
2367 if (count != 0 && align >= 2)
2369 unsigned HOST_WIDE_INT offset = 0;
/* Word-sized portion: a single SImode move for exactly one word,
   otherwise a hardware loop (rep_movsi) over count/4 words.  */
2373 if ((count & ~3) == 4)
2375 single_move_for_strmov (dst, src, SImode, offset);
2378 else if (count & ~3)
2380 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
2381 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2383 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
/* Remaining halfword portion, same single-vs-loop split.  */
2388 if ((count & ~1) == 2)
2390 single_move_for_strmov (dst, src, HImode, offset);
2393 else if (count & ~1)
2395 HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
2396 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2398 emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
2403 single_move_for_strmov (dst, src, HImode, offset);
/* Trailing odd byte.  */
2408 single_move_for_strmov (dst, src, QImode, offset);
/* TARGET_SCHED_ADJUST_COST hook: tweak the scheduler's dependence cost
   between DEP_INSN and INSN.  */
2417 bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
2419 enum attr_type insn_type, dep_insn_type;
2420 int dep_insn_code_number;
2422 /* Anti and output dependencies have zero cost. */
2423 if (REG_NOTE_KIND (link) != 0)
2426 dep_insn_code_number = recog_memoized (dep_insn);
2428 /* If we can't recognize the insns, we can't really do anything. */
2429 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
2432 insn_type = get_attr_type (insn);
2433 dep_insn_type = get_attr_type (dep_insn);
/* Model the stall when a value produced by a move/load into an address
   register is consumed immediately: charge extra cycles unless the move
   was D-reg to D-reg.  */
2435 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
2437 rtx pat = PATTERN (dep_insn);
2438 rtx dest = SET_DEST (pat);
2439 rtx src = SET_SRC (pat);
2440 if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
2442 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
2448 /* We use the machine specific reorg pass for emitting CSYNC instructions
2449 after conditional branches as needed.
2451 The Blackfin is unusual in that a code sequence like
2454 may speculatively perform the load even if the condition isn't true. This
2455 happens for a branch that is predicted not taken, because the pipeline
2456 isn't flushed or stalled, so the early stages of the following instructions,
2457 which perform the memory reference, are allowed to execute before the
2458 jump condition is evaluated.
2459 Therefore, we must insert additional instructions in all places where this
2460 could lead to incorrect behavior. The manual recommends CSYNC, while
2461 VDSP seems to use NOPs (even though its corresponding compiler option is
2464 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
2465 When optimizing for size, we turn the branch into a predicted taken one.
2466 This may be slower due to mispredicts, but saves code size. */
2471 rtx insn, last_condjump = NULL_RTX;
2472 int cycles_since_jump = INT_MAX;
/* Nothing to do unless one of the hardware anomalies is enabled.
   NOTE(review): the visible test requires BOTH workarounds to be off to
   return; the individual passes below re-check their own flag.  */
2474 if (! TARGET_SPECLD_ANOMALY || ! TARGET_CSYNC_ANOMALY)
2477 /* First pass: find predicted-false branches; if something after them
2478 needs nops, insert them or change the branch to predict true. */
2479 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Notes, barriers and labels carry no hazard; skip them.  */
2483 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
2486 pat = PATTERN (insn);
2487 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2488 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2489 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
/* A predicted-not-taken conditional jump opens a hazard window.  */
2494 if (any_condjump_p (insn)
2495 && ! cbranch_predicted_taken_p (insn))
2497 last_condjump = insn;
2498 cycles_since_jump = 0;
2501 cycles_since_jump = INT_MAX;
2503 else if (INSN_P (insn))
2505 enum attr_type type = get_attr_type (insn);
2506 int delay_needed = 0;
2507 if (cycles_since_jump < INT_MAX)
2508 cycles_since_jump++;
/* A load that may trap within the window needs the speculative-load
   workaround; a sync insn needs the CSYNC workaround.  */
2510 if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
2512 rtx pat = single_set (insn);
2513 if (may_trap_p (SET_SRC (pat)))
2516 else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2519 if (delay_needed > cycles_since_jump)
2523 rtx *op = recog_data.operand;
2525 delay_needed -= cycles_since_jump;
/* Rewrite the guarding branch: either force predicted-taken (size
   optimization) or pad with the needed number of NOPs.  */
2527 extract_insn (last_condjump);
2530 pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
2532 cycles_since_jump = INT_MAX;
2535 /* Do not adjust cycles_since_jump in this case, so that
2536 we'll increase the number of NOPs for a subsequent insn
2538 pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
2539 GEN_INT (delay_needed));
2540 PATTERN (last_condjump) = pat;
2541 INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
2545 /* Second pass: for predicted-true branches, see if anything at the
2546 branch destination needs extra nops. */
2547 if (! TARGET_CSYNC_ANOMALY)
2550 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2553 && any_condjump_p (insn)
2554 && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
2555 || cbranch_predicted_taken_p (insn)))
2557 rtx target = JUMP_LABEL (insn);
/* Scan a few insns past the branch target for sync insns in the
   hazard window.  */
2559 cycles_since_jump = 0;
2560 for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
2564 if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
2567 pat = PATTERN (target);
2568 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2569 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2570 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2573 if (INSN_P (target))
2575 enum attr_type type = get_attr_type (target);
2576 int delay_needed = 0;
2577 if (cycles_since_jump < INT_MAX)
2578 cycles_since_jump++;
2580 if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2583 if (delay_needed > cycles_since_jump)
2585 rtx prev = prev_real_insn (label);
2586 delay_needed -= cycles_since_jump;
2588 fprintf (dump_file, "Adding %d nops after %d\n",
2589 delay_needed, INSN_UID (label));
/* If the insn before the label is already a cbranch_with_nops, its
   existing padding covers part of the requirement; shrink its count
   rather than double-padding.  */
2591 && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
2598 "Reducing nops on insn %d.\n",
2601 x = XVECEXP (x, 0, 1);
2602 v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
2603 XVECEXP (x, 0, 0) = GEN_INT (v);
2605 while (delay_needed-- > 0)
2606 emit_insn_after (gen_nop (), label);
2615 /* Handle interrupt_handler, exception_handler and nmi_handler function
2616 attributes; arguments as in struct attribute_spec.handler. */
2619 handle_int_attribute (tree *node, tree name,
2620 tree args ATTRIBUTE_UNUSED,
2621 int flags ATTRIBUTE_UNUSED,
/* Accept the attribute on function decls/types; warn and drop it on
   anything else; reject combining multiple handler-kind attributes.  */
2625 if (TREE_CODE (x) == FUNCTION_DECL)
2628 if (TREE_CODE (x) != FUNCTION_TYPE)
2630 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2631 IDENTIFIER_POINTER (name));
2632 *no_add_attrs = true;
2634 else if (funkind (x) != SUBROUTINE)
2635 error ("multiple function type attributes specified");
2640 /* Return 0 if the attributes for two types are incompatible, 1 if they
2641 are compatible, and 2 if they are nearly compatible (which causes a
2642 warning to be generated). */
2645 bfin_comp_type_attributes (tree type1, tree type2)
2647 e_funkind kind1, kind2;
2649 if (TREE_CODE (type1) != FUNCTION_TYPE)
2652 kind1 = funkind (type1);
2653 kind2 = funkind (type2);
2658 /* Check for mismatched modifiers */
/* Each modifier attribute must be present on both types or neither.  */
2659 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
2660 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
2663 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
2664 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
2667 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
2668 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
2674 /* Table of valid machine attributes. */
2675 const struct attribute_spec bfin_attribute_table[] =
2677 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2678 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute },
2679 { "exception_handler", 0, 0, false, true, true, handle_int_attribute },
2680 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute },
2681 { "nesting", 0, 0, false, true, true, NULL },
2682 { "kspisusp", 0, 0, false, true, true, NULL },
2683 { "saveall", 0, 0, false, true, true, NULL },
2684 { NULL, 0, 0, false, false, false, NULL }
2687 /* Output the assembler code for a thunk function. THUNK_DECL is the
2688 declaration for the thunk function itself, FUNCTION is the decl for
2689 the target function. DELTA is an immediate constant offset to be
2690 added to THIS. If VCALL_OFFSET is nonzero, the word at
2691 *(*this + vcall_offset) should be added to THIS. */
2694 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
2695 tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
2696 HOST_WIDE_INT vcall_offset, tree function)
2699 /* The this parameter is passed as the first argument. */
2700 rtx this = gen_rtx_REG (Pmode, REG_R0);
2702 /* Adjust the this parameter by a fixed constant. */
/* Small deltas fit a 7-bit add; slightly larger ones are split into two
   adds; anything else goes through a scratch register (r3).  */
2706 if (delta >= -64 && delta <= 63)
2708 xops[0] = GEN_INT (delta);
2709 output_asm_insn ("%1 += %0;", xops);
2711 else if (delta >= -128 && delta < -64)
2713 xops[0] = GEN_INT (delta + 64);
2714 output_asm_insn ("%1 += -64; %1 += %0;", xops);
2716 else if (delta > 63 && delta <= 126)
2718 xops[0] = GEN_INT (delta - 63);
2719 output_asm_insn ("%1 += 63; %1 += %0;", xops);
2723 xops[0] = GEN_INT (delta);
2724 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
2728 /* Adjust the this parameter by a value stored in the vtable. */
2731 rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
2732 rtx tmp = gen_rtx_REG (Pmode, REG_R2);
/* Load the vtable pointer (*this) into p2.  */
2736 output_asm_insn ("%2 = r0; %2 = [%2];", xops);
2738 /* Adjust the this parameter. */
2739 xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
2740 if (!memory_operand (xops[0], Pmode))
/* Offset too large for a direct addressing mode: materialize it in p1
   and add before the load.  */
2742 rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
2743 xops[0] = GEN_INT (vcall_offset);
2745 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
2746 xops[0] = gen_rtx_MEM (Pmode, p2tmp);
2749 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
/* Tail-call the real function.  NOTE(review): the `1 ||` makes the
   non-PIC jump unconditional; the PIC arm is dead in the visible code.  */
2752 xops[0] = XEXP (DECL_RTL (function), 0);
2753 if (1 || !flag_pic || (*targetm.binds_local_p) (function))
2754 output_asm_insn ("jump.l\t%P0", xops);
2757 /* Codes for all the Blackfin builtins. */
/* Convenience wrapper: registers NAME with type TYPE as a machine-specific
   (BUILT_IN_MD) builtin carrying function code CODE.  */
2765 #define def_builtin(NAME, TYPE, CODE) \
2767 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
2771 /* Set up all builtin functions for this target. */
/* Implements TARGET_INIT_BUILTINS.  Registers __builtin_bfin_csync and
   __builtin_bfin_ssync, both of type void (*) (void).  */
2773 bfin_init_builtins (void)
2775 tree void_ftype_void
2776 = build_function_type (void_type_node, void_list_node);
2778 /* Register the CSYNC and SSYNC synchronization builtins.  (NOTE(review):
   the previous comment mentioned "MMX insns" -- a stale copy-paste from
   the i386 port.)  */
2779 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
2780 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
2783 /* Expand an expression EXP that calls a built-in function,
2784 with result going to TARGET if that's convenient
2785 (and in mode MODE if that's convenient).
2786 SUBTARGET may be used as the target for computing one of EXP's operands.
2787 IGNORE is nonzero if the value is to be ignored. */
/* Implements TARGET_EXPAND_BUILTIN.  Both Blackfin builtins are void, so
   TARGET/SUBTARGET/MODE/IGNORE are unused here.  */
2790 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
2791 rtx subtarget ATTRIBUTE_UNUSED,
2792 enum machine_mode mode ATTRIBUTE_UNUSED,
2793 int ignore ATTRIBUTE_UNUSED)
/* The CALL_EXPR's callee is an address expression wrapping the
   FUNCTION_DECL, hence the double TREE_OPERAND to reach the decl.  */
2795 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2796 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
/* Each builtin expands to a single named insn pattern -- presumably
   defined in bfin.md; verify the pattern names there.  */
2800 case BFIN_BUILTIN_CSYNC:
2801 emit_insn (gen_csync ());
2803 case BFIN_BUILTIN_SSYNC:
2804 emit_insn (gen_ssync ());
/* Initialize the GCC target structure: override only the hooks where the
   Blackfin port needs non-default behavior; TARGET_INITIALIZER supplies
   the defaults for the rest.  */
2812 #undef TARGET_INIT_BUILTINS
2813 #define TARGET_INIT_BUILTINS bfin_init_builtins
2815 #undef TARGET_EXPAND_BUILTIN
2816 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
/* Assembly output hooks.  */
2818 #undef TARGET_ASM_GLOBALIZE_LABEL
2819 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
2821 #undef TARGET_ASM_FILE_START
2822 #define TARGET_ASM_FILE_START output_file_start
/* Attribute handling.  */
2824 #undef TARGET_ATTRIBUTE_TABLE
2825 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
2827 #undef TARGET_COMP_TYPE_ATTRIBUTES
2828 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
/* Cost estimates used by the RTL optimizers.  */
2830 #undef TARGET_RTX_COSTS
2831 #define TARGET_RTX_COSTS bfin_rtx_costs
2833 #undef TARGET_ADDRESS_COST
2834 #define TARGET_ADDRESS_COST bfin_address_cost
2836 #undef TARGET_ASM_INTERNAL_LABEL
2837 #define TARGET_ASM_INTERNAL_LABEL bfin_internal_label
2839 #undef TARGET_MACHINE_DEPENDENT_REORG
2840 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
2842 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
2843 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
/* Thunks are emitted as plain assembler text (bfin_output_mi_thunk), so
   this port can always output a thunk -- hence the _true hook.  */
2845 #undef TARGET_ASM_OUTPUT_MI_THUNK
2846 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
2847 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2848 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
2850 #undef TARGET_SCHED_ADJUST_COST
2851 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
/* Calling-convention hooks.  */
2853 #undef TARGET_PROMOTE_PROTOTYPES
2854 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
2855 #undef TARGET_PROMOTE_FUNCTION_ARGS
2856 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
2857 #undef TARGET_PROMOTE_FUNCTION_RETURN
2858 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
2860 #undef TARGET_ARG_PARTIAL_BYTES
2861 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
2863 #undef TARGET_PASS_BY_REFERENCE
2864 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
2866 #undef TARGET_SETUP_INCOMING_VARARGS
2867 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
2869 #undef TARGET_STRUCT_VALUE_RTX
2870 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
2872 #undef TARGET_VECTOR_MODE_SUPPORTED_P
2873 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
/* Option handling.  */
2875 #undef TARGET_HANDLE_OPTION
2876 #define TARGET_HANDLE_OPTION bfin_handle_option
2878 #undef TARGET_DEFAULT_TARGET_FLAGS
2879 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
/* The single definition of the target hook vector for this port.  */
2881 struct gcc_target targetm = TARGET_INITIALIZER;