1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "insn-codes.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
42 #include "target-def.h"
47 #include "integrate.h"
48 #include "bfin-protos.h"
52 /* Test and compare insns in bfin.md store the information needed to
53 generate branch and scc insns here. */
54 rtx bfin_compare_op0, bfin_compare_op1;
56 /* RTX for condition code flag register and RETS register */
57 extern GTY(()) rtx bfin_cc_rtx;
58 extern GTY(()) rtx bfin_rets_rtx;
59 rtx bfin_cc_rtx, bfin_rets_rtx;
/* Number of registers usable for argument passing; computed in
   output_file_start by scanning arg_regs below.  */
61 int max_arg_registers = 0;
63 /* Arrays used when emitting register names. */
64 const char *short_reg_names[] = SHORT_REGISTER_NAMES;
65 const char *high_reg_names[] = HIGH_REGISTER_NAMES;
66 const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
67 const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
/* Hard register numbers used for argument passing, terminated by a
   negative entry (loops below test both ">= 0" and "!= -1").  */
69 static int arg_regs[] = FUNCTION_ARG_REGISTERS;
71 /* Nonzero if -mshared-library-id was given. */
72 static int bfin_lib_id_given;
/* Emit an assembler directive making NAME globally visible in the
   output STREAM.  */
75 bfin_globalize_label (FILE *stream, const char *name)
77 fputs (".global ", stream);
78 assemble_name (stream, name);
/* Emit the .file directive for the assembly output, and compute
   max_arg_registers by counting the entries of arg_regs (the array is
   terminated by a negative value).  */
84 output_file_start (void)
86 FILE *file = asm_out_file;
89 fprintf (file, ".file \"%s\";\n", input_filename);
91 for (i = 0; arg_regs[i] >= 0; i++)
93 max_arg_registers = i; /* how many arg reg used */
96 /* Called early in the compilation to conditionally modify
97 fixed_regs/call_used_regs.
   Also builds the bfin_cc_rtx / bfin_rets_rtx globals declared above.  */
100 conditional_register_usage (void)
102 /* initialize condition code flag register rtx */
103 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
104 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
107 /* Examine machine-dependent attributes of function type FUNTYPE and return its
108 type. See the definition of E_FUNKIND. */
110 static e_funkind funkind (tree funtype)
112 tree attrs = TYPE_ATTRIBUTES (funtype);
113 if (lookup_attribute ("interrupt_handler", attrs))
114 return INTERRUPT_HANDLER;
115 else if (lookup_attribute ("exception_handler", attrs))
116 return EXCPT_HANDLER;
117 else if (lookup_attribute ("nmi_handler", attrs))
/* NOTE(review): the NMI return and the default (presumably SUBROUTINE)
   return fall outside this excerpt.  */
123 /* Stack frame layout. */
125 /* Compute the number of DREGS to save with a push_multiple operation.
126 This could include registers that aren't modified in the function,
127 since push_multiple only takes a range of registers. */
130 n_dregs_to_save (void)
/* The first live call-saved D register determines the contiguous range
   R?..R7 that push_multiple must cover.  */
134 for (i = REG_R0; i <= REG_R7; i++)
136 if (regs_ever_live[i] && ! call_used_regs[i])
137 return REG_R7 - i + 1;
/* For eh_return, the EH data registers must be included as well.  */
139 if (current_function_calls_eh_return)
144 unsigned test = EH_RETURN_DATA_REGNO (j);
145 if (test == INVALID_REGNUM)
148 return REG_R7 - i + 1;
156 /* Like n_dregs_to_save, but compute number of PREGS to save. */
159 n_pregs_to_save (void)
163 for (i = REG_P0; i <= REG_P5; i++)
/* The PIC register also counts as needing a save when the function uses
   the GOT, or for any non-leaf function in an ID shared library.  */
164 if ((regs_ever_live[i] && ! call_used_regs[i])
165 || (i == PIC_OFFSET_TABLE_REGNUM
166 && (current_function_uses_pic_offset_table
167 || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
168 return REG_P5 - i + 1;
172 /* Determine if we are going to save the frame pointer in the prologue. */
175 must_save_fp_p (void)
/* FP is saved either when a frame pointer is required, or when FP has
   been used as an ordinary callee-saved register.  */
177 return (frame_pointer_needed || regs_ever_live[REG_FP]);
/* Nonzero if the current function needs a full stack frame (allocated
   with a LINK insn rather than individual pushes).  */
181 stack_frame_needed_p (void)
183 /* EH return puts a new return address into the frame using an
184 address relative to the frame pointer. */
185 if (current_function_calls_eh_return)
187 return frame_pointer_needed;
190 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
191 must save all registers; this is used for interrupt handlers.
192 SPREG contains (reg:SI REG_SP). */
195 expand_prologue_reg_save (rtx spreg, int saveall)
197 int ndregs = saveall ? 8 : n_dregs_to_save ();
198 int npregs = saveall ? 6 : n_pregs_to_save ();
/* push_multiple saves a contiguous range ending at R7/P5, so the first
   register saved is derived from the count.  */
199 int dregno = REG_R7 + 1 - ndregs;
200 int pregno = REG_P5 + 1 - npregs;
201 int total = ndregs + npregs;
/* Build one PARALLEL holding: an UNSPEC_PUSH_MULTIPLE marker, one store
   per saved register, and the final SP decrement.  */
208 val = GEN_INT (-total * 4);
209 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
210 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
211 UNSPEC_PUSH_MULTIPLE);
212 XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
213 gen_rtx_PLUS (Pmode, spreg,
215 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
216 for (i = 0; i < total; i++)
/* Each register goes at a successively lower SP-relative slot.  */
218 rtx memref = gen_rtx_MEM (word_mode,
219 gen_rtx_PLUS (Pmode, spreg,
220 GEN_INT (- i * 4 - 4)));
224 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
230 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
234 XVECEXP (pat, 0, i + 1) = subpat;
235 RTX_FRAME_RELATED_P (subpat) = 1;
237 insn = emit_insn (pat);
238 RTX_FRAME_RELATED_P (insn) = 1;
241 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
242 must save all registers; this is used for interrupt handlers.
243 SPREG contains (reg:SI REG_SP). */
246 expand_epilogue_reg_restore (rtx spreg, int saveall)
248 int ndregs = saveall ? 8 : n_dregs_to_save ();
249 int npregs = saveall ? 6 : n_pregs_to_save ();
250 int total = ndregs + npregs;
/* Mirror of expand_prologue_reg_save: one PARALLEL with the SP
   increment first, then one load per restored register.  */
257 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
258 XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
259 gen_rtx_PLUS (Pmode, spreg,
260 GEN_INT (total * 4)));
267 for (i = 0; i < total; i++)
270 ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
272 rtx memref = gen_rtx_MEM (word_mode, addr);
275 XVECEXP (pat, 0, i + 1)
276 = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
285 insn = emit_insn (pat);
286 RTX_FRAME_RELATED_P (insn) = 1;
289 /* Perform any needed actions needed for a function that is receiving a
290 variable number of arguments.
294 MODE and TYPE are the mode and type of the current parameter.
296 PRETEND_SIZE is a variable that should be set to the amount of stack
297 that must be pushed by the prolog to pretend that our caller pushed
300 Normally, this macro will push all remaining incoming registers on the
301 stack and set PRETEND_SIZE to the length of the registers pushed.
304 - VDSP C compiler manual (our ABI) says that a variable args function
305 should save the R0, R1 and R2 registers in the stack.
306 - The caller will always leave space on the stack for the
307 arguments that are passed in registers, so we don't have
308 to leave any extra space.
309 - now, the vastart pointer can access all arguments from the stack. */
312 setup_incoming_varargs (CUMULATIVE_ARGS *cum,
313 enum machine_mode mode ATTRIBUTE_UNUSED,
314 tree type ATTRIBUTE_UNUSED, int *pretend_size,
323 /* The move for named arguments will be generated automatically by the
324 compiler. We need to generate the move rtx for the unnamed arguments
325 if they are in the first 3 words. We assume at least 1 named argument
326 exists, so we never generate [ARGP] = R0 here. */
/* NOTE(review): the loop index doubles as a hard register number —
   this presumes R0..R2 occupy regnos 0..2; confirm against REG_R0.  */
328 for (i = cum->words + 1; i < max_arg_registers; i++)
330 mem = gen_rtx_MEM (Pmode,
331 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
332 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
338 /* Value should be nonzero if functions must have frame pointers.
339 Zero means the frame pointer need not be set up (and parms may
340 be accessed via the stack pointer) in functions that seem suitable. */
343 bfin_frame_pointer_required (void)
345 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
/* Interrupt/exception/NMI handlers always need a frame pointer.  */
347 if (fkind != SUBROUTINE)
350 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
351 so we have to override it for non-leaf functions. */
352 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
358 /* Return the number of registers pushed during the prologue. */
361 n_regs_saved_by_prologue (void)
363 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
364 int n = n_dregs_to_save () + n_pregs_to_save ();
366 if (stack_frame_needed_p ())
367 /* We use a LINK instruction in this case. */
371 if (must_save_fp_p ())
373 if (! current_function_is_leaf)
/* Interrupt-class functions additionally save ASTAT, optionally the
   return register for nesting, and every clobbered register above P7
   (accumulators A0/A1 each count as two words).  This bookkeeping must
   stay in sync with expand_interrupt_handler_prologue below.  */
377 if (fkind != SUBROUTINE)
379 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
380 tree all = lookup_attribute ("saveall", attrs);
383 /* Increment once for ASTAT. */
387 if (lookup_attribute ("nesting", attrs))
390 for (i = REG_P7 + 1; i < REG_CC; i++)
393 || (!leaf_function_p () && call_used_regs[i]))
394 n += i == REG_A0 || i == REG_A1 ? 2 : 1;
399 /* Return the offset between two registers, one to be eliminated, and the other
400 its replacement, at the start of a routine. */
403 bfin_initial_elimination_offset (int from, int to)
405 HOST_WIDE_INT offset = 0;
/* The arg pointer sits above the prologue-saved registers (4 bytes
   each).  */
407 if (from == ARG_POINTER_REGNUM)
408 offset = n_regs_saved_by_prologue () * 4;
/* Eliminating to SP additionally crosses the outgoing-args area (at
   least FIXED_STACK_AREA bytes if any are used) and the local frame.  */
410 if (to == STACK_POINTER_REGNUM)
412 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
413 offset += current_function_outgoing_args_size;
414 else if (current_function_outgoing_args_size)
415 offset += FIXED_STACK_AREA;
417 offset += get_frame_size ();
423 /* Emit code to load a constant CONSTANT into register REG; setting
424 RTX_FRAME_RELATED_P on all insns we generate. Make sure that the insns
425 we generate need not be split. */
428 frame_related_constant_load (rtx reg, HOST_WIDE_INT constant)
431 rtx cst = GEN_INT (constant);
/* Constants in [-32768, 65536) fit a single move; otherwise load the
   value in two halves (high then low).  */
433 if (constant >= -32768 && constant < 65536)
434 insn = emit_move_insn (reg, cst);
437 /* We don't call split_load_immediate here, since dwarf2out.c can get
438 confused about some of the more clever sequences it can generate. */
439 insn = emit_insn (gen_movsi_high (reg, cst));
440 RTX_FRAME_RELATED_P (insn) = 1;
441 insn = emit_insn (gen_movsi_low (reg, reg, cst));
443 RTX_FRAME_RELATED_P (insn) = 1;
446 /* Generate efficient code to add a value to the stack pointer (the code
447 below adjusts SPREG, despite the original comment saying "frame
448 pointer"). We can use P1 as a scratch register. Set
   RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero. */
451 add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
456 /* Choose whether to use a sequence using a temporary register, or
457 a sequence with multiple adds. We can add a signed 7 bit value
458 in one instruction. */
459 if (value > 120 || value < -120)
461 rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
465 frame_related_constant_load (tmpreg, value);
468 insn = emit_move_insn (tmpreg, GEN_INT (value));
470 RTX_FRAME_RELATED_P (insn) = 1;
473 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
475 RTX_FRAME_RELATED_P (insn) = 1;
486 /* We could use -62, but that would leave the stack unaligned, so
490 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
492 RTX_FRAME_RELATED_P (insn) = 1;
498 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
499 is too large, generate a sequence of insns that has the same effect.
500 SPREG contains (reg:SI REG_SP). */
503 emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
505 HOST_WIDE_INT link_size = frame_size;
/* The LINK immediate is limited; clamp to the largest encodable frame
   and subtract the remainder from SP afterwards.  */
509 if (link_size > 262140)
512 /* Use a LINK insn with as big a constant as possible, then subtract
513 any remaining size from the SP. */
514 insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
515 RTX_FRAME_RELATED_P (insn) = 1;
/* Mark every SET inside the LINK pattern frame-related so dwarf2out
   records the saves correctly.  */
517 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
519 rtx set = XVECEXP (PATTERN (insn), 0, i);
520 gcc_assert (GET_CODE (set) == SET);
521 RTX_FRAME_RELATED_P (set) = 1;
524 frame_size -= link_size;
528 /* Must use a call-clobbered PREG that isn't the static chain. */
529 rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
531 frame_related_constant_load (tmpreg, -frame_size);
532 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
533 RTX_FRAME_RELATED_P (insn) = 1;
537 /* Return the number of bytes we must reserve for outgoing arguments
538 in the current function's stack frame.
   NOTE(review): the function header itself (presumably arg_area_size,
   judging by the call in do_link) is missing from this excerpt.
   Always at least FIXED_STACK_AREA bytes when any outgoing args exist.  */
543 if (current_function_outgoing_args_size)
545 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
546 return current_function_outgoing_args_size;
548 return FIXED_STACK_AREA;
553 /* Save RETS and FP, and allocate a stack frame.
   SPREG is (reg:SI REG_SP); FRAME_SIZE is the local frame in bytes,
   to which the outgoing-argument area is added here.  */
556 do_link (rtx spreg, HOST_WIDE_INT frame_size)
558 frame_size += arg_area_size ();
/* Either a single LINK does everything, or we push RETS/FP manually
   and adjust SP ourselves.  */
560 if (stack_frame_needed_p ()
561 || (must_save_fp_p () && ! current_function_is_leaf))
562 emit_link_insn (spreg, frame_size);
565 if (! current_function_is_leaf)
567 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
568 gen_rtx_PRE_DEC (Pmode, spreg)),
570 rtx insn = emit_insn (pat);
571 RTX_FRAME_RELATED_P (insn) = 1;
573 if (must_save_fp_p ())
575 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
576 gen_rtx_PRE_DEC (Pmode, spreg)),
577 gen_rtx_REG (Pmode, REG_FP));
578 rtx insn = emit_insn (pat);
579 RTX_FRAME_RELATED_P (insn) = 1;
581 add_to_sp (spreg, -frame_size, 1);
585 /* Like do_link, but used for epilogues to deallocate the stack frame. */
588 do_unlink (rtx spreg, HOST_WIDE_INT frame_size)
590 frame_size += arg_area_size ();
592 if (stack_frame_needed_p ())
593 emit_insn (gen_unlink ());
/* Manual teardown: pop FP and RETS in the reverse order of do_link.
   The USE insns keep the restored values live past the epilogue.  */
596 rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
598 add_to_sp (spreg, frame_size, 0);
599 if (must_save_fp_p ())
601 rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
602 emit_move_insn (fpreg, postinc);
603 emit_insn (gen_rtx_USE (VOIDmode, fpreg));
605 if (! current_function_is_leaf)
607 emit_move_insn (bfin_rets_rtx, postinc);
608 emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
613 /* Generate a prologue suitable for a function of kind FKIND. This is
614 called for interrupt and exception handler prologues.
615 SPREG contains (reg:SI REG_SP). */
618 expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
621 HOST_WIDE_INT frame_size = get_frame_size ();
622 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
623 rtx predec = gen_rtx_MEM (SImode, predec1);
625 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
626 tree all = lookup_attribute ("saveall", attrs);
627 tree kspisusp = lookup_attribute ("kspisusp", attrs);
/* With "kspisusp", the kernel stack pointer is loaded from USP.  */
631 insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
632 RTX_FRAME_RELATED_P (insn) = 1;
635 /* We need space on the stack in case we need to save the argument
637 if (fkind == EXCPT_HANDLER)
639 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
640 RTX_FRAME_RELATED_P (insn) = 1;
/* Push ASTAT, then the D/P registers, then the remaining clobbered
   registers above P7 — the reverse of the epilogue's restore order.
   Keep in sync with n_regs_saved_by_prologue.  */
643 insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
644 RTX_FRAME_RELATED_P (insn) = 1;
646 expand_prologue_reg_save (spreg, all != NULL_TREE);
648 for (i = REG_P7 + 1; i < REG_CC; i++)
651 || (!leaf_function_p () && call_used_regs[i]))
/* Accumulators A0/A1 are saved as PDImode (two stack words).  */
653 if (i == REG_A0 || i == REG_A1)
654 insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
655 gen_rtx_REG (PDImode, i));
657 insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
658 RTX_FRAME_RELATED_P (insn) = 1;
/* For nested handlers, also save the event return address register
   matching the handler kind (RETX/RETN/presumably RETI).  */
661 if (lookup_attribute ("nesting", attrs))
663 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
664 : fkind == NMI_HANDLER ? REG_RETN
666 insn = emit_move_insn (predec, srcreg);
667 RTX_FRAME_RELATED_P (insn) = 1;
670 do_link (spreg, frame_size);
/* Exception handlers set up R0 = high bits of SEQSTAT (>>26 then <<26),
   R1 = SP, R2 = FP + 8 — presumably arguments for a C-level handler;
   the REG_MAYBE_DEAD notes let flow delete these if unused.  */
672 if (fkind == EXCPT_HANDLER)
674 rtx r0reg = gen_rtx_REG (SImode, REG_R0);
675 rtx r1reg = gen_rtx_REG (SImode, REG_R1);
676 rtx r2reg = gen_rtx_REG (SImode, REG_R2);
679 insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
680 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
682 insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
683 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
685 insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
686 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
688 insn = emit_move_insn (r1reg, spreg);
689 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
691 insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
692 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
694 insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
695 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
700 /* Generate an epilogue suitable for a function of kind FKIND. This is
701 called for interrupt and exception handler epilogues.
702 SPREG contains (reg:SI REG_SP).
   Restores mirror the prologue's saves in reverse order.  */
705 expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
708 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
709 rtx postinc = gen_rtx_MEM (SImode, postinc1);
710 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
711 tree all = lookup_attribute ("saveall", attrs);
713 /* A slightly crude technique to stop flow from trying to delete "dead"
715 MEM_VOLATILE_P (postinc) = 1;
717 do_unlink (spreg, get_frame_size ());
/* Nested handlers: restore the event return register first (it was
   pushed last by the prologue).  */
719 if (lookup_attribute ("nesting", attrs))
721 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
722 : fkind == NMI_HANDLER ? REG_RETN
724 emit_move_insn (srcreg, postinc);
/* Pop the registers above P7 in descending order; A0/A1 come back as
   PDImode (two words).  */
727 for (i = REG_CC - 1; i > REG_P7; i--)
730 || (!leaf_function_p () && call_used_regs[i]))
732 if (i == REG_A0 || i == REG_A1)
734 rtx mem = gen_rtx_MEM (PDImode, postinc1);
735 MEM_VOLATILE_P (mem) = 1;
736 emit_move_insn (gen_rtx_REG (PDImode, i), mem);
739 emit_move_insn (gen_rtx_REG (SImode, i), postinc);
742 expand_epilogue_reg_restore (spreg, all != NULL_TREE);
744 emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
746 /* Deallocate any space we left on the stack in case we needed to save the
747 argument registers. */
748 if (fkind == EXCPT_HANDLER)
749 emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
751 emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
754 /* Generate RTL for the prologue of the current function. */
757 bfin_expand_prologue (void)
760 HOST_WIDE_INT frame_size = get_frame_size ();
761 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
762 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
/* Interrupt/exception/NMI handlers get an entirely different prologue.  */
764 if (fkind != SUBROUTINE)
766 expand_interrupt_handler_prologue (spreg, fkind);
770 expand_prologue_reg_save (spreg, 0);
772 do_link (spreg, frame_size);
/* For ID shared libraries, load the PIC register: either at a fixed
   GOT offset derived from -mshared-library-id, or via the
   UNSPEC_LIBRARY_OFFSET symbol resolved at link time.  */
774 if (TARGET_ID_SHARED_LIBRARY
775 && (current_function_uses_pic_offset_table
776 || !current_function_is_leaf))
780 if (bfin_lib_id_given)
781 addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
783 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
784 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
785 UNSPEC_LIBRARY_OFFSET));
786 insn = emit_insn (gen_movsi (pic_offset_table_rtx,
787 gen_rtx_MEM (Pmode, addr)));
788 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
792 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
793 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
794 eh_return pattern. */
797 bfin_expand_epilogue (int need_return, int eh_return)
799 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
800 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
802 if (fkind != SUBROUTINE)
804 expand_interrupt_handler_epilogue (spreg, fkind);
808 do_unlink (spreg, get_frame_size ());
810 expand_epilogue_reg_restore (spreg, 0);
812 /* Omit the return insn if this is for a sibcall. */
/* For eh_return, P2 holds the stack adjustment (see EH_RETURN macros).  */
817 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
819 emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
822 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
825 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
826 unsigned int new_reg)
828 /* Interrupt functions can only use registers that have already been
829 saved by the prologue, even if they would normally be
/* In non-SUBROUTINE functions, a register not marked regs_ever_live was
   not saved, so renaming into it is unsafe.  */
832 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
833 && !regs_ever_live[new_reg])
839 /* Return the value of the return address for the frame COUNT steps up
840 from the current frame, after the prologue.
841 We punt for everything but the current frame by returning const0_rtx. */
844 bfin_return_addr_rtx (int count)
/* COUNT == 0: the return address is the entry value of RETS.  */
849 return get_hard_reg_initial_val (Pmode, REG_RETS);
852 /* Try machine-dependent ways of modifying an illegitimate address X
853 to be legitimate. If we find one, return the new, valid address,
854 otherwise return NULL_RTX.
856 OLDX is the address as it was before break_out_memory_refs was called.
857 In some cases it is useful to look at this to decide what needs to be done.
859 MODE is the mode of the memory reference.
   All parameters are unused: this target performs no legitimization.  */
862 legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
863 enum machine_mode mode ATTRIBUTE_UNUSED)
868 /* This predicate is used to compute the length of a load/store insn.
869 OP is a MEM rtx, we return nonzero if its addressing mode requires a
870 32 bit instruction. */
873 effective_address_32bit_p (rtx op, enum machine_mode mode)
875 HOST_WIDE_INT offset;
/* NOTE(review): the MODE argument is overwritten with the MEM's own
   mode here; intervening lines are missing from this excerpt.  */
877 mode = GET_MODE (op);
/* Reg-direct and auto-inc/dec addressing always fit the short form.  */
880 if (GET_CODE (op) != PLUS)
882 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
883 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
887 offset = INTVAL (XEXP (op, 1));
889 /* All byte loads use a 16 bit offset. */
890 if (GET_MODE_SIZE (mode) == 1)
893 if (GET_MODE_SIZE (mode) == 4)
895 /* Frame pointer relative loads can use a negative offset, all others
896 are restricted to a small positive one. */
897 if (XEXP (op, 0) == frame_pointer_rtx)
898 return offset < -128 || offset > 60;
899 return offset < 0 || offset > 60;
902 /* Must be HImode now. */
903 return offset < 0 || offset > 30;
906 /* Return cost of the memory address ADDR.
907 All addressing modes are equally cheap on the Blackfin, so ADDR is
   not examined.  */
910 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
915 /* Subroutine of print_operand; used to print a memory reference X to FILE.
   Handles PLUS, pre-decrement ("--[x]"-style), post-increment ("[x]++")
   and post-decrement forms; anything else falls through to
   print_operand.  */
918 print_address_operand (FILE *file, rtx x)
920 switch (GET_CODE (x))
923 output_address (XEXP (x, 0));
925 output_address (XEXP (x, 1));
929 fprintf (file, "--");
930 output_address (XEXP (x, 0));
933 output_address (XEXP (x, 0));
934 fprintf (file, "++");
937 output_address (XEXP (x, 0));
938 fprintf (file, "--");
942 gcc_assert (GET_CODE (x) != MEM);
943 print_operand (file, x, 0);
948 /* Adding intp DImode support by Tony
   Print operand X to FILE, modified by the letter CODE.
   CODE 'j' prints a condition code, 'J' its reverse; register modifiers
   select sub-register names ('d'/'h' halves, 'w'/'x' accumulator parts,
   'D' register pairs, 'H' the high word of a DImode/DFmode pair, 'T'
   byte registers); integer modifiers transform the constant ('X'/'Y'
   log2 of mask / inverted mask, 'Z' the LINK-insn encoding).  */
954 print_operand (FILE *file, rtx x, char code)
956 enum machine_mode mode = GET_MODE (x);
961 switch (GET_CODE (x))
967 fprintf (file, "ne");
976 fprintf (file, "ge");
979 fprintf (file, "le");
988 fprintf (file, "ge");
991 fprintf (file, "le");
994 output_operand_lossage ("invalid %%j value");
998 case 'J': /* reverse logic */
1002 fprintf (file, "ne");
1005 fprintf (file, "e");
1008 fprintf (file, "le");
1011 fprintf (file, "ge");
1014 fprintf (file, "l");
1017 fprintf (file, "g");
1020 fprintf (file, "le");
1023 fprintf (file, "ge");
1026 fprintf (file, "l");
1029 fprintf (file, "g");
1032 output_operand_lossage ("invalid %%J value");
1037 switch (GET_CODE (x))
1042 gcc_assert (REGNO (x) < 32);
1043 fprintf (file, "%s", short_reg_names[REGNO (x)]);
1044 /*fprintf (file, "\n%d\n ", REGNO (x));*/
1047 else if (code == 'd')
1049 gcc_assert (REGNO (x) < 32);
1050 fprintf (file, "%s", high_reg_names[REGNO (x)]);
1053 else if (code == 'w')
1055 gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
1056 fprintf (file, "%s.w", reg_names[REGNO (x)]);
1058 else if (code == 'x')
1060 gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
1061 fprintf (file, "%s.x", reg_names[REGNO (x)]);
1063 else if (code == 'D')
1065 fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
1067 else if (code == 'H')
/* 'H': the second register of a 64-bit value.  */
1069 gcc_assert (mode == DImode || mode == DFmode);
1070 gcc_assert (REG_P (x));
1071 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
1073 else if (code == 'T')
1075 gcc_assert (D_REGNO_P (REGNO (x)));
1076 fprintf (file, "%s", byte_reg_names[REGNO (x)]);
1079 fprintf (file, "%s", reg_names[REGNO (x)]);
1085 print_address_operand (file, x);
1090 /* Moves to half registers with d or h modifiers always use unsigned
1093 x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
1094 else if (code == 'h')
1095 x = GEN_INT (INTVAL (x) & 0xffff);
1096 else if (code == 'X')
1097 x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
1098 else if (code == 'Y')
1099 x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
1100 else if (code == 'Z')
1101 /* Used for LINK insns. */
1102 x = GEN_INT (-8 - INTVAL (x));
1107 output_addr_const (file, x);
1108 if (code == 'G' && flag_pic)
1109 fprintf (file, "@GOT");
1113 output_operand_lossage ("invalid const_double operand");
/* UNSPECs: PIC symbol references and the shared-library offset symbol.  */
1117 switch (XINT (x, 1))
1119 case UNSPEC_MOVE_PIC:
1120 output_addr_const (file, XVECEXP (x, 0, 0));
1121 fprintf (file, "@GOT");
1124 case UNSPEC_LIBRARY_OFFSET:
1125 fprintf (file, "_current_shared_library_p5_offset_");
1134 output_addr_const (file, x);
1139 /* Argument support functions. */
1141 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1142 for a call to a function whose data type is FNTYPE.
1143 For a library call, FNTYPE is 0.
1144 VDSP C Compiler manual, our ABI says that
1145 first 3 words of arguments will use R0, R1 and R2. */
1149 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1150 rtx libname ATTRIBUTE_UNUSED)
1152 static CUMULATIVE_ARGS zero_cum;
1156 /* Set up the number of registers to use for passing arguments. */
1158 cum->nregs = max_arg_registers;
1159 cum->arg_regs = arg_regs;
/* The call cookie records shortcall/longcall attributes so the call
   patterns can pick the right branch form.  */
1161 cum->call_cookie = CALL_NORMAL;
1162 /* Check for a longcall attribute. */
1163 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1164 cum->call_cookie |= CALL_SHORT;
1165 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1166 cum->call_cookie |= CALL_LONG;
1171 /* Update the data in CUM to advance over an argument
1172 of mode MODE and data type TYPE.
1173 (TYPE is null for libcalls where that information may not be available.) */
1176 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1177 int named ATTRIBUTE_UNUSED)
1179 int count, bytes, words;
1181 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1182 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1184 cum->words += words;
1185 cum->nregs -= words;
/* Once the registers are exhausted, later args go on the stack.  */
1187 if (cum->nregs <= 0)
1190 cum->arg_regs = NULL;
/* Otherwise advance the register cursor one entry per word consumed.  */
1194 for (count = 1; count <= words; count++)
1201 /* Define where to put the arguments to a function.
1202 Value is zero to push the argument on the stack,
1203 or a hard register in which to store the argument.
1205 MODE is the argument's machine mode.
1206 TYPE is the data type of the argument (as a tree).
1207 This is null for libcalls where that information may
1209 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1210 the preceding args and about the function being called.
1211 NAMED is nonzero if this argument is a named parameter
1212 (otherwise it is an extra parameter matching an ellipsis). */
1215 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1216 int named ATTRIBUTE_UNUSED)
1219 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1221 if (mode == VOIDmode)
1222 /* Compute operand 2 of the call insn. */
1223 return GEN_INT (cum->call_cookie);
/* Pass in the next available argument register, if any.  */
1229 return gen_rtx_REG (mode, *(cum->arg_regs));
1234 /* For an arg passed partly in registers and partly in memory,
1235 this is the number of bytes passed in registers.
1236 For args passed entirely in registers or entirely in memory, zero.
1238 Refer VDSP C Compiler manual, our ABI.
1239 First 3 words are in registers. So, if an argument is larger
1240 than the registers available, it will span the register and
1244 bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1245 tree type ATTRIBUTE_UNUSED,
1246 bool named ATTRIBUTE_UNUSED)
1249 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1250 int bytes_left = cum->nregs * UNITS_PER_WORD;
/* Entirely in memory (no regs left) or split register/memory.  */
1255 if (bytes_left == 0)
1257 if (bytes > bytes_left)
1262 /* Variable sized types are passed by reference.
   Returns true when TYPE's size is not a compile-time constant.  */
1265 bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1266 enum machine_mode mode ATTRIBUTE_UNUSED,
1267 tree type, bool named ATTRIBUTE_UNUSED)
1269 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1272 /* Decide whether a type should be returned in memory (true)
1273 or in a register (false). This is called by the macro
1274 RETURN_IN_MEMORY. */
1277 bfin_return_in_memory (tree type)
1280 enum machine_mode mode = TYPE_MODE (type);
/* BLKmode values are sized explicitly; the decision on SIZE falls
   outside this excerpt.  */
1282 if (mode == BLKmode)
1284 size = int_size_in_bytes (type);
1289 /* Register in which address to store a structure value
1290 is passed to a function. On the Blackfin this is always P0.  */
1292 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1293 int incoming ATTRIBUTE_UNUSED)
1295 return gen_rtx_REG (Pmode, REG_P0);
1298 /* Return true when register may be used to pass function parameters. */
1301 function_arg_regno_p (int n)
/* Linear scan of the -1-terminated arg_regs array.  */
1304 for (i = 0; arg_regs[i] != -1; i++)
1305 if (n == arg_regs[i])
1310 /* Returns 1 if OP contains a symbol reference.
   Recurses over the RTX structure looking for SYMBOL_REF or LABEL_REF
   anywhere inside OP.  */
1313 symbolic_reference_mentioned_p (rtx op)
1315 register const char *fmt;
1318 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
/* Walk each operand; 'E' operands are vectors, 'e' are sub-rtxes.  */
1321 fmt = GET_RTX_FORMAT (GET_CODE (op));
1322 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1328 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1329 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1333 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1340 /* Decide whether we can make a sibling call to a function. DECL is the
1341 declaration of the function being targeted by the call and EXP is the
1342 CALL_EXPR representing the call.
   Both parameters are unused; the body falls outside this excerpt.  */
1345 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1346 tree exp ATTRIBUTE_UNUSED)
1351 /* Emit RTL insns to initialize the variable parts of a trampoline at
1352 TRAMP. FNADDR is an RTX for the address of the function's pure
1353 code. CXT is an RTX for the static chain value for the function.
   Each 32-bit value is stored as two 16-bit halves (low half first,
   then the high half obtained by shifting right 16).
   NOTE(review): old-style (K&R) definition — consider converting to a
   prototype-style definition for consistency with the rest of the file.  */
1356 initialize_trampoline (tramp, fnaddr, cxt)
1357 rtx tramp, fnaddr, cxt;
1359 rtx t1 = copy_to_reg (fnaddr);
1360 rtx t2 = copy_to_reg (cxt);
1363 addr = memory_address (Pmode, plus_constant (tramp, 2));
1364 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1365 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1366 addr = memory_address (Pmode, plus_constant (tramp, 6));
1367 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1369 addr = memory_address (Pmode, plus_constant (tramp, 10));
1370 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1371 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1372 addr = memory_address (Pmode, plus_constant (tramp, 14));
1373 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1376 /* Legitimize PIC addresses. If the address is already position-independent,
1377 we return ORIG. Newly generated position-independent addresses go into a
1378 reg. This is REG if nonzero, otherwise we allocate register(s) as
1382 legitimize_pic_address (rtx orig, rtx reg)
1387 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
1389 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
1395 gcc_assert (!no_new_pseudos);
1396 reg = gen_reg_rtx (Pmode);
/* Constant-pool case: materialize the address in two halves and add
   the PIC base, then load through the result.  */
1401 emit_insn (gen_movsi_high_pic (reg, addr));
1402 emit_insn (gen_movsi_low_pic (reg, reg, addr));
1403 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1404 new = gen_rtx_MEM (Pmode, reg);
/* Otherwise load the address from the GOT slot.  */
1408 rtx tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
1410 new = gen_rtx_MEM (Pmode,
1411 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
1414 emit_move_insn (reg, new);
1416 current_function_uses_pic_offset_table = 1;
/* CONST/PLUS: legitimize both halves recursively, then recombine.  */
1420 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
1424 if (GET_CODE (addr) == CONST)
1426 addr = XEXP (addr, 0);
1427 gcc_assert (GET_CODE (addr) == PLUS);
1430 if (XEXP (addr, 0) == pic_offset_table_rtx)
1435 gcc_assert (!no_new_pseudos);
1436 reg = gen_reg_rtx (Pmode);
1439 base = legitimize_pic_address (XEXP (addr, 0), reg);
1440 addr = legitimize_pic_address (XEXP (addr, 1),
1441 base == reg ? NULL_RTX : reg);
1443 if (GET_CODE (addr) == CONST_INT)
1445 gcc_assert (! reload_in_progress && ! reload_completed)
1446 addr = force_reg (Pmode, addr);
/* Re-associate so any constant term ends up outermost.  */
1449 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
1451 base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
1452 addr = XEXP (addr, 1);
1455 return gen_rtx_PLUS (Pmode, base, addr);
1461 /* Emit insns to move operands[1] into operands[0]. */
1464 emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
/* During reload we may not create new pseudos, so reuse the destination
   as the scratch for address legitimization.  */
1466 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
/* A symbolic source cannot be stored straight to memory; load it into a
   register first.  */
1468 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1469 operands[1] = force_reg (SImode, operands[1]);
1471 operands[1] = legitimize_pic_address (operands[1], temp);
1474 /* Expand a move operation in mode MODE. The operands are in OPERANDS. */
1477 expand_move (rtx *operands, enum machine_mode mode)
/* Symbolic constants need PIC legitimization when generating PIC code.  */
1479 if (flag_pic && SYMBOLIC_CONST (operands[1]))
1480 emit_pic_move (operands, mode);
1482 /* Don't generate memory->memory or constant->memory moves, go through a
/* Only safe before reload; reload must not see new pseudos.  */
1484 else if ((reload_in_progress | reload_completed) == 0
1485 && GET_CODE (operands[0]) == MEM
1486 && GET_CODE (operands[1]) != REG)
1487 operands[1] = force_reg (mode, operands[1]);
1490 /* Split one or more DImode RTL references into pairs of SImode
1491 references. The RTL can be REG, offsettable MEM, integer constant, or
1492 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1493 split and "num" is its length. lo_half and hi_half are output arrays
1494 that parallel "operands". */
1497 split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1501 rtx op = operands[num];
1503 /* simplify_subreg refuse to split volatile memory addresses,
1504 but we still have to handle it. */
1505 if (GET_CODE (op) == MEM)
/* MEM: low word at offset 0, high word at offset 4 (little-endian).  */
1507 lo_half[num] = adjust_address (op, SImode, 0);
1508 hi_half[num] = adjust_address (op, SImode, 4);
/* Non-MEM: let simplify_gen_subreg do the split; constants have
   VOIDmode and are treated as DImode.  */
1512 lo_half[num] = simplify_gen_subreg (SImode, op,
1513 GET_MODE (op) == VOIDmode
1514 ? DImode : GET_MODE (op), 0);
1515 hi_half[num] = simplify_gen_subreg (SImode, op,
1516 GET_MODE (op) == VOIDmode
1517 ? DImode : GET_MODE (op), 4);
/* Return nonzero if a call to symbol OP must use a long-call sequence.
   Per-call attributes in CALL_COOKIE override -mlong-calls.
   NOTE(review): the return statements are elided in this excerpt.  */
1523 bfin_longcall_p (rtx op, int call_cookie)
1525 gcc_assert (GET_CODE (op) == SYMBOL_REF);
1526 if (call_cookie & CALL_SHORT)
1528 if (call_cookie & CALL_LONG)
1530 if (TARGET_LONG_CALLS)
1535 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
1536 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
1537 SIBCALL is nonzero if this is a sibling call. */
1540 bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
1542 rtx use = NULL, call;
1543 rtx callee = XEXP (fnaddr, 0);
/* Sibcalls need a third vector slot for the RETURN rtx.  */
1544 rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (sibcall ? 3 : 2));
1546 /* In an untyped call, we can get NULL for operand 2. */
1547 if (cookie == NULL_RTX)
1548 cookie = const0_rtx;
1550 /* Static functions and indirect calls don't need the pic register. */
1552 && GET_CODE (callee) == SYMBOL_REF
1553 && !SYMBOL_REF_LOCAL_P (callee))
1554 use_reg (&use, pic_offset_table_rtx)
1556 if ((!register_no_elim_operand (callee, Pmode)
1557 && GET_CODE (callee) != SYMBOL_REF)
1558 || (GET_CODE (callee) == SYMBOL_REF
1560 || bfin_longcall_p (callee, INTVAL (cookie))))
/* Target is not directly callable: move the address into a register and
   call through it.  */
1562 callee = copy_to_mode_reg (Pmode, callee);
1563 fnaddr = gen_rtx_MEM (Pmode, callee);
1565 call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
/* Wrap in a SET when the call produces a value.  */
1568 call = gen_rtx_SET (VOIDmode, retval, call);
1570 XVECEXP (pat, 0, 0) = call;
1571 XVECEXP (pat, 0, 1) = gen_rtx_USE (VOIDmode, cookie);
1573 XVECEXP (pat, 0, 2) = gen_rtx_RETURN (VOIDmode);
1574 call = emit_call_insn (pat);
1576 CALL_INSN_FUNCTION_USAGE (call) = use;
1579 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
1582 hard_regno_mode_ok (int regno, enum machine_mode mode)
1584 /* Allow only dregs to store value of mode HI or QI */
1585 enum reg_class class = REGNO_REG_CLASS (regno);
/* V2HI vectors live only in data registers.  */
1590 if (mode == V2HImode)
1591 return D_REGNO_P (regno);
/* The CC flag register holds only BImode.  */
1592 if (class == CCREGS)
1593 return mode == BImode;
/* PDImode (40-bit) fits only the accumulators A0/A1.  */
1594 if (mode == PDImode)
1595 return regno == REG_A0 || regno == REG_A1;
1597 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
1600 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
1603 /* Implements target hook vector_mode_supported_p. */
1606 bfin_vector_mode_supported_p (enum machine_mode mode)
1608 return mode == V2HImode;
1611 /* Return the cost of moving data from a register in class CLASS1 to
1612 one in class CLASS2. A cost of 2 is the default. */
1615 bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1616 enum reg_class class1, enum reg_class class2)
1618 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
1622 /* There are some stalls involved when moving from a DREG to a different
1623 class reg, and using the value in one of the following instructions.
1624 Attempt to model this by slightly discouraging such moves. */
1625 if (class1 == DREGS && class2 != DREGS)
1631 /* Return the cost of moving data of mode M between a
1632 register and memory. A value of 2 is the default; this cost is
1633 relative to those in `REGISTER_MOVE_COST'.
1635 ??? In theory L1 memory has single-cycle latency. We should add a switch
1636 that tells the compiler whether we expect to use only L1 memory for the
1637 program; it'll make the costs more accurate. */
1640 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1641 enum reg_class class,
1642 int in ATTRIBUTE_UNUSED)
1644 /* Make memory accesses slightly more expensive than any register-register
1645 move. Also, penalize non-DP registers, since they need secondary
1646 reloads to load and store. */
1647 if (! reg_class_subset_p (class, DPREGS))
1653 /* Inform reload about cases where moving X with a mode MODE to a register in
1654 CLASS requires an extra scratch register. Return the class needed for the
1655 scratch register. */
1658 secondary_input_reload_class (enum reg_class class, enum machine_mode mode,
1661 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
1662 in most other cases we can also use PREGS. */
1663 enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
1664 enum reg_class x_class = NO_REGS;
1665 enum rtx_code code = GET_CODE (x);
/* Look through a SUBREG to classify the underlying register.  */
1668 x = SUBREG_REG (x), code = GET_CODE (x);
1671 int regno = REGNO (x);
/* Map a pseudo to its assigned hard register, if any.  */
1672 if (regno >= FIRST_PSEUDO_REGISTER)
1673 regno = reg_renumber[regno];
1678 x_class = REGNO_REG_CLASS (regno);
1681 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
1682 This happens as a side effect of register elimination, and we need
1683 a scratch register to do it. */
1684 if (fp_plus_const_operand (x, mode))
1686 rtx op2 = XEXP (x, 1);
1687 int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));
1689 if (class == PREGS || class == PREGS_CLOBBERED)
1691 /* If destination is a DREG, we can do this without a scratch register
1692 if the constant is valid for an add instruction. */
1693 if (class == DREGS || class == DPREGS)
1694 return large_constant_p ? PREGS : NO_REGS;
1695 /* Reloading to anything other than a DREG? Use a PREG scratch
1700 /* Data can usually be moved freely between registers of most classes.
1701 AREGS are an exception; they can only move to or from another register
1702 in AREGS or one in DREGS. They can also be assigned the constant 0. */
1703 if (x_class == AREGS)
1704 return class == DREGS || class == AREGS ? NO_REGS : DREGS;
1708 if (x != const0_rtx && x_class != DREGS)
1714 /* CCREGS can only be moved from/to DREGS. */
1715 if (class == CCREGS && x_class != DREGS)
1717 if (x_class == CCREGS && class != DREGS)
1719 /* All registers other than AREGS can load arbitrary constants. The only
1720 case that remains is MEM. */
1722 if (! reg_class_subset_p (class, default_class))
1723 return default_class;
1727 /* Like secondary_input_reload_class; and all we do is call that function. */
1730 secondary_output_reload_class (enum reg_class class, enum machine_mode mode,
1733 return secondary_input_reload_class (class, mode, x);
1736 /* Implement TARGET_HANDLE_OPTION. */
1739 bfin_handle_option (size_t code, const char *arg, int value)
1743 case OPT_mshared_library_id_:
/* Reject out-of-range library ids; remember that the option was given
   so override_options can diagnose a missing -mid-shared-library.  */
1744 if (value > MAX_LIBRARY_ID)
1745 error ("-mshared-library-id=%s is not between 0 and %d",
1746 arg, MAX_LIBRARY_ID);
1747 bfin_lib_id_given = 1;
1755 /* Implement the macro OVERRIDE_OPTIONS. */
1758 override_options (void)
1760 if (TARGET_OMIT_LEAF_FRAME_POINTER)
1761 flag_omit_frame_pointer = 1;
1763 /* Library identification */
1764 if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
1765 error ("-mshared-library-id= specified without -mid-shared-library");
1767 if (TARGET_ID_SHARED_LIBRARY)
1768 /* ??? Provide a way to use a bigger GOT. */
/* NOTE(review): scheduling is disabled unconditionally here; the reason
   is not visible in this excerpt.  */
1771 flag_schedule_insns = 0;
1774 /* Return the destination address of BRANCH.
1775 We need to use this instead of get_attr_length, because the
1776 cbranch_with_nops pattern conservatively sets its length to 6, and
1777 we still prefer to use shorter sequences. */
1780 branch_dest (rtx branch)
1784 rtx pat = PATTERN (branch);
/* cbranch patterns may be PARALLELs; the branch SET is element 0.  */
1785 if (GET_CODE (pat) == PARALLEL)
1786 pat = XVECEXP (pat, 0, 0);
1787 dest = SET_SRC (pat);
/* For a conditional jump, the taken arm holds the label ref.  */
1788 if (GET_CODE (dest) == IF_THEN_ELSE)
1789 dest = XEXP (dest, 1);
1790 dest = XEXP (dest, 0);
1791 dest_uid = INSN_UID (dest);
1792 return INSN_ADDRESSES (dest_uid);
1795 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
1796 it's a branch that's predicted taken. */
1799 cbranch_predicted_taken_p (rtx insn)
1801 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
1805 int pred_val = INTVAL (XEXP (x, 0));
/* Taken when the recorded probability is at least one half.  */
1807 return pred_val >= REG_BR_PROB_BASE / 2;
1813 /* Templates for use by asm_conditional_branch. */
/* Indexed by [bp<<1 | BRF/BRT][length class 0/1/2]; the longer variants
   invert the condition and jump over an unconditional jump.s/jump.l.  */
1815 static const char *ccbranch_templates[][3] = {
1816 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
1817 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
1818 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
1819 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
1822 /* Output INSN, which is a conditional branch instruction with operands
1825 We deal with the various forms of conditional branches that can be generated
1826 by bfin_reorg to prevent the hardware from doing speculative loads, by
1827 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
1828 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
1829 Either of these is only necessary if the branch is short, otherwise the
1830 template we use ends in an unconditional jump which flushes the pipeline
1834 asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
1836 int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
1837 /* Note : offset for instructions like if cc jmp; jump.[sl] offset
1838 is to be taken from start of if cc rather than jump.
1839 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
/* Length class: 0 = short cbranch, 1 = jump.s sequence, 2 = jump.l.  */
1841 int len = (offset >= -1024 && offset <= 1022 ? 0
1842 : offset >= -4094 && offset <= 4096 ? 1
1844 int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
/* BRF/BRT selects the branch-on-false vs branch-on-true template row.  */
1845 int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
1846 output_asm_insn (ccbranch_templates[idx][len], operands);
/* Nops are only needed for predicted-not-taken branches.  */
1847 gcc_assert (n_nops == 0 || !bp);
1849 while (n_nops-- > 0)
1850 output_asm_insn ("nop;", NULL);
1853 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
1854 stored in bfin_compare_op0 and bfin_compare_op1 already. */
1857 bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
1859 enum rtx_code code1, code2;
1860 rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
1861 rtx tem = bfin_cc_rtx;
1862 enum rtx_code code = GET_CODE (cmp);
1864 /* If we have a BImode input, then we already have a compare result, and
1865 do not need to emit another comparison. */
1866 if (GET_MODE (op0) == BImode)
1868 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
1869 tem = op0, code2 = code;
1874 /* bfin has these conditions */
/* Unsupported condition: emit the reversed compare into CC and test the
   opposite way.  NOTE(review): the case analysis selecting code1/code2 is
   elided in this excerpt.  */
1884 code1 = reverse_condition (code);
1888 emit_insn (gen_rtx_SET (BImode, tem,
1889 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
1892 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
1895 /* Return nonzero iff C has exactly one bit set if it is interpreted
1896 as a 32 bit constant. */
1899 log2constp (unsigned HOST_WIDE_INT c)
/* Power-of-two test: clearing the lowest set bit leaves zero.  */
1902 return c != 0 && (c & (c-1)) == 0;
1905 /* Returns the number of consecutive least significant zeros in the binary
1906 representation of *V.
1907 We modify *V to contain the original value arithmetically shifted right by
1908 the number of zeroes. */
1911 shiftr_zero (HOST_WIDE_INT *v)
1913 unsigned HOST_WIDE_INT tmp = *v;
1914 unsigned HOST_WIDE_INT sgn;
/* Preserve the sign bit so the shift below is arithmetic, not logical.  */
1920 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
1921 while ((tmp & 0x1) == 0 && n <= 32)
1923 tmp = (tmp >> 1) | sgn;
1930 /* After reload, split the load of an immediate constant. OPERANDS are the
1931 operands of the movsi_insn pattern which we are splitting. We return
1932 nonzero if we emitted a sequence to load the constant, zero if we emitted
1933 nothing because we want to use the splitter's default sequence. */
1936 split_load_immediate (rtx operands[])
1938 HOST_WIDE_INT val = INTVAL (operands[1]);
1940 HOST_WIDE_INT shifted = val;
1941 HOST_WIDE_INT shifted_compl = ~val;
/* Strip trailing zeros from VAL and ~VAL to find shift-based sequences.  */
1942 int num_zero = shiftr_zero (&shifted);
1943 int num_compl_zero = shiftr_zero (&shifted_compl);
1944 unsigned int regno = REGNO (operands[0]);
1945 enum reg_class class1 = REGNO_REG_CLASS (regno);
1947 /* This case takes care of single-bit set/clear constants, which we could
1948 also implement with BITSET/BITCLR. */
1950 && shifted >= -32768 && shifted < 65536
1951 && (D_REGNO_P (regno)
1952 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2))
/* Load the small shifted value, then shift it back into place.  */
1954 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
1955 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
/* Sign-extend the low 16 bits of VAL into TMP.  */
1960 tmp |= -(tmp & 0x8000);
1962 /* If high word has one bit set or clear, try to use a bit operation. */
1963 if (D_REGNO_P (regno))
1965 if (log2constp (val & 0xFFFF0000))
1967 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
1968 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
1971 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
1973 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
1974 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
1978 if (D_REGNO_P (regno))
/* Small low half: load it, then patch in the high half.  */
1980 if (CONST_7BIT_IMM_P (tmp))
1982 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
1983 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
/* High half zero: clear, then set the low half.  */
1987 if ((val & 0xFFFF0000) == 0)
1989 emit_insn (gen_movsi (operands[0], const0_rtx));
1990 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
/* High half all ones: load -1, then set the low half.  */
1994 if ((val & 0xFFFF0000) == 0xFFFF0000)
1996 emit_insn (gen_movsi (operands[0], constm1_rtx));
1997 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2002 /* Need DREGs for the remaining case. */
2007 && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
2009 /* If optimizing for size, generate a sequence that has more instructions
/* Build the complemented value, shift, then invert.  */
2011 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2012 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2013 GEN_INT (num_compl_zero)));
2014 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2020 /* Return true if the legitimate memory address for a memory operand of mode
2021 MODE. Return false if not. */
2024 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2026 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2027 int sz = GET_MODE_SIZE (mode);
/* Offsets are scaled by the access size: byte/half/word.  */
2028 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2029 /* The usual offsettable_memref machinery doesn't work so well for this
2030 port, so we deal with the problem here. */
/* For 8-byte accesses reserve room for the +4 of the second word.  */
2031 unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
2032 return (v & ~(mask << shift)) == 0;
/* Return nonzero if REGNO may serve as an address base register;
   STRICT selects the strict/non-strict reload check.  */
2036 bfin_valid_reg_p (unsigned int regno, int strict)
2038 return ((strict && REGNO_OK_FOR_BASE_STRICT_P (regno))
2039 || (!strict && REGNO_OK_FOR_BASE_NONSTRICT_P (regno)));
/* Return nonzero if X is a legitimate address for mode MODE.
   NOTE(review): the case labels and return statements between the
   conditions are elided in this excerpt.  */
2043 bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2045 switch (GET_CODE (x)) {
/* Plain base register.  */
2047 if (bfin_valid_reg_p (REGNO (x), strict))
/* Base register plus either a pic UNSPEC or an in-range constant.  */
2051 if (REG_P (XEXP (x, 0))
2052 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict)
2053 && (GET_CODE (XEXP (x, 1)) == UNSPEC
2054 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2055 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
/* Auto-increment addressing, only for modes that support it.  */
2060 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2061 && REG_P (XEXP (x, 0))
2062 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
/* Auto-decrement is restricted to the stack pointer.  */
2065 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2066 && XEXP (x, 0) == stack_pointer_rtx
2067 && REG_P (XEXP (x, 0))
2068 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
/* Implement TARGET_RTX_COSTS: estimate the cost of expression X appearing
   in context OUTER_CODE, storing the result in *TOTAL.
   NOTE(review): the switch statement and several case labels are elided
   in this excerpt.  */
2078 bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
2080 int cost2 = COSTS_N_INSNS (1);
/* Constants that fold into the surrounding operation are free.  */
2085 if (outer_code == SET || outer_code == PLUS)
2086 *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
2087 else if (outer_code == AND)
2088 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2089 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2090 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2091 else if (outer_code == LEU || outer_code == LTU)
2092 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2093 else if (outer_code == MULT)
2094 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2095 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2097 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2098 || outer_code == LSHIFTRT)
2099 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2100 else if (outer_code == IOR || outer_code == XOR)
2101 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2110 *total = COSTS_N_INSNS (2);
/* Scaled-index addition (x << 1 or << 2) is as cheap as a plain add.  */
2114 if (GET_MODE (x) == Pmode)
2116 if (GET_CODE (XEXP (x, 0)) == MULT
2117 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2119 HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
2120 if (val == 2 || val == 4)
2123 *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
2124 *total += rtx_cost (XEXP (x, 1), outer_code);
/* DImode operations take an extra insn per word.  */
2136 if (GET_MODE (x) == DImode)
2143 if (GET_MODE (x) == DImode)
2148 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
2149 *total = COSTS_N_INSNS (3);
/* Implement TARGET_ASM_INTERNAL_LABEL: print a local label of the form
   PREFIX$NUM to STREAM.  */
2158 bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2160 fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
2163 /* Used for communication between {push,pop}_multiple_operation (which
2164 we use not only as a predicate) and the corresponding output functions. */
2165 static int first_preg_to_save, first_dreg_to_save;
/* Predicate/analyzer for a multi-register push PARALLEL.  Validates that
   the SETs store consecutive D/P registers at decreasing SP offsets, and
   records the first saved register of each class in the statics above.
   NOTE(review): several return statements and braces are elided here.  */
2168 push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2170 int lastdreg = 8, lastpreg = 6;
2173 first_preg_to_save = lastpreg;
2174 first_dreg_to_save = lastdreg;
2175 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
2177 rtx t = XVECEXP (op, 0, i);
2181 if (GET_CODE (t) != SET)
2185 dest = SET_DEST (t);
/* Each element must be a store of a register to SP + constant.  */
2186 if (GET_CODE (dest) != MEM || ! REG_P (src))
2188 dest = XEXP (dest, 0);
2189 if (GET_CODE (dest) != PLUS
2190 || ! REG_P (XEXP (dest, 0))
2191 || REGNO (XEXP (dest, 0)) != REG_SP
2192 || GET_CODE (XEXP (dest, 1)) != CONST_INT
2193 || INTVAL (XEXP (dest, 1)) != -i * 4)
2196 regno = REGNO (src);
/* First element decides whether we start in the D-reg or P-reg group.  */
2199 if (D_REGNO_P (regno))
2202 first_dreg_to_save = lastdreg = regno - REG_R0;
2204 else if (regno >= REG_P0 && regno <= REG_P7)
2207 first_preg_to_save = lastpreg = regno - REG_P0;
/* Within the D-reg group, either switch to P-regs or continue the
   consecutive D-reg run.  */
2217 if (regno >= REG_P0 && regno <= REG_P7)
2220 first_preg_to_save = lastpreg = regno - REG_P0;
2222 else if (regno != REG_R0 + lastdreg + 1)
2227 else if (group == 2)
2229 if (regno != REG_P0 + lastpreg + 1)
/* Predicate/analyzer for a multi-register pop PARALLEL, mirroring
   push_multiple_operation: loads of consecutive registers from increasing
   SP offsets.  Records the first restored D/P registers in the statics.
   NOTE(review): several return statements and braces are elided here.  */
2238 pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2240 int lastdreg = 8, lastpreg = 6;
2243 for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
2245 rtx t = XVECEXP (op, 0, i);
2249 if (GET_CODE (t) != SET)
2253 dest = SET_DEST (t);
2254 if (GET_CODE (src) != MEM || ! REG_P (dest))
2256 src = XEXP (src, 0);
/* First load comes straight from (mem SP); the rest from SP + offset.  */
2260 if (! REG_P (src) || REGNO (src) != REG_SP)
2263 else if (GET_CODE (src) != PLUS
2264 || ! REG_P (XEXP (src, 0))
2265 || REGNO (XEXP (src, 0)) != REG_SP
2266 || GET_CODE (XEXP (src, 1)) != CONST_INT
2267 || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
2270 regno = REGNO (dest)
2273 if (regno == REG_R7)
/* P-regs are restored in descending order; check consecutiveness.  */
2278 else if (regno != REG_P0 + lastpreg - 1)
2283 else if (group == 1)
2285 if (regno != REG_R0 + lastdreg - 1)
2291 first_dreg_to_save = lastdreg;
2292 first_preg_to_save = lastpreg;
2296 /* Emit assembly code for one multi-register push described by INSN, with
2297 operands in OPERANDS. */
2300 output_push_multiple (rtx insn, rtx *operands)
2305 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2306 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
/* first_dreg_to_save == 8 / first_preg_to_save == 6 mean "none of that
   class is saved"; emit the one- or two-range form accordingly.  */
2309 if (first_dreg_to_save == 8)
2310 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2311 else if (first_preg_to_save == 6)
2312 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2314 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2315 first_dreg_to_save, first_preg_to_save);
2317 output_asm_insn (buf, operands);
2320 /* Emit assembly code for one multi-register pop described by INSN, with
2321 operands in OPERANDS. */
2324 output_pop_multiple (rtx insn, rtx *operands)
2329 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2330 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
/* Same sentinel convention as output_push_multiple.  */
2333 if (first_dreg_to_save == 8)
2334 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2335 else if (first_preg_to_save == 6)
2336 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2338 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2339 first_dreg_to_save, first_preg_to_save);
2341 output_asm_insn (buf, operands);
2344 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2347 single_move_for_strmov (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
/* Go through a scratch register: memory->memory moves are not allowed.  */
2349 rtx scratch = gen_reg_rtx (mode);
2352 srcmem = adjust_address_nv (src, mode, offset);
2353 dstmem = adjust_address_nv (dst, mode, offset);
2354 emit_move_insn (scratch, srcmem);
2355 emit_move_insn (dstmem, scratch);
2358 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
2359 alignment ALIGN_EXP. Return true if successful, false if we should fall
2360 back on a different method. */
2363 bfin_expand_strmov (rtx dst, rtx src, rtx count_exp, rtx align_exp)
2365 rtx srcreg, destreg, countreg;
2366 HOST_WIDE_INT align = 0;
2367 unsigned HOST_WIDE_INT count = 0;
2369 if (GET_CODE (align_exp) == CONST_INT)
2370 align = INTVAL (align_exp);
2371 if (GET_CODE (count_exp) == CONST_INT)
2373 count = INTVAL (count_exp);
/* Large copies are only inlined when explicitly requested.  */
2375 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
2380 /* If optimizing for size, only do single copies inline. */
2383 if (count == 2 && align < 2)
2385 if (count == 4 && align < 4)
2387 if (count != 1 && count != 2 && count != 4)
2390 if (align < 2 && count != 1)
/* Force both addresses into registers so we can use rep moves.  */
2393 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
2394 if (destreg != XEXP (dst, 0))
2395 dst = replace_equiv_address_nv (dst, destreg);
2396 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
2397 if (srcreg != XEXP (src, 0))
2398 src = replace_equiv_address_nv (src, srcreg);
2400 if (count != 0 && align >= 2)
2402 unsigned HOST_WIDE_INT offset = 0;
/* Exactly one word: a single SImode move beats a loop.  */
2406 if ((count & ~3) == 4)
2408 single_move_for_strmov (dst, src, SImode, offset);
2411 else if (count & ~3)
/* Word-copy loop; the loop counter is iterations minus one.  */
2413 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
2414 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2416 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
/* Then the remaining halfword, if any.  */
2421 if ((count & ~1) == 2)
2423 single_move_for_strmov (dst, src, HImode, offset);
2426 else if (count & ~1)
2428 HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
2429 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2431 emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
2436 single_move_for_strmov (dst, src, HImode, offset);
/* Finally the trailing byte, if the count is odd.  */
2441 single_move_for_strmov (dst, src, QImode, offset);
/* Implement TARGET_SCHED_ADJUST_COST: tweak the scheduler's latency COST
   for the dependence LINK between DEP_INSN and INSN.  */
2450 bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
2452 enum attr_type insn_type, dep_insn_type;
2453 int dep_insn_code_number;
2455 /* Anti and output dependencies have zero cost. */
2456 if (REG_NOTE_KIND (link) != 0)
2459 dep_insn_code_number = recog_memoized (dep_insn);
2461 /* If we can't recognize the insns, we can't really do anything. */
2462 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
2465 insn_type = get_attr_type (insn);
2466 dep_insn_type = get_attr_type (dep_insn);
/* Moves/loads into address registers from data registers stall the
   consumer; model the extra latency.  */
2468 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
2470 rtx pat = PATTERN (dep_insn);
2471 rtx dest = SET_DEST (pat);
2472 rtx src = SET_SRC (pat);
2473 if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
2475 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
2481 /* We use the machine specific reorg pass for emitting CSYNC instructions
2482 after conditional branches as needed.
2484 The Blackfin is unusual in that a code sequence like
2487 may speculatively perform the load even if the condition isn't true. This
2488 happens for a branch that is predicted not taken, because the pipeline
2489 isn't flushed or stalled, so the early stages of the following instructions,
2490 which perform the memory reference, are allowed to execute before the
2491 jump condition is evaluated.
2492 Therefore, we must insert additional instructions in all places where this
2493 could lead to incorrect behavior. The manual recommends CSYNC, while
2494 VDSP seems to use NOPs (even though its corresponding compiler option is
2497 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
2498 When optimizing for size, we turn the branch into a predicted taken one.
2499 This may be slower due to mispredicts, but saves code size. */
/* NOTE(review): the function signature and many braces/statements of
   bfin_reorg are elided in this excerpt.  */
2504 rtx insn, last_condjump = NULL_RTX;
2505 int cycles_since_jump = INT_MAX;
/* Nothing to do if neither hardware anomaly workaround is enabled.  */
2507 if (! TARGET_SPECLD_ANOMALY || ! TARGET_CSYNC_ANOMALY)
2510 /* First pass: find predicted-false branches; if something after them
2511 needs nops, insert them or change the branch to predict true. */
2512 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2516 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
2519 pat = PATTERN (insn);
2520 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2521 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2522 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
/* Track the most recent predicted-not-taken conditional branch and how
   many cycles of insns have been seen since it.  */
2527 if (any_condjump_p (insn)
2528 && ! cbranch_predicted_taken_p (insn))
2530 last_condjump = insn;
2531 cycles_since_jump = 0;
2534 cycles_since_jump = INT_MAX;
2536 else if (INSN_P (insn))
2538 enum attr_type type = get_attr_type (insn);
2539 int delay_needed = 0;
2540 if (cycles_since_jump < INT_MAX)
2541 cycles_since_jump++;
/* A possibly-trapping load too soon after the branch needs padding.  */
2543 if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
2545 rtx pat = single_set (insn);
2546 if (may_trap_p (SET_SRC (pat)))
2549 else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2552 if (delay_needed > cycles_since_jump)
2556 rtx *op = recog_data.operand;
2558 delay_needed -= cycles_since_jump;
2560 extract_insn (last_condjump);
/* Either flip the branch to predicted-taken ...  */
2563 pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
2565 cycles_since_jump = INT_MAX;
2568 /* Do not adjust cycles_since_jump in this case, so that
2569 we'll increase the number of NOPs for a subsequent insn
/* ... or rewrite it as a cbranch carrying trailing nops.  */
2571 pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
2572 GEN_INT (delay_needed));
2573 PATTERN (last_condjump) = pat;
2574 INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
2578 /* Second pass: for predicted-true branches, see if anything at the
2579 branch destination needs extra nops. */
2580 if (! TARGET_CSYNC_ANOMALY)
2583 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2586 && any_condjump_p (insn)
2587 && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
2588 || cbranch_predicted_taken_p (insn)))
2590 rtx target = JUMP_LABEL (insn);
/* Scan up to three cycles' worth of insns at the branch target.  */
2592 cycles_since_jump = 0;
2593 for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
2597 if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
2600 pat = PATTERN (target);
2601 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2602 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2603 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2606 if (INSN_P (target))
2608 enum attr_type type = get_attr_type (target);
2609 int delay_needed = 0;
2610 if (cycles_since_jump < INT_MAX)
2611 cycles_since_jump++;
2613 if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2616 if (delay_needed > cycles_since_jump)
2618 rtx prev = prev_real_insn (label);
2619 delay_needed -= cycles_since_jump;
2621 fprintf (dump_file, "Adding %d nops after %d\n",
2622 delay_needed, INSN_UID (label));
/* If the preceding insn is a cbranch_with_nops, its own nop count can
   absorb part of the delay we are about to add.  */
2624 && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
2631 "Reducing nops on insn %d.\n",
2634 x = XVECEXP (x, 0, 1);
2635 v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
2636 XVECEXP (x, 0, 0) = GEN_INT (v);
2638 while (delay_needed-- > 0)
2639 emit_insn_after (gen_nop (), label);
2648 /* Handle interrupt_handler, exception_handler and nmi_handler function
2649 attributes; arguments as in struct attribute_spec.handler. */
2652 handle_int_attribute (tree *node, tree name,
2653 tree args ATTRIBUTE_UNUSED,
2654 int flags ATTRIBUTE_UNUSED,
/* Look at the function's type when the attribute is on a FUNCTION_DECL.  */
2658 if (TREE_CODE (x) == FUNCTION_DECL)
2661 if (TREE_CODE (x) != FUNCTION_TYPE)
2663 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2664 IDENTIFIER_POINTER (name));
2665 *no_add_attrs = true;
/* Only one of the handler-kind attributes may be applied.  */
2667 else if (funkind (x) != SUBROUTINE)
2668 error ("multiple function type attributes specified");
2673 /* Return 0 if the attributes for two types are incompatible, 1 if they
2674 are compatible, and 2 if they are nearly compatible (which causes a
2675 warning to be generated). */
2678 bfin_comp_type_attributes (tree type1, tree type2)
2680 e_funkind kind1, kind2;
2682 if (TREE_CODE (type1) != FUNCTION_TYPE)
/* Both types must describe the same kind of function (handler vs
   ordinary subroutine).  */
2685 kind1 = funkind (type1);
2686 kind2 = funkind (type2);
2691 /* Check for mismatched modifiers */
2692 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
2693 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
2696 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
2697 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
2700 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
2701 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
2704 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
2705 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
2711 /* Handle a "longcall" or "shortcall" attribute; arguments as in
2712 struct attribute_spec.handler. */
2715 bfin_handle_longcall_attribute (tree *node, tree name,
2716 tree args ATTRIBUTE_UNUSED,
2717 int flags ATTRIBUTE_UNUSED,
2720 if (TREE_CODE (*node) != FUNCTION_TYPE
2721 && TREE_CODE (*node) != FIELD_DECL
2722 && TREE_CODE (*node) != TYPE_DECL)
2724 warning (OPT_Wattributes, "`%s' attribute only applies to functions",
2725 IDENTIFIER_POINTER (name));
2726 *no_add_attrs = true;
/* longcall and shortcall are mutually exclusive on the same type.  */
2729 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
2730 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
2731 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
2732 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
2734 warning (OPT_Wattributes,
2735 "can't apply both longcall and shortcall attributes to the same function");
2736 *no_add_attrs = true;
2742 /* Table of valid machine attributes. */
2743 const struct attribute_spec bfin_attribute_table[] =
2745 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2746 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute },
2747 { "exception_handler", 0, 0, false, true, true, handle_int_attribute },
2748 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute },
/* Modifier attributes need no handler; they are only queried later.  */
2749 { "nesting", 0, 0, false, true, true, NULL },
2750 { "kspisusp", 0, 0, false, true, true, NULL },
2751 { "saveall", 0, 0, false, true, true, NULL },
2752 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
2753 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
2754 { NULL, 0, 0, false, false, false, NULL }
2757 /* Output the assembler code for a thunk function. THUNK_DECL is the
2758 declaration for the thunk function itself, FUNCTION is the decl for
2759 the target function. DELTA is an immediate constant offset to be
2760 added to THIS. If VCALL_OFFSET is nonzero, the word at
2761 *(*this + vcall_offset) should be added to THIS. */
/* TARGET_ASM_OUTPUT_MI_THUNK hook: emit the assembly for a vcall thunk.
   Adds DELTA (and, if nonzero, the word at *(*this + VCALL_OFFSET)) to
   the incoming `this' pointer in R0, then tail-jumps to FUNCTION.  */
2764 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
2765 tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
2766 HOST_WIDE_INT vcall_offset, tree function)
2769 /* The this parameter is passed as the first argument. */
2770 rtx this = gen_rtx_REG (Pmode, REG_R0);
2772 /* Adjust the this parameter by a fixed constant.  The add-immediate
   form used below evidently accepts -64..63, so larger deltas up to
   +/-~128 are split into two immediate adds, and anything beyond that
   is materialized in r3 first.  */
2776 if (delta >= -64 && delta <= 63)
2778 xops[0] = GEN_INT (delta);
2779 output_asm_insn ("%1 += %0;", xops);
2781 else if (delta >= -128 && delta < -64)
2783 xops[0] = GEN_INT (delta + 64);
2784 output_asm_insn ("%1 += -64; %1 += %0;", xops);
2786 else if (delta > 63 && delta <= 126)
2788 xops[0] = GEN_INT (delta - 63);
2789 output_asm_insn ("%1 += 63; %1 += %0;", xops);
/* Fallback for large deltas: load the constant into r3 halfword by
   halfword and add it.  r3 is used as a scratch here.  */
2793 xops[0] = GEN_INT (delta);
2794 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
2798 /* Adjust the this parameter by a value stored in the vtable. */
2801 rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
2802 rtx tmp = gen_rtx_REG (Pmode, REG_R2);
/* Load the vtable pointer (*this) into P2.  */
2806 output_asm_insn ("%2 = r0; %2 = [%2];", xops);
2808 /* Adjust the this parameter. */
2809 xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
2810 if (!memory_operand (xops[0], Pmode))
/* VCALL_OFFSET is out of range for a load displacement: build the
   offset in a scratch P-register and add it to P2 first.  */
2812 rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
2813 xops[0] = GEN_INT (vcall_offset);
2815 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
2816 xops[0] = gen_rtx_MEM (Pmode, p2tmp);
/* Load the adjustment word and add it to `this'.  */
2819 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
/* Tail-jump to the target function.  */
2822 xops[0] = XEXP (DECL_RTL (function), 0);
/* NOTE(review): the leading `1 ||' makes this condition always true, so
   the direct jump.l is emitted unconditionally and any PIC-specific
   branch (not visible in this excerpt) is dead code — confirm whether
   this is a deliberate temporary measure.  */
2823 if (1 || !flag_pic || (*targetm.binds_local_p) (function))
2824 output_asm_insn ("jump.l\t%P0", xops);
2827 /* Codes for all the Blackfin builtins. */
/* Convenience wrapper around builtin_function for registering a
   machine-specific (BUILT_IN_MD) builtin named NAME with function type
   TYPE and function code CODE.  */
2835 #define def_builtin(NAME, TYPE, CODE) \
2837 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
2841 /* Set up all builtin functions for this target. */
/* TARGET_INIT_BUILTINS hook: register the Blackfin builtin functions.  */
2843 bfin_init_builtins (void)
2845 tree void_ftype_void
2846 = build_function_type (void_type_node, void_list_node);
2848 /* Register the synchronization builtins; both are void -> void.  */
2849 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
2850 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
2853 /* Expand an expression EXP that calls a built-in function,
2854 with result going to TARGET if that's convenient
2855 (and in mode MODE if that's convenient).
2856 SUBTARGET may be used as the target for computing one of EXP's operands.
2857 IGNORE is nonzero if the value is to be ignored. */
/* TARGET_EXPAND_BUILTIN hook: expand a call to a Blackfin builtin into
   RTL.  Dispatches on the DECL_FUNCTION_CODE stored by def_builtin;
   the CSYNC/SSYNC builtins simply emit their dedicated insns.  */
2860 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
2861 rtx subtarget ATTRIBUTE_UNUSED,
2862 enum machine_mode mode ATTRIBUTE_UNUSED,
2863 int ignore ATTRIBUTE_UNUSED)
2865 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2866 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2870 case BFIN_BUILTIN_CSYNC:
2871 emit_insn (gen_csync ());
2873 case BFIN_BUILTIN_SSYNC:
2874 emit_insn (gen_ssync ());
/* Target hook overrides.  Each #undef/#define pair replaces the default
   hook from target-def.h with the Blackfin implementation defined in
   this file (or a generic hook_* helper).  */

/* Builtins.  */
2882 #undef TARGET_INIT_BUILTINS
2883 #define TARGET_INIT_BUILTINS bfin_init_builtins
2885 #undef TARGET_EXPAND_BUILTIN
2886 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
/* Assembler output.  */
2888 #undef TARGET_ASM_GLOBALIZE_LABEL
2889 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
2891 #undef TARGET_ASM_FILE_START
2892 #define TARGET_ASM_FILE_START output_file_start
/* Attributes.  */
2894 #undef TARGET_ATTRIBUTE_TABLE
2895 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
2897 #undef TARGET_COMP_TYPE_ATTRIBUTES
2898 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
/* Costs.  */
2900 #undef TARGET_RTX_COSTS
2901 #define TARGET_RTX_COSTS bfin_rtx_costs
2903 #undef TARGET_ADDRESS_COST
2904 #define TARGET_ADDRESS_COST bfin_address_cost
2906 #undef TARGET_ASM_INTERNAL_LABEL
2907 #define TARGET_ASM_INTERNAL_LABEL bfin_internal_label
2909 #undef TARGET_MACHINE_DEPENDENT_REORG
2910 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
/* Calls, sibcalls and thunks.  */
2912 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
2913 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
2915 #undef TARGET_ASM_OUTPUT_MI_THUNK
2916 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
2917 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
/* All thunks this port can be asked for are supported; use the
   always-true generic hook.  */
2918 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
/* Scheduling.  */
2920 #undef TARGET_SCHED_ADJUST_COST
2921 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
/* Argument passing and promotion.  */
2923 #undef TARGET_PROMOTE_PROTOTYPES
2924 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
2925 #undef TARGET_PROMOTE_FUNCTION_ARGS
2926 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
2927 #undef TARGET_PROMOTE_FUNCTION_RETURN
2928 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
2930 #undef TARGET_ARG_PARTIAL_BYTES
2931 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
2933 #undef TARGET_PASS_BY_REFERENCE
2934 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
2936 #undef TARGET_SETUP_INCOMING_VARARGS
2937 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
2939 #undef TARGET_STRUCT_VALUE_RTX
2940 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
/* Vector support and option handling.  */
2942 #undef TARGET_VECTOR_MODE_SUPPORTED_P
2943 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
2945 #undef TARGET_HANDLE_OPTION
2946 #define TARGET_HANDLE_OPTION bfin_handle_option
2948 #undef TARGET_DEFAULT_TARGET_FLAGS
2949 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
/* Instantiate the target hook vector with the overrides above.  */
2951 struct gcc_target targetm = TARGET_INITIALIZER;