1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "insn-codes.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
42 #include "target-def.h"
48 #include "integrate.h"
50 #include "langhooks.h"
51 #include "bfin-protos.h"
54 #include "basic-block.h"
57 /* A C structure for machine-specific, per-function data.
58 This is added to the cfun structure. */
59 struct machine_function GTY(())
/* NOTE(review): presumably nonzero when the function contains hardware
   loop insns -- confirm against bfin.md; the struct's braces are not
   visible in this view.  */
61 int has_hardware_loops;
64 /* Test and compare insns in bfin.md store the information needed to
65 generate branch and scc insns here. */
66 rtx bfin_compare_op0, bfin_compare_op1;
68 /* RTX for condition code flag register and RETS register */
69 extern GTY(()) rtx bfin_cc_rtx;
70 extern GTY(()) rtx bfin_rets_rtx;
71 rtx bfin_cc_rtx, bfin_rets_rtx;
/* Number of registers available for argument passing; computed in
   output_file_start from the arg_regs array below.  */
73 int max_arg_registers = 0;
75 /* Arrays used when emitting register names. */
76 const char *short_reg_names[] = SHORT_REGISTER_NAMES;
77 const char *high_reg_names[] = HIGH_REGISTER_NAMES;
78 const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
79 const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
/* Hard regnos used for argument passing, terminated by a negative
   value (see the counting loop in output_file_start).  */
81 static int arg_regs[] = FUNCTION_ARG_REGISTERS;
83 /* Nonzero if -mshared-library-id was given. */
84 static int bfin_lib_id_given;
86 /* Nonzero if -fschedule-insns2 was given. We override it and
87 call the scheduler ourselves during reorg. */
88 static int bfin_flag_schedule_insns2;
90 /* Determines whether we run variable tracking in machine dependent
   reorganization (comment truncated in this view).  */
92 static int bfin_flag_var_tracking;
/* NOTE(review): presumably nonzero while insns are being split for
   scheduling -- confirm against the users of this flag.  */
94 int splitting_for_sched;
/* Output an assembler directive making symbol NAME global, i.e.
   ".global NAME".  NOTE(review): return-type line and braces are
   outside this view.  */
97 bfin_globalize_label (FILE *stream, const char *name)
99 fputs (".global ", stream);
100 assemble_name (stream, name);
/* Emit the assembler .file directive at the start of output and count
   the available argument registers into max_arg_registers.  */
106 output_file_start (void)
108 FILE *file = asm_out_file;
111 /* Variable tracking should be run after all optimizations which change order
112 of insns. It also needs a valid CFG. This can't be done in
113 override_options, because flag_var_tracking is finalized after
   (comment truncated in this view).  */
115 bfin_flag_var_tracking = flag_var_tracking;
116 flag_var_tracking = 0;
118 fprintf (file, ".file \"%s\";\n", input_filename);
/* arg_regs is terminated by a negative value; count its entries.  */
120 for (i = 0; arg_regs[i] >= 0; i++)
122 max_arg_registers = i; /* how many arg reg used */
125 /* Called early in the compilation to conditionally modify
126 fixed_regs/call_used_regs. */
/* Also creates the RTL objects for the CC flag and RETS registers
   used throughout this file.  */
129 conditional_register_usage (void)
131 /* initialize condition code flag register rtx */
132 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
133 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
136 /* Examine machine-dependent attributes of function type FUNTYPE and return its
137 type. See the definition of E_FUNKIND. */
139 static e_funkind funkind (tree funtype)
141 tree attrs = TYPE_ATTRIBUTES (funtype);
142 if (lookup_attribute ("interrupt_handler", attrs))
143 return INTERRUPT_HANDLER;
144 else if (lookup_attribute ("exception_handler", attrs))
145 return EXCPT_HANDLER;
/* NOTE(review): the NMI_HANDLER return and the SUBROUTINE fallback are
   outside this view.  */
146 else if (lookup_attribute ("nmi_handler", attrs))
152 /* Legitimize PIC addresses. If the address is already position-independent,
153 we return ORIG. Newly generated position-independent addresses go into a
154 reg. This is REG if nonzero, otherwise we allocate register(s) as
155 necessary. PICREG is the register holding the pointer to the PIC offset
   table (comment truncated in this view).  */
159 legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
164 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
/* Pick the GOT relocation style: plain PIC for ID shared libraries,
   otherwise FDPIC (functions use 17M4 function-descriptor GOT slots).  */
169 if (TARGET_ID_SHARED_LIBRARY)
170 unspec = UNSPEC_MOVE_PIC;
171 else if (GET_CODE (addr) == SYMBOL_REF
172 && SYMBOL_REF_FUNCTION_P (addr))
173 unspec = UNSPEC_FUNCDESC_GOT17M4;
175 unspec = UNSPEC_MOVE_FDPIC;
/* Allocate a result register if the caller did not supply one.  */
179 gcc_assert (!no_new_pseudos);
180 reg = gen_reg_rtx (Pmode);
/* Load the address from the GOT: MEM (picreg + unspec (addr)).  */
183 tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
184 new = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));
186 emit_move_insn (reg, new);
187 if (picreg == pic_offset_table_rtx)
188 current_function_uses_pic_offset_table = 1;
192 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
196 if (GET_CODE (addr) == CONST)
198 addr = XEXP (addr, 0);
199 gcc_assert (GET_CODE (addr) == PLUS);
202 if (XEXP (addr, 0) == picreg)
207 gcc_assert (!no_new_pseudos);
208 reg = gen_reg_rtx (Pmode);
/* Recursively legitimize both operands of the PLUS; don't reuse REG
   for the second operand if the first already claimed it.  */
211 base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
212 addr = legitimize_pic_address (XEXP (addr, 1),
213 base == reg ? NULL_RTX : reg,
216 if (GET_CODE (addr) == CONST_INT)
218 gcc_assert (! reload_in_progress && ! reload_completed);
219 addr = force_reg (Pmode, addr);
222 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
224 base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
225 addr = XEXP (addr, 1);
228 return gen_rtx_PLUS (Pmode, base, addr);
234 /* Stack frame layout. */
236 /* Compute the number of DREGS to save with a push_multiple operation.
237 This could include registers that aren't modified in the function,
238 since push_multiple only takes a range of registers.
239 If IS_INTHANDLER, then everything that is live must be saved, even
240 if normally call-clobbered. */
243 n_dregs_to_save (bool is_inthandler)
/* Find the lowest D register that must be saved; push_multiple then
   saves the contiguous range up to R7.  */
247 for (i = REG_R0; i <= REG_R7; i++)
249 if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
250 return REG_R7 - i + 1;
/* If the function uses __builtin_eh_return, the EH data registers
   must be saved as well.  */
252 if (current_function_calls_eh_return)
257 unsigned test = EH_RETURN_DATA_REGNO (j);
258 if (test == INVALID_REGNUM)
261 return REG_R7 - i + 1;
269 /* Like n_dregs_to_save, but compute number of PREGS to save. */
272 n_pregs_to_save (bool is_inthandler)
276 for (i = REG_P0; i <= REG_P5; i++)
277 if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
/* The PIC register must also be saved when the function uses the GOT.
   NOTE(review): an '|| (' joining the two conditions appears to be
   missing from this view of the source.  */
279 && i == PIC_OFFSET_TABLE_REGNUM
280 && (current_function_uses_pic_offset_table
281 || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
282 return REG_P5 - i + 1;
286 /* Determine if we are going to save the frame pointer in the prologue. */
289 must_save_fp_p (void)
/* Save FP either because a frame pointer is required, or because the
   register is used (live) somewhere in the function.  */
291 return frame_pointer_needed || regs_ever_live[REG_FP];
/* Return whether the current function needs a stack frame (a LINK insn).  */
295 stack_frame_needed_p (void)
297 /* EH return puts a new return address into the frame using an
298 address relative to the frame pointer. */
299 if (current_function_calls_eh_return)
301 return frame_pointer_needed;
304 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
305 must save all registers; this is used for interrupt handlers.
306 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
307 this for an interrupt (or exception) handler. */
310 expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
312 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
313 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
/* push_multiple saves contiguous ranges ending at R7/P5, so compute
   the first register of each range from the counts.  */
314 int dregno = REG_R7 + 1 - ndregs;
315 int pregno = REG_P5 + 1 - npregs;
316 int total = ndregs + npregs;
/* Build a PARALLEL for the push_multiple insn: element 0 is the
   identifying UNSPEC, elements 1..total are the register stores, and
   the last element adjusts SP by -total*4.  */
323 val = GEN_INT (-total * 4);
324 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
325 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
326 UNSPEC_PUSH_MULTIPLE);
327 XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
328 gen_rtx_PLUS (Pmode, spreg,
330 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
331 for (i = 0; i < total; i++)
333 rtx memref = gen_rtx_MEM (word_mode,
334 gen_rtx_PLUS (Pmode, spreg,
335 GEN_INT (- i * 4 - 4)));
/* NOTE(review): the D-reg vs P-reg selection logic between these two
   stores is not visible in this view.  */
339 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
345 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
349 XVECEXP (pat, 0, i + 1) = subpat;
350 RTX_FRAME_RELATED_P (subpat) = 1;
352 insn = emit_insn (pat);
353 RTX_FRAME_RELATED_P (insn) = 1;
356 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
357 must save all registers; this is used for interrupt handlers.
358 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
359 this for an interrupt (or exception) handler. */
362 expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
364 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
365 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
366 int total = ndregs + npregs;
/* Mirror of expand_prologue_reg_save: a PARALLEL whose element 0
   restores SP (adds total*4) and whose remaining elements load the
   saved registers back.  */
373 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
374 XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
375 gen_rtx_PLUS (Pmode, spreg,
376 GEN_INT (total * 4)));
383 for (i = 0; i < total; i++)
386 ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
388 rtx memref = gen_rtx_MEM (word_mode, addr);
/* NOTE(review): the computation of REGNO for each slot is not visible
   in this view.  */
391 XVECEXP (pat, 0, i + 1)
392 = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
401 insn = emit_insn (pat);
402 RTX_FRAME_RELATED_P (insn) = 1;
405 /* Perform any needed actions needed for a function that is receiving a
406 variable number of arguments.
410 MODE and TYPE are the mode and type of the current parameter.
412 PRETEND_SIZE is a variable that should be set to the amount of stack
413 that must be pushed by the prolog to pretend that our caller pushed
   it (comment truncated in this view).
416 Normally, this macro will push all remaining incoming registers on the
417 stack and set PRETEND_SIZE to the length of the registers pushed.
420 - VDSP C compiler manual (our ABI) says that a variable args function
421 should save the R0, R1 and R2 registers in the stack.
422 - The caller will always leave space on the stack for the
423 arguments that are passed in registers, so we dont have
424 to leave any extra space.
425 - now, the vastart pointer can access all arguments from the stack. */
428 setup_incoming_varargs (CUMULATIVE_ARGS *cum,
429 enum machine_mode mode ATTRIBUTE_UNUSED,
430 tree type ATTRIBUTE_UNUSED, int *pretend_size,
439 /* The move for named arguments will be generated automatically by the
440 compiler. We need to generate the move rtx for the unnamed arguments
441 if they are in the first 3 words. We assume at least 1 named argument
442 exists, so we never generate [ARGP] = R0 here. */
/* Spill each remaining unnamed argument register into its caller-
   allocated stack slot at [ARGP + i*UNITS_PER_WORD].  */
444 for (i = cum->words + 1; i < max_arg_registers; i++)
446 mem = gen_rtx_MEM (Pmode,
447 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
448 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
454 /* Value should be nonzero if functions must have frame pointers.
455 Zero means the frame pointer need not be set up (and parms may
456 be accessed via the stack pointer) in functions that seem suitable. */
459 bfin_frame_pointer_required (void)
461 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
/* Interrupt and exception handlers always get a frame pointer.  */
463 if (fkind != SUBROUTINE)
466 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
467 so we have to override it for non-leaf functions. */
468 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
474 /* Return the number of registers pushed during the prologue. */
477 n_regs_saved_by_prologue (void)
479 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
480 bool is_inthandler = fkind != SUBROUTINE;
481 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
/* "saveall" handlers and non-leaf interrupt handlers save everything.  */
482 bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
483 || (is_inthandler && !current_function_is_leaf));
484 int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
485 int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
486 int n = ndregs + npregs;
488 if (all || stack_frame_needed_p ())
489 /* We use a LINK instruction in this case. */
493 if (must_save_fp_p ())
495 if (! current_function_is_leaf)
/* Interrupt handlers additionally save ASTAT, possibly the nesting
   return register, and any live registers above P7.  */
499 if (fkind != SUBROUTINE)
503 /* Increment once for ASTAT. */
507 if (lookup_attribute ("nesting", attrs))
510 for (i = REG_P7 + 1; i < REG_CC; i++)
513 || (!leaf_function_p () && call_used_regs[i]))
/* Accumulators A0/A1 take two slots (saved in PDImode).  */
514 n += i == REG_A0 || i == REG_A1 ? 2 : 1;
519 /* Return the offset between two registers, one to be eliminated, and the other
520 its replacement, at the start of a routine. */
523 bfin_initial_elimination_offset (int from, int to)
525 HOST_WIDE_INT offset = 0;
527 if (from == ARG_POINTER_REGNUM)
528 offset = n_regs_saved_by_prologue () * 4;
530 if (to == STACK_POINTER_REGNUM)
/* Outgoing-argument area: at least FIXED_STACK_AREA bytes whenever
   any outgoing arguments exist.  */
532 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
533 offset += current_function_outgoing_args_size;
534 else if (current_function_outgoing_args_size)
535 offset += FIXED_STACK_AREA;
537 offset += get_frame_size ();
543 /* Emit code to load a constant CONSTANT into register REG; setting
544 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
545 Make sure that the insns we generate need not be split. */
548 frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
551 rtx cst = GEN_INT (constant);
/* Small constants fit in a single move; larger ones need an explicit
   high/low pair.  */
553 if (constant >= -32768 && constant < 65536)
554 insn = emit_move_insn (reg, cst);
557 /* We don't call split_load_immediate here, since dwarf2out.c can get
558 confused about some of the more clever sequences it can generate. */
559 insn = emit_insn (gen_movsi_high (reg, cst));
561 RTX_FRAME_RELATED_P (insn) = 1;
562 insn = emit_insn (gen_movsi_low (reg, reg, cst));
565 RTX_FRAME_RELATED_P (insn) = 1;
568 /* Generate efficient code to add a value to a P register. We can use
569 P1 as a scratch register. Set RTX_FRAME_RELATED_P on the generated
570 insns if FRAME is nonzero. */
573 add_to_reg (rtx reg, HOST_WIDE_INT value, int frame)
578 /* Choose whether to use a sequence using a temporary register, or
579 a sequence with multiple adds. We can add a signed 7 bit value
580 in one instruction. */
581 if (value > 120 || value < -120)
583 rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
/* For frame-related adds, load the constant in a dwarf2-friendly way.  */
587 frame_related_constant_load (tmpreg, value, TRUE);
589 insn = emit_move_insn (tmpreg, GEN_INT (value));
591 insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
593 RTX_FRAME_RELATED_P (insn) = 1;
604 /* We could use -62, but that would leave the stack unaligned, so
   (comment truncated in this view).  */
608 insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
610 RTX_FRAME_RELATED_P (insn) = 1;
616 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
617 is too large, generate a sequence of insns that has the same effect.
618 SPREG contains (reg:SI REG_SP). */
621 emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
623 HOST_WIDE_INT link_size = frame_size;
/* LINK's immediate field tops out at 262140 bytes.  */
627 if (link_size > 262140)
630 /* Use a LINK insn with as big a constant as possible, then subtract
631 any remaining size from the SP. */
632 insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
633 RTX_FRAME_RELATED_P (insn) = 1;
/* Mark every SET inside the LINK's PARALLEL as frame related so
   dwarf2 CFI is emitted for each saved register.  */
635 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
637 rtx set = XVECEXP (PATTERN (insn), 0, i);
638 gcc_assert (GET_CODE (set) == SET);
639 RTX_FRAME_RELATED_P (set) = 1;
642 frame_size -= link_size;
646 /* Must use a call-clobbered PREG that isn't the static chain. */
647 rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
649 frame_related_constant_load (tmpreg, -frame_size, TRUE);
650 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
651 RTX_FRAME_RELATED_P (insn) = 1;
655 /* Return the number of bytes we must reserve for outgoing arguments
656 in the current function's stack frame. */
/* NOTE(review): the function-name line is missing from this view;
   callers below refer to this as arg_area_size.  Any outgoing
   arguments reserve at least FIXED_STACK_AREA bytes.  */
661 if (current_function_outgoing_args_size)
663 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
664 return current_function_outgoing_args_size;
666 return FIXED_STACK_AREA;
671 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
672 function must save all its registers (true only for certain interrupt
   handlers -- comment truncated in this view).  */
676 do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
678 frame_size += arg_area_size ();
/* Use a LINK insn when a full frame is needed; otherwise push RETS
   and/or FP individually and adjust SP directly.  */
680 if (all || stack_frame_needed_p ()
681 || (must_save_fp_p () && ! current_function_is_leaf))
682 emit_link_insn (spreg, frame_size);
685 if (! current_function_is_leaf)
687 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
688 gen_rtx_PRE_DEC (Pmode, spreg)),
690 rtx insn = emit_insn (pat);
691 RTX_FRAME_RELATED_P (insn) = 1;
693 if (must_save_fp_p ())
695 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
696 gen_rtx_PRE_DEC (Pmode, spreg)),
697 gen_rtx_REG (Pmode, REG_FP));
698 rtx insn = emit_insn (pat);
699 RTX_FRAME_RELATED_P (insn) = 1;
701 add_to_reg (spreg, -frame_size, 1);
705 /* Like do_link, but used for epilogues to deallocate the stack frame. */
708 do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
710 frame_size += arg_area_size ();
712 if (all || stack_frame_needed_p ())
713 emit_insn (gen_unlink ());
/* No LINK was used: pop FP and RETS back off the stack by hand.  The
   USE insns keep the restored values from being deleted as dead.  */
716 rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
718 add_to_reg (spreg, frame_size, 0);
719 if (must_save_fp_p ())
721 rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
722 emit_move_insn (fpreg, postinc);
723 emit_insn (gen_rtx_USE (VOIDmode, fpreg));
725 if (! current_function_is_leaf)
727 emit_move_insn (bfin_rets_rtx, postinc);
728 emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
733 /* Generate a prologue suitable for a function of kind FKIND. This is
734 called for interrupt and exception handler prologues.
735 SPREG contains (reg:SI REG_SP). */
738 expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
741 HOST_WIDE_INT frame_size = get_frame_size ();
742 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
743 rtx predec = gen_rtx_MEM (SImode, predec1);
745 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
746 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
747 tree kspisusp = lookup_attribute ("kspisusp", attrs);
/* With the kspisusp attribute, the kernel stack pointer lives in USP
   on entry; copy it into SP first.  */
751 insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
752 RTX_FRAME_RELATED_P (insn) = 1;
755 /* We need space on the stack in case we need to save the argument
   registers (comment truncated in this view).  */
757 if (fkind == EXCPT_HANDLER)
759 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
760 RTX_FRAME_RELATED_P (insn) = 1;
/* Push ASTAT first; it is popped last in the epilogue.  */
763 insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
764 RTX_FRAME_RELATED_P (insn) = 1;
766 /* If we're calling other functions, they won't save their call-clobbered
767 registers, so we must save everything here. */
768 if (!current_function_is_leaf)
770 expand_prologue_reg_save (spreg, all, true);
/* Save live registers above P7 (loop, accumulator, I/B/L/M regs).  */
772 for (i = REG_P7 + 1; i < REG_CC; i++)
775 || (!leaf_function_p () && call_used_regs[i]))
777 if (i == REG_A0 || i == REG_A1)
778 insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
779 gen_rtx_REG (PDImode, i));
781 insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
782 RTX_FRAME_RELATED_P (insn) = 1;
/* Nested handlers must preserve the interrupted-context return
   register (RETX/RETN/RETI depending on handler kind).  */
785 if (lookup_attribute ("nesting", attrs))
787 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
788 : fkind == NMI_HANDLER ? REG_RETN
790 insn = emit_move_insn (predec, srcreg);
791 RTX_FRAME_RELATED_P (insn) = 1;
794 do_link (spreg, frame_size, all);
/* Exception handlers pass SEQSTAT-derived info and a stack/frame
   snapshot to the handler body in R0-R2.  */
796 if (fkind == EXCPT_HANDLER)
798 rtx r0reg = gen_rtx_REG (SImode, REG_R0);
799 rtx r1reg = gen_rtx_REG (SImode, REG_R1);
800 rtx r2reg = gen_rtx_REG (SImode, REG_R2);
803 insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
804 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
/* Shift right then left by 26 to clear the low 26 bits of SEQSTAT,
   leaving the exception cause field.  */
806 insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
807 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
809 insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
810 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
812 insn = emit_move_insn (r1reg, spreg);
813 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
815 insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
816 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
818 insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
819 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
824 /* Generate an epilogue suitable for a function of kind FKIND. This is
825 called for interrupt and exception handler epilogues.
826 SPREG contains (reg:SI REG_SP). */
829 expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
832 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
833 rtx postinc = gen_rtx_MEM (SImode, postinc1);
834 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
835 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
837 /* A slightly crude technique to stop flow from trying to delete "dead"
   loads (comment truncated in this view).  */
839 MEM_VOLATILE_P (postinc) = 1;
841 do_unlink (spreg, get_frame_size (), all);
/* Restores happen in the exact reverse order of the prologue saves:
   nesting return register, then high registers, then the multiple-
   register pop, then ASTAT.  */
843 if (lookup_attribute ("nesting", attrs))
845 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
846 : fkind == NMI_HANDLER ? REG_RETN
848 emit_move_insn (srcreg, postinc);
851 /* If we're calling other functions, they won't save their call-clobbered
852 registers, so we must save (and restore) everything here. */
853 if (!current_function_is_leaf)
856 for (i = REG_CC - 1; i > REG_P7; i--)
859 || (!leaf_function_p () && call_used_regs[i]))
861 if (i == REG_A0 || i == REG_A1)
863 rtx mem = gen_rtx_MEM (PDImode, postinc1);
864 MEM_VOLATILE_P (mem) = 1;
865 emit_move_insn (gen_rtx_REG (PDImode, i), mem);
868 emit_move_insn (gen_rtx_REG (SImode, i), postinc);
871 expand_epilogue_reg_restore (spreg, all, true);
873 emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
875 /* Deallocate any space we left on the stack in case we needed to save the
876 argument registers. */
877 if (fkind == EXCPT_HANDLER)
878 emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
880 emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
883 /* Used while emitting the prologue to generate code to load the correct value
884 into the PIC register, which is passed in DEST. */
887 bfin_load_pic_reg (rtx dest)
889 struct cgraph_local_info *i = NULL;
892 if (flag_unit_at_a_time)
893 i = cgraph_local_info (current_function_decl);
895 /* Functions local to the translation unit don't need to reload the
896 pic reg, since the caller always passes a usable one. */
898 return pic_offset_table_rtx;
/* With an explicit -mshared-library-id, the GOT pointer lives at a
   fixed negative offset from the incoming PIC register; otherwise it
   is found via the special library-offset symbol.  */
900 if (bfin_lib_id_given)
901 addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
903 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
904 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
905 UNSPEC_LIBRARY_OFFSET));
906 insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
907 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
911 /* Generate RTL for the prologue of the current function. */
914 bfin_expand_prologue (void)
917 HOST_WIDE_INT frame_size = get_frame_size ();
918 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
919 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
920 rtx pic_reg_loaded = NULL_RTX;
/* Interrupt/exception handlers use a completely separate prologue.  */
922 if (fkind != SUBROUTINE)
924 expand_interrupt_handler_prologue (spreg, fkind);
/* Optional stack-limit checking: compute the limit into P2 and trap
   if SP is below it.  */
928 if (current_function_limit_stack
929 || TARGET_STACK_CHECK_L1)
932 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
933 STACK_POINTER_REGNUM);
934 rtx lim = current_function_limit_stack ? stack_limit_rtx : NULL_RTX;
935 rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
/* NOTE(review): 0xFFB00000 is presumably the L1 scratchpad base used
   for TARGET_STACK_CHECK_L1 -- confirm against the Blackfin memory map.  */
939 rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
940 emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
941 emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
944 if (GET_CODE (lim) == SYMBOL_REF)
946 if (TARGET_ID_SHARED_LIBRARY)
/* A symbolic stack limit needs PIC legitimization first.  */
948 rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
950 pic_reg_loaded = bfin_load_pic_reg (p2reg);
951 val = legitimize_pic_address (stack_limit_rtx, p1reg,
953 emit_move_insn (p1reg, val);
954 frame_related_constant_load (p2reg, offset, FALSE);
955 emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
960 rtx limit = plus_constant (lim, offset);
961 emit_move_insn (p2reg, limit);
968 emit_move_insn (p2reg, lim);
969 add_to_reg (p2reg, offset, 0);
972 emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
973 emit_insn (gen_trapifcc ());
975 expand_prologue_reg_save (spreg, 0, false);
977 do_link (spreg, frame_size, false);
/* Reload the PIC register if any code in this function needs it.  */
979 if (TARGET_ID_SHARED_LIBRARY
981 && (current_function_uses_pic_offset_table
982 || !current_function_is_leaf))
983 bfin_load_pic_reg (pic_offset_table_rtx);
986 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
987 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
988 eh_return pattern. */
991 bfin_expand_epilogue (int need_return, int eh_return)
993 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
994 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
996 if (fkind != SUBROUTINE)
998 expand_interrupt_handler_epilogue (spreg, fkind);
1002 do_unlink (spreg, get_frame_size (), false);
1004 expand_epilogue_reg_restore (spreg, false, false);
1006 /* Omit the return insn if this is for a sibcall. */
/* For eh_return, P2 holds the stack adjustment (EH_RETURN_STACKADJ).  */
1011 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
1013 emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
1016 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1019 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
1020 unsigned int new_reg)
1022 /* Interrupt functions can only use registers that have already been
1023 saved by the prologue, even if they would normally be
   call-clobbered (comment truncated in this view).  */
1026 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
1027 && !regs_ever_live[new_reg])
1033 /* Return the value of the return address for the frame COUNT steps up
1034 from the current frame, after the prologue.
1035 We punt for everything but the current frame by returning const0_rtx. */
1038 bfin_return_addr_rtx (int count)
/* The current frame's return address is the entry value of RETS.  */
1043 return get_hard_reg_initial_val (Pmode, REG_RETS);
1046 /* Try machine-dependent ways of modifying an illegitimate address X
1047 to be legitimate. If we find one, return the new, valid address,
1048 otherwise return NULL_RTX.
1050 OLDX is the address as it was before break_out_memory_refs was called.
1051 In some cases it is useful to look at this to decide what needs to be done.
1053 MODE is the mode of the memory reference. */
/* All parameters are unused: the Blackfin port performs no address
   legitimization here (presumably returns NULL_RTX -- body not
   visible in this view).  */
1056 legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
1057 enum machine_mode mode ATTRIBUTE_UNUSED)
/* Undo PIC legitimization: if ORIG_X is a GOT load of the form
   MEM (pic_reg + UNSPEC_MOVE_PIC (sym)), return the original symbol.  */
1063 bfin_delegitimize_address (rtx orig_x)
1067 if (GET_CODE (x) != MEM)
1071 if (GET_CODE (x) == PLUS
1072 && GET_CODE (XEXP (x, 1)) == UNSPEC
1073 && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1074 && GET_CODE (XEXP (x, 0)) == REG
1075 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM
1076 return XVECEXP (XEXP (x, 1), 0, 0);
1081 /* This predicate is used to compute the length of a load/store insn.
1082 OP is a MEM rtx, we return nonzero if its addressing mode requires a
1083 32 bit instruction. */
1086 effective_address_32bit_p (rtx op, enum machine_mode mode)
1088 HOST_WIDE_INT offset;
1090 mode = GET_MODE (op);
/* Non-PLUS addresses (plain register, auto inc/dec) always fit in a
   16-bit insn.  */
1093 if (GET_CODE (op) != PLUS)
1095 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
1096 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
1100 offset = INTVAL (XEXP (op, 1));
1102 /* All byte loads use a 16 bit offset. */
1103 if (GET_MODE_SIZE (mode) == 1)
1106 if (GET_MODE_SIZE (mode) == 4)
1108 /* Frame pointer relative loads can use a negative offset, all others
1109 are restricted to a small positive one. */
1110 if (XEXP (op, 0) == frame_pointer_rtx)
1111 return offset < -128 || offset > 60;
1112 return offset < 0 || offset > 60;
1115 /* Must be HImode now. */
1116 return offset < 0 || offset > 30;
1119 /* Returns true if X is a memory reference using an I register. */
1121 bfin_dsp_memref_p (rtx x)
/* Strip any auto-increment/decrement wrapper before inspecting the
   base (the register check itself is outside this view).  */
1126 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1127 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1132 /* Return cost of the memory address ADDR.
1133 All addressing modes are equally cheap on the Blackfin. */
/* NOTE(review): the constant return value is not visible in this view.  */
1136 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
1141 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1144 print_address_operand (FILE *file, rtx x)
1146 switch (GET_CODE (x))
/* NOTE(review): the case labels (presumably PLUS, PRE_DEC, POST_INC,
   POST_DEC, default) are missing from this view; the print forms below
   match Blackfin assembly syntax: reg+reg, --reg, reg++, reg--.  */
1149 output_address (XEXP (x, 0));
1150 fprintf (file, "+");
1151 output_address (XEXP (x, 1));
1155 fprintf (file, "--");
1156 output_address (XEXP (x, 0));
1159 output_address (XEXP (x, 0));
1160 fprintf (file, "++");
1163 output_address (XEXP (x, 0));
1164 fprintf (file, "--");
1168 gcc_assert (GET_CODE (x) != MEM);
1169 print_operand (file, x, 0);
1174 /* Adding intp DImode support by Tony
   (comment truncated in this view).  */
/* Output operand X to FILE, modified by the single-letter CODE
   (the TARGET_PRINT_OPERAND-style hook for this port).  */
1180 print_operand (FILE *file, rtx x, char code)
1182 enum machine_mode mode;
/* Parallel-issue separator: " ||" inside a multi-issue bundle,
   ";" otherwise.  */
1186 if (GET_MODE (current_output_insn) == SImode)
1187 fprintf (file, " ||");
1189 fprintf (file, ";");
1193 mode = GET_MODE (x);
/* Condition-code suffix for a comparison operator (%j).  */
1198 switch (GET_CODE (x))
1201 fprintf (file, "e");
1204 fprintf (file, "ne");
1207 fprintf (file, "g");
1210 fprintf (file, "l");
1213 fprintf (file, "ge");
1216 fprintf (file, "le");
1219 fprintf (file, "g");
1222 fprintf (file, "l");
1225 fprintf (file, "ge");
1228 fprintf (file, "le");
1231 output_operand_lossage ("invalid %%j value");
1235 case 'J': /* reverse logic */
1236 switch (GET_CODE(x))
1239 fprintf (file, "ne");
1242 fprintf (file, "e");
1245 fprintf (file, "le");
1248 fprintf (file, "ge");
1251 fprintf (file, "l");
1254 fprintf (file, "g");
1257 fprintf (file, "le");
1260 fprintf (file, "ge");
1263 fprintf (file, "l");
1266 fprintf (file, "g");
1269 output_operand_lossage ("invalid %%J value");
/* Register operands: the modifier selects which name table or
   sub-register suffix to use.  */
1274 switch (GET_CODE (x))
1279 gcc_assert (REGNO (x) < 32);
1280 fprintf (file, "%s", short_reg_names[REGNO (x)]);
1281 /*fprintf (file, "\n%d\n ", REGNO (x));*/
1284 else if (code == 'd')
1286 gcc_assert (REGNO (x) < 32);
1287 fprintf (file, "%s", high_reg_names[REGNO (x)]);
1290 else if (code == 'w')
1292 gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
1293 fprintf (file, "%s.w", reg_names[REGNO (x)]);
1295 else if (code == 'x')
1297 gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
1298 fprintf (file, "%s.x", reg_names[REGNO (x)]);
1300 else if (code == 'D')
1302 fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
1304 else if (code == 'H')
1306 gcc_assert (mode == DImode || mode == DFmode);
1307 gcc_assert (REG_P (x));
1308 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
1310 else if (code == 'T')
1312 gcc_assert (D_REGNO_P (REGNO (x)));
1313 fprintf (file, "%s", byte_reg_names[REGNO (x)]);
1316 fprintf (file, "%s", reg_names[REGNO (x)]);
1322 print_address_operand (file, x);
/* Multiplier/mode option suffixes for DSP multiply insns.  */
1334 fputs ("(FU)", file);
1337 fputs ("(T)", file);
1340 fputs ("(TFU)", file);
1343 fputs ("(W32)", file);
1346 fputs ("(IS)", file);
1349 fputs ("(IU)", file);
1352 fputs ("(IH)", file);
1355 fputs ("(M)", file);
1358 fputs ("(ISS2)", file);
1361 fputs ("(S2RND)", file);
1368 else if (code == 'b')
1370 if (INTVAL (x) == 0)
1372 else if (INTVAL (x) == 1)
1378 /* Moves to half registers with d or h modifiers always use unsigned
   constants.  */
1380 else if (code == 'd')
1381 x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
1382 else if (code == 'h')
1383 x = GEN_INT (INTVAL (x) & 0xffff);
/* %X/%Y: log2 of the (complemented) mask, for single-bit set/clear.  */
1384 else if (code == 'X')
1385 x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
1386 else if (code == 'Y')
1387 x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
1388 else if (code == 'Z')
1389 /* Used for LINK insns. */
1390 x = GEN_INT (-8 - INTVAL (x));
1395 output_addr_const (file, x);
1399 output_operand_lossage ("invalid const_double operand");
/* PIC unspecs print the underlying symbol plus a relocation suffix.  */
1403 switch (XINT (x, 1))
1405 case UNSPEC_MOVE_PIC:
1406 output_addr_const (file, XVECEXP (x, 0, 0));
1407 fprintf (file, "@GOT");
1410 case UNSPEC_MOVE_FDPIC:
1411 output_addr_const (file, XVECEXP (x, 0, 0));
1412 fprintf (file, "@GOT17M4");
1415 case UNSPEC_FUNCDESC_GOT17M4:
1416 output_addr_const (file, XVECEXP (x, 0, 0));
1417 fprintf (file, "@FUNCDESC_GOT17M4");
1420 case UNSPEC_LIBRARY_OFFSET:
1421 fprintf (file, "_current_shared_library_p5_offset_");
1430 output_addr_const (file, x);
1435 /* Argument support functions. */
1437 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1438 for a call to a function whose data type is FNTYPE.
1439 For a library call, FNTYPE is 0.
1440 VDSP C Compiler manual, our ABI says that
1441 first 3 words of arguments will use R0, R1 and R2.
   */
1445 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1446 rtx libname ATTRIBUTE_UNUSED)
1448 static CUMULATIVE_ARGS zero_cum;
1452 /* Set up the number of registers to use for passing arguments. */
1454 cum->nregs = max_arg_registers;
1455 cum->arg_regs = arg_regs;
1457 cum->call_cookie = CALL_NORMAL;
1458 /* Check for a longcall attribute. */
/* NOTE(review): "shortcall" is checked first below, though the comment
   above only mentions longcall.  */
1459 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1460 cum->call_cookie |= CALL_SHORT;
1461 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1462 cum->call_cookie |= CALL_LONG;
1467 /* Update the data in CUM to advance over an argument
1468 of mode MODE and data type TYPE.
1469 (TYPE is null for libcalls where that information may not be available.) */
1472 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1473 int named ATTRIBUTE_UNUSED)
1475 int count, bytes, words;
/* BLKmode arguments carry their size in the tree type, not the mode.  */
1477 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1478 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1480 cum->words += words;
1481 cum->nregs -= words;
/* Once the argument registers are exhausted, stop offering them.  */
1483 if (cum->nregs <= 0)
1486 cum->arg_regs = NULL;
1490 for (count = 1; count <= words; count++)
1497 /* Define where to put the arguments to a function.
1498 Value is zero to push the argument on the stack,
1499 or a hard register in which to store the argument.
1501 MODE is the argument's machine mode.
1502 TYPE is the data type of the argument (as a tree).
1503 This is null for libcalls where that information may
1505 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1506 the preceding args and about the function being called.
1507 NAMED is nonzero if this argument is a named parameter
1508 (otherwise it is an extra parameter matching an ellipsis). */
1511 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1512 int named ATTRIBUTE_UNUSED)
1515 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
/* VOIDmode signals the end-of-arguments marker; return the call cookie
   computed by init_cumulative_args for use as call-insn operand 2.  */
1517 if (mode == VOIDmode)
1518 /* Compute operand 2 of the call insn.  */
1519 return GEN_INT (cum->call_cookie);
/* Otherwise hand back the next free argument register, if any.  */
1525 return gen_rtx_REG (mode, *(cum->arg_regs));
1530 /* For an arg passed partly in registers and partly in memory,
1531 this is the number of bytes passed in registers.
1532 For args passed entirely in registers or entirely in memory, zero.
1534 Refer VDSP C Compiler manual, our ABI.
1535 First 3 words are in registers. So, if a an argument is larger
1536 than the registers available, it will span the register and
1540 bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1541 tree type ATTRIBUTE_UNUSED,
1542 bool named ATTRIBUTE_UNUSED)
1545 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
/* bytes_left is how much register space remains for this argument.  */
1546 int bytes_left = cum->nregs * UNITS_PER_WORD;
/* Entirely in memory (no registers left) -> 0; an argument larger than
   the remaining register space is split at bytes_left.  */
1551 if (bytes_left == 0)
1553 if (bytes > bytes_left)
1558 /* Variable sized types are passed by reference.  */
/* Implements the TARGET_PASS_BY_REFERENCE hook: a type whose size is not
   a compile-time constant (TYPE_SIZE not INTEGER_CST) goes by reference.  */
1561 bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1562 enum machine_mode mode ATTRIBUTE_UNUSED,
1563 tree type, bool named ATTRIBUTE_UNUSED)
1565 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1568 /* Decide whether a type should be returned in memory (true)
1569 or in a register (false). This is called by the macro
1570 RETURN_IN_MEMORY. */
1573 bfin_return_in_memory (tree type)
1575 int size = int_size_in_bytes (type);
/* Anything wider than two words, or of variable size (-1 from
   int_size_in_bytes), must be returned in memory.  */
1576 return size > 2 * UNITS_PER_WORD || size == -1;
1579 /* Register in which address to store a structure value
1580 is passed to a function. */
/* Implements TARGET_STRUCT_VALUE_RTX: the hidden struct-return address
   is always passed in P0, for both incoming and outgoing calls.  */
1582 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1583 int incoming ATTRIBUTE_UNUSED)
1585 return gen_rtx_REG (Pmode, REG_P0);
1588 /* Return true when register may be used to pass function parameters.  */
/* arg_regs is the -1-terminated FUNCTION_ARG_REGISTERS table declared at
   the top of this file; N is a hard register number.  */
1591 function_arg_regno_p (int n)
1594 for (i = 0; arg_regs[i] != -1; i++)
1595 if (n == arg_regs[i])
1600 /* Returns 1 if OP contains a symbol reference */
/* Recursively walks the RTL expression OP looking for any SYMBOL_REF or
   LABEL_REF, descending into both 'e' (expression) and 'E' (vector)
   operands per the RTX format string.  */
1603 symbolic_reference_mentioned_p (rtx op)
1605 register const char *fmt;
1608 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1611 fmt = GET_RTX_FORMAT (GET_CODE (op));
1612 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1618 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1619 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1623 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1630 /* Decide whether we can make a sibling call to a function.  DECL is the
1631 declaration of the function being targeted by the call and EXP is the
1632 CALL_EXPR representing the call. */
1635 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1636 tree exp ATTRIBUTE_UNUSED)
/* Sibcalls are only valid from plain subroutines; interrupt/exception
   handlers have different epilogues.  */
1638 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1639 if (fkind != SUBROUTINE)
1641 if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
1644 /* When compiling for ID shared libraries, can't sibcall a local function
1645 from a non-local function, because the local function thinks it does
1646 not need to reload P5 in the prologue, but the sibcall wil pop P5 in the
1647 sibcall epilogue, and we end up with the wrong value in P5. */
1649 if (!flag_unit_at_a_time || decl == NULL)
1650 /* Not enough information. */
/* Consult cgraph locality info: OK unless the callee is local while the
   caller is not (the P5 problem described above).  */
1654 struct cgraph_local_info *this_func, *called_func;
1657 this_func = cgraph_local_info (current_function_decl);
1658 called_func = cgraph_local_info (decl);
1659 return !called_func->local || this_func->local;
1663 /* Emit RTL insns to initialize the variable parts of a trampoline at
1664 TRAMP. FNADDR is an RTX for the address of the function's pure
1665 code. CXT is an RTX for the static chain value for the function. */
/* NOTE(review): old-style (K&R) definition; an ANSI prototype would match
   the rest of the file's style.  */
1668 initialize_trampoline (tramp, fnaddr, cxt)
1669 rtx tramp, fnaddr, cxt;
1671 rtx t1 = copy_to_reg (fnaddr);
1672 rtx t2 = copy_to_reg (cxt);
1678 rtx a = memory_address (Pmode, plus_constant (tramp, 8));
1679 addr = memory_address (Pmode, tramp);
1680 emit_move_insn (gen_rtx_MEM (SImode, addr), a);
/* Store FNADDR as two 16-bit halves: low half at offset i+2, then shift
   right 16 and store the high half at offset i+6.  */
1684 addr = memory_address (Pmode, plus_constant (tramp, i + 2));
1685 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1686 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1687 addr = memory_address (Pmode, plus_constant (tramp, i + 6));
1688 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
/* Likewise the static chain CXT at offsets i+10 and i+14.  */
1690 addr = memory_address (Pmode, plus_constant (tramp, i + 10));
1691 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1692 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1693 addr = memory_address (Pmode, plus_constant (tramp, i + 14));
1694 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1697 /* Emit insns to move operands[1] into operands[0]. */
/* PIC variant of a move: legitimizes the symbolic source address through
   either the FDPIC register or the PIC offset table register.  */
1700 emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1702 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
1704 gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
1705 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1706 operands[1] = force_reg (SImode, operands[1]);
1708 operands[1] = legitimize_pic_address (operands[1], temp,
1709 TARGET_FDPIC ? OUR_FDPIC_REG
1710 : pic_offset_table_rtx);
1713 /* Expand a move operation in mode MODE. The operands are in OPERANDS.
1714 Returns true if no further code must be generated, false if the caller
1715 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
1718 expand_move (rtx *operands, enum machine_mode mode)
1720 rtx op = operands[1];
/* Symbolic constants under PIC/FDPIC go through emit_pic_move.  */
1721 if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
1722 && SYMBOLIC_CONST (op))
1723 emit_pic_move (operands, mode);
/* (const (plus (symbol_ref) (const_int))) that is not a legitimate
   constant must be decomposed into symbol load + add.  */
1724 else if (mode == SImode && GET_CODE (op) == CONST
1725 && GET_CODE (XEXP (op, 0)) == PLUS
1726 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
1727 && !bfin_legitimate_constant_p (op))
1729 rtx dest = operands[0];
1731 gcc_assert (!reload_in_progress && !reload_completed);
1733 op0 = force_reg (mode, XEXP (op, 0));
1735 if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
1736 op1 = force_reg (mode, op1);
/* A MEM destination needs the add to land in a scratch first.  */
1737 if (GET_CODE (dest) == MEM)
1738 dest = gen_reg_rtx (mode);
1739 emit_insn (gen_addsi3 (dest, op0, op1));
1740 if (dest == operands[0])
1744 /* Don't generate memory->memory or constant->memory moves, go through a
1746 else if ((reload_in_progress | reload_completed) == 0
1747 && GET_CODE (operands[0]) == MEM
1748 && GET_CODE (operands[1]) != REG)
1749 operands[1] = force_reg (mode, operands[1]);
1753 /* Split one or more DImode RTL references into pairs of SImode
1754 references. The RTL can be REG, offsettable MEM, integer constant, or
1755 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1756 split and "num" is its length. lo_half and hi_half are output arrays
1757 that parallel "operands". */
1760 split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1764 rtx op = operands[num];
1766 /* simplify_subreg refuse to split volatile memory addresses,
1767 but we still have to handle it. */
1768 if (GET_CODE (op) == MEM)
/* For MEMs, simply re-address at byte offsets 0 and 4.  */
1770 lo_half[num] = adjust_address (op, SImode, 0);
1771 hi_half[num] = adjust_address (op, SImode, 4);
/* Otherwise use simplify_gen_subreg; constants have VOIDmode, so
   substitute DImode as the inner mode in that case.  */
1775 lo_half[num] = simplify_gen_subreg (SImode, op,
1776 GET_MODE (op) == VOIDmode
1777 ? DImode : GET_MODE (op), 0);
1778 hi_half[num] = simplify_gen_subreg (SImode, op,
1779 GET_MODE (op) == VOIDmode
1780 ? DImode : GET_MODE (op), 4);
/* Return nonzero if a call to SYMBOL_REF OP must use the long-call form.
   Explicit shortcall/longcall cookie bits (from function attributes) take
   precedence over the global TARGET_LONG_CALLS setting.  */
1786 bfin_longcall_p (rtx op, int call_cookie)
1788 gcc_assert (GET_CODE (op) == SYMBOL_REF);
1789 if (call_cookie & CALL_SHORT)
1791 if (call_cookie & CALL_LONG)
1793 if (TARGET_LONG_CALLS)
1798 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
1799 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
1800 SIBCALL is nonzero if this is a sibling call. */
1803 bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
1805 rtx use = NULL, call;
1806 rtx callee = XEXP (fnaddr, 0);
/* PARALLEL holds the call, the cookie USE, and for sibcalls a RETURN.  */
1807 int nelts = 2 + !!sibcall;
1809 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
1812 /* In an untyped call, we can get NULL for operand 2.  */
1813 if (cookie == NULL_RTX)
1814 cookie = const0_rtx;
1816 /* Static functions and indirect calls don't need the pic register.  */
1817 if (!TARGET_FDPIC && flag_pic
1818 && GET_CODE (callee) == SYMBOL_REF
1819 && !SYMBOL_REF_LOCAL_P (callee))
1820 use_reg (&use, pic_offset_table_rtx)
/* FDPIC indirect/long calls: load the function descriptor (entry point
   and GOT pointer) into fresh registers.  */
1824 if (GET_CODE (callee) != SYMBOL_REF
1825 || bfin_longcall_p (callee, INTVAL (cookie)))
1828 if (! address_operand (addr, Pmode))
1829 addr = force_reg (Pmode, addr);
1831 fnaddr = gen_reg_rtx (SImode);
1832 emit_insn (gen_load_funcdescsi (fnaddr, addr));
1833 fnaddr = gen_rtx_MEM (Pmode, fnaddr);
1835 picreg = gen_reg_rtx (SImode);
1836 emit_insn (gen_load_funcdescsi (picreg,
1837 plus_constant (addr, 4)));
/* Non-FDPIC: force the callee into a register for indirect calls, for
   non-leaf ID-shared-library calls, and for long calls.  */
1842 else if ((!register_no_elim_operand (callee, Pmode)
1843 && GET_CODE (callee) != SYMBOL_REF)
1844 || (GET_CODE (callee) == SYMBOL_REF
1845 && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
1846 || bfin_longcall_p (callee, INTVAL (cookie)))))
1848 callee = copy_to_mode_reg (Pmode, callee);
1849 fnaddr = gen_rtx_MEM (Pmode, callee);
1851 call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
1854 call = gen_rtx_SET (VOIDmode, retval, call);
/* Assemble the PARALLEL and emit the call insn.  */
1856 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
1858 XVECEXP (pat, 0, n++) = call;
1860 XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
1861 XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
1863 XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
1864 call = emit_call_insn (pat);
1866 CALL_INSN_FUNCTION_USAGE (call) = use;
1869 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */
1872 hard_regno_mode_ok (int regno, enum machine_mode mode)
1874 /* Allow only dregs to store value of mode HI or QI */
1875 enum reg_class class = REGNO_REG_CLASS (regno);
/* V2HImode only fits in D registers; CC register only holds BImode;
   the PDImode accumulator modes only fit in A0/A1.  */
1880 if (mode == V2HImode)
1881 return D_REGNO_P (regno);
1882 if (class == CCREGS)
1883 return mode == BImode;
1884 if (mode == PDImode || mode == V2PDImode)
1885 return regno == REG_A0 || regno == REG_A1;
1887 /* Allow all normal 32 bit regs, except REG_M3, in case regclass ever comes
1888 up with a bad register class (such as ALL_REGS) for DImode. */
1890 return regno < REG_M3;
1893 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
1896 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
1899 /* Implements target hook vector_mode_supported_p.  */
/* V2HImode (two packed 16-bit halfwords) is the only vector mode the
   Blackfin port supports.  */
1902 bfin_vector_mode_supported_p (enum machine_mode mode)
1904 return mode == V2HImode;
1907 /* Return the cost of moving data from a register in class CLASS1 to
1908 one in class CLASS2. A cost of 2 is the default. */
1911 bfin_register_move_cost (enum machine_mode mode,
1912 enum reg_class class1, enum reg_class class2)
1914 /* These need secondary reloads, so they're more expensive.  */
1915 if ((class1 == CCREGS && class2 != DREGS)
1916 || (class1 != DREGS && class2 == CCREGS))
1919 /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
1923 /* There are some stalls involved when moving from a DREG to a different
1924 class reg, and using the value in one of the following instructions.
1925 Attempt to model this by slightly discouraging such moves. */
1926 if (class1 == DREGS && class2 != DREGS)
1929 if (GET_MODE_CLASS (mode) == MODE_INT)
1931 /* Discourage trying to use the accumulators.  */
1932 if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
1933 || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
1934 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
1935 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
1941 /* Return the cost of moving data of mode M between a
1942 register and memory. A value of 2 is the default; this cost is
1943 relative to those in `REGISTER_MOVE_COST'.
1945 ??? In theory L1 memory has single-cycle latency. We should add a switch
1946 that tells the compiler whether we expect to use only L1 memory for the
1947 program; it'll make the costs more accurate. */
1950 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1951 enum reg_class class,
1952 int in ATTRIBUTE_UNUSED)
1954 /* Make memory accesses slightly more expensive than any register-register
1955 move. Also, penalize non-DP registers, since they need secondary
1956 reloads to load and store. */
1957 if (! reg_class_subset_p (class, DPREGS))
1963 /* Inform reload about cases where moving X with a mode MODE to a register in
1964 CLASS requires an extra scratch register. Return the class needed for the
1965 scratch register. */
1967 static enum reg_class
1968 bfin_secondary_reload (bool in_p, rtx x, enum reg_class class,
1969 enum machine_mode mode, secondary_reload_info *sri)
1971 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
1972 in most other cases we can also use PREGS. */
1973 enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
1974 enum reg_class x_class = NO_REGS;
1975 enum rtx_code code = GET_CODE (x);
/* Look through SUBREGs to the underlying register.  */
1978 x = SUBREG_REG (x), code = GET_CODE (x);
1981 int regno = REGNO (x);
/* Pseudos may already have been assigned a hard reg by reload.  */
1982 if (regno >= FIRST_PSEUDO_REGISTER)
1983 regno = reg_renumber[regno];
1988 x_class = REGNO_REG_CLASS (regno);
1991 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
1992 This happens as a side effect of register elimination, and we need
1993 a scratch register to do it. */
1994 if (fp_plus_const_operand (x, mode))
1996 rtx op2 = XEXP (x, 1);
1997 int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));
1999 if (class == PREGS || class == PREGS_CLOBBERED)
2001 /* If destination is a DREG, we can do this without a scratch register
2002 if the constant is valid for an add instruction. */
2003 if ((class == DREGS || class == DPREGS)
2004 && ! large_constant_p)
2006 /* Reloading to anything other than a DREG? Use a PREG scratch
2008 sri->icode = CODE_FOR_reload_insi;
2012 /* Data can usually be moved freely between registers of most classes.
2013 AREGS are an exception; they can only move to or from another register
2014 in AREGS or one in DREGS. They can also be assigned the constant 0. */
2015 if (x_class == AREGS)
2016 return class == DREGS || class == AREGS ? NO_REGS : DREGS;
2020 if (x != const0_rtx && x_class != DREGS)
2026 /* CCREGS can only be moved from/to DREGS.  */
2027 if (class == CCREGS && x_class != DREGS)
2029 if (x_class == CCREGS && class != DREGS)
2032 /* All registers other than AREGS can load arbitrary constants. The only
2033 case that remains is MEM. */
2035 if (! reg_class_subset_p (class, default_class))
2036 return default_class;
2040 /* Implement TARGET_HANDLE_OPTION.  */
2043 bfin_handle_option (size_t code, const char *arg, int value)
2047 case OPT_mshared_library_id_:
/* Range-check the library ID and remember that it was given so
   override_options can diagnose a missing -mid-shared-library.  */
2048 if (value > MAX_LIBRARY_ID)
2049 error ("-mshared-library-id=%s is not between 0 and %d",
2050 arg, MAX_LIBRARY_ID);
2051 bfin_lib_id_given = 1;
/* Allocate and zero-initialize the per-function machine_function record;
   installed as init_machine_status in override_options.  */
2059 static struct machine_function *
2060 bfin_init_machine_status (void)
2062 struct machine_function *f;
2064 f = ggc_alloc_cleared (sizeof (struct machine_function));
2069 /* Implement the macro OVERRIDE_OPTIONS.  */
2072 override_options (void)
2074 if (TARGET_OMIT_LEAF_FRAME_POINTER)
2075 flag_omit_frame_pointer = 1;
2077 /* Library identification */
2078 if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
2079 error ("-mshared-library-id= specified without -mid-shared-library");
2081 if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
/* The two stack-checking mechanisms are mutually exclusive.  */
2084 if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
2085 error ("Can't use multiple stack checking methods together.");
2087 if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
2088 error ("ID shared libraries and FD-PIC mode can't be used together.");
2090 /* Don't allow the user to specify -mid-shared-library and -msep-data
2091 together, as it makes little sense from a user's point of view... */
2092 if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
2093 error ("cannot specify both -msep-data and -mid-shared-library");
2094 /* ... internally, however, it's nearly the same. */
2095 if (TARGET_SEP_DATA)
2096 target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;
2098 /* There is no single unaligned SI op for PIC code. Sometimes we
2099 need to use ".4byte" and sometimes we need to use ".picptr".
2100 See bfin_assemble_integer for details. */
2102 targetm.asm_out.unaligned_op.si = 0;
2104 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2105 since we don't support it and it'll just break. */
2106 if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
2109 flag_schedule_insns = 0;
2111 /* Passes after sched2 can break the helpful TImode annotations that
2112 haifa-sched puts on every insn. Just do scheduling in reorg. */
2113 bfin_flag_schedule_insns2 = flag_schedule_insns_after_reload;
2114 flag_schedule_insns_after_reload = 0;
2116 init_machine_status = bfin_init_machine_status;
2119 /* Return the destination address of BRANCH.
2120 We need to use this instead of get_attr_length, because the
2121 cbranch_with_nops pattern conservatively sets its length to 6, and
2122 we still prefer to use shorter sequences. */
2125 branch_dest (rtx branch)
2129 rtx pat = PATTERN (branch);
/* Unwrap a PARALLEL, then walk SET_SRC -> IF_THEN_ELSE -> label_ref to
   find the target label and return its recorded insn address.  */
2130 if (GET_CODE (pat) == PARALLEL)
2131 pat = XVECEXP (pat, 0, 0);
2132 dest = SET_SRC (pat);
2133 if (GET_CODE (dest) == IF_THEN_ELSE)
2134 dest = XEXP (dest, 1);
2135 dest = XEXP (dest, 0);
2136 dest_uid = INSN_UID (dest);
2137 return INSN_ADDRESSES (dest_uid);
2140 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2141 it's a branch that's predicted taken. */
2144 cbranch_predicted_taken_p (rtx insn)
2146 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2150 int pred_val = INTVAL (XEXP (x, 0));
/* Predicted taken when the probability is at least half of the base.  */
2152 return pred_val >= REG_BR_PROB_BASE / 2;
2158 /* Templates for use by asm_conditional_branch.  */
/* Indexed [idx][len]: idx = (bp << 1) | BRF/BRT (branch-on-false vs.
   branch-on-true), len = 0 short / 1 jump.s / 2 jump.l sequence.  */
2160 static const char *ccbranch_templates[][3] = {
2161 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
2162 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
2163 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
2164 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
2167 /* Output INSN, which is a conditional branch instruction with operands
2170 We deal with the various forms of conditional branches that can be generated
2171 by bfin_reorg to prevent the hardware from doing speculative loads, by
2172 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2173 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2174 Either of these is only necessary if the branch is short, otherwise the
2175 template we use ends in an unconditional jump which flushes the pipeline
2179 asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
2181 int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
2182 /* Note : offset for instructions like if cc jmp; jump.[sl] offset
2183 is to be taken from start of if cc rather than jump.
2184 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
/* Select short (0), jump.s (1) or jump.l (2) template by distance.  */
2186 int len = (offset >= -1024 && offset <= 1022 ? 0
2187 : offset >= -4094 && offset <= 4096 ? 1
2189 int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
2190 int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
2191 output_asm_insn (ccbranch_templates[idx][len], operands);
/* Nops are only valid for not-predicted-taken short branches.  */
2192 gcc_assert (n_nops == 0 || !bp);
2194 while (n_nops-- > 0)
2195 output_asm_insn ("nop;", NULL);
2198 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
2199 stored in bfin_compare_op0 and bfin_compare_op1 already. */
2202 bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
2204 enum rtx_code code1, code2;
2205 rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
2206 rtx tem = bfin_cc_rtx;
2207 enum rtx_code code = GET_CODE (cmp);
2209 /* If we have a BImode input, then we already have a compare result, and
2210 do not need to emit another comparison. */
2211 if (GET_MODE (op0) == BImode)
2213 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
2214 tem = op0, code2 = code;
2219 /* bfin has these conditions */
/* Conditions the hardware lacks are obtained by computing the reversed
   condition into CC and testing it with the opposite code.  */
2229 code1 = reverse_condition (code);
2233 emit_insn (gen_rtx_SET (BImode, tem,
2234 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
/* Result is a BImode comparison of CC against zero.  */
2237 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
2240 /* Return nonzero iff C has exactly one bit set if it is interpreted
2241 as a 32 bit constant. */
2244 log2constp (unsigned HOST_WIDE_INT c)
/* Standard power-of-two test: clearing the lowest set bit leaves zero.  */
2247 return c != 0 && (c & (c-1)) == 0;
2250 /* Returns the number of consecutive least significant zeros in the binary
2251 representation of *V.
2252 We modify *V to contain the original value arithmetically shifted right by
2253 the number of zeroes. */
2256 shiftr_zero (HOST_WIDE_INT *v)
2258 unsigned HOST_WIDE_INT tmp = *v;
2259 unsigned HOST_WIDE_INT sgn;
/* Capture the sign bit so the shift below stays arithmetic (shifting an
   unsigned value but re-inserting the sign each step).  */
2265 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
2266 while ((tmp & 0x1) == 0 && n <= 32)
2268 tmp = (tmp >> 1) | sgn;
2275 /* After reload, split the load of an immediate constant. OPERANDS are the
2276 operands of the movsi_insn pattern which we are splitting. We return
2277 nonzero if we emitted a sequence to load the constant, zero if we emitted
2278 nothing because we want to use the splitter's default sequence. */
2281 split_load_immediate (rtx operands[])
2283 HOST_WIDE_INT val = INTVAL (operands[1]);
2285 HOST_WIDE_INT shifted = val;
2286 HOST_WIDE_INT shifted_compl = ~val;
2287 int num_zero = shiftr_zero (&shifted);
2288 int num_compl_zero = shiftr_zero (&shifted_compl);
2289 unsigned int regno = REGNO (operands[0]);
2290 enum reg_class class1 = REGNO_REG_CLASS (regno);
2292 /* This case takes care of single-bit set/clear constants, which we could
2293 also implement with BITSET/BITCLR. */
2295 && shifted >= -32768 && shifted < 65536
2296 && (D_REGNO_P (regno)
2297 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
/* Load the shifted-down value then shift it back up into place.  */
2299 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2300 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
/* Sign-extend the low halfword of VAL into tmp.  */
2305 tmp |= -(tmp & 0x8000);
2307 /* If high word has one bit set or clear, try to use a bit operation.  */
2308 if (D_REGNO_P (regno))
2310 if (log2constp (val & 0xFFFF0000))
2312 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2313 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2316 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2318 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2319 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
/* Small low half: load it with a 7-bit move, then patch the high
   halfword separately.  */
2323 if (D_REGNO_P (regno))
2325 if (CONST_7BIT_IMM_P (tmp))
2327 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2328 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
/* High half all zeros / all ones: start from 0 or -1 and insert the
   low halfword.  */
2332 if ((val & 0xFFFF0000) == 0)
2334 emit_insn (gen_movsi (operands[0], const0_rtx));
2335 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2339 if ((val & 0xFFFF0000) == 0xFFFF0000)
2341 emit_insn (gen_movsi (operands[0], constm1_rtx));
2342 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2347 /* Need DREGs for the remaining case.  */
2352 && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
2354 /* If optimizing for size, generate a sequence that has more instructions
2356 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2357 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2358 GEN_INT (num_compl_zero)));
2359 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2365 /* Return true if the legitimate memory address for a memory operand of mode
2366 MODE. Return false if not. */
2369 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2371 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2372 int sz = GET_MODE_SIZE (mode);
/* Offsets are scaled by the access size: shift 0/1/2 for 1/2/4+ bytes.  */
2373 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2374 /* The usual offsettable_memref machinery doesn't work so well for this
2375 port, so we deal with the problem here. */
2376 if (value > 0 && sz == 8)
/* Valid if the magnitude fits in the 15-bit scaled displacement field.  */
2378 return (v & ~(0x7fff << shift)) == 0;
/* Return nonzero if REGNO may be used as a base register in an address of
   mode MODE under outer code OUTER_CODE; STRICT selects the strict (hard
   registers only) or non-strict check.  */
2382 bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2383 enum rtx_code outer_code)
2386 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2388 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
/* Implements GO_IF_LEGITIMATE_ADDRESS for the Blackfin: accepts plain base
   registers, base + valid displacement (or base + UNSPEC in SImode), and
   the auto-increment forms checked below.  */
2392 bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2394 switch (GET_CODE (x)) {
2396 if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
2400 if (REG_P (XEXP (x, 0))
2401 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
2402 && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
2403 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2404 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
/* Post-increment on any valid base register.  */
2409 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2410 && REG_P (XEXP (x, 0))
2411 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
/* Pre-decrement only on the stack pointer (push addressing).  */
2414 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2415 && XEXP (x, 0) == stack_pointer_rtx
2416 && REG_P (XEXP (x, 0))
2417 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
2426 /* Decide whether we can force certain constants to memory. If we
2427 decide we can't, the caller should be able to cope with it in
2431 bfin_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
2433 /* We have only one class of non-legitimate constants, and our movsi
2434 expander knows how to handle them. Dropping these constants into the
2435 data section would only shift the problem - we'd still get relocs
2436 outside the object, in the data section rather than the text section. */
2440 /* Ensure that for any constant of the form symbol + offset, the offset
2441 remains within the object. Any other constants are ok.
2442 This ensures that flat binaries never have to deal with relocations
2443 crossing section boundaries. */
2446 bfin_legitimate_constant_p (rtx x)
2449 HOST_WIDE_INT offset;
/* Only CONST expressions (symbol + offset) need further checking.  */
2451 if (GET_CODE (x) != CONST)
2455 gcc_assert (GET_CODE (x) == PLUS);
2459 if (GET_CODE (sym) != SYMBOL_REF
2460 || GET_CODE (x) != CONST_INT)
2462 offset = INTVAL (x);
/* Without a decl we cannot know the object's size; the offset must lie
   within [0, size) of the referenced object's type otherwise.  */
2464 if (SYMBOL_REF_DECL (sym) == 0)
2467 || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
/* Implements TARGET_RTX_COSTS: estimate the cost of rtx X appearing under
   OUTER_CODE, storing COSTS_N_INSNS-style values through *TOTAL.  */
2474 bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
2476 int cost2 = COSTS_N_INSNS (1);
/* Constants are free when they fit the immediate field of the context
   (7-bit for set/add, one-bit masks for AND, small ranges for compares,
   scale factors 2/4 for MULT/ASHIFT, 5-bit for shifts, single-bit for
   IOR/XOR); otherwise they cost a load.  */
2481 if (outer_code == SET || outer_code == PLUS)
2482 *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
2483 else if (outer_code == AND)
2484 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2485 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2486 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2487 else if (outer_code == LEU || outer_code == LTU)
2488 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2489 else if (outer_code == MULT)
2490 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2491 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2493 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2494 || outer_code == LSHIFTRT)
2495 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2496 else if (outer_code == IOR || outer_code == XOR)
2497 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2506 *total = COSTS_N_INSNS (2);
/* Pointer-mode PLUS of (MULT reg 2/4) models the scaled-index address
   form, which is cheap.  */
2510 if (GET_MODE (x) == Pmode)
2512 if (GET_CODE (XEXP (x, 0)) == MULT
2513 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2515 HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
2516 if (val == 2 || val == 4)
2519 *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
2520 *total += rtx_cost (XEXP (x, 1), outer_code);
2532 if (GET_MODE (x) == DImode)
2539 if (GET_MODE (x) == DImode)
/* Multiplies: 3 insns for word-size or narrower, very expensive wider.  */
2544 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
2545 *total = COSTS_N_INSNS (3);
2550 *total = COSTS_N_INSNS (32);
2555 if (outer_code == SET)
/* Implements TARGET_ASM_INTERNAL_LABEL: print a local label of the form
   <LOCAL_LABEL_PREFIX><prefix>$<num>: to STREAM.  */
2565 bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2567 fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
2570 /* Used for communication between {push,pop}_multiple_operation (which
2571 we use not only as a predicate) and the corresponding output functions. */
2572 static int first_preg_to_save, first_dreg_to_save;
/* Predicate for a multi-register push PARALLEL: verifies each element is
   a store of consecutive D (R7..first) then P (P5..first) registers at
   decreasing SP offsets, and records the first saved reg of each bank in
   the statics above for output_push_multiple.  */
2575 push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2577 int lastdreg = 8, lastpreg = 6;
2580 first_preg_to_save = lastpreg;
2581 first_dreg_to_save = lastdreg;
2582 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
2584 rtx t = XVECEXP (op, 0, i);
2588 if (GET_CODE (t) != SET)
2592 dest = SET_DEST (t);
2593 if (GET_CODE (dest) != MEM || ! REG_P (src))
2595 dest = XEXP (dest, 0);
/* Each store must address (SP + -i*4).  */
2596 if (GET_CODE (dest) != PLUS
2597 || ! REG_P (XEXP (dest, 0))
2598 || REGNO (XEXP (dest, 0)) != REG_SP
2599 || GET_CODE (XEXP (dest, 1)) != CONST_INT
2600 || INTVAL (XEXP (dest, 1)) != -i * 4)
2603 regno = REGNO (src);
/* group 0: first register seen decides whether D or P bank starts.  */
2606 if (D_REGNO_P (regno))
2609 first_dreg_to_save = lastdreg = regno - REG_R0;
2611 else if (regno >= REG_P0 && regno <= REG_P7)
2614 first_preg_to_save = lastpreg = regno - REG_P0;
/* group 1: continuing D regs consecutively, or switching to P bank.  */
2624 if (regno >= REG_P0 && regno <= REG_P7)
2627 first_preg_to_save = lastpreg = regno - REG_P0;
2629 else if (regno != REG_R0 + lastdreg + 1)
2634 else if (group == 2)
2636 if (regno != REG_P0 + lastpreg + 1)
/* Predicate for a multi-register pop PARALLEL: the mirror image of
   push_multiple_operation.  Loads run P7..P0 then R7..R0 order downwards
   from increasing SP offsets; records the first restored reg of each bank
   in the shared statics for output_pop_multiple.  */
2645 pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2647 int lastdreg = 8, lastpreg = 6;
2650 for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
2652 rtx t = XVECEXP (op, 0, i);
2656 if (GET_CODE (t) != SET)
2660 dest = SET_DEST (t);
2661 if (GET_CODE (src) != MEM || ! REG_P (dest))
2663 src = XEXP (src, 0)
/* First load is from (SP) directly; later ones from (SP + (i-1)*4).  */
2667 if (! REG_P (src) || REGNO (src) != REG_SP)
2670 else if (GET_CODE (src) != PLUS
2671 || ! REG_P (XEXP (src, 0))
2672 || REGNO (XEXP (src, 0)) != REG_SP
2673 || GET_CODE (XEXP (src, 1)) != CONST_INT
2674 || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
2677 regno = REGNO (dest);
/* R7 marks the switch from the P bank to the D bank.  */
2680 if (regno == REG_R7)
2685 else if (regno != REG_P0 + lastpreg - 1)
2690 else if (group == 1)
2692 if (regno != REG_R0 + lastdreg - 1)
2698 first_dreg_to_save = lastdreg;
2699 first_preg_to_save = lastpreg;
2703 /* Emit assembly code for one multi-register push described by INSN, with
2704 operands in OPERANDS. */
2707 output_push_multiple (rtx insn, rtx *operands)
2712 /* Validate the insn again, and compute first_[dp]reg_to_save.  */
2713 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
/* first_dreg_to_save == 8 / first_preg_to_save == 6 mean the respective
   bank is not saved at all; emit the one- or two-range form accordingly.  */
2716 if (first_dreg_to_save == 8)
2717 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2718 else if (first_preg_to_save == 6)
2719 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2721 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2722 first_dreg_to_save, first_preg_to_save);
2724 output_asm_insn (buf, operands);
2727 /* Emit assembly code for one multi-register pop described by INSN, with
2728 operands in OPERANDS. */
2731 output_pop_multiple (rtx insn, rtx *operands)
2736 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2737 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
/* Mirror of output_push_multiple: emit the [sp++] restore form,
   omitting the register class that is not popped.  */
2740 if (first_dreg_to_save == 8)
2741 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2742 else if (first_preg_to_save == 6)
2743 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2745 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2746 	   first_dreg_to_save, first_preg_to_save);
2748 output_asm_insn (buf, operands);
2751 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2754 single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
/* Go through a scratch pseudo so the copy is a load followed by a store,
   never a (possibly invalid) mem-to-mem move.  */
2756 rtx scratch = gen_reg_rtx (mode);
2759 srcmem = adjust_address_nv (src, mode, offset);
2760 dstmem = adjust_address_nv (dst, mode, offset);
2761 emit_move_insn (scratch, srcmem);
2762 emit_move_insn (dstmem, scratch);
2765 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
2766 alignment ALIGN_EXP. Return true if successful, false if we should fall
2767 back on a different method. */
2770 bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
2772 rtx srcreg, destreg, countreg;
2773 HOST_WIDE_INT align = 0;
2774 unsigned HOST_WIDE_INT count = 0;
/* Only constant counts/alignments are handled; otherwise count stays 0
   and we fall through to the failure paths below.  */
2776 if (GET_CODE (align_exp) == CONST_INT)
2777 align = INTVAL (align_exp);
2778 if (GET_CODE (count_exp) == CONST_INT)
2780 count = INTVAL (count_exp);
2782 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
2787 /* If optimizing for size, only do single copies inline. */
2790 if (count == 2 && align < 2)
2792 if (count == 4 && align < 4)
2794 if (count != 1 && count != 2 && count != 4)
2797 if (align < 2 && count != 1)
/* Force both addresses into registers so we can use auto-increment moves.  */
2800 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
2801 if (destreg != XEXP (dst, 0))
2802 dst = replace_equiv_address_nv (dst, destreg);
2803 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
2804 if (srcreg != XEXP (src, 0))
2805 src = replace_equiv_address_nv (src, srcreg);
/* Word-at-a-time path: copy SImode words (via rep_movsi for long runs),
   then mop up the remainder with HImode/QImode moves.  */
2807 if (count != 0 && align >= 2)
2809 unsigned HOST_WIDE_INT offset = 0;
2813 if ((count & ~3) == 4)
2815 single_move_for_movmem (dst, src, SImode, offset);
2818 else if (count & ~3)
2820 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
2821 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2823 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
2827 single_move_for_movmem (dst, src, HImode, offset);
/* Halfword-at-a-time path for 2-byte-aligned data.  */
2833 if ((count & ~1) == 2)
2835 single_move_for_movmem (dst, src, HImode, offset);
2838 else if (count & ~1)
2840 HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
2841 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2843 emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
2848 single_move_for_movmem (dst, src, QImode, offset);
2855 /* Implement TARGET_SCHED_ISSUE_RATE. */
/* Returns the number of insns the scheduler may issue in one cycle
   (body elided in this copy — TODO confirm the constant returned).  */
2858 bfin_issue_rate (void)
/* Implement TARGET_SCHED_ADJUST_COST: tweak the scheduling cost of the
   dependence LINK between DEP_INSN and INSN, starting from COST.  */
2864 bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
2866 enum attr_type insn_type, dep_insn_type;
2867 int dep_insn_code_number;
2869 /* Anti and output dependencies have zero cost. */
2870 if (REG_NOTE_KIND (link) != 0)
2873 dep_insn_code_number = recog_memoized (dep_insn);
2875 /* If we can't recognize the insns, we can't really do anything. */
2876 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
2879 insn_type = get_attr_type (insn);
2880 dep_insn_type = get_attr_type (dep_insn);
/* Moves and memory loads into non-address registers feeding an address
   use incur an extra stall; MOVE costs 4 extra cycles, MCLD 3.  */
2882 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
2884 rtx pat = PATTERN (dep_insn);
2885 rtx dest = SET_DEST (pat);
2886 rtx src = SET_SRC (pat);
2887 if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
2889 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
2896 /* Increment the counter for the number of loop instructions in the
2897 current function. */
/* The count gates the hardware-loop optimization pass in bfin_reorg
   (only run when cfun->machine->has_hardware_loops is nonzero).  */
2900 bfin_hardware_loop (void)
2902 cfun->machine->has_hardware_loops++;
2905 /* Maximum loop nesting depth. */
2906 #define MAX_LOOP_DEPTH 2
2908 /* Maximum size of a loop. */
/* Measured in bytes of insn encoding (summed via get_attr_length).  */
2909 #define MAX_LOOP_LENGTH 2042
2911 /* We need to keep a vector of loops */
2912 typedef struct loop_info *loop_info;
2913 DEF_VEC_P (loop_info);
2914 DEF_VEC_ALLOC_P (loop_info,heap);
2916 /* Information about a loop we have found (or are in the process of
/* Filled in by bfin_discover_loop and consumed by bfin_optimize_loop.  */
2918 struct loop_info GTY (())
2920 /* loop number, for dumps */
2923 /* Predecessor block of the loop. This is the one that falls into
2924 the loop and contains the initialization instruction. */
2925 basic_block predecessor;
2927 /* First block in the loop. This is the one branched to by the loop_end
2931 /* Last block in the loop (the one with the loop_end insn). */
2934 /* The successor block of the loop. This is the one the loop_end insn
2936 basic_block successor;
2938 /* The last instruction in the tail. */
2941 /* The loop_end insn. */
2944 /* The iteration register. */
2947 /* The new initialization insn. */
2950 /* The new initialization instruction. */
2953 /* The new label placed at the beginning of the loop. */
2956 /* The new label placed at the end of the loop. */
2959 /* The length of the loop. */
2962 /* The nesting depth of the loop. */
2965 /* Nonzero if we can't optimize this loop. */
2968 /* True if we have visited this loop. */
2971 /* True if this loop body clobbers any of LC0, LT0, or LB0. */
2974 /* True if this loop body clobbers any of LC1, LT1, or LB1. */
2977 /* Next loop in the graph. */
2978 struct loop_info *next;
2980 /* Immediate outer loop of this loop. */
2981 struct loop_info *outer;
2983 /* Vector of blocks only within the loop, including those within
2985 VEC (basic_block,heap) *blocks;
2987 /* Same information in a bitmap. */
2988 bitmap block_bitmap;
2990 /* Vector of inner loops within this loop */
2991 VEC (loop_info,heap) *loops;
/* Dump the list of LOOPS (and their blocks/inner loops) to dump_file,
   for debugging the hardware-loop pass.  */
2995 bfin_dump_loops (loop_info loops)
2999 for (loop = loops; loop; loop = loop->next)
3005 fprintf (dump_file, ";; loop %d: ", loop->loop_no);
/* "(bad)" marks loops rejected for hardware-loop conversion.  */
3007 fprintf (dump_file, "(bad) ");
3008 fprintf (dump_file, "{head:%d, depth:%d}", loop->head->index, loop->depth);
3010 fprintf (dump_file, " blocks: [ ");
3011 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
3012 fprintf (dump_file, "%d ", b->index);
3013 fprintf (dump_file, "] ");
3015 fprintf (dump_file, " inner loops: [ ");
3016 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, i); ix++)
3017 fprintf (dump_file, "%d ", i->loop_no);
3018 fprintf (dump_file, "]\n");
3020 fprintf (dump_file, "\n");
3023 /* Scan the blocks of LOOP (and its inferiors) looking for basic block
3024 BB. Return true, if we find it. */
/* O(1): membership is answered from the loop's block bitmap.  */
3027 bfin_bb_in_loop (loop_info loop, basic_block bb)
3029 return bitmap_bit_p (loop->block_bitmap, bb->index);
3032 /* Scan the blocks of LOOP (and its inferiors) looking for uses of
3033 REG. Return true, if we find any. Don't count the loop's loop_end
3034 insn if it matches LOOP_END. */
3037 bfin_scan_loop (loop_info loop, rtx reg, rtx loop_end)
3042 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
/* Walk every insn of the block, inclusive of BB_END.  */
3046 for (insn = BB_HEAD (bb);
3047 insn != NEXT_INSN (BB_END (bb));
3048 insn = NEXT_INSN (insn))
3052 if (insn == loop_end)
3054 if (reg_mentioned_p (reg, PATTERN (insn)))
3061 /* Optimize LOOP. */
/* Try to convert LOOP into a Blackfin hardware loop (LSETUP with
   LC0/LT0/LB0 or LC1/LT1/LB1).  Recurses into inner loops first, then
   verifies depth, length, iteration-register usage and hardware-loop
   register clobbers; on any failure the loop is marked bad and a
   software decrement-and-branch sequence is emitted instead.
   NOTE(review): many lines (returns, braces, "bad:" paths) are elided
   in this copy; comments describe intent only.  */
3064 bfin_optimize_loop (loop_info loop)
3068 rtx insn, init_insn, last_insn, nop_insn;
3069 rtx loop_init, start_label, end_label;
3070 rtx reg_lc0, reg_lc1, reg_lt0, reg_lt1, reg_lb0, reg_lb1;
3072 rtx lc_reg, lt_reg, lb_reg;
3076 int inner_depth = 0;
3086 fprintf (dump_file, ";; loop %d bad when found\n", loop->loop_no);
3090 /* Every loop contains in its list of inner loops every loop nested inside
3091 it, even if there are intermediate loops. This works because we're doing
3092 a depth-first search here and never visit a loop more than once. */
3093 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
3095 bfin_optimize_loop (inner);
3097 if (!inner->bad && inner_depth < inner->depth)
3099 inner_depth = inner->depth;
/* Inherit clobber information from inner loops.  */
3101 loop->clobber_loop0 |= inner->clobber_loop0;
3102 loop->clobber_loop1 |= inner->clobber_loop1;
3106 loop->depth = inner_depth + 1;
3107 if (loop->depth > MAX_LOOP_DEPTH)
3110 fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
3114 /* Get the loop iteration register. */
3115 iter_reg = loop->iter_reg;
3117 if (!DPREG_P (iter_reg))
3120 fprintf (dump_file, ";; loop %d iteration count NOT in PREG or DREG\n",
3125 /* Check if start_label appears before loop_end and calculate the
3126 offset between them. We calculate the length of instructions
3129 for (insn = loop->start_label;
3130 insn && insn != loop->loop_end;
3131 insn = NEXT_INSN (insn))
/* Conditional jumps and labels may later grow by anomaly-workaround
   NOPs/CSYNCs, so pad the length estimate conservatively.  */
3133 if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
3135 if (TARGET_CSYNC_ANOMALY)
3137 else if (TARGET_SPECLD_ANOMALY)
3140 else if (LABEL_P (insn))
3142 if (TARGET_CSYNC_ANOMALY)
3147 length += get_attr_length (insn);
3153 fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
3158 loop->length = length;
3159 if (loop->length > MAX_LOOP_LENGTH)
3162 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
3166 /* Scan all the blocks to make sure they don't use iter_reg. */
3167 if (bfin_scan_loop (loop, iter_reg, loop->loop_end))
3170 fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
3174 /* Scan all the insns to see if the loop body clobber
3175 any hardware loop registers. */
3177 reg_lc0 = gen_rtx_REG (SImode, REG_LC0);
3178 reg_lc1 = gen_rtx_REG (SImode, REG_LC1);
3179 reg_lt0 = gen_rtx_REG (SImode, REG_LT0);
3180 reg_lt1 = gen_rtx_REG (SImode, REG_LT1);
3181 reg_lb0 = gen_rtx_REG (SImode, REG_LB0);
3182 reg_lb1 = gen_rtx_REG (SImode, REG_LB1);
3184 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
3188 for (insn = BB_HEAD (bb);
3189 insn != NEXT_INSN (BB_END (bb));
3190 insn = NEXT_INSN (insn))
3195 if (reg_set_p (reg_lc0, insn)
3196 || reg_set_p (reg_lt0, insn)
3197 || reg_set_p (reg_lb0, insn))
3198 loop->clobber_loop0 = 1;
3200 if (reg_set_p (reg_lc1, insn)
3201 || reg_set_p (reg_lt1, insn)
3202 || reg_set_p (reg_lb1, insn))
3203 loop->clobber_loop1 |= 1;
/* If neither hardware-loop register set is usable, give up.  */
3207 if ((loop->clobber_loop0 && loop->clobber_loop1)
3208 || (loop->depth == MAX_LOOP_DEPTH && loop->clobber_loop0))
3210 loop->depth = MAX_LOOP_DEPTH + 1;
3212 fprintf (dump_file, ";; loop %d no loop reg available\n",
3217 /* There should be an instruction before the loop_end instruction
3218 in the same basic block. And the instruction must not be
3220 - CONDITIONAL BRANCH
3224 - Returns (RTS, RTN, etc.) */
3227 last_insn = PREV_INSN (loop->loop_end);
3231 for (; last_insn != PREV_INSN (BB_HEAD (bb));
3232 last_insn = PREV_INSN (last_insn))
3233 if (INSN_P (last_insn))
3236 if (last_insn != PREV_INSN (BB_HEAD (bb)))
/* No real insn in the tail block: look in the single predecessor.  */
3239 if (single_pred_p (bb)
3240 && single_pred (bb) != ENTRY_BLOCK_PTR)
3242 bb = single_pred (bb);
3243 last_insn = BB_END (bb);
3248 last_insn = NULL_RTX;
3256 fprintf (dump_file, ";; loop %d has no last instruction\n",
3261 if (JUMP_P (last_insn))
3263 loop_info inner = bb->aux;
3265 && inner->outer == loop
3266 && inner->loop_end == last_insn
3267 && inner->depth == 1)
3268 /* This jump_insn is the exact loop_end of an inner loop
3269 and to be optimized away. So use the inner's last_insn. */
3270 last_insn = inner->last_insn;
3274 fprintf (dump_file, ";; loop %d has bad last instruction\n",
3279 else if (CALL_P (last_insn)
3280 || (GET_CODE (PATTERN (last_insn)) != SEQUENCE
3281 && get_attr_type (last_insn) == TYPE_SYNC)
3282 || recog_memoized (last_insn) == CODE_FOR_return_internal)
3285 fprintf (dump_file, ";; loop %d has bad last instruction\n",
/* Multi-insn sequences and asms can't terminate a hardware loop;
   append a NOP to serve as the final insn.  */
3290 if (GET_CODE (PATTERN (last_insn)) == ASM_INPUT
3291 || asm_noperands (PATTERN (last_insn)) >= 0
3292 || (GET_CODE (PATTERN (last_insn)) != SEQUENCE
3293 && get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI))
3295 nop_insn = emit_insn_after (gen_nop (), last_insn);
3296 last_insn = nop_insn;
3299 loop->last_insn = last_insn;
3301 /* The loop is good for replacement. */
3302 start_label = loop->start_label;
3303 end_label = gen_label_rtx ();
3304 iter_reg = loop->iter_reg;
/* Prefer the loop-1 register set for innermost loops when free.  */
3306 if (loop->depth == 1 && !loop->clobber_loop1)
3311 loop->clobber_loop1 = 1;
3318 loop->clobber_loop0 = 1;
3321 /* If iter_reg is a DREG, we need generate an instruction to load
3322 the loop count into LC register. */
3323 if (D_REGNO_P (REGNO (iter_reg)))
3325 init_insn = gen_movsi (lc_reg, iter_reg);
3326 loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
3330 else if (P_REGNO_P (REGNO (iter_reg)))
3332 init_insn = NULL_RTX;
3333 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3340 loop->init = init_insn;
3341 loop->end_label = end_label;
3342 loop->loop_init = loop_init;
3346 fprintf (dump_file, ";; replacing loop %d initializer with\n",
3348 print_rtl_single (dump_file, loop->loop_init);
3349 fprintf (dump_file, ";; replacing loop %d terminator with\n",
3351 print_rtl_single (dump_file, loop->loop_end);
/* Emit the LSETUP sequence in the predecessor and delete the old
   loop_end branch.  */
3356 if (loop->init != NULL_RTX)
3357 emit_insn (loop->init);
3358 emit_insn(loop->loop_init);
3359 emit_label (loop->start_label);
3364 emit_insn_after (seq, BB_END (loop->predecessor));
3365 delete_insn (loop->loop_end);
3367 /* Insert the loop end label before the last instruction of the loop. */
3368 emit_label_before (loop->end_label, loop->last_insn);
3375 fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);
/* Fallback for bad loops: open-coded decrement, compare and branch.  */
3379 if (DPREG_P (loop->iter_reg))
3381 /* If loop->iter_reg is a DREG or PREG, we can split it here
3382 without scratch register. */
3385 emit_insn_before (gen_addsi3 (loop->iter_reg,
3390 emit_insn_before (gen_cmpsi (loop->iter_reg, const0_rtx),
3393 insn = emit_jump_insn_before (gen_bne (loop->start_label),
3396 JUMP_LABEL (insn) = loop->start_label;
3397 LABEL_NUSES (loop->start_label)++;
3398 delete_insn (loop->loop_end);
3402 /* Called from bfin_reorg_loops when a potential loop end is found. LOOP is
3403 a newly set up structure describing the loop, it is this function's
3404 responsibility to fill most of it. TAIL_BB and TAIL_INSN point to the
3405 loop_end insn and its enclosing basic block. */
3408 bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
3412 VEC (basic_block,heap) *works = VEC_alloc (basic_block,heap,20);
/* Seed the loop description from the loop_end insn's pattern:
   operand 1 of the PARALLEL sets the iteration register, operand 0's
   source holds the branch target (the start label).  */
3414 loop->tail = tail_bb;
3415 loop->head = BRANCH_EDGE (tail_bb)->dest;
3416 loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
3417 loop->predecessor = NULL;
3418 loop->loop_end = tail_insn;
3419 loop->last_insn = NULL_RTX;
3420 loop->iter_reg = SET_DEST (XVECEXP (PATTERN (tail_insn), 0, 1));
3421 loop->depth = loop->length = 0;
3423 loop->clobber_loop0 = loop->clobber_loop1 = 0;
3427 loop->init = loop->loop_init = NULL_RTX;
3428 loop->start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn), 0, 0)), 1), 0);
3429 loop->end_label = NULL_RTX;
/* Breadth-first walk from the head, collecting every block that can
   reach the loop body while the iteration register is still live.  */
3432 VEC_safe_push (basic_block, heap, works, loop->head);
3434 while (VEC_iterate (basic_block, works, dwork++, bb))
3438 if (bb == EXIT_BLOCK_PTR)
3440 /* We've reached the exit block. The loop must be bad. */
3443 ";; Loop is bad - reached exit block while scanning\n");
3448 if (bitmap_bit_p (loop->block_bitmap, bb->index))
3451 /* We've not seen this block before. Add it to the loop's
3452 list and then add each successor to the work list. */
3454 VEC_safe_push (basic_block, heap, loop->blocks, bb);
3455 bitmap_set_bit (loop->block_bitmap, bb->index);
3459 FOR_EACH_EDGE (e, ei, bb->succs)
3461 basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
/* Successors where iter_reg is dead are outside the loop.  */
3462 if (!REGNO_REG_SET_P (succ->il.rtl->global_live_at_start,
3463 REGNO (loop->iter_reg)))
3465 if (!VEC_space (basic_block, works, 1))
3469 VEC_block_remove (basic_block, works, 0, dwork);
3473 VEC_reserve (basic_block, heap, works, 1);
3475 VEC_quick_push (basic_block, works, succ);
3482 /* Make sure we only have one entry point. */
3483 if (EDGE_COUNT (loop->head->preds) == 2)
3485 loop->predecessor = EDGE_PRED (loop->head, 0)->src;
3486 if (loop->predecessor == loop->tail)
3487 /* We wanted the other predecessor. */
3488 loop->predecessor = EDGE_PRED (loop->head, 1)->src;
3490 /* We can only place a loop insn on a fall through edge of a
3491 single exit block. */
3492 if (EDGE_COUNT (loop->predecessor->succs) != 1
3493 || !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU)
3494 /* If loop->predecessor is in loop, loop->head is not really
3495 the head of the loop. */
3496 || bfin_bb_in_loop (loop, loop->predecessor))
3497 loop->predecessor = NULL;
3500 if (loop->predecessor == NULL)
3503 fprintf (dump_file, ";; loop has bad predecessor\n");
3508 #ifdef ENABLE_CHECKING
3509 /* Make sure nothing jumps into this loop. This shouldn't happen as we
3510 wouldn't have generated the counted loop patterns in such a case.
3511 However, this test must be done after the test above to detect loops
3512 with invalid headers. */
3514 for (dwork = 0; VEC_iterate (basic_block, loop->blocks, dwork, bb); dwork++)
3518 if (bb == loop->head)
3520 FOR_EACH_EDGE (e, ei, bb->preds)
3522 basic_block pred = EDGE_PRED (bb, ei.index)->src;
3523 if (!bfin_bb_in_loop (loop, pred))
3528 VEC_free (basic_block, heap, works);
/* Driver for the hardware-loop optimization: find all loop_end insns,
   build loop_info structures, compute their nesting, then optimize
   each loop and free the data.  DUMP_FILE may be NULL.  */
3532 bfin_reorg_loops (FILE *dump_file)
3534 bitmap_obstack stack;
3537 loop_info loops = NULL;
3541 bitmap_obstack_initialize (&stack);
3543 /* Find all the possible loop tails. This means searching for every
3544 loop_end instruction. For each one found, create a loop_info
3545 structure and add the head block to the work list. */
3548 rtx tail = BB_END (bb);
/* Skip trailing notes so we test the real last insn of the block.  */
3550 while (GET_CODE (tail) == NOTE)
3551 tail = PREV_INSN (tail);
3555 if (INSN_P (tail) && recog_memoized (tail) == CODE_FOR_loop_end)
3557 /* A possible loop end */
3559 loop = XNEW (struct loop_info);
3562 loop->loop_no = nloops++;
3563 loop->blocks = VEC_alloc (basic_block, heap, 20);
3564 loop->block_bitmap = BITMAP_ALLOC (&stack);
3569 fprintf (dump_file, ";; potential loop %d ending at\n",
3571 print_rtl_single (dump_file, tail);
3574 bfin_discover_loop (loop, bb, tail);
3578 tmp_bitmap = BITMAP_ALLOC (&stack);
3579 /* Compute loop nestings. */
3580 for (loop = loops; loop; loop = loop->next)
3586 for (other = loop->next; other; other = other->next)
/* Classify each pair by block-set intersection: disjoint, nested one
   way, nested the other way, or (illegal) partial overlap.  */
3591 bitmap_and (tmp_bitmap, other->block_bitmap, loop->block_bitmap);
3592 if (bitmap_empty_p (tmp_bitmap))
3594 if (bitmap_equal_p (tmp_bitmap, other->block_bitmap))
3596 other->outer = loop;
3597 VEC_safe_push (loop_info, heap, loop->loops, other);
3599 else if (bitmap_equal_p (tmp_bitmap, loop->block_bitmap))
3601 loop->outer = other;
3602 VEC_safe_push (loop_info, heap, other->loops, loop);
3606 loop->bad = other->bad = 1;
3610 BITMAP_FREE (tmp_bitmap);
3614 fprintf (dump_file, ";; All loops found:\n\n");
3615 bfin_dump_loops (loops);
3618 /* Now apply the optimizations. */
3619 for (loop = loops; loop; loop = loop->next)
3620 bfin_optimize_loop (loop);
3624 fprintf (dump_file, ";; After hardware loops optimization:\n\n");
3625 bfin_dump_loops (loops);
3628 /* Free up the loop structures */
3633 VEC_free (loop_info, heap, loop->loops);
3634 VEC_free (basic_block, heap, loop->blocks);
3635 BITMAP_FREE (loop->block_bitmap);
3640 print_rtl (dump_file, get_insns ());
3646 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
3647 Returns true if we modified the insn chain, false otherwise. */
3649 gen_one_bundle (rtx slot[3])
3653 gcc_assert (slot[1] != NULL_RTX);
3655 /* Verify that we really can do the multi-issue. */
/* Only deleted-insn notes may sit between the slot insns; anything
   else means the three insns are not actually adjacent.  */
3658 rtx t = NEXT_INSN (slot[0]);
3659 while (t != slot[1])
3661 if (GET_CODE (t) != NOTE
3662 || NOTE_LINE_NUMBER (t) != NOTE_INSN_DELETED)
3669 rtx t = NEXT_INSN (slot[1]);
3670 while (t != slot[2])
3672 if (GET_CODE (t) != NOTE
3673 || NOTE_LINE_NUMBER (t) != NOTE_INSN_DELETED)
/* Fill empty slots with MNOP (32-bit slot) / NOP (16-bit slot).  */
3679 if (slot[0] == NULL_RTX)
3680 slot[0] = emit_insn_before (gen_mnop (), slot[1]);
3681 if (slot[2] == NULL_RTX)
3682 slot[2] = emit_insn_after (gen_nop (), slot[1]);
3684 /* Avoid line number information being printed inside one bundle. */
3685 if (INSN_LOCATOR (slot[1])
3686 && INSN_LOCATOR (slot[1]) != INSN_LOCATOR (slot[0]))
3687 INSN_LOCATOR (slot[1]) = INSN_LOCATOR (slot[0]);
3688 if (INSN_LOCATOR (slot[2])
3689 && INSN_LOCATOR (slot[2]) != INSN_LOCATOR (slot[0]))
3690 INSN_LOCATOR (slot[2]) = INSN_LOCATOR (slot[0]);
3692 /* Terminate them with "|| " instead of ";" in the output. */
3693 PUT_MODE (slot[0], SImode);
3694 PUT_MODE (slot[1], SImode);
3696 /* This is a cheat to avoid emit_insn's special handling of SEQUENCEs.
3697 Generating a PARALLEL first and changing its code later is the
3698 easiest way to emit a SEQUENCE insn. */
3699 bundle = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (3, slot[0], slot[1], slot[2]));
3700 emit_insn_before (bundle, slot[0]);
3701 remove_insn (slot[0]);
3702 remove_insn (slot[1]);
3703 remove_insn (slot[2]);
3704 PUT_CODE (bundle, SEQUENCE);
3709 /* Go through all insns, and use the information generated during scheduling
3710 to generate SEQUENCEs to represent bundles of instructions issued
3714 bfin_gen_bundles (void)
/* slot[0] = 32-bit DSP insn, slot[1]/slot[2] = 16-bit companion insns.  */
3723 slot[0] = slot[1] = slot[2] = NULL_RTX;
3724 for (insn = BB_HEAD (bb);; insn = next)
3729 if (get_attr_type (insn) == TYPE_DSP32)
3731 else if (slot[1] == NULL_RTX)
3738 next = NEXT_INSN (insn);
/* Skip over notes, USEs and CLOBBERs to find the next real insn.  */
3739 while (next && insn != BB_END (bb)
3741 && GET_CODE (PATTERN (next)) != USE
3742 && GET_CODE (PATTERN (next)) != CLOBBER))
3745 next = NEXT_INSN (insn);
3748 /* BB_END can change due to emitting extra NOPs, so check here. */
3749 at_end = insn == BB_END (bb);
/* TImode on the next insn marks a new issue group boundary.  */
3750 if (at_end || GET_MODE (next) == TImode)
3753 || !gen_one_bundle (slot))
3754 && slot[0] != NULL_RTX)
/* Bundling failed: strip the UNSPEC_32BIT wrapper so the lone DSP32
   insn is re-recognized in its normal form.  */
3756 rtx pat = PATTERN (slot[0]);
3757 if (GET_CODE (pat) == SET
3758 && GET_CODE (SET_SRC (pat)) == UNSPEC
3759 && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
3761 SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
3762 INSN_CODE (slot[0]) = -1;
3766 slot[0] = slot[1] = slot[2] = NULL_RTX;
3774 /* Return an insn type for INSN that can be used by the caller for anomaly
3775 workarounds. This differs from plain get_attr_type in that it handles
3778 static enum attr_type
3779 type_for_anomaly (rtx insn)
3781 rtx pat = PATTERN (insn);
/* For a bundle, examine the two 16-bit slot insns (elements 1 and 2)
   rather than the SEQUENCE wrapper itself.  */
3782 if (GET_CODE (pat) == SEQUENCE)
3785 t = get_attr_type (XVECEXP (pat, 0, 1));
3788 t = get_attr_type (XVECEXP (pat, 0, 2));
3794 return get_attr_type (insn);
3797 /* Return nonzero if INSN contains any loads that may trap. It handles
3798 SEQUENCEs correctly. */
3801 trapping_loads_p (rtx insn)
3803 rtx pat = PATTERN (insn);
/* For a bundle, a trapping load can only be in slot 1 or 2 (MCLD).  */
3804 if (GET_CODE (pat) == SEQUENCE)
3807 t = get_attr_type (XVECEXP (pat, 0, 1));
3808 if (t == TYPE_MCLD && may_trap_p (SET_SRC (XVECEXP (pat, 0, 1))))
3810 t = get_attr_type (XVECEXP (pat, 0, 2));
3811 if (t == TYPE_MCLD && may_trap_p (SET_SRC (XVECEXP (pat, 0, 2))))
3816 return may_trap_p (SET_SRC (single_set (insn)));
3819 /* We use the machine specific reorg pass for emitting CSYNC instructions
3820 after conditional branches as needed.
3822 The Blackfin is unusual in that a code sequence like
3825 may speculatively perform the load even if the condition isn't true. This
3826 happens for a branch that is predicted not taken, because the pipeline
3827 isn't flushed or stalled, so the early stages of the following instructions,
3828 which perform the memory reference, are allowed to execute before the
3829 jump condition is evaluated.
3830 Therefore, we must insert additional instructions in all places where this
3831 could lead to incorrect behavior. The manual recommends CSYNC, while
3832 VDSP seems to use NOPs (even though its corresponding compiler option is
3835 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
3836 When optimizing for size, we turn the branch into a predicted taken one.
3837 This may be slower due to mispredicts, but saves code size. */
/* NOTE(review): the function signature line (bfin_reorg, the
   TARGET_MACHINE_DEPENDENT_REORG hook) is among the lines missing from
   this copy; the body below starts with its local declarations.  */
3842 rtx insn, last_condjump = NULL_RTX;
3843 int cycles_since_jump = INT_MAX;
3845 /* We are freeing block_for_insn in the toplev to keep compatibility
3846 with old MDEP_REORGS that are not CFG based. Recompute it now. */
3847 compute_bb_for_insn ();
3849 if (bfin_flag_schedule_insns2)
3851 splitting_for_sched = 1;
3852 split_all_insns (0);
3853 splitting_for_sched = 0;
3855 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3857 timevar_push (TV_SCHED2);
3859 timevar_pop (TV_SCHED2);
3861 /* Examine the schedule and insert nops as necessary for 64 bit parallel
3863 bfin_gen_bundles ();
3866 /* Doloop optimization */
3867 if (cfun->machine->has_hardware_loops)
3868 bfin_reorg_loops (dump_file);
3870 if (! TARGET_SPECLD_ANOMALY && ! TARGET_CSYNC_ANOMALY)
3873 /* First pass: find predicted-false branches; if something after them
3874 needs nops, insert them or change the branch to predict true. */
3875 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3879 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
3882 pat = PATTERN (insn);
3883 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
3884 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
3885 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
/* Track the most recent predicted-not-taken conditional jump and the
   number of insn slots executed since it.  */
3890 if (any_condjump_p (insn)
3891 && ! cbranch_predicted_taken_p (insn))
3893 last_condjump = insn;
3894 cycles_since_jump = 0;
3897 cycles_since_jump = INT_MAX;
3899 else if (INSN_P (insn))
3901 enum attr_type type = type_for_anomaly (insn);
3902 int delay_needed = 0;
3903 if (cycles_since_jump < INT_MAX)
3904 cycles_since_jump++;
3906 if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
3908 if (trapping_loads_p (insn))
3911 else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
3914 if (delay_needed > cycles_since_jump)
3918 rtx *op = recog_data.operand;
3920 delay_needed -= cycles_since_jump;
3922 extract_insn (last_condjump);
/* Either flip the prediction (size) or pad with NOPs (speed).  */
3925 pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
3927 cycles_since_jump = INT_MAX;
3930 /* Do not adjust cycles_since_jump in this case, so that
3931 we'll increase the number of NOPs for a subsequent insn
3933 pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
3934 GEN_INT (delay_needed));
3935 PATTERN (last_condjump) = pat;
3936 INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
3940 /* Second pass: for predicted-true branches, see if anything at the
3941 branch destination needs extra nops. */
3942 if (! TARGET_CSYNC_ANOMALY)
3945 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3948 && any_condjump_p (insn)
3949 && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
3950 || cbranch_predicted_taken_p (insn)))
3952 rtx target = JUMP_LABEL (insn);
3954 cycles_since_jump = 0;
3955 for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
3959 if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
3962 pat = PATTERN (target);
3963 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
3964 || GET_CODE (pat) == ADDR_VEC
3965 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
3968 if (INSN_P (target))
3970 enum attr_type type = type_for_anomaly (target);
3971 int delay_needed = 0;
3972 if (cycles_since_jump < INT_MAX)
3973 cycles_since_jump++;
3975 if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
3978 if (delay_needed > cycles_since_jump)
3980 rtx prev = prev_real_insn (label);
3981 delay_needed -= cycles_since_jump;
3983 fprintf (dump_file, "Adding %d nops after %d\n",
3984 delay_needed, INSN_UID (label));
/* If the fall-through path already emits NOPs via
   cbranch_with_nops, deduct them from what we add here.  */
3986 && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
3993 "Reducing nops on insn %d.\n",
3996 x = XVECEXP (x, 0, 1);
3997 v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
3998 XVECEXP (x, 0, 0) = GEN_INT (v);
4000 while (delay_needed-- > 0)
4001 emit_insn_after (gen_nop (), label);
4009 if (bfin_flag_var_tracking)
4011 timevar_push (TV_VAR_TRACKING);
4012 variable_tracking_main ();
4013 timevar_pop (TV_VAR_TRACKING);
4017 /* Handle interrupt_handler, exception_handler and nmi_handler function
4018 attributes; arguments as in struct attribute_spec.handler. */
4021 handle_int_attribute (tree *node, tree name,
4022 tree args ATTRIBUTE_UNUSED,
4023 int flags ATTRIBUTE_UNUSED,
/* Attributes on a FUNCTION_DECL are applied to its type.  */
4027 if (TREE_CODE (x) == FUNCTION_DECL)
4030 if (TREE_CODE (x) != FUNCTION_TYPE)
4032 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4033 IDENTIFIER_POINTER (name));
4034 *no_add_attrs = true;
/* Reject a second handler-kind attribute on the same function.  */
4036 else if (funkind (x) != SUBROUTINE)
4037 error ("multiple function type attributes specified");
4042 /* Return 0 if the attributes for two types are incompatible, 1 if they
4043 are compatible, and 2 if they are nearly compatible (which causes a
4044 warning to be generated). */
4047 bfin_comp_type_attributes (tree type1, tree type2)
4049 e_funkind kind1, kind2;
/* Non-function types have no Blackfin-specific attributes to compare.  */
4051 if (TREE_CODE (type1) != FUNCTION_TYPE)
4054 kind1 = funkind (type1);
4055 kind2 = funkind (type2);
4060 /* Check for mismatched modifiers */
/* Each modifier must be present on both types or on neither.  */
4061 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
4062 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
4065 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
4066 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
4069 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
4070 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
4073 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
4074 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
4080 /* Handle a "longcall" or "shortcall" attribute; arguments as in
4081 struct attribute_spec.handler. */
4084 bfin_handle_longcall_attribute (tree *node, tree name,
4085 tree args ATTRIBUTE_UNUSED,
4086 int flags ATTRIBUTE_UNUSED,
4089 if (TREE_CODE (*node) != FUNCTION_TYPE
4090 && TREE_CODE (*node) != FIELD_DECL
4091 && TREE_CODE (*node) != TYPE_DECL)
4093 warning (OPT_Wattributes, "`%s' attribute only applies to functions",
4094 IDENTIFIER_POINTER (name));
4095 *no_add_attrs = true;
/* longcall and shortcall are mutually exclusive; reject whichever is
   applied second.  */
4098 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
4099 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
4100 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
4101 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
4103 warning (OPT_Wattributes,
4104 "can't apply both longcall and shortcall attributes to the same function");
4105 *no_add_attrs = true;
4111 /* Table of valid machine attributes. */
/* Terminated by the all-NULL sentinel entry, as required by the
   attribute_spec protocol.  */
4112 const struct attribute_spec bfin_attribute_table[] =
4114 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4115 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute },
4116 { "exception_handler", 0, 0, false, true, true, handle_int_attribute },
4117 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute },
4118 { "nesting", 0, 0, false, true, true, NULL },
4119 { "kspisusp", 0, 0, false, true, true, NULL },
4120 { "saveall", 0, 0, false, true, true, NULL },
4121 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
4122 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
4123 { NULL, 0, 0, false, false, false, NULL }
4126 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4127 tell the assembler to generate pointers to function descriptors in
4131 bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
4133 if (TARGET_FDPIC && size == UNITS_PER_WORD)
/* Function symbols become ".picptr funcdesc(sym)" so the linker emits
   a function-descriptor address rather than a code address.  */
4135 if (GET_CODE (value) == SYMBOL_REF
4136 && SYMBOL_REF_FUNCTION_P (value))
4138 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
4139 output_addr_const (asm_out_file, value);
4140 fputs (")\n", asm_out_file);
4145 /* We've set the unaligned SI op to NULL, so we always have to
4146 handle the unaligned case here. */
4147 assemble_integer_with_op ("\t.4byte\t", value);
4151 return default_assemble_integer (value, size, aligned_p);
4154 /* Output the assembler code for a thunk function. THUNK_DECL is the
4155 declaration for the thunk function itself, FUNCTION is the decl for
4156 the target function. DELTA is an immediate constant offset to be
4157 added to THIS. If VCALL_OFFSET is nonzero, the word at
4158 *(*this + vcall_offset) should be added to THIS. */
4161 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
4162 tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
4163 HOST_WIDE_INT vcall_offset, tree function)
4166 /* The this parameter is passed as the first argument. */
/* NOTE(review): using `this` as an identifier is valid C but breaks if
   this file is ever compiled as C++.  */
4167 rtx this = gen_rtx_REG (Pmode, REG_R0);
4169 /* Adjust the this parameter by a fixed constant. */
/* Small deltas fit the 7-bit signed add-immediate; slightly larger ones
   are split into two adds; anything else goes through r3.  */
4173 if (delta >= -64 && delta <= 63)
4175 xops[0] = GEN_INT (delta);
4176 output_asm_insn ("%1 += %0;", xops);
4178 else if (delta >= -128 && delta < -64)
4180 xops[0] = GEN_INT (delta + 64);
4181 output_asm_insn ("%1 += -64; %1 += %0;", xops);
4183 else if (delta > 63 && delta <= 126)
4185 xops[0] = GEN_INT (delta - 63);
4186 output_asm_insn ("%1 += 63; %1 += %0;", xops);
4190 xops[0] = GEN_INT (delta);
4191 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
4195 /* Adjust the this parameter by a value stored in the vtable. */
4198 rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
4199 rtx tmp = gen_rtx_REG (Pmode, REG_R2);
/* Load the vtable pointer (*this) into p2.  */
4203 output_asm_insn ("%2 = r0; %2 = [%2];", xops);
4205 /* Adjust the this parameter. */
4206 xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
4207 if (!memory_operand (xops[0], Pmode))
/* vcall_offset too large for a direct address: materialize it in p1
   and add it to p2 first.  */
4209 rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
4210 xops[0] = GEN_INT (vcall_offset);
4212 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
4213 xops[0] = gen_rtx_MEM (Pmode, p2tmp);
4216 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
/* Tail-call the real function.  */
4219 xops[0] = XEXP (DECL_RTL (function), 0);
4220 if (1 || !flag_pic || (*targetm.binds_local_p) (function))
4221 output_asm_insn ("jump.l\t%P0", xops);
4224 /* Codes for all the Blackfin builtins. */
/* Naming scheme: _2X16 operates on a V2HI pair of 16-bit fractions,
   _1X16 on a single 16-bit fraction, _1X32 on a 32-bit fraction.
   SS-prefixed codes are signed-saturating variants.  */
4229 BFIN_BUILTIN_COMPOSE_2X16,
4230 BFIN_BUILTIN_EXTRACTLO,
4231 BFIN_BUILTIN_EXTRACTHI,
4233 BFIN_BUILTIN_SSADD_2X16,
4234 BFIN_BUILTIN_SSSUB_2X16,
4235 BFIN_BUILTIN_SSADDSUB_2X16,
4236 BFIN_BUILTIN_SSSUBADD_2X16,
4237 BFIN_BUILTIN_MULT_2X16,
4238 BFIN_BUILTIN_MULTR_2X16,
4239 BFIN_BUILTIN_NEG_2X16,
4240 BFIN_BUILTIN_ABS_2X16,
4241 BFIN_BUILTIN_MIN_2X16,
4242 BFIN_BUILTIN_MAX_2X16,
4244 BFIN_BUILTIN_SSADD_1X16,
4245 BFIN_BUILTIN_SSSUB_1X16,
4246 BFIN_BUILTIN_MULT_1X16,
4247 BFIN_BUILTIN_MULTR_1X16,
4248 BFIN_BUILTIN_NORM_1X16,
4249 BFIN_BUILTIN_NEG_1X16,
4250 BFIN_BUILTIN_ABS_1X16,
4251 BFIN_BUILTIN_MIN_1X16,
4252 BFIN_BUILTIN_MAX_1X16,
4254 BFIN_BUILTIN_DIFFHL_2X16,
4255 BFIN_BUILTIN_DIFFLH_2X16,
4257 BFIN_BUILTIN_SSADD_1X32,
4258 BFIN_BUILTIN_SSSUB_1X32,
4259 BFIN_BUILTIN_NORM_1X32,
4260 BFIN_BUILTIN_NEG_1X32,
4261 BFIN_BUILTIN_MIN_1X32,
4262 BFIN_BUILTIN_MAX_1X32,
4263 BFIN_BUILTIN_MULT_1X32,
/* Half-word x word multiplies, selecting low/high 16-bit halves.  */
4265 BFIN_BUILTIN_MULHISILL,
4266 BFIN_BUILTIN_MULHISILH,
4267 BFIN_BUILTIN_MULHISIHL,
4268 BFIN_BUILTIN_MULHISIHH,
/* Logical and saturating-arithmetic shifts.  */
4270 BFIN_BUILTIN_LSHIFT_1X16,
4271 BFIN_BUILTIN_LSHIFT_2X16,
4272 BFIN_BUILTIN_SSASHIFT_1X16,
4273 BFIN_BUILTIN_SSASHIFT_2X16,
/* 16-bit complex multiply / multiply-accumulate / multiply-subtract.  */
4275 BFIN_BUILTIN_CPLX_MUL_16,
4276 BFIN_BUILTIN_CPLX_MAC_16,
4277 BFIN_BUILTIN_CPLX_MSU_16,
/* Convenience wrapper: register builtin NAME with function type TYPE and
   machine-specific code CODE (one of the BFIN_BUILTIN_* enum values).  */
4282 #define def_builtin(NAME, TYPE, CODE) \
4284 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4288 /* Set up all builtin functions for this target. */
/* First build the tree function-type nodes needed by the various builtins
   (named return_ftype_args), then register each builtin with def_builtin.
   The __builtin_bfin_* names mirror the Blackfin DSP intrinsic library.  */
4290 bfin_init_builtins (void)
4292 tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
4293 tree void_ftype_void
4294 = build_function_type (void_type_node, void_list_node);
4295 tree short_ftype_short
4296 = build_function_type_list (short_integer_type_node, short_integer_type_node,
4298 tree short_ftype_int_int
4299 = build_function_type_list (short_integer_type_node, integer_type_node,
4300 integer_type_node, NULL_TREE);
4301 tree int_ftype_int_int
4302 = build_function_type_list (integer_type_node, integer_type_node,
4303 integer_type_node, NULL_TREE);
4305 = build_function_type_list (integer_type_node, integer_type_node,
4307 tree short_ftype_int
4308 = build_function_type_list (short_integer_type_node, integer_type_node,
4310 tree int_ftype_v2hi_v2hi
4311 = build_function_type_list (integer_type_node, V2HI_type_node,
4312 V2HI_type_node, NULL_TREE);
4313 tree v2hi_ftype_v2hi_v2hi
4314 = build_function_type_list (V2HI_type_node, V2HI_type_node,
4315 V2HI_type_node, NULL_TREE);
4316 tree v2hi_ftype_v2hi_v2hi_v2hi
4317 = build_function_type_list (V2HI_type_node, V2HI_type_node,
4318 V2HI_type_node, V2HI_type_node, NULL_TREE);
4319 tree v2hi_ftype_int_int
4320 = build_function_type_list (V2HI_type_node, integer_type_node,
4321 integer_type_node, NULL_TREE);
4322 tree v2hi_ftype_v2hi_int
4323 = build_function_type_list (V2HI_type_node, V2HI_type_node,
4324 integer_type_node, NULL_TREE);
4325 tree int_ftype_short_short
4326 = build_function_type_list (integer_type_node, short_integer_type_node,
4327 short_integer_type_node, NULL_TREE);
4328 tree v2hi_ftype_v2hi
4329 = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
4330 tree short_ftype_v2hi
4331 = build_function_type_list (short_integer_type_node, V2HI_type_node,
/* NOTE(review): the comment below mentions MMX; presumably copied from the
   i386 back end — these are Blackfin DSP builtins, not MMX.  */
4334 /* Add the remaining MMX insns with somewhat more complicated types. */
4335 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
4336 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
/* Vector construction / extraction.  */
4338 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
4339 BFIN_BUILTIN_COMPOSE_2X16);
4340 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
4341 BFIN_BUILTIN_EXTRACTHI);
4342 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
4343 BFIN_BUILTIN_EXTRACTLO);
/* 2x16 vector fractional arithmetic.  */
4345 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
4346 BFIN_BUILTIN_MIN_2X16);
4347 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
4348 BFIN_BUILTIN_MAX_2X16);
4350 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
4351 BFIN_BUILTIN_SSADD_2X16);
4352 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
4353 BFIN_BUILTIN_SSSUB_2X16);
4354 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
4355 BFIN_BUILTIN_SSADDSUB_2X16);
4356 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
4357 BFIN_BUILTIN_SSSUBADD_2X16);
4358 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
4359 BFIN_BUILTIN_MULT_2X16);
4360 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
4361 BFIN_BUILTIN_MULTR_2X16);
4362 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
4363 BFIN_BUILTIN_NEG_2X16);
4364 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
4365 BFIN_BUILTIN_ABS_2X16);
/* Scalar 1x16 fractional arithmetic.  */
4367 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
4368 BFIN_BUILTIN_SSADD_1X16);
4369 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
4370 BFIN_BUILTIN_SSSUB_1X16);
4371 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
4372 BFIN_BUILTIN_MULT_1X16);
4373 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
4374 BFIN_BUILTIN_MULTR_1X16);
4375 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
4376 BFIN_BUILTIN_NEG_1X16);
4377 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
4378 BFIN_BUILTIN_ABS_1X16);
4379 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
4380 BFIN_BUILTIN_NORM_1X16);
/* High-minus-low / low-minus-high half differences of a V2HI.  */
4382 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
4383 BFIN_BUILTIN_DIFFHL_2X16);
4384 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
4385 BFIN_BUILTIN_DIFFLH_2X16);
/* Half-word multiplies producing a 32-bit result.  */
4387 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
4388 BFIN_BUILTIN_MULHISILL);
4389 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
4390 BFIN_BUILTIN_MULHISIHL);
4391 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
4392 BFIN_BUILTIN_MULHISILH);
4393 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
4394 BFIN_BUILTIN_MULHISIHH);
/* Scalar 1x32 fractional arithmetic.  */
4396 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
4397 BFIN_BUILTIN_SSADD_1X32);
4398 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
4399 BFIN_BUILTIN_SSSUB_1X32);
4400 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
4401 BFIN_BUILTIN_NEG_1X32);
4402 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
4403 BFIN_BUILTIN_NORM_1X32);
4404 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
4405 BFIN_BUILTIN_MULT_1X32);
/* Shifts (saturating-arithmetic and logical).  */
4408 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
4409 BFIN_BUILTIN_SSASHIFT_1X16);
4410 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
4411 BFIN_BUILTIN_SSASHIFT_2X16);
4412 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
4413 BFIN_BUILTIN_LSHIFT_1X16);
4414 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
4415 BFIN_BUILTIN_LSHIFT_2X16);
4417 /* Complex numbers. */
4418 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
4419 BFIN_BUILTIN_CPLX_MUL_16);
4420 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
4421 BFIN_BUILTIN_CPLX_MAC_16);
4422 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
4423 BFIN_BUILTIN_CPLX_MSU_16);
/* Describes one expandable builtin: the insn pattern to generate, the
   user-visible name, and the BFIN_BUILTIN_* code it maps to.  The bdesc
   table initializers below carry a fourth value (the MAC flag, -1 for
   plain operations) — presumably a fourth field declared on a line not
   visible here; confirm against the full struct definition.  */
4427 struct builtin_description
4429 const enum insn_code icode;
4430 const char *const name;
4431 const enum bfin_builtins code;
/* Two-operand builtins handled generically by bfin_expand_binop_builtin.
   The last initializer is the MAC flag: -1 for ordinary binops, or a
   MACFLAG_* constant for the flag_mul* multiply patterns.  Note that the
   "mult" variants use MACFLAG_T (truncating) while "multr" variants use
   MACFLAG_NONE (rounding).  */
4435 static const struct builtin_description bdesc_2arg[] =
4437 { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },
4439 { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
4440 { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
4441 { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
4442 { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
4444 { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
4445 { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
4446 { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
4447 { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },
4449 { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
4450 { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
4451 { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
4452 { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },
4454 { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
4455 { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
4456 { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
4457 { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
4458 { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
4459 { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },
4461 { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
4462 { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
4463 { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
4464 { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
4465 { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE }
/* One-operand builtins handled generically by bfin_expand_unop_builtin.
   The trailing 0 is the (unused) MAC-flag slot.  */
4468 static const struct builtin_description bdesc_1arg[] =
4470 { CODE_FOR_signbitshi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
4471 { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
4472 { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },
4474 { CODE_FOR_signbitssi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
4475 { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
4477 { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
4478 { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
4479 { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
4480 { CODE_FOR_absv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
4483 /* Errors in the source file can cause expand_expr to return const0_rtx
4484 where we expect a vector. To avoid crashing, use one of the vector
4485 clear instructions. */
4487 safe_vector_operand (rtx x, enum machine_mode mode)
/* Anything other than the const0_rtx error placeholder is passed through
   unchanged; the placeholder is replaced by a freshly zeroed SImode
   register viewed in the requested vector mode.  */
4489 if (x != const0_rtx)
4491 x = gen_reg_rtx (SImode);
4493 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
4494 return gen_lowpart (mode, x);
4497 /* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
4498 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
4501 bfin_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target,
/* Expand the two arguments, then massage operands to match the insn
   pattern's expected modes before emitting the pattern.  */
4505 tree arg0 = TREE_VALUE (arglist);
4506 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4507 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4508 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4509 enum machine_mode op0mode = GET_MODE (op0);
4510 enum machine_mode op1mode = GET_MODE (op1);
4511 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4512 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4513 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* Guard against const0_rtx error placeholders for vector operands.  */
4515 if (VECTOR_MODE_P (mode0))
4516 op0 = safe_vector_operand (op0, mode0);
4517 if (VECTOR_MODE_P (mode1))
4518 op1 = safe_vector_operand (op1, mode1);
/* Reuse TARGET only if it has the right mode and satisfies the output
   operand's predicate; otherwise allocate a fresh pseudo.  */
4521 || GET_MODE (target) != tmode
4522 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4523 target = gen_reg_rtx (tmode);
/* Builtins declare int arguments for HImode operands; narrow them.  */
4525 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
4528 op0 = gen_lowpart (HImode, op0);
4530 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
4533 op1 = gen_lowpart (HImode, op1);
4535 /* In case the insn wants input operands in modes different from
4536 the result, abort. */
4537 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
4538 && (op1mode == mode1 || op1mode == VOIDmode));
4540 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4541 op0 = copy_to_mode_reg (mode0, op0);
4542 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4543 op1 = copy_to_mode_reg (mode1, op1);
/* MAC-flag patterns take the flag as an extra constant operand.  */
4546 pat = GEN_FCN (icode) (target, op0, op1);
4548 pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
4556 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
4559 bfin_expand_unop_builtin (enum insn_code icode, tree arglist,
/* Same operand-massaging scheme as the binop helper, for one operand.  */
4563 tree arg0 = TREE_VALUE (arglist);
4564 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4565 enum machine_mode op0mode = GET_MODE (op0);
4566 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4567 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
/* Reuse TARGET only if its mode and predicate match the pattern.  */
4570 || GET_MODE (target) != tmode
4571 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4572 target = gen_reg_rtx (tmode);
4574 if (VECTOR_MODE_P (mode0))
4575 op0 = safe_vector_operand (op0, mode0);
/* Builtins declare int arguments for HImode operands; narrow them.  */
4577 if (op0mode == SImode && mode0 == HImode)
4580 op0 = gen_lowpart (HImode, op0);
4582 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
4584 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4585 op0 = copy_to_mode_reg (mode0, op0);
4587 pat = GEN_FCN (icode) (target, op0);
4594 /* Expand an expression EXP that calls a built-in function,
4595 with result going to TARGET if that's convenient
4596 (and in mode MODE if that's convenient).
4597 SUBTARGET may be used as the target for computing one of EXP's operands.
4598 IGNORE is nonzero if the value is to be ignored. */
4601 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
4602 rtx subtarget ATTRIBUTE_UNUSED,
4603 enum machine_mode mode ATTRIBUTE_UNUSED,
4604 int ignore ATTRIBUTE_UNUSED)
4607 enum insn_code icode;
4608 const struct builtin_description *d;
4609 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4610 tree arglist = TREE_OPERAND (exp, 1);
4611 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4612 tree arg0, arg1, arg2;
4613 rtx op0, op1, op2, accvec, pat, tmp1, tmp2;
4614 enum machine_mode tmode, mode0;
/* Special cases first; everything not handled here falls through to the
   generic bdesc_2arg / bdesc_1arg table lookup at the bottom.  */
4618 case BFIN_BUILTIN_CSYNC:
4619 emit_insn (gen_csync ());
4621 case BFIN_BUILTIN_SSYNC:
4622 emit_insn (gen_ssync ());
/* diff_hl/diff_lh: subtract the two halves of a single V2HI, hence the
   same operand is passed twice to the sub pattern below.  */
4625 case BFIN_BUILTIN_DIFFHL_2X16:
4626 case BFIN_BUILTIN_DIFFLH_2X16:
4627 arg0 = TREE_VALUE (arglist);
4628 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4629 icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16
4630 ? CODE_FOR_subhilov2hi3 : CODE_FOR_sublohiv2hi3);
4631 tmode = insn_data[icode].operand[0].mode;
4632 mode0 = insn_data[icode].operand[1].mode;
4635 || GET_MODE (target) != tmode
4636 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4637 target = gen_reg_rtx (tmode);
4639 if (VECTOR_MODE_P (mode0))
4640 op0 = safe_vector_operand (op0, mode0);
4642 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4643 op0 = copy_to_mode_reg (mode0, op0);
/* Intentionally op0 twice: both pattern inputs are the same vector.  */
4645 pat = GEN_FCN (icode) (target, op0, op0);
/* Complex 16-bit multiply: two MAC steps through an accumulator pair.  */
4651 case BFIN_BUILTIN_CPLX_MUL_16:
4652 arg0 = TREE_VALUE (arglist);
4653 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4654 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4655 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4656 accvec = gen_reg_rtx (V2PDImode);
/* NOTE(review): in this and the MAC/MSU cases below, ICODE and TMODE
   appear to be read without having been assigned on this path — the
   predicate check and gen_reg_rtx (tmode) presumably rely on stale or
   uninitialized values.  Verify against the full source; later GCC
   revisions restructured this code.  */
4659 || GET_MODE (target) != V2HImode
4660 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
4661 target = gen_reg_rtx (tmode);
4662 if (! register_operand (op0, GET_MODE (op0)))
4663 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
4664 if (! register_operand (op1, GET_MODE (op1)))
4665 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
/* First MAC initializes the accumulators from the low halves...  */
4667 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
4668 const0_rtx, const0_rtx,
4669 const1_rtx, GEN_INT (MACFLAG_NONE)));
/* ...second MAC combines the high halves and writes the result.  */
4670 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
4671 const1_rtx, const1_rtx,
4672 const0_rtx, accvec, const1_rtx, const0_rtx,
4673 GEN_INT (MACFLAG_NONE), accvec));
/* Complex multiply-accumulate / multiply-subtract: the third argument is
   the running complex sum, preloaded into the accumulator pair.  */
4677 case BFIN_BUILTIN_CPLX_MAC_16:
4678 case BFIN_BUILTIN_CPLX_MSU_16:
4679 arg0 = TREE_VALUE (arglist);
4680 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4681 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4682 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4683 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4684 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4685 accvec = gen_reg_rtx (V2PDImode);
4688 || GET_MODE (target) != V2HImode
4689 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
4690 target = gen_reg_rtx (tmode);
4691 if (! register_operand (op0, GET_MODE (op0)))
4692 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
4693 if (! register_operand (op1, GET_MODE (op1)))
4694 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
/* Split op2's two 16-bit halves into the two accumulators: tmp1 gets the
   low half shifted to the top, tmp2 gets the high half with its low
   16 bits cleared.  */
4696 tmp1 = gen_reg_rtx (SImode);
4697 tmp2 = gen_reg_rtx (SImode);
4698 emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op2), GEN_INT (16)));
4699 emit_move_insn (tmp2, gen_lowpart (SImode, op2));
4700 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
4701 emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
4702 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op0, op1, const0_rtx,
4703 const0_rtx, const0_rtx,
4704 const1_rtx, accvec, const0_rtx,
4706 GEN_INT (MACFLAG_W32)));
/* MAC adds the second product, MSU subtracts it — selected via the
   two constant flag operands below.  */
4707 tmp1 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const1_rtx : const0_rtx);
4708 tmp2 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const0_rtx : const1_rtx);
4709 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
4710 const1_rtx, const1_rtx,
4711 const0_rtx, accvec, tmp1, tmp2,
4712 GEN_INT (MACFLAG_NONE), accvec));
/* Fallback: look the builtin up in the generic two- and one-operand
   tables and dispatch to the corresponding expander.  */
4720 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4721 if (d->code == fcode)
4722 return bfin_expand_binop_builtin (d->icode, arglist, target,
4725 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4726 if (d->code == fcode)
4727 return bfin_expand_unop_builtin (d->icode, arglist, target);
/* Initialize the GCC target structure: override the default target hooks
   with the Blackfin-specific implementations defined in this file, then
   instantiate targetm from the resulting macro set.  */
4732 #undef TARGET_INIT_BUILTINS
4733 #define TARGET_INIT_BUILTINS bfin_init_builtins
4735 #undef TARGET_EXPAND_BUILTIN
4736 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
4738 #undef TARGET_ASM_GLOBALIZE_LABEL
4739 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
4741 #undef TARGET_ASM_FILE_START
4742 #define TARGET_ASM_FILE_START output_file_start
4744 #undef TARGET_ATTRIBUTE_TABLE
4745 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
4747 #undef TARGET_COMP_TYPE_ATTRIBUTES
4748 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
4750 #undef TARGET_RTX_COSTS
4751 #define TARGET_RTX_COSTS bfin_rtx_costs
4753 #undef TARGET_ADDRESS_COST
4754 #define TARGET_ADDRESS_COST bfin_address_cost
4756 #undef TARGET_ASM_INTERNAL_LABEL
4757 #define TARGET_ASM_INTERNAL_LABEL bfin_internal_label
4759 #undef TARGET_ASM_INTEGER
4760 #define TARGET_ASM_INTEGER bfin_assemble_integer
4762 #undef TARGET_MACHINE_DEPENDENT_REORG
4763 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
4765 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
4766 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
4768 #undef TARGET_ASM_OUTPUT_MI_THUNK
4769 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
4770 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
4771 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
4773 #undef TARGET_SCHED_ADJUST_COST
4774 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
4776 #undef TARGET_SCHED_ISSUE_RATE
4777 #define TARGET_SCHED_ISSUE_RATE bfin_issue_rate
4779 #undef TARGET_PROMOTE_PROTOTYPES
4780 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
4781 #undef TARGET_PROMOTE_FUNCTION_ARGS
4782 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
4783 #undef TARGET_PROMOTE_FUNCTION_RETURN
4784 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
4786 #undef TARGET_ARG_PARTIAL_BYTES
4787 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
4789 #undef TARGET_PASS_BY_REFERENCE
4790 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
4792 #undef TARGET_SETUP_INCOMING_VARARGS
4793 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
4795 #undef TARGET_STRUCT_VALUE_RTX
4796 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
4798 #undef TARGET_VECTOR_MODE_SUPPORTED_P
4799 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
4801 #undef TARGET_HANDLE_OPTION
4802 #define TARGET_HANDLE_OPTION bfin_handle_option
4804 #undef TARGET_DEFAULT_TARGET_FLAGS
4805 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
4807 #undef TARGET_SECONDARY_RELOAD
4808 #define TARGET_SECONDARY_RELOAD bfin_secondary_reload
4810 #undef TARGET_DELEGITIMIZE_ADDRESS
4811 #define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
4813 #undef TARGET_CANNOT_FORCE_CONST_MEM
4814 #define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem
/* The single definition of the target hook vector for this back end.  */
4816 struct gcc_target targetm = TARGET_INITIALIZER;