1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "insn-codes.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
41 #include "target-def.h"
47 #include "integrate.h"
49 #include "langhooks.h"
50 #include "bfin-protos.h"
52 #include "tm-constrs.h"
54 #include "basic-block.h"
55 #include "cfglayout.h"
59 /* A C structure for machine-specific, per-function data.
60 This is added to the cfun structure. */
61 struct machine_function GTY(())
63 int has_hardware_loops;
66 /* Test and compare insns in bfin.md store the information needed to
67 generate branch and scc insns here. */
68 rtx bfin_compare_op0, bfin_compare_op1;
70 /* RTX for condition code flag register and RETS register */
71 extern GTY(()) rtx bfin_cc_rtx;
72 extern GTY(()) rtx bfin_rets_rtx;
73 rtx bfin_cc_rtx, bfin_rets_rtx;
75 int max_arg_registers = 0;
77 /* Arrays used when emitting register names. */
78 const char *short_reg_names[] = SHORT_REGISTER_NAMES;
79 const char *high_reg_names[] = HIGH_REGISTER_NAMES;
80 const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
81 const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
83 static int arg_regs[] = FUNCTION_ARG_REGISTERS;
85 /* Nonzero if -mshared-library-id was given. */
86 static int bfin_lib_id_given;
88 /* Nonzero if -fschedule-insns2 was given. We override it and
89 call the scheduler ourselves during reorg. */
90 static int bfin_flag_schedule_insns2;
92 /* Determines whether we run variable tracking in machine dependent reorganization.  */
94 static int bfin_flag_var_tracking;
97 bfin_cpu_t bfin_cpu_type = BFIN_CPU_UNKNOWN;
99 /* -msi-revision support. Special values:
100 -1 -msi-revision=none.
101 0xffff -msi-revision=any. */
102 int bfin_si_revision;
104 /* The workarounds enabled */
105 unsigned int bfin_workarounds = 0;
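/* The bfin_cpus[] table below lists, for every -mcpu= name, the CPU type,
   one silicon revision per entry (newest revision first) and the WA_*
   workaround mask that revision needs.  bfin_handle_option scans it to
   resolve -mcpu=NAME[-SI_REVISION]; e.g. -mcpu=bf532-0.3 selects the
   bf532/0x0003 entry and enables the speculative-load, speculative-sync
   and RETS workarounds listed there.  */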
112 unsigned int workarounds;
115 struct bfin_cpu bfin_cpus[] =
117 {"bf522", BFIN_CPU_BF522, 0x0000,
118 WA_SPECULATIVE_LOADS | WA_RETS},
120 {"bf523", BFIN_CPU_BF523, 0x0000,
121 WA_SPECULATIVE_LOADS | WA_RETS},
123 {"bf524", BFIN_CPU_BF524, 0x0000,
124 WA_SPECULATIVE_LOADS | WA_RETS},
126 {"bf525", BFIN_CPU_BF525, 0x0000,
127 WA_SPECULATIVE_LOADS | WA_RETS},
129 {"bf526", BFIN_CPU_BF526, 0x0000,
130 WA_SPECULATIVE_LOADS | WA_RETS},
132 {"bf527", BFIN_CPU_BF527, 0x0000,
133 WA_SPECULATIVE_LOADS | WA_RETS},
135 {"bf531", BFIN_CPU_BF531, 0x0005,
136 WA_SPECULATIVE_LOADS | WA_RETS},
137 {"bf531", BFIN_CPU_BF531, 0x0004,
138 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
139 {"bf531", BFIN_CPU_BF531, 0x0003,
140 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
142 {"bf532", BFIN_CPU_BF532, 0x0005,
143 WA_SPECULATIVE_LOADS | WA_RETS},
144 {"bf532", BFIN_CPU_BF532, 0x0004,
145 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
146 {"bf532", BFIN_CPU_BF532, 0x0003,
147 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
149 {"bf533", BFIN_CPU_BF533, 0x0005,
150 WA_SPECULATIVE_LOADS | WA_RETS},
151 {"bf533", BFIN_CPU_BF533, 0x0004,
152 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
153 {"bf533", BFIN_CPU_BF533, 0x0003,
154 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
156 {"bf534", BFIN_CPU_BF534, 0x0003,
157 WA_SPECULATIVE_LOADS | WA_RETS},
158 {"bf534", BFIN_CPU_BF534, 0x0002,
159 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
160 {"bf534", BFIN_CPU_BF534, 0x0001,
161 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
163 {"bf536", BFIN_CPU_BF536, 0x0003,
164 WA_SPECULATIVE_LOADS | WA_RETS},
165 {"bf536", BFIN_CPU_BF536, 0x0002,
166 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
167 {"bf536", BFIN_CPU_BF536, 0x0001,
168 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
170 {"bf537", BFIN_CPU_BF537, 0x0003,
171 WA_SPECULATIVE_LOADS | WA_RETS},
172 {"bf537", BFIN_CPU_BF537, 0x0002,
173 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
174 {"bf537", BFIN_CPU_BF537, 0x0001,
175 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
177 {"bf538", BFIN_CPU_BF538, 0x0004,
178 WA_SPECULATIVE_LOADS | WA_RETS},
179 {"bf538", BFIN_CPU_BF538, 0x0003,
180 WA_SPECULATIVE_LOADS | WA_RETS},
182 {"bf539", BFIN_CPU_BF539, 0x0004,
183 WA_SPECULATIVE_LOADS | WA_RETS},
184 {"bf539", BFIN_CPU_BF539, 0x0003,
185 WA_SPECULATIVE_LOADS | WA_RETS},
186 {"bf539", BFIN_CPU_BF539, 0x0002,
187 WA_SPECULATIVE_LOADS | WA_RETS},
189 {"bf542", BFIN_CPU_BF542, 0x0000,
190 WA_SPECULATIVE_LOADS | WA_RETS},
192 {"bf544", BFIN_CPU_BF544, 0x0000,
193 WA_SPECULATIVE_LOADS | WA_RETS},
195 {"bf547", BFIN_CPU_BF547, 0x0000,
196 WA_SPECULATIVE_LOADS | WA_RETS},
198 {"bf548", BFIN_CPU_BF548, 0x0000,
199 WA_SPECULATIVE_LOADS | WA_RETS},
201 {"bf549", BFIN_CPU_BF549, 0x0000,
202 WA_SPECULATIVE_LOADS | WA_RETS},
204 {"bf561", BFIN_CPU_BF561, 0x0005, WA_RETS},
205 {"bf561", BFIN_CPU_BF561, 0x0003,
206 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
207 {"bf561", BFIN_CPU_BF561, 0x0002,
208 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
213 int splitting_for_sched;
216 bfin_globalize_label (FILE *stream, const char *name)
218 fputs (".global ", stream);
219 assemble_name (stream, name);
225 output_file_start (void)
227 FILE *file = asm_out_file;
230 /* Variable tracking should be run after all optimizations which change order
231 of insns. It also needs a valid CFG. This can't be done in
232 override_options, because flag_var_tracking is finalized after that.  */
234 bfin_flag_var_tracking = flag_var_tracking;
235 flag_var_tracking = 0;
237 fprintf (file, ".file \"%s\";\n", input_filename);
239 for (i = 0; arg_regs[i] >= 0; i++)
241 max_arg_registers = i; /* how many arg regs are used */
244 /* Called early in the compilation to conditionally modify
245 fixed_regs/call_used_regs. */
248 conditional_register_usage (void)
250 /* Initialize the condition code flag register rtx.  */
251 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
252 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
255 /* Examine machine-dependent attributes of function type FUNTYPE and return its
256 kind.  See the definition of E_FUNKIND.  */
259 funkind (const_tree funtype)
261 tree attrs = TYPE_ATTRIBUTES (funtype);
262 if (lookup_attribute ("interrupt_handler", attrs))
263 return INTERRUPT_HANDLER;
264 else if (lookup_attribute ("exception_handler", attrs))
265 return EXCPT_HANDLER;
266 else if (lookup_attribute ("nmi_handler", attrs))
272 /* Legitimize PIC addresses. If the address is already position-independent,
273 we return ORIG. Newly generated position-independent addresses go into a
274 reg. This is REG if nonzero, otherwise we allocate register(s) as
275 necessary.  PICREG is the register holding the pointer to the PIC offset table.  */
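/* With -mid-shared-library the symbol is wrapped in an UNSPEC_MOVE_PIC and
   loaded GOT-style through PICREG; under FDPIC, data references use
   UNSPEC_MOVE_FDPIC and function addresses UNSPEC_FUNCDESC_GOT17M4.
   print_operand renders these as "sym@GOT", "sym@GOT17M4" and
   "sym@FUNCDESC_GOT17M4", so the result is roughly a load such as
   "P1 = [P5 + sym@GOT];" (register choice illustrative only).  */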
279 legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
284 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
289 if (TARGET_ID_SHARED_LIBRARY)
290 unspec = UNSPEC_MOVE_PIC;
291 else if (GET_CODE (addr) == SYMBOL_REF
292 && SYMBOL_REF_FUNCTION_P (addr))
293 unspec = UNSPEC_FUNCDESC_GOT17M4;
295 unspec = UNSPEC_MOVE_FDPIC;
299 gcc_assert (can_create_pseudo_p ());
300 reg = gen_reg_rtx (Pmode);
303 tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
304 new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));
306 emit_move_insn (reg, new_rtx);
307 if (picreg == pic_offset_table_rtx)
308 crtl->uses_pic_offset_table = 1;
312 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
316 if (GET_CODE (addr) == CONST)
318 addr = XEXP (addr, 0);
319 gcc_assert (GET_CODE (addr) == PLUS);
322 if (XEXP (addr, 0) == picreg)
327 gcc_assert (can_create_pseudo_p ());
328 reg = gen_reg_rtx (Pmode);
331 base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
332 addr = legitimize_pic_address (XEXP (addr, 1),
333 base == reg ? NULL_RTX : reg,
336 if (GET_CODE (addr) == CONST_INT)
338 gcc_assert (! reload_in_progress && ! reload_completed);
339 addr = force_reg (Pmode, addr);
342 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
344 base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
345 addr = XEXP (addr, 1);
348 return gen_rtx_PLUS (Pmode, base, addr);
354 /* Stack frame layout. */
356 /* For a given REGNO, determine whether it must be saved in the function
357 prologue. IS_INTHANDLER specifies whether we're generating a normal
358 prologue or an interrupt/exception one. */
360 must_save_p (bool is_inthandler, unsigned regno)
362 if (D_REGNO_P (regno))
364 bool is_eh_return_reg = false;
365 if (crtl->calls_eh_return)
370 unsigned test = EH_RETURN_DATA_REGNO (j);
371 if (test == INVALID_REGNUM)
374 is_eh_return_reg = true;
378 return (is_eh_return_reg
379 || (df_regs_ever_live_p (regno)
380 && !fixed_regs[regno]
381 && (is_inthandler || !call_used_regs[regno])));
383 else if (P_REGNO_P (regno))
385 return ((df_regs_ever_live_p (regno)
386 && !fixed_regs[regno]
387 && (is_inthandler || !call_used_regs[regno]))
389 && regno == PIC_OFFSET_TABLE_REGNUM
390 && (crtl->uses_pic_offset_table
391 || (TARGET_ID_SHARED_LIBRARY && !current_function_is_leaf))));
394 return ((is_inthandler || !call_used_regs[regno])
395 && (df_regs_ever_live_p (regno)
396 || (!leaf_function_p () && call_used_regs[regno])));
400 /* Compute the number of DREGS to save with a push_multiple operation.
401 This could include registers that aren't modified in the function,
402 since push_multiple only takes a range of registers.
403 If IS_INTHANDLER, then everything that is live must be saved, even
404 if normally call-clobbered.
405 If CONSECUTIVE, return the number of registers we can save in one
406 instruction with a push/pop multiple instruction. */
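/* The push/pop multiple instructions only take a contiguous range of
   registers ending at R7 (dregs) or P5 (pregs); e.g.
   "[--SP] = (R7:5, P5:3);" saves R5-R7 and P3-P5 in a single insn.
   Hence the loops below scan downwards from R7/P5, and the CONSECUTIVE
   variant stops counting at the first register that need not be saved.  */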
409 n_dregs_to_save (bool is_inthandler, bool consecutive)
414 for (i = REG_R7 + 1; i-- != REG_R0;)
416 if (must_save_p (is_inthandler, i))
418 else if (consecutive)
424 /* Like n_dregs_to_save, but compute number of PREGS to save. */
427 n_pregs_to_save (bool is_inthandler, bool consecutive)
432 for (i = REG_P5 + 1; i-- != REG_P0;)
433 if (must_save_p (is_inthandler, i))
435 else if (consecutive)
440 /* Determine if we are going to save the frame pointer in the prologue. */
443 must_save_fp_p (void)
445 return frame_pointer_needed || df_regs_ever_live_p (REG_FP);
449 stack_frame_needed_p (void)
451 /* EH return puts a new return address into the frame using an
452 address relative to the frame pointer. */
453 if (crtl->calls_eh_return)
455 return frame_pointer_needed;
458 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
459 must save all registers; this is used for interrupt handlers.
460 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
461 this for an interrupt (or exception) handler. */
464 expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
466 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
467 rtx predec = gen_rtx_MEM (SImode, predec1);
468 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
469 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
470 int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
471 int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
473 int total_consec = ndregs_consec + npregs_consec;
476 if (saveall || is_inthandler)
478 rtx insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
479 RTX_FRAME_RELATED_P (insn) = 1;
482 if (total_consec != 0)
485 rtx val = GEN_INT (-total_consec * 4);
486 rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));
488 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
489 UNSPEC_PUSH_MULTIPLE);
490 XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
494 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
495 d_to_save = ndregs_consec;
496 dregno = REG_R7 + 1 - ndregs_consec;
497 pregno = REG_P5 + 1 - npregs_consec;
498 for (i = 0; i < total_consec; i++)
500 rtx memref = gen_rtx_MEM (word_mode,
501 gen_rtx_PLUS (Pmode, spreg,
502 GEN_INT (- i * 4 - 4)));
506 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
512 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
515 XVECEXP (pat, 0, i + 1) = subpat;
516 RTX_FRAME_RELATED_P (subpat) = 1;
518 insn = emit_insn (pat);
519 RTX_FRAME_RELATED_P (insn) = 1;
522 for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
524 if (must_save_p (is_inthandler, dregno))
526 rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
527 RTX_FRAME_RELATED_P (insn) = 1;
531 for (pregno = REG_P0; npregs != npregs_consec; pregno++)
533 if (must_save_p (is_inthandler, pregno))
535 rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
536 RTX_FRAME_RELATED_P (insn) = 1;
540 for (i = REG_P7 + 1; i < REG_CC; i++)
543 && (df_regs_ever_live_p (i)
544 || (!leaf_function_p () && call_used_regs[i]))))
547 if (i == REG_A0 || i == REG_A1)
548 insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
549 gen_rtx_REG (PDImode, i));
551 insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
552 RTX_FRAME_RELATED_P (insn) = 1;
556 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
557 must restore all registers; this is used for interrupt handlers.
558 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
559 this for an interrupt (or exception) handler. */
562 expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
564 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
565 rtx postinc = gen_rtx_MEM (SImode, postinc1);
567 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
568 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
569 int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
570 int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
571 int total_consec = ndregs_consec + npregs_consec;
575 /* A slightly crude technique to stop flow from trying to delete "dead" insns.  */
577 MEM_VOLATILE_P (postinc) = 1;
579 for (i = REG_CC - 1; i > REG_P7; i--)
582 && (df_regs_ever_live_p (i)
583 || (!leaf_function_p () && call_used_regs[i]))))
585 if (i == REG_A0 || i == REG_A1)
587 rtx mem = gen_rtx_MEM (PDImode, postinc1);
588 MEM_VOLATILE_P (mem) = 1;
589 emit_move_insn (gen_rtx_REG (PDImode, i), mem);
592 emit_move_insn (gen_rtx_REG (SImode, i), postinc);
595 regno = REG_P5 - npregs_consec;
596 for (; npregs != npregs_consec; regno--)
598 if (must_save_p (is_inthandler, regno))
600 emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
604 regno = REG_R7 - ndregs_consec;
605 for (; ndregs != ndregs_consec; regno--)
607 if (must_save_p (is_inthandler, regno))
609 emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
614 if (total_consec != 0)
616 rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
618 = gen_rtx_SET (VOIDmode, spreg,
619 gen_rtx_PLUS (Pmode, spreg,
620 GEN_INT (total_consec * 4)));
622 if (npregs_consec > 0)
627 for (i = 0; i < total_consec; i++)
630 ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
632 rtx memref = gen_rtx_MEM (word_mode, addr);
635 XVECEXP (pat, 0, i + 1)
636 = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
638 if (npregs_consec > 0)
640 if (--npregs_consec == 0)
645 insn = emit_insn (pat);
646 RTX_FRAME_RELATED_P (insn) = 1;
648 if (saveall || is_inthandler)
649 emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
652 /* Perform any actions needed for a function that is receiving a
653 variable number of arguments.
657 MODE and TYPE are the mode and type of the current parameter.
659 PRETEND_SIZE is a variable that should be set to the amount of stack
660 that must be pushed by the prolog to pretend that our caller pushed
663 Normally, this macro will push all remaining incoming registers on the
664 stack and set PRETEND_SIZE to the length of the registers pushed.
667 - VDSP C compiler manual (our ABI) says that a variable args function
668 should save the R0, R1 and R2 registers in the stack.
669 - The caller will always leave space on the stack for the
670 arguments that are passed in registers, so we don't have
671 to leave any extra space.
672 - now, the va_start pointer can access all arguments from the stack. */
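/* A minimal sketch of the effect: for "int f (int a, ...)", R0 holds the
   named argument and is never stored here (see the comment below), while
   the loop spills the remaining argument registers into the caller's
   slots, i.e. [ARGP + 4] = R1 and [ARGP + 8] = R2, so va_arg can find
   every anonymous argument on the stack.  */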
675 setup_incoming_varargs (CUMULATIVE_ARGS *cum,
676 enum machine_mode mode ATTRIBUTE_UNUSED,
677 tree type ATTRIBUTE_UNUSED, int *pretend_size,
686 /* The move for named arguments will be generated automatically by the
687 compiler. We need to generate the move rtx for the unnamed arguments
688 if they are in the first 3 words. We assume at least 1 named argument
689 exists, so we never generate [ARGP] = R0 here. */
691 for (i = cum->words + 1; i < max_arg_registers; i++)
693 mem = gen_rtx_MEM (Pmode,
694 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
695 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
701 /* Value should be nonzero if functions must have frame pointers.
702 Zero means the frame pointer need not be set up (and parms may
703 be accessed via the stack pointer) in functions that seem suitable. */
706 bfin_frame_pointer_required (void)
708 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
710 if (fkind != SUBROUTINE)
713 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
714 so we have to override it for non-leaf functions. */
715 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
721 /* Return the number of registers pushed during the prologue. */
724 n_regs_saved_by_prologue (void)
726 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
727 bool is_inthandler = fkind != SUBROUTINE;
728 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
729 bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
730 || (is_inthandler && !current_function_is_leaf));
731 int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
732 int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
733 int n = ndregs + npregs;
736 if (all || stack_frame_needed_p ())
737 /* We use a LINK instruction in this case. */
741 if (must_save_fp_p ())
743 if (! current_function_is_leaf)
747 if (fkind != SUBROUTINE || all)
748 /* Increment once for ASTAT. */
751 if (fkind != SUBROUTINE)
754 if (lookup_attribute ("nesting", attrs))
758 for (i = REG_P7 + 1; i < REG_CC; i++)
760 || (fkind != SUBROUTINE
761 && (df_regs_ever_live_p (i)
762 || (!leaf_function_p () && call_used_regs[i]))))
763 n += i == REG_A0 || i == REG_A1 ? 2 : 1;
768 /* Return the offset between two registers, one to be eliminated, and the other
769 its replacement, at the start of a routine. */
772 bfin_initial_elimination_offset (int from, int to)
774 HOST_WIDE_INT offset = 0;
776 if (from == ARG_POINTER_REGNUM)
777 offset = n_regs_saved_by_prologue () * 4;
779 if (to == STACK_POINTER_REGNUM)
781 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
782 offset += crtl->outgoing_args_size;
783 else if (crtl->outgoing_args_size)
784 offset += FIXED_STACK_AREA;
786 offset += get_frame_size ();
792 /* Emit code to load a constant CONSTANT into register REG; setting
793 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
794 Make sure that the insns we generate need not be split. */
797 frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
800 rtx cst = GEN_INT (constant);
802 if (constant >= -32768 && constant < 65536)
803 insn = emit_move_insn (reg, cst);
806 /* We don't call split_load_immediate here, since dwarf2out.c can get
807 confused about some of the more clever sequences it can generate. */
808 insn = emit_insn (gen_movsi_high (reg, cst));
810 RTX_FRAME_RELATED_P (insn) = 1;
811 insn = emit_insn (gen_movsi_low (reg, reg, cst));
814 RTX_FRAME_RELATED_P (insn) = 1;
817 /* Generate efficient code to add a value to a P register.
818 Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
819 EPILOGUE_P is zero if this function is called for prologue,
820 otherwise it's nonzero.  And it's less than zero if this is for a sibcall epilogue.  */
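/* An add immediate only takes a signed 7-bit value (see the comment
   below), so small adjustments are emitted as one or two plain adds,
   e.g. adding 100 can be done as 60 + 40, while larger values are first
   loaded into a scratch preg (normally P1) and added from there.  */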
824 add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
829 /* Choose whether to use a sequence using a temporary register, or
830 a sequence with multiple adds. We can add a signed 7-bit value
831 in one instruction. */
832 if (value > 120 || value < -120)
840 /* For prologue or normal epilogue, P1 can be safely used
841 as the temporary register. For sibcall epilogue, we try to find
842 a call used P register, which will be restored in epilogue.
843 If we cannot find such a P register, we have to use one I register to help us.  */
847 tmpreg = gen_rtx_REG (SImode, REG_P1);
851 for (i = REG_P0; i <= REG_P5; i++)
852 if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
854 && i == PIC_OFFSET_TABLE_REGNUM
855 && (crtl->uses_pic_offset_table
856 || (TARGET_ID_SHARED_LIBRARY
857 && ! current_function_is_leaf))))
860 tmpreg = gen_rtx_REG (SImode, i);
863 tmpreg = gen_rtx_REG (SImode, REG_P1);
864 tmpreg2 = gen_rtx_REG (SImode, REG_I0);
865 emit_move_insn (tmpreg2, tmpreg);
870 frame_related_constant_load (tmpreg, value, TRUE);
872 insn = emit_move_insn (tmpreg, GEN_INT (value));
874 insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
876 RTX_FRAME_RELATED_P (insn) = 1;
878 if (tmpreg2 != NULL_RTX)
879 emit_move_insn (tmpreg, tmpreg2);
890 /* We could use -62, but that would leave the stack unaligned, so use -60 instead.  */
894 insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
896 RTX_FRAME_RELATED_P (insn) = 1;
902 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
903 is too large, generate a sequence of insns that has the same effect.
904 SPREG contains (reg:SI REG_SP). */
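/* The LINK instruction roughly performs, in one insn,
       [--SP] = RETS;  [--SP] = FP;  FP = SP;  SP -= framesize;
   which is why gen_link below is passed -8 - LINK_SIZE: the extra 8 bytes
   cover the saved RETS and FP.  Its immediate field is limited, hence the
   262140-byte cap and the extra SP adjustment for larger frames.  */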
907 emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
909 HOST_WIDE_INT link_size = frame_size;
913 if (link_size > 262140)
916 /* Use a LINK insn with as big a constant as possible, then subtract
917 any remaining size from the SP. */
918 insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
919 RTX_FRAME_RELATED_P (insn) = 1;
921 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
923 rtx set = XVECEXP (PATTERN (insn), 0, i);
924 gcc_assert (GET_CODE (set) == SET);
925 RTX_FRAME_RELATED_P (set) = 1;
928 frame_size -= link_size;
932 /* Must use a call-clobbered PREG that isn't the static chain. */
933 rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
935 frame_related_constant_load (tmpreg, -frame_size, TRUE);
936 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
937 RTX_FRAME_RELATED_P (insn) = 1;
941 /* Return the number of bytes we must reserve for outgoing arguments
942 in the current function's stack frame. */
947 if (crtl->outgoing_args_size)
949 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
950 return crtl->outgoing_args_size;
952 return FIXED_STACK_AREA;
957 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
958 function must save all its registers (true only for certain interrupt handlers).  */
962 do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
964 frame_size += arg_area_size ();
966 if (all || stack_frame_needed_p ()
967 || (must_save_fp_p () && ! current_function_is_leaf))
968 emit_link_insn (spreg, frame_size);
971 if (! current_function_is_leaf)
973 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
974 gen_rtx_PRE_DEC (Pmode, spreg)),
976 rtx insn = emit_insn (pat);
977 RTX_FRAME_RELATED_P (insn) = 1;
979 if (must_save_fp_p ())
981 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
982 gen_rtx_PRE_DEC (Pmode, spreg)),
983 gen_rtx_REG (Pmode, REG_FP));
984 rtx insn = emit_insn (pat);
985 RTX_FRAME_RELATED_P (insn) = 1;
987 add_to_reg (spreg, -frame_size, 1, 0);
991 /* Like do_link, but used for epilogues to deallocate the stack frame.
992 EPILOGUE_P is zero if this function is called for prologue,
993 otherwise it's nonzero.  And it's less than zero if this is for a sibcall epilogue.  */
997 do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
999 frame_size += arg_area_size ();
1001 if (all || stack_frame_needed_p ())
1002 emit_insn (gen_unlink ());
1005 rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
1007 add_to_reg (spreg, frame_size, 0, epilogue_p);
1008 if (must_save_fp_p ())
1010 rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
1011 emit_move_insn (fpreg, postinc);
1014 if (! current_function_is_leaf)
1016 emit_move_insn (bfin_rets_rtx, postinc);
1017 emit_use (bfin_rets_rtx);
1022 /* Generate a prologue suitable for a function of kind FKIND. This is
1023 called for interrupt and exception handler prologues.
1024 SPREG contains (reg:SI REG_SP). */
1027 expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
1029 HOST_WIDE_INT frame_size = get_frame_size ();
1030 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
1031 rtx predec = gen_rtx_MEM (SImode, predec1);
1033 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1034 tree kspisusp = lookup_attribute ("kspisusp", attrs);
1038 insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
1039 RTX_FRAME_RELATED_P (insn) = 1;
1042 /* We need space on the stack in case we need to save the argument registers.  */
1044 if (fkind == EXCPT_HANDLER)
1046 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
1047 RTX_FRAME_RELATED_P (insn) = 1;
1050 /* If we're calling other functions, they won't save their call-clobbered
1051 registers, so we must save everything here. */
1052 if (!current_function_is_leaf)
1054 expand_prologue_reg_save (spreg, all, true);
1056 if (lookup_attribute ("nesting", attrs))
1058 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
1059 : fkind == NMI_HANDLER ? REG_RETN
1061 insn = emit_move_insn (predec, srcreg);
1062 RTX_FRAME_RELATED_P (insn) = 1;
1065 do_link (spreg, frame_size, all);
1067 if (fkind == EXCPT_HANDLER)
1069 rtx r0reg = gen_rtx_REG (SImode, REG_R0);
1070 rtx r1reg = gen_rtx_REG (SImode, REG_R1);
1071 rtx r2reg = gen_rtx_REG (SImode, REG_R2);
1074 insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
1075 insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
1076 insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
1077 insn = emit_move_insn (r1reg, spreg);
1078 insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
1079 insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
1083 /* Generate an epilogue suitable for a function of kind FKIND. This is
1084 called for interrupt and exception handler epilogues.
1085 SPREG contains (reg:SI REG_SP). */
1088 expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
1090 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1091 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
1092 rtx postinc = gen_rtx_MEM (SImode, postinc1);
1094 /* A slightly crude technique to stop flow from trying to delete "dead" insns.  */
1096 MEM_VOLATILE_P (postinc) = 1;
1098 do_unlink (spreg, get_frame_size (), all, 1);
1100 if (lookup_attribute ("nesting", attrs))
1102 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
1103 : fkind == NMI_HANDLER ? REG_RETN
1105 emit_move_insn (srcreg, postinc);
1108 /* If we're calling other functions, they won't save their call-clobbered
1109 registers, so we must save (and restore) everything here. */
1110 if (!current_function_is_leaf)
1113 expand_epilogue_reg_restore (spreg, all, true);
1115 /* Deallocate any space we left on the stack in case we needed to save the
1116 argument registers. */
1117 if (fkind == EXCPT_HANDLER)
1118 emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
1120 emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
1123 /* Used while emitting the prologue to generate code to load the correct value
1124 into the PIC register, which is passed in DEST. */
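/* Roughly, with -mid-shared-library the PIC register (P5 on this port)
   points into a table of per-library GOT pointers.  If the library id is
   known from -mshared-library-id=N, the entry at offset -4 - 4*N is
   loaded directly; otherwise the linker-provided
   _current_shared_library_p5_offset_ symbol (UNSPEC_LIBRARY_OFFSET, see
   print_operand) supplies the offset.  */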
1127 bfin_load_pic_reg (rtx dest)
1129 struct cgraph_local_info *i = NULL;
1132 i = cgraph_local_info (current_function_decl);
1134 /* Functions local to the translation unit don't need to reload the
1135 pic reg, since the caller always passes a usable one. */
1137 return pic_offset_table_rtx;
1139 if (bfin_lib_id_given)
1140 addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
1142 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
1143 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
1144 UNSPEC_LIBRARY_OFFSET));
1145 insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
1149 /* Generate RTL for the prologue of the current function. */
1152 bfin_expand_prologue (void)
1154 HOST_WIDE_INT frame_size = get_frame_size ();
1155 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
1156 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1157 rtx pic_reg_loaded = NULL_RTX;
1158 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1159 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
1161 if (fkind != SUBROUTINE)
1163 expand_interrupt_handler_prologue (spreg, fkind, all);
1167 if (crtl->limit_stack
1168 || (TARGET_STACK_CHECK_L1
1169 && !DECL_NO_LIMIT_STACK (current_function_decl)))
1171 HOST_WIDE_INT offset
1172 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
1173 STACK_POINTER_REGNUM);
1174 rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
1175 rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
1179 emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
1180 emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
1183 if (GET_CODE (lim) == SYMBOL_REF)
1185 if (TARGET_ID_SHARED_LIBRARY)
1187 rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
1189 pic_reg_loaded = bfin_load_pic_reg (p2reg);
1190 val = legitimize_pic_address (stack_limit_rtx, p1reg,
1192 emit_move_insn (p1reg, val);
1193 frame_related_constant_load (p2reg, offset, FALSE);
1194 emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
1199 rtx limit = plus_constant (lim, offset);
1200 emit_move_insn (p2reg, limit);
1207 emit_move_insn (p2reg, lim);
1208 add_to_reg (p2reg, offset, 0, 0);
1211 emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
1212 emit_insn (gen_trapifcc ());
1214 expand_prologue_reg_save (spreg, all, false);
1216 do_link (spreg, frame_size, false);
1218 if (TARGET_ID_SHARED_LIBRARY
1220 && (crtl->uses_pic_offset_table
1221 || !current_function_is_leaf))
1222 bfin_load_pic_reg (pic_offset_table_rtx);
1225 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1226 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
1227 eh_return pattern.  SIBCALL_P is true if this is a sibcall epilogue, false otherwise.  */
1231 bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
1233 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
1234 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1235 int e = sibcall_p ? -1 : 1;
1236 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1237 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
1239 if (fkind != SUBROUTINE)
1241 expand_interrupt_handler_epilogue (spreg, fkind, all);
1245 do_unlink (spreg, get_frame_size (), false, e);
1247 expand_epilogue_reg_restore (spreg, all, false);
1249 /* Omit the return insn if this is for a sibcall. */
1254 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
1256 emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
1259 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1262 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
1263 unsigned int new_reg)
1265 /* Interrupt functions can only use registers that have already been
1266 saved by the prologue, even if they would normally be call-clobbered.  */
1269 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
1270 && !df_regs_ever_live_p (new_reg))
1276 /* Return the value of the return address for the frame COUNT steps up
1277 from the current frame, after the prologue.
1278 We punt for everything but the current frame by returning const0_rtx. */
1281 bfin_return_addr_rtx (int count)
1286 return get_hard_reg_initial_val (Pmode, REG_RETS);
1289 /* Try machine-dependent ways of modifying an illegitimate address X
1290 to be legitimate. If we find one, return the new, valid address,
1291 otherwise return NULL_RTX.
1293 OLDX is the address as it was before break_out_memory_refs was called.
1294 In some cases it is useful to look at this to decide what needs to be done.
1296 MODE is the mode of the memory reference. */
1299 legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
1300 enum machine_mode mode ATTRIBUTE_UNUSED)
1306 bfin_delegitimize_address (rtx orig_x)
1310 if (GET_CODE (x) != MEM)
1314 if (GET_CODE (x) == PLUS
1315 && GET_CODE (XEXP (x, 1)) == UNSPEC
1316 && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1317 && GET_CODE (XEXP (x, 0)) == REG
1318 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1319 return XVECEXP (XEXP (x, 1), 0, 0);
1324 /* This predicate is used to compute the length of a load/store insn.
1325 OP is a MEM rtx; we return nonzero if its addressing mode requires a
1326 32-bit instruction. */
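/* The short 16-bit load/store encodings only take a small scaled unsigned
   offset, so e.g. "R0 = [P0 + 60];" still fits the short form while
   "R0 = [P0 + 64];" needs a 32-bit insn.  The checks below reflect that:
   0..60 for SImode and 0..30 for HImode are short, FP-relative accesses
   additionally get negative offsets down to -128, and byte accesses
   always need the long form with its 16-bit offset field.  */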
1329 effective_address_32bit_p (rtx op, enum machine_mode mode)
1331 HOST_WIDE_INT offset;
1333 mode = GET_MODE (op);
1336 if (GET_CODE (op) != PLUS)
1338 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
1339 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
1343 if (GET_CODE (XEXP (op, 1)) == UNSPEC)
1346 offset = INTVAL (XEXP (op, 1));
1348 /* All byte loads use a 16-bit offset. */
1349 if (GET_MODE_SIZE (mode) == 1)
1352 if (GET_MODE_SIZE (mode) == 4)
1354 /* Frame pointer relative loads can use a negative offset, all others
1355 are restricted to a small positive one. */
1356 if (XEXP (op, 0) == frame_pointer_rtx)
1357 return offset < -128 || offset > 60;
1358 return offset < 0 || offset > 60;
1361 /* Must be HImode now. */
1362 return offset < 0 || offset > 30;
1365 /* Returns true if X is a memory reference using an I register. */
1367 bfin_dsp_memref_p (rtx x)
1372 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1373 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1378 /* Return cost of the memory address ADDR.
1379 All addressing modes are equally cheap on the Blackfin. */
1382 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED, bool speed ATTRIBUTE_UNUSED)
1387 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1390 print_address_operand (FILE *file, rtx x)
1392 switch (GET_CODE (x))
1395 output_address (XEXP (x, 0));
1396 fprintf (file, "+");
1397 output_address (XEXP (x, 1));
1401 fprintf (file, "--");
1402 output_address (XEXP (x, 0));
1405 output_address (XEXP (x, 0));
1406 fprintf (file, "++");
1409 output_address (XEXP (x, 0));
1410 fprintf (file, "--");
1414 gcc_assert (GET_CODE (x) != MEM);
1415 print_operand (file, x, 0);
1420 /* Adding intp DImode support by Tony.  */
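/* A quick map of the operand-printing codes handled below (not
   exhaustive): %j and %J emit the condition suffix of a comparison and
   its reverse; %d and %h pick the high and low 16-bit half of a constant
   (and, apparently mirroring that, the .H/.L half names of a register);
   %H names the second register of a DImode/DFmode pair; %Z prints the
   biased negative frame-size constant used by LINK insns.  */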
1426 print_operand (FILE *file, rtx x, char code)
1428 enum machine_mode mode;
1432 if (GET_MODE (current_output_insn) == SImode)
1433 fprintf (file, " ||");
1435 fprintf (file, ";");
1439 mode = GET_MODE (x);
1444 switch (GET_CODE (x))
1447 fprintf (file, "e");
1450 fprintf (file, "ne");
1453 fprintf (file, "g");
1456 fprintf (file, "l");
1459 fprintf (file, "ge");
1462 fprintf (file, "le");
1465 fprintf (file, "g");
1468 fprintf (file, "l");
1471 fprintf (file, "ge");
1474 fprintf (file, "le");
1477 output_operand_lossage ("invalid %%j value");
1481 case 'J': /* reverse logic */
1482 switch (GET_CODE(x))
1485 fprintf (file, "ne");
1488 fprintf (file, "e");
1491 fprintf (file, "le");
1494 fprintf (file, "ge");
1497 fprintf (file, "l");
1500 fprintf (file, "g");
1503 fprintf (file, "le");
1506 fprintf (file, "ge");
1509 fprintf (file, "l");
1512 fprintf (file, "g");
1515 output_operand_lossage ("invalid %%J value");
1520 switch (GET_CODE (x))
1526 fprintf (file, "%s", short_reg_names[REGNO (x)]);
1528 output_operand_lossage ("invalid operand for code '%c'", code);
1530 else if (code == 'd')
1533 fprintf (file, "%s", high_reg_names[REGNO (x)]);
1535 output_operand_lossage ("invalid operand for code '%c'", code);
1537 else if (code == 'w')
1539 if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
1540 fprintf (file, "%s.w", reg_names[REGNO (x)]);
1542 output_operand_lossage ("invalid operand for code '%c'", code);
1544 else if (code == 'x')
1546 if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
1547 fprintf (file, "%s.x", reg_names[REGNO (x)]);
1549 output_operand_lossage ("invalid operand for code '%c'", code);
1551 else if (code == 'v')
1553 if (REGNO (x) == REG_A0)
1554 fprintf (file, "AV0");
1555 else if (REGNO (x) == REG_A1)
1556 fprintf (file, "AV1");
1558 output_operand_lossage ("invalid operand for code '%c'", code);
1560 else if (code == 'D')
1562 if (D_REGNO_P (REGNO (x)))
1563 fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
1565 output_operand_lossage ("invalid operand for code '%c'", code);
1567 else if (code == 'H')
1569 if ((mode == DImode || mode == DFmode) && REG_P (x))
1570 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
1572 output_operand_lossage ("invalid operand for code '%c'", code);
1574 else if (code == 'T')
1576 if (D_REGNO_P (REGNO (x)))
1577 fprintf (file, "%s", byte_reg_names[REGNO (x)]);
1579 output_operand_lossage ("invalid operand for code '%c'", code);
1582 fprintf (file, "%s", reg_names[REGNO (x)]);
1588 print_address_operand (file, x);
1600 fputs ("(FU)", file);
1603 fputs ("(T)", file);
1606 fputs ("(TFU)", file);
1609 fputs ("(W32)", file);
1612 fputs ("(IS)", file);
1615 fputs ("(IU)", file);
1618 fputs ("(IH)", file);
1621 fputs ("(M)", file);
1624 fputs ("(IS,M)", file);
1627 fputs ("(ISS2)", file);
1630 fputs ("(S2RND)", file);
1637 else if (code == 'b')
1639 if (INTVAL (x) == 0)
1641 else if (INTVAL (x) == 1)
1647 /* Moves to half registers with d or h modifiers always use unsigned constants.  */
1649 else if (code == 'd')
1650 x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
1651 else if (code == 'h')
1652 x = GEN_INT (INTVAL (x) & 0xffff);
1653 else if (code == 'N')
1654 x = GEN_INT (-INTVAL (x));
1655 else if (code == 'X')
1656 x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
1657 else if (code == 'Y')
1658 x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
1659 else if (code == 'Z')
1660 /* Used for LINK insns. */
1661 x = GEN_INT (-8 - INTVAL (x));
1666 output_addr_const (file, x);
1670 output_operand_lossage ("invalid const_double operand");
1674 switch (XINT (x, 1))
1676 case UNSPEC_MOVE_PIC:
1677 output_addr_const (file, XVECEXP (x, 0, 0));
1678 fprintf (file, "@GOT");
1681 case UNSPEC_MOVE_FDPIC:
1682 output_addr_const (file, XVECEXP (x, 0, 0));
1683 fprintf (file, "@GOT17M4");
1686 case UNSPEC_FUNCDESC_GOT17M4:
1687 output_addr_const (file, XVECEXP (x, 0, 0));
1688 fprintf (file, "@FUNCDESC_GOT17M4");
1691 case UNSPEC_LIBRARY_OFFSET:
1692 fprintf (file, "_current_shared_library_p5_offset_");
1701 output_addr_const (file, x);
1706 /* Argument support functions. */
1708 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1709 for a call to a function whose data type is FNTYPE.
1710 For a library call, FNTYPE is 0.
1711 The VDSP C Compiler manual (our ABI) says that the
1712 first 3 words of arguments are passed in R0, R1 and R2.  */
1716 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1717 rtx libname ATTRIBUTE_UNUSED)
1719 static CUMULATIVE_ARGS zero_cum;
1723 /* Set up the number of registers to use for passing arguments. */
1725 cum->nregs = max_arg_registers;
1726 cum->arg_regs = arg_regs;
1728 cum->call_cookie = CALL_NORMAL;
1729 /* Check for a shortcall or longcall attribute.  */
1730 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1731 cum->call_cookie |= CALL_SHORT;
1732 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1733 cum->call_cookie |= CALL_LONG;
1738 /* Update the data in CUM to advance over an argument
1739 of mode MODE and data type TYPE.
1740 (TYPE is null for libcalls where that information may not be available.) */
1743 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1744 int named ATTRIBUTE_UNUSED)
1746 int count, bytes, words;
1748 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1749 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1751 cum->words += words;
1752 cum->nregs -= words;
1754 if (cum->nregs <= 0)
1757 cum->arg_regs = NULL;
1761 for (count = 1; count <= words; count++)
1768 /* Define where to put the arguments to a function.
1769 Value is zero to push the argument on the stack,
1770 or a hard register in which to store the argument.
1772 MODE is the argument's machine mode.
1773 TYPE is the data type of the argument (as a tree).
1774 This is null for libcalls where that information may
1776 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1777 the preceding args and about the function being called.
1778 NAMED is nonzero if this argument is a named parameter
1779 (otherwise it is an extra parameter matching an ellipsis). */
1782 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1783 int named ATTRIBUTE_UNUSED)
1786 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1788 if (mode == VOIDmode)
1789 /* Compute operand 2 of the call insn. */
1790 return GEN_INT (cum->call_cookie);
1796 return gen_rtx_REG (mode, *(cum->arg_regs));
1801 /* For an arg passed partly in registers and partly in memory,
1802 this is the number of bytes passed in registers.
1803 For args passed entirely in registers or entirely in memory, zero.
1805 Refer to the VDSP C Compiler manual (our ABI).
1806 First 3 words are in registers.  So, if an argument is larger
1807 than the registers available, it will span the registers and the stack.  */
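/* Worked example: for "void f (int a, int b, long long c)" the words of a
   and b go in R0 and R1, leaving 4 bytes of register space; c needs 8
   bytes, so this hook returns 4 and the value is split between R2 and the
   stack (see the bytes_left computation below).  */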
1811 bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1812 tree type ATTRIBUTE_UNUSED,
1813 bool named ATTRIBUTE_UNUSED)
1816 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1817 int bytes_left = cum->nregs * UNITS_PER_WORD;
1822 if (bytes_left == 0)
1824 if (bytes > bytes_left)
1829 /* Variable sized types are passed by reference. */
1832 bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1833 enum machine_mode mode ATTRIBUTE_UNUSED,
1834 const_tree type, bool named ATTRIBUTE_UNUSED)
1836 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1839 /* Decide whether a type should be returned in memory (true)
1840 or in a register (false). This is called by the macro
1841 TARGET_RETURN_IN_MEMORY. */
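/* UNITS_PER_WORD is 4 on this target, so aggregates of at most 8 bytes
   are returned in registers; larger aggregates, and variable-sized ones
   (where int_size_in_bytes returns -1), are returned in memory.  */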
1844 bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1846 int size = int_size_in_bytes (type);
1847 return size > 2 * UNITS_PER_WORD || size == -1;
1850 /* Register in which address to store a structure value
1851 is passed to a function. */
1853 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1854 int incoming ATTRIBUTE_UNUSED)
1856 return gen_rtx_REG (Pmode, REG_P0);
1859 /* Return true when register may be used to pass function parameters. */
1862 function_arg_regno_p (int n)
1865 for (i = 0; arg_regs[i] != -1; i++)
1866 if (n == arg_regs[i])
1871 /* Returns 1 if OP contains a symbol reference */
1874 symbolic_reference_mentioned_p (rtx op)
1876 register const char *fmt;
1879 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1882 fmt = GET_RTX_FORMAT (GET_CODE (op));
1883 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1889 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1890 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1894 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1901 /* Decide whether we can make a sibling call to a function. DECL is the
1902 declaration of the function being targeted by the call and EXP is the
1903 CALL_EXPR representing the call. */
1906 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1907 tree exp ATTRIBUTE_UNUSED)
1909 struct cgraph_local_info *this_func, *called_func;
1910 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1911 if (fkind != SUBROUTINE)
1913 if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
1916 /* When compiling for ID shared libraries, can't sibcall a local function
1917 from a non-local function, because the local function thinks it does
1918 not need to reload P5 in the prologue, but the sibcall will pop P5 in the
1919 sibcall epilogue, and we end up with the wrong value in P5. */
1922 this_func = cgraph_local_info (current_function_decl);
1923 called_func = cgraph_local_info (decl);
1924 return !called_func->local || this_func->local;
1927 /* Emit RTL insns to initialize the variable parts of a trampoline at
1928 TRAMP. FNADDR is an RTX for the address of the function's pure
1929 code. CXT is an RTX for the static chain value for the function. */
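/* What gets patched below, as a sketch: the low and high halves of FNADDR
   are stored at byte offsets i + 2 and i + 6, and those of CXT at i + 10
   and i + 14, i.e. into the immediate fields of the .L/.H register loads
   in the trampoline template.  For FDPIC the first word of the trampoline
   is additionally set to point at the code at TRAMP + 8, since an FDPIC
   function pointer is a descriptor rather than a plain code address.  */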
1932 initialize_trampoline (rtx tramp, rtx fnaddr, rtx cxt)
1934 rtx t1 = copy_to_reg (fnaddr);
1935 rtx t2 = copy_to_reg (cxt);
1941 rtx a = memory_address (Pmode, plus_constant (tramp, 8));
1942 addr = memory_address (Pmode, tramp);
1943 emit_move_insn (gen_rtx_MEM (SImode, addr), a);
1947 addr = memory_address (Pmode, plus_constant (tramp, i + 2));
1948 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1949 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1950 addr = memory_address (Pmode, plus_constant (tramp, i + 6));
1951 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1953 addr = memory_address (Pmode, plus_constant (tramp, i + 10));
1954 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1955 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1956 addr = memory_address (Pmode, plus_constant (tramp, i + 14));
1957 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1960 /* Emit insns to move operands[1] into operands[0]. */
1963 emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1965 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
1967 gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
1968 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1969 operands[1] = force_reg (SImode, operands[1]);
1971 operands[1] = legitimize_pic_address (operands[1], temp,
1972 TARGET_FDPIC ? OUR_FDPIC_REG
1973 : pic_offset_table_rtx);
1976 /* Expand a move operation in mode MODE. The operands are in OPERANDS.
1977 Returns true if no further code must be generated, false if the caller
1978 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
1981 expand_move (rtx *operands, enum machine_mode mode)
1983 rtx op = operands[1];
1984 if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
1985 && SYMBOLIC_CONST (op))
1986 emit_pic_move (operands, mode);
1987 else if (mode == SImode && GET_CODE (op) == CONST
1988 && GET_CODE (XEXP (op, 0)) == PLUS
1989 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
1990 && !bfin_legitimate_constant_p (op))
1992 rtx dest = operands[0];
1994 gcc_assert (!reload_in_progress && !reload_completed);
1996 op0 = force_reg (mode, XEXP (op, 0));
1998 if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
1999 op1 = force_reg (mode, op1);
2000 if (GET_CODE (dest) == MEM)
2001 dest = gen_reg_rtx (mode);
2002 emit_insn (gen_addsi3 (dest, op0, op1));
2003 if (dest == operands[0])
2007 /* Don't generate memory->memory or constant->memory moves, go through a register.  */
2009 else if ((reload_in_progress | reload_completed) == 0
2010 && GET_CODE (operands[0]) == MEM
2011 && GET_CODE (operands[1]) != REG)
2012 operands[1] = force_reg (mode, operands[1]);
2016 /* Split one or more DImode RTL references into pairs of SImode
2017 references. The RTL can be REG, offsettable MEM, integer constant, or
2018 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
2019 split and "num" is its length. lo_half and hi_half are output arrays
2020 that parallel "operands". */
2023 split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
2027 rtx op = operands[num];
2029 /* simplify_subreg refuses to split volatile memory addresses,
2030 but we still have to handle them.  */
2031 if (GET_CODE (op) == MEM)
2033 lo_half[num] = adjust_address (op, SImode, 0);
2034 hi_half[num] = adjust_address (op, SImode, 4);
2038 lo_half[num] = simplify_gen_subreg (SImode, op,
2039 GET_MODE (op) == VOIDmode
2040 ? DImode : GET_MODE (op), 0);
2041 hi_half[num] = simplify_gen_subreg (SImode, op,
2042 GET_MODE (op) == VOIDmode
2043 ? DImode : GET_MODE (op), 4);
2049 bfin_longcall_p (rtx op, int call_cookie)
2051 gcc_assert (GET_CODE (op) == SYMBOL_REF);
2052 if (call_cookie & CALL_SHORT)
2054 if (call_cookie & CALL_LONG)
2056 if (TARGET_LONG_CALLS)
2061 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
2062 COOKIE is a CONST_INT holding the call_cookie prepared by init_cumulative_args.
2063 SIBCALL is nonzero if this is a sibling call. */
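/* Outline of the FDPIC case below: when the callee may need a PLT-style
   or long call, its function descriptor is read with gen_load_funcdescsi;
   the first word becomes the call target and the word at ADDR + 4 becomes
   the new value of the FDPIC register, which is attached to the call
   through a USE.  Short local calls keep the SYMBOL_REF directly.  */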
2066 bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
2068 rtx use = NULL, call;
2069 rtx callee = XEXP (fnaddr, 0);
2070 int nelts = 2 + !!sibcall;
2072 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
2075 /* In an untyped call, we can get NULL for operand 2. */
2076 if (cookie == NULL_RTX)
2077 cookie = const0_rtx;
2079 /* Static functions and indirect calls don't need the pic register. */
2080 if (!TARGET_FDPIC && flag_pic
2081 && GET_CODE (callee) == SYMBOL_REF
2082 && !SYMBOL_REF_LOCAL_P (callee))
2083 use_reg (&use, pic_offset_table_rtx);
2087 int caller_has_l1_text, callee_has_l1_text;
2089 caller_has_l1_text = callee_has_l1_text = 0;
2091 if (lookup_attribute ("l1_text",
2092 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
2093 caller_has_l1_text = 1;
2095 if (GET_CODE (callee) == SYMBOL_REF
2096 && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee))
2099 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
2100 callee_has_l1_text = 1;
2102 if (GET_CODE (callee) != SYMBOL_REF
2103 || bfin_longcall_p (callee, INTVAL (cookie))
2104 || (GET_CODE (callee) == SYMBOL_REF
2105 && !SYMBOL_REF_LOCAL_P (callee)
2106 && TARGET_INLINE_PLT)
2107 || caller_has_l1_text != callee_has_l1_text
2108 || (caller_has_l1_text && callee_has_l1_text
2109 && (GET_CODE (callee) != SYMBOL_REF
2110 || !SYMBOL_REF_LOCAL_P (callee))))
2113 if (! address_operand (addr, Pmode))
2114 addr = force_reg (Pmode, addr);
2116 fnaddr = gen_reg_rtx (SImode);
2117 emit_insn (gen_load_funcdescsi (fnaddr, addr));
2118 fnaddr = gen_rtx_MEM (Pmode, fnaddr);
2120 picreg = gen_reg_rtx (SImode);
2121 emit_insn (gen_load_funcdescsi (picreg,
2122 plus_constant (addr, 4)));
2127 else if ((!register_no_elim_operand (callee, Pmode)
2128 && GET_CODE (callee) != SYMBOL_REF)
2129 || (GET_CODE (callee) == SYMBOL_REF
2130 && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
2131 || bfin_longcall_p (callee, INTVAL (cookie)))))
2133 callee = copy_to_mode_reg (Pmode, callee);
2134 fnaddr = gen_rtx_MEM (Pmode, callee);
2136 call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
2139 call = gen_rtx_SET (VOIDmode, retval, call);
2141 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
2143 XVECEXP (pat, 0, n++) = call;
2145 XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
2146 XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
2148 XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
2149 call = emit_call_insn (pat);
2151 CALL_INSN_FUNCTION_USAGE (call) = use;
2154 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2157 hard_regno_mode_ok (int regno, enum machine_mode mode)
2159 /* Allow only dregs to store values of HImode or QImode.  */
2160 enum reg_class rclass = REGNO_REG_CLASS (regno);
2165 if (mode == V2HImode)
2166 return D_REGNO_P (regno);
2167 if (rclass == CCREGS)
2168 return mode == BImode;
2169 if (mode == PDImode || mode == V2PDImode)
2170 return regno == REG_A0 || regno == REG_A1;
2172 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
2173 up with a bad register class (such as ALL_REGS) for DImode. */
2175 return regno < REG_M3;
2178 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
2181 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
2184 /* Implements target hook vector_mode_supported_p. */
2187 bfin_vector_mode_supported_p (enum machine_mode mode)
2189 return mode == V2HImode;
2192 /* Return the cost of moving data from a register in class CLASS1 to
2193 one in class CLASS2. A cost of 2 is the default. */
2196 bfin_register_move_cost (enum machine_mode mode,
2197 enum reg_class class1, enum reg_class class2)
2199 /* These need secondary reloads, so they're more expensive. */
2200 if ((class1 == CCREGS && class2 != DREGS)
2201 || (class1 != DREGS && class2 == CCREGS))
2204 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2208 /* There are some stalls involved when moving from a DREG to a different
2209 class reg, and using the value in one of the following instructions.
2210 Attempt to model this by slightly discouraging such moves. */
2211 if (class1 == DREGS && class2 != DREGS)
2214 if (GET_MODE_CLASS (mode) == MODE_INT)
2216 /* Discourage trying to use the accumulators. */
2217 if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
2218 || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
2219 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
2220 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
2226 /* Return the cost of moving data of mode M between a
2227 register and memory. A value of 2 is the default; this cost is
2228 relative to those in `REGISTER_MOVE_COST'.
2230 ??? In theory L1 memory has single-cycle latency. We should add a switch
2231 that tells the compiler whether we expect to use only L1 memory for the
2232 program; it'll make the costs more accurate. */
2235 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
2236 enum reg_class rclass,
2237 int in ATTRIBUTE_UNUSED)
2239 /* Make memory accesses slightly more expensive than any register-register
2240 move. Also, penalize non-DP registers, since they need secondary
2241 reloads to load and store. */
2242 if (! reg_class_subset_p (rclass, DPREGS))
2248 /* Inform reload about cases where moving X with a mode MODE to a register in
2249 RCLASS requires an extra scratch register. Return the class needed for the
2250 scratch register. */
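/* For example, after frame pointer elimination we may be asked to reload
   something like (plus (reg:SI FP) (const_int 1000)) into a dreg; the
   constant is too large for a dreg add (not a signed 7-bit value), so a
   PREG scratch is requested via CODE_FOR_reload_insi below.  */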
2252 static enum reg_class
2253 bfin_secondary_reload (bool in_p, rtx x, enum reg_class rclass,
2254 enum machine_mode mode, secondary_reload_info *sri)
2256 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
2257 in most other cases we can also use PREGS. */
2258 enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
2259 enum reg_class x_class = NO_REGS;
2260 enum rtx_code code = GET_CODE (x);
2263 x = SUBREG_REG (x), code = GET_CODE (x);
2266 int regno = REGNO (x);
2267 if (regno >= FIRST_PSEUDO_REGISTER)
2268 regno = reg_renumber[regno];
2273 x_class = REGNO_REG_CLASS (regno);
2276 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
2277 This happens as a side effect of register elimination, and we need
2278 a scratch register to do it. */
2279 if (fp_plus_const_operand (x, mode))
2281 rtx op2 = XEXP (x, 1);
2282 int large_constant_p = ! satisfies_constraint_Ks7 (op2);
2284 if (rclass == PREGS || rclass == PREGS_CLOBBERED)
2286 /* If destination is a DREG, we can do this without a scratch register
2287 if the constant is valid for an add instruction. */
2288 if ((rclass == DREGS || rclass == DPREGS)
2289 && ! large_constant_p)
2291 /* Reloading to anything other than a DREG?  Use a PREG scratch register.  */
2293 sri->icode = CODE_FOR_reload_insi;
2297 /* Data can usually be moved freely between registers of most classes.
2298 AREGS are an exception; they can only move to or from another register
2299 in AREGS or one in DREGS. They can also be assigned the constant 0. */
2300 if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
2301 return (rclass == DREGS || rclass == AREGS || rclass == EVEN_AREGS
2302 || rclass == ODD_AREGS
2305 if (rclass == AREGS || rclass == EVEN_AREGS || rclass == ODD_AREGS)
2309 sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;
2313 if (x != const0_rtx && x_class != DREGS)
2321 /* CCREGS can only be moved from/to DREGS. */
2322 if (rclass == CCREGS && x_class != DREGS)
2324 if (x_class == CCREGS && rclass != DREGS)
2327 /* All registers other than AREGS can load arbitrary constants. The only
2328 case that remains is MEM. */
2330 if (! reg_class_subset_p (rclass, default_class))
2331 return default_class;
2336 /* Implement TARGET_HANDLE_OPTION. */
2339 bfin_handle_option (size_t code, const char *arg, int value)
2343 case OPT_mshared_library_id_:
2344 if (value > MAX_LIBRARY_ID)
2345 error ("-mshared-library-id=%s is not between 0 and %d",
2346 arg, MAX_LIBRARY_ID);
2347 bfin_lib_id_given = 1;
2356 while ((p = bfin_cpus[i].name) != NULL)
2358 if (strncmp (arg, p, strlen (p)) == 0)
2365 error ("-mcpu=%s is not valid", arg);
2369 bfin_cpu_type = bfin_cpus[i].type;
2371 q = arg + strlen (p);
2375 bfin_si_revision = bfin_cpus[i].si_revision;
2376 bfin_workarounds |= bfin_cpus[i].workarounds;
2378 else if (strcmp (q, "-none") == 0)
2379 bfin_si_revision = -1;
2380 else if (strcmp (q, "-any") == 0)
2382 bfin_si_revision = 0xffff;
2383 while (bfin_cpus[i].type == bfin_cpu_type)
2385 bfin_workarounds |= bfin_cpus[i].workarounds;
2391 unsigned int si_major, si_minor;
2394 rev_len = strlen (q);
2396 if (sscanf (q, "-%u.%u%n", &si_major, &si_minor, &n) != 2
2398 || si_major > 0xff || si_minor > 0xff)
2400 invalid_silicon_revision:
2401 error ("-mcpu=%s has invalid silicon revision", arg);
2405 bfin_si_revision = (si_major << 8) | si_minor;
2407 while (bfin_cpus[i].type == bfin_cpu_type
2408 && bfin_cpus[i].si_revision != bfin_si_revision)
2411 if (bfin_cpus[i].type != bfin_cpu_type)
2412 goto invalid_silicon_revision;
2414 bfin_workarounds |= bfin_cpus[i].workarounds;
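/* A standalone sketch (illustrative only, not compiled as part of the port,
   assuming plain C): how a "-major.minor" suffix is packed into the 16-bit
   value compared against bfin_cpus[].si_revision above.  The n != rev_len
   check mirrors the rev_len computed above.  */
#if 0
#include <stdio.h>
#include <string.h>

static int
pack_si_revision (const char *q)
{
  unsigned int si_major, si_minor;
  int n;
  int rev_len = strlen (q);

  if (sscanf (q, "-%u.%u%n", &si_major, &si_minor, &n) != 2
      || n != rev_len
      || si_major > 0xff || si_minor > 0xff)
    return -1;				/* invalid silicon revision string */
  return (si_major << 8) | si_minor;
}

int
main (void)
{
  printf ("%#x\n", (unsigned) pack_si_revision ("-0.5"));   /* prints 0x5 */
  printf ("%#x\n", (unsigned) pack_si_revision ("-1.2"));   /* prints 0x102 */
  printf ("%d\n", pack_si_revision ("-0.5x"));		     /* -1: trailing junk rejected */
  return 0;
}
#endif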
2417 if (bfin_cpu_type == BFIN_CPU_BF561)
2418 warning (0, "bf561 support is still incomplete");
2428 static struct machine_function *
2429 bfin_init_machine_status (void)
2431 struct machine_function *f;
2433 f = GGC_CNEW (struct machine_function);
2438 /* Implement the macro OVERRIDE_OPTIONS. */
2441 override_options (void)
2443 /* If processor type is not specified, enable all workarounds. */
2444 if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
2448 for (i = 0; bfin_cpus[i].name != NULL; i++)
2449 bfin_workarounds |= bfin_cpus[i].workarounds;
2451 bfin_si_revision = 0xffff;
2454 if (bfin_csync_anomaly == 1)
2455 bfin_workarounds |= WA_SPECULATIVE_SYNCS;
2456 else if (bfin_csync_anomaly == 0)
2457 bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;
2459 if (bfin_specld_anomaly == 1)
2460 bfin_workarounds |= WA_SPECULATIVE_LOADS;
2461 else if (bfin_specld_anomaly == 0)
2462 bfin_workarounds &= ~WA_SPECULATIVE_LOADS;
2464 if (TARGET_OMIT_LEAF_FRAME_POINTER)
2465 flag_omit_frame_pointer = 1;
2467 /* Library identification */
2468 if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
2469 error ("-mshared-library-id= specified without -mid-shared-library");
2471 if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
2472 error ("Can't use multiple stack checking methods together.");
2474 if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
2475 error ("ID shared libraries and FD-PIC mode can't be used together.");
2477 /* Don't allow the user to specify -mid-shared-library and -msep-data
2478 together, as it makes little sense from a user's point of view... */
2479 if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
2480 error ("cannot specify both -msep-data and -mid-shared-library");
2481 /* ... internally, however, it's nearly the same. */
2482 if (TARGET_SEP_DATA)
2483 target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;
2485 if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
2488 /* There is no single unaligned SI op for PIC code. Sometimes we
2489 need to use ".4byte" and sometimes we need to use ".picptr".
2490 See bfin_assemble_integer for details. */
2492 targetm.asm_out.unaligned_op.si = 0;
2494 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2495 since we don't support it and it'll just break. */
2496 if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
2499 if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
2500 error ("-mmulticore can only be used with BF561");
2502 if (TARGET_COREA && !TARGET_MULTICORE)
2503 error ("-mcorea should be used with -mmulticore");
2505 if (TARGET_COREB && !TARGET_MULTICORE)
2506 error ("-mcoreb should be used with -mmulticore");
2508 if (TARGET_COREA && TARGET_COREB)
2509 error ("-mcorea and -mcoreb can't be used together");
2511 flag_schedule_insns = 0;
2513 /* Passes after sched2 can break the helpful TImode annotations that
2514 haifa-sched puts on every insn. Just do scheduling in reorg. */
2515 bfin_flag_schedule_insns2 = flag_schedule_insns_after_reload;
2516 flag_schedule_insns_after_reload = 0;
2518 init_machine_status = bfin_init_machine_status;
2521 /* Return the destination address of BRANCH.
2522 We need to use this instead of get_attr_length, because the
2523 cbranch_with_nops pattern conservatively sets its length to 6, and
2524 we still prefer to use shorter sequences. */
2527 branch_dest (rtx branch)
2531 rtx pat = PATTERN (branch);
2532 if (GET_CODE (pat) == PARALLEL)
2533 pat = XVECEXP (pat, 0, 0);
2534 dest = SET_SRC (pat);
2535 if (GET_CODE (dest) == IF_THEN_ELSE)
2536 dest = XEXP (dest, 1);
2537 dest = XEXP (dest, 0);
2538 dest_uid = INSN_UID (dest);
2539 return INSN_ADDRESSES (dest_uid);
2542 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2543 it's a branch that's predicted taken. */
2546 cbranch_predicted_taken_p (rtx insn)
2548 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2552 int pred_val = INTVAL (XEXP (x, 0));
2554 return pred_val >= REG_BR_PROB_BASE / 2;
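/* REG_BR_PROB_BASE is the fixed-point scale used for the REG_BR_PROB note
   value, so this treats any probability of at least one half as a branch
   predicted taken.  */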
2560 /* Templates for use by asm_conditional_branch. */
2562 static const char *ccbranch_templates[][3] = {
2563 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
2564 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
2565 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
2566 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
2569 /* Output INSN, which is a conditional branch instruction with operands
2572 We deal with the various forms of conditional branches that can be generated
2573 by bfin_reorg to prevent the hardware from doing speculative loads, by
2574 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2575 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2576 Either of these is only necessary if the branch is short, otherwise the
2577 template we use ends in an unconditional jump which flushes the pipeline anyway.  */
2581 asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
2583 int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
2584 /* Note: the offset for instructions like "if cc jmp; jump.[sl] offset"
2585 is measured from the start of the "if cc" rather than from the jump.
2586 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
2588 int len = (offset >= -1024 && offset <= 1022 ? 0
2589 : offset >= -4094 && offset <= 4096 ? 1
2591 int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
2592 int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
2593 output_asm_insn (ccbranch_templates[idx][len], operands);
2594 gcc_assert (n_nops == 0 || !bp);
2596 while (n_nops-- > 0)
2597 output_asm_insn ("nop;", NULL);
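/* A restatement (illustrative only, not used by the port) of the branch
   length bucketing above: the returned bucket is the column index into
   ccbranch_templates chosen by asm_conditional_branch.  */
#if 0
static int
cbranch_length_bucket (int offset)
{
  if (offset >= -1024 && offset <= 1022)
    return 0;		/* short: "if [!]cc jump %3;" */
  if (offset >= -4094 && offset <= 4096)
    return 1;		/* medium: conditional skip followed by "jump.s %3;" */
  return 2;		/* long: conditional skip followed by "jump.l %3;" */
}
#endif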
2600 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
2601 stored in bfin_compare_op0 and bfin_compare_op1 already. */
2604 bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
2606 enum rtx_code code1, code2;
2607 rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
2608 rtx tem = bfin_cc_rtx;
2609 enum rtx_code code = GET_CODE (cmp);
2611 /* If we have a BImode input, then we already have a compare result, and
2612 do not need to emit another comparison. */
2613 if (GET_MODE (op0) == BImode)
2615 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
2616 tem = op0, code2 = code;
2621 /* bfin has these conditions */
2631 code1 = reverse_condition (code);
2635 emit_insn (gen_rtx_SET (BImode, tem,
2636 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
2639 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
2642 /* Return nonzero iff C has exactly one bit set when it is interpreted
2643 as a 32-bit constant. */
2646 log2constp (unsigned HOST_WIDE_INT c)
2649 return c != 0 && (c & (c-1)) == 0;
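/* Minimal standalone check of the power-of-two test above (illustrative
   only, assuming plain C): clearing the lowest set bit of a power of two
   yields zero, and zero itself is rejected explicitly.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long v[] = { 0x1, 0x8000, 0x80000000ul, 0x30, 0x0 };
  int i;

  for (i = 0; i < 5; i++)
    printf ("%#lx -> %d\n", v[i], v[i] != 0 && (v[i] & (v[i] - 1)) == 0);
  /* prints 1, 1, 1, 0 and 0 */
  return 0;
}
#endif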
2652 /* Returns the number of consecutive least significant zeros in the binary
2653 representation of *V.
2654 We modify *V to contain the original value arithmetically shifted right by
2655 the number of zeroes. */
2658 shiftr_zero (HOST_WIDE_INT *v)
2660 unsigned HOST_WIDE_INT tmp = *v;
2661 unsigned HOST_WIDE_INT sgn;
2667 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
2668 while ((tmp & 0x1) == 0 && n <= 32)
2670 tmp = (tmp >> 1) | sgn;
2677 /* After reload, split the load of an immediate constant. OPERANDS are the
2678 operands of the movsi_insn pattern which we are splitting. We return
2679 nonzero if we emitted a sequence to load the constant, zero if we emitted
2680 nothing because we want to use the splitter's default sequence. */
2683 split_load_immediate (rtx operands[])
2685 HOST_WIDE_INT val = INTVAL (operands[1]);
2687 HOST_WIDE_INT shifted = val;
2688 HOST_WIDE_INT shifted_compl = ~val;
2689 int num_zero = shiftr_zero (&shifted);
2690 int num_compl_zero = shiftr_zero (&shifted_compl);
2691 unsigned int regno = REGNO (operands[0]);
2693 /* This case takes care of single-bit set/clear constants, which we could
2694 also implement with BITSET/BITCLR. */
2696 && shifted >= -32768 && shifted < 65536
2697 && (D_REGNO_P (regno)
2698 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2700 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2701 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
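/* Worked example for the case above: val = 0x00300000 gives shifted = 3 and
   num_zero = 20, so for a DREG the splitter emits a move of the constant 3
   followed by a left shift by 20, instead of a full 32-bit constant load.  */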
2706 tmp |= -(tmp & 0x8000);
2708 /* If high word has one bit set or clear, try to use a bit operation. */
2709 if (D_REGNO_P (regno))
2711 if (log2constp (val & 0xFFFF0000))
2713 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2714 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2717 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2719 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2720 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2724 if (D_REGNO_P (regno))
2726 if (tmp >= -64 && tmp <= 63)
2728 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2729 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2733 if ((val & 0xFFFF0000) == 0)
2735 emit_insn (gen_movsi (operands[0], const0_rtx));
2736 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2740 if ((val & 0xFFFF0000) == 0xFFFF0000)
2742 emit_insn (gen_movsi (operands[0], constm1_rtx));
2743 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2748 /* Need DREGs for the remaining case. */
2753 && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
2755 /* If optimizing for size, generate a sequence that has more instructions but is shorter.  */
2757 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2758 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2759 GEN_INT (num_compl_zero)));
2760 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2766 /* Return true if VALUE is a valid constant offset for a memory operand of
2767 mode MODE.  Return false if not.  */
2770 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2772 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2773 int sz = GET_MODE_SIZE (mode);
2774 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2775 /* The usual offsettable_memref machinery doesn't work so well for this
2776 port, so we deal with the problem here. */
2777 if (value > 0 && sz == 8)
2779 return (v & ~(0x7fff << shift)) == 0;
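/* Worked example: for SImode (sz == 4, shift == 2) the mask above accepts
   only offsets that are multiples of 4 with magnitude at most 0x7fff << 2,
   so 0x1fffc passes while 0x1fffe (misaligned) and 0x20000 (out of range)
   fail.  */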
2783 bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2784 enum rtx_code outer_code)
2787 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2789 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
2793 bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2795 switch (GET_CODE (x)) {
2797 if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
2801 if (REG_P (XEXP (x, 0))
2802 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
2803 && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
2804 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2805 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
2810 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2811 && REG_P (XEXP (x, 0))
2812 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
2815 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2816 && XEXP (x, 0) == stack_pointer_rtx
2817 && REG_P (XEXP (x, 0))
2818 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
2827 /* Decide whether we can force certain constants to memory. If we
2828 decide we can't, the caller should be able to cope with it in some other way.  */
2832 bfin_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
2834 /* We have only one class of non-legitimate constants, and our movsi
2835 expander knows how to handle them. Dropping these constants into the
2836 data section would only shift the problem - we'd still get relocs
2837 outside the object, in the data section rather than the text section. */
2841 /* Ensure that for any constant of the form symbol + offset, the offset
2842 remains within the object. Any other constants are ok.
2843 This ensures that flat binaries never have to deal with relocations
2844 crossing section boundaries. */
2847 bfin_legitimate_constant_p (rtx x)
2850 HOST_WIDE_INT offset;
2852 if (GET_CODE (x) != CONST)
2856 gcc_assert (GET_CODE (x) == PLUS);
2860 if (GET_CODE (sym) != SYMBOL_REF
2861 || GET_CODE (x) != CONST_INT)
2863 offset = INTVAL (x);
2865 if (SYMBOL_REF_DECL (sym) == 0)
2868 || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
2875 bfin_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed)
2877 int cost2 = COSTS_N_INSNS (1);
2883 if (outer_code == SET || outer_code == PLUS)
2884 *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
2885 else if (outer_code == AND)
2886 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2887 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2888 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2889 else if (outer_code == LEU || outer_code == LTU)
2890 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2891 else if (outer_code == MULT)
2892 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2893 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2895 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2896 || outer_code == LSHIFTRT)
2897 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2898 else if (outer_code == IOR || outer_code == XOR)
2899 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2908 *total = COSTS_N_INSNS (2);
2914 if (GET_MODE (x) == SImode)
2916 if (GET_CODE (op0) == MULT
2917 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
2919 HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
2920 if (val == 2 || val == 4)
2923 *total += rtx_cost (XEXP (op0, 0), outer_code, speed);
2924 *total += rtx_cost (op1, outer_code, speed);
2929 if (GET_CODE (op0) != REG
2930 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2931 *total += rtx_cost (op0, SET, speed);
2932 #if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2933 towards creating too many induction variables. */
2934 if (!reg_or_7bit_operand (op1, SImode))
2935 *total += rtx_cost (op1, SET, speed);
2938 else if (GET_MODE (x) == DImode)
2941 if (GET_CODE (op1) != CONST_INT
2942 || !satisfies_constraint_Ks7 (op1))
2943 *total += rtx_cost (op1, PLUS, speed);
2944 if (GET_CODE (op0) != REG
2945 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2946 *total += rtx_cost (op0, PLUS, speed);
2951 if (GET_MODE (x) == DImode)
2960 if (GET_MODE (x) == DImode)
2967 if (GET_CODE (op0) != REG
2968 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2969 *total += rtx_cost (op0, code, speed);
2979 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2982 if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
2983 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
2984 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
2985 || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
2992 if (GET_CODE (op0) != REG
2993 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2994 *total += rtx_cost (op0, code, speed);
2996 if (GET_MODE (x) == DImode)
3002 if (GET_MODE (x) != SImode)
3007 if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
3008 *total += rtx_cost (XEXP (x, 1), code, speed);
3012 if (! regorlog2_operand (XEXP (x, 1), SImode))
3013 *total += rtx_cost (XEXP (x, 1), code, speed);
3020 if (outer_code == SET
3021 && XEXP (x, 1) == const1_rtx
3022 && GET_CODE (XEXP (x, 2)) == CONST_INT)
3038 if (GET_CODE (op0) == GET_CODE (op1)
3039 && (GET_CODE (op0) == ZERO_EXTEND
3040 || GET_CODE (op0) == SIGN_EXTEND))
3042 *total = COSTS_N_INSNS (1);
3043 op0 = XEXP (op0, 0);
3044 op1 = XEXP (op1, 0);
3047 *total = COSTS_N_INSNS (1);
3049 *total = COSTS_N_INSNS (3);
3051 if (GET_CODE (op0) != REG
3052 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
3053 *total += rtx_cost (op0, MULT, speed);
3054 if (GET_CODE (op1) != REG
3055 && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
3056 *total += rtx_cost (op1, MULT, speed);
3062 *total = COSTS_N_INSNS (32);
3067 if (outer_code == SET)
3076 /* Used for communication between {push,pop}_multiple_operation (which
3077 we use not only as a predicate) and the corresponding output functions. */
3078 static int first_preg_to_save, first_dreg_to_save;
3081 push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3083 int lastdreg = 8, lastpreg = 6;
3086 first_preg_to_save = lastpreg;
3087 first_dreg_to_save = lastdreg;
3088 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
3090 rtx t = XVECEXP (op, 0, i);
3094 if (GET_CODE (t) != SET)
3098 dest = SET_DEST (t);
3099 if (GET_CODE (dest) != MEM || ! REG_P (src))
3101 dest = XEXP (dest, 0);
3102 if (GET_CODE (dest) != PLUS
3103 || ! REG_P (XEXP (dest, 0))
3104 || REGNO (XEXP (dest, 0)) != REG_SP
3105 || GET_CODE (XEXP (dest, 1)) != CONST_INT
3106 || INTVAL (XEXP (dest, 1)) != -i * 4)
3109 regno = REGNO (src);
3112 if (D_REGNO_P (regno))
3115 first_dreg_to_save = lastdreg = regno - REG_R0;
3117 else if (regno >= REG_P0 && regno <= REG_P7)
3120 first_preg_to_save = lastpreg = regno - REG_P0;
3130 if (regno >= REG_P0 && regno <= REG_P7)
3133 first_preg_to_save = lastpreg = regno - REG_P0;
3135 else if (regno != REG_R0 + lastdreg + 1)
3140 else if (group == 2)
3142 if (regno != REG_P0 + lastpreg + 1)
3151 pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3153 int lastdreg = 8, lastpreg = 6;
3156 for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
3158 rtx t = XVECEXP (op, 0, i);
3162 if (GET_CODE (t) != SET)
3166 dest = SET_DEST (t);
3167 if (GET_CODE (src) != MEM || ! REG_P (dest))
3169 src = XEXP (src, 0);
3173 if (! REG_P (src) || REGNO (src) != REG_SP)
3176 else if (GET_CODE (src) != PLUS
3177 || ! REG_P (XEXP (src, 0))
3178 || REGNO (XEXP (src, 0)) != REG_SP
3179 || GET_CODE (XEXP (src, 1)) != CONST_INT
3180 || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
3183 regno = REGNO (dest);
3186 if (regno == REG_R7)
3191 else if (regno != REG_P0 + lastpreg - 1)
3196 else if (group == 1)
3198 if (regno != REG_R0 + lastdreg - 1)
3204 first_dreg_to_save = lastdreg;
3205 first_preg_to_save = lastpreg;
3209 /* Emit assembly code for one multi-register push described by INSN, with
3210 operands in OPERANDS. */
3213 output_push_multiple (rtx insn, rtx *operands)
3218 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3219 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
3222 if (first_dreg_to_save == 8)
3223 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
3224 else if (first_preg_to_save == 6)
3225 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
3227 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
3228 first_dreg_to_save, first_preg_to_save);
3230 output_asm_insn (buf, operands);
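/* Example of the text emitted above: with first_dreg_to_save == 4 and
   first_preg_to_save == 3 the buffer is "[--sp] = ( r7:4, p5:3 );", a single
   multi-register store of R7..R4 and P5..P3.  */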
3233 /* Emit assembly code for one multi-register pop described by INSN, with
3234 operands in OPERANDS. */
3237 output_pop_multiple (rtx insn, rtx *operands)
3242 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3243 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
3246 if (first_dreg_to_save == 8)
3247 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
3248 else if (first_preg_to_save == 6)
3249 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
3251 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
3252 first_dreg_to_save, first_preg_to_save);
3254 output_asm_insn (buf, operands);
3257 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3260 single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
3262 rtx scratch = gen_reg_rtx (mode);
3265 srcmem = adjust_address_nv (src, mode, offset);
3266 dstmem = adjust_address_nv (dst, mode, offset);
3267 emit_move_insn (scratch, srcmem);
3268 emit_move_insn (dstmem, scratch);
3271 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3272 alignment ALIGN_EXP. Return true if successful, false if we should fall
3273 back on a different method. */
3276 bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
3278 rtx srcreg, destreg, countreg;
3279 HOST_WIDE_INT align = 0;
3280 unsigned HOST_WIDE_INT count = 0;
3282 if (GET_CODE (align_exp) == CONST_INT)
3283 align = INTVAL (align_exp);
3284 if (GET_CODE (count_exp) == CONST_INT)
3286 count = INTVAL (count_exp);
3288 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
3293 /* If optimizing for size, only do single copies inline. */
3296 if (count == 2 && align < 2)
3298 if (count == 4 && align < 4)
3300 if (count != 1 && count != 2 && count != 4)
3303 if (align < 2 && count != 1)
3306 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
3307 if (destreg != XEXP (dst, 0))
3308 dst = replace_equiv_address_nv (dst, destreg);
3309 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
3310 if (srcreg != XEXP (src, 0))
3311 src = replace_equiv_address_nv (src, srcreg);
3313 if (count != 0 && align >= 2)
3315 unsigned HOST_WIDE_INT offset = 0;
3319 if ((count & ~3) == 4)
3321 single_move_for_movmem (dst, src, SImode, offset);
3324 else if (count & ~3)
3326 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
3327 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
3329 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
3333 single_move_for_movmem (dst, src, HImode, offset);
3339 if ((count & ~1) == 2)
3341 single_move_for_movmem (dst, src, HImode, offset);
3344 else if (count & ~1)
3346 HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
3347 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
3349 emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
3354 single_move_for_movmem (dst, src, QImode, offset);
3361 /* Compute the alignment for a local variable.
3362 TYPE is the data type, and ALIGN is the alignment that
3363 the object would ordinarily have. The value of this macro is used
3364 instead of that alignment to align the object. */
3367 bfin_local_alignment (tree type, int align)
3369 /* Increasing the alignment of (relatively) big types allows the builtin
3370 memcpy to use 32-bit loads/stores.  */
3371 if (TYPE_SIZE (type)
3372 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3373 && (TREE_INT_CST_LOW (TYPE_SIZE (type)) > 8
3374 || TREE_INT_CST_HIGH (TYPE_SIZE (type))) && align < 32)
3379 /* Implement TARGET_SCHED_ISSUE_RATE. */
3382 bfin_issue_rate (void)
3388 bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
3390 enum attr_type insn_type, dep_insn_type;
3391 int dep_insn_code_number;
3393 /* Anti and output dependencies have zero cost. */
3394 if (REG_NOTE_KIND (link) != 0)
3397 dep_insn_code_number = recog_memoized (dep_insn);
3399 /* If we can't recognize the insns, we can't really do anything. */
3400 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
3403 insn_type = get_attr_type (insn);
3404 dep_insn_type = get_attr_type (dep_insn);
3406 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
3408 rtx pat = PATTERN (dep_insn);
3409 if (GET_CODE (pat) == PARALLEL)
3410 pat = XVECEXP (pat, 0, 0);
3411 rtx dest = SET_DEST (pat);
3412 rtx src = SET_SRC (pat);
3413 if (! ADDRESS_REGNO_P (REGNO (dest))
3414 || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
3416 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
3423 /* Increment the counter for the number of loop instructions in the
3424 current function. */
3427 bfin_hardware_loop (void)
3429 cfun->machine->has_hardware_loops++;
3432 /* Maximum loop nesting depth. */
3433 #define MAX_LOOP_DEPTH 2
3435 /* Maximum size of a loop. */
3436 #define MAX_LOOP_LENGTH 2042
3438 /* Maximum distance of the LSETUP instruction from the loop start. */
3439 #define MAX_LSETUP_DISTANCE 30
3441 /* We need to keep a vector of loops */
3442 typedef struct loop_info *loop_info;
3443 DEF_VEC_P (loop_info);
3444 DEF_VEC_ALLOC_P (loop_info,heap);
3446 /* Information about a loop we have found (or are in the process of finding).  */
3448 struct loop_info GTY (())
3450 /* loop number, for dumps */
3453 /* All edges that jump into and out of the loop. */
3454 VEC(edge,gc) *incoming;
3456 /* We can handle two cases: all incoming edges have the same destination
3457 block, or all incoming edges have the same source block. These two
3458 members are set to the common source or destination we found, or NULL
3459 if different blocks were found.  If both are NULL the loop can't be optimized.  */
3461 basic_block incoming_src;
3462 basic_block incoming_dest;
3464 /* First block in the loop.  This is the one branched to by the loop_end insn.  */
3468 /* Last block in the loop (the one with the loop_end insn). */
3471 /* The successor block of the loop. This is the one the loop_end insn
3473 basic_block successor;
3475 /* The last instruction in the tail. */
3478 /* The loop_end insn. */
3481 /* The iteration register. */
3484 /* The new initialization insn. */
3487 /* The new initialization instruction. */
3490 /* The new label placed at the beginning of the loop. */
3493 /* The new label placed at the end of the loop. */
3496 /* The length of the loop. */
3499 /* The nesting depth of the loop. */
3502 /* Nonzero if we can't optimize this loop. */
3505 /* True if we have visited this loop. */
3508 /* True if this loop body clobbers any of LC0, LT0, or LB0. */
3511 /* True if this loop body clobbers any of LC1, LT1, or LB1. */
3514 /* Next loop in the graph. */
3515 struct loop_info *next;
3517 /* Immediate outer loop of this loop. */
3518 struct loop_info *outer;
3520 /* Vector of blocks only within the loop, including those within
3522 VEC (basic_block,heap) *blocks;
3524 /* Same information in a bitmap. */
3525 bitmap block_bitmap;
3527 /* Vector of inner loops within this loop */
3528 VEC (loop_info,heap) *loops;
3532 bfin_dump_loops (loop_info loops)
3536 for (loop = loops; loop; loop = loop->next)
3542 fprintf (dump_file, ";; loop %d: ", loop->loop_no);
3544 fprintf (dump_file, "(bad) ");
3545 fprintf (dump_file, "{head:%d, depth:%d}", loop->head->index, loop->depth);
3547 fprintf (dump_file, " blocks: [ ");
3548 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
3549 fprintf (dump_file, "%d ", b->index);
3550 fprintf (dump_file, "] ");
3552 fprintf (dump_file, " inner loops: [ ");
3553 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, i); ix++)
3554 fprintf (dump_file, "%d ", i->loop_no);
3555 fprintf (dump_file, "]\n");
3557 fprintf (dump_file, "\n");
3560 /* Scan the blocks of LOOP (and its inferiors) looking for basic block
3561 BB.  Return true if we find it.  */
3564 bfin_bb_in_loop (loop_info loop, basic_block bb)
3566 return bitmap_bit_p (loop->block_bitmap, bb->index);
3569 /* Scan the blocks of LOOP (and its inferiors) looking for uses of
3570 REG.  Return true if we find any.  Don't count the loop's loop_end
3571 insn if it matches LOOP_END. */
3574 bfin_scan_loop (loop_info loop, rtx reg, rtx loop_end)
3579 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
3583 for (insn = BB_HEAD (bb);
3584 insn != NEXT_INSN (BB_END (bb));
3585 insn = NEXT_INSN (insn))
3589 if (insn == loop_end)
3591 if (reg_mentioned_p (reg, PATTERN (insn)))
3598 /* Estimate the length of INSN conservatively. */
3601 length_for_loop (rtx insn)
3604 if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
3606 if (ENABLE_WA_SPECULATIVE_SYNCS)
3608 else if (ENABLE_WA_SPECULATIVE_LOADS)
3611 else if (LABEL_P (insn))
3613 if (ENABLE_WA_SPECULATIVE_SYNCS)
3618 length += get_attr_length (insn);
3623 /* Optimize LOOP. */
3626 bfin_optimize_loop (loop_info loop)
3630 rtx insn, init_insn, last_insn, nop_insn;
3631 rtx loop_init, start_label, end_label;
3632 rtx reg_lc0, reg_lc1, reg_lt0, reg_lt1, reg_lb0, reg_lb1;
3634 rtx lc_reg, lt_reg, lb_reg;
3638 int inner_depth = 0;
3648 fprintf (dump_file, ";; loop %d bad when found\n", loop->loop_no);
3652 /* Every loop contains in its list of inner loops every loop nested inside
3653 it, even if there are intermediate loops. This works because we're doing
3654 a depth-first search here and never visit a loop more than once. */
3655 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
3657 bfin_optimize_loop (inner);
3659 if (!inner->bad && inner_depth < inner->depth)
3661 inner_depth = inner->depth;
3663 loop->clobber_loop0 |= inner->clobber_loop0;
3664 loop->clobber_loop1 |= inner->clobber_loop1;
3668 loop->depth = inner_depth + 1;
3669 if (loop->depth > MAX_LOOP_DEPTH)
3672 fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
3676 /* Get the loop iteration register. */
3677 iter_reg = loop->iter_reg;
3679 if (!DPREG_P (iter_reg))
3682 fprintf (dump_file, ";; loop %d iteration count NOT in PREG or DREG\n",
3687 if (loop->incoming_src)
3689 /* Make sure the predecessor is before the loop start label, as required by
3690 the LSETUP instruction. */
3692 for (insn = BB_END (loop->incoming_src);
3693 insn && insn != loop->start_label;
3694 insn = NEXT_INSN (insn))
3695 length += length_for_loop (insn);
3700 fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
3705 if (length > MAX_LSETUP_DISTANCE)
3708 fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
3713 /* Check if start_label appears before loop_end and calculate the
3714 offset between them.  We calculate the length of instructions conservatively.  */
3717 for (insn = loop->start_label;
3718 insn && insn != loop->loop_end;
3719 insn = NEXT_INSN (insn))
3720 length += length_for_loop (insn);
3725 fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
3730 loop->length = length;
3731 if (loop->length > MAX_LOOP_LENGTH)
3734 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
3738 /* Scan all the blocks to make sure they don't use iter_reg. */
3739 if (bfin_scan_loop (loop, iter_reg, loop->loop_end))
3742 fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
3746 /* Scan all the insns to see if the loop body clobbers
3747 any hardware loop registers. */
3749 reg_lc0 = gen_rtx_REG (SImode, REG_LC0);
3750 reg_lc1 = gen_rtx_REG (SImode, REG_LC1);
3751 reg_lt0 = gen_rtx_REG (SImode, REG_LT0);
3752 reg_lt1 = gen_rtx_REG (SImode, REG_LT1);
3753 reg_lb0 = gen_rtx_REG (SImode, REG_LB0);
3754 reg_lb1 = gen_rtx_REG (SImode, REG_LB1);
3756 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
3760 for (insn = BB_HEAD (bb);
3761 insn != NEXT_INSN (BB_END (bb));
3762 insn = NEXT_INSN (insn))
3767 if (reg_set_p (reg_lc0, insn)
3768 || reg_set_p (reg_lt0, insn)
3769 || reg_set_p (reg_lb0, insn))
3770 loop->clobber_loop0 = 1;
3772 if (reg_set_p (reg_lc1, insn)
3773 || reg_set_p (reg_lt1, insn)
3774 || reg_set_p (reg_lb1, insn))
3775 loop->clobber_loop1 |= 1;
3779 if ((loop->clobber_loop0 && loop->clobber_loop1)
3780 || (loop->depth == MAX_LOOP_DEPTH && loop->clobber_loop0))
3782 loop->depth = MAX_LOOP_DEPTH + 1;
3784 fprintf (dump_file, ";; loop %d no loop reg available\n",
3789 /* There should be an instruction before the loop_end instruction
3790 in the same basic block. And the instruction must not be
3792 - CONDITIONAL BRANCH
3796 - Returns (RTS, RTN, etc.) */
3799 last_insn = PREV_INSN (loop->loop_end);
3803 for (; last_insn != PREV_INSN (BB_HEAD (bb));
3804 last_insn = PREV_INSN (last_insn))
3805 if (INSN_P (last_insn))
3808 if (last_insn != PREV_INSN (BB_HEAD (bb)))
3811 if (single_pred_p (bb)
3812 && single_pred (bb) != ENTRY_BLOCK_PTR)
3814 bb = single_pred (bb);
3815 last_insn = BB_END (bb);
3820 last_insn = NULL_RTX;
3828 fprintf (dump_file, ";; loop %d has no last instruction\n",
3833 if (JUMP_P (last_insn))
3835 loop_info inner = (loop_info) bb->aux;
3837 && inner->outer == loop
3838 && inner->loop_end == last_insn
3839 && inner->depth == 1)
3840 /* This jump_insn is the exact loop_end of an inner loop
3841 and will be optimized away, so use the inner loop's last_insn.  */
3842 last_insn = inner->last_insn;
3846 fprintf (dump_file, ";; loop %d has bad last instruction\n",
3851 else if (CALL_P (last_insn)
3852 || (GET_CODE (PATTERN (last_insn)) != SEQUENCE
3853 && get_attr_type (last_insn) == TYPE_SYNC)
3854 || recog_memoized (last_insn) == CODE_FOR_return_internal)
3857 fprintf (dump_file, ";; loop %d has bad last instruction\n",
3862 if (GET_CODE (PATTERN (last_insn)) == ASM_INPUT
3863 || asm_noperands (PATTERN (last_insn)) >= 0
3864 || (GET_CODE (PATTERN (last_insn)) != SEQUENCE
3865 && get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI))
3867 nop_insn = emit_insn_after (gen_nop (), last_insn);
3868 last_insn = nop_insn;
3871 loop->last_insn = last_insn;
3873 /* The loop is good for replacement. */
3874 start_label = loop->start_label;
3875 end_label = gen_label_rtx ();
3876 iter_reg = loop->iter_reg;
3878 if (loop->depth == 1 && !loop->clobber_loop1)
3883 loop->clobber_loop1 = 1;
3890 loop->clobber_loop0 = 1;
3893 /* If iter_reg is a DREG, we need to generate an instruction to load
3894 the loop count into the LC register.  */
3895 if (D_REGNO_P (REGNO (iter_reg)))
3897 init_insn = gen_movsi (lc_reg, iter_reg);
3898 loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
3902 else if (P_REGNO_P (REGNO (iter_reg)))
3904 init_insn = NULL_RTX;
3905 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3912 loop->init = init_insn;
3913 loop->end_label = end_label;
3914 loop->loop_init = loop_init;
3918 fprintf (dump_file, ";; replacing loop %d initializer with\n",
3920 print_rtl_single (dump_file, loop->loop_init);
3921 fprintf (dump_file, ";; replacing loop %d terminator with\n",
3923 print_rtl_single (dump_file, loop->loop_end);
3928 if (loop->init != NULL_RTX)
3929 emit_insn (loop->init);
3930 seq_end = emit_insn (loop->loop_init);
3935 if (loop->incoming_src)
3937 rtx prev = BB_END (loop->incoming_src);
3938 if (VEC_length (edge, loop->incoming) > 1
3939 || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
3941 gcc_assert (JUMP_P (prev));
3942 prev = PREV_INSN (prev);
3944 emit_insn_after (seq, prev);
3952 if (loop->head != loop->incoming_dest)
3954 FOR_EACH_EDGE (e, ei, loop->head->preds)
3956 if (e->flags & EDGE_FALLTHRU)
3958 rtx newjump = gen_jump (loop->start_label);
3959 emit_insn_before (newjump, BB_HEAD (loop->head));
3960 new_bb = create_basic_block (newjump, newjump, loop->head->prev_bb);
3961 gcc_assert (new_bb == loop->head->prev_bb);
3967 emit_insn_before (seq, BB_HEAD (loop->head));
3968 seq = emit_label_before (gen_label_rtx (), seq);
3970 new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
3971 FOR_EACH_EDGE (e, ei, loop->incoming)
3973 if (!(e->flags & EDGE_FALLTHRU)
3974 || e->dest != loop->head)
3975 redirect_edge_and_branch_force (e, new_bb);
3977 redirect_edge_succ (e, new_bb);
3981 delete_insn (loop->loop_end);
3982 /* Insert the loop end label before the last instruction of the loop. */
3983 emit_label_before (loop->end_label, loop->last_insn);
3990 fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);
3994 if (DPREG_P (loop->iter_reg))
3996 /* If loop->iter_reg is a DREG or PREG, we can split it here
3997 without a scratch register.  */
4000 emit_insn_before (gen_addsi3 (loop->iter_reg,
4005 emit_insn_before (gen_cmpsi (loop->iter_reg, const0_rtx),
4008 insn = emit_jump_insn_before (gen_bne (loop->start_label),
4011 JUMP_LABEL (insn) = loop->start_label;
4012 LABEL_NUSES (loop->start_label)++;
4013 delete_insn (loop->loop_end);
4017 /* Called from bfin_reorg_loops when a potential loop end is found. LOOP is
4018 a newly set up structure describing the loop, it is this function's
4019 responsibility to fill most of it. TAIL_BB and TAIL_INSN point to the
4020 loop_end insn and its enclosing basic block. */
4023 bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
4027 VEC (basic_block,heap) *works = VEC_alloc (basic_block,heap,20);
4029 loop->tail = tail_bb;
4030 loop->head = BRANCH_EDGE (tail_bb)->dest;
4031 loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
4032 loop->loop_end = tail_insn;
4033 loop->last_insn = NULL_RTX;
4034 loop->iter_reg = SET_DEST (XVECEXP (PATTERN (tail_insn), 0, 1));
4035 loop->depth = loop->length = 0;
4037 loop->clobber_loop0 = loop->clobber_loop1 = 0;
4040 loop->incoming = VEC_alloc (edge, gc, 2);
4041 loop->init = loop->loop_init = NULL_RTX;
4042 loop->start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn), 0, 0)), 1), 0);
4043 loop->end_label = NULL_RTX;
4046 VEC_safe_push (basic_block, heap, works, loop->head);
4048 while (VEC_iterate (basic_block, works, dwork++, bb))
4052 if (bb == EXIT_BLOCK_PTR)
4054 /* We've reached the exit block. The loop must be bad. */
4057 ";; Loop is bad - reached exit block while scanning\n");
4062 if (bitmap_bit_p (loop->block_bitmap, bb->index))
4065 /* We've not seen this block before. Add it to the loop's
4066 list and then add each successor to the work list. */
4068 VEC_safe_push (basic_block, heap, loop->blocks, bb);
4069 bitmap_set_bit (loop->block_bitmap, bb->index);
4073 FOR_EACH_EDGE (e, ei, bb->succs)
4075 basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
4076 if (!REGNO_REG_SET_P (df_get_live_in (succ),
4077 REGNO (loop->iter_reg)))
4079 if (!VEC_space (basic_block, works, 1))
4083 VEC_block_remove (basic_block, works, 0, dwork);
4087 VEC_reserve (basic_block, heap, works, 1);
4089 VEC_quick_push (basic_block, works, succ);
4094 /* Find the predecessor, and make sure nothing else jumps into this loop. */
4098 for (dwork = 0; VEC_iterate (basic_block, loop->blocks, dwork, bb); dwork++)
4102 FOR_EACH_EDGE (e, ei, bb->preds)
4104 basic_block pred = e->src;
4106 if (!bfin_bb_in_loop (loop, pred))
4109 fprintf (dump_file, ";; Loop %d: incoming edge %d -> %d\n",
4110 loop->loop_no, pred->index,
4112 VEC_safe_push (edge, gc, loop->incoming, e);
4117 for (pass = 0, retry = 1; retry && pass < 2; pass++)
4124 FOR_EACH_EDGE (e, ei, loop->incoming)
4128 loop->incoming_src = e->src;
4129 loop->incoming_dest = e->dest;
4134 if (e->dest != loop->incoming_dest)
4135 loop->incoming_dest = NULL;
4136 if (e->src != loop->incoming_src)
4137 loop->incoming_src = NULL;
4139 if (loop->incoming_src == NULL && loop->incoming_dest == NULL)
4145 ";; retrying loop %d with forwarder blocks\n",
4153 ";; can't find suitable entry for loop %d\n",
4161 FOR_EACH_EDGE (e, ei, loop->incoming)
4163 if (forwarder_block_p (e->src))
4170 ";; Adding forwarder block %d to loop %d and retrying\n",
4171 e->src->index, loop->loop_no);
4172 VEC_safe_push (basic_block, heap, loop->blocks, e->src);
4173 bitmap_set_bit (loop->block_bitmap, e->src->index);
4174 FOR_EACH_EDGE (e2, ei2, e->src->preds)
4175 VEC_safe_push (edge, gc, loop->incoming, e2);
4176 VEC_unordered_remove (edge, loop->incoming, ei.index);
4186 VEC_free (basic_block, heap, works);
4189 /* Analyze the structure of the loops in the current function. Use STACK
4190 for bitmap allocations. Returns all the valid candidates for hardware
4191 loops found in this function. */
4193 bfin_discover_loops (bitmap_obstack *stack, FILE *dump_file)
4195 loop_info loops = NULL;
4201 /* Find all the possible loop tails. This means searching for every
4202 loop_end instruction. For each one found, create a loop_info
4203 structure and add the head block to the work list. */
4206 rtx tail = BB_END (bb);
4208 while (GET_CODE (tail) == NOTE)
4209 tail = PREV_INSN (tail);
4213 if (INSN_P (tail) && recog_memoized (tail) == CODE_FOR_loop_end)
4216 /* A possible loop end */
4218 /* There's a degenerate case we can handle - an empty loop consisting
4219 of only a back branch. Handle that by deleting the branch. */
4220 insn = BB_HEAD (BRANCH_EDGE (bb)->dest);
4221 if (next_real_insn (insn) == tail)
4225 fprintf (dump_file, ";; degenerate loop ending at\n");
4226 print_rtl_single (dump_file, tail);
4228 delete_insn_and_edges (tail);
4232 loop = XNEW (struct loop_info);
4235 loop->loop_no = nloops++;
4236 loop->blocks = VEC_alloc (basic_block, heap, 20);
4237 loop->block_bitmap = BITMAP_ALLOC (stack);
4242 fprintf (dump_file, ";; potential loop %d ending at\n",
4244 print_rtl_single (dump_file, tail);
4247 bfin_discover_loop (loop, bb, tail);
4251 tmp_bitmap = BITMAP_ALLOC (stack);
4252 /* Compute loop nestings. */
4253 for (loop = loops; loop; loop = loop->next)
4259 for (other = loop->next; other; other = other->next)
4264 bitmap_and (tmp_bitmap, other->block_bitmap, loop->block_bitmap);
4265 if (bitmap_empty_p (tmp_bitmap))
4267 if (bitmap_equal_p (tmp_bitmap, other->block_bitmap))
4269 other->outer = loop;
4270 VEC_safe_push (loop_info, heap, loop->loops, other);
4272 else if (bitmap_equal_p (tmp_bitmap, loop->block_bitmap))
4274 loop->outer = other;
4275 VEC_safe_push (loop_info, heap, other->loops, loop);
4281 ";; can't find suitable nesting for loops %d and %d\n",
4282 loop->loop_no, other->loop_no);
4283 loop->bad = other->bad = 1;
4287 BITMAP_FREE (tmp_bitmap);
4292 /* Free up the loop structures in LOOPS. */
4294 free_loops (loop_info loops)
4298 loop_info loop = loops;
4300 VEC_free (loop_info, heap, loop->loops);
4301 VEC_free (basic_block, heap, loop->blocks);
4302 BITMAP_FREE (loop->block_bitmap);
4307 #define BB_AUX_INDEX(BB) ((unsigned)(BB)->aux)
4309 /* The taken-branch edge from the loop end can actually go forward. Since the
4310 Blackfin's LSETUP instruction requires that the loop end be after the loop
4311 start, try to reorder a loop's basic blocks when we find such a case. */
4313 bfin_reorder_loops (loop_info loops, FILE *dump_file)
4320 cfg_layout_initialize (0);
4322 for (loop = loops; loop; loop = loop->next)
4332 /* Recreate an index for basic blocks that represents their order. */
4333 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
4334 bb != EXIT_BLOCK_PTR;
4335 bb = bb->next_bb, index++)
4336 bb->aux = (PTR) index;
4338 if (BB_AUX_INDEX (loop->head) < BB_AUX_INDEX (loop->tail))
4341 FOR_EACH_EDGE (e, ei, loop->head->succs)
4343 if (bitmap_bit_p (loop->block_bitmap, e->dest->index)
4344 && BB_AUX_INDEX (e->dest) < BB_AUX_INDEX (loop->tail))
4346 basic_block start_bb = e->dest;
4347 basic_block start_prev_bb = start_bb->prev_bb;
4350 fprintf (dump_file, ";; Moving block %d before block %d\n",
4351 loop->head->index, start_bb->index);
4352 loop->head->prev_bb->next_bb = loop->head->next_bb;
4353 loop->head->next_bb->prev_bb = loop->head->prev_bb;
4355 loop->head->prev_bb = start_prev_bb;
4356 loop->head->next_bb = start_bb;
4357 start_prev_bb->next_bb = start_bb->prev_bb = loop->head;
4361 loops = loops->next;
4366 if (bb->next_bb != EXIT_BLOCK_PTR)
4367 bb->aux = bb->next_bb;
4371 cfg_layout_finalize ();
4375 /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
4376 and tries to rewrite the RTL of these loops so that proper Blackfin
4377 hardware loops are generated. */
4380 bfin_reorg_loops (FILE *dump_file)
4382 loop_info loops = NULL;
4385 bitmap_obstack stack;
4387 bitmap_obstack_initialize (&stack);
4390 fprintf (dump_file, ";; Find loops, first pass\n\n");
4392 loops = bfin_discover_loops (&stack, dump_file);
4395 bfin_dump_loops (loops);
4397 bfin_reorder_loops (loops, dump_file);
4401 fprintf (dump_file, ";; Find loops, second pass\n\n");
4403 loops = bfin_discover_loops (&stack, dump_file);
4406 fprintf (dump_file, ";; All loops found:\n\n");
4407 bfin_dump_loops (loops);
4410 /* Now apply the optimizations. */
4411 for (loop = loops; loop; loop = loop->next)
4412 bfin_optimize_loop (loop);
4416 fprintf (dump_file, ";; After hardware loops optimization:\n\n");
4417 bfin_dump_loops (loops);
4423 print_rtl (dump_file, get_insns ());
4429 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
4430 Returns true if we modified the insn chain, false otherwise. */
4432 gen_one_bundle (rtx slot[3])
4434 gcc_assert (slot[1] != NULL_RTX);
4436 /* Verify that we really can do the multi-issue. */
4439 rtx t = NEXT_INSN (slot[0]);
4440 while (t != slot[1])
4442 if (GET_CODE (t) != NOTE
4443 || NOTE_KIND (t) != NOTE_INSN_DELETED)
4450 rtx t = NEXT_INSN (slot[1]);
4451 while (t != slot[2])
4453 if (GET_CODE (t) != NOTE
4454 || NOTE_KIND (t) != NOTE_INSN_DELETED)
4460 if (slot[0] == NULL_RTX)
4462 slot[0] = emit_insn_before (gen_mnop (), slot[1]);
4463 df_insn_rescan (slot[0]);
4465 if (slot[2] == NULL_RTX)
4467 slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
4468 df_insn_rescan (slot[2]);
4471 /* Avoid line number information being printed inside one bundle. */
4472 if (INSN_LOCATOR (slot[1])
4473 && INSN_LOCATOR (slot[1]) != INSN_LOCATOR (slot[0]))
4474 INSN_LOCATOR (slot[1]) = INSN_LOCATOR (slot[0]);
4475 if (INSN_LOCATOR (slot[2])
4476 && INSN_LOCATOR (slot[2]) != INSN_LOCATOR (slot[0]))
4477 INSN_LOCATOR (slot[2]) = INSN_LOCATOR (slot[0]);
4479 /* Terminate them with "|| " instead of ";" in the output. */
4480 PUT_MODE (slot[0], SImode);
4481 PUT_MODE (slot[1], SImode);
4482 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
4483 PUT_MODE (slot[2], QImode);
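/* Illustration: with the modes set as above, the three slots print as one
   issue packet of the form "<32-bit DSP insn> || <16-bit insn> || <16-bit insn>;".
   SImode on a slot makes the output routines terminate it with " || ", and
   QImode marks the last slot, which ends with ";".  */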
4487 /* Go through all insns, and use the information generated during scheduling
4488 to generate SEQUENCEs to represent bundles of instructions issued simultaneously.  */
4492 bfin_gen_bundles (void)
4501 slot[0] = slot[1] = slot[2] = NULL_RTX;
4502 for (insn = BB_HEAD (bb);; insn = next)
4507 if (get_attr_type (insn) == TYPE_DSP32)
4509 else if (slot[1] == NULL_RTX)
4516 next = NEXT_INSN (insn);
4517 while (next && insn != BB_END (bb)
4519 && GET_CODE (PATTERN (next)) != USE
4520 && GET_CODE (PATTERN (next)) != CLOBBER))
4523 next = NEXT_INSN (insn);
4526 /* BB_END can change due to emitting extra NOPs, so check here. */
4527 at_end = insn == BB_END (bb);
4528 if (at_end || GET_MODE (next) == TImode)
4531 || !gen_one_bundle (slot))
4532 && slot[0] != NULL_RTX)
4534 rtx pat = PATTERN (slot[0]);
4535 if (GET_CODE (pat) == SET
4536 && GET_CODE (SET_SRC (pat)) == UNSPEC
4537 && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
4539 SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
4540 INSN_CODE (slot[0]) = -1;
4541 df_insn_rescan (slot[0]);
4545 slot[0] = slot[1] = slot[2] = NULL_RTX;
4553 /* Ensure that no var tracking notes are emitted in the middle of a
4554 three-instruction bundle. */
4557 reorder_var_tracking_notes (void)
4563 rtx queue = NULL_RTX;
4564 bool in_bundle = false;
4566 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
4568 next = NEXT_INSN (insn);
4572 /* Emit queued up notes at the last instruction of a bundle. */
4573 if (GET_MODE (insn) == QImode)
4577 rtx next_queue = PREV_INSN (queue);
4578 PREV_INSN (NEXT_INSN (insn)) = queue;
4579 NEXT_INSN (queue) = NEXT_INSN (insn);
4580 NEXT_INSN (insn) = queue;
4581 PREV_INSN (queue) = insn;
4586 else if (GET_MODE (insn) == SImode)
4589 else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
4593 rtx prev = PREV_INSN (insn);
4594 PREV_INSN (next) = prev;
4595 NEXT_INSN (prev) = next;
4597 PREV_INSN (insn) = queue;
4605 /* Return an insn type for INSN that can be used by the caller for anomaly
4606 workarounds.  This differs from plain get_attr_type in that it handles SEQUENCEs.  */
4609 static enum attr_type
4610 type_for_anomaly (rtx insn)
4612 rtx pat = PATTERN (insn);
4613 if (GET_CODE (pat) == SEQUENCE)
4616 t = get_attr_type (XVECEXP (pat, 0, 1));
4619 t = get_attr_type (XVECEXP (pat, 0, 2));
4625 return get_attr_type (insn);
4628 /* Return nonzero if INSN contains any loads that may trap. It handles
4629 SEQUENCEs correctly. */
4632 trapping_loads_p (rtx insn)
4634 rtx pat = PATTERN (insn);
4635 if (GET_CODE (pat) == SEQUENCE)
4638 t = get_attr_type (XVECEXP (pat, 0, 1));
4640 && may_trap_p (SET_SRC (PATTERN (XVECEXP (pat, 0, 1)))))
4642 t = get_attr_type (XVECEXP (pat, 0, 2));
4644 && may_trap_p (SET_SRC (PATTERN (XVECEXP (pat, 0, 2)))))
4649 return may_trap_p (SET_SRC (single_set (insn)));
4652 /* This function acts like NEXT_INSN, but is aware of three-insn bundles and
4653 skips all subsequent parallel instructions if INSN is the start of such a bundle.  */
4656 find_next_insn_start (rtx insn)
4658 if (GET_MODE (insn) == SImode)
4660 while (GET_MODE (insn) != QImode)
4661 insn = NEXT_INSN (insn);
4663 return NEXT_INSN (insn);
4666 /* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4667 a three-insn bundle, see if one of them is a load and return that if so.
4668 Return NULL_RTX if the insn does not contain loads. */
4670 find_load (rtx insn)
4672 if (get_attr_type (insn) == TYPE_MCLD)
4674 if (GET_MODE (insn) != SImode)
4677 insn = NEXT_INSN (insn);
4678 if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
4679 && get_attr_type (insn) == TYPE_MCLD)
4681 } while (GET_MODE (insn) != QImode);
4685 /* We use the machine specific reorg pass for emitting CSYNC instructions
4686 after conditional branches as needed.
4688 The Blackfin is unusual in that a conditional branch followed by a load
4691 may speculatively perform the load even if the condition isn't true. This
4692 happens for a branch that is predicted not taken, because the pipeline
4693 isn't flushed or stalled, so the early stages of the following instructions,
4694 which perform the memory reference, are allowed to execute before the
4695 jump condition is evaluated.
4696 Therefore, we must insert additional instructions in all places where this
4697 could lead to incorrect behavior. The manual recommends CSYNC, while
4698 VDSP seems to use NOPs (even though its corresponding compiler option is
4701 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
4702 When optimizing for size, we turn the branch into a predicted taken one.
4703 This may be slower due to mispredicts, but saves code size. */
4709 rtx last_condjump = NULL_RTX;
4710 int cycles_since_jump = INT_MAX;
4712 /* We are freeing block_for_insn in the toplev to keep compatibility
4713 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4714 compute_bb_for_insn ();
4716 if (bfin_flag_schedule_insns2)
4718 splitting_for_sched = 1;
4720 splitting_for_sched = 0;
4722 timevar_push (TV_SCHED2);
4724 timevar_pop (TV_SCHED2);
4726 /* Examine the schedule and insert nops as necessary for 64-bit parallel
4728 bfin_gen_bundles ();
4733 /* Doloop optimization */
4734 if (cfun->machine->has_hardware_loops)
4735 bfin_reorg_loops (dump_file);
4737 if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS)
4740 /* First pass: find predicted-false branches; if something after them
4741 needs nops, insert them or change the branch to predict true. */
4742 for (insn = get_insns (); insn; insn = next)
4746 next = find_next_insn_start (insn);
4748 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
4751 pat = PATTERN (insn);
4752 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
4753 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
4754 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
4759 if (any_condjump_p (insn)
4760 && ! cbranch_predicted_taken_p (insn))
4762 last_condjump = insn;
4763 cycles_since_jump = 0;
4766 cycles_since_jump = INT_MAX;
4768 else if (INSN_P (insn))
4770 rtx load_insn = find_load (insn);
4771 enum attr_type type = type_for_anomaly (insn);
4772 int delay_needed = 0;
4773 if (cycles_since_jump < INT_MAX)
4774 cycles_since_jump++;
4776 if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
4778 if (trapping_loads_p (load_insn))
4781 else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
4784 if (delay_needed > cycles_since_jump)
4788 rtx *op = recog_data.operand;
4790 delay_needed -= cycles_since_jump;
4792 extract_insn (last_condjump);
4795 pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
4797 cycles_since_jump = INT_MAX;
4800 /* Do not adjust cycles_since_jump in this case, so that
4801 we'll increase the number of NOPs for a subsequent insn if necessary.  */
4803 pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
4804 GEN_INT (delay_needed));
4805 PATTERN (last_condjump) = pat;
4806 INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
4810 /* Second pass: for predicted-true branches, see if anything at the
4811 branch destination needs extra nops. */
4812 if (! ENABLE_WA_SPECULATIVE_SYNCS)
4815 if (! ENABLE_WA_RETS)
4818 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4821 && any_condjump_p (insn)
4822 && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
4823 || cbranch_predicted_taken_p (insn)))
4825 rtx target = JUMP_LABEL (insn);
4827 cycles_since_jump = 0;
4828 for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
4832 if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
4835 pat = PATTERN (target);
4836 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
4837 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
4838 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
4841 if (INSN_P (target))
4843 enum attr_type type = type_for_anomaly (target);
4844 int delay_needed = 0;
4845 if (cycles_since_jump < INT_MAX)
4846 cycles_since_jump++;
4848 if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
4851 if (delay_needed > cycles_since_jump)
4853 rtx prev = prev_real_insn (label);
4854 delay_needed -= cycles_since_jump;
4856 fprintf (dump_file, "Adding %d nops after %d\n",
4857 delay_needed, INSN_UID (label));
4859 && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
4866 "Reducing nops on insn %d.\n",
4869 x = XVECEXP (x, 0, 1);
4870 v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
4871 XVECEXP (x, 0, 0) = GEN_INT (v);
4873 while (delay_needed-- > 0)
4874 emit_insn_after (gen_nop (), label);
4882 if (bfin_flag_var_tracking)
4884 timevar_push (TV_VAR_TRACKING);
4885 variable_tracking_main ();
4886 reorder_var_tracking_notes ();
4887 timevar_pop (TV_VAR_TRACKING);
4889 df_finish_pass (false);
4892 /* Handle interrupt_handler, exception_handler and nmi_handler function
4893 attributes; arguments as in struct attribute_spec.handler. */
4896 handle_int_attribute (tree *node, tree name,
4897 tree args ATTRIBUTE_UNUSED,
4898 int flags ATTRIBUTE_UNUSED,
4902 if (TREE_CODE (x) == FUNCTION_DECL)
4905 if (TREE_CODE (x) != FUNCTION_TYPE)
4907 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4908 IDENTIFIER_POINTER (name));
4909 *no_add_attrs = true;
4911 else if (funkind (x) != SUBROUTINE)
4912 error ("multiple function type attributes specified");
4917 /* Return 0 if the attributes for two types are incompatible, 1 if they
4918 are compatible, and 2 if they are nearly compatible (which causes a
4919 warning to be generated). */
4922 bfin_comp_type_attributes (const_tree type1, const_tree type2)
4924 e_funkind kind1, kind2;
4926 if (TREE_CODE (type1) != FUNCTION_TYPE)
4929 kind1 = funkind (type1);
4930 kind2 = funkind (type2);
4935 /* Check for mismatched modifiers */
4936 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
4937 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
4940 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
4941 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
4944 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
4945 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
4948 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
4949 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
4955 /* Handle a "longcall" or "shortcall" attribute; arguments as in
4956 struct attribute_spec.handler. */
4959 bfin_handle_longcall_attribute (tree *node, tree name,
4960 tree args ATTRIBUTE_UNUSED,
4961 int flags ATTRIBUTE_UNUSED,
4964 if (TREE_CODE (*node) != FUNCTION_TYPE
4965 && TREE_CODE (*node) != FIELD_DECL
4966 && TREE_CODE (*node) != TYPE_DECL)
4968 warning (OPT_Wattributes, "`%s' attribute only applies to functions",
4969 IDENTIFIER_POINTER (name));
4970 *no_add_attrs = true;
4973 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
4974 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
4975 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
4976 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
4978 warning (OPT_Wattributes,
4979 "can't apply both longcall and shortcall attributes to the same function");
4980 *no_add_attrs = true;
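/* Illustrative usage:

     extern void far_func (void) __attribute__ ((longcall));

   forces calls to the assumed function "far_func" to use the full-range
   call sequence; putting both "longcall" and "shortcall" on the same
   declaration triggers the warning above.  */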
4986 /* Handle a "l1_text" attribute; arguments as in
4987 struct attribute_spec.handler. */
4990 bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4991 int ARG_UNUSED (flags), bool *no_add_attrs)
4995 if (TREE_CODE (decl) != FUNCTION_DECL)
4997 error ("`%s' attribute only applies to functions",
4998 IDENTIFIER_POINTER (name));
4999 *no_add_attrs = true;
5002 /* The decl may have already been given a section attribute
5003 from a previous declaration. Ensure they match. */
5004 else if (DECL_SECTION_NAME (decl) != NULL_TREE
5005 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
5008 error ("section of %q+D conflicts with previous declaration",
5010 *no_add_attrs = true;
5013 DECL_SECTION_NAME (decl) = build_string (9, ".l1.text");
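/* Illustrative usage:

     void fast_path (void) __attribute__ ((l1_text));

   places the assumed function "fast_path" in the ".l1.text" section
   named above (L1 instruction memory).  */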
5018 /* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
5019 arguments as in struct attribute_spec.handler. */
5022 bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5023 int ARG_UNUSED (flags), bool *no_add_attrs)
5027 if (TREE_CODE (decl) != VAR_DECL)
5029 error ("`%s' attribute only applies to variables",
5030 IDENTIFIER_POINTER (name));
5031 *no_add_attrs = true;
5033 else if (current_function_decl != NULL_TREE
5034 && !TREE_STATIC (decl))
5036 error ("`%s' attribute cannot be specified for local variables",
5037 IDENTIFIER_POINTER (name));
5038 *no_add_attrs = true;
5042 const char *section_name;
5044 if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
5045 section_name = ".l1.data";
5046 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
5047 section_name = ".l1.data.A";
5048 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
5049 section_name = ".l1.data.B";
5053 /* The decl may have already been given a section attribute
5054 from a previous declaration. Ensure they match. */
5055 if (DECL_SECTION_NAME (decl) != NULL_TREE
5056 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
5059 error ("section of %q+D conflicts with previous declaration",
5061 *no_add_attrs = true;
5064 DECL_SECTION_NAME (decl)
5065 = build_string (strlen (section_name) + 1, section_name);
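/* Illustrative usage:

     static int coeffs[64] __attribute__ ((l1_data_A));

   places the assumed variable "coeffs" in ".l1.data.A"; plain "l1_data"
   and "l1_data_B" select ".l1.data" and ".l1.data.B" as above.  */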
5071 /* Table of valid machine attributes. */
5072 const struct attribute_spec bfin_attribute_table[] =
5074 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
5075 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute },
5076 { "exception_handler", 0, 0, false, true, true, handle_int_attribute },
5077 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute },
5078 { "nesting", 0, 0, false, true, true, NULL },
5079 { "kspisusp", 0, 0, false, true, true, NULL },
5080 { "saveall", 0, 0, false, true, true, NULL },
5081 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
5082 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
5083 { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute },
5084 { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute },
5085 { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute },
5086 { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute },
5087 { NULL, 0, 0, false, false, false, NULL }
5090 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
5091    tell the assembler to generate pointers to function descriptors in some cases.  */
5095 bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
5097 if (TARGET_FDPIC && size == UNITS_PER_WORD)
5099 if (GET_CODE (value) == SYMBOL_REF
5100 && SYMBOL_REF_FUNCTION_P (value))
5102 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
5103 output_addr_const (asm_out_file, value);
5104 fputs (")\n", asm_out_file);
5109 /* We've set the unaligned SI op to NULL, so we always have to
5110 handle the unaligned case here. */
5111 assemble_integer_with_op ("\t.4byte\t", value);
5115 return default_assemble_integer (value, size, aligned_p);
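/* Illustrative effect: when compiling with -mfdpic, a word-sized static
   initializer that takes the address of a function "foo" (an assumed
   name) is emitted as

     .picptr funcdesc(foo)

   so that the linker builds a function descriptor; other word-sized
   values fall through to the ".4byte" form handled above.  */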
5118 /* Output the assembler code for a thunk function. THUNK_DECL is the
5119 declaration for the thunk function itself, FUNCTION is the decl for
5120 the target function. DELTA is an immediate constant offset to be
5121 added to THIS. If VCALL_OFFSET is nonzero, the word at
5122 *(*this + vcall_offset) should be added to THIS. */
5125 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
5126 tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
5127 HOST_WIDE_INT vcall_offset, tree function)
5130 /* The this parameter is passed as the first argument. */
5131 rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);
5133 /* Adjust the this parameter by a fixed constant. */
5137 if (delta >= -64 && delta <= 63)
5139 xops[0] = GEN_INT (delta);
5140 output_asm_insn ("%1 += %0;", xops);
5142 else if (delta >= -128 && delta < -64)
5144 xops[0] = GEN_INT (delta + 64);
5145 output_asm_insn ("%1 += -64; %1 += %0;", xops);
5147 else if (delta > 63 && delta <= 126)
5149 xops[0] = GEN_INT (delta - 63);
5150 output_asm_insn ("%1 += 63; %1 += %0;", xops);
5154 xops[0] = GEN_INT (delta);
5155 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
5159 /* Adjust the this parameter by a value stored in the vtable. */
5162 rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
5163 rtx tmp = gen_rtx_REG (Pmode, REG_R3);
5167 output_asm_insn ("%2 = r0; %2 = [%2];", xops);
5169 /* Adjust the this parameter. */
5170 xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
5171 if (!memory_operand (xops[0], Pmode))
5173 rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
5174 xops[0] = GEN_INT (vcall_offset);
5176 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
5177 xops[0] = gen_rtx_MEM (Pmode, p2tmp);
5180 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
5183 xops[0] = XEXP (DECL_RTL (function), 0);
5184 if (1 || !flag_pic || (*targetm.binds_local_p) (function))
5185 output_asm_insn ("jump.l\t%P0", xops);
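/* Illustrative output: for a this-pointer adjustment of 4 and no vcall
   offset, the thunk body printed above is roughly

     R0 += 4;
     jump.l _foo;

   where "_foo" stands for the target function's assembler name (the
   exact symbol spelling is only an assumption of this example).  */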
5188 /* Codes for all the Blackfin builtins. */
5194 BFIN_BUILTIN_COMPOSE_2X16,
5195 BFIN_BUILTIN_EXTRACTLO,
5196 BFIN_BUILTIN_EXTRACTHI,
5198 BFIN_BUILTIN_SSADD_2X16,
5199 BFIN_BUILTIN_SSSUB_2X16,
5200 BFIN_BUILTIN_SSADDSUB_2X16,
5201 BFIN_BUILTIN_SSSUBADD_2X16,
5202 BFIN_BUILTIN_MULT_2X16,
5203 BFIN_BUILTIN_MULTR_2X16,
5204 BFIN_BUILTIN_NEG_2X16,
5205 BFIN_BUILTIN_ABS_2X16,
5206 BFIN_BUILTIN_MIN_2X16,
5207 BFIN_BUILTIN_MAX_2X16,
5209 BFIN_BUILTIN_SSADD_1X16,
5210 BFIN_BUILTIN_SSSUB_1X16,
5211 BFIN_BUILTIN_MULT_1X16,
5212 BFIN_BUILTIN_MULTR_1X16,
5213 BFIN_BUILTIN_NORM_1X16,
5214 BFIN_BUILTIN_NEG_1X16,
5215 BFIN_BUILTIN_ABS_1X16,
5216 BFIN_BUILTIN_MIN_1X16,
5217 BFIN_BUILTIN_MAX_1X16,
5219 BFIN_BUILTIN_SUM_2X16,
5220 BFIN_BUILTIN_DIFFHL_2X16,
5221 BFIN_BUILTIN_DIFFLH_2X16,
5223 BFIN_BUILTIN_SSADD_1X32,
5224 BFIN_BUILTIN_SSSUB_1X32,
5225 BFIN_BUILTIN_NORM_1X32,
5226 BFIN_BUILTIN_ROUND_1X32,
5227 BFIN_BUILTIN_NEG_1X32,
5228 BFIN_BUILTIN_ABS_1X32,
5229 BFIN_BUILTIN_MIN_1X32,
5230 BFIN_BUILTIN_MAX_1X32,
5231 BFIN_BUILTIN_MULT_1X32,
5232 BFIN_BUILTIN_MULT_1X32X32,
5233 BFIN_BUILTIN_MULT_1X32X32NS,
5235 BFIN_BUILTIN_MULHISILL,
5236 BFIN_BUILTIN_MULHISILH,
5237 BFIN_BUILTIN_MULHISIHL,
5238 BFIN_BUILTIN_MULHISIHH,
5240 BFIN_BUILTIN_LSHIFT_1X16,
5241 BFIN_BUILTIN_LSHIFT_2X16,
5242 BFIN_BUILTIN_SSASHIFT_1X16,
5243 BFIN_BUILTIN_SSASHIFT_2X16,
5244 BFIN_BUILTIN_SSASHIFT_1X32,
5246 BFIN_BUILTIN_CPLX_MUL_16,
5247 BFIN_BUILTIN_CPLX_MAC_16,
5248 BFIN_BUILTIN_CPLX_MSU_16,
5250 BFIN_BUILTIN_CPLX_MUL_16_S40,
5251 BFIN_BUILTIN_CPLX_MAC_16_S40,
5252 BFIN_BUILTIN_CPLX_MSU_16_S40,
5254 BFIN_BUILTIN_CPLX_SQU,
5256 BFIN_BUILTIN_LOADBYTES,
5261 #define def_builtin(NAME, TYPE, CODE) \
5263 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5267 /* Set up all builtin functions for this target. */
5269 bfin_init_builtins (void)
5271 tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
5272 tree void_ftype_void
5273 = build_function_type (void_type_node, void_list_node);
5274 tree short_ftype_short
5275 = build_function_type_list (short_integer_type_node, short_integer_type_node,
5277 tree short_ftype_int_int
5278 = build_function_type_list (short_integer_type_node, integer_type_node,
5279 integer_type_node, NULL_TREE);
5280 tree int_ftype_int_int
5281 = build_function_type_list (integer_type_node, integer_type_node,
5282 integer_type_node, NULL_TREE);
5284 = build_function_type_list (integer_type_node, integer_type_node,
5286 tree short_ftype_int
5287 = build_function_type_list (short_integer_type_node, integer_type_node,
5289 tree int_ftype_v2hi_v2hi
5290 = build_function_type_list (integer_type_node, V2HI_type_node,
5291 V2HI_type_node, NULL_TREE);
5292 tree v2hi_ftype_v2hi_v2hi
5293 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5294 V2HI_type_node, NULL_TREE);
5295 tree v2hi_ftype_v2hi_v2hi_v2hi
5296 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5297 V2HI_type_node, V2HI_type_node, NULL_TREE);
5298 tree v2hi_ftype_int_int
5299 = build_function_type_list (V2HI_type_node, integer_type_node,
5300 integer_type_node, NULL_TREE);
5301 tree v2hi_ftype_v2hi_int
5302 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5303 integer_type_node, NULL_TREE);
5304 tree int_ftype_short_short
5305 = build_function_type_list (integer_type_node, short_integer_type_node,
5306 short_integer_type_node, NULL_TREE);
5307 tree v2hi_ftype_v2hi
5308 = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
5309 tree short_ftype_v2hi
5310 = build_function_type_list (short_integer_type_node, V2HI_type_node,
5313 = build_function_type_list (integer_type_node,
5314 build_pointer_type (integer_type_node),
5317   /* Add the Blackfin builtin functions.  */
5318 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
5319 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
5321 def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);
5323 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
5324 BFIN_BUILTIN_COMPOSE_2X16);
5325 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
5326 BFIN_BUILTIN_EXTRACTHI);
5327 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
5328 BFIN_BUILTIN_EXTRACTLO);
5330 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
5331 BFIN_BUILTIN_MIN_2X16);
5332 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
5333 BFIN_BUILTIN_MAX_2X16);
5335 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
5336 BFIN_BUILTIN_SSADD_2X16);
5337 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
5338 BFIN_BUILTIN_SSSUB_2X16);
5339 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
5340 BFIN_BUILTIN_SSADDSUB_2X16);
5341 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
5342 BFIN_BUILTIN_SSSUBADD_2X16);
5343 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
5344 BFIN_BUILTIN_MULT_2X16);
5345 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
5346 BFIN_BUILTIN_MULTR_2X16);
5347 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
5348 BFIN_BUILTIN_NEG_2X16);
5349 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
5350 BFIN_BUILTIN_ABS_2X16);
5352 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
5353 BFIN_BUILTIN_MIN_1X16);
5354 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
5355 BFIN_BUILTIN_MAX_1X16);
5357 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
5358 BFIN_BUILTIN_SSADD_1X16);
5359 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
5360 BFIN_BUILTIN_SSSUB_1X16);
5361 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
5362 BFIN_BUILTIN_MULT_1X16);
5363 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
5364 BFIN_BUILTIN_MULTR_1X16);
5365 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
5366 BFIN_BUILTIN_NEG_1X16);
5367 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
5368 BFIN_BUILTIN_ABS_1X16);
5369 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
5370 BFIN_BUILTIN_NORM_1X16);
5372 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
5373 BFIN_BUILTIN_SUM_2X16);
5374 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
5375 BFIN_BUILTIN_DIFFHL_2X16);
5376 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
5377 BFIN_BUILTIN_DIFFLH_2X16);
5379 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
5380 BFIN_BUILTIN_MULHISILL);
5381 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
5382 BFIN_BUILTIN_MULHISIHL);
5383 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
5384 BFIN_BUILTIN_MULHISILH);
5385 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
5386 BFIN_BUILTIN_MULHISIHH);
5388 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
5389 BFIN_BUILTIN_MIN_1X32);
5390 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
5391 BFIN_BUILTIN_MAX_1X32);
5393 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
5394 BFIN_BUILTIN_SSADD_1X32);
5395 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
5396 BFIN_BUILTIN_SSSUB_1X32);
5397 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
5398 BFIN_BUILTIN_NEG_1X32);
5399 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
5400 BFIN_BUILTIN_ABS_1X32);
5401 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
5402 BFIN_BUILTIN_NORM_1X32);
5403 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
5404 BFIN_BUILTIN_ROUND_1X32);
5405 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
5406 BFIN_BUILTIN_MULT_1X32);
5407 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
5408 BFIN_BUILTIN_MULT_1X32X32);
5409 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
5410 BFIN_BUILTIN_MULT_1X32X32NS);
5413 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
5414 BFIN_BUILTIN_SSASHIFT_1X16);
5415 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
5416 BFIN_BUILTIN_SSASHIFT_2X16);
5417 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
5418 BFIN_BUILTIN_LSHIFT_1X16);
5419 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
5420 BFIN_BUILTIN_LSHIFT_2X16);
5421 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
5422 BFIN_BUILTIN_SSASHIFT_1X32);
5424 /* Complex numbers. */
5425 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
5426 BFIN_BUILTIN_SSADD_2X16);
5427 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
5428 BFIN_BUILTIN_SSSUB_2X16);
5429 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
5430 BFIN_BUILTIN_CPLX_MUL_16);
5431 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
5432 BFIN_BUILTIN_CPLX_MAC_16);
5433 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
5434 BFIN_BUILTIN_CPLX_MSU_16);
5435 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
5436 BFIN_BUILTIN_CPLX_MUL_16_S40);
5437 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5438 BFIN_BUILTIN_CPLX_MAC_16_S40);
5439 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5440 BFIN_BUILTIN_CPLX_MSU_16_S40);
5441 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
5442 BFIN_BUILTIN_CPLX_SQU);
5444 /* "Unaligned" load. */
5445 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
5446 BFIN_BUILTIN_LOADBYTES);
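/* Illustrative usage of one of the builtins defined above (the "v2hi"
   typedef is an assumption; any vector of two shorts works):

     typedef short v2hi __attribute__ ((vector_size (4)));

     v2hi saturating_sum (v2hi a, v2hi b)
     {
       return __builtin_bfin_add_fr2x16 (a, b);
     }
*/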
5451 struct builtin_description
5453 const enum insn_code icode;
5454 const char *const name;
5455 const enum bfin_builtins code;
5459 static const struct builtin_description bdesc_2arg[] =
5461 { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },
5463 { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
5464 { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
5465 { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
5466 { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
5467 { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },
5469 { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
5470 { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
5471 { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
5472 { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },
5474 { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
5475 { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
5476 { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
5477 { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },
5479 { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
5480 { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
5481 { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
5482 { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
5483 { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
5484 { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },
5486 { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
5487 { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
5488 { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
5489 { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
5490 { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE }
5493 static const struct builtin_description bdesc_1arg[] =
5495 { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },
5497 { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },
5499 { CODE_FOR_signbitshi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
5500 { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
5501 { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },
5503 { CODE_FOR_signbitssi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
5504 { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
5505 { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
5506 { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },
5508 { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
5509 { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
5510 { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
5511 { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
5514 /* Errors in the source file can cause expand_expr to return const0_rtx
5515 where we expect a vector. To avoid crashing, use one of the vector
5516 clear instructions. */
5518 safe_vector_operand (rtx x, enum machine_mode mode)
5520 if (x != const0_rtx)
5522 x = gen_reg_rtx (SImode);
5524 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
5525 return gen_lowpart (mode, x);
5528 /* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
5529 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
5532 bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
5536 tree arg0 = CALL_EXPR_ARG (exp, 0);
5537 tree arg1 = CALL_EXPR_ARG (exp, 1);
5538 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5539 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5540 enum machine_mode op0mode = GET_MODE (op0);
5541 enum machine_mode op1mode = GET_MODE (op1);
5542 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5543 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5544 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5546 if (VECTOR_MODE_P (mode0))
5547 op0 = safe_vector_operand (op0, mode0);
5548 if (VECTOR_MODE_P (mode1))
5549 op1 = safe_vector_operand (op1, mode1);
5552 || GET_MODE (target) != tmode
5553 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5554 target = gen_reg_rtx (tmode);
5556 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
5559 op0 = gen_lowpart (HImode, op0);
5561 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
5564 op1 = gen_lowpart (HImode, op1);
5566 /* In case the insn wants input operands in modes different from
5567 the result, abort. */
5568 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
5569 && (op1mode == mode1 || op1mode == VOIDmode));
5571 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5572 op0 = copy_to_mode_reg (mode0, op0);
5573 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5574 op1 = copy_to_mode_reg (mode1, op1);
5577 pat = GEN_FCN (icode) (target, op0, op1);
5579 pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
5587 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
5590 bfin_expand_unop_builtin (enum insn_code icode, tree exp,
5594 tree arg0 = CALL_EXPR_ARG (exp, 0);
5595 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5596 enum machine_mode op0mode = GET_MODE (op0);
5597 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5598 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5601 || GET_MODE (target) != tmode
5602 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5603 target = gen_reg_rtx (tmode);
5605 if (VECTOR_MODE_P (mode0))
5606 op0 = safe_vector_operand (op0, mode0);
5608 if (op0mode == SImode && mode0 == HImode)
5611 op0 = gen_lowpart (HImode, op0);
5613 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
5615 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5616 op0 = copy_to_mode_reg (mode0, op0);
5618 pat = GEN_FCN (icode) (target, op0);
5625 /* Expand an expression EXP that calls a built-in function,
5626 with result going to TARGET if that's convenient
5627 (and in mode MODE if that's convenient).
5628 SUBTARGET may be used as the target for computing one of EXP's operands.
5629 IGNORE is nonzero if the value is to be ignored. */
5632 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5633 rtx subtarget ATTRIBUTE_UNUSED,
5634 enum machine_mode mode ATTRIBUTE_UNUSED,
5635 int ignore ATTRIBUTE_UNUSED)
5638 enum insn_code icode;
5639 const struct builtin_description *d;
5640 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
5641 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5642 tree arg0, arg1, arg2;
5643 rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
5644 enum machine_mode tmode, mode0;
5648 case BFIN_BUILTIN_CSYNC:
5649 emit_insn (gen_csync ());
5651 case BFIN_BUILTIN_SSYNC:
5652 emit_insn (gen_ssync ());
5655 case BFIN_BUILTIN_DIFFHL_2X16:
5656 case BFIN_BUILTIN_DIFFLH_2X16:
5657 case BFIN_BUILTIN_SUM_2X16:
5658 arg0 = CALL_EXPR_ARG (exp, 0);
5659 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5660 icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
5661 : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
5662 : CODE_FOR_ssaddhilov2hi3);
5663 tmode = insn_data[icode].operand[0].mode;
5664 mode0 = insn_data[icode].operand[1].mode;
5667 || GET_MODE (target) != tmode
5668 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5669 target = gen_reg_rtx (tmode);
5671 if (VECTOR_MODE_P (mode0))
5672 op0 = safe_vector_operand (op0, mode0);
5674 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5675 op0 = copy_to_mode_reg (mode0, op0);
5677 pat = GEN_FCN (icode) (target, op0, op0);
5683 case BFIN_BUILTIN_MULT_1X32X32:
5684 case BFIN_BUILTIN_MULT_1X32X32NS:
5685 arg0 = CALL_EXPR_ARG (exp, 0);
5686 arg1 = CALL_EXPR_ARG (exp, 1);
5687 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5688 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5690 || !register_operand (target, SImode))
5691 target = gen_reg_rtx (SImode);
5693 a1reg = gen_rtx_REG (PDImode, REG_A1);
5694 a0reg = gen_rtx_REG (PDImode, REG_A0);
5695 tmp1 = gen_lowpart (V2HImode, op0);
5696 tmp2 = gen_lowpart (V2HImode, op1);
5697 emit_insn (gen_flag_macinit1hi (a1reg,
5698 gen_lowpart (HImode, op0),
5699 gen_lowpart (HImode, op1),
5700 GEN_INT (MACFLAG_FU)));
5701 emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));
5703 if (fcode == BFIN_BUILTIN_MULT_1X32X32)
5704 emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
5705 const1_rtx, const1_rtx,
5706 const1_rtx, const0_rtx, a1reg,
5707 const0_rtx, GEN_INT (MACFLAG_NONE),
5708 GEN_INT (MACFLAG_M)));
5711 /* For saturating multiplication, there's exactly one special case
5712 to be handled: multiplying the smallest negative value with
5713 itself. Due to shift correction in fractional multiplies, this
5714 can overflow. Iff this happens, OP2 will contain 1, which, when
5715 added in 32 bits to the smallest negative, wraps to the largest
5716 positive, which is the result we want. */
5717 op2 = gen_reg_rtx (V2HImode);
5718 emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
5719 emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
5720 gen_lowpart (SImode, op2)));
5721 emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
5722 const1_rtx, const1_rtx,
5723 const1_rtx, const0_rtx, a1reg,
5724 const0_rtx, GEN_INT (MACFLAG_NONE),
5725 GEN_INT (MACFLAG_M)));
5726 op2 = gen_reg_rtx (SImode);
5727 emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
5729 emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
5730 const1_rtx, const0_rtx,
5731 a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
5732 emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
5733 emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
5734 if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
5735 emit_insn (gen_addsi3 (target, target, op2));
5738 case BFIN_BUILTIN_CPLX_MUL_16:
5739 case BFIN_BUILTIN_CPLX_MUL_16_S40:
5740 arg0 = CALL_EXPR_ARG (exp, 0);
5741 arg1 = CALL_EXPR_ARG (exp, 1);
5742 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5743 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5744 accvec = gen_reg_rtx (V2PDImode);
5747 || GET_MODE (target) != V2HImode
5748 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
5749 target = gen_reg_rtx (tmode);
5750 if (! register_operand (op0, GET_MODE (op0)))
5751 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
5752 if (! register_operand (op1, GET_MODE (op1)))
5753 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
5755 if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
5756 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
5757 const0_rtx, const0_rtx,
5758 const1_rtx, GEN_INT (MACFLAG_W32)));
5760 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
5761 const0_rtx, const0_rtx,
5762 const1_rtx, GEN_INT (MACFLAG_NONE)));
5763 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
5764 const1_rtx, const1_rtx,
5765 const0_rtx, accvec, const1_rtx, const0_rtx,
5766 GEN_INT (MACFLAG_NONE), accvec));
5770 case BFIN_BUILTIN_CPLX_MAC_16:
5771 case BFIN_BUILTIN_CPLX_MSU_16:
5772 case BFIN_BUILTIN_CPLX_MAC_16_S40:
5773 case BFIN_BUILTIN_CPLX_MSU_16_S40:
5774 arg0 = CALL_EXPR_ARG (exp, 0);
5775 arg1 = CALL_EXPR_ARG (exp, 1);
5776 arg2 = CALL_EXPR_ARG (exp, 2);
5777 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5778 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5779 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5780 accvec = gen_reg_rtx (V2PDImode);
5783 || GET_MODE (target) != V2HImode
5784 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
5785 target = gen_reg_rtx (tmode);
5786 if (! register_operand (op1, GET_MODE (op1)))
5787 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
5788 if (! register_operand (op2, GET_MODE (op2)))
5789 op2 = copy_to_mode_reg (GET_MODE (op2), op2);
5791 tmp1 = gen_reg_rtx (SImode);
5792 tmp2 = gen_reg_rtx (SImode);
5793 emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
5794 emit_move_insn (tmp2, gen_lowpart (SImode, op0));
5795 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
5796 emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
5797 if (fcode == BFIN_BUILTIN_CPLX_MAC_16
5798 || fcode == BFIN_BUILTIN_CPLX_MSU_16)
5799 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
5800 const0_rtx, const0_rtx,
5801 const1_rtx, accvec, const0_rtx,
5803 GEN_INT (MACFLAG_W32)));
5805 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
5806 const0_rtx, const0_rtx,
5807 const1_rtx, accvec, const0_rtx,
5809 GEN_INT (MACFLAG_NONE)));
5810 if (fcode == BFIN_BUILTIN_CPLX_MAC_16
5811 || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
5821 emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
5822 const1_rtx, const1_rtx,
5823 const0_rtx, accvec, tmp1, tmp2,
5824 GEN_INT (MACFLAG_NONE), accvec));
5828 case BFIN_BUILTIN_CPLX_SQU:
5829 arg0 = CALL_EXPR_ARG (exp, 0);
5830 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5831 accvec = gen_reg_rtx (V2PDImode);
5832 icode = CODE_FOR_flag_mulv2hi;
5833 tmp1 = gen_reg_rtx (V2HImode);
5834 tmp2 = gen_reg_rtx (V2HImode);
5837 || GET_MODE (target) != V2HImode
5838 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
5839 target = gen_reg_rtx (V2HImode);
5840 if (! register_operand (op0, GET_MODE (op0)))
5841 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
5843 emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));
5845 emit_insn (gen_flag_mulhi_parts (tmp2, op0, op0, const0_rtx,
5846 const0_rtx, const1_rtx,
5847 GEN_INT (MACFLAG_NONE)));
5849 emit_insn (gen_ssaddhi3_parts (target, tmp2, tmp2, const1_rtx,
5850 const0_rtx, const0_rtx));
5852 emit_insn (gen_sssubhi3_parts (target, tmp1, tmp1, const0_rtx,
5853 const0_rtx, const1_rtx));
5861 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5862 if (d->code == fcode)
5863 return bfin_expand_binop_builtin (d->icode, exp, target,
5866 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5867 if (d->code == fcode)
5868 return bfin_expand_unop_builtin (d->icode, exp, target);
5873 #undef TARGET_INIT_BUILTINS
5874 #define TARGET_INIT_BUILTINS bfin_init_builtins
5876 #undef TARGET_EXPAND_BUILTIN
5877 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
5879 #undef TARGET_ASM_GLOBALIZE_LABEL
5880 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
5882 #undef TARGET_ASM_FILE_START
5883 #define TARGET_ASM_FILE_START output_file_start
5885 #undef TARGET_ATTRIBUTE_TABLE
5886 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
5888 #undef TARGET_COMP_TYPE_ATTRIBUTES
5889 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
5891 #undef TARGET_RTX_COSTS
5892 #define TARGET_RTX_COSTS bfin_rtx_costs
5894 #undef TARGET_ADDRESS_COST
5895 #define TARGET_ADDRESS_COST bfin_address_cost
5897 #undef TARGET_ASM_INTEGER
5898 #define TARGET_ASM_INTEGER bfin_assemble_integer
5900 #undef TARGET_MACHINE_DEPENDENT_REORG
5901 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
5903 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
5904 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
5906 #undef TARGET_ASM_OUTPUT_MI_THUNK
5907 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
5908 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5909 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
5911 #undef TARGET_SCHED_ADJUST_COST
5912 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
5914 #undef TARGET_SCHED_ISSUE_RATE
5915 #define TARGET_SCHED_ISSUE_RATE bfin_issue_rate
5917 #undef TARGET_PROMOTE_PROTOTYPES
5918 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
5919 #undef TARGET_PROMOTE_FUNCTION_ARGS
5920 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
5921 #undef TARGET_PROMOTE_FUNCTION_RETURN
5922 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
5924 #undef TARGET_ARG_PARTIAL_BYTES
5925 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
5927 #undef TARGET_PASS_BY_REFERENCE
5928 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
5930 #undef TARGET_SETUP_INCOMING_VARARGS
5931 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
5933 #undef TARGET_STRUCT_VALUE_RTX
5934 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
5936 #undef TARGET_VECTOR_MODE_SUPPORTED_P
5937 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
5939 #undef TARGET_HANDLE_OPTION
5940 #define TARGET_HANDLE_OPTION bfin_handle_option
5942 #undef TARGET_DEFAULT_TARGET_FLAGS
5943 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
5945 #undef TARGET_SECONDARY_RELOAD
5946 #define TARGET_SECONDARY_RELOAD bfin_secondary_reload
5948 #undef TARGET_DELEGITIMIZE_ADDRESS
5949 #define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
5951 #undef TARGET_CANNOT_FORCE_CONST_MEM
5952 #define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem
5954 #undef TARGET_RETURN_IN_MEMORY
5955 #define TARGET_RETURN_IN_MEMORY bfin_return_in_memory
5957 struct gcc_target targetm = TARGET_INITIALIZER;