/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "target-def.h"
#include "integrate.h"
#include "langhooks.h"
#include "bfin-protos.h"
#include "tm-constrs.h"
#include "basic-block.h"
#include "cfglayout.h"
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct machine_function GTY(())
  int has_hardware_loops;

/* Test and compare insns in bfin.md store the information needed to
   generate branch and scc insns here.  */
rtx bfin_compare_op0, bfin_compare_op1;

/* RTX for the condition code flag register and the RETS register.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

int max_arg_registers = 0;
/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

static int arg_regs[] = FUNCTION_ARG_REGISTERS;

/* Nonzero if -mshared-library-id was given.  */
static int bfin_lib_id_given;

/* Nonzero if -fschedule-insns2 was given.  We override it and
   call the scheduler ourselves during reorg.  */
static int bfin_flag_schedule_insns2;

/* Determines whether we run variable tracking in machine dependent
   reorganization.  */
static int bfin_flag_var_tracking;

bfin_cpu_t bfin_cpu_type = BFIN_CPU_UNKNOWN;
/* -msi-revision support.  There are two special values:
   -1      -msi-revision=none.
   0xffff  -msi-revision=any.  */
int bfin_si_revision;

/* The workarounds enabled.  */
unsigned int bfin_workarounds = 0;

unsigned int workarounds;
struct bfin_cpu bfin_cpus[] =
  {"bf522", BFIN_CPU_BF522, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf522", BFIN_CPU_BF522, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf523", BFIN_CPU_BF523, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf523", BFIN_CPU_BF523, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf524", BFIN_CPU_BF524, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf524", BFIN_CPU_BF524, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf525", BFIN_CPU_BF525, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf525", BFIN_CPU_BF525, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf526", BFIN_CPU_BF526, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf526", BFIN_CPU_BF526, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf527", BFIN_CPU_BF527, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf527", BFIN_CPU_BF527, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf531", BFIN_CPU_BF531, 0x0005,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf531", BFIN_CPU_BF531, 0x0004,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf531", BFIN_CPU_BF531, 0x0003,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {"bf532", BFIN_CPU_BF532, 0x0005,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf532", BFIN_CPU_BF532, 0x0004,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf532", BFIN_CPU_BF532, 0x0003,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {"bf533", BFIN_CPU_BF533, 0x0005,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf533", BFIN_CPU_BF533, 0x0004,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf533", BFIN_CPU_BF533, 0x0003,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {"bf534", BFIN_CPU_BF534, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf534", BFIN_CPU_BF534, 0x0002,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf534", BFIN_CPU_BF534, 0x0001,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {"bf536", BFIN_CPU_BF536, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf536", BFIN_CPU_BF536, 0x0002,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf536", BFIN_CPU_BF536, 0x0001,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {"bf537", BFIN_CPU_BF537, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf537", BFIN_CPU_BF537, 0x0002,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf537", BFIN_CPU_BF537, 0x0001,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {"bf538", BFIN_CPU_BF538, 0x0004,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf538", BFIN_CPU_BF538, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf538", BFIN_CPU_BF538, 0x0002,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf539", BFIN_CPU_BF539, 0x0004,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf539", BFIN_CPU_BF539, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf539", BFIN_CPU_BF539, 0x0002,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf542", BFIN_CPU_BF542, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf542", BFIN_CPU_BF542, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf544", BFIN_CPU_BF544, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf544", BFIN_CPU_BF544, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf547", BFIN_CPU_BF547, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf547", BFIN_CPU_BF547, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf548", BFIN_CPU_BF548, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf548", BFIN_CPU_BF548, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf549", BFIN_CPU_BF549, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf549", BFIN_CPU_BF549, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf561", BFIN_CPU_BF561, 0x0005, WA_RETS},
  {"bf561", BFIN_CPU_BF561, 0x0003,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf561", BFIN_CPU_BF561, 0x0002,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
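/* For illustration (not from the original sources): the table above is
   consulted by the -mcpu handler further below, so a command line such as

       gcc -mcpu=bf532-0.3 ...

   selects the "bf532"/0x0003 entry and enables WA_SPECULATIVE_LOADS,
   WA_SPECULATIVE_SYNCS and WA_RETS, while -mcpu=bf532-0.5 enables only
   WA_SPECULATIVE_LOADS and WA_RETS.  */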
int splitting_for_sched;

bfin_globalize_label (FILE *stream, const char *name)
  fputs (".global ", stream);
  assemble_name (stream, name);

output_file_start (void)
  FILE *file = asm_out_file;

  /* Variable tracking should be run after all optimizations which change order
     of insns.  It also needs a valid CFG.  This can't be done in
     override_options, because flag_var_tracking is finalized after
     that.  */
  bfin_flag_var_tracking = flag_var_tracking;
  flag_var_tracking = 0;

  fprintf (file, ".file \"%s\";\n", input_filename);

  for (i = 0; arg_regs[i] >= 0; i++)
  max_arg_registers = i;  /* how many arg regs are used  */
/* Called early in the compilation to conditionally modify
   fixed_regs/call_used_regs.  */

conditional_register_usage (void)
  /* Initialize the condition code flag register rtx.  */
  bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
  bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);

/* Examine machine-dependent attributes of function type FUNTYPE and return
   its type.  See the definition of E_FUNKIND.  */

funkind (const_tree funtype)
  tree attrs = TYPE_ATTRIBUTES (funtype);
  if (lookup_attribute ("interrupt_handler", attrs))
    return INTERRUPT_HANDLER;
  else if (lookup_attribute ("exception_handler", attrs))
    return EXCPT_HANDLER;
  else if (lookup_attribute ("nmi_handler", attrs))
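/* A sketch of the source-level usage this classifies (assumed user code,
   not from this file):

       void isr (void) __attribute__ ((interrupt_handler));
       void exc (void) __attribute__ ((exception_handler));

   funkind returns INTERRUPT_HANDLER resp. EXCPT_HANDLER for these types,
   and falls back to SUBROUTINE for a plain function type.  */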
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
      if (TARGET_ID_SHARED_LIBRARY)
        unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
               && SYMBOL_REF_FUNCTION_P (addr))
        unspec = UNSPEC_FUNCDESC_GOT17M4;
        unspec = UNSPEC_MOVE_FDPIC;

      gcc_assert (can_create_pseudo_p ());
      reg = gen_reg_rtx (Pmode);

      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new_rtx);
      if (picreg == pic_offset_table_rtx)
        crtl->uses_pic_offset_table = 1;

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
      if (GET_CODE (addr) == CONST)
          addr = XEXP (addr, 0);
          gcc_assert (GET_CODE (addr) == PLUS);

      if (XEXP (addr, 0) == picreg)

      gcc_assert (can_create_pseudo_p ());
      reg = gen_reg_rtx (Pmode);

      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
                                     base == reg ? NULL_RTX : reg,

      if (GET_CODE (addr) == CONST_INT)
          gcc_assert (! reload_in_progress && ! reload_completed);
          addr = force_reg (Pmode, addr);

      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
          base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
          addr = XEXP (addr, 1);

      return gen_rtx_PLUS (Pmode, base, addr);
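/* A rough sketch of the transformation above for the non-FDPIC case
   (illustrative, not verbatim RTL): a (symbol_ref "x") becomes

       (mem (plus (reg picreg)
                  (unspec [(symbol_ref "x")] UNSPEC_MOVE_PIC)))

   i.e. a GOT load relative to the PIC register, whose result is then
   copied into REG.  */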
/* Stack frame layout.  */

/* For a given REGNO, determine whether it must be saved in the function
   prologue.  IS_INTHANDLER specifies whether we're generating a normal
   prologue or an interrupt/exception one.  */

must_save_p (bool is_inthandler, unsigned regno)
  if (D_REGNO_P (regno))
      bool is_eh_return_reg = false;
      if (crtl->calls_eh_return)
          unsigned test = EH_RETURN_DATA_REGNO (j);
          if (test == INVALID_REGNUM)
          is_eh_return_reg = true;

      return (is_eh_return_reg
              || (df_regs_ever_live_p (regno)
                  && !fixed_regs[regno]
                  && (is_inthandler || !call_used_regs[regno])));
  else if (P_REGNO_P (regno))
      return ((df_regs_ever_live_p (regno)
               && !fixed_regs[regno]
               && (is_inthandler || !call_used_regs[regno]))
              && regno == PIC_OFFSET_TABLE_REGNUM
              && (crtl->uses_pic_offset_table
                  || (TARGET_ID_SHARED_LIBRARY && !current_function_is_leaf))));

  return ((is_inthandler || !call_used_regs[regno])
          && (df_regs_ever_live_p (regno)
              || (!leaf_function_p () && call_used_regs[regno])));

/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.
   If IS_INTHANDLER, then everything that is live must be saved, even
   if normally call-clobbered.
   If CONSECUTIVE, return the number of registers we can save in one
   instruction with a push/pop multiple instruction.  */

n_dregs_to_save (bool is_inthandler, bool consecutive)
  for (i = REG_R7 + 1; i-- != REG_R0;)
      if (must_save_p (is_inthandler, i))
      else if (consecutive)

/* Like n_dregs_to_save, but compute number of PREGS to save.  */

n_pregs_to_save (bool is_inthandler, bool consecutive)
  for (i = REG_P5 + 1; i-- != REG_P0;)
    if (must_save_p (is_inthandler, i))
    else if (consecutive)
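/* For illustration (assembly sketch, not emitted verbatim here): with three
   consecutive D registers and three consecutive P registers to save, the
   push_multiple pattern corresponds to the single instruction

       [--SP] = (R7:5, P5:3);

   which is why the CONSECUTIVE variants above only count registers forming
   an unbroken range ending at R7 resp. P5.  */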
/* Determine if we are going to save the frame pointer in the prologue.  */

must_save_fp_p (void)
  return frame_pointer_needed || df_regs_ever_live_p (REG_FP);

stack_frame_needed_p (void)
  /* EH return puts a new return address into the frame using an
     address relative to the frame pointer.  */
  if (crtl->calls_eh_return)
  return frame_pointer_needed;

/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int total_consec = ndregs_consec + npregs_consec;

  if (saveall || is_inthandler)
      rtx insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
      RTX_FRAME_RELATED_P (insn) = 1;

  if (total_consec != 0)
      rtx val = GEN_INT (-total_consec * 4);
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));

      XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
                                            UNSPEC_PUSH_MULTIPLE);
      XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
      RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
      d_to_save = ndregs_consec;
      dregno = REG_R7 + 1 - ndregs_consec;
      pregno = REG_P5 + 1 - npregs_consec;
      for (i = 0; i < total_consec; i++)
          rtx memref = gen_rtx_MEM (word_mode,
                                    gen_rtx_PLUS (Pmode, spreg,
                                                  GEN_INT (- i * 4 - 4)));

          subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
          subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
          XVECEXP (pat, 0, i + 1) = subpat;
          RTX_FRAME_RELATED_P (subpat) = 1;
      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;

  for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
      if (must_save_p (is_inthandler, dregno))
          rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
          RTX_FRAME_RELATED_P (insn) = 1;

  for (pregno = REG_P0; npregs != npregs_consec; pregno++)
      if (must_save_p (is_inthandler, pregno))
          rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
          RTX_FRAME_RELATED_P (insn) = 1;

  for (i = REG_P7 + 1; i < REG_CC; i++)
      && (df_regs_ever_live_p (i)
          || (!leaf_function_p () && call_used_regs[i]))))
      if (i == REG_A0 || i == REG_A1)
        insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
                               gen_rtx_REG (PDImode, i));
        insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
      RTX_FRAME_RELATED_P (insn) = 1;
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int total_consec = ndregs_consec + npregs_consec;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  for (i = REG_CC - 1; i > REG_P7; i--)
      && (df_regs_ever_live_p (i)
          || (!leaf_function_p () && call_used_regs[i]))))
      if (i == REG_A0 || i == REG_A1)
          rtx mem = gen_rtx_MEM (PDImode, postinc1);
          MEM_VOLATILE_P (mem) = 1;
          emit_move_insn (gen_rtx_REG (PDImode, i), mem);
      emit_move_insn (gen_rtx_REG (SImode, i), postinc);

  regno = REG_P5 - npregs_consec;
  for (; npregs != npregs_consec; regno--)
      if (must_save_p (is_inthandler, regno))
          emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);

  regno = REG_R7 - ndregs_consec;
  for (; ndregs != ndregs_consec; regno--)
      if (must_save_p (is_inthandler, regno))
          emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);

  if (total_consec != 0)
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
        = gen_rtx_SET (VOIDmode, spreg,
                       gen_rtx_PLUS (Pmode, spreg,
                                     GEN_INT (total_consec * 4)));

      if (npregs_consec > 0)

      for (i = 0; i < total_consec; i++)
            ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
          rtx memref = gen_rtx_MEM (word_mode, addr);

          XVECEXP (pat, 0, i + 1)
            = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

          if (npregs_consec > 0)
              if (--npregs_consec == 0)

      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;

  if (saveall || is_inthandler)
    emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);

/* Perform any actions needed for a function that is receiving a
   variable number of arguments.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prologue to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   - The VDSP C compiler manual (our ABI) says that a variable-args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we don't have
     to leave any extra space.
   - Now the va_start pointer can access all arguments from the stack.  */

setup_incoming_varargs (CUMULATIVE_ARGS *cum,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        tree type ATTRIBUTE_UNUSED, int *pretend_size,

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  for (i = cum->words + 1; i < max_arg_registers; i++)
      mem = gen_rtx_MEM (Pmode,
                         plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
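/* A worked example of the loop above (hedged; the exact contents of CUM
   depend on how the middle end advances it): for "int f (int a, ...)" the
   named argument lives in R0, and the loop spills the remaining argument
   registers, e.g. R1 to [ARGP + 4] and R2 to [ARGP + 8], into the stack
   slots the caller already reserved.  */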
/* Value should be nonzero if functions must have frame pointers.
   Zero means the frame pointer need not be set up (and parms may
   be accessed via the stack pointer) in functions that seem suitable.  */

bfin_frame_pointer_required (void)
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  if (fkind != SUBROUTINE)

  /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
     so we have to override it for non-leaf functions.  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)

/* Return the number of registers pushed during the prologue.  */

n_regs_saved_by_prologue (void)
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
              || (is_inthandler && !current_function_is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
  int n = ndregs + npregs;

  if (all || stack_frame_needed_p ())
    /* We use a LINK instruction in this case.  */

  if (must_save_fp_p ())
  if (! current_function_is_leaf)

  if (fkind != SUBROUTINE || all)
    /* Increment once for ASTAT.  */

  if (fkind != SUBROUTINE)
      if (lookup_attribute ("nesting", attrs))

  for (i = REG_P7 + 1; i < REG_CC; i++)
      || (fkind != SUBROUTINE
          && (df_regs_ever_live_p (i)
              || (!leaf_function_p () && call_used_regs[i]))))
      n += i == REG_A0 || i == REG_A1 ? 2 : 1;

/* Return the offset between two registers, one to be eliminated, and the other
   its replacement, at the start of a routine.  */

bfin_initial_elimination_offset (int from, int to)
  HOST_WIDE_INT offset = 0;

  if (from == ARG_POINTER_REGNUM)
    offset = n_regs_saved_by_prologue () * 4;

  if (to == STACK_POINTER_REGNUM)
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
        offset += crtl->outgoing_args_size;
      else if (crtl->outgoing_args_size)
        offset += FIXED_STACK_AREA;

      offset += get_frame_size ();
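/* A small worked example (assumed numbers, for illustration only): if the
   prologue saves 2 registers, the function has 40 bytes of outgoing
   arguments (assumed to be at least FIXED_STACK_AREA) and a 16-byte local
   frame, then eliminating the arg pointer into the stack pointer yields

       offset = 2*4 + 40 + 16 = 64 bytes.  */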
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
  rtx cst = GEN_INT (constant);

  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
      /* We don't call split_load_immediate here, since dwarf2out.c can get
         confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
        RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    RTX_FRAME_RELATED_P (insn) = 1;
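/* Assembly sketch of the two-insn path above (illustrative, assuming P1 as
   the destination): loading 0x12345678 becomes

       P1.H = 0x1234;
       P1.L = 0x5678;

   with both insns marked frame-related when RELATED is true, so dwarf2out
   can track the register's value.  */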
/* Generate efficient code to add a value to a P register.
   Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   a sibcall epilogue.  */

add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7-bit value
     in one instruction.  */
  if (value > 120 || value < -120)
      /* For prologue or normal epilogue, P1 can be safely used
         as the temporary register.  For sibcall epilogue, we try to find
         a call used P register, which will be restored in epilogue.
         If we cannot find such a P register, we have to use one I register
         to help us.  */
        tmpreg = gen_rtx_REG (SImode, REG_P1);
          for (i = REG_P0; i <= REG_P5; i++)
            if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
                && i == PIC_OFFSET_TABLE_REGNUM
                && (crtl->uses_pic_offset_table
                    || (TARGET_ID_SHARED_LIBRARY
                        && ! current_function_is_leaf))))
              tmpreg = gen_rtx_REG (SImode, i);
              tmpreg = gen_rtx_REG (SImode, REG_P1);
              tmpreg2 = gen_rtx_REG (SImode, REG_I0);
              emit_move_insn (tmpreg2, tmpreg);

        frame_related_constant_load (tmpreg, value, TRUE);
        insn = emit_move_insn (tmpreg, GEN_INT (value));

      insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
        RTX_FRAME_RELATED_P (insn) = 1;

      if (tmpreg2 != NULL_RTX)
        emit_move_insn (tmpreg, tmpreg2);

          /* We could use -62, but that would leave the stack unaligned, so
             use -60 instead.  */

        insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;

/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
  HOST_WIDE_INT link_size = frame_size;

  if (link_size > 262140)

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;

  frame_size -= link_size;

      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
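/* For illustration (assumed numbers, not from the sources): a 64-byte frame
   is allocated with a single

       LINK 64;

   (the RTL operand is -8 - 64 to account for the pushed FP and RETS, undone
   again by the %Z output modifier); a frame above 262140 bytes instead uses
   the largest possible LINK and then subtracts the remainder from SP through
   the P1 scratch register.  */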
/* Return the number of bytes we must reserve for outgoing arguments
   in the current function's stack frame.  */

  if (crtl->outgoing_args_size)
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
        return crtl->outgoing_args_size;
        return FIXED_STACK_AREA;

/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
  frame_size += arg_area_size ();

  if (all || stack_frame_needed_p ()
      || (must_save_fp_p () && ! current_function_is_leaf))
    emit_link_insn (spreg, frame_size);

      if (! current_function_is_leaf)
          rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
                                            gen_rtx_PRE_DEC (Pmode, spreg)),
          rtx insn = emit_insn (pat);
          RTX_FRAME_RELATED_P (insn) = 1;
      if (must_save_fp_p ())
          rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
                                            gen_rtx_PRE_DEC (Pmode, spreg)),
                               gen_rtx_REG (Pmode, REG_FP));
          rtx insn = emit_insn (pat);
          RTX_FRAME_RELATED_P (insn) = 1;
      add_to_reg (spreg, -frame_size, 1, 0);

/* Like do_link, but used for epilogues to deallocate the stack frame.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   a sibcall epilogue.  */

do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
  frame_size += arg_area_size ();

  if (all || stack_frame_needed_p ())
    emit_insn (gen_unlink ());

      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      add_to_reg (spreg, frame_size, 0, epilogue_p);
      if (must_save_fp_p ())
          rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
          emit_move_insn (fpreg, postinc);
      if (! current_function_is_leaf)
          emit_move_insn (bfin_rets_rtx, postinc);
          emit_use (bfin_rets_rtx);
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  */

expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!current_function_is_leaf)

  expand_prologue_reg_save (spreg, all, true);

  if (lookup_attribute ("nesting", attrs))
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
                                        : fkind == NMI_HANDLER ? REG_RETN
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);

      insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      insn = emit_move_insn (r1reg, spreg);
      insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));

/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  */

expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all, 1);

  if (lookup_attribute ("nesting", attrs))
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
                                        : fkind == NMI_HANDLER ? REG_RETN
      emit_move_insn (srcreg, postinc);

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!current_function_is_leaf)

  expand_epilogue_reg_restore (spreg, all, true);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
/* Used while emitting the prologue to generate code to load the correct value
   into the PIC register, which is passed in DEST.  */

bfin_load_pic_reg (rtx dest)
  struct cgraph_local_info *i = NULL;

  i = cgraph_local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  return pic_offset_table_rtx;

  if (bfin_lib_id_given)
    addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                         gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
                                         UNSPEC_LIBRARY_OFFSET));
  insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));

/* Generate RTL for the prologue of the current function.  */

bfin_expand_prologue (void)
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
      expand_interrupt_handler_prologue (spreg, fkind, all);

  if (crtl->limit_stack
      || (TARGET_STACK_CHECK_L1
          && !DECL_NO_LIMIT_STACK (current_function_decl)))
      HOST_WIDE_INT offset
        = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
                                           STACK_POINTER_REGNUM);
      rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
      rtx p2reg = gen_rtx_REG (Pmode, REG_P2);

          emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
          emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));

          if (GET_CODE (lim) == SYMBOL_REF)
              if (TARGET_ID_SHARED_LIBRARY)
                  rtx p1reg = gen_rtx_REG (Pmode, REG_P1);

                  pic_reg_loaded = bfin_load_pic_reg (p2reg);
                  val = legitimize_pic_address (stack_limit_rtx, p1reg,
                  emit_move_insn (p1reg, val);
                  frame_related_constant_load (p2reg, offset, FALSE);
                  emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));

                  rtx limit = plus_constant (lim, offset);
                  emit_move_insn (p2reg, limit);

              emit_move_insn (p2reg, lim);
              add_to_reg (p2reg, offset, 0, 0);

      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());

  expand_prologue_reg_save (spreg, all, false);

  do_link (spreg, frame_size, false);

  if (TARGET_ID_SHARED_LIBRARY
      && (crtl->uses_pic_offset_table
          || !current_function_is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);

/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
   eh_return pattern.  SIBCALL_P is true if this is a sibcall epilogue.  */

bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  int e = sibcall_p ? -1 : 1;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
      expand_interrupt_handler_epilogue (spreg, fkind, all);

  do_unlink (spreg, get_frame_size (), false, e);

  expand_epilogue_reg_restore (spreg, all, false);

  /* Omit the return insn if this is for a sibcall.  */

  emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
                           unsigned int new_reg)
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */
  if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
      && !df_regs_ever_live_p (new_reg))

/* Return the value of the return address for the frame COUNT steps up
   from the current frame, after the prologue.
   We punt for everything but the current frame by returning const0_rtx.  */

bfin_return_addr_rtx (int count)
  return get_hard_reg_initial_val (Pmode, REG_RETS);

/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address,
   otherwise return NULL_RTX.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the memory reference.  */

legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED)

bfin_delegitimize_address (rtx orig_x)
  if (GET_CODE (x) != MEM)

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == UNSPEC
      && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    return XVECEXP (XEXP (x, 1), 0, 0);

/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
   32-bit instruction.  */

effective_address_32bit_p (rtx op, enum machine_mode mode)
  HOST_WIDE_INT offset;

  mode = GET_MODE (op);

  if (GET_CODE (op) != PLUS)
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
                  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);

  if (GET_CODE (XEXP (op, 1)) == UNSPEC)

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16-bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)

  if (GET_MODE_SIZE (mode) == 4)
      /* Frame pointer relative loads can use a negative offset, all others
         are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
        return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
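/* Illustrative examples of the test above (not from the sources): an SImode
   access [P0 + 60] still fits the short encoding, while [P0 + 64] or a
   negative non-FP-relative offset such as [P0 - 4] needs the 32-bit one
   (FP-relative accesses may reach down to -128); for HImode the short range
   is 0..30, and a byte access with an offset always needs the 32-bit
   encoding.  */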
/* Returns true if X is a memory reference using an I register.  */

bfin_dsp_memref_p (rtx x)
  if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
      || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)

/* Return cost of the memory address ADDR.
   All addressing modes are equally cheap on the Blackfin.  */

bfin_address_cost (rtx addr ATTRIBUTE_UNUSED, bool speed ATTRIBUTE_UNUSED)

/* Subroutine of print_operand; used to print a memory reference X to FILE.  */

print_address_operand (FILE *file, rtx x)
  switch (GET_CODE (x))
      output_address (XEXP (x, 0));
      fprintf (file, "+");
      output_address (XEXP (x, 1));

      fprintf (file, "--");
      output_address (XEXP (x, 0));
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      output_address (XEXP (x, 0));
      fprintf (file, "--");

      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
/* Adding intp DImode support by Tony.  */

print_operand (FILE *file, rtx x, char code)
  enum machine_mode mode;

      if (GET_MODE (current_output_insn) == SImode)
        fprintf (file, " ||");
        fprintf (file, ";");

  mode = GET_MODE (x);

      switch (GET_CODE (x))
          fprintf (file, "e");
          fprintf (file, "ne");
          fprintf (file, "g");
          fprintf (file, "l");
          fprintf (file, "ge");
          fprintf (file, "le");
          fprintf (file, "g");
          fprintf (file, "l");
          fprintf (file, "ge");
          fprintf (file, "le");
          output_operand_lossage ("invalid %%j value");

    case 'J':  /* Reverse logic.  */
      switch (GET_CODE (x))
          fprintf (file, "ne");
          fprintf (file, "e");
          fprintf (file, "le");
          fprintf (file, "ge");
          fprintf (file, "l");
          fprintf (file, "g");
          fprintf (file, "le");
          fprintf (file, "ge");
          fprintf (file, "l");
          fprintf (file, "g");
          output_operand_lossage ("invalid %%J value");

      switch (GET_CODE (x))
            fprintf (file, "%s", short_reg_names[REGNO (x)]);
            output_operand_lossage ("invalid operand for code '%c'", code);
          else if (code == 'd')
              fprintf (file, "%s", high_reg_names[REGNO (x)]);
              output_operand_lossage ("invalid operand for code '%c'", code);
          else if (code == 'w')
              if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
                fprintf (file, "%s.w", reg_names[REGNO (x)]);
                output_operand_lossage ("invalid operand for code '%c'", code);
          else if (code == 'x')
              if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
                fprintf (file, "%s.x", reg_names[REGNO (x)]);
                output_operand_lossage ("invalid operand for code '%c'", code);
          else if (code == 'v')
              if (REGNO (x) == REG_A0)
                fprintf (file, "AV0");
              else if (REGNO (x) == REG_A1)
                fprintf (file, "AV1");
                output_operand_lossage ("invalid operand for code '%c'", code);
          else if (code == 'D')
              if (D_REGNO_P (REGNO (x)))
                fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
                output_operand_lossage ("invalid operand for code '%c'", code);
          else if (code == 'H')
              if ((mode == DImode || mode == DFmode) && REG_P (x))
                fprintf (file, "%s", reg_names[REGNO (x) + 1]);
                output_operand_lossage ("invalid operand for code '%c'", code);
          else if (code == 'T')
              if (D_REGNO_P (REGNO (x)))
                fprintf (file, "%s", byte_reg_names[REGNO (x)]);
                output_operand_lossage ("invalid operand for code '%c'", code);
            fprintf (file, "%s", reg_names[REGNO (x)]);

          print_address_operand (file, x);

              fputs ("(FU)", file);
              fputs ("(T)", file);
              fputs ("(TFU)", file);
              fputs ("(W32)", file);
              fputs ("(IS)", file);
              fputs ("(IU)", file);
              fputs ("(IH)", file);
              fputs ("(M)", file);
              fputs ("(IS,M)", file);
              fputs ("(ISS2)", file);
              fputs ("(S2RND)", file);

          else if (code == 'b')
              if (INTVAL (x) == 0)
              else if (INTVAL (x) == 1)

          /* Moves to half registers with d or h modifiers always use unsigned
             constants.  */
          else if (code == 'd')
            x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
          else if (code == 'h')
            x = GEN_INT (INTVAL (x) & 0xffff);
          else if (code == 'N')
            x = GEN_INT (-INTVAL (x));
          else if (code == 'X')
            x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
          else if (code == 'Y')
            x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
          else if (code == 'Z')
            /* Used for LINK insns.  */
            x = GEN_INT (-8 - INTVAL (x));

          output_addr_const (file, x);

          output_operand_lossage ("invalid const_double operand");

          switch (XINT (x, 1))
            case UNSPEC_MOVE_PIC:
              output_addr_const (file, XVECEXP (x, 0, 0));
              fprintf (file, "@GOT");

            case UNSPEC_MOVE_FDPIC:
              output_addr_const (file, XVECEXP (x, 0, 0));
              fprintf (file, "@GOT17M4");

            case UNSPEC_FUNCDESC_GOT17M4:
              output_addr_const (file, XVECEXP (x, 0, 0));
              fprintf (file, "@FUNCDESC_GOT17M4");

            case UNSPEC_LIBRARY_OFFSET:
              fprintf (file, "_current_shared_library_p5_offset_");

      output_addr_const (file, x);
1732 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1733 for a call to a function whose data type is FNTYPE.
1734 For a library call, FNTYPE is 0.
1735 VDSP C Compiler manual, our ABI says that
1736 first 3 words of arguments will use R0, R1 and R2.
1740 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1741 rtx libname ATTRIBUTE_UNUSED)
1743 static CUMULATIVE_ARGS zero_cum;
1747 /* Set up the number of registers to use for passing arguments. */
1749 cum->nregs = max_arg_registers;
1750 cum->arg_regs = arg_regs;
1752 cum->call_cookie = CALL_NORMAL;
1753 /* Check for a longcall attribute. */
1754 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1755 cum->call_cookie |= CALL_SHORT;
1756 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1757 cum->call_cookie |= CALL_LONG;
1762 /* Update the data in CUM to advance over an argument
1763 of mode MODE and data type TYPE.
1764 (TYPE is null for libcalls where that information may not be available.) */
1767 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1768 int named ATTRIBUTE_UNUSED)
1770 int count, bytes, words;
1772 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1773 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1775 cum->words += words;
1776 cum->nregs -= words;
1778 if (cum->nregs <= 0)
1781 cum->arg_regs = NULL;
1785 for (count = 1; count <= words; count++)
1792 /* Define where to put the arguments to a function.
1793 Value is zero to push the argument on the stack,
1794 or a hard register in which to store the argument.
1796 MODE is the argument's machine mode.
1797 TYPE is the data type of the argument (as a tree).
1798 This is null for libcalls where that information may
1800 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1801 the preceding args and about the function being called.
1802 NAMED is nonzero if this argument is a named parameter
1803 (otherwise it is an extra parameter matching an ellipsis). */
1806 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1807 int named ATTRIBUTE_UNUSED)
1810 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1812 if (mode == VOIDmode)
1813 /* Compute operand 2 of the call insn. */
1814 return GEN_INT (cum->call_cookie);
1820 return gen_rtx_REG (mode, *(cum->arg_regs));
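/* A worked example of the ABI described above (assumed signature, not from
   the sources): for "int f (int a, long long b, int c)", A is passed in R0
   and B likely occupies the R1/R2 pair; C no longer fits in the three
   argument registers and goes to the stack.  The final VOIDmode query
   returns the call cookie (CALL_SHORT/CALL_LONG) as operand 2 of the call
   insn.  */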
/* For an arg passed partly in registers and partly in memory,
   this is the number of bytes passed in registers.
   For args passed entirely in registers or entirely in memory, zero.

   Refer to the VDSP C Compiler manual (our ABI).
   The first 3 words are in registers.  So, if an argument is larger
   than the registers available, it will span the registers and
   the stack.  */

bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                        tree type ATTRIBUTE_UNUSED,
                        bool named ATTRIBUTE_UNUSED)
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int bytes_left = cum->nregs * UNITS_PER_WORD;

  if (bytes_left == 0)
  if (bytes > bytes_left)

/* Variable sized types are passed by reference.  */

bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        const_tree type, bool named ATTRIBUTE_UNUSED)
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;

/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   TARGET_RETURN_IN_MEMORY.  */

bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
  int size = int_size_in_bytes (type);
  return size > 2 * UNITS_PER_WORD || size == -1;

/* Register in which the address of a structure value
   is passed to a function.  */

bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                       int incoming ATTRIBUTE_UNUSED)
  return gen_rtx_REG (Pmode, REG_P0);

/* Return true when register may be used to pass function parameters.  */

function_arg_regno_p (int n)
  for (i = 0; arg_regs[i] != -1; i++)
    if (n == arg_regs[i])

/* Returns 1 if OP contains a symbol reference.  */

symbolic_reference_mentioned_p (rtx op)
  register const char *fmt;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
          for (j = XVECLEN (op, i) - 1; j >= 0; j--)
            if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))

/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
                              tree exp ATTRIBUTE_UNUSED)
  struct cgraph_local_info *this_func, *called_func;
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  if (fkind != SUBROUTINE)
  if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)

  /* When compiling for ID shared libraries, we can't sibcall a local function
     from a non-local function, because the local function thinks it does
     not need to reload P5 in the prologue, but the sibcall will pop P5 in the
     sibcall epilogue, and we end up with the wrong value in P5.  */

  /* Not enough information.  */

  this_func = cgraph_local_info (current_function_decl);
  called_func = cgraph_local_info (decl);
  return !called_func->local || this_func->local;

/* Emit RTL insns to initialize the variable parts of a trampoline at
   TRAMP.  FNADDR is an RTX for the address of the function's pure
   code.  CXT is an RTX for the static chain value for the function.  */

initialize_trampoline (rtx tramp, rtx fnaddr, rtx cxt)
  rtx t1 = copy_to_reg (fnaddr);
  rtx t2 = copy_to_reg (cxt);

      rtx a = memory_address (Pmode, plus_constant (tramp, 8));
      addr = memory_address (Pmode, tramp);
      emit_move_insn (gen_rtx_MEM (SImode, addr), a);

  addr = memory_address (Pmode, plus_constant (tramp, i + 2));
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
  emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
  addr = memory_address (Pmode, plus_constant (tramp, i + 6));
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));

  addr = memory_address (Pmode, plus_constant (tramp, i + 10));
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
  emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
  addr = memory_address (Pmode, plus_constant (tramp, i + 14));
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
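/* Sketch of what the stores above produce (layout inferred from the offsets
   used; the base offset I is set by elided code): the function address T1
   is written as two 16-bit immediates at TRAMP+I+2 and TRAMP+I+6, and the
   static chain T2 likewise at TRAMP+I+10 and TRAMP+I+14, matching a code
   template that loads them with reg.L = ...; reg.H = ...; instruction
   pairs.  */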
/* Emit insns to move operands[1] into operands[0].  */

emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    operands[1] = force_reg (SImode, operands[1]);
    operands[1] = legitimize_pic_address (operands[1], temp,
                                          TARGET_FDPIC ? OUR_FDPIC_REG
                                          : pic_offset_table_rtx);

/* Expand a move operation in mode MODE.  The operands are in OPERANDS.
   Returns true if no further code must be generated, false if the caller
   should generate an insn to move OPERANDS[1] to OPERANDS[0].  */

expand_move (rtx *operands, enum machine_mode mode)
  rtx op = operands[1];
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
      && SYMBOLIC_CONST (op))
    emit_pic_move (operands, mode);
  else if (mode == SImode && GET_CODE (op) == CONST
           && GET_CODE (XEXP (op, 0)) == PLUS
           && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
           && !bfin_legitimate_constant_p (op))
      rtx dest = operands[0];

      gcc_assert (!reload_in_progress && !reload_completed);

      op0 = force_reg (mode, XEXP (op, 0));

      if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
        op1 = force_reg (mode, op1);
      if (GET_CODE (dest) == MEM)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_addsi3 (dest, op0, op1));
      if (dest == operands[0])

  /* Don't generate memory->memory or constant->memory moves, go through a
     register.  */
  else if ((reload_in_progress | reload_completed) == 0
           && GET_CODE (operands[0]) == MEM
           && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);

/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant, or
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
   split and "num" is its length.  lo_half and hi_half are output arrays
   that parallel "operands".  */

split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
      rtx op = operands[num];

      /* simplify_subreg refuses to split volatile memory addresses,
         but we still have to handle it.  */
      if (GET_CODE (op) == MEM)
          lo_half[num] = adjust_address (op, SImode, 0);
          hi_half[num] = adjust_address (op, SImode, 4);

          lo_half[num] = simplify_gen_subreg (SImode, op,
                                              GET_MODE (op) == VOIDmode
                                              ? DImode : GET_MODE (op), 0);
          hi_half[num] = simplify_gen_subreg (SImode, op,
                                              GET_MODE (op) == VOIDmode
                                              ? DImode : GET_MODE (op), 4);

bfin_longcall_p (rtx op, int call_cookie)
  gcc_assert (GET_CODE (op) == SYMBOL_REF);
  if (call_cookie & CALL_SHORT)
  if (call_cookie & CALL_LONG)
  if (TARGET_LONG_CALLS)
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return
   value.  COOKIE is a CONST_INT holding the call_cookie prepared by
   init_cumulative_args.  SIBCALL is nonzero if this is a sibling call.  */

bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  int nelts = 2 + !!sibcall;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

      int caller_has_l1_text, callee_has_l1_text;

      caller_has_l1_text = callee_has_l1_text = 0;

      if (lookup_attribute ("l1_text",
                            DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
        caller_has_l1_text = 1;

      if (GET_CODE (callee) == SYMBOL_REF
          && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee))
                            DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
        callee_has_l1_text = 1;

      if (GET_CODE (callee) != SYMBOL_REF
          || bfin_longcall_p (callee, INTVAL (cookie))
          || (GET_CODE (callee) == SYMBOL_REF
              && !SYMBOL_REF_LOCAL_P (callee)
              && TARGET_INLINE_PLT)
          || caller_has_l1_text != callee_has_l1_text
          || (caller_has_l1_text && callee_has_l1_text
              && (GET_CODE (callee) != SYMBOL_REF
                  || !SYMBOL_REF_LOCAL_P (callee))))
          if (! address_operand (addr, Pmode))
            addr = force_reg (Pmode, addr);

          fnaddr = gen_reg_rtx (SImode);
          emit_insn (gen_load_funcdescsi (fnaddr, addr));
          fnaddr = gen_rtx_MEM (Pmode, fnaddr);

          picreg = gen_reg_rtx (SImode);
          emit_insn (gen_load_funcdescsi (picreg,
                                          plus_constant (addr, 4)));

  else if ((!register_no_elim_operand (callee, Pmode)
            && GET_CODE (callee) != SYMBOL_REF)
           || (GET_CODE (callee) == SYMBOL_REF
               && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
                   || bfin_longcall_p (callee, INTVAL (cookie)))))
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);

  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  call = gen_rtx_SET (VOIDmode, retval, call);

  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));

  XVECEXP (pat, 0, n++) = call;
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
  call = emit_call_insn (pat);

  CALL_INSN_FUNCTION_USAGE (call) = use;
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */

hard_regno_mode_ok (int regno, enum machine_mode mode)
  /* Allow only dregs to store values of mode HImode or QImode.  */
  enum reg_class rclass = REGNO_REG_CLASS (regno);

  if (mode == V2HImode)
    return D_REGNO_P (regno);
  if (rclass == CCREGS)
    return mode == BImode;
  if (mode == PDImode || mode == V2PDImode)
    return regno == REG_A0 || regno == REG_A1;

  /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
     up with a bad register class (such as ALL_REGS) for DImode.  */
    return regno < REG_M3;

      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);

/* Implements target hook vector_mode_supported_p.  */

bfin_vector_mode_supported_p (enum machine_mode mode)
  return mode == V2HImode;

/* Return the cost of moving data from a register in class CLASS1 to
   one in class CLASS2.  A cost of 2 is the default.  */

bfin_register_move_cost (enum machine_mode mode,
                         enum reg_class class1, enum reg_class class2)
  /* These need secondary reloads, so they're more expensive.  */
  if ((class1 == CCREGS && class2 != DREGS)
      || (class1 != DREGS && class2 == CCREGS))

  /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */

  /* There are some stalls involved when moving from a DREG to a different
     class reg, and using the value in one of the following instructions.
     Attempt to model this by slightly discouraging such moves.  */
  if (class1 == DREGS && class2 != DREGS)

  if (GET_MODE_CLASS (mode) == MODE_INT)
      /* Discourage trying to use the accumulators.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
          || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
          || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
          || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))

/* Return the cost of moving data of mode M between a
   register and memory.  A value of 2 is the default; this cost is
   relative to those in `REGISTER_MOVE_COST'.

   ??? In theory L1 memory has single-cycle latency.  We should add a switch
   that tells the compiler whether we expect to use only L1 memory for the
   program; it'll make the costs more accurate.  */

bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                       enum reg_class rclass,
                       int in ATTRIBUTE_UNUSED)
  /* Make memory accesses slightly more expensive than any register-register
     move.  Also, penalize non-DP registers, since they need secondary
     reloads to load and store.  */
  if (! reg_class_subset_p (rclass, DPREGS))
/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  */

static enum reg_class
bfin_secondary_reload (bool in_p, rtx x, enum reg_class rclass,
                       enum machine_mode mode, secondary_reload_info *sri)
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

    x = SUBREG_REG (x), code = GET_CODE (x);
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
        regno = reg_renumber[regno];

      x_class = REGNO_REG_CLASS (regno);

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! satisfies_constraint_Ks7 (op2);

      if (rclass == PREGS || rclass == PREGS_CLOBBERED)

      /* If destination is a DREG, we can do this without a scratch register
         if the constant is valid for an add instruction.  */
      if ((rclass == DREGS || rclass == DPREGS)
          && ! large_constant_p)

      /* Reloading to anything other than a DREG?  Use a PREG scratch
         register.  */
      sri->icode = CODE_FOR_reload_insi;

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
    return (rclass == DREGS || rclass == AREGS || rclass == EVEN_AREGS
            || rclass == ODD_AREGS

  if (rclass == AREGS || rclass == EVEN_AREGS || rclass == ODD_AREGS)
      sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;

      if (x != const0_rtx && x_class != DREGS)

  /* CCREGS can only be moved from/to DREGS.  */
  if (rclass == CCREGS && x_class != DREGS)
  if (x_class == CCREGS && rclass != DREGS)

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (! reg_class_subset_p (rclass, default_class))
    return default_class;
2363 /* Implement TARGET_HANDLE_OPTION. */
2366 bfin_handle_option (size_t code, const char *arg, int value)
2370 case OPT_mshared_library_id_:
2371 if (value > MAX_LIBRARY_ID)
2372 error ("-mshared-library-id=%s is not between 0 and %d",
2373 arg, MAX_LIBRARY_ID);
2374 bfin_lib_id_given = 1;
2383 while ((p = bfin_cpus[i].name) != NULL)
2385 if (strncmp (arg, p, strlen (p)) == 0)
2392 error ("-mcpu=%s is not valid", arg);
2396 bfin_cpu_type = bfin_cpus[i].type;
2398 q = arg + strlen (p);
2402 bfin_si_revision = bfin_cpus[i].si_revision;
2403 bfin_workarounds |= bfin_cpus[i].workarounds;
2405 else if (strcmp (q, "-none") == 0)
2406 bfin_si_revision = -1;
2407 else if (strcmp (q, "-any") == 0)
2409 bfin_si_revision = 0xffff;
2410 while (bfin_cpus[i].type == bfin_cpu_type)
2412 bfin_workarounds |= bfin_cpus[i].workarounds;
2418 unsigned int si_major, si_minor;
2421 rev_len = strlen (q);
2423 if (sscanf (q, "-%u.%u%n", &si_major, &si_minor, &n) != 2
2425 || si_major > 0xff || si_minor > 0xff)
2427 invalid_silicon_revision:
2428 error ("-mcpu=%s has invalid silicon revision", arg);
2432 bfin_si_revision = (si_major << 8) | si_minor;
2434 while (bfin_cpus[i].type == bfin_cpu_type
2435 && bfin_cpus[i].si_revision != bfin_si_revision)
2438 if (bfin_cpus[i].type != bfin_cpu_type)
2439 goto invalid_silicon_revision;
2441 bfin_workarounds |= bfin_cpus[i].workarounds;
2452 static struct machine_function *
2453 bfin_init_machine_status (void)
2455 struct machine_function *f;
2457 f = GGC_CNEW (struct machine_function);
2462 /* Implement the macro OVERRIDE_OPTIONS. */
2465 override_options (void)
2467 /* If processor type is not specified, enable all workarounds. */
2468 if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
2472 for (i = 0; bfin_cpus[i].name != NULL; i++)
2473 bfin_workarounds |= bfin_cpus[i].workarounds;
2475 bfin_si_revision = 0xffff;
2478 if (bfin_csync_anomaly == 1)
2479 bfin_workarounds |= WA_SPECULATIVE_SYNCS;
2480 else if (bfin_csync_anomaly == 0)
2481 bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;
2483 if (bfin_specld_anomaly == 1)
2484 bfin_workarounds |= WA_SPECULATIVE_LOADS;
2485 else if (bfin_specld_anomaly == 0)
2486 bfin_workarounds &= ~WA_SPECULATIVE_LOADS;
2488 if (TARGET_OMIT_LEAF_FRAME_POINTER)
2489 flag_omit_frame_pointer = 1;
2491 /* Library identification */
2492 if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
2493 error ("-mshared-library-id= specified without -mid-shared-library");
2495 if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
2496 error ("Can't use multiple stack checking methods together.");
2498 if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
2499 error ("ID shared libraries and FD-PIC mode can't be used together.");
2501 /* Don't allow the user to specify -mid-shared-library and -msep-data
2502 together, as it makes little sense from a user's point of view... */
2503 if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
2504 error ("cannot specify both -msep-data and -mid-shared-library");
2505 /* ... internally, however, it's nearly the same. */
2506 if (TARGET_SEP_DATA)
2507 target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;
2509 if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
2512 /* There is no single unaligned SI op for PIC code. Sometimes we
2513 need to use ".4byte" and sometimes we need to use ".picptr".
2514 See bfin_assemble_integer for details. */
2516 targetm.asm_out.unaligned_op.si = 0;
2518 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2519 since we don't support it and it'll just break. */
2520 if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
2523 if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
2524 error ("-mmulticore can only be used with BF561");
2526 if (TARGET_COREA && !TARGET_MULTICORE)
2527 error ("-mcorea should be used with -mmulticore");
2529 if (TARGET_COREB && !TARGET_MULTICORE)
2530 error ("-mcoreb should be used with -mmulticore");
2532 if (TARGET_COREA && TARGET_COREB)
2533 error ("-mcorea and -mcoreb can't be used together");
2535 flag_schedule_insns = 0;
2537 /* Passes after sched2 can break the helpful TImode annotations that
2538 haifa-sched puts on every insn. Just do scheduling in reorg. */
2539 bfin_flag_schedule_insns2 = flag_schedule_insns_after_reload;
2540 flag_schedule_insns_after_reload = 0;
2542 init_machine_status = bfin_init_machine_status;
2545 /* Return the destination address of BRANCH.
2546 We need to use this instead of get_attr_length, because the
2547 cbranch_with_nops pattern conservatively sets its length to 6, and
2548 we still prefer to use shorter sequences. */
2551 branch_dest (rtx branch)
2555 rtx pat = PATTERN (branch);
2556 if (GET_CODE (pat) == PARALLEL)
2557 pat = XVECEXP (pat, 0, 0);
2558 dest = SET_SRC (pat);
2559 if (GET_CODE (dest) == IF_THEN_ELSE)
2560 dest = XEXP (dest, 1);
2561 dest = XEXP (dest, 0);
2562 dest_uid = INSN_UID (dest);
2563 return INSN_ADDRESSES (dest_uid);
2566 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2567 it's a branch that's predicted taken. */
2570 cbranch_predicted_taken_p (rtx insn)
2572 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2576 int pred_val = INTVAL (XEXP (x, 0));
2578 return pred_val >= REG_BR_PROB_BASE / 2;
2584 /* Templates for use by asm_conditional_branch. */
2586 static const char *ccbranch_templates[][3] = {
2587 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
2588 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
2589 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
2590 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
2593 /* Output INSN, which is a conditional branch instruction with operands
2596 We deal with the various forms of conditional branches that can be generated
2597 by bfin_reorg to prevent the hardware from doing speculative loads, by
2598 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2599 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2600 Either of these is only necessary if the branch is short, otherwise the
2601 template we use ends in an unconditional jump which flushes the pipeline
2605 asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
2607 int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
2608 /* Note : offset for instructions like if cc jmp; jump.[sl] offset
2609 is to be taken from start of if cc rather than jump.
2610 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
2612 int len = (offset >= -1024 && offset <= 1022 ? 0
2613 : offset >= -4094 && offset <= 4096 ? 1
2615 int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
2616 int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
2617 output_asm_insn (ccbranch_templates[idx][len], operands);
2618 gcc_assert (n_nops == 0 || !bp);
2620 while (n_nops-- > 0)
2621 output_asm_insn ("nop;", NULL);
2624 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
2625 stored in bfin_compare_op0 and bfin_compare_op1 already. */
2628 bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
2630 enum rtx_code code1, code2;
2631 rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
2632 rtx tem = bfin_cc_rtx;
2633 enum rtx_code code = GET_CODE (cmp);
2635 /* If we have a BImode input, then we already have a compare result, and
2636 do not need to emit another comparison. */
2637 if (GET_MODE (op0) == BImode)
2639 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
2640 tem = op0, code2 = code;
2645 /* bfin has these conditions */
2655 code1 = reverse_condition (code);
2659 emit_insn (gen_rtx_SET (BImode, tem,
2660 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
2663 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
2666 /* Return nonzero iff C has exactly one bit set if it is interpreted
2667 as a 32-bit constant. */
2670 log2constp (unsigned HOST_WIDE_INT c)
2673 return c != 0 && (c & (c-1)) == 0;
2676 /* Returns the number of consecutive least significant zeros in the binary
2677 representation of *V.
2678 We modify *V to contain the original value arithmetically shifted right by
2679 the number of zeroes. */
2682 shiftr_zero (HOST_WIDE_INT *v)
2684 unsigned HOST_WIDE_INT tmp = *v;
2685 unsigned HOST_WIDE_INT sgn;
2691 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
2692 while ((tmp & 0x1) == 0 && n <= 32)
2694 tmp = (tmp >> 1) | sgn;
2701 /* After reload, split the load of an immediate constant. OPERANDS are the
2702 operands of the movsi_insn pattern which we are splitting. We return
2703 nonzero if we emitted a sequence to load the constant, zero if we emitted
2704 nothing because we want to use the splitter's default sequence. */
2707 split_load_immediate (rtx operands[])
2709 HOST_WIDE_INT val = INTVAL (operands[1]);
2711 HOST_WIDE_INT shifted = val;
2712 HOST_WIDE_INT shifted_compl = ~val;
2713 int num_zero = shiftr_zero (&shifted);
2714 int num_compl_zero = shiftr_zero (&shifted_compl);
2715 unsigned int regno = REGNO (operands[0]);
2717 /* This case takes care of single-bit set/clear constants, which we could
2718 also implement with BITSET/BITCLR. */
2720 && shifted >= -32768 && shifted < 65536
2721 && (D_REGNO_P (regno)
2722 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2724 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2725 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2730 tmp |= -(tmp & 0x8000);
2732 /* If high word has one bit set or clear, try to use a bit operation. */
2733 if (D_REGNO_P (regno))
2735 if (log2constp (val & 0xFFFF0000))
2737 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2738 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2741 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2743 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2744 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2748 if (D_REGNO_P (regno))
2750 if (tmp >= -64 && tmp <= 63)
2752 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2753 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2757 if ((val & 0xFFFF0000) == 0)
2759 emit_insn (gen_movsi (operands[0], const0_rtx));
2760 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2764 if ((val & 0xFFFF0000) == 0xFFFF0000)
2766 emit_insn (gen_movsi (operands[0], constm1_rtx));
2767 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2772 /* Need DREGs for the remaining case. */
2777 && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
2779 /* If optimizing for size, generate a sequence that has more instructions
2781 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2782 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2783 GEN_INT (num_compl_zero)));
2784 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2790 /* Return true if the legitimate memory address for a memory operand of mode
2791 MODE. Return false if not. */
2794 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2796 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2797 int sz = GET_MODE_SIZE (mode);
2798 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2799 /* The usual offsettable_memref machinery doesn't work so well for this
2800 port, so we deal with the problem here. */
2801 if (value > 0 && sz == 8)
2803 return (v & ~(0x7fff << shift)) == 0;
2807 bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2808 enum rtx_code outer_code)
2811 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2813 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
2817 bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2819 switch (GET_CODE (x)) {
2821 if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
2825 if (REG_P (XEXP (x, 0))
2826 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
2827 && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
2828 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2829 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
2834 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2835 && REG_P (XEXP (x, 0))
2836 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
2839 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2840 && XEXP (x, 0) == stack_pointer_rtx
2841 && REG_P (XEXP (x, 0))
2842 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
2851 /* Decide whether we can force certain constants to memory. If we
2852 decide we can't, the caller should be able to cope with it in
2856 bfin_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
2858 /* We have only one class of non-legitimate constants, and our movsi
2859 expander knows how to handle them. Dropping these constants into the
2860 data section would only shift the problem - we'd still get relocs
2861 outside the object, in the data section rather than the text section. */
2865 /* Ensure that for any constant of the form symbol + offset, the offset
2866 remains within the object. Any other constants are ok.
2867 This ensures that flat binaries never have to deal with relocations
2868 crossing section boundaries. */
2871 bfin_legitimate_constant_p (rtx x)
2874 HOST_WIDE_INT offset;
2876 if (GET_CODE (x) != CONST)
2880 gcc_assert (GET_CODE (x) == PLUS);
2884 if (GET_CODE (sym) != SYMBOL_REF
2885 || GET_CODE (x) != CONST_INT)
2887 offset = INTVAL (x);
2889 if (SYMBOL_REF_DECL (sym) == 0)
2892 || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
2899 bfin_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed)
2901 int cost2 = COSTS_N_INSNS (1);
2907 if (outer_code == SET || outer_code == PLUS)
2908 *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
2909 else if (outer_code == AND)
2910 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2911 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2912 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2913 else if (outer_code == LEU || outer_code == LTU)
2914 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2915 else if (outer_code == MULT)
2916 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2917 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2919 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2920 || outer_code == LSHIFTRT)
2921 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2922 else if (outer_code == IOR || outer_code == XOR)
2923 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2932 *total = COSTS_N_INSNS (2);
2938 if (GET_MODE (x) == SImode)
2940 if (GET_CODE (op0) == MULT
2941 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
2943 HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
2944 if (val == 2 || val == 4)
2947 *total += rtx_cost (XEXP (op0, 0), outer_code, speed);
2948 *total += rtx_cost (op1, outer_code, speed);
2953 if (GET_CODE (op0) != REG
2954 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2955 *total += rtx_cost (op0, SET, speed);
2956 #if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2957 towards creating too many induction variables. */
2958 if (!reg_or_7bit_operand (op1, SImode))
2959 *total += rtx_cost (op1, SET, speed);
2962 else if (GET_MODE (x) == DImode)
2965 if (GET_CODE (op1) != CONST_INT
2966 || !satisfies_constraint_Ks7 (op1))
2967 *total += rtx_cost (op1, PLUS, speed);
2968 if (GET_CODE (op0) != REG
2969 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2970 *total += rtx_cost (op0, PLUS, speed);
2975 if (GET_MODE (x) == DImode)
2984 if (GET_MODE (x) == DImode)
2991 if (GET_CODE (op0) != REG
2992 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2993 *total += rtx_cost (op0, code, speed);
3003 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
3006 if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
3007 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
3008 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
3009 || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
3016 if (GET_CODE (op0) != REG
3017 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
3018 *total += rtx_cost (op0, code, speed);
3020 if (GET_MODE (x) == DImode)
3026 if (GET_MODE (x) != SImode)
3031 if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
3032 *total += rtx_cost (XEXP (x, 1), code, speed);
3036 if (! regorlog2_operand (XEXP (x, 1), SImode))
3037 *total += rtx_cost (XEXP (x, 1), code, speed);
3044 if (outer_code == SET
3045 && XEXP (x, 1) == const1_rtx
3046 && GET_CODE (XEXP (x, 2)) == CONST_INT)
3062 if (GET_CODE (op0) == GET_CODE (op1)
3063 && (GET_CODE (op0) == ZERO_EXTEND
3064 || GET_CODE (op0) == SIGN_EXTEND))
3066 *total = COSTS_N_INSNS (1);
3067 op0 = XEXP (op0, 0);
3068 op1 = XEXP (op1, 0);
3071 *total = COSTS_N_INSNS (1);
3073 *total = COSTS_N_INSNS (3);
3075 if (GET_CODE (op0) != REG
3076 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
3077 *total += rtx_cost (op0, MULT, speed);
3078 if (GET_CODE (op1) != REG
3079 && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
3080 *total += rtx_cost (op1, MULT, speed);
3086 *total = COSTS_N_INSNS (32);
3091 if (outer_code == SET)
3100 /* Used for communication between {push,pop}_multiple_operation (which
3101 we use not only as a predicate) and the corresponding output functions. */
3102 static int first_preg_to_save, first_dreg_to_save;
3105 push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3107 int lastdreg = 8, lastpreg = 6;
3110 first_preg_to_save = lastpreg;
3111 first_dreg_to_save = lastdreg;
3112 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
3114 rtx t = XVECEXP (op, 0, i);
3118 if (GET_CODE (t) != SET)
3122 dest = SET_DEST (t);
3123 if (GET_CODE (dest) != MEM || ! REG_P (src))
3125 dest = XEXP (dest, 0);
3126 if (GET_CODE (dest) != PLUS
3127 || ! REG_P (XEXP (dest, 0))
3128 || REGNO (XEXP (dest, 0)) != REG_SP
3129 || GET_CODE (XEXP (dest, 1)) != CONST_INT
3130 || INTVAL (XEXP (dest, 1)) != -i * 4)
3133 regno = REGNO (src);
3136 if (D_REGNO_P (regno))
3139 first_dreg_to_save = lastdreg = regno - REG_R0;
3141 else if (regno >= REG_P0 && regno <= REG_P7)
3144 first_preg_to_save = lastpreg = regno - REG_P0;
3154 if (regno >= REG_P0 && regno <= REG_P7)
3157 first_preg_to_save = lastpreg = regno - REG_P0;
3159 else if (regno != REG_R0 + lastdreg + 1)
3164 else if (group == 2)
3166 if (regno != REG_P0 + lastpreg + 1)
3175 pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3177 int lastdreg = 8, lastpreg = 6;
3180 for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
3182 rtx t = XVECEXP (op, 0, i);
3186 if (GET_CODE (t) != SET)
3190 dest = SET_DEST (t);
3191 if (GET_CODE (src) != MEM || ! REG_P (dest))
3193 src = XEXP (src, 0);
3197 if (! REG_P (src) || REGNO (src) != REG_SP)
3200 else if (GET_CODE (src) != PLUS
3201 || ! REG_P (XEXP (src, 0))
3202 || REGNO (XEXP (src, 0)) != REG_SP
3203 || GET_CODE (XEXP (src, 1)) != CONST_INT
3204 || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
3207 regno = REGNO (dest);
3210 if (regno == REG_R7)
3215 else if (regno != REG_P0 + lastpreg - 1)
3220 else if (group == 1)
3222 if (regno != REG_R0 + lastdreg - 1)
3228 first_dreg_to_save = lastdreg;
3229 first_preg_to_save = lastpreg;
3233 /* Emit assembly code for one multi-register push described by INSN, with
3234 operands in OPERANDS. */
3237 output_push_multiple (rtx insn, rtx *operands)
3242 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3243 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
3246 if (first_dreg_to_save == 8)
3247 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
3248 else if (first_preg_to_save == 6)
3249 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
3251 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
3252 first_dreg_to_save, first_preg_to_save);
3254 output_asm_insn (buf, operands);
3257 /* Emit assembly code for one multi-register pop described by INSN, with
3258 operands in OPERANDS. */
3261 output_pop_multiple (rtx insn, rtx *operands)
3266 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3267 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
3270 if (first_dreg_to_save == 8)
3271 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
3272 else if (first_preg_to_save == 6)
3273 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
3275 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
3276 first_dreg_to_save, first_preg_to_save);
3278 output_asm_insn (buf, operands);
3281 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3284 single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
3286 rtx scratch = gen_reg_rtx (mode);
3289 srcmem = adjust_address_nv (src, mode, offset);
3290 dstmem = adjust_address_nv (dst, mode, offset);
3291 emit_move_insn (scratch, srcmem);
3292 emit_move_insn (dstmem, scratch);
3295 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3296 alignment ALIGN_EXP. Return true if successful, false if we should fall
3297 back on a different method. */
3300 bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
3302 rtx srcreg, destreg, countreg;
3303 HOST_WIDE_INT align = 0;
3304 unsigned HOST_WIDE_INT count = 0;
3306 if (GET_CODE (align_exp) == CONST_INT)
3307 align = INTVAL (align_exp);
3308 if (GET_CODE (count_exp) == CONST_INT)
3310 count = INTVAL (count_exp);
3312 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
3317 /* If optimizing for size, only do single copies inline. */
3320 if (count == 2 && align < 2)
3322 if (count == 4 && align < 4)
3324 if (count != 1 && count != 2 && count != 4)
3327 if (align < 2 && count != 1)
3330 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
3331 if (destreg != XEXP (dst, 0))
3332 dst = replace_equiv_address_nv (dst, destreg);
3333 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
3334 if (srcreg != XEXP (src, 0))
3335 src = replace_equiv_address_nv (src, srcreg);
3337 if (count != 0 && align >= 2)
3339 unsigned HOST_WIDE_INT offset = 0;
3343 if ((count & ~3) == 4)
3345 single_move_for_movmem (dst, src, SImode, offset);
3348 else if (count & ~3)
3350 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
3351 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
3353 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
3357 single_move_for_movmem (dst, src, HImode, offset);
3363 if ((count & ~1) == 2)
3365 single_move_for_movmem (dst, src, HImode, offset);
3368 else if (count & ~1)
3370 HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
3371 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
3373 emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
3378 single_move_for_movmem (dst, src, QImode, offset);
3385 /* Compute the alignment for a local variable.
3386 TYPE is the data type, and ALIGN is the alignment that
3387 the object would ordinarily have. The value of this macro is used
3388 instead of that alignment to align the object. */
3391 bfin_local_alignment (tree type, int align)
3393 /* Increasing alignment for (relatively) big types allows the builtin
3394 memcpy can use 32 bit loads/stores. */
3395 if (TYPE_SIZE (type)
3396 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3397 && (TREE_INT_CST_LOW (TYPE_SIZE (type)) > 8
3398 || TREE_INT_CST_HIGH (TYPE_SIZE (type))) && align < 32)
3403 /* Implement TARGET_SCHED_ISSUE_RATE. */
3406 bfin_issue_rate (void)
3412 bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
3414 enum attr_type insn_type, dep_insn_type;
3415 int dep_insn_code_number;
3417 /* Anti and output dependencies have zero cost. */
3418 if (REG_NOTE_KIND (link) != 0)
3421 dep_insn_code_number = recog_memoized (dep_insn);
3423 /* If we can't recognize the insns, we can't really do anything. */
3424 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
3427 insn_type = get_attr_type (insn);
3428 dep_insn_type = get_attr_type (dep_insn);
3430 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
3432 rtx pat = PATTERN (dep_insn);
3433 if (GET_CODE (pat) == PARALLEL)
3434 pat = XVECEXP (pat, 0, 0);
3435 rtx dest = SET_DEST (pat);
3436 rtx src = SET_SRC (pat);
3437 if (! ADDRESS_REGNO_P (REGNO (dest))
3438 || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
3440 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
3447 /* Increment the counter for the number of loop instructions in the
3448 current function. */
3451 bfin_hardware_loop (void)
3453 cfun->machine->has_hardware_loops++;
3456 /* Maximum loop nesting depth. */
3457 #define MAX_LOOP_DEPTH 2
3459 /* Maximum size of a loop. */
3460 #define MAX_LOOP_LENGTH 2042
3462 /* Maximum distance of the LSETUP instruction from the loop start. */
3463 #define MAX_LSETUP_DISTANCE 30
3465 /* We need to keep a vector of loops */
3466 typedef struct loop_info *loop_info;
3467 DEF_VEC_P (loop_info);
3468 DEF_VEC_ALLOC_P (loop_info,heap);
3470 /* Information about a loop we have found (or are in the process of
3472 struct loop_info GTY (())
3474 /* loop number, for dumps */
3477 /* All edges that jump into and out of the loop. */
3478 VEC(edge,gc) *incoming;
3480 /* We can handle two cases: all incoming edges have the same destination
3481 block, or all incoming edges have the same source block. These two
3482 members are set to the common source or destination we found, or NULL
3483 if different blocks were found. If both are NULL the loop can't be
3485 basic_block incoming_src;
3486 basic_block incoming_dest;
3488 /* First block in the loop. This is the one branched to by the loop_end
3492 /* Last block in the loop (the one with the loop_end insn). */
3495 /* The successor block of the loop. This is the one the loop_end insn
3497 basic_block successor;
3499 /* The last instruction in the tail. */
3502 /* The loop_end insn. */
3505 /* The iteration register. */
3508 /* The new initialization insn. */
3511 /* The new initialization instruction. */
3514 /* The new label placed at the beginning of the loop. */
3517 /* The new label placed at the end of the loop. */
3520 /* The length of the loop. */
3523 /* The nesting depth of the loop. */
3526 /* Nonzero if we can't optimize this loop. */
3529 /* True if we have visited this loop. */
3532 /* True if this loop body clobbers any of LC0, LT0, or LB0. */
3535 /* True if this loop body clobbers any of LC1, LT1, or LB1. */
3538 /* Next loop in the graph. */
3539 struct loop_info *next;
3541 /* Immediate outer loop of this loop. */
3542 struct loop_info *outer;
3544 /* Vector of blocks only within the loop, including those within
3546 VEC (basic_block,heap) *blocks;
3548 /* Same information in a bitmap. */
3549 bitmap block_bitmap;
3551 /* Vector of inner loops within this loop */
3552 VEC (loop_info,heap) *loops;
3556 bfin_dump_loops (loop_info loops)
3560 for (loop = loops; loop; loop = loop->next)
3566 fprintf (dump_file, ";; loop %d: ", loop->loop_no);
3568 fprintf (dump_file, "(bad) ");
3569 fprintf (dump_file, "{head:%d, depth:%d}", loop->head->index, loop->depth);
3571 fprintf (dump_file, " blocks: [ ");
3572 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
3573 fprintf (dump_file, "%d ", b->index);
3574 fprintf (dump_file, "] ");
3576 fprintf (dump_file, " inner loops: [ ");
3577 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, i); ix++)
3578 fprintf (dump_file, "%d ", i->loop_no);
3579 fprintf (dump_file, "]\n");
3581 fprintf (dump_file, "\n");
3584 /* Scan the blocks of LOOP (and its inferiors) looking for basic block
3585 BB. Return true, if we find it. */
3588 bfin_bb_in_loop (loop_info loop, basic_block bb)
3590 return bitmap_bit_p (loop->block_bitmap, bb->index);
3593 /* Scan the blocks of LOOP (and its inferiors) looking for uses of
3594 REG. Return true, if we find any. Don't count the loop's loop_end
3595 insn if it matches LOOP_END. */
3598 bfin_scan_loop (loop_info loop, rtx reg, rtx loop_end)
3603 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
3607 for (insn = BB_HEAD (bb);
3608 insn != NEXT_INSN (BB_END (bb));
3609 insn = NEXT_INSN (insn))
3613 if (insn == loop_end)
3615 if (reg_mentioned_p (reg, PATTERN (insn)))
3622 /* Estimate the length of INSN conservatively. */
3625 length_for_loop (rtx insn)
3628 if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
3630 if (ENABLE_WA_SPECULATIVE_SYNCS)
3632 else if (ENABLE_WA_SPECULATIVE_LOADS)
3635 else if (LABEL_P (insn))
3637 if (ENABLE_WA_SPECULATIVE_SYNCS)
3642 length += get_attr_length (insn);
3647 /* Optimize LOOP. */
3650 bfin_optimize_loop (loop_info loop)
3654 rtx insn, init_insn, last_insn, nop_insn;
3655 rtx loop_init, start_label, end_label;
3656 rtx reg_lc0, reg_lc1, reg_lt0, reg_lt1, reg_lb0, reg_lb1;
3658 rtx lc_reg, lt_reg, lb_reg;
3662 int inner_depth = 0;
3672 fprintf (dump_file, ";; loop %d bad when found\n", loop->loop_no);
3676 /* Every loop contains in its list of inner loops every loop nested inside
3677 it, even if there are intermediate loops. This works because we're doing
3678 a depth-first search here and never visit a loop more than once. */
3679 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
3681 bfin_optimize_loop (inner);
3683 if (!inner->bad && inner_depth < inner->depth)
3685 inner_depth = inner->depth;
3687 loop->clobber_loop0 |= inner->clobber_loop0;
3688 loop->clobber_loop1 |= inner->clobber_loop1;
3692 loop->depth = inner_depth + 1;
3693 if (loop->depth > MAX_LOOP_DEPTH)
3696 fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
3700 /* Get the loop iteration register. */
3701 iter_reg = loop->iter_reg;
3703 if (!DPREG_P (iter_reg))
3706 fprintf (dump_file, ";; loop %d iteration count NOT in PREG or DREG\n",
3711 if (loop->incoming_src)
3713 /* Make sure the predecessor is before the loop start label, as required by
3714 the LSETUP instruction. */
3716 for (insn = BB_END (loop->incoming_src);
3717 insn && insn != loop->start_label;
3718 insn = NEXT_INSN (insn))
3719 length += length_for_loop (insn);
3724 fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
3729 if (length > MAX_LSETUP_DISTANCE)
3732 fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
3737 /* Check if start_label appears before loop_end and calculate the
3738 offset between them. We calculate the length of instructions
3741 for (insn = loop->start_label;
3742 insn && insn != loop->loop_end;
3743 insn = NEXT_INSN (insn))
3744 length += length_for_loop (insn);
3749 fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
3754 loop->length = length;
3755 if (loop->length > MAX_LOOP_LENGTH)
3758 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
3762 /* Scan all the blocks to make sure they don't use iter_reg. */
3763 if (bfin_scan_loop (loop, iter_reg, loop->loop_end))
3766 fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
3770 /* Scan all the insns to see if the loop body clobber
3771 any hardware loop registers. */
3773 reg_lc0 = gen_rtx_REG (SImode, REG_LC0);
3774 reg_lc1 = gen_rtx_REG (SImode, REG_LC1);
3775 reg_lt0 = gen_rtx_REG (SImode, REG_LT0);
3776 reg_lt1 = gen_rtx_REG (SImode, REG_LT1);
3777 reg_lb0 = gen_rtx_REG (SImode, REG_LB0);
3778 reg_lb1 = gen_rtx_REG (SImode, REG_LB1);
3780 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
3784 for (insn = BB_HEAD (bb);
3785 insn != NEXT_INSN (BB_END (bb));
3786 insn = NEXT_INSN (insn))
3791 if (reg_set_p (reg_lc0, insn)
3792 || reg_set_p (reg_lt0, insn)
3793 || reg_set_p (reg_lb0, insn))
3794 loop->clobber_loop0 = 1;
3796 if (reg_set_p (reg_lc1, insn)
3797 || reg_set_p (reg_lt1, insn)
3798 || reg_set_p (reg_lb1, insn))
3799 loop->clobber_loop1 |= 1;
3803 if ((loop->clobber_loop0 && loop->clobber_loop1)
3804 || (loop->depth == MAX_LOOP_DEPTH && loop->clobber_loop0))
3806 loop->depth = MAX_LOOP_DEPTH + 1;
3808 fprintf (dump_file, ";; loop %d no loop reg available\n",
3813 /* There should be an instruction before the loop_end instruction
3814 in the same basic block. And the instruction must not be
3816 - CONDITIONAL BRANCH
3820 - Returns (RTS, RTN, etc.) */
3823 last_insn = PREV_INSN (loop->loop_end);
3827 for (; last_insn != PREV_INSN (BB_HEAD (bb));
3828 last_insn = PREV_INSN (last_insn))
3829 if (INSN_P (last_insn))
3832 if (last_insn != PREV_INSN (BB_HEAD (bb)))
3835 if (single_pred_p (bb)
3836 && single_pred (bb) != ENTRY_BLOCK_PTR)
3838 bb = single_pred (bb);
3839 last_insn = BB_END (bb);
3844 last_insn = NULL_RTX;
3852 fprintf (dump_file, ";; loop %d has no last instruction\n",
3857 if (JUMP_P (last_insn))
3859 loop_info inner = (loop_info) bb->aux;
3861 && inner->outer == loop
3862 && inner->loop_end == last_insn
3863 && inner->depth == 1)
3864 /* This jump_insn is the exact loop_end of an inner loop
3865 and to be optimized away. So use the inner's last_insn. */
3866 last_insn = inner->last_insn;
3870 fprintf (dump_file, ";; loop %d has bad last instruction\n",
3875 else if (CALL_P (last_insn)
3876 || (GET_CODE (PATTERN (last_insn)) != SEQUENCE
3877 && get_attr_type (last_insn) == TYPE_SYNC)
3878 || recog_memoized (last_insn) == CODE_FOR_return_internal)
3881 fprintf (dump_file, ";; loop %d has bad last instruction\n",
3886 if (GET_CODE (PATTERN (last_insn)) == ASM_INPUT
3887 || asm_noperands (PATTERN (last_insn)) >= 0
3888 || (GET_CODE (PATTERN (last_insn)) != SEQUENCE
3889 && get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI))
3891 nop_insn = emit_insn_after (gen_nop (), last_insn);
3892 last_insn = nop_insn;
3895 loop->last_insn = last_insn;
3897 /* The loop is good for replacement. */
3898 start_label = loop->start_label;
3899 end_label = gen_label_rtx ();
3900 iter_reg = loop->iter_reg;
3902 if (loop->depth == 1 && !loop->clobber_loop1)
3907 loop->clobber_loop1 = 1;
3914 loop->clobber_loop0 = 1;
3917 /* If iter_reg is a DREG, we need generate an instruction to load
3918 the loop count into LC register. */
3919 if (D_REGNO_P (REGNO (iter_reg)))
3921 init_insn = gen_movsi (lc_reg, iter_reg);
3922 loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
3926 else if (P_REGNO_P (REGNO (iter_reg)))
3928 init_insn = NULL_RTX;
3929 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3936 loop->init = init_insn;
3937 loop->end_label = end_label;
3938 loop->loop_init = loop_init;
3942 fprintf (dump_file, ";; replacing loop %d initializer with\n",
3944 print_rtl_single (dump_file, loop->loop_init);
3945 fprintf (dump_file, ";; replacing loop %d terminator with\n",
3947 print_rtl_single (dump_file, loop->loop_end);
3952 if (loop->init != NULL_RTX)
3953 emit_insn (loop->init);
3954 seq_end = emit_insn (loop->loop_init);
3959 if (loop->incoming_src)
3961 rtx prev = BB_END (loop->incoming_src);
3962 if (VEC_length (edge, loop->incoming) > 1
3963 || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
3965 gcc_assert (JUMP_P (prev));
3966 prev = PREV_INSN (prev);
3968 emit_insn_after (seq, prev);
3976 if (loop->head != loop->incoming_dest)
3978 FOR_EACH_EDGE (e, ei, loop->head->preds)
3980 if (e->flags & EDGE_FALLTHRU)
3982 rtx newjump = gen_jump (loop->start_label);
3983 emit_insn_before (newjump, BB_HEAD (loop->head));
3984 new_bb = create_basic_block (newjump, newjump, loop->head->prev_bb);
3985 gcc_assert (new_bb = loop->head->prev_bb);
3991 emit_insn_before (seq, BB_HEAD (loop->head));
3992 seq = emit_label_before (gen_label_rtx (), seq);
3994 new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
3995 FOR_EACH_EDGE (e, ei, loop->incoming)
3997 if (!(e->flags & EDGE_FALLTHRU)
3998 || e->dest != loop->head)
3999 redirect_edge_and_branch_force (e, new_bb);
4001 redirect_edge_succ (e, new_bb);
4005 delete_insn (loop->loop_end);
4006 /* Insert the loop end label before the last instruction of the loop. */
4007 emit_label_before (loop->end_label, loop->last_insn);
4014 fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);
4018 if (DPREG_P (loop->iter_reg))
4020 /* If loop->iter_reg is a DREG or PREG, we can split it here
4021 without scratch register. */
4024 emit_insn_before (gen_addsi3 (loop->iter_reg,
4029 emit_insn_before (gen_cmpsi (loop->iter_reg, const0_rtx),
4032 insn = emit_jump_insn_before (gen_bne (loop->start_label),
4035 JUMP_LABEL (insn) = loop->start_label;
4036 LABEL_NUSES (loop->start_label)++;
4037 delete_insn (loop->loop_end);
4041 /* Called from bfin_reorg_loops when a potential loop end is found. LOOP is
4042 a newly set up structure describing the loop, it is this function's
4043 responsibility to fill most of it. TAIL_BB and TAIL_INSN point to the
4044 loop_end insn and its enclosing basic block. */
4047 bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
4051 VEC (basic_block,heap) *works = VEC_alloc (basic_block,heap,20);
4053 loop->tail = tail_bb;
4054 loop->head = BRANCH_EDGE (tail_bb)->dest;
4055 loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
4056 loop->loop_end = tail_insn;
4057 loop->last_insn = NULL_RTX;
4058 loop->iter_reg = SET_DEST (XVECEXP (PATTERN (tail_insn), 0, 1));
4059 loop->depth = loop->length = 0;
4061 loop->clobber_loop0 = loop->clobber_loop1 = 0;
4064 loop->incoming = VEC_alloc (edge, gc, 2);
4065 loop->init = loop->loop_init = NULL_RTX;
4066 loop->start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn), 0, 0)), 1), 0);
4067 loop->end_label = NULL_RTX;
4070 VEC_safe_push (basic_block, heap, works, loop->head);
4072 while (VEC_iterate (basic_block, works, dwork++, bb))
4076 if (bb == EXIT_BLOCK_PTR)
4078 /* We've reached the exit block. The loop must be bad. */
4081 ";; Loop is bad - reached exit block while scanning\n");
4086 if (bitmap_bit_p (loop->block_bitmap, bb->index))
4089 /* We've not seen this block before. Add it to the loop's
4090 list and then add each successor to the work list. */
4092 VEC_safe_push (basic_block, heap, loop->blocks, bb);
4093 bitmap_set_bit (loop->block_bitmap, bb->index);
4097 FOR_EACH_EDGE (e, ei, bb->succs)
4099 basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
4100 if (!REGNO_REG_SET_P (df_get_live_in (succ),
4101 REGNO (loop->iter_reg)))
4103 if (!VEC_space (basic_block, works, 1))
4107 VEC_block_remove (basic_block, works, 0, dwork);
4111 VEC_reserve (basic_block, heap, works, 1);
4113 VEC_quick_push (basic_block, works, succ);
4118 /* Find the predecessor, and make sure nothing else jumps into this loop. */
4122 for (dwork = 0; VEC_iterate (basic_block, loop->blocks, dwork, bb); dwork++)
4126 FOR_EACH_EDGE (e, ei, bb->preds)
4128 basic_block pred = e->src;
4130 if (!bfin_bb_in_loop (loop, pred))
4133 fprintf (dump_file, ";; Loop %d: incoming edge %d -> %d\n",
4134 loop->loop_no, pred->index,
4136 VEC_safe_push (edge, gc, loop->incoming, e);
4141 for (pass = 0, retry = 1; retry && pass < 2; pass++)
4148 FOR_EACH_EDGE (e, ei, loop->incoming)
4152 loop->incoming_src = e->src;
4153 loop->incoming_dest = e->dest;
4158 if (e->dest != loop->incoming_dest)
4159 loop->incoming_dest = NULL;
4160 if (e->src != loop->incoming_src)
4161 loop->incoming_src = NULL;
4163 if (loop->incoming_src == NULL && loop->incoming_dest == NULL)
4169 ";; retrying loop %d with forwarder blocks\n",
4177 ";; can't find suitable entry for loop %d\n",
4185 FOR_EACH_EDGE (e, ei, loop->incoming)
4187 if (forwarder_block_p (e->src))
4194 ";; Adding forwarder block %d to loop %d and retrying\n",
4195 e->src->index, loop->loop_no);
4196 VEC_safe_push (basic_block, heap, loop->blocks, e->src);
4197 bitmap_set_bit (loop->block_bitmap, e->src->index);
4198 FOR_EACH_EDGE (e2, ei2, e->src->preds)
4199 VEC_safe_push (edge, gc, loop->incoming, e2);
4200 VEC_unordered_remove (edge, loop->incoming, ei.index);
4210 VEC_free (basic_block, heap, works);
4213 /* Analyze the structure of the loops in the current function. Use STACK
4214 for bitmap allocations. Returns all the valid candidates for hardware
4215 loops found in this function. */
4217 bfin_discover_loops (bitmap_obstack *stack, FILE *dump_file)
4219 loop_info loops = NULL;
4225 /* Find all the possible loop tails. This means searching for every
4226 loop_end instruction. For each one found, create a loop_info
4227 structure and add the head block to the work list. */
4230 rtx tail = BB_END (bb);
4232 while (GET_CODE (tail) == NOTE)
4233 tail = PREV_INSN (tail);
4237 if (INSN_P (tail) && recog_memoized (tail) == CODE_FOR_loop_end)
4240 /* A possible loop end */
4242 /* There's a degenerate case we can handle - an empty loop consisting
4243 of only a back branch. Handle that by deleting the branch. */
4244 insn = BB_HEAD (BRANCH_EDGE (bb)->dest);
4245 if (next_real_insn (insn) == tail)
4249 fprintf (dump_file, ";; degenerate loop ending at\n");
4250 print_rtl_single (dump_file, tail);
4252 delete_insn_and_edges (tail);
4256 loop = XNEW (struct loop_info);
4259 loop->loop_no = nloops++;
4260 loop->blocks = VEC_alloc (basic_block, heap, 20);
4261 loop->block_bitmap = BITMAP_ALLOC (stack);
4266 fprintf (dump_file, ";; potential loop %d ending at\n",
4268 print_rtl_single (dump_file, tail);
4271 bfin_discover_loop (loop, bb, tail);
4275 tmp_bitmap = BITMAP_ALLOC (stack);
4276 /* Compute loop nestings. */
4277 for (loop = loops; loop; loop = loop->next)
4283 for (other = loop->next; other; other = other->next)
4288 bitmap_and (tmp_bitmap, other->block_bitmap, loop->block_bitmap);
4289 if (bitmap_empty_p (tmp_bitmap))
4291 if (bitmap_equal_p (tmp_bitmap, other->block_bitmap))
4293 other->outer = loop;
4294 VEC_safe_push (loop_info, heap, loop->loops, other);
4296 else if (bitmap_equal_p (tmp_bitmap, loop->block_bitmap))
4298 loop->outer = other;
4299 VEC_safe_push (loop_info, heap, other->loops, loop);
4305 ";; can't find suitable nesting for loops %d and %d\n",
4306 loop->loop_no, other->loop_no);
4307 loop->bad = other->bad = 1;
4311 BITMAP_FREE (tmp_bitmap);
4316 /* Free up the loop structures in LOOPS. */
4318 free_loops (loop_info loops)
4322 loop_info loop = loops;
4324 VEC_free (loop_info, heap, loop->loops);
4325 VEC_free (basic_block, heap, loop->blocks);
4326 BITMAP_FREE (loop->block_bitmap);
4331 #define BB_AUX_INDEX(BB) ((unsigned)(BB)->aux)
4333 /* The taken-branch edge from the loop end can actually go forward. Since the
4334 Blackfin's LSETUP instruction requires that the loop end be after the loop
4335 start, try to reorder a loop's basic blocks when we find such a case. */
4337 bfin_reorder_loops (loop_info loops, FILE *dump_file)
4344 cfg_layout_initialize (0);
4346 for (loop = loops; loop; loop = loop->next)
4356 /* Recreate an index for basic blocks that represents their order. */
4357 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
4358 bb != EXIT_BLOCK_PTR;
4359 bb = bb->next_bb, index++)
4360 bb->aux = (PTR) index;
4362 if (BB_AUX_INDEX (loop->head) < BB_AUX_INDEX (loop->tail))
4365 FOR_EACH_EDGE (e, ei, loop->head->succs)
4367 if (bitmap_bit_p (loop->block_bitmap, e->dest->index)
4368 && BB_AUX_INDEX (e->dest) < BB_AUX_INDEX (loop->tail))
4370 basic_block start_bb = e->dest;
4371 basic_block start_prev_bb = start_bb->prev_bb;
4374 fprintf (dump_file, ";; Moving block %d before block %d\n",
4375 loop->head->index, start_bb->index);
4376 loop->head->prev_bb->next_bb = loop->head->next_bb;
4377 loop->head->next_bb->prev_bb = loop->head->prev_bb;
4379 loop->head->prev_bb = start_prev_bb;
4380 loop->head->next_bb = start_bb;
4381 start_prev_bb->next_bb = start_bb->prev_bb = loop->head;
4385 loops = loops->next;
4390 if (bb->next_bb != EXIT_BLOCK_PTR)
4391 bb->aux = bb->next_bb;
4395 cfg_layout_finalize ();
4399 /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
4400 and tries to rewrite the RTL of these loops so that proper Blackfin
4401 hardware loops are generated. */
4404 bfin_reorg_loops (FILE *dump_file)
4406 loop_info loops = NULL;
4409 bitmap_obstack stack;
4411 bitmap_obstack_initialize (&stack);
4414 fprintf (dump_file, ";; Find loops, first pass\n\n");
4416 loops = bfin_discover_loops (&stack, dump_file);
4419 bfin_dump_loops (loops);
4421 bfin_reorder_loops (loops, dump_file);
4425 fprintf (dump_file, ";; Find loops, second pass\n\n");
4427 loops = bfin_discover_loops (&stack, dump_file);
4430 fprintf (dump_file, ";; All loops found:\n\n");
4431 bfin_dump_loops (loops);
4434 /* Now apply the optimizations. */
4435 for (loop = loops; loop; loop = loop->next)
4436 bfin_optimize_loop (loop);
4440 fprintf (dump_file, ";; After hardware loops optimization:\n\n");
4441 bfin_dump_loops (loops);
4447 print_rtl (dump_file, get_insns ());
4453 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
4454 Returns true if we modified the insn chain, false otherwise. */
4456 gen_one_bundle (rtx slot[3])
4458 gcc_assert (slot[1] != NULL_RTX);
4460 /* Verify that we really can do the multi-issue. */
4463 rtx t = NEXT_INSN (slot[0]);
4464 while (t != slot[1])
4466 if (GET_CODE (t) != NOTE
4467 || NOTE_KIND (t) != NOTE_INSN_DELETED)
4474 rtx t = NEXT_INSN (slot[1]);
4475 while (t != slot[2])
4477 if (GET_CODE (t) != NOTE
4478 || NOTE_KIND (t) != NOTE_INSN_DELETED)
4484 if (slot[0] == NULL_RTX)
4486 slot[0] = emit_insn_before (gen_mnop (), slot[1]);
4487 df_insn_rescan (slot[0]);
4489 if (slot[2] == NULL_RTX)
4491 slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
4492 df_insn_rescan (slot[2]);
4495 /* Avoid line number information being printed inside one bundle. */
4496 if (INSN_LOCATOR (slot[1])
4497 && INSN_LOCATOR (slot[1]) != INSN_LOCATOR (slot[0]))
4498 INSN_LOCATOR (slot[1]) = INSN_LOCATOR (slot[0]);
4499 if (INSN_LOCATOR (slot[2])
4500 && INSN_LOCATOR (slot[2]) != INSN_LOCATOR (slot[0]))
4501 INSN_LOCATOR (slot[2]) = INSN_LOCATOR (slot[0]);
4503 /* Terminate them with "|| " instead of ";" in the output. */
4504 PUT_MODE (slot[0], SImode);
4505 PUT_MODE (slot[1], SImode);
4506 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
4507 PUT_MODE (slot[2], QImode);
4511 /* Go through all insns, and use the information generated during scheduling
4512 to generate SEQUENCEs to represent bundles of instructions issued
4516 bfin_gen_bundles (void)
4525 slot[0] = slot[1] = slot[2] = NULL_RTX;
4526 for (insn = BB_HEAD (bb);; insn = next)
4531 if (get_attr_type (insn) == TYPE_DSP32)
4533 else if (slot[1] == NULL_RTX)
4540 next = NEXT_INSN (insn);
4541 while (next && insn != BB_END (bb)
4543 && GET_CODE (PATTERN (next)) != USE
4544 && GET_CODE (PATTERN (next)) != CLOBBER))
4547 next = NEXT_INSN (insn);
4550 /* BB_END can change due to emitting extra NOPs, so check here. */
4551 at_end = insn == BB_END (bb);
4552 if (at_end || GET_MODE (next) == TImode)
4555 || !gen_one_bundle (slot))
4556 && slot[0] != NULL_RTX)
4558 rtx pat = PATTERN (slot[0]);
4559 if (GET_CODE (pat) == SET
4560 && GET_CODE (SET_SRC (pat)) == UNSPEC
4561 && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
4563 SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
4564 INSN_CODE (slot[0]) = -1;
4565 df_insn_rescan (slot[0]);
4569 slot[0] = slot[1] = slot[2] = NULL_RTX;
4577 /* Ensure that no var tracking notes are emitted in the middle of a
4578 three-instruction bundle. */
4581 reorder_var_tracking_notes (void)
4587 rtx queue = NULL_RTX;
4588 bool in_bundle = false;
4590 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
4592 next = NEXT_INSN (insn);
4596 /* Emit queued up notes at the last instruction of a bundle. */
4597 if (GET_MODE (insn) == QImode)
4601 rtx next_queue = PREV_INSN (queue);
4602 PREV_INSN (NEXT_INSN (insn)) = queue;
4603 NEXT_INSN (queue) = NEXT_INSN (insn);
4604 NEXT_INSN (insn) = queue;
4605 PREV_INSN (queue) = insn;
4610 else if (GET_MODE (insn) == SImode)
4613 else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
4617 rtx prev = PREV_INSN (insn);
4618 PREV_INSN (next) = prev;
4619 NEXT_INSN (prev) = next;
4621 PREV_INSN (insn) = queue;
4629 /* Return an insn type for INSN that can be used by the caller for anomaly
4630 workarounds. This differs from plain get_attr_type in that it handles
4633 static enum attr_type
4634 type_for_anomaly (rtx insn)
4636 rtx pat = PATTERN (insn);
4637 if (GET_CODE (pat) == SEQUENCE)
4640 t = get_attr_type (XVECEXP (pat, 0, 1));
4643 t = get_attr_type (XVECEXP (pat, 0, 2));
4649 return get_attr_type (insn);
4652 /* Return nonzero if INSN contains any loads that may trap. It handles
4653 SEQUENCEs correctly. */
4656 trapping_loads_p (rtx insn)
4658 rtx pat = PATTERN (insn);
4659 if (GET_CODE (pat) == SEQUENCE)
4662 t = get_attr_type (XVECEXP (pat, 0, 1));
4664 && may_trap_p (SET_SRC (PATTERN (XVECEXP (pat, 0, 1)))))
4666 t = get_attr_type (XVECEXP (pat, 0, 2));
4668 && may_trap_p (SET_SRC (PATTERN (XVECEXP (pat, 0, 2)))))
4673 return may_trap_p (SET_SRC (single_set (insn)));
4676 /* This function acts like NEXT_INSN, but is aware of three-insn bundles and
4677 skips all subsequent parallel instructions if INSN is the start of such
4680 find_next_insn_start (rtx insn)
4682 if (GET_MODE (insn) == SImode)
4684 while (GET_MODE (insn) != QImode)
4685 insn = NEXT_INSN (insn);
4687 return NEXT_INSN (insn);
4690 /* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4691 a three-insn bundle, see if one of them is a load and return that if so.
4692 Return NULL_RTX if the insn does not contain loads. */
4694 find_load (rtx insn)
4696 if (get_attr_type (insn) == TYPE_MCLD)
4698 if (GET_MODE (insn) != SImode)
4701 insn = NEXT_INSN (insn);
4702 if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
4703 && get_attr_type (insn) == TYPE_MCLD)
4705 } while (GET_MODE (insn) != QImode);
4709 /* We use the machine specific reorg pass for emitting CSYNC instructions
4710 after conditional branches as needed.
4712 The Blackfin is unusual in that a code sequence like
4715 may speculatively perform the load even if the condition isn't true. This
4716 happens for a branch that is predicted not taken, because the pipeline
4717 isn't flushed or stalled, so the early stages of the following instructions,
4718 which perform the memory reference, are allowed to execute before the
4719 jump condition is evaluated.
4720 Therefore, we must insert additional instructions in all places where this
4721 could lead to incorrect behavior. The manual recommends CSYNC, while
4722 VDSP seems to use NOPs (even though its corresponding compiler option is
4725 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
4726 When optimizing for size, we turn the branch into a predicted taken one.
4727 This may be slower due to mispredicts, but saves code size. */
4733 rtx last_condjump = NULL_RTX;
4734 int cycles_since_jump = INT_MAX;
4736 /* We are freeing block_for_insn in the toplev to keep compatibility
4737 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4738 compute_bb_for_insn ();
4740 if (bfin_flag_schedule_insns2)
4742 splitting_for_sched = 1;
4744 splitting_for_sched = 0;
4746 timevar_push (TV_SCHED2);
4748 timevar_pop (TV_SCHED2);
4750 /* Examine the schedule and insert nops as necessary for 64-bit parallel
4752 bfin_gen_bundles ();
4757 /* Doloop optimization */
4758 if (cfun->machine->has_hardware_loops)
4759 bfin_reorg_loops (dump_file);
4761 if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS)
4764 /* First pass: find predicted-false branches; if something after them
4765 needs nops, insert them or change the branch to predict true. */
4766 for (insn = get_insns (); insn; insn = next)
4770 next = find_next_insn_start (insn);
4772 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
4775 pat = PATTERN (insn);
4776 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
4777 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
4778 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
4783 if (any_condjump_p (insn)
4784 && ! cbranch_predicted_taken_p (insn))
4786 last_condjump = insn;
4787 cycles_since_jump = 0;
4790 cycles_since_jump = INT_MAX;
4792 else if (INSN_P (insn))
4794 rtx load_insn = find_load (insn);
4795 enum attr_type type = type_for_anomaly (insn);
4796 int delay_needed = 0;
4797 if (cycles_since_jump < INT_MAX)
4798 cycles_since_jump++;
4800 if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
4802 if (trapping_loads_p (load_insn))
4805 else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
4808 if (delay_needed > cycles_since_jump)
4812 rtx *op = recog_data.operand;
4814 delay_needed -= cycles_since_jump;
4816 extract_insn (last_condjump);
4819 pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
4821 cycles_since_jump = INT_MAX;
4824 /* Do not adjust cycles_since_jump in this case, so that
4825 we'll increase the number of NOPs for a subsequent insn
4827 pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
4828 GEN_INT (delay_needed));
4829 PATTERN (last_condjump) = pat;
4830 INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
4834 /* Second pass: for predicted-true branches, see if anything at the
4835 branch destination needs extra nops. */
4836 if (! ENABLE_WA_SPECULATIVE_SYNCS)
4839 if (! ENABLE_WA_RETS)
4842 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4845 && any_condjump_p (insn)
4846 && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
4847 || cbranch_predicted_taken_p (insn)))
4849 rtx target = JUMP_LABEL (insn);
4851 cycles_since_jump = 0;
4852 for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
4856 if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
4859 pat = PATTERN (target);
4860 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
4861 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
4862 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
4865 if (INSN_P (target))
4867 enum attr_type type = type_for_anomaly (target);
4868 int delay_needed = 0;
4869 if (cycles_since_jump < INT_MAX)
4870 cycles_since_jump++;
4872 if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
4875 if (delay_needed > cycles_since_jump)
4877 rtx prev = prev_real_insn (label);
4878 delay_needed -= cycles_since_jump;
4880 fprintf (dump_file, "Adding %d nops after %d\n",
4881 delay_needed, INSN_UID (label));
4883 && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
4890 "Reducing nops on insn %d.\n",
4893 x = XVECEXP (x, 0, 1);
4894 v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
4895 XVECEXP (x, 0, 0) = GEN_INT (v);
4897 while (delay_needed-- > 0)
4898 emit_insn_after (gen_nop (), label);
4906 if (bfin_flag_var_tracking)
4908 timevar_push (TV_VAR_TRACKING);
4909 variable_tracking_main ();
4910 reorder_var_tracking_notes ();
4911 timevar_pop (TV_VAR_TRACKING);
4913 df_finish_pass (false);
4916 /* Handle interrupt_handler, exception_handler and nmi_handler function
4917 attributes; arguments as in struct attribute_spec.handler. */
4920 handle_int_attribute (tree *node, tree name,
4921 tree args ATTRIBUTE_UNUSED,
4922 int flags ATTRIBUTE_UNUSED,
4926 if (TREE_CODE (x) == FUNCTION_DECL)
4929 if (TREE_CODE (x) != FUNCTION_TYPE)
4931 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4932 IDENTIFIER_POINTER (name));
4933 *no_add_attrs = true;
4935 else if (funkind (x) != SUBROUTINE)
4936 error ("multiple function type attributes specified");
/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */

static int
bfin_comp_type_attributes (const_tree type1, const_tree type2)
{
  e_funkind kind1, kind2;

  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  kind1 = funkind (type1);
  kind2 = funkind (type2);

  if (kind1 != kind2)
    return 0;

  /* Check for mismatched modifiers */
  if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
    return 0;

  return 1;
}
/* Handle a "longcall" or "shortcall" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_longcall_attribute (tree *node, tree name,
				tree args ATTRIBUTE_UNUSED,
				int flags ATTRIBUTE_UNUSED,
				bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (OPT_Wattributes, "`%s' attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
       && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
      || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
	  && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
    {
      warning (OPT_Wattributes,
	       "can't apply both longcall and shortcall attributes to the same function");
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
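/* Usage sketch (hypothetical declarations):

	extern void far_away (void) __attribute__ ((longcall));
	extern void nearby (void) __attribute__ ((shortcall));

   Naming both attributes on one declaration is diagnosed above.  */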
/* Handle a "l1_text" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      error ("`%s' attribute only applies to functions",
	     IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  /* The decl may have already been given a section attribute
     from a previous declaration.  Ensure they match.  */
  else if (DECL_SECTION_NAME (decl) != NULL_TREE
	   && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
		      ".l1.text") != 0)
    {
      error ("section of %q+D conflicts with previous declaration",
	     decl);
      *no_add_attrs = true;
    }
  else
    DECL_SECTION_NAME (decl) = build_string (9, ".l1.text");

  return NULL_TREE;
}
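/* Usage sketch (hypothetical declaration): a function is placed in L1
   instruction SRAM with

	void fast_path (void) __attribute__ ((l1_text));

   which resolves to the ".l1.text" section chosen above.  */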
/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != VAR_DECL)
    {
      error ("`%s' attribute only applies to variables",
	     IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else if (current_function_decl != NULL_TREE
	   && !TREE_STATIC (decl))
    {
      error ("`%s' attribute cannot be specified for local variables",
	     IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      const char *section_name;

      if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
	section_name = ".l1.data";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
	section_name = ".l1.data.A";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
	section_name = ".l1.data.B";
      else
	gcc_unreachable ();

      /* The decl may have already been given a section attribute
	 from a previous declaration.  Ensure they match.  */
      if (DECL_SECTION_NAME (decl) != NULL_TREE
	  && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
		     section_name) != 0)
	{
	  error ("section of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      else
	DECL_SECTION_NAME (decl)
	  = build_string (strlen (section_name) + 1, section_name);
    }

  return NULL_TREE;
}
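/* Usage sketch (hypothetical declarations):

	int coeffs[16] __attribute__ ((l1_data));	// .l1.data
	int in_buf[64] __attribute__ ((l1_data_A));	// .l1.data.A
	int out_buf[64] __attribute__ ((l1_data_B));	// .l1.data.B

   The handler only validates the decl and selects the section name;
   actual placement in bank A or B is up to the linker script.  */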
/* Table of valid machine attributes.  */
const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nesting", 0, 0, false, true,  true, NULL },
  { "kspisusp", 0, 0, false, true,  true, NULL },
  { "saveall", 0, 0, false, true,  true, NULL },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "l1_text", 0, 0, true, false, false,  bfin_handle_l1_text_attribute },
  { "l1_data", 0, 0, true, false, false,  bfin_handle_l1_data_attribute },
  { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute },
  { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
/* Implementation of TARGET_ASM_INTEGER.  When using FD-PIC, we need to
   tell the assembler to generate pointers to function descriptors in
   some cases.  */

static bool
bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
{
  if (TARGET_FDPIC && size == UNITS_PER_WORD)
    {
      if (GET_CODE (value) == SYMBOL_REF
	  && SYMBOL_REF_FUNCTION_P (value))
	{
	  fputs ("\t.picptr\tfuncdesc(", asm_out_file);
	  output_addr_const (asm_out_file, value);
	  fputs (")\n", asm_out_file);
	  return true;
	}
      else
	{
	  /* We've set the unaligned SI op to NULL, so we always have to
	     handle the unaligned case here.  */
	  assemble_integer_with_op ("\t.4byte\t", value);
	  return true;
	}
    }
  return default_assemble_integer (value, size, aligned_p);
}
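/* For a hypothetical FD-PIC unit containing

	int (*fp) (void) = some_function;

   the initializer is emitted through the branch above as

	.picptr	funcdesc(_some_function)

   so the linker materializes a function descriptor instead of a raw
   code address.  (Names invented for illustration.)  */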
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this_rtx;
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);

      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this_rtx;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  BFIN_BUILTIN_MAX
};
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
			NULL, NULL_TREE);				\
} while (0)
/* Set up all builtin functions for this target.  */

static void
bfin_init_builtins (void)
{
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
				NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
				NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
				short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
				NULL_TREE);
  tree int_ftype_pint
    = build_function_type_list (integer_type_node,
				build_pointer_type (integer_type_node),
				NULL_TREE);
  /* Add the remaining MMX insns with somewhat more complicated types.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);

  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
	       BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MAX_2X16);

  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_ABS_2X16);

  def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MIN_1X16);
  def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MAX_1X16);

  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_SUM_2X16);
  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFLH_2X16);

  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHH);

  def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_MIN_1X32);
  def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_MAX_1X32);

  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_ABS_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_ROUND_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
	       BFIN_BUILTIN_MULT_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X32X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X32X32NS);

  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_LSHIFT_2X16);
  def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X32);

  /* Complex numbers.  */
  def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16);
  def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16_S40);
  def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16_S40);
  def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16_S40);
  def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_CPLX_SQU);
5468 /* "Unaligned" load. */
5469 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
5470 BFIN_BUILTIN_LOADBYTES);
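/* From user code these builtins are reached roughly as follows
   (hypothetical fragment; the V2HI type is spelled with GCC's generic
   vector extension and holds two 1.15 fractional halfwords):

	typedef short __v2hi __attribute__ ((vector_size (4)));
	__v2hi a = __builtin_bfin_compose_2x16 (0x4000, 0x2000);
	__v2hi s = __builtin_bfin_add_fr2x16 (a, a);	// saturating
	short hi = __builtin_bfin_extract_hi (s);  */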
struct builtin_description
{
  const enum insn_code icode;
  const char *const name;
  const enum bfin_builtins code;
  int macflag;
};
static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
  { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },

  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE }
};
static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },

  { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },

  { CODE_FOR_signbitshi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_signbitssi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
  { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
/* Errors in the source file can cause expand_expr to return const0_rtx
   where we expect a vector.  To avoid crashing, use one of the vector
   clear instructions.  */

static rtx
safe_vector_operand (rtx x, enum machine_mode mode)
{
  if (x != const0_rtx)
    return x;
  x = gen_reg_rtx (SImode);

  emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
  return gen_lowpart (mode, x);
}
/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
			   int macflag)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }

  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
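/* Sketch of how MACFLAG feeds in: for __builtin_bfin_mult_fr1x16 the
   bdesc_2arg table above supplies CODE_FOR_flag_mulhi with MACFLAG_T,
   so the expansion reduces to

	pat = GEN_FCN (CODE_FOR_flag_mulhi) (target, op0, op1,
					     GEN_INT (MACFLAG_T));

   whereas entries carrying -1 use the plain three-operand form.  */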
/* Subroutine of bfin_expand_builtin to take care of unop insns.  */

static rtx
bfin_expand_unop_builtin (enum insn_code icode, tree exp,
			  rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);

  if (op0mode == SImode && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		     rtx subtarget ATTRIBUTE_UNUSED,
		     enum machine_mode mode ATTRIBUTE_UNUSED,
		     int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  enum insn_code icode;
  const struct builtin_description *d;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
  enum machine_mode tmode, mode0;

  switch (fcode)
    {
    case BFIN_BUILTIN_CSYNC:
      emit_insn (gen_csync ());
      return 0;
    case BFIN_BUILTIN_SSYNC:
      emit_insn (gen_ssync ());
      return 0;

    case BFIN_BUILTIN_DIFFHL_2X16:
    case BFIN_BUILTIN_DIFFLH_2X16:
    case BFIN_BUILTIN_SUM_2X16:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
	       : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
	       : CODE_FOR_ssaddhilov2hi3);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! target
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      if (VECTOR_MODE_P (mode0))
	op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (target, op0, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case BFIN_BUILTIN_MULT_1X32X32:
    case BFIN_BUILTIN_MULT_1X32X32NS:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      if (! target
	  || !register_operand (target, SImode))
	target = gen_reg_rtx (SImode);

      a1reg = gen_rtx_REG (PDImode, REG_A1);
      a0reg = gen_rtx_REG (PDImode, REG_A0);
      tmp1 = gen_lowpart (V2HImode, op0);
      tmp2 = gen_lowpart (V2HImode, op1);
      emit_insn (gen_flag_macinit1hi (a1reg,
				      gen_lowpart (HImode, op0),
				      gen_lowpart (HImode, op1),
				      GEN_INT (MACFLAG_FU)));
      emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));

      if (fcode == BFIN_BUILTIN_MULT_1X32X32)
	emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
						       const1_rtx, const1_rtx,
						       const1_rtx, const0_rtx, a1reg,
						       const0_rtx, GEN_INT (MACFLAG_NONE),
						       GEN_INT (MACFLAG_M)));
      else
	{
	  /* For saturating multiplication, there's exactly one special case
	     to be handled: multiplying the smallest negative value with
	     itself.  Due to shift correction in fractional multiplies, this
	     can overflow.  Iff this happens, OP2 will contain 1, which, when
	     added in 32 bits to the smallest negative, wraps to the largest
	     positive, which is the result we want.  */
	  op2 = gen_reg_rtx (V2HImode);
	  emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
	  emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
				  gen_lowpart (SImode, op2)));
	  emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
								const1_rtx, const1_rtx,
								const1_rtx, const0_rtx, a1reg,
								const0_rtx, GEN_INT (MACFLAG_NONE),
								GEN_INT (MACFLAG_M)));
	  op2 = gen_reg_rtx (SImode);
	  emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
	}
      emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
					       const1_rtx, const0_rtx,
					       a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
      emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
      emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
      if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
	emit_insn (gen_addsi3 (target, target, op2));
      return target;
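/* Worked instance of the special case above, in 1.31 fractional
   arithmetic: 0x80000000 represents -1.0, and a fractional multiply
   is shift-corrected, i.e. it computes (a * b) << 1 in the
   accumulator.  For a == b == 0x80000000 the exact square, +1.0,
   exceeds the largest representable value 0x7fffffff (about
   0.9999999995), so the shifted result wraps.  The CC-flag trick
   above detects exactly this operand pair, and OP2 then supplies the
   compensating increment added for the non-saturating variant.  */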
    case BFIN_BUILTIN_CPLX_MUL_16:
    case BFIN_BUILTIN_CPLX_MUL_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      accvec = gen_reg_rtx (V2PDImode);

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (tmode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);

      if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_NONE)));
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, const1_rtx, const0_rtx,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;
    case BFIN_BUILTIN_CPLX_MAC_16:
    case BFIN_BUILTIN_CPLX_MSU_16:
    case BFIN_BUILTIN_CPLX_MAC_16_S40:
    case BFIN_BUILTIN_CPLX_MSU_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      arg2 = CALL_EXPR_ARG (exp, 2);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
      accvec = gen_reg_rtx (V2PDImode);

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (tmode);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);
      if (! register_operand (op2, GET_MODE (op2)))
	op2 = copy_to_mode_reg (GET_MODE (op2), op2);

      tmp1 = gen_reg_rtx (SImode);
      tmp2 = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
      emit_move_insn (tmp2, gen_lowpart (SImode, op0));
      emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
      emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MSU_16)
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx,
						   GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx,
						   GEN_INT (MACFLAG_NONE)));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
	{
	  tmp1 = const1_rtx;
	  tmp2 = const0_rtx;
	}
      else
	{
	  tmp1 = const0_rtx;
	  tmp2 = const1_rtx;
	}
      emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, tmp1, tmp2,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;
    case BFIN_BUILTIN_CPLX_SQU:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_mulv2hi;
      tmp1 = gen_reg_rtx (V2HImode);
      tmp2 = gen_reg_rtx (V2HImode);

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);

      emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_flag_mulhi_parts (tmp2, op0, op0, const0_rtx,
				       const0_rtx, const1_rtx,
				       GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_ssaddhi3_parts (target, tmp2, tmp2, const1_rtx,
				     const0_rtx, const0_rtx));

      emit_insn (gen_sssubhi3_parts (target, tmp1, tmp1, const0_rtx,
				     const0_rtx, const1_rtx));

      return target;

    default:
      break;
    }
  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_binop_builtin (d->icode, exp, target,
					d->macflag);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_unop_builtin (d->icode, exp, target);

  gcc_unreachable ();
}
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory

struct gcc_target targetm = TARGET_INITIALIZER;