/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "input.h"
#include "target.h"
#include "target-def.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "optabs.h"
#include "ggc.h"
#include "integrate.h"
#include "cgraph.h"
#include "langhooks.h"
#include "bfin-protos.h"
#include "tm-preds.h"
#include "tm-constrs.h"
#include "gt-bfin.h"
#include "basic-block.h"
#include "cfglayout.h"
#include "timevar.h"
#include "df.h"
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct machine_function GTY(())
{
  int has_hardware_loops;
};
/* Test and compare insns in bfin.md store the information needed to
   generate branch and scc insns here.  */
rtx bfin_compare_op0, bfin_compare_op1;

/* RTX for the condition code flag register and the RETS register.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

int max_arg_registers = 0;
/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

static int arg_regs[] = FUNCTION_ARG_REGISTERS;
/* Nonzero if -mshared-library-id was given.  */
static int bfin_lib_id_given;

/* Nonzero if -fschedule-insns2 was given.  We override it and
   call the scheduler ourselves during reorg.  */
static int bfin_flag_schedule_insns2;

/* Determines whether we run variable tracking in machine dependent
   reorganization.  */
static int bfin_flag_var_tracking;
bfin_cpu_t bfin_cpu_type = DEFAULT_CPU_TYPE;

/* -msi-revision support.  There are two special values:
   -1      -msi-revision=none.
   0xffff  -msi-revision=any.  */
int bfin_si_revision;

/* The workarounds enabled.  */
unsigned int bfin_workarounds = 0;

static bool cputype_selected = false;

struct bfin_cpu
{
  const char *name;
  bfin_cpu_t type;
  int si_revision;
  unsigned int workarounds;
};
struct bfin_cpu bfin_cpus[] =
{
  {"bf522", BFIN_CPU_BF522, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf523", BFIN_CPU_BF523, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf524", BFIN_CPU_BF524, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf525", BFIN_CPU_BF525, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf526", BFIN_CPU_BF526, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf527", BFIN_CPU_BF527, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf531", BFIN_CPU_BF531, 0x0005,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf531", BFIN_CPU_BF531, 0x0004,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf531", BFIN_CPU_BF531, 0x0003,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {"bf532", BFIN_CPU_BF532, 0x0005,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf532", BFIN_CPU_BF532, 0x0004,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf532", BFIN_CPU_BF532, 0x0003,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {"bf533", BFIN_CPU_BF533, 0x0005,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf533", BFIN_CPU_BF533, 0x0004,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf533", BFIN_CPU_BF533, 0x0003,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {"bf534", BFIN_CPU_BF534, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf534", BFIN_CPU_BF534, 0x0002,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf534", BFIN_CPU_BF534, 0x0001,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {"bf536", BFIN_CPU_BF536, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf536", BFIN_CPU_BF536, 0x0002,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf536", BFIN_CPU_BF536, 0x0001,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {"bf537", BFIN_CPU_BF537, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf537", BFIN_CPU_BF537, 0x0002,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf537", BFIN_CPU_BF537, 0x0001,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {"bf538", BFIN_CPU_BF538, 0x0004,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf538", BFIN_CPU_BF538, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf539", BFIN_CPU_BF539, 0x0004,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf539", BFIN_CPU_BF539, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS},
  {"bf539", BFIN_CPU_BF539, 0x0002,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf542", BFIN_CPU_BF542, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf544", BFIN_CPU_BF544, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf547", BFIN_CPU_BF547, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf548", BFIN_CPU_BF548, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf549", BFIN_CPU_BF549, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS},

  {"bf561", BFIN_CPU_BF561, 0x0005, WA_RETS},
  {"bf561", BFIN_CPU_BF561, 0x0003,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},
  {"bf561", BFIN_CPU_BF561, 0x0002,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS},

  {NULL, 0, 0, 0}
};

int splitting_for_sched;
static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputc (';', stream);
  fputc ('\n', stream);
}
static void
output_file_start (void)
{
  FILE *file = asm_out_file;
  int i;

  /* Variable tracking should be run after all optimizations which change order
     of insns.  It also needs a valid CFG.  This can't be done in
     override_options, because flag_var_tracking is finalized after
     that.  */
  bfin_flag_var_tracking = flag_var_tracking;
  flag_var_tracking = 0;

  fprintf (file, ".file \"%s\";\n", input_filename);

  for (i = 0; arg_regs[i] >= 0; i++)
    ;
  max_arg_registers = i;	/* How many arg regs are used.  */
}
/* Called early in the compilation to conditionally modify
   fixed_regs/call_used_regs.  */

void
conditional_register_usage (void)
{
  /* Initialize the condition code flag register rtx.  */
  bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
  bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
}
/* Examine machine-dependent attributes of function type FUNTYPE and return its
   type.  See the definition of E_FUNKIND.  */

static e_funkind
funkind (const_tree funtype)
{
  tree attrs = TYPE_ATTRIBUTES (funtype);
  if (lookup_attribute ("interrupt_handler", attrs))
    return INTERRUPT_HANDLER;
  else if (lookup_attribute ("exception_handler", attrs))
    return EXCPT_HANDLER;
  else if (lookup_attribute ("nmi_handler", attrs))
    return NMI_HANDLER;
  else
    return SUBROUTINE;
}
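/* For illustration, hypothetical user code (not part of this file) that
   selects each kind:

     void __attribute__ ((interrupt_handler)) my_ivg_isr (void);
     void __attribute__ ((exception_handler)) my_excpt_isr (void);
     void __attribute__ ((nmi_handler)) my_nmi_isr (void);

   A function type carrying none of these attributes is a plain
   SUBROUTINE.  */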
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      if (TARGET_ID_SHARED_LIBRARY)
	unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
	       && SYMBOL_REF_FUNCTION_P (addr))
	unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
	unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new);
      if (picreg == pic_offset_table_rtx)
	crtl->uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new;
}
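/* A sketch of the RTL produced above for a global symbol under
   -mid-shared-library (register numbers illustrative only):

     (set (reg:SI Rn)
	  (mem/u:SI (plus:SI (reg:SI P5)
			     (unspec:SI [(symbol_ref:SI ("sym"))]
					UNSPEC_MOVE_PIC))))

   i.e. the address is loaded from the GOT slot addressed off PICREG.  */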
/* Stack frame layout.  */

/* For a given REGNO, determine whether it must be saved in the function
   prologue.  IS_INTHANDLER specifies whether we're generating a normal
   prologue or an interrupt/exception one.  */

static bool
must_save_p (bool is_inthandler, unsigned regno)
{
  if (D_REGNO_P (regno))
    {
      bool is_eh_return_reg = false;
      if (crtl->calls_eh_return)
	{
	  unsigned j;
	  for (j = 0; ; j++)
	    {
	      unsigned test = EH_RETURN_DATA_REGNO (j);
	      if (test == INVALID_REGNUM)
		break;
	      if (test == regno)
		is_eh_return_reg = true;
	    }
	}

      return (is_eh_return_reg
	      || (df_regs_ever_live_p (regno)
		  && !fixed_regs[regno]
		  && (is_inthandler || !call_used_regs[regno])));
    }
  else if (P_REGNO_P (regno))
    {
      return ((df_regs_ever_live_p (regno)
	       && !fixed_regs[regno]
	       && (is_inthandler || !call_used_regs[regno]))
	      || (!TARGET_FDPIC
		  && regno == PIC_OFFSET_TABLE_REGNUM
		  && (crtl->uses_pic_offset_table
		      || (TARGET_ID_SHARED_LIBRARY && !current_function_is_leaf))));
    }
  else
    return ((is_inthandler || !call_used_regs[regno])
	    && (df_regs_ever_live_p (regno)
		|| (!leaf_function_p () && call_used_regs[regno])));
}
/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.
   If IS_INTHANDLER, then everything that is live must be saved, even
   if normally call-clobbered.
   If CONSECUTIVE, return the number of registers we can save in one
   instruction with a push/pop multiple instruction.  */

static int
n_dregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_R7 + 1; i-- != REG_R0;)
    {
      if (must_save_p (is_inthandler, i))
	count++;
      else if (consecutive)
	return count;
    }
  return count;
}

/* Like n_dregs_to_save, but compute the number of PREGS to save.  */

static int
n_pregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_P5 + 1; i-- != REG_P0;)
    if (must_save_p (is_inthandler, i))
      count++;
    else if (consecutive)
      return count;
  return count;
}
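/* Example of the CONSECUTIVE distinction (liveness hypothetical): if R7,
   R6 and R4 must be saved but R5 need not be, the non-consecutive count
   is 3 while the consecutive count is 2 (R7 and R6), because a push
   multiple can only name a contiguous range of registers ending at R7.  */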
/* Determine if we are going to save the frame pointer in the prologue.  */

static bool
must_save_fp_p (void)
{
  return frame_pointer_needed || df_regs_ever_live_p (REG_FP);
}

static bool
stack_frame_needed_p (void)
{
  /* EH return puts a new return address into the frame using an
     address relative to the frame pointer.  */
  if (crtl->calls_eh_return)
    return true;
  return frame_pointer_needed;
}
/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
{
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int dregno, pregno;
  int total_consec = ndregs_consec + npregs_consec;
  int i, d_to_save;

  if (saveall || is_inthandler)
    {
      rtx insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  if (total_consec != 0)
    {
      rtx insn;
      rtx val = GEN_INT (-total_consec * 4);
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));

      XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
					    UNSPEC_PUSH_MULTIPLE);
      XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
							gen_rtx_PLUS (Pmode,
								      spreg,
								      val));
      RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
      d_to_save = ndregs_consec;
      dregno = REG_R7 + 1 - ndregs_consec;
      pregno = REG_P5 + 1 - npregs_consec;
      for (i = 0; i < total_consec; i++)
	{
	  rtx memref = gen_rtx_MEM (word_mode,
				    gen_rtx_PLUS (Pmode, spreg,
						  GEN_INT (- i * 4 - 4)));
	  rtx subpat;
	  if (d_to_save > 0)
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   dregno++));
	      d_to_save--;
	    }
	  else
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   pregno++));
	    }
	  XVECEXP (pat, 0, i + 1) = subpat;
	  RTX_FRAME_RELATED_P (subpat) = 1;
	}
      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
    {
      if (must_save_p (is_inthandler, dregno))
	{
	  rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  ndregs--;
	}
    }
  for (pregno = REG_P0; npregs != npregs_consec; pregno++)
    {
      if (must_save_p (is_inthandler, pregno))
	{
	  rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  npregs--;
	}
    }
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	rtx insn;
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	else
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;
      }
}
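/* For the consecutive registers saved above, the PARALLEL assembles to a
   single push multiple, e.g. (illustrative):

     [--SP] = (R7:4, P5:3);

   which stores R4..R7 and P3..P5 and adjusts SP by -28 in one insn; any
   remaining non-consecutive registers are pushed one at a time.  */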
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int total_consec = ndregs_consec + npregs_consec;
  int i, regno;
  rtx insn;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  for (i = REG_CC - 1; i > REG_P7; i--)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	if (i == REG_A0 || i == REG_A1)
	  {
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  }
	else
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  regno = REG_P5 - npregs_consec;
  for (; npregs != npregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  npregs--;
	}
    }
  regno = REG_R7 - ndregs_consec;
  for (; ndregs != ndregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  ndregs--;
	}
    }

  if (total_consec != 0)
    {
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
      XVECEXP (pat, 0, 0)
	= gen_rtx_SET (VOIDmode, spreg,
		       gen_rtx_PLUS (Pmode, spreg,
				     GEN_INT (total_consec * 4)));

      if (npregs_consec > 0)
	regno = REG_P5 + 1;
      else
	regno = REG_R7 + 1;

      for (i = 0; i < total_consec; i++)
	{
	  rtx addr = (i > 0
		      ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
		      : spreg);
	  rtx memref = gen_rtx_MEM (word_mode, addr);

	  regno--;
	  XVECEXP (pat, 0, i + 1)
	    = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

	  if (npregs_consec > 0)
	    {
	      if (--npregs_consec == 0)
		regno = REG_R7 + 1;
	    }
	}

      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  if (saveall || is_inthandler)
    emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
}
/* Perform any actions needed for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prologue to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   Blackfin specific:
   - The VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we don't have
     to leave any extra space.
   - Now, the va_start pointer can access all arguments from the stack.  */

static void
setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			tree type ATTRIBUTE_UNUSED, int *pretend_size,
			int no_rtl)
{
  rtx mem;
  int i;

  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  for (i = cum->words + 1; i < max_arg_registers; i++)
    {
      mem = gen_rtx_MEM (Pmode,
			 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }
}
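/* As an illustration (hypothetical signature): for

     int f (int fmt, ...);

   cum->words is 1 after the named argument, so the loop above stores R1
   at [ARGP + 4] and R2 at [ARGP + 8]; R0 holds the named argument and is
   never spilled here.  */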
/* Value should be nonzero if functions must have frame pointers.
   Zero means the frame pointer need not be set up (and parms may
   be accessed via the stack pointer) in functions that seem suitable.  */

int
bfin_frame_pointer_required (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  if (fkind != SUBROUTINE)
    return 1;

  /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
     so we have to override it for non-leaf functions.  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
    return 1;

  return 0;
}
/* Return the number of registers pushed during the prologue.  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
	      || (is_inthandler && !current_function_is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
  int n = ndregs + npregs;
  int i;

  if (all || stack_frame_needed_p ())
    /* We use a LINK instruction in this case.  */
    n += 2;
  else
    {
      if (must_save_fp_p ())
	n++;
      if (! current_function_is_leaf)
	n++;
    }

  if (fkind != SUBROUTINE || all)
    /* Increment once for ASTAT.  */
    n++;

  if (fkind != SUBROUTINE)
    {
      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
	n++;
    }

  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
	|| (fkind != SUBROUTINE
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      n += i == REG_A0 || i == REG_A1 ? 2 : 1;

  return n;
}
/* Return the offset between two registers, one to be eliminated, and the other
   its replacement, at the start of a routine.  */

HOST_WIDE_INT
bfin_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  if (from == ARG_POINTER_REGNUM)
    offset = n_regs_saved_by_prologue () * 4;

  if (to == STACK_POINTER_REGNUM)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
	offset += crtl->outgoing_args_size;
      else if (crtl->outgoing_args_size)
	offset += FIXED_STACK_AREA;

      offset += get_frame_size ();
    }

  return offset;
}
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx insn;
  rtx cst = GEN_INT (constant);

  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
	 confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
/* Generate efficient code to add a value to a P register.
   Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   a sibcall epilogue.  */

static void
add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7-bit value
     in one instruction.  */
  if (value > 120 || value < -120)
    {
      rtx tmpreg;
      rtx tmpreg2;
      rtx insn;

      tmpreg2 = NULL_RTX;

      /* For prologue or normal epilogue, P1 can be safely used
	 as the temporary register.  For sibcall epilogue, we try to find
	 a call used P register, which will be restored in epilogue.
	 If we cannot find such a P register, we have to use one I register
	 to help us.  */

      if (epilogue_p >= 0)
	tmpreg = gen_rtx_REG (SImode, REG_P1);
      else
	{
	  int i;
	  for (i = REG_P0; i <= REG_P5; i++)
	    if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
		|| (!TARGET_FDPIC
		    && i == PIC_OFFSET_TABLE_REGNUM
		    && (crtl->uses_pic_offset_table
			|| (TARGET_ID_SHARED_LIBRARY
			    && ! current_function_is_leaf))))
	      break;
	  if (i <= REG_P5)
	    tmpreg = gen_rtx_REG (SImode, i);
	  else
	    {
	      tmpreg = gen_rtx_REG (SImode, REG_P1);
	      tmpreg2 = gen_rtx_REG (SImode, REG_I0);
	      emit_move_insn (tmpreg2, tmpreg);
	    }
	}

      if (frame)
	frame_related_constant_load (tmpreg, value, TRUE);
      else
	insn = emit_move_insn (tmpreg, GEN_INT (value));

      insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
      if (frame)
	RTX_FRAME_RELATED_P (insn) = 1;

      if (tmpreg2 != NULL_RTX)
	emit_move_insn (tmpreg, tmpreg2);
    }
  else
    do
      {
	int size = value;
	rtx insn;

	if (size > 60)
	  size = 60;
	else if (size < -60)
	  /* We could use -62, but that would leave the stack unaligned, so
	     it's no good.  */
	  size = -60;

	insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
	if (frame)
	  RTX_FRAME_RELATED_P (insn) = 1;
	value -= size;
      }
    while (value != 0);
}
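/* Two illustrative cases: add_to_reg (sp, -200, 1, 0) loads -200 into P1
   and emits SP = SP + P1, since -200 is outside the signed 7-bit range,
   while add_to_reg (sp, -100, 1, 0) emits SP += -60 followed by
   SP += -40.  */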
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;

  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
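/* Illustrative case: for a 300000-byte frame we emit LINK with the
   maximum frame constant of 262140, then load -37860 into P1 and add it
   to SP to allocate the remainder.  */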
/* Return the number of bytes we must reserve for outgoing arguments
   in the current function's stack frame.  */

static HOST_WIDE_INT
arg_area_size (void)
{
  if (crtl->outgoing_args_size)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
	return crtl->outgoing_args_size;
      else
	return FIXED_STACK_AREA;
    }
  return 0;
}
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  if (all || stack_frame_needed_p ()
      || (must_save_fp_p () && ! current_function_is_leaf))
    emit_link_insn (spreg, frame_size);
  else
    {
      if (! current_function_is_leaf)
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       bfin_rets_rtx);
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (must_save_fp_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       gen_rtx_REG (Pmode, REG_FP));
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      add_to_reg (spreg, -frame_size, 1, 0);
    }
}
/* Like do_link, but used for epilogues to deallocate the stack frame.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   a sibcall epilogue.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
{
  frame_size += arg_area_size ();

  if (all || stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      add_to_reg (spreg, frame_size, 0, epilogue_p);
      if (must_save_fp_p ())
	{
	  rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
	  emit_move_insn (fpreg, postinc);
	  emit_insn (gen_rtx_USE (VOIDmode, fpreg));
	}
      if (! current_function_is_leaf)
	{
	  emit_move_insn (bfin_rets_rtx, postinc);
	  emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
	}
    }
}
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!current_function_is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
					: fkind == NMI_HANDLER ? REG_RETN
					: REG_RETI));
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
    {
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);

      insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      insn = emit_move_insn (r1reg, spreg);
      insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
    }
}
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
{
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all, 1);

  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
					: fkind == NMI_HANDLER ? REG_RETN
					: REG_RETI));
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!current_function_is_leaf)
    all = true;

  expand_epilogue_reg_restore (spreg, all, true);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
}
/* Used while emitting the prologue to generate code to load the correct value
   into the PIC register, which is passed in DEST.  */

static rtx
bfin_load_pic_reg (rtx dest)
{
  struct cgraph_local_info *i = NULL;
  rtx addr, insn;

  if (flag_unit_at_a_time)
    i = cgraph_local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  if (i && i->local)
    return pic_offset_table_rtx;

  if (bfin_lib_id_given)
    addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
  else
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
			 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
					 UNSPEC_LIBRARY_OFFSET));
  insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  return dest;
}
/* Generate RTL for the prologue of the current function.  */

void
bfin_expand_prologue (void)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind, all);
      return;
    }

  if (crtl->limit_stack
      || TARGET_STACK_CHECK_L1)
    {
      HOST_WIDE_INT offset
	= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
					   STACK_POINTER_REGNUM);
      rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
      rtx p2reg = gen_rtx_REG (Pmode, REG_P2);

      if (!lim)
	{
	  emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
	  emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
	  lim = p2reg;
	}
      if (GET_CODE (lim) == SYMBOL_REF)
	{
	  if (TARGET_ID_SHARED_LIBRARY)
	    {
	      rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
	      rtx val;
	      pic_reg_loaded = bfin_load_pic_reg (p2reg);
	      val = legitimize_pic_address (stack_limit_rtx, p1reg,
					    pic_reg_loaded);
	      emit_move_insn (p1reg, val);
	      frame_related_constant_load (p2reg, offset, FALSE);
	      emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
	      lim = p2reg;
	    }
	  else
	    {
	      rtx limit = plus_constant (lim, offset);
	      emit_move_insn (p2reg, limit);
	      lim = p2reg;
	    }
	}
      else
	{
	  if (lim != p2reg)
	    emit_move_insn (p2reg, lim);
	  add_to_reg (p2reg, offset, 0, 0);
	  lim = p2reg;
	}
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
    }
  expand_prologue_reg_save (spreg, all, false);

  do_link (spreg, frame_size, false);

  if (TARGET_ID_SHARED_LIBRARY
      && !TARGET_SEP_DATA
      && (crtl->uses_pic_offset_table
	  || !current_function_is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}
/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
   eh_return pattern.  SIBCALL_P is true if this is a sibcall epilogue,
   false otherwise.  */

void
bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  int e = sibcall_p ? -1 : 1;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind, all);
      return;
    }

  do_unlink (spreg, get_frame_size (), false, e);

  expand_epilogue_reg_restore (spreg, all, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
}
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

int
bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
			   unsigned int new_reg)
{
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */

  if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
      && !df_regs_ever_live_p (new_reg))
    return 0;

  return 1;
}
/* Return the value of the return address for the frame COUNT steps up
   from the current frame, after the prologue.
   We punt for everything but the current frame by returning const0_rtx.  */

rtx
bfin_return_addr_rtx (int count)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, REG_RETS);
}
/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address,
   otherwise return NULL_RTX.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the memory reference.  */

rtx
legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
static rtx
bfin_delegitimize_address (rtx orig_x)
{
  rtx x = orig_x;

  if (GET_CODE (x) != MEM)
    return orig_x;

  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == UNSPEC
      && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    return XVECEXP (XEXP (x, 1), 0, 0);

  return orig_x;
}
/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx; we return nonzero if its addressing mode requires a
   32-bit instruction.  */

int
effective_address_32bit_p (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset;

  mode = GET_MODE (op);
  op = XEXP (op, 0);

  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
		  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  if (GET_CODE (XEXP (op, 1)) == UNSPEC)
    return 1;

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16-bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
	 are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
	return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
}
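/* Examples (illustrative): [P0 + 40] with an SImode operand fits the
   16-bit form (offsets 0..60), while [P0 + 64] or [P0 - 4] needs the
   32-bit form; [FP - 100] stays 16-bit because FP-relative SImode
   accesses accept offsets down to -128.  */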
/* Returns true if X is a memory reference using an I register.  */

bool
bfin_dsp_memref_p (rtx x)
{
  if (! MEM_P (x))
    return false;
  x = XEXP (x, 0);
  if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
      || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);
  return IREG_P (x);
}

/* Return cost of the memory address ADDR.
   All addressing modes are equally cheap on the Blackfin.  */

static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
{
  return 1;
}
/* Subroutine of print_operand; used to print a memory reference X to FILE.  */

void
print_address_operand (FILE *file, rtx x)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      output_address (XEXP (x, 0));
      fprintf (file, "+");
      output_address (XEXP (x, 1));
      break;

    case PRE_DEC:
      fprintf (file, "--");
      output_address (XEXP (x, 0));
      break;
    case POST_INC:
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      break;
    case POST_DEC:
      output_address (XEXP (x, 0));
      fprintf (file, "--");
      break;

    default:
      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
      break;
    }
}
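/* Sample output (illustrative): a POST_INC of P2 prints as "p2++", so a
   MEM wrapping it is emitted as "[p2++]" by the MEM case of
   print_operand below.  */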
/* Adding intp DImode support by Tony
 * -- Q: (low  word)
 * -- R: (high word)
 */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode;

  if (code == '!')
    {
      if (GET_MODE (current_output_insn) == SImode)
	fprintf (file, " ||");
      else
	fprintf (file, ";");
      return;
    }

  mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					 /* Reverse logic.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  if (code == 'h')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'd')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'w')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.w", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'x')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.x", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'v')
	    {
	      if (REGNO (x) == REG_A0)
		fprintf (file, "AV0");
	      else if (REGNO (x) == REG_A1)
		fprintf (file, "AV1");
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'D')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'H')
	    {
	      if ((mode == DImode || mode == DFmode) && REG_P (x))
		fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'T')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  fputc ('[', file);
	  x = XEXP (x, 0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  if (code == 'M')
	    {
	      switch (INTVAL (x))
		{
		case MACFLAG_NONE:
		  break;
		case MACFLAG_FU:
		  fputs ("(FU)", file);
		  break;
		case MACFLAG_T:
		  fputs ("(T)", file);
		  break;
		case MACFLAG_TFU:
		  fputs ("(TFU)", file);
		  break;
		case MACFLAG_W32:
		  fputs ("(W32)", file);
		  break;
		case MACFLAG_IS:
		  fputs ("(IS)", file);
		  break;
		case MACFLAG_IU:
		  fputs ("(IU)", file);
		  break;
		case MACFLAG_IH:
		  fputs ("(IH)", file);
		  break;
		case MACFLAG_M:
		  fputs ("(M)", file);
		  break;
		case MACFLAG_IS_M:
		  fputs ("(IS,M)", file);
		  break;
		case MACFLAG_ISS2:
		  fputs ("(ISS2)", file);
		  break;
		case MACFLAG_S2RND:
		  fputs ("(S2RND)", file);
		  break;
		default:
		  gcc_unreachable ();
		}
	      break;
	    }
	  else if (code == 'b')
	    {
	      if (INTVAL (x) == 0)
		fputs ("+=", file);
	      else if (INTVAL (x) == 1)
		fputs ("-=", file);
	      else
		gcc_unreachable ();
	      break;
	    }
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  else if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  else if (code == 'N')
	    x = GEN_INT (-INTVAL (x));
	  else if (code == 'X')
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* Fall through.  */

	case SYMBOL_REF:
	  output_addr_const (file, x);
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_MOVE_FDPIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT17M4");
	      break;

	    case UNSPEC_FUNCDESC_GOT17M4:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@FUNCDESC_GOT17M4");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
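/* Illustrative uses of the constant modifiers above: for a CONST_INT
   with value 0x12345678, %d emits the high 16 bits (0x1234) and %h the
   low 16 bits (0x5678); %Z rewrites a frame size N as -8 - N for LINK
   insns.  */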
/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.
   The VDSP C Compiler manual (our ABI) says that the
   first 3 words of arguments are passed in R0, R1 and R2.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
		      rtx libname ATTRIBUTE_UNUSED)
{
  static CUMULATIVE_ARGS zero_cum;

  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.  */

  cum->nregs = max_arg_registers;
  cum->arg_regs = arg_regs;

  cum->call_cookie = CALL_NORMAL;
  /* Check for a longcall or shortcall attribute.  */
  if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_SHORT;
  else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_LONG;

  return;
}
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
		      int named ATTRIBUTE_UNUSED)
{
  int count, bytes, words;

  bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  cum->words += words;
  cum->nregs -= words;

  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->arg_regs = NULL;
    }
  else
    {
      for (count = 1; count <= words; count++)
	cum->arg_regs++;
    }

  return;
}
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

rtx
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
	      int named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (cum->call_cookie);

  if (bytes == -1)
    return NULL_RTX;

  if (cum->nregs)
    return gen_rtx_REG (mode, *(cum->arg_regs));

  return NULL_RTX;
}
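/* As an illustration (hypothetical signature): for
   f (int a, long long b, int c), A is passed in R0, B occupies the
   R1/R2 pair, and C has run out of argument registers and goes on the
   stack.  */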
/* For an arg passed partly in registers and partly in memory,
   this is the number of bytes passed in registers.
   For args passed entirely in registers or entirely in memory, zero.

   Refer to the VDSP C Compiler manual (our ABI).
   The first 3 words are in registers, so if an argument is larger
   than the registers available, it will span the registers and the
   stack.  */

static int
bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			tree type ATTRIBUTE_UNUSED,
			bool named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int bytes_left = cum->nregs * UNITS_PER_WORD;

  if (bytes == -1)
    return 0;

  if (bytes_left == 0)
    return 0;
  if (bytes > bytes_left)
    return bytes_left;
  return 0;
}

/* Variable sized types are passed by reference.  */

static bool
bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			const_tree type, bool named ATTRIBUTE_UNUSED)
{
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
}
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   TARGET_RETURN_IN_MEMORY.  */

static bool
bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  int size = int_size_in_bytes (type);
  return size > 2 * UNITS_PER_WORD || size == -1;
}

/* Register in which address to store a structure value
   is passed to a function.  */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, REG_P0);
}

/* Return true when register may be used to pass function parameters.  */

bool
function_arg_regno_p (int n)
{
  int i;
  for (i = 0; arg_regs[i] != -1; i++)
    if (n == arg_regs[i])
      return true;
  return false;
}
/* Returns 1 if OP contains a symbol reference.  */

int
symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
			      tree exp ATTRIBUTE_UNUSED)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  if (fkind != SUBROUTINE)
    return false;
  if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
    return true;

  /* When compiling for ID shared libraries, can't sibcall a local function
     from a non-local function, because the local function thinks it does
     not need to reload P5 in the prologue, but the sibcall will pop P5 in the
     sibcall epilogue, and we end up with the wrong value in P5.  */

  if (!flag_unit_at_a_time || decl == NULL)
    /* Not enough information.  */
    return false;

  {
    struct cgraph_local_info *this_func, *called_func;

    this_func = cgraph_local_info (current_function_decl);
    called_func = cgraph_local_info (decl);
    return !called_func->local || this_func->local;
  }
}
/* Emit RTL insns to initialize the variable parts of a trampoline at
   TRAMP.  FNADDR is an RTX for the address of the function's pure
   code.  CXT is an RTX for the static chain value for the function.  */

void
initialize_trampoline (rtx tramp, rtx fnaddr, rtx cxt)
{
  rtx t1 = copy_to_reg (fnaddr);
  rtx t2 = copy_to_reg (cxt);
  rtx addr;
  int i = 0;

  if (TARGET_FDPIC)
    {
      rtx a = memory_address (Pmode, plus_constant (tramp, 8));
      addr = memory_address (Pmode, tramp);
      emit_move_insn (gen_rtx_MEM (SImode, addr), a);
      i = 8;
    }

  addr = memory_address (Pmode, plus_constant (tramp, i + 2));
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
  emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
  addr = memory_address (Pmode, plus_constant (tramp, i + 6));
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));

  addr = memory_address (Pmode, plus_constant (tramp, i + 10));
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
  emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
  addr = memory_address (Pmode, plus_constant (tramp, i + 14));
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
}
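/* A sketch of the code the halfword stores above patch (offsets are for
   the non-FDPIC case; the canned template itself lives in bfin.h):

     P1.L = <fnaddr low>;   P1.H = <fnaddr high>;
     P2.L = <cxt low>;      P2.H = <cxt high>;
     JUMP (P1);

   so the static chain ends up in P2 before jumping to the target.  */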
/* Emit insns to move operands[1] into operands[0].  */

void
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
					  TARGET_FDPIC ? OUR_FDPIC_REG
					  : pic_offset_table_rtx);
}
/* Expand a move operation in mode MODE.  The operands are in OPERANDS.
   Returns true if no further code must be generated, false if the caller
   should generate an insn to move OPERANDS[1] to OPERANDS[0].  */

bool
expand_move (rtx *operands, enum machine_mode mode)
{
  rtx op = operands[1];
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
      && SYMBOLIC_CONST (op))
    emit_pic_move (operands, mode);
  else if (mode == SImode && GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
	   && !bfin_legitimate_constant_p (op))
    {
      rtx dest = operands[0];
      rtx op0, op1;
      gcc_assert (!reload_in_progress && !reload_completed);
      op = XEXP (op, 0);
      op0 = force_reg (mode, XEXP (op, 0));
      op1 = XEXP (op, 1);
      if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
	op1 = force_reg (mode, op1);
      if (GET_CODE (dest) == MEM)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_addsi3 (dest, op0, op1));
      if (dest == operands[0])
	return true;
      operands[1] = dest;
    }
  /* Don't generate memory->memory or constant->memory moves, go through a
     register.  */
  else if ((reload_in_progress | reload_completed) == 0
	   && GET_CODE (operands[0]) == MEM
	   && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);

  return false;
}
/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant, or
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
   split and "num" is its length.  lo_half and hi_half are output arrays
   that parallel "operands".  */

void
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
{
  while (num--)
    {
      rtx op = operands[num];

      /* simplify_subreg refuses to split volatile memory addresses,
	 but we still have to handle it.  */
      if (GET_CODE (op) == MEM)
	{
	  lo_half[num] = adjust_address (op, SImode, 0);
	  hi_half[num] = adjust_address (op, SImode, 4);
	}
      else
	{
	  lo_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 0);
	  hi_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 4);
	}
    }
}
static bool
bfin_longcall_p (rtx op, int call_cookie)
{
  gcc_assert (GET_CODE (op) == SYMBOL_REF);
  if (call_cookie & CALL_SHORT)
    return false;
  if (call_cookie & CALL_LONG)
    return true;
  if (TARGET_LONG_CALLS)
    return true;
  return false;
}
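/* Illustrative declarations (hypothetical user code) that set the cookie
   bits tested above:

     void near_fn (void) __attribute__ ((shortcall));
     void far_fn (void) __attribute__ ((longcall));

   The shortcall bit wins even under -mlong-calls; with neither attribute,
   TARGET_LONG_CALLS decides.  */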
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared by init_cumulative_args.
   SIBCALL is nonzero if this is a sibling call.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  int nelts = 2 + !!sibcall;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      int caller_has_l1_text, callee_has_l1_text;

      caller_has_l1_text = callee_has_l1_text = 0;

      if (lookup_attribute ("l1_text",
			    DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_has_l1_text = 1;

      if (GET_CODE (callee) == SYMBOL_REF
	  && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee))
	  && lookup_attribute
	       ("l1_text",
		DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	callee_has_l1_text = 1;

      if (GET_CODE (callee) != SYMBOL_REF
	  || bfin_longcall_p (callee, INTVAL (cookie))
	  || (GET_CODE (callee) == SYMBOL_REF
	      && !SYMBOL_REF_LOCAL_P (callee)
	      && TARGET_INLINE_PLT)
	  || caller_has_l1_text != callee_has_l1_text
	  || (caller_has_l1_text && callee_has_l1_text
	      && (GET_CODE (callee) != SYMBOL_REF
		  || !SYMBOL_REF_LOCAL_P (callee))))
	{
	  rtx addr = callee;
	  if (! address_operand (addr, Pmode))
	    addr = force_reg (Pmode, addr);

	  fnaddr = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (fnaddr, addr));
	  fnaddr = gen_rtx_MEM (Pmode, fnaddr);

	  picreg = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (picreg,
					  plus_constant (addr, 4)));
	}

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
	    && GET_CODE (callee) != SYMBOL_REF)
	   || (GET_CODE (callee) == SYMBOL_REF
	       && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
		   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */

int
hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Allow only dregs to store value of mode HI or QI.  */
  enum reg_class class = REGNO_REG_CLASS (regno);

  if (mode == CCmode)
    return 0;

  if (mode == V2HImode)
    return D_REGNO_P (regno);
  if (class == CCREGS)
    return mode == BImode;
  if (mode == PDImode || mode == V2PDImode)
    return regno == REG_A0 || regno == REG_A1;

  /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
     up with a bad register class (such as ALL_REGS) for DImode.  */
  if (mode == DImode)
    return regno < REG_M3;

  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}
/* Implements target hook vector_mode_supported_p.  */

static bool
bfin_vector_mode_supported_p (enum machine_mode mode)
{
  return mode == V2HImode;
}
/* Return the cost of moving data from a register in class CLASS1 to
   one in class CLASS2.  A cost of 2 is the default.  */

int
bfin_register_move_cost (enum machine_mode mode,
			 enum reg_class class1, enum reg_class class2)
{
  /* These need secondary reloads, so they're more expensive.  */
  if ((class1 == CCREGS && class2 != DREGS)
      || (class1 != DREGS && class2 == CCREGS))
    return 4;

  /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
  if (optimize_size)
    return 2;

  /* There are some stalls involved when moving from a DREG to a different
     class reg, and using the value in one of the following instructions.
     Attempt to model this by slightly discouraging such moves.  */
  if (class1 == DREGS && class2 != DREGS)
    return 2 * 2;

  if (GET_MODE_CLASS (mode) == MODE_INT)
    {
      /* Discourage trying to use the accumulators.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
	  || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
	  || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
	  || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
	return 20;
    }
  return 2;
}
/* Return the cost of moving data of mode M between a
   register and memory.  A value of 2 is the default; this cost is
   relative to those in `REGISTER_MOVE_COST'.

   ??? In theory L1 memory has single-cycle latency.  We should add a switch
   that tells the compiler whether we expect to use only L1 memory for the
   program; it'll make the costs more accurate.  */

int
bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
		       enum reg_class class,
		       int in ATTRIBUTE_UNUSED)
{
  /* Make memory accesses slightly more expensive than any register-register
     move.  Also, penalize non-DP registers, since they need secondary
     reloads to load and store.  */
  if (! reg_class_subset_p (class, DPREGS))
    return 10;

  return 8;
}
/* Inform reload about cases where moving X with a mode MODE to a register in
   CLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  */

static enum reg_class
bfin_secondary_reload (bool in_p, rtx x, enum reg_class class,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! satisfies_constraint_Ks7 (op2);

      if (class == PREGS || class == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((class == DREGS || class == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
    return (class == DREGS || class == AREGS || class == EVEN_AREGS
	    || class == ODD_AREGS
	    ? NO_REGS : DREGS);

  if (class == AREGS || class == EVEN_AREGS || class == ODD_AREGS)
    {
      if (code == MEM)
	{
	  sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;
	  return NO_REGS;
	}

      if (x != const0_rtx && x_class != DREGS)
	return DREGS;
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (class == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && class != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (class, default_class))
      return default_class;

  return NO_REGS;
}
2344 /* Implement TARGET_HANDLE_OPTION. */
2347 bfin_handle_option (size_t code, const char *arg, int value)
2351 case OPT_mshared_library_id_:
2352 if (value > MAX_LIBRARY_ID)
2353 error ("-mshared-library-id=%s is not between 0 and %d",
2354 arg, MAX_LIBRARY_ID);
2355 bfin_lib_id_given = 1;
2364 while ((p = bfin_cpus[i].name) != NULL)
2366 if (strncmp (arg, p, strlen (p)) == 0)
2373 error ("-mcpu=%s is not valid", arg);
2377 bfin_cpu_type = bfin_cpus[i].type;
2379 q = arg + strlen (p);
2381 cputype_selected = true;
2385 bfin_si_revision = bfin_cpus[i].si_revision;
2386 bfin_workarounds |= bfin_cpus[i].workarounds;
2388 else if (strcmp (q, "-none") == 0)
2389 bfin_si_revision = -1;
2390 else if (strcmp (q, "-any") == 0)
2392 bfin_si_revision = 0xffff;
2393 while (bfin_cpus[i].type == bfin_cpu_type)
2395 bfin_workarounds |= bfin_cpus[i].workarounds;
2401 unsigned int si_major, si_minor;
2404 rev_len = strlen (q);
2406 if (sscanf (q, "-%u.%u%n", &si_major, &si_minor, &n) != 2
2408 || si_major > 0xff || si_minor > 0xff)
2410 invalid_silicon_revision:
2411 error ("-mcpu=%s has invalid silicon revision", arg);
2415 bfin_si_revision = (si_major << 8) | si_minor;
2417 while (bfin_cpus[i].type == bfin_cpu_type
2418 && bfin_cpus[i].si_revision != bfin_si_revision)
2421 if (bfin_cpus[i].type != bfin_cpu_type)
2422 goto invalid_silicon_revision;
2424 bfin_workarounds |= bfin_cpus[i].workarounds;
2427 if (bfin_cpu_type == BFIN_CPU_BF561)
2428 warning (0, "bf561 support is incomplete");
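/* Illustrative sketch (not part of the port; guarded by #if 0 as
   documentation only): how a "-mcpu=bf532-0.5" style suffix decomposes
   into the packed silicon revision used by bfin_si_revision, mirroring
   the sscanf pattern above.  The helper name is hypothetical.  */
#if 0
#include <stdio.h>
#include <string.h>

/* Pack "-MAJOR.MINOR" into (major << 8) | minor; return -1 on a
   malformed or out-of-range revision.  */
static int
parse_si_revision (const char *q)
{
  unsigned int si_major, si_minor;
  int n, rev_len = (int) strlen (q);

  if (sscanf (q, "-%u.%u%n", &si_major, &si_minor, &n) != 2
      || n != rev_len
      || si_major > 0xff || si_minor > 0xff)
    return -1;
  return (int) ((si_major << 8) | si_minor);
}

/* parse_si_revision ("-0.5") == 0x0005; parse_si_revision ("-1.2") == 0x0102;
   parse_si_revision ("-0.5x") == -1 because of the trailing junk.  */
#endif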
2438 static struct machine_function *
2439 bfin_init_machine_status (void)
2441 struct machine_function *f;
2443 f = ggc_alloc_cleared (sizeof (struct machine_function));
2448 /* Implement the macro OVERRIDE_OPTIONS. */
2451 override_options (void)
2453 if (bfin_csync_anomaly == 1)
2454 bfin_workarounds |= WA_SPECULATIVE_SYNCS;
2455 else if (bfin_csync_anomaly == 0)
2456 bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;
2458 if (bfin_specld_anomaly == 1)
2459 bfin_workarounds |= WA_SPECULATIVE_LOADS;
2460 else if (bfin_specld_anomaly == 0)
2461 bfin_workarounds &= ~WA_SPECULATIVE_LOADS;
2463 if (!cputype_selected)
2464 bfin_workarounds |= WA_RETS;
2466 if (TARGET_OMIT_LEAF_FRAME_POINTER)
2467 flag_omit_frame_pointer = 1;
2469 /* Library identification */
2470 if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
2471 error ("-mshared-library-id= specified without -mid-shared-library");
2473 if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
2474 error ("can't use multiple stack checking methods together");
2476 if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
2477 error ("ID shared libraries and FD-PIC mode can't be used together");
2479 /* Don't allow the user to specify -mid-shared-library and -msep-data
2480 together, as it makes little sense from a user's point of view... */
2481 if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
2482 error ("cannot specify both -msep-data and -mid-shared-library");
2483 /* ... internally, however, it's nearly the same. */
2484 if (TARGET_SEP_DATA)
2485 target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;
2487 if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
2490 /* There is no single unaligned SI op for PIC code. Sometimes we
2491 need to use ".4byte" and sometimes we need to use ".picptr".
2492 See bfin_assemble_integer for details. */
2494 targetm.asm_out.unaligned_op.si = 0;
2496 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2497 since we don't support it and it'll just break. */
2498 if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
2501 flag_schedule_insns = 0;
2503 /* Passes after sched2 can break the helpful TImode annotations that
2504 haifa-sched puts on every insn. Just do scheduling in reorg. */
2505 bfin_flag_schedule_insns2 = flag_schedule_insns_after_reload;
2506 flag_schedule_insns_after_reload = 0;
2508 init_machine_status = bfin_init_machine_status;
2511 /* Return the destination address of BRANCH.
2512 We need to use this instead of get_attr_length, because the
2513 cbranch_with_nops pattern conservatively sets its length to 6, and
2514 we still prefer to use shorter sequences. */
2517 branch_dest (rtx branch)
2521 rtx pat = PATTERN (branch);
2522 if (GET_CODE (pat) == PARALLEL)
2523 pat = XVECEXP (pat, 0, 0);
2524 dest = SET_SRC (pat);
2525 if (GET_CODE (dest) == IF_THEN_ELSE)
2526 dest = XEXP (dest, 1);
2527 dest = XEXP (dest, 0);
2528 dest_uid = INSN_UID (dest);
2529 return INSN_ADDRESSES (dest_uid);
2532 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2533 it's a branch that's predicted taken. */
2536 cbranch_predicted_taken_p (rtx insn)
2538 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2542 int pred_val = INTVAL (XEXP (x, 0));
2544 return pred_val >= REG_BR_PROB_BASE / 2;
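/* Example: REG_BR_PROB_BASE is 10000, so a REG_BR_PROB note value of 6000
   marks a branch with a 60% taken probability and makes this predicate
   true, while 4000 does not.  */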
2550 /* Templates for use by asm_conditional_branch. */
2552 static const char *ccbranch_templates[][3] = {
2553 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
2554 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
2555 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
2556 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
2559 /* Output INSN, which is a conditional branch instruction with operands OPERANDS.
2562 We deal with the various forms of conditional branches that can be generated
2563 by bfin_reorg to prevent the hardware from doing speculative loads, by
2564 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2565 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2566 Either of these is only necessary if the branch is short, otherwise the
2567 template we use ends in an unconditional jump which flushes the pipeline anyway. */
2571 asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
2573 int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
2574 /* Note: the offset for sequences like "if cc jump; jump.[sl] offset"
2575 is measured from the start of the "if cc" rather than from the jump, so
2576 the range for jump.s is (-4094, 4096) instead of (-4096, 4094). */
2578 int len = (offset >= -1024 && offset <= 1022 ? 0
2579 : offset >= -4094 && offset <= 4096 ? 1
2581 int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
2582 int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
2583 output_asm_insn (ccbranch_templates[idx][len], operands);
2584 gcc_assert (n_nops == 0 || !bp);
2586 while (n_nops-- > 0)
2587 output_asm_insn ("nop;", NULL);
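/* A minimal standalone restatement of the length selection above
   (hypothetical helper; the ranges are exactly those computed for "len").  */
#if 0
static int
cc_branch_template_index (int offset)
{
  if (offset >= -1024 && offset <= 1022)
    return 0;	/* short conditional branch reaches directly */
  if (offset >= -4094 && offset <= 4096)
    return 1;	/* needs the "if cc jump 4 (bp); jump.s" form */
  return 2;	/* needs the "if cc jump 6 (bp); jump.l" form */
}
#endif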
2590 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
2591 stored in bfin_compare_op0 and bfin_compare_op1 already. */
2594 bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
2596 enum rtx_code code1, code2;
2597 rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
2598 rtx tem = bfin_cc_rtx;
2599 enum rtx_code code = GET_CODE (cmp);
2601 /* If we have a BImode input, then we already have a compare result, and
2602 do not need to emit another comparison. */
2603 if (GET_MODE (op0) == BImode)
2605 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
2606 tem = op0, code2 = code;
2611 /* bfin has these conditions */
2621 code1 = reverse_condition (code);
2625 emit_insn (gen_rtx_SET (BImode, tem,
2626 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
2629 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
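/* Example of the canonicalization above (the condition table itself is
   partly elided here): a condition the hardware lacks, such as GEU, is
   computed as its reverse, CC = op0 LTU op1, and the returned rtx then
   tests (EQ CC 0), i.e. branches on CC being false.  */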
2632 /* Return nonzero iff C, interpreted as a 32-bit unsigned constant,
2633 has exactly one bit set. */
2636 log2constp (unsigned HOST_WIDE_INT c)
2639 return c != 0 && (c & (c-1)) == 0;
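/* A minimal standalone check of the test above: c & (c - 1) clears the
   lowest set bit, so the expression accepts exactly the powers of two.
   Guarded by #if 0; documentation only.  */
#if 0
#include <assert.h>

int
main (void)
{
  assert (log2constp (0x00010000));	/* one bit set */
  assert (! log2constp (0x00018000));	/* two bits set */
  assert (! log2constp (0));		/* no bits set */
  return 0;
}
#endif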
2642 /* Returns the number of consecutive least significant zeros in the binary
2643 representation of *V.
2644 We modify *V to contain the original value arithmetically shifted right by
2645 the number of zeroes. */
2648 shiftr_zero (HOST_WIDE_INT *v)
2650 unsigned HOST_WIDE_INT tmp = *v;
2651 unsigned HOST_WIDE_INT sgn;
2657 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
2658 while ((tmp & 0x1) == 0 && n <= 32)
2660 tmp = (tmp >> 1) | sgn;
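/* Standalone sketch of the routine above (hypothetical name; it assumes
   >> on a signed type shifts arithmetically, which the code above instead
   guarantees explicitly by re-inserting the saved sign bit).  E.g. for
   *v == 0x0000ff00 it returns 8 and leaves *v == 0xff.  */
#if 0
static int
trailing_zero_count (long long *v)
{
  long long tmp = *v;
  int n = 0;

  if (tmp == 0)
    return 0;
  while ((tmp & 1) == 0 && n <= 32)
    {
      tmp >>= 1;
      n++;
    }
  *v = tmp;
  return n;
}
#endif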
2667 /* After reload, split the load of an immediate constant. OPERANDS are the
2668 operands of the movsi_insn pattern which we are splitting. We return
2669 nonzero if we emitted a sequence to load the constant, zero if we emitted
2670 nothing because we want to use the splitter's default sequence. */
2673 split_load_immediate (rtx operands[])
2675 HOST_WIDE_INT val = INTVAL (operands[1]);
2677 HOST_WIDE_INT shifted = val;
2678 HOST_WIDE_INT shifted_compl = ~val;
2679 int num_zero = shiftr_zero (&shifted);
2680 int num_compl_zero = shiftr_zero (&shifted_compl);
2681 unsigned int regno = REGNO (operands[0]);
2683 /* This case takes care of single-bit set/clear constants, which we could
2684 also implement with BITSET/BITCLR. */
2686 && shifted >= -32768 && shifted < 65536
2687 && (D_REGNO_P (regno)
2688 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2690 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2691 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2696 tmp |= -(tmp & 0x8000);
2698 /* If high word has one bit set or clear, try to use a bit operation. */
2699 if (D_REGNO_P (regno))
2701 if (log2constp (val & 0xFFFF0000))
2703 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2704 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2707 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2709 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2710 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2714 if (D_REGNO_P (regno))
2716 if (tmp >= -64 && tmp <= 63)
2718 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2719 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2723 if ((val & 0xFFFF0000) == 0)
2725 emit_insn (gen_movsi (operands[0], const0_rtx));
2726 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2730 if ((val & 0xFFFF0000) == 0xFFFF0000)
2732 emit_insn (gen_movsi (operands[0], constm1_rtx));
2733 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2738 /* Need DREGs for the remaining case. */
2743 && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
2745 /* If optimizing for size, generate a sequence that has more instructions
2746 but is shorter. */
2747 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2748 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2749 GEN_INT (num_compl_zero)));
2750 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
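/* Worked example of the first strategy above, as a standalone sketch
   (hypothetical helper).  For val = 0x00ff0000 it yields shifted = 0xff
   and num_zero = 16, so the splitter can emit "reg = 255; reg <<= 16;"
   instead of a full 32-bit constant load.  */
#if 0
static int
split_as_shifted_constant (int val, int *shifted, int *num_zero)
{
  int n = 0;

  if (val == 0)
    return 0;
  while ((val & 1) == 0)
    {
      val >>= 1;		/* assume arithmetic shift, as above */
      n++;
    }
  *shifted = val;
  *num_zero = n;
  /* Profitable iff the shifted value fits the 16-bit move used above.  */
  return *shifted >= -32768 && *shifted < 65536;
}
#endif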
2756 /* Return true if VALUE is a legitimate constant offset in a memory address
2757 for a memory operand of mode MODE. Return false if not. */
2760 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2762 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2763 int sz = GET_MODE_SIZE (mode);
2764 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2765 /* The usual offsettable_memref machinery doesn't work so well for this
2766 port, so we deal with the problem here. */
2767 if (value > 0 && sz == 8)
2769 return (v & ~(0x7fff << shift)) == 0;
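/* Standalone restatement of the common check above (the 8-byte special
   case is omitted; hypothetical helper).  An offset is accepted when its
   magnitude has bits only in the scaled 15-bit field, which also forces
   it to be a multiple of the access size: e.g. for 4-byte accesses,
   multiples of 4 up to 0x1fffc.  */
#if 0
static int
valid_offset_p (int size_bytes, long value)
{
  unsigned long v = value > 0 ? value : -value;
  int shift = size_bytes == 1 ? 0 : size_bytes == 2 ? 1 : 2;

  return (v & ~(0x7fffUL << shift)) == 0;
}
#endif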
2773 bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2774 enum rtx_code outer_code)
2777 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2779 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
2783 bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2785 switch (GET_CODE (x)) {
2787 if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
2791 if (REG_P (XEXP (x, 0))
2792 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
2793 && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
2794 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2795 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
2800 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2801 && REG_P (XEXP (x, 0))
2802 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
2805 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2806 && XEXP (x, 0) == stack_pointer_rtx
2807 && REG_P (XEXP (x, 0))
2808 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
2817 /* Decide whether we can force certain constants to memory. If we
2818 decide we can't, the caller should be able to cope with it in another way. */
2822 bfin_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
2824 /* We have only one class of non-legitimate constants, and our movsi
2825 expander knows how to handle them. Dropping these constants into the
2826 data section would only shift the problem - we'd still get relocs
2827 outside the object, in the data section rather than the text section. */
2831 /* Ensure that for any constant of the form symbol + offset, the offset
2832 remains within the object. Any other constants are ok.
2833 This ensures that flat binaries never have to deal with relocations
2834 crossing section boundaries. */
2837 bfin_legitimate_constant_p (rtx x)
2840 HOST_WIDE_INT offset;
2842 if (GET_CODE (x) != CONST)
2846 gcc_assert (GET_CODE (x) == PLUS);
2850 if (GET_CODE (sym) != SYMBOL_REF
2851 || GET_CODE (x) != CONST_INT)
2853 offset = INTVAL (x);
2855 if (SYMBOL_REF_DECL (sym) == 0)
2858 || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
2865 bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
2867 int cost2 = COSTS_N_INSNS (1);
2873 if (outer_code == SET || outer_code == PLUS)
2874 *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
2875 else if (outer_code == AND)
2876 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2877 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2878 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2879 else if (outer_code == LEU || outer_code == LTU)
2880 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2881 else if (outer_code == MULT)
2882 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2883 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2885 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2886 || outer_code == LSHIFTRT)
2887 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2888 else if (outer_code == IOR || outer_code == XOR)
2889 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2898 *total = COSTS_N_INSNS (2);
2904 if (GET_MODE (x) == SImode)
2906 if (GET_CODE (op0) == MULT
2907 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
2909 HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
2910 if (val == 2 || val == 4)
2913 *total += rtx_cost (XEXP (op0, 0), outer_code);
2914 *total += rtx_cost (op1, outer_code);
2919 if (GET_CODE (op0) != REG
2920 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2921 *total += rtx_cost (op0, SET);
2922 #if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2923 towards creating too many induction variables. */
2924 if (!reg_or_7bit_operand (op1, SImode))
2925 *total += rtx_cost (op1, SET);
2928 else if (GET_MODE (x) == DImode)
2931 if (GET_CODE (op1) != CONST_INT
2932 || !satisfies_constraint_Ks7 (op1))
2933 *total += rtx_cost (op1, PLUS);
2934 if (GET_CODE (op0) != REG
2935 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2936 *total += rtx_cost (op0, PLUS);
2941 if (GET_MODE (x) == DImode)
2950 if (GET_MODE (x) == DImode)
2957 if (GET_CODE (op0) != REG
2958 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2959 *total += rtx_cost (op0, code);
2969 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2972 if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
2973 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
2974 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
2975 || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
2982 if (GET_CODE (op0) != REG
2983 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2984 *total += rtx_cost (op0, code);
2986 if (GET_MODE (x) == DImode)
2992 if (GET_MODE (x) != SImode)
2997 if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
2998 *total += rtx_cost (XEXP (x, 1), code);
3002 if (! regorlog2_operand (XEXP (x, 1), SImode))
3003 *total += rtx_cost (XEXP (x, 1), code);
3010 if (outer_code == SET
3011 && XEXP (x, 1) == const1_rtx
3012 && GET_CODE (XEXP (x, 2)) == CONST_INT)
3028 if (GET_CODE (op0) == GET_CODE (op1)
3029 && (GET_CODE (op0) == ZERO_EXTEND
3030 || GET_CODE (op0) == SIGN_EXTEND))
3032 *total = COSTS_N_INSNS (1);
3033 op0 = XEXP (op0, 0);
3034 op1 = XEXP (op1, 0);
3036 else if (optimize_size)
3037 *total = COSTS_N_INSNS (1);
3039 *total = COSTS_N_INSNS (3);
3041 if (GET_CODE (op0) != REG
3042 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
3043 *total += rtx_cost (op0, MULT);
3044 if (GET_CODE (op1) != REG
3045 && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
3046 *total += rtx_cost (op1, MULT);
3052 *total = COSTS_N_INSNS (32);
3057 if (outer_code == SET)
3066 /* Used for communication between {push,pop}_multiple_operation (which
3067 we use not only as a predicate) and the corresponding output functions. */
3068 static int first_preg_to_save, first_dreg_to_save;
3071 push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3073 int lastdreg = 8, lastpreg = 6;
3076 first_preg_to_save = lastpreg;
3077 first_dreg_to_save = lastdreg;
3078 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
3080 rtx t = XVECEXP (op, 0, i);
3084 if (GET_CODE (t) != SET)
3088 dest = SET_DEST (t);
3089 if (GET_CODE (dest) != MEM || ! REG_P (src))
3091 dest = XEXP (dest, 0);
3092 if (GET_CODE (dest) != PLUS
3093 || ! REG_P (XEXP (dest, 0))
3094 || REGNO (XEXP (dest, 0)) != REG_SP
3095 || GET_CODE (XEXP (dest, 1)) != CONST_INT
3096 || INTVAL (XEXP (dest, 1)) != -i * 4)
3099 regno = REGNO (src);
3102 if (D_REGNO_P (regno))
3105 first_dreg_to_save = lastdreg = regno - REG_R0;
3107 else if (regno >= REG_P0 && regno <= REG_P7)
3110 first_preg_to_save = lastpreg = regno - REG_P0;
3120 if (regno >= REG_P0 && regno <= REG_P7)
3123 first_preg_to_save = lastpreg = regno - REG_P0;
3125 else if (regno != REG_R0 + lastdreg + 1)
3130 else if (group == 2)
3132 if (regno != REG_P0 + lastpreg + 1)
3141 pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3143 int lastdreg = 8, lastpreg = 6;
3146 for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
3148 rtx t = XVECEXP (op, 0, i);
3152 if (GET_CODE (t) != SET)
3156 dest = SET_DEST (t);
3157 if (GET_CODE (src) != MEM || ! REG_P (dest))
3159 src = XEXP (src, 0);
3163 if (! REG_P (src) || REGNO (src) != REG_SP)
3166 else if (GET_CODE (src) != PLUS
3167 || ! REG_P (XEXP (src, 0))
3168 || REGNO (XEXP (src, 0)) != REG_SP
3169 || GET_CODE (XEXP (src, 1)) != CONST_INT
3170 || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
3173 regno = REGNO (dest);
3176 if (regno == REG_R7)
3181 else if (regno != REG_P0 + lastpreg - 1)
3186 else if (group == 1)
3188 if (regno != REG_R0 + lastdreg - 1)
3194 first_dreg_to_save = lastdreg;
3195 first_preg_to_save = lastpreg;
3199 /* Emit assembly code for one multi-register push described by INSN, with
3200 operands in OPERANDS. */
3203 output_push_multiple (rtx insn, rtx *operands)
3208 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3209 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
3212 if (first_dreg_to_save == 8)
3213 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
3214 else if (first_preg_to_save == 6)
3215 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
3217 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
3218 first_dreg_to_save, first_preg_to_save);
3220 output_asm_insn (buf, operands);
3223 /* Emit assembly code for one multi-register pop described by INSN, with
3224 operands in OPERANDS. */
3227 output_pop_multiple (rtx insn, rtx *operands)
3232 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3233 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
3236 if (first_dreg_to_save == 8)
3237 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
3238 else if (first_preg_to_save == 6)
3239 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
3241 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
3242 first_dreg_to_save, first_preg_to_save);
3244 output_asm_insn (buf, operands);
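/* Standalone demonstration of the multi-register push/pop syntax built
   above (register numbers here are hypothetical): saving R7..R4 and
   P5..P3 gives first_dreg_to_save == 4 and first_preg_to_save == 3.  */
#if 0
#include <stdio.h>

int
main (void)
{
  char buf[80];
  int first_dreg_to_save = 4, first_preg_to_save = 3;

  sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
	   first_dreg_to_save, first_preg_to_save);
  fputs (buf, stdout);	/* prints: [--sp] = ( r7:4, p5:3 ); */
  sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
	   first_dreg_to_save, first_preg_to_save);
  fputs (buf, stdout);	/* prints: ( r7:4, p5:3 ) = [sp++]; */
  return 0;
}
#endif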
3247 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3250 single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
3252 rtx scratch = gen_reg_rtx (mode);
3255 srcmem = adjust_address_nv (src, mode, offset);
3256 dstmem = adjust_address_nv (dst, mode, offset);
3257 emit_move_insn (scratch, srcmem);
3258 emit_move_insn (dstmem, scratch);
3261 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3262 alignment ALIGN_EXP. Return true if successful, false if we should fall
3263 back on a different method. */
3266 bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
3268 rtx srcreg, destreg, countreg;
3269 HOST_WIDE_INT align = 0;
3270 unsigned HOST_WIDE_INT count = 0;
3272 if (GET_CODE (align_exp) == CONST_INT)
3273 align = INTVAL (align_exp);
3274 if (GET_CODE (count_exp) == CONST_INT)
3276 count = INTVAL (count_exp);
3278 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
3283 /* If optimizing for size, only do single copies inline. */
3286 if (count == 2 && align < 2)
3288 if (count == 4 && align < 4)
3290 if (count != 1 && count != 2 && count != 4)
3293 if (align < 2 && count != 1)
3296 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
3297 if (destreg != XEXP (dst, 0))
3298 dst = replace_equiv_address_nv (dst, destreg);
3299 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
3300 if (srcreg != XEXP (src, 0))
3301 src = replace_equiv_address_nv (src, srcreg);
3303 if (count != 0 && align >= 2)
3305 unsigned HOST_WIDE_INT offset = 0;
3309 if ((count & ~3) == 4)
3311 single_move_for_movmem (dst, src, SImode, offset);
3314 else if (count & ~3)
3316 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
3317 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
3319 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
3323 single_move_for_movmem (dst, src, HImode, offset);
3329 if ((count & ~1) == 2)
3331 single_move_for_movmem (dst, src, HImode, offset);
3334 else if (count & ~1)
3336 HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
3337 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
3339 emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
3344 single_move_for_movmem (dst, src, QImode, offset);
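/* Worked example of the expansion above (illustrative counts, assuming
   4-byte alignment): count == 6 becomes one SImode move followed by one
   HImode move; count == 16 uses the rep_movsi loop with a count register
   of (16 >> 2) - 1 == 3, the pattern performing the word moves itself;
   a final QImode move picks up any odd byte.  */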
3351 /* Compute the alignment for a local variable.
3352 TYPE is the data type, and ALIGN is the alignment that
3353 the object would ordinarily have. The value of this macro is used
3354 instead of that alignment to align the object. */
3357 bfin_local_alignment (tree type, int align)
3359 /* Increasing alignment for (relatively) big types allows the builtin
3360 memcpy to use 32-bit loads and stores. */
3361 if (TYPE_SIZE (type)
3362 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3363 && (TREE_INT_CST_LOW (TYPE_SIZE (type)) > 8
3364 || TREE_INT_CST_HIGH (TYPE_SIZE (type))) && align < 32)
3369 /* Implement TARGET_SCHED_ISSUE_RATE. */
3372 bfin_issue_rate (void)
3378 bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
3380 enum attr_type insn_type, dep_insn_type;
3381 int dep_insn_code_number;
3383 /* Anti and output dependencies have zero cost. */
3384 if (REG_NOTE_KIND (link) != 0)
3387 dep_insn_code_number = recog_memoized (dep_insn);
3389 /* If we can't recognize the insns, we can't really do anything. */
3390 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
3393 insn_type = get_attr_type (insn);
3394 dep_insn_type = get_attr_type (dep_insn);
3396 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
3398 rtx pat = PATTERN (dep_insn);
3399 if (GET_CODE (pat) == PARALLEL)
3400 pat = XVECEXP (pat, 0, 0);
3401 rtx dest = SET_DEST (pat);
3402 rtx src = SET_SRC (pat);
3403 if (! ADDRESS_REGNO_P (REGNO (dest))
3404 || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
3406 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
3413 /* Increment the counter for the number of loop instructions in the
3414 current function. */
3417 bfin_hardware_loop (void)
3419 cfun->machine->has_hardware_loops++;
3422 /* Maximum loop nesting depth. */
3423 #define MAX_LOOP_DEPTH 2
3425 /* Maximum size of a loop. */
3426 #define MAX_LOOP_LENGTH 2042
3428 /* Maximum distance of the LSETUP instruction from the loop start. */
3429 #define MAX_LSETUP_DISTANCE 30
3431 /* We need to keep a vector of loops. */
3432 typedef struct loop_info *loop_info;
3433 DEF_VEC_P (loop_info);
3434 DEF_VEC_ALLOC_P (loop_info,heap);
3436 /* Information about a loop we have found (or are in the process of finding). */
3438 struct loop_info GTY (())
3440 /* loop number, for dumps */
3443 /* All edges that jump into and out of the loop. */
3444 VEC(edge,gc) *incoming;
3446 /* We can handle two cases: all incoming edges have the same destination
3447 block, or all incoming edges have the same source block. These two
3448 members are set to the common source or destination we found, or NULL
3449 if different blocks were found. If both are NULL the loop can't be optimized. */
3451 basic_block incoming_src;
3452 basic_block incoming_dest;
3454 /* First block in the loop. This is the one branched to by the loop_end
3458 /* Last block in the loop (the one with the loop_end insn). */
3461 /* The successor block of the loop. This is the one the loop_end insn
3463 basic_block successor;
3465 /* The last instruction in the tail. */
3468 /* The loop_end insn. */
3471 /* The iteration register. */
3474 /* The new insn that loads the loop count register, if one is needed. */
3477 /* The new LSETUP instruction that initializes and starts the hardware loop. */
3480 /* The new label placed at the beginning of the loop. */
3483 /* The new label placed at the end of the loop. */
3486 /* The length of the loop. */
3489 /* The nesting depth of the loop. */
3492 /* Nonzero if we can't optimize this loop. */
3495 /* True if we have visited this loop. */
3498 /* True if this loop body clobbers any of LC0, LT0, or LB0. */
3501 /* True if this loop body clobbers any of LC1, LT1, or LB1. */
3504 /* Next loop in the graph. */
3505 struct loop_info *next;
3507 /* Immediate outer loop of this loop. */
3508 struct loop_info *outer;
3510 /* Vector of blocks only within the loop, including those within inner loops. */
3512 VEC (basic_block,heap) *blocks;
3514 /* Same information in a bitmap. */
3515 bitmap block_bitmap;
3517 /* Vector of inner loops within this loop */
3518 VEC (loop_info,heap) *loops;
3522 bfin_dump_loops (loop_info loops)
3526 for (loop = loops; loop; loop = loop->next)
3532 fprintf (dump_file, ";; loop %d: ", loop->loop_no);
3534 fprintf (dump_file, "(bad) ");
3535 fprintf (dump_file, "{head:%d, depth:%d}", loop->head->index, loop->depth);
3537 fprintf (dump_file, " blocks: [ ");
3538 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
3539 fprintf (dump_file, "%d ", b->index);
3540 fprintf (dump_file, "] ");
3542 fprintf (dump_file, " inner loops: [ ");
3543 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, i); ix++)
3544 fprintf (dump_file, "%d ", i->loop_no);
3545 fprintf (dump_file, "]\n");
3547 fprintf (dump_file, "\n");
3550 /* Scan the blocks of LOOP (and its inferiors) looking for basic block
3551 BB. Return true, if we find it. */
3554 bfin_bb_in_loop (loop_info loop, basic_block bb)
3556 return bitmap_bit_p (loop->block_bitmap, bb->index);
3559 /* Scan the blocks of LOOP (and its inferiors) looking for uses of
3560 REG. Return true, if we find any. Don't count the loop's loop_end
3561 insn if it matches LOOP_END. */
3564 bfin_scan_loop (loop_info loop, rtx reg, rtx loop_end)
3569 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
3573 for (insn = BB_HEAD (bb);
3574 insn != NEXT_INSN (BB_END (bb));
3575 insn = NEXT_INSN (insn))
3579 if (insn == loop_end)
3581 if (reg_mentioned_p (reg, PATTERN (insn)))
3588 /* Estimate the length of INSN conservatively. */
3591 length_for_loop (rtx insn)
3594 if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
3596 if (ENABLE_WA_SPECULATIVE_SYNCS)
3598 else if (ENABLE_WA_SPECULATIVE_LOADS)
3601 else if (LABEL_P (insn))
3603 if (ENABLE_WA_SPECULATIVE_SYNCS)
3608 length += get_attr_length (insn);
3613 /* Optimize LOOP. */
3616 bfin_optimize_loop (loop_info loop)
3620 rtx insn, init_insn, last_insn, nop_insn;
3621 rtx loop_init, start_label, end_label;
3622 rtx reg_lc0, reg_lc1, reg_lt0, reg_lt1, reg_lb0, reg_lb1;
3624 rtx lc_reg, lt_reg, lb_reg;
3628 int inner_depth = 0;
3638 fprintf (dump_file, ";; loop %d bad when found\n", loop->loop_no);
3642 /* Every loop contains in its list of inner loops every loop nested inside
3643 it, even if there are intermediate loops. This works because we're doing
3644 a depth-first search here and never visit a loop more than once. */
3645 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
3647 bfin_optimize_loop (inner);
3649 if (!inner->bad && inner_depth < inner->depth)
3651 inner_depth = inner->depth;
3653 loop->clobber_loop0 |= inner->clobber_loop0;
3654 loop->clobber_loop1 |= inner->clobber_loop1;
3658 loop->depth = inner_depth + 1;
3659 if (loop->depth > MAX_LOOP_DEPTH)
3662 fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
3666 /* Get the loop iteration register. */
3667 iter_reg = loop->iter_reg;
3669 if (!DPREG_P (iter_reg))
3672 fprintf (dump_file, ";; loop %d iteration count NOT in PREG or DREG\n",
3677 if (loop->incoming_src)
3679 /* Make sure the predecessor is before the loop start label, as required by
3680 the LSETUP instruction. */
3682 for (insn = BB_END (loop->incoming_src);
3683 insn && insn != loop->start_label;
3684 insn = NEXT_INSN (insn))
3685 length += length_for_loop (insn);
3690 fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
3695 if (length > MAX_LSETUP_DISTANCE)
3698 fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
3703 /* Check if start_label appears before loop_end and calculate the
3704 offset between them. We calculate the length of instructions conservatively. */
3707 for (insn = loop->start_label;
3708 insn && insn != loop->loop_end;
3709 insn = NEXT_INSN (insn))
3710 length += length_for_loop (insn);
3715 fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
3720 loop->length = length;
3721 if (loop->length > MAX_LOOP_LENGTH)
3724 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
3728 /* Scan all the blocks to make sure they don't use iter_reg. */
3729 if (bfin_scan_loop (loop, iter_reg, loop->loop_end))
3732 fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
3736 /* Scan all the insns to see if the loop body clobbers
3737 any hardware loop registers. */
3739 reg_lc0 = gen_rtx_REG (SImode, REG_LC0);
3740 reg_lc1 = gen_rtx_REG (SImode, REG_LC1);
3741 reg_lt0 = gen_rtx_REG (SImode, REG_LT0);
3742 reg_lt1 = gen_rtx_REG (SImode, REG_LT1);
3743 reg_lb0 = gen_rtx_REG (SImode, REG_LB0);
3744 reg_lb1 = gen_rtx_REG (SImode, REG_LB1);
3746 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
3750 for (insn = BB_HEAD (bb);
3751 insn != NEXT_INSN (BB_END (bb));
3752 insn = NEXT_INSN (insn))
3757 if (reg_set_p (reg_lc0, insn)
3758 || reg_set_p (reg_lt0, insn)
3759 || reg_set_p (reg_lb0, insn))
3760 loop->clobber_loop0 = 1;
3762 if (reg_set_p (reg_lc1, insn)
3763 || reg_set_p (reg_lt1, insn)
3764 || reg_set_p (reg_lb1, insn))
3765 loop->clobber_loop1 |= 1;
3769 if ((loop->clobber_loop0 && loop->clobber_loop1)
3770 || (loop->depth == MAX_LOOP_DEPTH && loop->clobber_loop0))
3772 loop->depth = MAX_LOOP_DEPTH + 1;
3774 fprintf (dump_file, ";; loop %d no loop reg available\n",
3779 /* There should be an instruction before the loop_end instruction
3780 in the same basic block. And the instruction must not be
3782 - CONDITIONAL BRANCH
3786 - Returns (RTS, RTN, etc.) */
3789 last_insn = PREV_INSN (loop->loop_end);
3793 for (; last_insn != PREV_INSN (BB_HEAD (bb));
3794 last_insn = PREV_INSN (last_insn))
3795 if (INSN_P (last_insn))
3798 if (last_insn != PREV_INSN (BB_HEAD (bb)))
3801 if (single_pred_p (bb)
3802 && single_pred (bb) != ENTRY_BLOCK_PTR)
3804 bb = single_pred (bb);
3805 last_insn = BB_END (bb);
3810 last_insn = NULL_RTX;
3818 fprintf (dump_file, ";; loop %d has no last instruction\n",
3823 if (JUMP_P (last_insn))
3825 loop_info inner = bb->aux;
3827 && inner->outer == loop
3828 && inner->loop_end == last_insn
3829 && inner->depth == 1)
3830 /* This jump_insn is the exact loop_end of an inner loop
3831 and is about to be optimized away, so use the inner loop's last_insn. */
3832 last_insn = inner->last_insn;
3836 fprintf (dump_file, ";; loop %d has bad last instruction\n",
3841 else if (CALL_P (last_insn)
3842 || (GET_CODE (PATTERN (last_insn)) != SEQUENCE
3843 && get_attr_type (last_insn) == TYPE_SYNC)
3844 || recog_memoized (last_insn) == CODE_FOR_return_internal)
3847 fprintf (dump_file, ";; loop %d has bad last instruction\n",
3852 if (GET_CODE (PATTERN (last_insn)) == ASM_INPUT
3853 || asm_noperands (PATTERN (last_insn)) >= 0
3854 || (GET_CODE (PATTERN (last_insn)) != SEQUENCE
3855 && get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI))
3857 nop_insn = emit_insn_after (gen_nop (), last_insn);
3858 last_insn = nop_insn;
3861 loop->last_insn = last_insn;
3863 /* The loop is good for replacement. */
3864 start_label = loop->start_label;
3865 end_label = gen_label_rtx ();
3866 iter_reg = loop->iter_reg;
3868 if (loop->depth == 1 && !loop->clobber_loop1)
3873 loop->clobber_loop1 = 1;
3880 loop->clobber_loop0 = 1;
3883 /* If iter_reg is a DREG, we need to generate an instruction to load
3884 the loop count into the LC register. */
3885 if (D_REGNO_P (REGNO (iter_reg)))
3887 init_insn = gen_movsi (lc_reg, iter_reg);
3888 loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
3892 else if (P_REGNO_P (REGNO (iter_reg)))
3894 init_insn = NULL_RTX;
3895 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3902 loop->init = init_insn;
3903 loop->end_label = end_label;
3904 loop->loop_init = loop_init;
3908 fprintf (dump_file, ";; replacing loop %d initializer with\n",
3910 print_rtl_single (dump_file, loop->loop_init);
3911 fprintf (dump_file, ";; replacing loop %d terminator with\n",
3913 print_rtl_single (dump_file, loop->loop_end);
3918 if (loop->init != NULL_RTX)
3919 emit_insn (loop->init);
3920 seq_end = emit_insn (loop->loop_init);
3925 if (loop->incoming_src)
3927 rtx prev = BB_END (loop->incoming_src);
3928 if (VEC_length (edge, loop->incoming) > 1
3929 || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
3931 gcc_assert (JUMP_P (prev));
3932 prev = PREV_INSN (prev);
3934 emit_insn_after (seq, prev);
3942 if (loop->head != loop->incoming_dest)
3944 FOR_EACH_EDGE (e, ei, loop->head->preds)
3946 if (e->flags & EDGE_FALLTHRU)
3948 rtx newjump = gen_jump (loop->start_label);
3949 emit_insn_before (newjump, BB_HEAD (loop->head));
3950 new_bb = create_basic_block (newjump, newjump, loop->head->prev_bb);
3951 gcc_assert (new_bb == loop->head->prev_bb);
3957 emit_insn_before (seq, BB_HEAD (loop->head));
3958 seq = emit_label_before (gen_label_rtx (), seq);
3960 new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
3961 FOR_EACH_EDGE (e, ei, loop->incoming)
3963 if (!(e->flags & EDGE_FALLTHRU)
3964 || e->dest != loop->head)
3965 redirect_edge_and_branch_force (e, new_bb);
3967 redirect_edge_succ (e, new_bb);
3971 delete_insn (loop->loop_end);
3972 /* Insert the loop end label before the last instruction of the loop. */
3973 emit_label_before (loop->end_label, loop->last_insn);
3980 fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);
3984 if (DPREG_P (loop->iter_reg))
3986 /* If loop->iter_reg is a DREG or PREG, we can split it here
3987 without needing a scratch register. */
3990 emit_insn_before (gen_addsi3 (loop->iter_reg,
3995 emit_insn_before (gen_cmpsi (loop->iter_reg, const0_rtx),
3998 insn = emit_jump_insn_before (gen_bne (loop->start_label),
4001 JUMP_LABEL (insn) = loop->start_label;
4002 LABEL_NUSES (loop->start_label)++;
4003 delete_insn (loop->loop_end);
4007 /* Called from bfin_reorg_loops when a potential loop end is found. LOOP is
4008 a newly set up structure describing the loop; it is this function's
4009 responsibility to fill in most of it. TAIL_BB and TAIL_INSN point to the
4010 loop_end insn and its enclosing basic block. */
4013 bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
4017 VEC (basic_block,heap) *works = VEC_alloc (basic_block,heap,20);
4019 loop->tail = tail_bb;
4020 loop->head = BRANCH_EDGE (tail_bb)->dest;
4021 loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
4022 loop->loop_end = tail_insn;
4023 loop->last_insn = NULL_RTX;
4024 loop->iter_reg = SET_DEST (XVECEXP (PATTERN (tail_insn), 0, 1));
4025 loop->depth = loop->length = 0;
4027 loop->clobber_loop0 = loop->clobber_loop1 = 0;
4030 loop->incoming = VEC_alloc (edge, gc, 2);
4031 loop->init = loop->loop_init = NULL_RTX;
4032 loop->start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn), 0, 0)), 1), 0);
4033 loop->end_label = NULL_RTX;
4036 VEC_safe_push (basic_block, heap, works, loop->head);
4038 while (VEC_iterate (basic_block, works, dwork++, bb))
4042 if (bb == EXIT_BLOCK_PTR)
4044 /* We've reached the exit block. The loop must be bad. */
4047 ";; Loop is bad - reached exit block while scanning\n");
4052 if (bitmap_bit_p (loop->block_bitmap, bb->index))
4055 /* We've not seen this block before. Add it to the loop's
4056 list and then add each successor to the work list. */
4058 VEC_safe_push (basic_block, heap, loop->blocks, bb);
4059 bitmap_set_bit (loop->block_bitmap, bb->index);
4063 FOR_EACH_EDGE (e, ei, bb->succs)
4065 basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
4066 if (!REGNO_REG_SET_P (df_get_live_in (succ),
4067 REGNO (loop->iter_reg)))
4069 if (!VEC_space (basic_block, works, 1))
4073 VEC_block_remove (basic_block, works, 0, dwork);
4077 VEC_reserve (basic_block, heap, works, 1);
4079 VEC_quick_push (basic_block, works, succ);
4084 /* Find the predecessor, and make sure nothing else jumps into this loop. */
4088 for (dwork = 0; VEC_iterate (basic_block, loop->blocks, dwork, bb); dwork++)
4092 FOR_EACH_EDGE (e, ei, bb->preds)
4094 basic_block pred = e->src;
4096 if (!bfin_bb_in_loop (loop, pred))
4099 fprintf (dump_file, ";; Loop %d: incoming edge %d -> %d\n",
4100 loop->loop_no, pred->index,
4102 VEC_safe_push (edge, gc, loop->incoming, e);
4107 for (pass = 0, retry = 1; retry && pass < 2; pass++)
4114 FOR_EACH_EDGE (e, ei, loop->incoming)
4118 loop->incoming_src = e->src;
4119 loop->incoming_dest = e->dest;
4124 if (e->dest != loop->incoming_dest)
4125 loop->incoming_dest = NULL;
4126 if (e->src != loop->incoming_src)
4127 loop->incoming_src = NULL;
4129 if (loop->incoming_src == NULL && loop->incoming_dest == NULL)
4135 ";; retrying loop %d with forwarder blocks\n",
4143 ";; can't find suitable entry for loop %d\n",
4151 FOR_EACH_EDGE (e, ei, loop->incoming)
4153 if (forwarder_block_p (e->src))
4160 ";; Adding forwarder block %d to loop %d and retrying\n",
4161 e->src->index, loop->loop_no);
4162 VEC_safe_push (basic_block, heap, loop->blocks, e->src);
4163 bitmap_set_bit (loop->block_bitmap, e->src->index);
4164 FOR_EACH_EDGE (e2, ei2, e->src->preds)
4165 VEC_safe_push (edge, gc, loop->incoming, e2);
4166 VEC_unordered_remove (edge, loop->incoming, ei.index);
4176 VEC_free (basic_block, heap, works);
4179 /* Analyze the structure of the loops in the current function. Use STACK
4180 for bitmap allocations. Returns all the valid candidates for hardware
4181 loops found in this function. */
4183 bfin_discover_loops (bitmap_obstack *stack, FILE *dump_file)
4185 loop_info loops = NULL;
4191 /* Find all the possible loop tails. This means searching for every
4192 loop_end instruction. For each one found, create a loop_info
4193 structure and add the head block to the work list. */
4196 rtx tail = BB_END (bb);
4198 while (GET_CODE (tail) == NOTE)
4199 tail = PREV_INSN (tail);
4203 if (INSN_P (tail) && recog_memoized (tail) == CODE_FOR_loop_end)
4205 /* A possible loop end */
4207 loop = XNEW (struct loop_info);
4210 loop->loop_no = nloops++;
4211 loop->blocks = VEC_alloc (basic_block, heap, 20);
4212 loop->block_bitmap = BITMAP_ALLOC (stack);
4217 fprintf (dump_file, ";; potential loop %d ending at\n",
4219 print_rtl_single (dump_file, tail);
4222 bfin_discover_loop (loop, bb, tail);
4226 tmp_bitmap = BITMAP_ALLOC (stack);
4227 /* Compute loop nestings. */
4228 for (loop = loops; loop; loop = loop->next)
4234 for (other = loop->next; other; other = other->next)
4239 bitmap_and (tmp_bitmap, other->block_bitmap, loop->block_bitmap);
4240 if (bitmap_empty_p (tmp_bitmap))
4242 if (bitmap_equal_p (tmp_bitmap, other->block_bitmap))
4244 other->outer = loop;
4245 VEC_safe_push (loop_info, heap, loop->loops, other);
4247 else if (bitmap_equal_p (tmp_bitmap, loop->block_bitmap))
4249 loop->outer = other;
4250 VEC_safe_push (loop_info, heap, other->loops, loop);
4256 ";; can't find suitable nesting for loops %d and %d\n",
4257 loop->loop_no, other->loop_no);
4258 loop->bad = other->bad = 1;
4262 BITMAP_FREE (tmp_bitmap);
4267 /* Free up the loop structures in LOOPS. */
4269 free_loops (loop_info loops)
4273 loop_info loop = loops;
4275 VEC_free (loop_info, heap, loop->loops);
4276 VEC_free (basic_block, heap, loop->blocks);
4277 BITMAP_FREE (loop->block_bitmap);
4282 #define BB_AUX_INDEX(BB) ((unsigned)(BB)->aux)
4284 /* The taken-branch edge from the loop end can actually go forward. Since the
4285 Blackfin's LSETUP instruction requires that the loop end be after the loop
4286 start, try to reorder a loop's basic blocks when we find such a case. */
4288 bfin_reorder_loops (loop_info loops, FILE *dump_file)
4295 cfg_layout_initialize (0);
4297 for (loop = loops; loop; loop = loop->next)
4307 /* Recreate an index for basic blocks that represents their order. */
4308 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
4309 bb != EXIT_BLOCK_PTR;
4310 bb = bb->next_bb, index++)
4311 bb->aux = (PTR) index;
4313 if (BB_AUX_INDEX (loop->head) < BB_AUX_INDEX (loop->tail))
4316 FOR_EACH_EDGE (e, ei, loop->head->succs)
4318 if (bitmap_bit_p (loop->block_bitmap, e->dest->index)
4319 && BB_AUX_INDEX (e->dest) < BB_AUX_INDEX (loop->tail))
4321 basic_block start_bb = e->dest;
4322 basic_block start_prev_bb = start_bb->prev_bb;
4325 fprintf (dump_file, ";; Moving block %d before block %d\n",
4326 loop->head->index, start_bb->index);
4327 loop->head->prev_bb->next_bb = loop->head->next_bb;
4328 loop->head->next_bb->prev_bb = loop->head->prev_bb;
4330 loop->head->prev_bb = start_prev_bb;
4331 loop->head->next_bb = start_bb;
4332 start_prev_bb->next_bb = start_bb->prev_bb = loop->head;
4336 loops = loops->next;
4341 if (bb->next_bb != EXIT_BLOCK_PTR)
4342 bb->aux = bb->next_bb;
4346 cfg_layout_finalize ();
4350 /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
4351 and tries to rewrite the RTL of these loops so that proper Blackfin
4352 hardware loops are generated. */
4355 bfin_reorg_loops (FILE *dump_file)
4357 loop_info loops = NULL;
4360 bitmap_obstack stack;
4362 bitmap_obstack_initialize (&stack);
4365 fprintf (dump_file, ";; Find loops, first pass\n\n");
4367 loops = bfin_discover_loops (&stack, dump_file);
4370 bfin_dump_loops (loops);
4372 bfin_reorder_loops (loops, dump_file);
4376 fprintf (dump_file, ";; Find loops, second pass\n\n");
4378 loops = bfin_discover_loops (&stack, dump_file);
4381 fprintf (dump_file, ";; All loops found:\n\n");
4382 bfin_dump_loops (loops);
4385 /* Now apply the optimizations. */
4386 for (loop = loops; loop; loop = loop->next)
4387 bfin_optimize_loop (loop);
4391 fprintf (dump_file, ";; After hardware loops optimization:\n\n");
4392 bfin_dump_loops (loops);
4398 print_rtl (dump_file, get_insns ());
4404 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
4405 Returns true if we modified the insn chain, false otherwise. */
4407 gen_one_bundle (rtx slot[3])
4409 gcc_assert (slot[1] != NULL_RTX);
4411 /* Verify that we really can do the multi-issue. */
4414 rtx t = NEXT_INSN (slot[0]);
4415 while (t != slot[1])
4417 if (GET_CODE (t) != NOTE
4418 || NOTE_KIND (t) != NOTE_INSN_DELETED)
4425 rtx t = NEXT_INSN (slot[1]);
4426 while (t != slot[2])
4428 if (GET_CODE (t) != NOTE
4429 || NOTE_KIND (t) != NOTE_INSN_DELETED)
4435 if (slot[0] == NULL_RTX)
4437 slot[0] = emit_insn_before (gen_mnop (), slot[1]);
4438 df_insn_rescan (slot[0]);
4440 if (slot[2] == NULL_RTX)
4442 slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
4443 df_insn_rescan (slot[2]);
4446 /* Avoid line number information being printed inside one bundle. */
4447 if (INSN_LOCATOR (slot[1])
4448 && INSN_LOCATOR (slot[1]) != INSN_LOCATOR (slot[0]))
4449 INSN_LOCATOR (slot[1]) = INSN_LOCATOR (slot[0]);
4450 if (INSN_LOCATOR (slot[2])
4451 && INSN_LOCATOR (slot[2]) != INSN_LOCATOR (slot[0]))
4452 INSN_LOCATOR (slot[2]) = INSN_LOCATOR (slot[0]);
4454 /* Terminate them with "|| " instead of ";" in the output. */
4455 PUT_MODE (slot[0], SImode);
4456 PUT_MODE (slot[1], SImode);
4457 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
4458 PUT_MODE (slot[2], QImode);
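/* Illustrative result (operands hypothetical): after this, final output
   prints the three slots separated by "||" rather than ";", e.g.
     mnop || r0 = [p0++] || nop;
   The SImode marks on the first two slots and the QImode mark on the
   last slot are what encode that formatting.  */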
4462 /* Go through all insns, and use the information generated during scheduling
4463 to generate SEQUENCEs to represent bundles of instructions issued simultaneously. */
4467 bfin_gen_bundles (void)
4476 slot[0] = slot[1] = slot[2] = NULL_RTX;
4477 for (insn = BB_HEAD (bb);; insn = next)
4482 if (get_attr_type (insn) == TYPE_DSP32)
4484 else if (slot[1] == NULL_RTX)
4491 next = NEXT_INSN (insn);
4492 while (next && insn != BB_END (bb)
4494 && GET_CODE (PATTERN (next)) != USE
4495 && GET_CODE (PATTERN (next)) != CLOBBER))
4498 next = NEXT_INSN (insn);
4501 /* BB_END can change due to emitting extra NOPs, so check here. */
4502 at_end = insn == BB_END (bb);
4503 if (at_end || GET_MODE (next) == TImode)
4506 || !gen_one_bundle (slot))
4507 && slot[0] != NULL_RTX)
4509 rtx pat = PATTERN (slot[0]);
4510 if (GET_CODE (pat) == SET
4511 && GET_CODE (SET_SRC (pat)) == UNSPEC
4512 && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
4514 SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
4515 INSN_CODE (slot[0]) = -1;
4516 df_insn_rescan (slot[0]);
4520 slot[0] = slot[1] = slot[2] = NULL_RTX;
4528 /* Ensure that no var tracking notes are emitted in the middle of a
4529 three-instruction bundle. */
4532 reorder_var_tracking_notes (void)
4538 rtx queue = NULL_RTX;
4539 bool in_bundle = false;
4541 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
4543 next = NEXT_INSN (insn);
4547 /* Emit queued up notes at the last instruction of a bundle. */
4548 if (GET_MODE (insn) == QImode)
4552 rtx next_queue = PREV_INSN (queue);
4553 PREV_INSN (NEXT_INSN (insn)) = queue;
4554 NEXT_INSN (queue) = NEXT_INSN (insn);
4555 NEXT_INSN (insn) = queue;
4556 PREV_INSN (queue) = insn;
4561 else if (GET_MODE (insn) == SImode)
4564 else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
4568 rtx prev = PREV_INSN (insn);
4569 PREV_INSN (next) = prev;
4570 NEXT_INSN (prev) = next;
4572 PREV_INSN (insn) = queue;
4580 /* Return an insn type for INSN that can be used by the caller for anomaly
4581 workarounds. This differs from plain get_attr_type in that it handles SEQUENCEs. */
4584 static enum attr_type
4585 type_for_anomaly (rtx insn)
4587 rtx pat = PATTERN (insn);
4588 if (GET_CODE (pat) == SEQUENCE)
4591 t = get_attr_type (XVECEXP (pat, 0, 1));
4594 t = get_attr_type (XVECEXP (pat, 0, 2));
4600 return get_attr_type (insn);
4603 /* Return nonzero if INSN contains any loads that may trap. It handles
4604 SEQUENCEs correctly. */
4607 trapping_loads_p (rtx insn)
4609 rtx pat = PATTERN (insn);
4610 if (GET_CODE (pat) == SEQUENCE)
4613 t = get_attr_type (XVECEXP (pat, 0, 1));
4615 && may_trap_p (SET_SRC (PATTERN (XVECEXP (pat, 0, 1)))))
4617 t = get_attr_type (XVECEXP (pat, 0, 2));
4619 && may_trap_p (SET_SRC (PATTERN (XVECEXP (pat, 0, 2)))))
4624 return may_trap_p (SET_SRC (single_set (insn)));
4627 /* This function acts like NEXT_INSN, but is aware of three-insn bundles and
4628 skips all subsequent parallel instructions if INSN is the start of such a group. */
4631 find_next_insn_start (rtx insn)
4633 if (GET_MODE (insn) == SImode)
4635 while (GET_MODE (insn) != QImode)
4636 insn = NEXT_INSN (insn);
4638 return NEXT_INSN (insn);
4641 /* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4642 a three-insn bundle, see if one of them is a load and return that if so.
4643 Return NULL_RTX if the insn does not contain loads. */
4645 find_load (rtx insn)
4647 if (get_attr_type (insn) == TYPE_MCLD)
4649 if (GET_MODE (insn) != SImode)
4652 insn = NEXT_INSN (insn);
4653 if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
4654 && get_attr_type (insn) == TYPE_MCLD)
4656 } while (GET_MODE (insn) != QImode);
4660 /* We use the machine specific reorg pass for emitting CSYNC instructions
4661 after conditional branches as needed.
4663 The Blackfin is unusual in that a code sequence like
4664 if cc jump label;
4665 r0 = (p0);
4666 may speculatively perform the load even if the condition isn't true. This
4667 happens for a branch that is predicted not taken, because the pipeline
4668 isn't flushed or stalled, so the early stages of the following instructions,
4669 which perform the memory reference, are allowed to execute before the
4670 jump condition is evaluated.
4671 Therefore, we must insert additional instructions in all places where this
4672 could lead to incorrect behavior. The manual recommends CSYNC, while
4673 VDSP seems to use NOPs (even though its corresponding compiler option is named CSYNC).
4676 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
4677 When optimizing for size, we turn the branch into a predicted taken one.
4678 This may be slower due to mispredicts, but saves code size. */
4684 rtx last_condjump = NULL_RTX;
4685 int cycles_since_jump = INT_MAX;
4687 /* We are freeing block_for_insn in the toplev to keep compatibility
4688 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4689 compute_bb_for_insn ();
4691 if (bfin_flag_schedule_insns2)
4693 splitting_for_sched = 1;
4695 splitting_for_sched = 0;
4697 timevar_push (TV_SCHED2);
4699 timevar_pop (TV_SCHED2);
4701 /* Examine the schedule and insert nops as necessary for 64-bit parallel instructions. */
4703 bfin_gen_bundles ();
4708 /* Doloop optimization */
4709 if (cfun->machine->has_hardware_loops)
4710 bfin_reorg_loops (dump_file);
4712 if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS)
4715 /* First pass: find predicted-false branches; if something after them
4716 needs nops, insert them or change the branch to predict true. */
4717 for (insn = get_insns (); insn; insn = next)
4721 next = find_next_insn_start (insn);
4723 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
4726 pat = PATTERN (insn);
4727 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
4728 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
4729 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
4734 if (any_condjump_p (insn)
4735 && ! cbranch_predicted_taken_p (insn))
4737 last_condjump = insn;
4738 cycles_since_jump = 0;
4741 cycles_since_jump = INT_MAX;
4743 else if (INSN_P (insn))
4745 rtx load_insn = find_load (insn);
4746 enum attr_type type = type_for_anomaly (insn);
4747 int delay_needed = 0;
4748 if (cycles_since_jump < INT_MAX)
4749 cycles_since_jump++;
4751 if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
4753 if (trapping_loads_p (load_insn))
4756 else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
4759 if (delay_needed > cycles_since_jump)
4763 rtx *op = recog_data.operand;
4765 delay_needed -= cycles_since_jump;
4767 extract_insn (last_condjump);
4770 pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
4772 cycles_since_jump = INT_MAX;
4775 /* Do not adjust cycles_since_jump in this case, so that
4776 we'll increase the number of NOPs for a subsequent insn if necessary. */
4778 pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
4779 GEN_INT (delay_needed));
4780 PATTERN (last_condjump) = pat;
4781 INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
4785 /* Second pass: for predicted-true branches, see if anything at the
4786 branch destination needs extra nops. */
4787 if (! ENABLE_WA_SPECULATIVE_SYNCS)
4790 if (! ENABLE_WA_RETS)
4793 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4796 && any_condjump_p (insn)
4797 && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
4798 || cbranch_predicted_taken_p (insn)))
4800 rtx target = JUMP_LABEL (insn);
4802 cycles_since_jump = 0;
4803 for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
4807 if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
4810 pat = PATTERN (target);
4811 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
4812 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
4813 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
4816 if (INSN_P (target))
4818 enum attr_type type = type_for_anomaly (target);
4819 int delay_needed = 0;
4820 if (cycles_since_jump < INT_MAX)
4821 cycles_since_jump++;
4823 if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
4826 if (delay_needed > cycles_since_jump)
4828 rtx prev = prev_real_insn (label);
4829 delay_needed -= cycles_since_jump;
4831 fprintf (dump_file, "Adding %d nops after %d\n",
4832 delay_needed, INSN_UID (label));
4834 && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
4841 "Reducing nops on insn %d.\n",
4844 x = XVECEXP (x, 0, 1);
4845 v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
4846 XVECEXP (x, 0, 0) = GEN_INT (v);
4848 while (delay_needed-- > 0)
4849 emit_insn_after (gen_nop (), label);
4857 if (bfin_flag_var_tracking)
4859 timevar_push (TV_VAR_TRACKING);
4860 variable_tracking_main ();
4861 reorder_var_tracking_notes ();
4862 timevar_pop (TV_VAR_TRACKING);
4864 df_finish_pass (false);
4867 /* Handle interrupt_handler, exception_handler and nmi_handler function
4868 attributes; arguments as in struct attribute_spec.handler. */
4871 handle_int_attribute (tree *node, tree name,
4872 tree args ATTRIBUTE_UNUSED,
4873 int flags ATTRIBUTE_UNUSED,
4877 if (TREE_CODE (x) == FUNCTION_DECL)
4880 if (TREE_CODE (x) != FUNCTION_TYPE)
4882 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4883 IDENTIFIER_POINTER (name));
4884 *no_add_attrs = true;
4886 else if (funkind (x) != SUBROUTINE)
4887 error ("multiple function type attributes specified");
4892 /* Return 0 if the attributes for two types are incompatible, 1 if they
4893 are compatible, and 2 if they are nearly compatible (which causes a
4894 warning to be generated). */
4897 bfin_comp_type_attributes (const_tree type1, const_tree type2)
4899 e_funkind kind1, kind2;
4901 if (TREE_CODE (type1) != FUNCTION_TYPE)
4904 kind1 = funkind (type1);
4905 kind2 = funkind (type2);
4910 /* Check for mismatched modifiers. */
4911 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
4912 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
4915 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
4916 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
4919 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
4920 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
4923 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
4924 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
4930 /* Handle a "longcall" or "shortcall" attribute; arguments as in
4931 struct attribute_spec.handler. */
4934 bfin_handle_longcall_attribute (tree *node, tree name,
4935 tree args ATTRIBUTE_UNUSED,
4936 int flags ATTRIBUTE_UNUSED,
4939 if (TREE_CODE (*node) != FUNCTION_TYPE
4940 && TREE_CODE (*node) != FIELD_DECL
4941 && TREE_CODE (*node) != TYPE_DECL)
4943 warning (OPT_Wattributes, "`%s' attribute only applies to functions",
4944 IDENTIFIER_POINTER (name));
4945 *no_add_attrs = true;
4948 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
4949 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
4950 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
4951 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
4953 warning (OPT_Wattributes,
4954 "can't apply both longcall and shortcall attributes to the same function");
4955 *no_add_attrs = true;

/* Handle a "l1_text" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      error ("`%s' attribute only applies to functions",
	     IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  /* The decl may have already been given a section attribute
     from a previous declaration.  Ensure they match.  */
  else if (DECL_SECTION_NAME (decl) != NULL_TREE
	   && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
		      ".l1.text") != 0)
    {
      error ("section of %q+D conflicts with previous declaration",
	     decl);
      *no_add_attrs = true;
    }
  else
    DECL_SECTION_NAME (decl) = build_string (9, ".l1.text");

  return NULL_TREE;
}
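
/* For illustration (hypothetical user code): placing a hot routine into
   L1 instruction SRAM; the attribute is equivalent to forcing the
   function into the .l1.text section:

     __attribute__ ((l1_text)) void inner_loop (short *buf);  */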

/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != VAR_DECL)
    {
      error ("`%s' attribute only applies to variables",
	     IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else if (current_function_decl != NULL_TREE
	   && !TREE_STATIC (decl))
    {
      error ("`%s' attribute cannot be specified for local variables",
	     IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      const char *section_name;

      if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
	section_name = ".l1.data";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
	section_name = ".l1.data.A";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
	section_name = ".l1.data.B";
      else
	gcc_unreachable ();

      /* The decl may have already been given a section attribute
	 from a previous declaration.  Ensure they match.  */
      if (DECL_SECTION_NAME (decl) != NULL_TREE
	  && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
		     section_name) != 0)
	{
	  error ("section of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      else
	DECL_SECTION_NAME (decl)
	  = build_string (strlen (section_name) + 1, section_name);
    }

  return NULL_TREE;
}
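
/* For illustration (hypothetical user code): pinning a buffer into L1
   data SRAM; l1_data_A and l1_data_B select a specific bank:

     __attribute__ ((l1_data_B)) static short coeffs[128];  */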

/* Table of valid machine attributes.  */
const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nesting", 0, 0, false, true,  true, NULL },
  { "kspisusp", 0, 0, false, true,  true, NULL },
  { "saveall", 0, 0, false, true,  true, NULL },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "l1_text", 0, 0, true, false, false,  bfin_handle_l1_text_attribute },
  { "l1_data", 0, 0, true, false, false,  bfin_handle_l1_data_attribute },
  { "l1_data_A", 0, 0, true, false, false,  bfin_handle_l1_data_attribute },
  { "l1_data_B", 0, 0, true, false, false,  bfin_handle_l1_data_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};

/* Implementation of TARGET_ASM_INTEGER.  When using FD-PIC, we need to
   tell the assembler to generate pointers to function descriptors in
   some cases.  */

static bool
bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
{
  if (TARGET_FDPIC && size == UNITS_PER_WORD)
    {
      if (GET_CODE (value) == SYMBOL_REF
	  && SYMBOL_REF_FUNCTION_P (value))
	{
	  fputs ("\t.picptr\tfuncdesc(", asm_out_file);
	  output_addr_const (asm_out_file, value);
	  fputs (")\n", asm_out_file);
	  return true;
	}
      else if (!aligned_p)
	{
	  /* We've set the unaligned SI op to NULL, so we always have to
	     handle the unaligned case here.  */
	  assemble_integer_with_op ("\t.4byte\t", value);
	  return true;
	}
    }
  return default_assemble_integer (value, size, aligned_p);
}
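
/* For illustration: with -mfdpic, a word-sized initializer that refers
   to a function symbol is emitted by the code above as something like

     .picptr funcdesc(_callback)

   (symbol name hypothetical), so the linker builds an FD-PIC function
   descriptor instead of a raw code address.  */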

/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this;
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);

      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
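
/* For illustration: a thunk with delta == 4 and no vcall offset should
   come out of the code above as something like

     R0 += 4;
     jump.l _method;

   (symbol name hypothetical): adjust the incoming this pointer in R0,
   then tail-jump to the real function.  */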

/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  BFIN_BUILTIN_MAX
};

#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
			NULL, NULL_TREE);				\
} while (0)

/* Set up all builtin functions for this target.  */
static void
bfin_init_builtins (void)
{
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
				NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
				NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
				short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
				NULL_TREE);
  tree int_ftype_pint
    = build_function_type_list (integer_type_node,
				build_pointer_type (integer_type_node),
				NULL_TREE);

  /* Add the Blackfin builtin functions.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);

  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
	       BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MAX_2X16);

  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_ABS_2X16);

  def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MIN_1X16);
  def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MAX_1X16);

  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_SUM_2X16);
  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFLH_2X16);

  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHH);

  def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_MIN_1X32);
  def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_MAX_1X32);

  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_ABS_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_ROUND_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
	       BFIN_BUILTIN_MULT_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X32X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X32X32NS);

  /* Shifts.  */
  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_LSHIFT_2X16);
  def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X32);

  /* Complex numbers.  */
  def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16);
  def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16_S40);
  def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16_S40);
  def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16_S40);
  def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_CPLX_SQU);

  /* "Unaligned" load.  */
  def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
	       BFIN_BUILTIN_LOADBYTES);
}
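
/* For illustration (hypothetical user code): the 2x16 builtins operate
   on a two-element vector of shorts, so a saturating packed add can be
   written as

     typedef short v2hi __attribute__ ((vector_size (4)));

     v2hi add_sat (v2hi a, v2hi b)
     {
       return __builtin_bfin_add_fr2x16 (a, b);
     }
*/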

struct builtin_description
{
  const enum insn_code icode;
  const char *const name;
  const enum bfin_builtins code;
  int macflag;
};

static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
  { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },

  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE }
};

static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },

  { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },

  { CODE_FOR_signbitshi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_signbitssi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
  { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};

/* Errors in the source file can cause expand_expr to return const0_rtx
   where we expect a vector.  To avoid crashing, use one of the vector
   clear instructions.  */

static rtx
safe_vector_operand (rtx x, enum machine_mode mode)
{
  if (x != const0_rtx)
    return x;
  x = gen_reg_rtx (SImode);

  emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
  return gen_lowpart (mode, x);
}

/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
			   int macflag)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }

  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
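
/* Worked example: __builtin_bfin_add_fr1x16 reaches this function with
   CODE_FOR_ssaddhi3 and macflag -1 from bdesc_2arg, so its int arguments
   are narrowed to HImode above and a plain three-operand pattern is
   emitted; the flag_mulhi entries instead pass a MACFLAG_* constant as
   an extra operand.  */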

/* Subroutine of bfin_expand_builtin to take care of unop insns.  */

static rtx
bfin_expand_unop_builtin (enum insn_code icode, tree exp,
			  rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);

  if (op0mode == SImode && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		     rtx subtarget ATTRIBUTE_UNUSED,
		     enum machine_mode mode ATTRIBUTE_UNUSED,
		     int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  enum insn_code icode;
  const struct builtin_description *d;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
  enum machine_mode tmode, mode0;

  switch (fcode)
    {
    case BFIN_BUILTIN_CSYNC:
      emit_insn (gen_csync ());
      return 0;
    case BFIN_BUILTIN_SSYNC:
      emit_insn (gen_ssync ());
      return 0;

    case BFIN_BUILTIN_DIFFHL_2X16:
    case BFIN_BUILTIN_DIFFLH_2X16:
    case BFIN_BUILTIN_SUM_2X16:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
	       : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
	       : CODE_FOR_ssaddhilov2hi3);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! target
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      if (VECTOR_MODE_P (mode0))
	op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (target, op0, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case BFIN_BUILTIN_MULT_1X32X32:
    case BFIN_BUILTIN_MULT_1X32X32NS:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      if (! target
	  || !register_operand (target, SImode))
	target = gen_reg_rtx (SImode);

      a1reg = gen_rtx_REG (PDImode, REG_A1);
      a0reg = gen_rtx_REG (PDImode, REG_A0);
      tmp1 = gen_lowpart (V2HImode, op0);
      tmp2 = gen_lowpart (V2HImode, op1);
      emit_insn (gen_flag_macinit1hi (a1reg,
				      gen_lowpart (HImode, op0),
				      gen_lowpart (HImode, op1),
				      GEN_INT (MACFLAG_FU)));
      emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));

      if (fcode == BFIN_BUILTIN_MULT_1X32X32)
	emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
						       const1_rtx, const1_rtx,
						       const1_rtx, const0_rtx, a1reg,
						       const0_rtx, GEN_INT (MACFLAG_NONE),
						       GEN_INT (MACFLAG_M)));
      else
	{
	  /* For saturating multiplication, there's exactly one special case
	     to be handled: multiplying the smallest negative value with
	     itself.  Due to shift correction in fractional multiplies, this
	     can overflow.  Iff this happens, OP2 will contain 1, which, when
	     added in 32 bits to the smallest negative, wraps to the largest
	     positive, which is the result we want.  Concretely, this is the
	     case 0x80000000 * 0x80000000 (-1.0 * -1.0 in 1.31 fract), whose
	     mathematical result +1.0 is not representable.  */
	  op2 = gen_reg_rtx (V2HImode);
	  emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
	  emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
				  gen_lowpart (SImode, op2)));
	  emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
								const1_rtx, const1_rtx,
								const1_rtx, const0_rtx, a1reg,
								const0_rtx, GEN_INT (MACFLAG_NONE),
								GEN_INT (MACFLAG_M)));
	  op2 = gen_reg_rtx (SImode);
	  emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
	}
      emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
					       const1_rtx, const0_rtx,
					       a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
      emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
      emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
      if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
	emit_insn (gen_addsi3 (target, target, op2));
      return target;

    case BFIN_BUILTIN_CPLX_MUL_16:
    case BFIN_BUILTIN_CPLX_MUL_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	/* TMODE is not set on this path; the result mode is V2HImode.  */
	target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);

      if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_NONE)));
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, const1_rtx, const0_rtx,
					 GEN_INT (MACFLAG_NONE), accvec));
      return target;
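
      /* Illustrative note: the complex multiply builtins compute
	 (a + b*i) * (c + d*i) = (a*c - b*d) + (a*d + b*c)*i on pairs of
	 16-bit fractional halves; roughly, the accumulator setup above
	 forms one set of partial products and the final
	 flag_macv2hi_parts insn combines them into both halves of the
	 packed result.  */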

    case BFIN_BUILTIN_CPLX_MAC_16:
    case BFIN_BUILTIN_CPLX_MSU_16:
    case BFIN_BUILTIN_CPLX_MAC_16_S40:
    case BFIN_BUILTIN_CPLX_MSU_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      arg2 = CALL_EXPR_ARG (exp, 2);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	/* TMODE is not set on this path; the result mode is V2HImode.  */
	target = gen_reg_rtx (V2HImode);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);
      if (! register_operand (op2, GET_MODE (op2)))
	op2 = copy_to_mode_reg (GET_MODE (op2), op2);

      tmp1 = gen_reg_rtx (SImode);
      tmp2 = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
      emit_move_insn (tmp2, gen_lowpart (SImode, op0));
      emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
      emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MSU_16)
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx,
						   GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx,
						   GEN_INT (MACFLAG_NONE)));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
	{
	  tmp1 = const1_rtx;
	  tmp2 = const0_rtx;
	}
      else
	{
	  tmp1 = const0_rtx;
	  tmp2 = const1_rtx;
	}
      emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, tmp1, tmp2,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_SQU:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_mulv2hi;
      tmp1 = gen_reg_rtx (V2HImode);
      tmp2 = gen_reg_rtx (V2HImode);

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);

      emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_flag_mulhi_parts (tmp2, op0, op0, const0_rtx,
				       const0_rtx, const1_rtx,
				       GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_ssaddhi3_parts (target, tmp2, tmp2, const1_rtx,
				     const0_rtx, const0_rtx));

      emit_insn (gen_sssubhi3_parts (target, tmp1, tmp1, const0_rtx,
				     const0_rtx, const1_rtx));

      return target;

    default:
      break;
    }

  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_binop_builtin (d->icode, exp, target,
					d->macflag);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_unop_builtin (d->icode, exp, target);

  gcc_unreachable ();
}

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory

struct gcc_target targetm = TARGET_INITIALIZER;