1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Analog Devices.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "insn-codes.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
41 #include "target-def.h"
43 #include "diagnostic-core.h"
48 #include "integrate.h"
50 #include "langhooks.h"
51 #include "bfin-protos.h"
53 #include "tm-constrs.h"
55 #include "basic-block.h"
56 #include "cfglayout.h"
60 /* A C structure for machine-specific, per-function data.
61 This is added to the cfun structure. */
62 struct GTY(()) machine_function
64 /* Set if we are notified by the doloop pass that a hardware loop was created. */
66 int has_hardware_loops;
68 /* Set if we create a memcpy pattern that uses loop registers. */
69 int has_loopreg_clobber;
72 /* RTX for condition code flag register and RETS register. */
73 extern GTY(()) rtx bfin_cc_rtx;
74 extern GTY(()) rtx bfin_rets_rtx;
75 rtx bfin_cc_rtx, bfin_rets_rtx;
77 int max_arg_registers = 0;
79 /* Arrays used when emitting register names. */
80 const char *short_reg_names[] = SHORT_REGISTER_NAMES;
81 const char *high_reg_names[] = HIGH_REGISTER_NAMES;
82 const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
83 const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
85 static int arg_regs[] = FUNCTION_ARG_REGISTERS;
86 static int ret_regs[] = FUNCTION_RETURN_REGISTERS;
88 /* Nonzero if -mshared-library-id was given. */
89 static int bfin_lib_id_given;
91 /* Nonzero if -fschedule-insns2 was given. We override it and
92 call the scheduler ourselves during reorg. */
93 static int bfin_flag_schedule_insns2;
95 /* Determines whether we run variable tracking in machine-dependent reorganization. */
97 static int bfin_flag_var_tracking;
100 bfin_cpu_t bfin_cpu_type = BFIN_CPU_UNKNOWN;
102 /* -msi-revision support. There are three special values:
103 -1 -msi-revision=none.
104 0xffff -msi-revision=any. */
105 int bfin_si_revision;
107 /* The workarounds enabled. */
108 unsigned int bfin_workarounds = 0;
115 unsigned int workarounds;
118 struct bfin_cpu bfin_cpus[] =
120 {"bf512", BFIN_CPU_BF512, 0x0000,
121 WA_SPECULATIVE_LOADS | WA_05000074},
123 {"bf514", BFIN_CPU_BF514, 0x0000,
124 WA_SPECULATIVE_LOADS | WA_05000074},
126 {"bf516", BFIN_CPU_BF516, 0x0000,
127 WA_SPECULATIVE_LOADS | WA_05000074},
129 {"bf518", BFIN_CPU_BF518, 0x0000,
130 WA_SPECULATIVE_LOADS | WA_05000074},
132 {"bf522", BFIN_CPU_BF522, 0x0002,
133 WA_SPECULATIVE_LOADS | WA_05000074},
134 {"bf522", BFIN_CPU_BF522, 0x0001,
135 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
136 {"bf522", BFIN_CPU_BF522, 0x0000,
137 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
139 {"bf523", BFIN_CPU_BF523, 0x0002,
140 WA_SPECULATIVE_LOADS | WA_05000074},
141 {"bf523", BFIN_CPU_BF523, 0x0001,
142 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
143 {"bf523", BFIN_CPU_BF523, 0x0000,
144 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
146 {"bf524", BFIN_CPU_BF524, 0x0002,
147 WA_SPECULATIVE_LOADS | WA_05000074},
148 {"bf524", BFIN_CPU_BF524, 0x0001,
149 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
150 {"bf524", BFIN_CPU_BF524, 0x0000,
151 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
153 {"bf525", BFIN_CPU_BF525, 0x0002,
154 WA_SPECULATIVE_LOADS | WA_05000074},
155 {"bf525", BFIN_CPU_BF525, 0x0001,
156 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
157 {"bf525", BFIN_CPU_BF525, 0x0000,
158 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
160 {"bf526", BFIN_CPU_BF526, 0x0002,
161 WA_SPECULATIVE_LOADS | WA_05000074},
162 {"bf526", BFIN_CPU_BF526, 0x0001,
163 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
164 {"bf526", BFIN_CPU_BF526, 0x0000,
165 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
167 {"bf527", BFIN_CPU_BF527, 0x0002,
168 WA_SPECULATIVE_LOADS | WA_05000074},
169 {"bf527", BFIN_CPU_BF527, 0x0001,
170 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
171 {"bf527", BFIN_CPU_BF527, 0x0000,
172 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
174 {"bf531", BFIN_CPU_BF531, 0x0006,
175 WA_SPECULATIVE_LOADS | WA_LOAD_LCREGS | WA_05000074},
176 {"bf531", BFIN_CPU_BF531, 0x0005,
177 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000283 | WA_05000315
178 | WA_LOAD_LCREGS | WA_05000074},
179 {"bf531", BFIN_CPU_BF531, 0x0004,
180 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
181 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
183 {"bf531", BFIN_CPU_BF531, 0x0003,
184 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
185 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
188 {"bf532", BFIN_CPU_BF532, 0x0006,
189 WA_SPECULATIVE_LOADS | WA_LOAD_LCREGS | WA_05000074},
190 {"bf532", BFIN_CPU_BF532, 0x0005,
191 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000283 | WA_05000315
192 | WA_LOAD_LCREGS | WA_05000074},
193 {"bf532", BFIN_CPU_BF532, 0x0004,
194 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
195 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
197 {"bf532", BFIN_CPU_BF532, 0x0003,
198 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
199 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
202 {"bf533", BFIN_CPU_BF533, 0x0006,
203 WA_SPECULATIVE_LOADS | WA_LOAD_LCREGS | WA_05000074},
204 {"bf533", BFIN_CPU_BF533, 0x0005,
205 WA_SPECULATIVE_LOADS | WA_RETS | WA_05000283 | WA_05000315
206 | WA_LOAD_LCREGS | WA_05000074},
207 {"bf533", BFIN_CPU_BF533, 0x0004,
208 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
209 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
211 {"bf533", BFIN_CPU_BF533, 0x0003,
212 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
213 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
216 {"bf534", BFIN_CPU_BF534, 0x0003,
217 WA_SPECULATIVE_LOADS | WA_RETS | WA_LOAD_LCREGS | WA_05000074},
218 {"bf534", BFIN_CPU_BF534, 0x0002,
219 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
220 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
222 {"bf534", BFIN_CPU_BF534, 0x0001,
223 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
224 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
227 {"bf536", BFIN_CPU_BF536, 0x0003,
228 WA_SPECULATIVE_LOADS | WA_RETS | WA_LOAD_LCREGS | WA_05000074},
229 {"bf536", BFIN_CPU_BF536, 0x0002,
230 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
231 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
233 {"bf536", BFIN_CPU_BF536, 0x0001,
234 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
235 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
238 {"bf537", BFIN_CPU_BF537, 0x0003,
239 WA_SPECULATIVE_LOADS | WA_RETS | WA_LOAD_LCREGS | WA_05000074},
240 {"bf537", BFIN_CPU_BF537, 0x0002,
241 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
242 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
244 {"bf537", BFIN_CPU_BF537, 0x0001,
245 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
246 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
249 {"bf538", BFIN_CPU_BF538, 0x0005,
250 WA_SPECULATIVE_LOADS | WA_LOAD_LCREGS | WA_05000074},
251 {"bf538", BFIN_CPU_BF538, 0x0004,
252 WA_SPECULATIVE_LOADS | WA_RETS | WA_LOAD_LCREGS | WA_05000074},
253 {"bf538", BFIN_CPU_BF538, 0x0003,
254 WA_SPECULATIVE_LOADS | WA_RETS
255 | WA_05000283 | WA_05000315 | WA_LOAD_LCREGS | WA_05000074},
256 {"bf538", BFIN_CPU_BF538, 0x0002,
257 WA_SPECULATIVE_LOADS | WA_RETS
258 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
261 {"bf539", BFIN_CPU_BF539, 0x0005,
262 WA_SPECULATIVE_LOADS | WA_LOAD_LCREGS | WA_05000074},
263 {"bf539", BFIN_CPU_BF539, 0x0004,
264 WA_SPECULATIVE_LOADS | WA_RETS | WA_LOAD_LCREGS | WA_05000074},
265 {"bf539", BFIN_CPU_BF539, 0x0003,
266 WA_SPECULATIVE_LOADS | WA_RETS
267 | WA_05000283 | WA_05000315 | WA_LOAD_LCREGS | WA_05000074},
268 {"bf539", BFIN_CPU_BF539, 0x0002,
269 WA_SPECULATIVE_LOADS | WA_RETS
270 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
273 {"bf542m", BFIN_CPU_BF542M, 0x0003,
274 WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
276 {"bf542", BFIN_CPU_BF542, 0x0002,
277 WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
278 {"bf542", BFIN_CPU_BF542, 0x0001,
279 WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_05000074},
280 {"bf542", BFIN_CPU_BF542, 0x0000,
281 WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_LOAD_LCREGS
284 {"bf544m", BFIN_CPU_BF544M, 0x0003,
285 WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
287 {"bf544", BFIN_CPU_BF544, 0x0002,
288 WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
289 {"bf544", BFIN_CPU_BF544, 0x0001,
290 WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_05000074},
291 {"bf544", BFIN_CPU_BF544, 0x0000,
292 WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_LOAD_LCREGS
295 {"bf547m", BFIN_CPU_BF547M, 0x0003,
296 WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
298 {"bf547", BFIN_CPU_BF547, 0x0002,
299 WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
300 {"bf547", BFIN_CPU_BF547, 0x0001,
301 WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_05000074},
302 {"bf547", BFIN_CPU_BF547, 0x0000,
303 WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_LOAD_LCREGS
306 {"bf548m", BFIN_CPU_BF548M, 0x0003,
307 WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
309 {"bf548", BFIN_CPU_BF548, 0x0002,
310 WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
311 {"bf548", BFIN_CPU_BF548, 0x0001,
312 WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_05000074},
313 {"bf548", BFIN_CPU_BF548, 0x0000,
314 WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_LOAD_LCREGS
317 {"bf549m", BFIN_CPU_BF549M, 0x0003,
318 WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
320 {"bf549", BFIN_CPU_BF549, 0x0002,
321 WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
322 {"bf549", BFIN_CPU_BF549, 0x0001,
323 WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_05000074},
324 {"bf549", BFIN_CPU_BF549, 0x0000,
325 WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_LOAD_LCREGS
328 {"bf561", BFIN_CPU_BF561, 0x0005, WA_RETS
329 | WA_05000283 | WA_05000315 | WA_LOAD_LCREGS | WA_05000074},
330 {"bf561", BFIN_CPU_BF561, 0x0003,
331 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
332 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
334 {"bf561", BFIN_CPU_BF561, 0x0002,
335 WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
336 | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
342 int splitting_for_sched, splitting_loops;
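/* The -mcpu=NAME[-SIREV] option is resolved against the bfin_cpus[] table
   above: the entry matching the CPU name and silicon revision supplies the
   bfin_workarounds bits.  A minimal sketch of that lookup, assuming the
   name/type/si_revision field names suggested by the initializers above and
   a null-named terminating entry (hypothetical helper, not part of this
   file; the real logic lives in the option-handling code):

     static unsigned int
     lookup_workarounds (const char *name, int si_rev)
     {
       const struct bfin_cpu *p;
       for (p = bfin_cpus; p->name != NULL; p++)
         if (strcmp (p->name, name) == 0 && p->si_revision == si_rev)
           return p->workarounds;
       return 0;
     }  */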
345 bfin_globalize_label (FILE *stream, const char *name)
347 fputs (".global ", stream);
348 assemble_name (stream, name);
354 output_file_start (void)
356 FILE *file = asm_out_file;
359 /* Variable tracking should be run after all optimizations which change order
360 of insns. It also needs a valid CFG. This can't be done in
361 override_options, because flag_var_tracking is finalized after that. */
363 bfin_flag_var_tracking = flag_var_tracking;
364 flag_var_tracking = 0;
366 fprintf (file, ".file \"%s\";\n", input_filename);
368 for (i = 0; arg_regs[i] >= 0; i++)
370 max_arg_registers = i; /* how many arg regs are used */
373 /* Called early in the compilation to conditionally modify
374 fixed_regs/call_used_regs. */
377 conditional_register_usage (void)
379 /* Initialize the condition code flag register rtx. */
380 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
381 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
384 /* Examine machine-dependent attributes of function type FUNTYPE and return its
385 type. See the definition of E_FUNKIND. */
388 funkind (const_tree funtype)
390 tree attrs = TYPE_ATTRIBUTES (funtype);
391 if (lookup_attribute ("interrupt_handler", attrs))
392 return INTERRUPT_HANDLER;
393 else if (lookup_attribute ("exception_handler", attrs))
394 return EXCPT_HANDLER;
395 else if (lookup_attribute ("nmi_handler", attrs))
401 /* Legitimize PIC addresses. If the address is already position-independent,
402 we return ORIG. Newly generated position-independent addresses go into a
403 reg. This is REG if nonzero, otherwise we allocate register(s) as
404 necessary. PICREG is the register holding the pointer to the PIC offset table. */
408 legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
413 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
418 if (TARGET_ID_SHARED_LIBRARY)
419 unspec = UNSPEC_MOVE_PIC;
420 else if (GET_CODE (addr) == SYMBOL_REF
421 && SYMBOL_REF_FUNCTION_P (addr))
422 unspec = UNSPEC_FUNCDESC_GOT17M4;
424 unspec = UNSPEC_MOVE_FDPIC;
428 gcc_assert (can_create_pseudo_p ());
429 reg = gen_reg_rtx (Pmode);
432 tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
433 new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));
435 emit_move_insn (reg, new_rtx);
436 if (picreg == pic_offset_table_rtx)
437 crtl->uses_pic_offset_table = 1;
441 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
445 if (GET_CODE (addr) == CONST)
447 addr = XEXP (addr, 0);
448 gcc_assert (GET_CODE (addr) == PLUS);
451 if (XEXP (addr, 0) == picreg)
456 gcc_assert (can_create_pseudo_p ());
457 reg = gen_reg_rtx (Pmode);
460 base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
461 addr = legitimize_pic_address (XEXP (addr, 1),
462 base == reg ? NULL_RTX : reg,
465 if (GET_CODE (addr) == CONST_INT)
467 gcc_assert (! reload_in_progress && ! reload_completed);
468 addr = force_reg (Pmode, addr);
471 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
473 base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
474 addr = XEXP (addr, 1);
477 return gen_rtx_PLUS (Pmode, base, addr);
483 /* Stack frame layout. */
485 /* For a given REGNO, determine whether it must be saved in the function
486 prologue. IS_INTHANDLER specifies whether we're generating a normal
487 prologue or an interrupt/exception one. */
489 must_save_p (bool is_inthandler, unsigned regno)
491 if (D_REGNO_P (regno))
493 bool is_eh_return_reg = false;
494 if (crtl->calls_eh_return)
499 unsigned test = EH_RETURN_DATA_REGNO (j);
500 if (test == INVALID_REGNUM)
503 is_eh_return_reg = true;
507 return (is_eh_return_reg
508 || (df_regs_ever_live_p (regno)
509 && !fixed_regs[regno]
510 && (is_inthandler || !call_used_regs[regno])));
512 else if (P_REGNO_P (regno))
514 return ((df_regs_ever_live_p (regno)
515 && !fixed_regs[regno]
516 && (is_inthandler || !call_used_regs[regno]))
518 && (ENABLE_WA_05000283 || ENABLE_WA_05000315)
521 && regno == PIC_OFFSET_TABLE_REGNUM
522 && (crtl->uses_pic_offset_table
523 || (TARGET_ID_SHARED_LIBRARY && !current_function_is_leaf))));
526 return ((is_inthandler || !call_used_regs[regno])
527 && (df_regs_ever_live_p (regno)
528 || (!leaf_function_p () && call_used_regs[regno])));
532 /* Compute the number of DREGS to save with a push_multiple operation.
533 This could include registers that aren't modified in the function,
534 since push_multiple only takes a range of registers.
535 If IS_INTHANDLER, then everything that is live must be saved, even
536 if normally call-clobbered.
537 If CONSECUTIVE, return the number of registers we can save in one
538 instruction with a push/pop multiple instruction. */
541 n_dregs_to_save (bool is_inthandler, bool consecutive)
546 for (i = REG_R7 + 1; i-- != REG_R0;)
548 if (must_save_p (is_inthandler, i))
550 else if (consecutive)
556 /* Like n_dregs_to_save, but compute number of PREGS to save. */
559 n_pregs_to_save (bool is_inthandler, bool consecutive)
564 for (i = REG_P5 + 1; i-- != REG_P0;)
565 if (must_save_p (is_inthandler, i))
567 else if (consecutive)
572 /* Determine if we are going to save the frame pointer in the prologue. */
575 must_save_fp_p (void)
577 return df_regs_ever_live_p (REG_FP);
580 /* Determine if we are going to save the RETS register. */
582 must_save_rets_p (void)
584 return df_regs_ever_live_p (REG_RETS);
588 stack_frame_needed_p (void)
590 /* EH return puts a new return address into the frame using an
591 address relative to the frame pointer. */
592 if (crtl->calls_eh_return)
594 return frame_pointer_needed;
597 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
598 must save all registers; this is used for interrupt handlers.
599 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
600 this for an interrupt (or exception) handler. */
603 expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
605 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
606 rtx predec = gen_rtx_MEM (SImode, predec1);
607 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
608 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
609 int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
610 int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
612 int total_consec = ndregs_consec + npregs_consec;
615 if (saveall || is_inthandler)
617 rtx insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
619 RTX_FRAME_RELATED_P (insn) = 1;
620 for (dregno = REG_LT0; dregno <= REG_LB1; dregno++)
621 if (! current_function_is_leaf
622 || cfun->machine->has_hardware_loops
623 || cfun->machine->has_loopreg_clobber
624 || (ENABLE_WA_05000257
625 && (dregno == REG_LC0 || dregno == REG_LC1)))
627 insn = emit_move_insn (predec, gen_rtx_REG (SImode, dregno));
628 RTX_FRAME_RELATED_P (insn) = 1;
632 if (total_consec != 0)
635 rtx val = GEN_INT (-total_consec * 4);
636 rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));
638 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
639 UNSPEC_PUSH_MULTIPLE);
640 XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
644 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
645 d_to_save = ndregs_consec;
646 dregno = REG_R7 + 1 - ndregs_consec;
647 pregno = REG_P5 + 1 - npregs_consec;
648 for (i = 0; i < total_consec; i++)
650 rtx memref = gen_rtx_MEM (word_mode,
651 gen_rtx_PLUS (Pmode, spreg,
652 GEN_INT (- i * 4 - 4)));
656 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
662 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
665 XVECEXP (pat, 0, i + 1) = subpat;
666 RTX_FRAME_RELATED_P (subpat) = 1;
668 insn = emit_insn (pat);
669 RTX_FRAME_RELATED_P (insn) = 1;
672 for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
674 if (must_save_p (is_inthandler, dregno))
676 rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
677 RTX_FRAME_RELATED_P (insn) = 1;
681 for (pregno = REG_P0; npregs != npregs_consec; pregno++)
683 if (must_save_p (is_inthandler, pregno))
685 rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
686 RTX_FRAME_RELATED_P (insn) = 1;
690 for (i = REG_P7 + 1; i < REG_CC; i++)
693 && (df_regs_ever_live_p (i)
694 || (!leaf_function_p () && call_used_regs[i]))))
697 if (i == REG_A0 || i == REG_A1)
698 insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
699 gen_rtx_REG (PDImode, i));
701 insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
702 RTX_FRAME_RELATED_P (insn) = 1;
706 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
707 must restore all registers; this is used for interrupt handlers.
708 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
709 this for an interrupt (or exception) handler. */
712 expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
714 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
715 rtx postinc = gen_rtx_MEM (SImode, postinc1);
717 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
718 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
719 int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
720 int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
721 int total_consec = ndregs_consec + npregs_consec;
725 /* A slightly crude technique to stop flow from trying to delete "dead" insns. */
727 MEM_VOLATILE_P (postinc) = 1;
729 for (i = REG_CC - 1; i > REG_P7; i--)
732 && (df_regs_ever_live_p (i)
733 || (!leaf_function_p () && call_used_regs[i]))))
735 if (i == REG_A0 || i == REG_A1)
737 rtx mem = gen_rtx_MEM (PDImode, postinc1);
738 MEM_VOLATILE_P (mem) = 1;
739 emit_move_insn (gen_rtx_REG (PDImode, i), mem);
742 emit_move_insn (gen_rtx_REG (SImode, i), postinc);
745 regno = REG_P5 - npregs_consec;
746 for (; npregs != npregs_consec; regno--)
748 if (must_save_p (is_inthandler, regno))
750 emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
754 regno = REG_R7 - ndregs_consec;
755 for (; ndregs != ndregs_consec; regno--)
757 if (must_save_p (is_inthandler, regno))
759 emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
764 if (total_consec != 0)
766 rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
768 = gen_rtx_SET (VOIDmode, spreg,
769 gen_rtx_PLUS (Pmode, spreg,
770 GEN_INT (total_consec * 4)));
772 if (npregs_consec > 0)
777 for (i = 0; i < total_consec; i++)
780 ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
782 rtx memref = gen_rtx_MEM (word_mode, addr);
785 XVECEXP (pat, 0, i + 1)
786 = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
788 if (npregs_consec > 0)
790 if (--npregs_consec == 0)
795 insn = emit_insn (pat);
796 RTX_FRAME_RELATED_P (insn) = 1;
798 if (saveall || is_inthandler)
800 for (regno = REG_LB1; regno >= REG_LT0; regno--)
801 if (! current_function_is_leaf
802 || cfun->machine->has_hardware_loops
803 || cfun->machine->has_loopreg_clobber
804 || (ENABLE_WA_05000257 && (regno == REG_LC0 || regno == REG_LC1)))
805 emit_move_insn (gen_rtx_REG (SImode, regno), postinc);
807 emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
811 /* Perform any actions needed for a function that is receiving a
812 variable number of arguments.
816 MODE and TYPE are the mode and type of the current parameter.
818 PRETEND_SIZE is a variable that should be set to the amount of stack
819 that must be pushed by the prologue to pretend that our caller pushed it.
822 Normally, this macro will push all remaining incoming registers on the
823 stack and set PRETEND_SIZE to the length of the registers pushed.
826 - VDSP C compiler manual (our ABI) says that a variable args function
827 should save the R0, R1 and R2 registers in the stack.
828 - The caller will always leave space on the stack for the
829 arguments that are passed in registers, so we don't have
830 to leave any extra space.
831 - Now, the va_start pointer can access all arguments from the stack. */
834 setup_incoming_varargs (CUMULATIVE_ARGS *cum,
835 enum machine_mode mode ATTRIBUTE_UNUSED,
836 tree type ATTRIBUTE_UNUSED, int *pretend_size,
845 /* The move for named arguments will be generated automatically by the
846 compiler. We need to generate the move rtx for the unnamed arguments
847 if they are in the first 3 words. We assume at least 1 named argument
848 exists, so we never generate [ARGP] = R0 here. */
850 for (i = cum->words + 1; i < max_arg_registers; i++)
852 mem = gen_rtx_MEM (Pmode,
853 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
854 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
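/* Illustrative example of the convention implemented above (user code, not
   part of the compiler; needs <stdarg.h>).  For a varargs callee such as

     int sum (int n, ...)
     {
       va_list ap;
       int i, s = 0;
       va_start (ap, n);
       for (i = 0; i < n; i++)
         s += va_arg (ap, int);
       va_end (ap);
       return s;
     }

   the named argument N arrives in R0 and is not touched here, while the loop
   above stores the remaining argument registers (R1 and R2) into the stack
   slots the caller already reserved, so va_arg can read every anonymous
   argument from memory. */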
860 /* Value should be nonzero if functions must have frame pointers.
861 Zero means the frame pointer need not be set up (and parms may
862 be accessed via the stack pointer) in functions that seem suitable. */
865 bfin_frame_pointer_required (void)
867 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
869 if (fkind != SUBROUTINE)
872 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
873 so we have to override it for non-leaf functions. */
874 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
880 /* Return the number of registers pushed during the prologue. */
883 n_regs_saved_by_prologue (void)
885 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
886 bool is_inthandler = fkind != SUBROUTINE;
887 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
888 bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
889 || (is_inthandler && !current_function_is_leaf));
890 int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
891 int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
892 int n = ndregs + npregs;
895 if (all || stack_frame_needed_p ())
899 if (must_save_fp_p ())
901 if (must_save_rets_p ())
905 if (fkind != SUBROUTINE || all)
907 /* Increment once for ASTAT. */
909 if (! current_function_is_leaf
910 || cfun->machine->has_hardware_loops
911 || cfun->machine->has_loopreg_clobber)
917 if (fkind != SUBROUTINE)
920 if (lookup_attribute ("nesting", attrs))
924 for (i = REG_P7 + 1; i < REG_CC; i++)
926 || (fkind != SUBROUTINE
927 && (df_regs_ever_live_p (i)
928 || (!leaf_function_p () && call_used_regs[i]))))
929 n += i == REG_A0 || i == REG_A1 ? 2 : 1;
934 /* Given FROM and TO register numbers, say whether this elimination is
935 allowed. Frame pointer elimination is automatically handled.
937 All other eliminations are valid. */
940 bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
942 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
945 /* Return the offset between two registers, one to be eliminated, and the other
946 its replacement, at the start of a routine. */
949 bfin_initial_elimination_offset (int from, int to)
951 HOST_WIDE_INT offset = 0;
953 if (from == ARG_POINTER_REGNUM)
954 offset = n_regs_saved_by_prologue () * 4;
956 if (to == STACK_POINTER_REGNUM)
958 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
959 offset += crtl->outgoing_args_size;
960 else if (crtl->outgoing_args_size)
961 offset += FIXED_STACK_AREA;
963 offset += get_frame_size ();
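/* Worked example of the computation above: eliminating ARG_POINTER_REGNUM to
   STACK_POINTER_REGNUM in a function that saves three registers in the
   prologue, has a 16-byte frame and 24 bytes of outgoing arguments (assuming
   24 >= FIXED_STACK_AREA) yields an offset of 3 * 4 + 24 + 16 = 52 bytes. */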
969 /* Emit code to load a constant CONSTANT into register REG; setting
970 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
971 Make sure that the insns we generate need not be split. */
974 frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
977 rtx cst = GEN_INT (constant);
979 if (constant >= -32768 && constant < 65536)
980 insn = emit_move_insn (reg, cst);
983 /* We don't call split_load_immediate here, since dwarf2out.c can get
984 confused about some of the more clever sequences it can generate. */
985 insn = emit_insn (gen_movsi_high (reg, cst));
987 RTX_FRAME_RELATED_P (insn) = 1;
988 insn = emit_insn (gen_movsi_low (reg, reg, cst));
991 RTX_FRAME_RELATED_P (insn) = 1;
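/* For example, a frame-related constant such as 0x12345678 lies outside the
   [-32768, 65536) single-move range, so the code above emits a high/low pair
   (illustrative rendering, with P1 as the destination):

     P1.H = 0x1234;
     P1.L = 0x5678;

   while a small constant such as 40 is loaded with a single move. */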
994 /* Generate efficient code to add a value to a P register.
995 Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
996 EPILOGUE_P is zero if this function is called for the prologue;
997 otherwise it's nonzero, and it's less than zero if this is for a sibcall epilogue. */
1001 add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
1006 /* Choose whether to use a sequence using a temporary register, or
1007 a sequence with multiple adds. We can add a signed 7-bit value
1008 in one instruction. */
1009 if (value > 120 || value < -120)
1017 /* For prologue or normal epilogue, P1 can be safely used
1018 as the temporary register. For a sibcall epilogue, we try to find
1019 a call-used P register, which will be restored in the epilogue.
1020 If we cannot find such a P register, we have to use an I register to help us. */
1023 if (epilogue_p >= 0)
1024 tmpreg = gen_rtx_REG (SImode, REG_P1);
1028 for (i = REG_P0; i <= REG_P5; i++)
1029 if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
1031 && i == PIC_OFFSET_TABLE_REGNUM
1032 && (crtl->uses_pic_offset_table
1033 || (TARGET_ID_SHARED_LIBRARY
1034 && ! current_function_is_leaf))))
1037 tmpreg = gen_rtx_REG (SImode, i);
1040 tmpreg = gen_rtx_REG (SImode, REG_P1);
1041 tmpreg2 = gen_rtx_REG (SImode, REG_I0);
1042 emit_move_insn (tmpreg2, tmpreg);
1047 frame_related_constant_load (tmpreg, value, TRUE);
1049 insn = emit_move_insn (tmpreg, GEN_INT (value));
1051 insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
1053 RTX_FRAME_RELATED_P (insn) = 1;
1055 if (tmpreg2 != NULL_RTX)
1056 emit_move_insn (tmpreg, tmpreg2);
1066 else if (size < -60)
1067 /* We could use -62, but that would leave the stack unaligned, so use -60 instead. */
1071 insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
1073 RTX_FRAME_RELATED_P (insn) = 1;
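/* Illustrative cases for the two strategies above: adding 64 to a P register
   exceeds the signed 7-bit range of a single add, so it is split into two
   adds (e.g. 60 + 4), while adding 200 exceeds the 120-byte limit and is
   done by loading 200 into a temporary register and adding that. */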
1079 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
1080 is too large, generate a sequence of insns that has the same effect.
1081 SPREG contains (reg:SI REG_SP). */
1084 emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
1086 HOST_WIDE_INT link_size = frame_size;
1090 if (link_size > 262140)
1093 /* Use a LINK insn with as big a constant as possible, then subtract
1094 any remaining size from the SP. */
1095 insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
1096 RTX_FRAME_RELATED_P (insn) = 1;
1098 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1100 rtx set = XVECEXP (PATTERN (insn), 0, i);
1101 gcc_assert (GET_CODE (set) == SET);
1102 RTX_FRAME_RELATED_P (set) = 1;
1105 frame_size -= link_size;
1109 /* Must use a call-clobbered PREG that isn't the static chain. */
1110 rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
1112 frame_related_constant_load (tmpreg, -frame_size, TRUE);
1113 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
1114 RTX_FRAME_RELATED_P (insn) = 1;
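/* Illustrative sizes for the logic above: a 4000-byte frame fits the LINK
   immediate directly, while a 300000-byte frame exceeds the 262140-byte
   limit, so a maximal LINK is emitted and the remaining
   300000 - 262140 = 37860 bytes are subtracted from SP through the
   temporary register. */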
1118 /* Return the number of bytes we must reserve for outgoing arguments
1119 in the current function's stack frame. */
1121 static HOST_WIDE_INT
1122 arg_area_size (void)
1124 if (crtl->outgoing_args_size)
1126 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
1127 return crtl->outgoing_args_size;
1129 return FIXED_STACK_AREA;
1134 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
1135 function must save all its registers (true only for certain interrupt handlers). */
1139 do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
1141 frame_size += arg_area_size ();
1144 || stack_frame_needed_p ()
1145 || (must_save_rets_p () && must_save_fp_p ()))
1146 emit_link_insn (spreg, frame_size);
1149 if (must_save_rets_p ())
1151 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
1152 gen_rtx_PRE_DEC (Pmode, spreg)),
1154 rtx insn = emit_insn (pat);
1155 RTX_FRAME_RELATED_P (insn) = 1;
1157 if (must_save_fp_p ())
1159 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
1160 gen_rtx_PRE_DEC (Pmode, spreg)),
1161 gen_rtx_REG (Pmode, REG_FP));
1162 rtx insn = emit_insn (pat);
1163 RTX_FRAME_RELATED_P (insn) = 1;
1165 add_to_reg (spreg, -frame_size, 1, 0);
1169 /* Like do_link, but used for epilogues to deallocate the stack frame.
1170 EPILOGUE_P is zero if this function is called for the prologue;
1171 otherwise it's nonzero, and it's less than zero if this is for
1172 a sibcall epilogue. */
1175 do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
1177 frame_size += arg_area_size ();
1179 if (stack_frame_needed_p ())
1180 emit_insn (gen_unlink ());
1183 rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
1185 add_to_reg (spreg, frame_size, 0, epilogue_p);
1186 if (all || must_save_fp_p ())
1188 rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
1189 emit_move_insn (fpreg, postinc);
1192 if (all || must_save_rets_p ())
1194 emit_move_insn (bfin_rets_rtx, postinc);
1195 emit_use (bfin_rets_rtx);
1200 /* Generate a prologue suitable for a function of kind FKIND. This is
1201 called for interrupt and exception handler prologues.
1202 SPREG contains (reg:SI REG_SP). */
1205 expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
1207 HOST_WIDE_INT frame_size = get_frame_size ();
1208 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
1209 rtx predec = gen_rtx_MEM (SImode, predec1);
1211 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1212 tree kspisusp = lookup_attribute ("kspisusp", attrs);
1216 insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
1217 RTX_FRAME_RELATED_P (insn) = 1;
1220 /* We need space on the stack in case we need to save the argument registers. */
1222 if (fkind == EXCPT_HANDLER)
1224 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
1225 RTX_FRAME_RELATED_P (insn) = 1;
1228 /* If we're calling other functions, they won't save their call-clobbered
1229 registers, so we must save everything here. */
1230 if (!current_function_is_leaf)
1232 expand_prologue_reg_save (spreg, all, true);
1234 if (ENABLE_WA_05000283 || ENABLE_WA_05000315)
1236 rtx chipid = GEN_INT (trunc_int_for_mode (0xFFC00014, SImode));
1237 rtx p5reg = gen_rtx_REG (Pmode, REG_P5);
1238 emit_insn (gen_movbi (bfin_cc_rtx, const1_rtx));
1239 emit_insn (gen_movsi_high (p5reg, chipid));
1240 emit_insn (gen_movsi_low (p5reg, p5reg, chipid));
1241 emit_insn (gen_dummy_load (p5reg, bfin_cc_rtx));
1244 if (lookup_attribute ("nesting", attrs))
1246 rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
1247 insn = emit_move_insn (predec, srcreg);
1248 RTX_FRAME_RELATED_P (insn) = 1;
1251 do_link (spreg, frame_size, all);
1253 if (fkind == EXCPT_HANDLER)
1255 rtx r0reg = gen_rtx_REG (SImode, REG_R0);
1256 rtx r1reg = gen_rtx_REG (SImode, REG_R1);
1257 rtx r2reg = gen_rtx_REG (SImode, REG_R2);
1260 insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
1261 insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
1262 insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
1263 insn = emit_move_insn (r1reg, spreg);
1264 insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
1265 insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
1269 /* Generate an epilogue suitable for a function of kind FKIND. This is
1270 called for interrupt and exception handler epilogues.
1271 SPREG contains (reg:SI REG_SP). */
1274 expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
1276 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1277 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
1278 rtx postinc = gen_rtx_MEM (SImode, postinc1);
1280 /* A slightly crude technique to stop flow from trying to delete "dead" insns. */
1282 MEM_VOLATILE_P (postinc) = 1;
1284 do_unlink (spreg, get_frame_size (), all, 1);
1286 if (lookup_attribute ("nesting", attrs))
1288 rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
1289 emit_move_insn (srcreg, postinc);
1292 /* If we're calling other functions, they won't save their call-clobbered
1293 registers, so we must save (and restore) everything here. */
1294 if (!current_function_is_leaf)
1297 expand_epilogue_reg_restore (spreg, all, true);
1299 /* Deallocate any space we left on the stack in case we needed to save the
1300 argument registers. */
1301 if (fkind == EXCPT_HANDLER)
1302 emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
1304 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, ret_regs[fkind])));
1307 /* Used while emitting the prologue to generate code to load the correct value
1308 into the PIC register, which is passed in DEST. */
1311 bfin_load_pic_reg (rtx dest)
1313 struct cgraph_local_info *i = NULL;
1316 i = cgraph_local_info (current_function_decl);
1318 /* Functions local to the translation unit don't need to reload the
1319 pic reg, since the caller always passes a usable one. */
1321 return pic_offset_table_rtx;
1323 if (bfin_lib_id_given)
1324 addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
1326 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
1327 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
1328 UNSPEC_LIBRARY_OFFSET));
1329 insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
1333 /* Generate RTL for the prologue of the current function. */
1336 bfin_expand_prologue (void)
1338 HOST_WIDE_INT frame_size = get_frame_size ();
1339 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
1340 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1341 rtx pic_reg_loaded = NULL_RTX;
1342 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1343 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
1345 if (fkind != SUBROUTINE)
1347 expand_interrupt_handler_prologue (spreg, fkind, all);
1351 if (crtl->limit_stack
1352 || (TARGET_STACK_CHECK_L1
1353 && !DECL_NO_LIMIT_STACK (current_function_decl)))
1355 HOST_WIDE_INT offset
1356 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
1357 STACK_POINTER_REGNUM);
1358 rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
1359 rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
1363 emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
1364 emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
1367 if (GET_CODE (lim) == SYMBOL_REF)
1369 if (TARGET_ID_SHARED_LIBRARY)
1371 rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
1373 pic_reg_loaded = bfin_load_pic_reg (p2reg);
1374 val = legitimize_pic_address (stack_limit_rtx, p1reg,
1376 emit_move_insn (p1reg, val);
1377 frame_related_constant_load (p2reg, offset, FALSE);
1378 emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
1383 rtx limit = plus_constant (lim, offset);
1384 emit_move_insn (p2reg, limit);
1391 emit_move_insn (p2reg, lim);
1392 add_to_reg (p2reg, offset, 0, 0);
1395 emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
1396 emit_insn (gen_trapifcc ());
1398 expand_prologue_reg_save (spreg, all, false);
1400 do_link (spreg, frame_size, all);
1402 if (TARGET_ID_SHARED_LIBRARY
1404 && (crtl->uses_pic_offset_table
1405 || !current_function_is_leaf))
1406 bfin_load_pic_reg (pic_offset_table_rtx);
1409 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1410 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
1411 eh_return pattern. SIBCALL_P is true if this is a sibcall epilogue,
1415 bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
1417 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
1418 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1419 int e = sibcall_p ? -1 : 1;
1420 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1421 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
1423 if (fkind != SUBROUTINE)
1425 expand_interrupt_handler_epilogue (spreg, fkind, all);
1429 do_unlink (spreg, get_frame_size (), all, e);
1431 expand_epilogue_reg_restore (spreg, all, false);
1433 /* Omit the return insn if this is for a sibcall. */
1438 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
1440 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, REG_RETS)));
1443 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1446 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
1447 unsigned int new_reg)
1449 /* Interrupt functions can only use registers that have already been
1450 saved by the prologue, even if they would normally be call-clobbered. */
1453 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
1454 && !df_regs_ever_live_p (new_reg))
1460 /* Return the value of the return address for the frame COUNT steps up
1461 from the current frame, after the prologue.
1462 We punt for everything but the current frame by returning const0_rtx. */
1465 bfin_return_addr_rtx (int count)
1470 return get_hard_reg_initial_val (Pmode, REG_RETS);
1474 bfin_delegitimize_address (rtx orig_x)
1478 if (GET_CODE (x) != MEM)
1482 if (GET_CODE (x) == PLUS
1483 && GET_CODE (XEXP (x, 1)) == UNSPEC
1484 && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1485 && GET_CODE (XEXP (x, 0)) == REG
1486 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1487 return XVECEXP (XEXP (x, 1), 0, 0);
1492 /* This predicate is used to compute the length of a load/store insn.
1493 OP is a MEM rtx; we return nonzero if its addressing mode requires a
1494 32-bit instruction. */
1497 effective_address_32bit_p (rtx op, enum machine_mode mode)
1499 HOST_WIDE_INT offset;
1501 mode = GET_MODE (op);
1504 if (GET_CODE (op) != PLUS)
1506 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
1507 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
1511 if (GET_CODE (XEXP (op, 1)) == UNSPEC)
1514 offset = INTVAL (XEXP (op, 1));
1516 /* All byte loads use a 16-bit offset. */
1517 if (GET_MODE_SIZE (mode) == 1)
1520 if (GET_MODE_SIZE (mode) == 4)
1522 /* Frame pointer relative loads can use a negative offset, all others
1523 are restricted to a small positive one. */
1524 if (XEXP (op, 0) == frame_pointer_rtx)
1525 return offset < -128 || offset > 60;
1526 return offset < 0 || offset > 60;
1529 /* Must be HImode now. */
1530 return offset < 0 || offset > 30;
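/* Illustrative cases for the ranges above, assuming a 4-byte SImode access:
   [FP - 100] and [P0 + 60] fit the short 16-bit form, while [P0 - 4] or
   [P0 + 64] need the 32-bit encoding; an HImode access fits the short form
   only for offsets 0..30. */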
1533 /* Returns true if X is a memory reference using an I register. */
1535 bfin_dsp_memref_p (rtx x)
1540 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1541 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1546 /* Return cost of the memory address ADDR.
1547 All addressing modes are equally cheap on the Blackfin. */
1550 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED, bool speed ATTRIBUTE_UNUSED)
1555 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1558 print_address_operand (FILE *file, rtx x)
1560 switch (GET_CODE (x))
1563 output_address (XEXP (x, 0));
1564 fprintf (file, "+");
1565 output_address (XEXP (x, 1));
1569 fprintf (file, "--");
1570 output_address (XEXP (x, 0));
1573 output_address (XEXP (x, 0));
1574 fprintf (file, "++");
1577 output_address (XEXP (x, 0));
1578 fprintf (file, "--");
1582 gcc_assert (GET_CODE (x) != MEM);
1583 print_operand (file, x, 0);
1588 /* Adding intp DImode support by Tony
1594 print_operand (FILE *file, rtx x, char code)
1596 enum machine_mode mode;
1600 if (GET_MODE (current_output_insn) == SImode)
1601 fprintf (file, " ||");
1603 fprintf (file, ";");
1607 mode = GET_MODE (x);
1612 switch (GET_CODE (x))
1615 fprintf (file, "e");
1618 fprintf (file, "ne");
1621 fprintf (file, "g");
1624 fprintf (file, "l");
1627 fprintf (file, "ge");
1630 fprintf (file, "le");
1633 fprintf (file, "g");
1636 fprintf (file, "l");
1639 fprintf (file, "ge");
1642 fprintf (file, "le");
1645 output_operand_lossage ("invalid %%j value");
1649 case 'J': /* reverse logic */
1650 switch (GET_CODE(x))
1653 fprintf (file, "ne");
1656 fprintf (file, "e");
1659 fprintf (file, "le");
1662 fprintf (file, "ge");
1665 fprintf (file, "l");
1668 fprintf (file, "g");
1671 fprintf (file, "le");
1674 fprintf (file, "ge");
1677 fprintf (file, "l");
1680 fprintf (file, "g");
1683 output_operand_lossage ("invalid %%J value");
1688 switch (GET_CODE (x))
1694 fprintf (file, "%s", short_reg_names[REGNO (x)]);
1696 output_operand_lossage ("invalid operand for code '%c'", code);
1698 else if (code == 'd')
1701 fprintf (file, "%s", high_reg_names[REGNO (x)]);
1703 output_operand_lossage ("invalid operand for code '%c'", code);
1705 else if (code == 'w')
1707 if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
1708 fprintf (file, "%s.w", reg_names[REGNO (x)]);
1710 output_operand_lossage ("invalid operand for code '%c'", code);
1712 else if (code == 'x')
1714 if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
1715 fprintf (file, "%s.x", reg_names[REGNO (x)]);
1717 output_operand_lossage ("invalid operand for code '%c'", code);
1719 else if (code == 'v')
1721 if (REGNO (x) == REG_A0)
1722 fprintf (file, "AV0");
1723 else if (REGNO (x) == REG_A1)
1724 fprintf (file, "AV1");
1726 output_operand_lossage ("invalid operand for code '%c'", code);
1728 else if (code == 'D')
1730 if (D_REGNO_P (REGNO (x)))
1731 fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
1733 output_operand_lossage ("invalid operand for code '%c'", code);
1735 else if (code == 'H')
1737 if ((mode == DImode || mode == DFmode) && REG_P (x))
1738 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
1740 output_operand_lossage ("invalid operand for code '%c'", code);
1742 else if (code == 'T')
1744 if (D_REGNO_P (REGNO (x)))
1745 fprintf (file, "%s", byte_reg_names[REGNO (x)]);
1747 output_operand_lossage ("invalid operand for code '%c'", code);
1750 fprintf (file, "%s", reg_names[REGNO (x)]);
1756 print_address_operand (file, x);
1768 fputs ("(FU)", file);
1771 fputs ("(T)", file);
1774 fputs ("(TFU)", file);
1777 fputs ("(W32)", file);
1780 fputs ("(IS)", file);
1783 fputs ("(IU)", file);
1786 fputs ("(IH)", file);
1789 fputs ("(M)", file);
1792 fputs ("(IS,M)", file);
1795 fputs ("(ISS2)", file);
1798 fputs ("(S2RND)", file);
1805 else if (code == 'b')
1807 if (INTVAL (x) == 0)
1809 else if (INTVAL (x) == 1)
1815 /* Moves to half registers with d or h modifiers always use unsigned constants. */
1817 else if (code == 'd')
1818 x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
1819 else if (code == 'h')
1820 x = GEN_INT (INTVAL (x) & 0xffff);
1821 else if (code == 'N')
1822 x = GEN_INT (-INTVAL (x));
1823 else if (code == 'X')
1824 x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
1825 else if (code == 'Y')
1826 x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
1827 else if (code == 'Z')
1828 /* Used for LINK insns. */
1829 x = GEN_INT (-8 - INTVAL (x));
1834 output_addr_const (file, x);
1838 output_operand_lossage ("invalid const_double operand");
1842 switch (XINT (x, 1))
1844 case UNSPEC_MOVE_PIC:
1845 output_addr_const (file, XVECEXP (x, 0, 0));
1846 fprintf (file, "@GOT");
1849 case UNSPEC_MOVE_FDPIC:
1850 output_addr_const (file, XVECEXP (x, 0, 0));
1851 fprintf (file, "@GOT17M4");
1854 case UNSPEC_FUNCDESC_GOT17M4:
1855 output_addr_const (file, XVECEXP (x, 0, 0));
1856 fprintf (file, "@FUNCDESC_GOT17M4");
1859 case UNSPEC_LIBRARY_OFFSET:
1860 fprintf (file, "_current_shared_library_p5_offset_");
1869 output_addr_const (file, x);
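/* Illustrative effects of the CONST_INT modifiers handled above, for an
   operand value of 0x12345678: 'd' reduces it to the upper 16 bits (0x1234)
   and 'h' to the lower 16 bits (0x5678); 'N' negates the value; 'X' prints
   log2 of the value (8 becomes 3); and 'Z' prints -8 minus the value, the
   form expected by the LINK instruction. */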
1874 /* Argument support functions. */
1876 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1877 for a call to a function whose data type is FNTYPE.
1878 For a library call, FNTYPE is 0.
1879 Per the VDSP C Compiler manual (our ABI), the
1880 first 3 words of arguments are passed in R0, R1 and R2.
1884 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1885 rtx libname ATTRIBUTE_UNUSED)
1887 static CUMULATIVE_ARGS zero_cum;
1891 /* Set up the number of registers to use for passing arguments. */
1893 cum->nregs = max_arg_registers;
1894 cum->arg_regs = arg_regs;
1896 cum->call_cookie = CALL_NORMAL;
1897 /* Check for a shortcall or longcall attribute. */
1898 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1899 cum->call_cookie |= CALL_SHORT;
1900 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1901 cum->call_cookie |= CALL_LONG;
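/* Example of the register assignment implied by the ABI comment above
   (illustrative): for a call to

     int f (int a, long long b, int c);

   A is passed in R0, B occupies R1 and R2, and C no longer fits in the three
   argument registers, so it is passed on the stack. */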
1906 /* Update the data in CUM to advance over an argument
1907 of mode MODE and data type TYPE.
1908 (TYPE is null for libcalls where that information may not be available.) */
1911 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1912 int named ATTRIBUTE_UNUSED)
1914 int count, bytes, words;
1916 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1917 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1919 cum->words += words;
1920 cum->nregs -= words;
1922 if (cum->nregs <= 0)
1925 cum->arg_regs = NULL;
1929 for (count = 1; count <= words; count++)
1936 /* Define where to put the arguments to a function.
1937 Value is zero to push the argument on the stack,
1938 or a hard register in which to store the argument.
1940 MODE is the argument's machine mode.
1941 TYPE is the data type of the argument (as a tree).
1942 This is null for libcalls where that information may
1944 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1945 the preceding args and about the function being called.
1946 NAMED is nonzero if this argument is a named parameter
1947 (otherwise it is an extra parameter matching an ellipsis). */
1950 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1951 int named ATTRIBUTE_UNUSED)
1954 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1956 if (mode == VOIDmode)
1957 /* Compute operand 2 of the call insn. */
1958 return GEN_INT (cum->call_cookie);
1964 return gen_rtx_REG (mode, *(cum->arg_regs));
1969 /* For an arg passed partly in registers and partly in memory,
1970 this is the number of bytes passed in registers.
1971 For args passed entirely in registers or entirely in memory, zero.
1973 Refer to the VDSP C Compiler manual (our ABI):
1974 the first 3 words are passed in registers. So, if an argument is larger
1975 than the registers available, it will span the registers and the stack. */
1979 bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1980 tree type ATTRIBUTE_UNUSED,
1981 bool named ATTRIBUTE_UNUSED)
1984 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1985 int bytes_left = cum->nregs * UNITS_PER_WORD;
1990 if (bytes_left == 0)
1992 if (bytes > bytes_left)
1997 /* Variable sized types are passed by reference. */
2000 bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
2001 enum machine_mode mode ATTRIBUTE_UNUSED,
2002 const_tree type, bool named ATTRIBUTE_UNUSED)
2004 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
2007 /* Decide whether a type should be returned in memory (true)
2008 or in a register (false). This is called by the macro
2009 TARGET_RETURN_IN_MEMORY. */
2012 bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2014 int size = int_size_in_bytes (type);
2015 return size > 2 * UNITS_PER_WORD || size == -1;
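/* For example (with UNITS_PER_WORD being 4 on Blackfin): a struct of two
   ints (8 bytes) is returned in registers, a struct of three ints (12 bytes)
   exceeds 2 * UNITS_PER_WORD and is returned in memory, as is any
   variable-sized type, for which int_size_in_bytes returns -1. */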
2018 /* Register in which address to store a structure value
2019 is passed to a function. */
2021 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
2022 int incoming ATTRIBUTE_UNUSED)
2024 return gen_rtx_REG (Pmode, REG_P0);
2027 /* Return true when register may be used to pass function parameters. */
2030 function_arg_regno_p (int n)
2033 for (i = 0; arg_regs[i] != -1; i++)
2034 if (n == arg_regs[i])
2039 /* Returns 1 if OP contains a symbol reference */
2042 symbolic_reference_mentioned_p (rtx op)
2044 register const char *fmt;
2047 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2050 fmt = GET_RTX_FORMAT (GET_CODE (op));
2051 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2057 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2058 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2062 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2069 /* Decide whether we can make a sibling call to a function. DECL is the
2070 declaration of the function being targeted by the call and EXP is the
2071 CALL_EXPR representing the call. */
2074 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
2075 tree exp ATTRIBUTE_UNUSED)
2077 struct cgraph_local_info *this_func, *called_func;
2078 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
2079 if (fkind != SUBROUTINE)
2081 if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
2084 /* When compiling for ID shared libraries, we can't sibcall a local function
2085 from a non-local function, because the local function thinks it does
2086 not need to reload P5 in the prologue, but the sibcall will pop P5 in the
2087 sibcall epilogue, and we end up with the wrong value in P5. */
2090 /* Not enough information. */
2093 this_func = cgraph_local_info (current_function_decl);
2094 called_func = cgraph_local_info (decl);
2095 return !called_func->local || this_func->local;
2098 /* Write a template for a trampoline to F. */
2101 bfin_asm_trampoline_template (FILE *f)
2105 fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
2106 fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
2107 fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
2108 fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
2109 fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
2110 fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
2111 fprintf (f, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
2112 fprintf (f, "\t.dw\t0x9149\n"); /* p1 = [p1] */
2113 fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
2117 fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
2118 fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
2119 fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
2120 fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
2121 fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
2125 /* Emit RTL insns to initialize the variable parts of a trampoline at
2126 M_TRAMP. FNDECL is the target function. CHAIN_VALUE is an RTX for
2127 the static chain value for the function. */
2130 bfin_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2132 rtx t1 = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
2133 rtx t2 = copy_to_reg (chain_value);
2137 emit_block_move (m_tramp, assemble_trampoline_template (),
2138 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2142 rtx a = force_reg (Pmode, plus_constant (XEXP (m_tramp, 0), 8));
2143 mem = adjust_address (m_tramp, Pmode, 0);
2144 emit_move_insn (mem, a);
2148 mem = adjust_address (m_tramp, HImode, i + 2);
2149 emit_move_insn (mem, gen_lowpart (HImode, t1));
2150 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
2151 mem = adjust_address (m_tramp, HImode, i + 6);
2152 emit_move_insn (mem, gen_lowpart (HImode, t1));
2154 mem = adjust_address (m_tramp, HImode, i + 10);
2155 emit_move_insn (mem, gen_lowpart (HImode, t2));
2156 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
2157 mem = adjust_address (m_tramp, HImode, i + 14);
2158 emit_move_insn (mem, gen_lowpart (HImode, t2));
2161 /* Emit insns to move operands[1] into operands[0]. */
2164 emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
2166 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2168 gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
2169 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2170 operands[1] = force_reg (SImode, operands[1]);
2172 operands[1] = legitimize_pic_address (operands[1], temp,
2173 TARGET_FDPIC ? OUR_FDPIC_REG
2174 : pic_offset_table_rtx);
2177 /* Expand a move operation in mode MODE. The operands are in OPERANDS.
2178 Returns true if no further code must be generated, false if the caller
2179 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
2182 expand_move (rtx *operands, enum machine_mode mode)
2184 rtx op = operands[1];
2185 if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
2186 && SYMBOLIC_CONST (op))
2187 emit_pic_move (operands, mode);
2188 else if (mode == SImode && GET_CODE (op) == CONST
2189 && GET_CODE (XEXP (op, 0)) == PLUS
2190 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
2191 && !bfin_legitimate_constant_p (op))
2193 rtx dest = operands[0];
2195 gcc_assert (!reload_in_progress && !reload_completed);
2197 op0 = force_reg (mode, XEXP (op, 0));
2199 if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
2200 op1 = force_reg (mode, op1);
2201 if (GET_CODE (dest) == MEM)
2202 dest = gen_reg_rtx (mode);
2203 emit_insn (gen_addsi3 (dest, op0, op1));
2204 if (dest == operands[0])
2208 /* Don't generate memory->memory or constant->memory moves, go through a register. */
2210 else if ((reload_in_progress | reload_completed) == 0
2211 && GET_CODE (operands[0]) == MEM
2212 && GET_CODE (operands[1]) != REG)
2213 operands[1] = force_reg (mode, operands[1]);
2217 /* Split one or more DImode RTL references into pairs of SImode
2218 references. The RTL can be REG, offsettable MEM, integer constant, or
2219 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
2220 split and "num" is its length. lo_half and hi_half are output arrays
2221 that parallel "operands". */
2224 split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
2228 rtx op = operands[num];
2230 /* simplify_subreg refuses to split volatile memory addresses,
2231 but we still have to handle it. */
2232 if (GET_CODE (op) == MEM)
2234 lo_half[num] = adjust_address (op, SImode, 0);
2235 hi_half[num] = adjust_address (op, SImode, 4);
2239 lo_half[num] = simplify_gen_subreg (SImode, op,
2240 GET_MODE (op) == VOIDmode
2241 ? DImode : GET_MODE (op), 0);
2242 hi_half[num] = simplify_gen_subreg (SImode, op,
2243 GET_MODE (op) == VOIDmode
2244 ? DImode : GET_MODE (op), 4);
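/* For example (illustrative): given a DImode register, the code above
   produces its two SImode word subregs; given an offsettable DImode MEM, it
   produces the low word at offset 0 and the high word at offset 4. */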
2250 bfin_longcall_p (rtx op, int call_cookie)
2252 gcc_assert (GET_CODE (op) == SYMBOL_REF);
2253 if (SYMBOL_REF_WEAK (op))
2255 if (call_cookie & CALL_SHORT)
2257 if (call_cookie & CALL_LONG)
2259 if (TARGET_LONG_CALLS)
2264 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
2265 COOKIE is a CONST_INT holding the call_cookie prepared by init_cumulative_args.
2266 SIBCALL is nonzero if this is a sibling call. */
2269 bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
2271 rtx use = NULL, call;
2272 rtx callee = XEXP (fnaddr, 0);
2275 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
2276 rtx retsreg = gen_rtx_REG (Pmode, REG_RETS);
2279 /* In an untyped call, we can get NULL for operand 2. */
2280 if (cookie == NULL_RTX)
2281 cookie = const0_rtx;
2283 /* Static functions and indirect calls don't need the pic register. */
2284 if (!TARGET_FDPIC && flag_pic
2285 && GET_CODE (callee) == SYMBOL_REF
2286 && !SYMBOL_REF_LOCAL_P (callee))
2287 use_reg (&use, pic_offset_table_rtx);
2291 int caller_in_sram, callee_in_sram;
2293 /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram. */
2294 caller_in_sram = callee_in_sram = 0;
2296 if (lookup_attribute ("l1_text",
2297 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
2299 else if (lookup_attribute ("l2",
2300 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
2303 if (GET_CODE (callee) == SYMBOL_REF
2304 && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee)))
2306 if (lookup_attribute
2308 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
2310 else if (lookup_attribute
2312 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
2316 if (GET_CODE (callee) != SYMBOL_REF
2317 || bfin_longcall_p (callee, INTVAL (cookie))
2318 || (GET_CODE (callee) == SYMBOL_REF
2319 && !SYMBOL_REF_LOCAL_P (callee)
2320 && TARGET_INLINE_PLT)
2321 || caller_in_sram != callee_in_sram
2322 || (caller_in_sram && callee_in_sram
2323 && (GET_CODE (callee) != SYMBOL_REF
2324 || !SYMBOL_REF_LOCAL_P (callee))))
2327 if (! address_operand (addr, Pmode))
2328 addr = force_reg (Pmode, addr);
2330 fnaddr = gen_reg_rtx (SImode);
2331 emit_insn (gen_load_funcdescsi (fnaddr, addr));
2332 fnaddr = gen_rtx_MEM (Pmode, fnaddr);
2334 picreg = gen_reg_rtx (SImode);
2335 emit_insn (gen_load_funcdescsi (picreg,
2336 plus_constant (addr, 4)));
2341 else if ((!register_no_elim_operand (callee, Pmode)
2342 && GET_CODE (callee) != SYMBOL_REF)
2343 || (GET_CODE (callee) == SYMBOL_REF
2344 && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
2345 || bfin_longcall_p (callee, INTVAL (cookie)))))
2347 callee = copy_to_mode_reg (Pmode, callee);
2348 fnaddr = gen_rtx_MEM (Pmode, callee);
2350 call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
2353 call = gen_rtx_SET (VOIDmode, retval, call);
2355 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
2357 XVECEXP (pat, 0, n++) = call;
2359 XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
2360 XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
2362 XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
2364 XVECEXP (pat, 0, n++) = gen_rtx_CLOBBER (VOIDmode, retsreg);
2365 call = emit_call_insn (pat);
2367 CALL_INSN_FUNCTION_USAGE (call) = use;
2370 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2373 hard_regno_mode_ok (int regno, enum machine_mode mode)
2375 /* Allow only dregs to store values of mode HI or QI. */
2376 enum reg_class rclass = REGNO_REG_CLASS (regno);
2381 if (mode == V2HImode)
2382 return D_REGNO_P (regno);
2383 if (rclass == CCREGS)
2384 return mode == BImode;
2385 if (mode == PDImode || mode == V2PDImode)
2386 return regno == REG_A0 || regno == REG_A1;
2388 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
2389 up with a bad register class (such as ALL_REGS) for DImode. */
2391 return regno < REG_M3;
2394 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
2397 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
2400 /* Implements target hook vector_mode_supported_p. */
2403 bfin_vector_mode_supported_p (enum machine_mode mode)
2405 return mode == V2HImode;
2408 /* Return the cost of moving data from a register in class CLASS1 to
2409 one in class CLASS2. A cost of 2 is the default. */
2412 bfin_register_move_cost (enum machine_mode mode,
2413 enum reg_class class1, enum reg_class class2)
2415 /* These need secondary reloads, so they're more expensive. */
2416 if ((class1 == CCREGS && !reg_class_subset_p (class2, DREGS))
2417 || (class2 == CCREGS && !reg_class_subset_p (class1, DREGS)))
2420 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2424 if (GET_MODE_CLASS (mode) == MODE_INT)
2426 /* Discourage trying to use the accumulators. */
2427 if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
2428 || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
2429 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
2430 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
2436 /* Return the cost of moving data of mode M between a
2437 register and memory. A value of 2 is the default; this cost is
2438 relative to those in `REGISTER_MOVE_COST'.
2440 ??? In theory L1 memory has single-cycle latency. We should add a switch
2441 that tells the compiler whether we expect to use only L1 memory for the
2442 program; it'll make the costs more accurate. */
2445 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
2446 enum reg_class rclass,
2447 int in ATTRIBUTE_UNUSED)
2449 /* Make memory accesses slightly more expensive than any register-register
2450 move. Also, penalize non-DP registers, since they need secondary
2451 reloads to load and store. */
2452 if (! reg_class_subset_p (rclass, DPREGS))
2458 /* Inform reload about cases where moving X with a mode MODE to a register in
2459 RCLASS requires an extra scratch register. Return the class needed for the
2460 scratch register. */
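/* Example (a sketch of the checks below, not an exhaustive list): reloading
   (plus (reg FP) (const_int <too large for an add>)) typically needs a
   P-register scratch, requested via CODE_FOR_reload_insi, while an
   accumulator (AREGS) reloaded from or to memory goes through
   CODE_FOR_reload_inpdi / CODE_FOR_reload_outpdi. */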
2463 bfin_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
2464 enum machine_mode mode, secondary_reload_info *sri)
2466 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
2467 in most other cases we can also use PREGS. */
2468 enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
2469 enum reg_class x_class = NO_REGS;
2470 enum rtx_code code = GET_CODE (x);
2471 enum reg_class rclass = (enum reg_class) rclass_i;
2474 x = SUBREG_REG (x), code = GET_CODE (x);
2477 int regno = REGNO (x);
2478 if (regno >= FIRST_PSEUDO_REGISTER)
2479 regno = reg_renumber[regno];
2484 x_class = REGNO_REG_CLASS (regno);
2487 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
2488 This happens as a side effect of register elimination, and we need
2489 a scratch register to do it. */
2490 if (fp_plus_const_operand (x, mode))
2492 rtx op2 = XEXP (x, 1);
2493 int large_constant_p = ! satisfies_constraint_Ks7 (op2);
2495 if (rclass == PREGS || rclass == PREGS_CLOBBERED)
2497 /* If destination is a DREG, we can do this without a scratch register
2498 if the constant is valid for an add instruction. */
2499 if ((rclass == DREGS || rclass == DPREGS)
2500 && ! large_constant_p)
2502 /* Reloading to anything other than a DREG? Use a PREG scratch register. */
2504 sri->icode = CODE_FOR_reload_insi;
2508 /* Data can usually be moved freely between registers of most classes.
2509 AREGS are an exception; they can only move to or from another register
2510 in AREGS or one in DREGS. They can also be assigned the constant 0. */
2511 if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
2512 return (rclass == DREGS || rclass == AREGS || rclass == EVEN_AREGS
2513 || rclass == ODD_AREGS
2516 if (rclass == AREGS || rclass == EVEN_AREGS || rclass == ODD_AREGS)
2520 sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;
2524 if (x != const0_rtx && x_class != DREGS)
2532 /* CCREGS can only be moved from/to DREGS. */
2533 if (rclass == CCREGS && x_class != DREGS)
2535 if (x_class == CCREGS && rclass != DREGS)
2538 /* All registers other than AREGS can load arbitrary constants. The only
2539 case that remains is MEM. */
2541 if (! reg_class_subset_p (rclass, default_class))
2542 return default_class;
2547 /* Implement TARGET_HANDLE_OPTION. */
2550 bfin_handle_option (size_t code, const char *arg, int value)
2554 case OPT_mshared_library_id_:
2555 if (value > MAX_LIBRARY_ID)
2556 error ("-mshared-library-id=%s is not between 0 and %d",
2557 arg, MAX_LIBRARY_ID);
2558 bfin_lib_id_given = 1;
2567 while ((p = bfin_cpus[i].name) != NULL)
2569 if (strncmp (arg, p, strlen (p)) == 0)
2576 error ("-mcpu=%s is not valid", arg);
2580 bfin_cpu_type = bfin_cpus[i].type;
2582 q = arg + strlen (p);
2586 bfin_si_revision = bfin_cpus[i].si_revision;
2587 bfin_workarounds |= bfin_cpus[i].workarounds;
2589 else if (strcmp (q, "-none") == 0)
2590 bfin_si_revision = -1;
2591 else if (strcmp (q, "-any") == 0)
2593 bfin_si_revision = 0xffff;
2594 while (bfin_cpus[i].type == bfin_cpu_type)
2596 bfin_workarounds |= bfin_cpus[i].workarounds;
2602 unsigned int si_major, si_minor;
2605 rev_len = strlen (q);
2607 if (sscanf (q, "-%u.%u%n", &si_major, &si_minor, &n) != 2
2609 || si_major > 0xff || si_minor > 0xff)
2611 invalid_silicon_revision:
2612 error ("-mcpu=%s has invalid silicon revision", arg);
2616 bfin_si_revision = (si_major << 8) | si_minor;
2618 while (bfin_cpus[i].type == bfin_cpu_type
2619 && bfin_cpus[i].si_revision != bfin_si_revision)
2622 if (bfin_cpus[i].type != bfin_cpu_type)
2623 goto invalid_silicon_revision;
2625 bfin_workarounds |= bfin_cpus[i].workarounds;
2636 static struct machine_function *
2637 bfin_init_machine_status (void)
2639 return ggc_alloc_cleared_machine_function ();
2642 /* Implement the macro OVERRIDE_OPTIONS. */
2645 override_options (void)
2647 /* If processor type is not specified, enable all workarounds. */
2648 if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
2652 for (i = 0; bfin_cpus[i].name != NULL; i++)
2653 bfin_workarounds |= bfin_cpus[i].workarounds;
2655 bfin_si_revision = 0xffff;
2658 if (bfin_csync_anomaly == 1)
2659 bfin_workarounds |= WA_SPECULATIVE_SYNCS;
2660 else if (bfin_csync_anomaly == 0)
2661 bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;
2663 if (bfin_specld_anomaly == 1)
2664 bfin_workarounds |= WA_SPECULATIVE_LOADS;
2665 else if (bfin_specld_anomaly == 0)
2666 bfin_workarounds &= ~WA_SPECULATIVE_LOADS;
2668 if (TARGET_OMIT_LEAF_FRAME_POINTER)
2669 flag_omit_frame_pointer = 1;
2671 /* Library identification */
2672 if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
2673 error ("-mshared-library-id= specified without -mid-shared-library");
2675 if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
2676 error ("Can't use multiple stack checking methods together.");
2678 if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
2679 error ("ID shared libraries and FD-PIC mode can't be used together.");
2681 /* Don't allow the user to specify -mid-shared-library and -msep-data
2682 together, as it makes little sense from a user's point of view... */
2683 if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
2684 error ("cannot specify both -msep-data and -mid-shared-library");
2685 /* ... internally, however, it's nearly the same. */
2686 if (TARGET_SEP_DATA)
2687 target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;
2689 if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
2692 /* There is no single unaligned SI op for PIC code. Sometimes we
2693 need to use ".4byte" and sometimes we need to use ".picptr".
2694 See bfin_assemble_integer for details. */
2696 targetm.asm_out.unaligned_op.si = 0;
2698 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2699 since we don't support it and it'll just break. */
2700 if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
2703 if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
2704 error ("-mmulticore can only be used with BF561");
2706 if (TARGET_COREA && !TARGET_MULTICORE)
2707 error ("-mcorea should be used with -mmulticore");
2709 if (TARGET_COREB && !TARGET_MULTICORE)
2710 error ("-mcoreb should be used with -mmulticore");
2712 if (TARGET_COREA && TARGET_COREB)
2713 error ("-mcorea and -mcoreb can't be used together");
2715 flag_schedule_insns = 0;
2717 /* Passes after sched2 can break the helpful TImode annotations that
2718 haifa-sched puts on every insn. Just do scheduling in reorg. */
2719 bfin_flag_schedule_insns2 = flag_schedule_insns_after_reload;
2720 flag_schedule_insns_after_reload = 0;
2722 init_machine_status = bfin_init_machine_status;
2725 /* Return the destination address of BRANCH.
2726 We need to use this instead of get_attr_length, because the
2727 cbranch_with_nops pattern conservatively sets its length to 6, and
2728 we still prefer to use shorter sequences. */
2731 branch_dest (rtx branch)
2735 rtx pat = PATTERN (branch);
2736 if (GET_CODE (pat) == PARALLEL)
2737 pat = XVECEXP (pat, 0, 0);
2738 dest = SET_SRC (pat);
2739 if (GET_CODE (dest) == IF_THEN_ELSE)
2740 dest = XEXP (dest, 1);
2741 dest = XEXP (dest, 0);
2742 dest_uid = INSN_UID (dest);
2743 return INSN_ADDRESSES (dest_uid);
2746 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2747 it's a branch that's predicted taken. */
2750 cbranch_predicted_taken_p (rtx insn)
2752 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2756 int pred_val = INTVAL (XEXP (x, 0));
2758 return pred_val >= REG_BR_PROB_BASE / 2;
2764 /* Templates for use by asm_conditional_branch. */
2766 static const char *ccbranch_templates[][3] = {
2767 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
2768 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
2769 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
2770 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
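/* How asm_conditional_branch indexes this table (a reading of the code
   below): the row is (bp << 1) | (BRF or BRT depending on whether the
   comparison code is EQ), so rows 2-3 are the predicted-taken forms; the
   column is 0 when the target is in short range, 1 when a jump.s is needed
   and 2 when a jump.l is needed. */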
2773 /* Output INSN, which is a conditional branch instruction with operands OPERANDS.
2776 We deal with the various forms of conditional branches that can be generated
2777 by bfin_reorg to prevent the hardware from doing speculative loads, by
2778 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2779 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2780 Either of these is only necessary if the branch is short, otherwise the
2781 template we use ends in an unconditional jump which flushes the pipeline anyway. */
2785 asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
2787 int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
2788 /* Note: the offset for sequences like "if cc jump; jump.[sl] offset" is
2789 measured from the start of the "if cc", not from the jump itself, so the
2790 range for jump.s is (-4094, 4096) instead of (-4096, 4094). */
2792 int len = (offset >= -1024 && offset <= 1022 ? 0
2793 : offset >= -4094 && offset <= 4096 ? 1
2795 int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
2796 int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
2797 output_asm_insn (ccbranch_templates[idx][len], operands);
2798 gcc_assert (n_nops == 0 || !bp);
2800 while (n_nops-- > 0)
2801 output_asm_insn ("nop;", NULL);
2804 /* Emit rtl for a comparison operation CMP in mode MODE. The comparison's
2805 operands are taken from CMP itself. */
2808 bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
2810 enum rtx_code code1, code2;
2811 rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
2812 rtx tem = bfin_cc_rtx;
2813 enum rtx_code code = GET_CODE (cmp);
2815 /* If we have a BImode input, then we already have a compare result, and
2816 do not need to emit another comparison. */
2817 if (GET_MODE (op0) == BImode)
2819 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
2820 tem = op0, code2 = code;
2825 /* bfin has these conditions */
2835 code1 = reverse_condition (code);
2839 emit_insn (gen_rtx_SET (VOIDmode, tem,
2840 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
2843 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
2846 /* Return nonzero iff C, interpreted as a 32-bit constant, has exactly
2847 one bit set. */
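/* For example, 0x00800000 yields nonzero (one bit set), while 0 and
   0x00c00000 yield zero; the test below is the usual c & (c - 1)
   power-of-two check. */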
2850 log2constp (unsigned HOST_WIDE_INT c)
2853 return c != 0 && (c & (c-1)) == 0;
2856 /* Returns the number of consecutive least significant zeros in the binary
2857 representation of *V.
2858 We modify *V to contain the original value arithmetically shifted right by
2859 the number of zeroes. */
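/* A worked example (illustrative): with *v == 0x50 (binary 1010000) the
   function returns 4 and leaves *v == 0x5. The sign bit is re-inserted on
   every shift, so negative values keep their sign. */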
2862 shiftr_zero (HOST_WIDE_INT *v)
2864 unsigned HOST_WIDE_INT tmp = *v;
2865 unsigned HOST_WIDE_INT sgn;
2871 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
2872 while ((tmp & 0x1) == 0 && n <= 32)
2874 tmp = (tmp >> 1) | sgn;
2881 /* After reload, split the load of an immediate constant. OPERANDS are the
2882 operands of the movsi_insn pattern which we are splitting. We return
2883 nonzero if we emitted a sequence to load the constant, zero if we emitted
2884 nothing because we want to use the splitter's default sequence. */
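/* A worked example (illustrative, covering the single-bit-ish case below):
   for operands[1] == 0x00140000, shiftr_zero gives shifted == 5 and
   num_zero == 18, so for a D-register destination we emit the equivalent of
       R0 = 5;
       R0 <<= 18;   */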
2887 split_load_immediate (rtx operands[])
2889 HOST_WIDE_INT val = INTVAL (operands[1]);
2891 HOST_WIDE_INT shifted = val;
2892 HOST_WIDE_INT shifted_compl = ~val;
2893 int num_zero = shiftr_zero (&shifted);
2894 int num_compl_zero = shiftr_zero (&shifted_compl);
2895 unsigned int regno = REGNO (operands[0]);
2897 /* This case takes care of single-bit set/clear constants, which we could
2898 also implement with BITSET/BITCLR. */
2900 && shifted >= -32768 && shifted < 65536
2901 && (D_REGNO_P (regno)
2902 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2904 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2905 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2910 tmp |= -(tmp & 0x8000);
2912 /* If the high word has one bit set or clear, try to use a bit operation. */
2913 if (D_REGNO_P (regno))
2915 if (log2constp (val & 0xFFFF0000))
2917 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2918 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2921 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2923 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2924 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2928 if (D_REGNO_P (regno))
2930 if (tmp >= -64 && tmp <= 63)
2932 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2933 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2937 if ((val & 0xFFFF0000) == 0)
2939 emit_insn (gen_movsi (operands[0], const0_rtx));
2940 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2944 if ((val & 0xFFFF0000) == 0xFFFF0000)
2946 emit_insn (gen_movsi (operands[0], constm1_rtx));
2947 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2952 /* Need DREGs for the remaining case. */
2957 && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
2959 /* If optimizing for size, generate a sequence that has more instructions but is shorter. */
2961 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2962 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2963 GEN_INT (num_compl_zero)));
2964 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2970 /* Return true if VALUE is a legitimate constant offset for a memory operand
2971 of mode MODE. Return false if not. */
2974 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2976 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2977 int sz = GET_MODE_SIZE (mode);
2978 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2979 /* The usual offsettable_memref machinery doesn't work so well for this
2980 port, so we deal with the problem here. */
2981 if (value > 0 && sz == 8)
2983 return (v & ~(0x7fff << shift)) == 0;
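/* For example (a reading of the check above): with HImode, shift == 1, so an
   offset is accepted when its magnitude is even and fits in 0x7fff << 1,
   which corresponds to the W [ Preg + uimm16m2 ] form listed further down. */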
2987 bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2988 enum rtx_code outer_code)
2991 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2993 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
2996 /* Recognize an RTL expression that is a valid memory address for an
2997 instruction. The MODE argument is the machine mode for the MEM expression
2998 that wants to use this address.
3000 Blackfin addressing modes are as follows:
3006 W [ Preg + uimm16m2 ]
3015 bfin_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
3017 switch (GET_CODE (x)) {
3019 if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
3023 if (REG_P (XEXP (x, 0))
3024 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
3025 && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
3026 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3027 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
3032 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
3033 && REG_P (XEXP (x, 0))
3034 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
3037 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
3038 && XEXP (x, 0) == stack_pointer_rtx
3039 && REG_P (XEXP (x, 0))
3040 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
3049 /* Decide whether we can force certain constants to memory. If we
3050 decide we can't, the caller should be able to cope with it in another way. */
3054 bfin_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
3056 /* We have only one class of non-legitimate constants, and our movsi
3057 expander knows how to handle them. Dropping these constants into the
3058 data section would only shift the problem - we'd still get relocs
3059 outside the object, in the data section rather than the text section. */
3063 /* Ensure that for any constant of the form symbol + offset, the offset
3064 remains within the object. Any other constants are ok.
3065 This ensures that flat binaries never have to deal with relocations
3066 crossing section boundaries. */
3069 bfin_legitimate_constant_p (rtx x)
3072 HOST_WIDE_INT offset;
3074 if (GET_CODE (x) != CONST)
3078 gcc_assert (GET_CODE (x) == PLUS);
3082 if (GET_CODE (sym) != SYMBOL_REF
3083 || GET_CODE (x) != CONST_INT)
3085 offset = INTVAL (x);
3087 if (SYMBOL_REF_DECL (sym) == 0)
3090 || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
3097 bfin_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed)
3099 int cost2 = COSTS_N_INSNS (1);
3105 if (outer_code == SET || outer_code == PLUS)
3106 *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
3107 else if (outer_code == AND)
3108 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
3109 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
3110 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
3111 else if (outer_code == LEU || outer_code == LTU)
3112 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
3113 else if (outer_code == MULT)
3114 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
3115 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
3117 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
3118 || outer_code == LSHIFTRT)
3119 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
3120 else if (outer_code == IOR || outer_code == XOR)
3121 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
3130 *total = COSTS_N_INSNS (2);
3136 if (GET_MODE (x) == SImode)
3138 if (GET_CODE (op0) == MULT
3139 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
3141 HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
3142 if (val == 2 || val == 4)
3145 *total += rtx_cost (XEXP (op0, 0), outer_code, speed);
3146 *total += rtx_cost (op1, outer_code, speed);
3151 if (GET_CODE (op0) != REG
3152 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
3153 *total += rtx_cost (op0, SET, speed);
3154 #if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
3155 towards creating too many induction variables. */
3156 if (!reg_or_7bit_operand (op1, SImode))
3157 *total += rtx_cost (op1, SET, speed);
3160 else if (GET_MODE (x) == DImode)
3163 if (GET_CODE (op1) != CONST_INT
3164 || !satisfies_constraint_Ks7 (op1))
3165 *total += rtx_cost (op1, PLUS, speed);
3166 if (GET_CODE (op0) != REG
3167 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
3168 *total += rtx_cost (op0, PLUS, speed);
3173 if (GET_MODE (x) == DImode)
3182 if (GET_MODE (x) == DImode)
3189 if (GET_CODE (op0) != REG
3190 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
3191 *total += rtx_cost (op0, code, speed);
3201 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
3204 if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
3205 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
3206 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
3207 || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
3214 if (GET_CODE (op0) != REG
3215 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
3216 *total += rtx_cost (op0, code, speed);
3218 if (GET_MODE (x) == DImode)
3224 if (GET_MODE (x) != SImode)
3229 if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
3230 *total += rtx_cost (XEXP (x, 1), code, speed);
3234 if (! regorlog2_operand (XEXP (x, 1), SImode))
3235 *total += rtx_cost (XEXP (x, 1), code, speed);
3242 if (outer_code == SET
3243 && XEXP (x, 1) == const1_rtx
3244 && GET_CODE (XEXP (x, 2)) == CONST_INT)
3260 if (GET_CODE (op0) == GET_CODE (op1)
3261 && (GET_CODE (op0) == ZERO_EXTEND
3262 || GET_CODE (op0) == SIGN_EXTEND))
3264 *total = COSTS_N_INSNS (1);
3265 op0 = XEXP (op0, 0);
3266 op1 = XEXP (op1, 0);
3269 *total = COSTS_N_INSNS (1);
3271 *total = COSTS_N_INSNS (3);
3273 if (GET_CODE (op0) != REG
3274 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
3275 *total += rtx_cost (op0, MULT, speed);
3276 if (GET_CODE (op1) != REG
3277 && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
3278 *total += rtx_cost (op1, MULT, speed);
3284 *total = COSTS_N_INSNS (32);
3289 if (outer_code == SET)
3298 /* Used for communication between {push,pop}_multiple_operation (which
3299 we use not only as a predicate) and the corresponding output functions. */
3300 static int first_preg_to_save, first_dreg_to_save;
3301 static int n_regs_to_save;
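/* Elements 1 .. n-2 of the PARALLEL accepted by push_multiple_operation are
   expected to look roughly like (a sketch inferred from the checks below):
       (set (mem (plus (reg SP) (const_int -4))) (reg ...))
       (set (mem (plus (reg SP) (const_int -8))) (reg ...))
   i.e. stores of consecutive D and/or P registers at decreasing offsets from
   SP; first_dreg_to_save and first_preg_to_save record the lowest-numbered
   register of each kind that is saved. */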
3304 push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3306 int lastdreg = 8, lastpreg = 6;
3309 first_preg_to_save = lastpreg;
3310 first_dreg_to_save = lastdreg;
3311 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
3313 rtx t = XVECEXP (op, 0, i);
3317 if (GET_CODE (t) != SET)
3321 dest = SET_DEST (t);
3322 if (GET_CODE (dest) != MEM || ! REG_P (src))
3324 dest = XEXP (dest, 0);
3325 if (GET_CODE (dest) != PLUS
3326 || ! REG_P (XEXP (dest, 0))
3327 || REGNO (XEXP (dest, 0)) != REG_SP
3328 || GET_CODE (XEXP (dest, 1)) != CONST_INT
3329 || INTVAL (XEXP (dest, 1)) != -i * 4)
3332 regno = REGNO (src);
3335 if (D_REGNO_P (regno))
3338 first_dreg_to_save = lastdreg = regno - REG_R0;
3340 else if (regno >= REG_P0 && regno <= REG_P7)
3343 first_preg_to_save = lastpreg = regno - REG_P0;
3353 if (regno >= REG_P0 && regno <= REG_P7)
3356 first_preg_to_save = lastpreg = regno - REG_P0;
3358 else if (regno != REG_R0 + lastdreg + 1)
3363 else if (group == 2)
3365 if (regno != REG_P0 + lastpreg + 1)
3370 n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
3375 pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3377 int lastdreg = 8, lastpreg = 6;
3380 for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
3382 rtx t = XVECEXP (op, 0, i);
3386 if (GET_CODE (t) != SET)
3390 dest = SET_DEST (t);
3391 if (GET_CODE (src) != MEM || ! REG_P (dest))
3393 src = XEXP (src, 0);
3397 if (! REG_P (src) || REGNO (src) != REG_SP)
3400 else if (GET_CODE (src) != PLUS
3401 || ! REG_P (XEXP (src, 0))
3402 || REGNO (XEXP (src, 0)) != REG_SP
3403 || GET_CODE (XEXP (src, 1)) != CONST_INT
3404 || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
3407 regno = REGNO (dest);
3410 if (regno == REG_R7)
3415 else if (regno != REG_P0 + lastpreg - 1)
3420 else if (group == 1)
3422 if (regno != REG_R0 + lastdreg - 1)
3428 first_dreg_to_save = lastdreg;
3429 first_preg_to_save = lastpreg;
3430 n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
3434 /* Emit assembly code for one multi-register push described by INSN, with
3435 operands in OPERANDS. */
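/* For example (instantiating the sprintf formats below): with
   first_dreg_to_save == 4 and first_preg_to_save == 3, the emitted text is
   "[--sp] = ( r7:4, p5:3 );". */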
3438 output_push_multiple (rtx insn, rtx *operands)
3443 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3444 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
3447 if (first_dreg_to_save == 8)
3448 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
3449 else if (first_preg_to_save == 6)
3450 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
3452 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
3453 first_dreg_to_save, first_preg_to_save);
3455 output_asm_insn (buf, operands);
3458 /* Emit assembly code for one multi-register pop described by INSN, with
3459 operands in OPERANDS. */
3462 output_pop_multiple (rtx insn, rtx *operands)
3467 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3468 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
3471 if (first_dreg_to_save == 8)
3472 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
3473 else if (first_preg_to_save == 6)
3474 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
3476 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
3477 first_dreg_to_save, first_preg_to_save);
3479 output_asm_insn (buf, operands);
3482 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3485 single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
3487 rtx scratch = gen_reg_rtx (mode);
3490 srcmem = adjust_address_nv (src, mode, offset);
3491 dstmem = adjust_address_nv (dst, mode, offset);
3492 emit_move_insn (scratch, srcmem);
3493 emit_move_insn (dstmem, scratch);
3496 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3497 alignment ALIGN_EXP. Return true if successful, false if we should fall
3498 back on a different method. */
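/* A worked example (illustrative): a constant 6-byte copy with 4-byte
   alignment is expanded below as one SImode move at offset 0 followed by one
   HImode move at offset 4; larger constant counts use the rep_movsi /
   rep_movhi looping patterns instead. */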
3501 bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
3503 rtx srcreg, destreg, countreg;
3504 HOST_WIDE_INT align = 0;
3505 unsigned HOST_WIDE_INT count = 0;
3507 if (GET_CODE (align_exp) == CONST_INT)
3508 align = INTVAL (align_exp);
3509 if (GET_CODE (count_exp) == CONST_INT)
3511 count = INTVAL (count_exp);
3513 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
3518 /* If optimizing for size, only do single copies inline. */
3521 if (count == 2 && align < 2)
3523 if (count == 4 && align < 4)
3525 if (count != 1 && count != 2 && count != 4)
3528 if (align < 2 && count != 1)
3531 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
3532 if (destreg != XEXP (dst, 0))
3533 dst = replace_equiv_address_nv (dst, destreg);
3534 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
3535 if (srcreg != XEXP (src, 0))
3536 src = replace_equiv_address_nv (src, srcreg);
3538 if (count != 0 && align >= 2)
3540 unsigned HOST_WIDE_INT offset = 0;
3544 if ((count & ~3) == 4)
3546 single_move_for_movmem (dst, src, SImode, offset);
3549 else if (count & ~3)
3551 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
3552 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
3554 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
3555 cfun->machine->has_loopreg_clobber = true;
3559 single_move_for_movmem (dst, src, HImode, offset);
3565 if ((count & ~1) == 2)
3567 single_move_for_movmem (dst, src, HImode, offset);
3570 else if (count & ~1)
3572 HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
3573 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
3575 emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
3576 cfun->machine->has_loopreg_clobber = true;
3581 single_move_for_movmem (dst, src, QImode, offset);
3588 /* Compute the alignment for a local variable.
3589 TYPE is the data type, and ALIGN is the alignment that
3590 the object would ordinarily have. The value of this macro is used
3591 instead of that alignment to align the object. */
3594 bfin_local_alignment (tree type, int align)
3596 /* Increasing the alignment of (relatively) big types allows the builtin
3597 memcpy to use 32-bit loads/stores. */
3598 if (TYPE_SIZE (type)
3599 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3600 && (TREE_INT_CST_LOW (TYPE_SIZE (type)) > 8
3601 || TREE_INT_CST_HIGH (TYPE_SIZE (type))) && align < 32)
3606 /* Implement TARGET_SCHED_ISSUE_RATE. */
3609 bfin_issue_rate (void)
3615 bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
3617 enum attr_type insn_type, dep_insn_type;
3618 int dep_insn_code_number;
3620 /* Anti and output dependencies have zero cost. */
3621 if (REG_NOTE_KIND (link) != 0)
3624 dep_insn_code_number = recog_memoized (dep_insn);
3626 /* If we can't recognize the insns, we can't really do anything. */
3627 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
3630 insn_type = get_attr_type (insn);
3631 dep_insn_type = get_attr_type (dep_insn);
3633 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
3635 rtx pat = PATTERN (dep_insn);
3636 if (GET_CODE (pat) == PARALLEL)
3637 pat = XVECEXP (pat, 0, 0);
3638 rtx dest = SET_DEST (pat);
3639 rtx src = SET_SRC (pat);
3640 if (! ADDRESS_REGNO_P (REGNO (dest))
3641 || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
3643 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
3649 /* This function acts like NEXT_INSN, but is aware of three-insn bundles and
3650 skips all subsequent parallel instructions if INSN is the start of such a bundle. */
3653 find_next_insn_start (rtx insn)
3655 if (GET_MODE (insn) == SImode)
3657 while (GET_MODE (insn) != QImode)
3658 insn = NEXT_INSN (insn);
3660 return NEXT_INSN (insn);
3663 /* This function acts like PREV_INSN, but is aware of three-insn bundles:
3664 it returns the first insn of the bundle (or single insn) preceding INSN. */
3667 find_prev_insn_start (rtx insn)
3669 insn = PREV_INSN (insn);
3670 gcc_assert (GET_MODE (insn) != SImode);
3671 if (GET_MODE (insn) == QImode)
3673 while (GET_MODE (PREV_INSN (insn)) == SImode)
3674 insn = PREV_INSN (insn);
3679 /* Increment the counter for the number of loop instructions in the
3680 current function. */
3683 bfin_hardware_loop (void)
3685 cfun->machine->has_hardware_loops++;
3688 /* Maximum loop nesting depth. */
3689 #define MAX_LOOP_DEPTH 2
3691 /* Maximum size of a loop. */
3692 #define MAX_LOOP_LENGTH 2042
3694 /* Maximum distance of the LSETUP instruction from the loop start. */
3695 #define MAX_LSETUP_DISTANCE 30
3697 /* We need to keep a vector of loops */
3698 typedef struct loop_info *loop_info;
3699 DEF_VEC_P (loop_info);
3700 DEF_VEC_ALLOC_P (loop_info,heap);
3702 /* Information about a loop we have found (or are in the process of discovering). */
3704 struct GTY (()) loop_info
3706 /* loop number, for dumps */
3709 /* All edges that jump into and out of the loop. */
3710 VEC(edge,gc) *incoming;
3712 /* We can handle two cases: all incoming edges have the same destination
3713 block, or all incoming edges have the same source block. These two
3714 members are set to the common source or destination we found, or NULL
3715 if different blocks were found. If both are NULL the loop can't be optimized. */
3717 basic_block incoming_src;
3718 basic_block incoming_dest;
3720 /* First block in the loop. This is the one branched to by the loop_end insn. */
3724 /* Last block in the loop (the one with the loop_end insn). */
3727 /* The successor block of the loop. This is the one the loop_end insn falls through to. */
3729 basic_block successor;
3731 /* The last instruction in the tail. */
3734 /* The loop_end insn. */
3737 /* The iteration register. */
3740 /* The new label placed at the beginning of the loop. */
3743 /* The new label placed at the end of the loop. */
3746 /* The length of the loop. */
3749 /* The nesting depth of the loop. */
3752 /* Nonzero if we can't optimize this loop. */
3755 /* True if we have visited this loop. */
3758 /* True if this loop body clobbers any of LC0, LT0, or LB0. */
3761 /* True if this loop body clobbers any of LC1, LT1, or LB1. */
3764 /* Next loop in the graph. */
3765 struct loop_info *next;
3767 /* Immediate outer loop of this loop. */
3768 struct loop_info *outer;
3770 /* Vector of blocks only within the loop, including those within inner loops. */
3772 VEC (basic_block,heap) *blocks;
3774 /* Same information in a bitmap. */
3775 bitmap block_bitmap;
3777 /* Vector of inner loops within this loop */
3778 VEC (loop_info,heap) *loops;
3782 bfin_dump_loops (loop_info loops)
3786 for (loop = loops; loop; loop = loop->next)
3792 fprintf (dump_file, ";; loop %d: ", loop->loop_no);
3794 fprintf (dump_file, "(bad) ");
3795 fprintf (dump_file, "{head:%d, depth:%d}", loop->head->index, loop->depth);
3797 fprintf (dump_file, " blocks: [ ");
3798 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
3799 fprintf (dump_file, "%d ", b->index);
3800 fprintf (dump_file, "] ");
3802 fprintf (dump_file, " inner loops: [ ");
3803 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, i); ix++)
3804 fprintf (dump_file, "%d ", i->loop_no);
3805 fprintf (dump_file, "]\n");
3807 fprintf (dump_file, "\n");
3810 /* Scan the blocks of LOOP (and its inferiors) looking for basic block
3811 BB. Return true if we find it. */
3814 bfin_bb_in_loop (loop_info loop, basic_block bb)
3816 return bitmap_bit_p (loop->block_bitmap, bb->index);
3819 /* Scan the blocks of LOOP (and its inferiors) looking for uses of
3820 REG. Return true if we find any. Don't count the loop's loop_end
3821 insn if it matches LOOP_END. */
3824 bfin_scan_loop (loop_info loop, rtx reg, rtx loop_end)
3829 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
3833 for (insn = BB_HEAD (bb);
3834 insn != NEXT_INSN (BB_END (bb));
3835 insn = NEXT_INSN (insn))
3839 if (insn == loop_end)
3841 if (reg_mentioned_p (reg, PATTERN (insn)))
3848 /* Estimate the length of INSN conservatively. */
3851 length_for_loop (rtx insn)
3854 if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
3856 if (ENABLE_WA_SPECULATIVE_SYNCS)
3858 else if (ENABLE_WA_SPECULATIVE_LOADS)
3861 else if (LABEL_P (insn))
3863 if (ENABLE_WA_SPECULATIVE_SYNCS)
3867 if (NONDEBUG_INSN_P (insn))
3868 length += get_attr_length (insn);
3873 /* Optimize LOOP. */
3876 bfin_optimize_loop (loop_info loop)
3880 rtx insn, last_insn;
3881 rtx loop_init, start_label, end_label;
3882 rtx reg_lc0, reg_lc1, reg_lt0, reg_lt1, reg_lb0, reg_lb1;
3883 rtx iter_reg, scratchreg, scratch_init, scratch_init_insn;
3884 rtx lc_reg, lt_reg, lb_reg;
3888 int inner_depth = 0;
3898 fprintf (dump_file, ";; loop %d bad when found\n", loop->loop_no);
3902 /* Every loop contains in its list of inner loops every loop nested inside
3903 it, even if there are intermediate loops. This works because we're doing
3904 a depth-first search here and never visit a loop more than once. */
3905 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
3907 bfin_optimize_loop (inner);
3909 if (!inner->bad && inner_depth < inner->depth)
3911 inner_depth = inner->depth;
3913 loop->clobber_loop0 |= inner->clobber_loop0;
3914 loop->clobber_loop1 |= inner->clobber_loop1;
3918 loop->depth = inner_depth + 1;
3919 if (loop->depth > MAX_LOOP_DEPTH)
3922 fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
3926 /* Get the loop iteration register. */
3927 iter_reg = loop->iter_reg;
3929 if (!REG_P (iter_reg))
3932 fprintf (dump_file, ";; loop %d iteration count not in a register\n",
3936 scratchreg = NULL_RTX;
3937 scratch_init = iter_reg;
3938 scratch_init_insn = NULL_RTX;
3939 if (!PREG_P (iter_reg) && loop->incoming_src)
3941 basic_block bb_in = loop->incoming_src;
3943 for (i = REG_P0; i <= REG_P5; i++)
3944 if ((df_regs_ever_live_p (i)
3945 || (funkind (TREE_TYPE (current_function_decl)) == SUBROUTINE
3946 && call_used_regs[i]))
3947 && !REGNO_REG_SET_P (df_get_live_out (bb_in), i))
3949 scratchreg = gen_rtx_REG (SImode, i);
3952 for (insn = BB_END (bb_in); insn != BB_HEAD (bb_in);
3953 insn = PREV_INSN (insn))
3956 if (NOTE_P (insn) || BARRIER_P (insn))
3958 set = single_set (insn);
3959 if (set && rtx_equal_p (SET_DEST (set), iter_reg))
3961 if (CONSTANT_P (SET_SRC (set)))
3963 scratch_init = SET_SRC (set);
3964 scratch_init_insn = insn;
3968 else if (reg_mentioned_p (iter_reg, PATTERN (insn)))
3973 if (loop->incoming_src)
3975 /* Make sure the predecessor is before the loop start label, as required by
3976 the LSETUP instruction. */
3978 insn = BB_END (loop->incoming_src);
3979 /* If we have to insert the LSETUP before a jump, count that jump in the length. */
3981 if (VEC_length (edge, loop->incoming) > 1
3982 || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
3984 gcc_assert (JUMP_P (insn));
3985 insn = PREV_INSN (insn);
3988 for (; insn && insn != loop->start_label; insn = NEXT_INSN (insn))
3989 length += length_for_loop (insn);
3994 fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
3999 /* Account for the pop of a scratch register where necessary. */
4000 if (!PREG_P (iter_reg) && scratchreg == NULL_RTX
4001 && ENABLE_WA_LOAD_LCREGS)
4004 if (length > MAX_LSETUP_DISTANCE)
4007 fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
4012 /* Check if start_label appears before loop_end and calculate the
4013 offset between them. We calculate the length of instructions conservatively. */
4016 for (insn = loop->start_label;
4017 insn && insn != loop->loop_end;
4018 insn = NEXT_INSN (insn))
4019 length += length_for_loop (insn);
4024 fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
4029 loop->length = length;
4030 if (loop->length > MAX_LOOP_LENGTH)
4033 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
4037 /* Scan all the blocks to make sure they don't use iter_reg. */
4038 if (bfin_scan_loop (loop, iter_reg, loop->loop_end))
4041 fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
4045 /* Scan all the insns to see if the loop body clobbers
4046 any hardware loop registers. */
4048 reg_lc0 = gen_rtx_REG (SImode, REG_LC0);
4049 reg_lc1 = gen_rtx_REG (SImode, REG_LC1);
4050 reg_lt0 = gen_rtx_REG (SImode, REG_LT0);
4051 reg_lt1 = gen_rtx_REG (SImode, REG_LT1);
4052 reg_lb0 = gen_rtx_REG (SImode, REG_LB0);
4053 reg_lb1 = gen_rtx_REG (SImode, REG_LB1);
4055 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
4059 for (insn = BB_HEAD (bb);
4060 insn != NEXT_INSN (BB_END (bb));
4061 insn = NEXT_INSN (insn))
4066 if (reg_set_p (reg_lc0, insn)
4067 || reg_set_p (reg_lt0, insn)
4068 || reg_set_p (reg_lb0, insn))
4069 loop->clobber_loop0 = 1;
4071 if (reg_set_p (reg_lc1, insn)
4072 || reg_set_p (reg_lt1, insn)
4073 || reg_set_p (reg_lb1, insn))
4074 loop->clobber_loop1 |= 1;
4078 if ((loop->clobber_loop0 && loop->clobber_loop1)
4079 || (loop->depth == MAX_LOOP_DEPTH && loop->clobber_loop0))
4081 loop->depth = MAX_LOOP_DEPTH + 1;
4083 fprintf (dump_file, ";; loop %d no loop reg available\n",
4088 /* There should be an instruction before the loop_end instruction
4089 in the same basic block. And the instruction must not be
4091 - CONDITIONAL BRANCH
4095 - Returns (RTS, RTN, etc.) */
4098 last_insn = find_prev_insn_start (loop->loop_end);
4102 for (; last_insn != BB_HEAD (bb);
4103 last_insn = find_prev_insn_start (last_insn))
4104 if (NONDEBUG_INSN_P (last_insn))
4107 if (last_insn != BB_HEAD (bb))
4110 if (single_pred_p (bb)
4111 && single_pred_edge (bb)->flags & EDGE_FALLTHRU
4112 && single_pred (bb) != ENTRY_BLOCK_PTR)
4114 bb = single_pred (bb);
4115 last_insn = BB_END (bb);
4120 last_insn = NULL_RTX;
4128 fprintf (dump_file, ";; loop %d has no last instruction\n",
4133 if (JUMP_P (last_insn) && !any_condjump_p (last_insn))
4136 fprintf (dump_file, ";; loop %d has bad last instruction\n",
4140 /* In all other cases, try to replace a bad last insn with a nop. */
4141 else if (JUMP_P (last_insn)
4142 || CALL_P (last_insn)
4143 || get_attr_type (last_insn) == TYPE_SYNC
4144 || get_attr_type (last_insn) == TYPE_CALL
4145 || get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI
4146 || recog_memoized (last_insn) == CODE_FOR_return_internal
4147 || GET_CODE (PATTERN (last_insn)) == ASM_INPUT
4148 || asm_noperands (PATTERN (last_insn)) >= 0)
4150 if (loop->length + 2 > MAX_LOOP_LENGTH)
4153 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
4157 fprintf (dump_file, ";; loop %d has bad last insn; replace with nop\n",
4160 last_insn = emit_insn_after (gen_forced_nop (), last_insn);
4163 loop->last_insn = last_insn;
4165 /* The loop is good for replacement. */
4166 start_label = loop->start_label;
4167 end_label = gen_label_rtx ();
4168 iter_reg = loop->iter_reg;
4170 if (loop->depth == 1 && !loop->clobber_loop1)
4175 loop->clobber_loop1 = 1;
4182 loop->clobber_loop0 = 1;
4185 loop->end_label = end_label;
4187 /* Create a sequence containing the loop setup. */
4190 /* LSETUP only accepts P registers. If we have one, we can use it,
4191 otherwise there are several ways of working around the problem.
4192 If we're not affected by anomaly 312, we can load the LC register
4193 from any iteration register, and use LSETUP without initialization.
4194 If we've found a P scratch register that's not live here, we can
4195 instead copy the iter_reg into that and use an initializing LSETUP.
4196 If all else fails, push and pop P0 and use it as a scratch. */
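/* In rough pseudo-assembly the four alternatives below come out as
   (illustrative only):
     1)  LSETUP (lt, lb) LCx = Piter;
     2)  LCx = Riter;  LSETUP (lt, lb) LCx;
     3)  Pscratch = <count>;  LSETUP (lt, lb) LCx = Pscratch;
     4)  [--SP] = P0;  P0 = <count>;  LSETUP (lt, lb) LCx = P0;  P0 = [SP++];  */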
4197 if (P_REGNO_P (REGNO (iter_reg)))
4199 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
4202 seq_end = emit_insn (loop_init);
4204 else if (!ENABLE_WA_LOAD_LCREGS && DPREG_P (iter_reg))
4206 emit_insn (gen_movsi (lc_reg, iter_reg));
4207 loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
4210 seq_end = emit_insn (loop_init);
4212 else if (scratchreg != NULL_RTX)
4214 emit_insn (gen_movsi (scratchreg, scratch_init));
4215 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
4217 lc_reg, scratchreg);
4218 seq_end = emit_insn (loop_init);
4219 if (scratch_init_insn != NULL_RTX)
4220 delete_insn (scratch_init_insn);
4224 rtx p0reg = gen_rtx_REG (SImode, REG_P0);
4225 rtx push = gen_frame_mem (SImode,
4226 gen_rtx_PRE_DEC (SImode, stack_pointer_rtx));
4227 rtx pop = gen_frame_mem (SImode,
4228 gen_rtx_POST_INC (SImode, stack_pointer_rtx));
4229 emit_insn (gen_movsi (push, p0reg));
4230 emit_insn (gen_movsi (p0reg, scratch_init));
4231 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
4234 emit_insn (loop_init);
4235 seq_end = emit_insn (gen_movsi (p0reg, pop));
4236 if (scratch_init_insn != NULL_RTX)
4237 delete_insn (scratch_init_insn);
4242 fprintf (dump_file, ";; replacing loop %d initializer with\n",
4244 print_rtl_single (dump_file, loop_init);
4245 fprintf (dump_file, ";; replacing loop %d terminator with\n",
4247 print_rtl_single (dump_file, loop->loop_end);
4250 /* If the loop isn't entered at the top, also create a jump to the entry point. */
4252 if (!loop->incoming_src && loop->head != loop->incoming_dest)
4254 rtx label = BB_HEAD (loop->incoming_dest);
4255 /* If we're jumping to the final basic block in the loop, and there's
4256 only one cheap instruction before the end (typically an increment of
4257 an induction variable), we can just emit a copy here instead of a jump. */
4259 if (loop->incoming_dest == loop->tail
4260 && next_real_insn (label) == last_insn
4261 && asm_noperands (last_insn) < 0
4262 && GET_CODE (PATTERN (last_insn)) == SET)
4264 seq_end = emit_insn (copy_rtx (PATTERN (last_insn)));
4268 emit_jump_insn (gen_jump (label));
4269 seq_end = emit_barrier ();
4276 if (loop->incoming_src)
4278 rtx prev = BB_END (loop->incoming_src);
4279 if (VEC_length (edge, loop->incoming) > 1
4280 || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
4282 gcc_assert (JUMP_P (prev));
4283 prev = PREV_INSN (prev);
4285 emit_insn_after (seq, prev);
4293 #ifdef ENABLE_CHECKING
4294 if (loop->head != loop->incoming_dest)
4296 /* We aren't entering the loop at the top. Since we've established
4297 that the loop is entered only at one point, this means there
4298 can't be fallthru edges into the head. Any such fallthru edges
4299 would become invalid when we insert the new block, so verify
4300 that this does not in fact happen. */
4301 FOR_EACH_EDGE (e, ei, loop->head->preds)
4302 gcc_assert (!(e->flags & EDGE_FALLTHRU));
4306 emit_insn_before (seq, BB_HEAD (loop->head));
4307 seq = emit_label_before (gen_label_rtx (), seq);
4309 new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
4310 FOR_EACH_EDGE (e, ei, loop->incoming)
4312 if (!(e->flags & EDGE_FALLTHRU)
4313 || e->dest != loop->head)
4314 redirect_edge_and_branch_force (e, new_bb);
4316 redirect_edge_succ (e, new_bb);
4318 e = make_edge (new_bb, loop->head, 0);
4321 delete_insn (loop->loop_end);
4322 /* Insert the loop end label before the last instruction of the loop. */
4323 emit_label_before (loop->end_label, loop->last_insn);
4330 fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);
4334 if (DPREG_P (loop->iter_reg))
4336 /* If loop->iter_reg is a DREG or PREG, we can split it here
4337 without a scratch register. */
4340 emit_insn_before (gen_addsi3 (loop->iter_reg,
4345 test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
4346 insn = emit_jump_insn_before (gen_cbranchsi4 (test,
4347 loop->iter_reg, const0_rtx,
4351 JUMP_LABEL (insn) = loop->start_label;
4352 LABEL_NUSES (loop->start_label)++;
4353 delete_insn (loop->loop_end);
4357 /* Called from bfin_reorg_loops when a potential loop end is found. LOOP is
4358 a newly set up structure describing the loop; it is this function's
4359 responsibility to fill most of it. TAIL_BB and TAIL_INSN point to the
4360 loop_end insn and its enclosing basic block. */
4363 bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
4367 VEC (basic_block,heap) *works = VEC_alloc (basic_block,heap,20);
4369 loop->tail = tail_bb;
4370 loop->head = BRANCH_EDGE (tail_bb)->dest;
4371 loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
4372 loop->loop_end = tail_insn;
4373 loop->last_insn = NULL_RTX;
4374 loop->iter_reg = SET_DEST (XVECEXP (PATTERN (tail_insn), 0, 1));
4375 loop->depth = loop->length = 0;
4377 loop->clobber_loop0 = loop->clobber_loop1 = 0;
4380 loop->incoming = VEC_alloc (edge, gc, 2);
4381 loop->start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn), 0, 0)), 1), 0);
4382 loop->end_label = NULL_RTX;
4385 VEC_safe_push (basic_block, heap, works, loop->head);
4387 while (VEC_iterate (basic_block, works, dwork++, bb))
4391 if (bb == EXIT_BLOCK_PTR)
4393 /* We've reached the exit block. The loop must be bad. */
4396 ";; Loop is bad - reached exit block while scanning\n");
4401 if (!bitmap_set_bit (loop->block_bitmap, bb->index))
4404 /* We've not seen this block before. Add it to the loop's
4405 list and then add each successor to the work list. */
4407 VEC_safe_push (basic_block, heap, loop->blocks, bb);
4411 FOR_EACH_EDGE (e, ei, bb->succs)
4413 basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
4414 if (!REGNO_REG_SET_P (df_get_live_in (succ),
4415 REGNO (loop->iter_reg)))
4417 if (!VEC_space (basic_block, works, 1))
4421 VEC_block_remove (basic_block, works, 0, dwork);
4425 VEC_reserve (basic_block, heap, works, 1);
4427 VEC_quick_push (basic_block, works, succ);
4432 /* Find the predecessor, and make sure nothing else jumps into this loop. */
4436 for (dwork = 0; VEC_iterate (basic_block, loop->blocks, dwork, bb); dwork++)
4440 FOR_EACH_EDGE (e, ei, bb->preds)
4442 basic_block pred = e->src;
4444 if (!bfin_bb_in_loop (loop, pred))
4447 fprintf (dump_file, ";; Loop %d: incoming edge %d -> %d\n",
4448 loop->loop_no, pred->index,
4450 VEC_safe_push (edge, gc, loop->incoming, e);
4455 for (pass = 0, retry = 1; retry && pass < 2; pass++)
4462 FOR_EACH_EDGE (e, ei, loop->incoming)
4466 loop->incoming_src = e->src;
4467 loop->incoming_dest = e->dest;
4472 if (e->dest != loop->incoming_dest)
4473 loop->incoming_dest = NULL;
4474 if (e->src != loop->incoming_src)
4475 loop->incoming_src = NULL;
4477 if (loop->incoming_src == NULL && loop->incoming_dest == NULL)
4483 ";; retrying loop %d with forwarder blocks\n",
4491 ";; can't find suitable entry for loop %d\n",
4499 FOR_EACH_EDGE (e, ei, loop->incoming)
4501 if (forwarder_block_p (e->src))
4508 ";; Adding forwarder block %d to loop %d and retrying\n",
4509 e->src->index, loop->loop_no);
4510 VEC_safe_push (basic_block, heap, loop->blocks, e->src);
4511 bitmap_set_bit (loop->block_bitmap, e->src->index);
4512 FOR_EACH_EDGE (e2, ei2, e->src->preds)
4513 VEC_safe_push (edge, gc, loop->incoming, e2);
4514 VEC_unordered_remove (edge, loop->incoming, ei.index);
4522 fprintf (dump_file, ";; No forwarder blocks found\n");
4530 VEC_free (basic_block, heap, works);
4533 /* Analyze the structure of the loops in the current function. Use STACK
4534 for bitmap allocations. Returns all the valid candidates for hardware
4535 loops found in this function. */
4537 bfin_discover_loops (bitmap_obstack *stack, FILE *dump_file)
4539 loop_info loops = NULL;
4545 /* Find all the possible loop tails. This means searching for every
4546 loop_end instruction. For each one found, create a loop_info
4547 structure and add the head block to the work list. */
4550 rtx tail = BB_END (bb);
4552 while (GET_CODE (tail) == NOTE)
4553 tail = PREV_INSN (tail);
4557 if (INSN_P (tail) && recog_memoized (tail) == CODE_FOR_loop_end)
4560 /* A possible loop end */
4562 /* There's a degenerate case we can handle - an empty loop consisting
4563 of only a back branch. Handle that by deleting the branch. */
4564 insn = BB_HEAD (BRANCH_EDGE (bb)->dest);
4565 if (next_real_insn (insn) == tail)
4569 fprintf (dump_file, ";; degenerate loop ending at\n");
4570 print_rtl_single (dump_file, tail);
4572 delete_insn_and_edges (tail);
4576 loop = XNEW (struct loop_info);
4579 loop->loop_no = nloops++;
4580 loop->blocks = VEC_alloc (basic_block, heap, 20);
4581 loop->block_bitmap = BITMAP_ALLOC (stack);
4586 fprintf (dump_file, ";; potential loop %d ending at\n",
4588 print_rtl_single (dump_file, tail);
4591 bfin_discover_loop (loop, bb, tail);
4595 tmp_bitmap = BITMAP_ALLOC (stack);
4596 /* Compute loop nestings. */
4597 for (loop = loops; loop; loop = loop->next)
4603 for (other = loop->next; other; other = other->next)
4608 bitmap_and (tmp_bitmap, other->block_bitmap, loop->block_bitmap);
4609 if (bitmap_empty_p (tmp_bitmap))
4611 if (bitmap_equal_p (tmp_bitmap, other->block_bitmap))
4613 other->outer = loop;
4614 VEC_safe_push (loop_info, heap, loop->loops, other);
4616 else if (bitmap_equal_p (tmp_bitmap, loop->block_bitmap))
4618 loop->outer = other;
4619 VEC_safe_push (loop_info, heap, other->loops, loop);
4625 ";; can't find suitable nesting for loops %d and %d\n",
4626 loop->loop_no, other->loop_no);
4627 loop->bad = other->bad = 1;
4631 BITMAP_FREE (tmp_bitmap);
4636 /* Free up the loop structures in LOOPS. */
4638 free_loops (loop_info loops)
4642 loop_info loop = loops;
4644 VEC_free (loop_info, heap, loop->loops);
4645 VEC_free (basic_block, heap, loop->blocks);
4646 BITMAP_FREE (loop->block_bitmap);
4651 #define BB_AUX_INDEX(BB) ((unsigned)(BB)->aux)
4653 /* The taken-branch edge from the loop end can actually go forward. Since the
4654 Blackfin's LSETUP instruction requires that the loop end be after the loop
4655 start, try to reorder a loop's basic blocks when we find such a case. */
4657 bfin_reorder_loops (loop_info loops, FILE *dump_file)
4664 cfg_layout_initialize (0);
4666 for (loop = loops; loop; loop = loop->next)
4676 /* Recreate an index for basic blocks that represents their order. */
4677 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
4678 bb != EXIT_BLOCK_PTR;
4679 bb = bb->next_bb, index++)
4680 bb->aux = (PTR) index;
4682 if (BB_AUX_INDEX (loop->head) < BB_AUX_INDEX (loop->tail))
4685 FOR_EACH_EDGE (e, ei, loop->head->succs)
4687 if (bitmap_bit_p (loop->block_bitmap, e->dest->index)
4688 && BB_AUX_INDEX (e->dest) < BB_AUX_INDEX (loop->tail))
4690 basic_block start_bb = e->dest;
4691 basic_block start_prev_bb = start_bb->prev_bb;
4694 fprintf (dump_file, ";; Moving block %d before block %d\n",
4695 loop->head->index, start_bb->index);
4696 loop->head->prev_bb->next_bb = loop->head->next_bb;
4697 loop->head->next_bb->prev_bb = loop->head->prev_bb;
4699 loop->head->prev_bb = start_prev_bb;
4700 loop->head->next_bb = start_bb;
4701 start_prev_bb->next_bb = start_bb->prev_bb = loop->head;
4705 loops = loops->next;
4710 if (bb->next_bb != EXIT_BLOCK_PTR)
4711 bb->aux = bb->next_bb;
4715 cfg_layout_finalize ();
4719 /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
4720 and tries to rewrite the RTL of these loops so that proper Blackfin
4721 hardware loops are generated. */
4724 bfin_reorg_loops (FILE *dump_file)
4726 loop_info loops = NULL;
4729 bitmap_obstack stack;
4731 bitmap_obstack_initialize (&stack);
4734 fprintf (dump_file, ";; Find loops, first pass\n\n");
4736 loops = bfin_discover_loops (&stack, dump_file);
4739 bfin_dump_loops (loops);
4741 bfin_reorder_loops (loops, dump_file);
4745 fprintf (dump_file, ";; Find loops, second pass\n\n");
4747 loops = bfin_discover_loops (&stack, dump_file);
4750 fprintf (dump_file, ";; All loops found:\n\n");
4751 bfin_dump_loops (loops);
4754 /* Now apply the optimizations. */
4755 for (loop = loops; loop; loop = loop->next)
4756 bfin_optimize_loop (loop);
4760 fprintf (dump_file, ";; After hardware loops optimization:\n\n");
4761 bfin_dump_loops (loops);
4767 print_rtl (dump_file, get_insns ());
4772 splitting_loops = 1;
4775 rtx insn = BB_END (bb);
4779 try_split (PATTERN (insn), insn, 1);
4781 splitting_loops = 0;
4784 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
4785 Returns true if we modified the insn chain, false otherwise. */
4787 gen_one_bundle (rtx slot[3])
4789 gcc_assert (slot[1] != NULL_RTX);
4791 /* Don't add extra NOPs if optimizing for size. */
4793 && (slot[0] == NULL_RTX || slot[2] == NULL_RTX))
4796 /* Verify that we really can do the multi-issue. */
4799 rtx t = NEXT_INSN (slot[0]);
4800 while (t != slot[1])
4802 if (GET_CODE (t) != NOTE
4803 || NOTE_KIND (t) != NOTE_INSN_DELETED)
4810 rtx t = NEXT_INSN (slot[1]);
4811 while (t != slot[2])
4813 if (GET_CODE (t) != NOTE
4814 || NOTE_KIND (t) != NOTE_INSN_DELETED)
4820 if (slot[0] == NULL_RTX)
4822 slot[0] = emit_insn_before (gen_mnop (), slot[1]);
4823 df_insn_rescan (slot[0]);
4825 if (slot[2] == NULL_RTX)
4827 slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
4828 df_insn_rescan (slot[2]);
4831 /* Avoid line number information being printed inside one bundle. */
4832 if (INSN_LOCATOR (slot[1])
4833 && INSN_LOCATOR (slot[1]) != INSN_LOCATOR (slot[0]))
4834 INSN_LOCATOR (slot[1]) = INSN_LOCATOR (slot[0]);
4835 if (INSN_LOCATOR (slot[2])
4836 && INSN_LOCATOR (slot[2]) != INSN_LOCATOR (slot[0]))
4837 INSN_LOCATOR (slot[2]) = INSN_LOCATOR (slot[0]);
4839 /* Terminate them with "|| " instead of ";" in the output. */
4840 PUT_MODE (slot[0], SImode);
4841 PUT_MODE (slot[1], SImode);
4842 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
4843 PUT_MODE (slot[2], QImode);
4847 /* Go through all insns, and use the information generated during scheduling
4848 to generate SEQUENCEs to represent bundles of instructions issued
4849 simultaneously.  */
4852 bfin_gen_bundles (void)
4861 slot[0] = slot[1] = slot[2] = NULL_RTX;
4862 for (insn = BB_HEAD (bb);; insn = next)
4865 rtx delete_this = NULL_RTX;
4867 if (NONDEBUG_INSN_P (insn))
4869 enum attr_type type = get_attr_type (insn);
4871 if (type == TYPE_STALL)
4873 gcc_assert (n_filled == 0);
4878 if (type == TYPE_DSP32 || type == TYPE_DSP32SHIFTIMM)
4880 else if (slot[1] == NULL_RTX)
4888 next = NEXT_INSN (insn);
4889 while (next && insn != BB_END (bb)
4891 && GET_CODE (PATTERN (next)) != USE
4892 && GET_CODE (PATTERN (next)) != CLOBBER))
4895 next = NEXT_INSN (insn);
4898 /* BB_END can change due to emitting extra NOPs, so check here. */
4899 at_end = insn == BB_END (bb);
4900 if (delete_this == NULL_RTX && (at_end || GET_MODE (next) == TImode))
4903 || !gen_one_bundle (slot))
4904 && slot[0] != NULL_RTX)
4906 rtx pat = PATTERN (slot[0]);
4907 if (GET_CODE (pat) == SET
4908 && GET_CODE (SET_SRC (pat)) == UNSPEC
4909 && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
4911 SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
4912 INSN_CODE (slot[0]) = -1;
4913 df_insn_rescan (slot[0]);
4917 slot[0] = slot[1] = slot[2] = NULL_RTX;
4919 if (delete_this != NULL_RTX)
4920 delete_insn (delete_this);
4927 /* Ensure that no var tracking notes are emitted in the middle of a
4928 three-instruction bundle. */
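/* Editorial note: the walk below relies on the bundle markers set by
   gen_one_bundle -- an SImode insn means we are inside a bundle, any
   NOTE_INSN_VAR_LOCATION found there is unlinked and queued, and the queue
   is re-emitted immediately after the QImode insn that ends the bundle.  */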
4931 reorder_var_tracking_notes (void)
4937 rtx queue = NULL_RTX;
4938 bool in_bundle = false;
4940 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
4942 next = NEXT_INSN (insn);
4946 /* Emit queued up notes at the last instruction of a bundle. */
4947 if (GET_MODE (insn) == QImode)
4951 rtx next_queue = PREV_INSN (queue);
4952 PREV_INSN (NEXT_INSN (insn)) = queue;
4953 NEXT_INSN (queue) = NEXT_INSN (insn);
4954 NEXT_INSN (insn) = queue;
4955 PREV_INSN (queue) = insn;
4960 else if (GET_MODE (insn) == SImode)
4963 else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
4967 rtx prev = PREV_INSN (insn);
4968 PREV_INSN (next) = prev;
4969 NEXT_INSN (prev) = next;
4971 PREV_INSN (insn) = queue;
4979 /* On some silicon revisions, functions shorter than a certain number of cycles
4980 can cause unpredictable behaviour. Work around this by adding NOPs as
4981 needed. */
4983 workaround_rts_anomaly (void)
4985 rtx insn, first_insn = NULL_RTX;
4988 if (! ENABLE_WA_RETS)
4991 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4995 if (BARRIER_P (insn))
4998 if (NOTE_P (insn) || LABEL_P (insn))
5001 if (first_insn == NULL_RTX)
5003 pat = PATTERN (insn);
5004 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
5005 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
5006 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
5014 if (recog_memoized (insn) == CODE_FOR_return_internal)
5017 /* Nothing to worry about for direct jumps. */
5018 if (!any_condjump_p (insn))
5024 else if (INSN_P (insn))
5026 rtx pat = PATTERN (insn);
5027 int this_cycles = 1;
5029 if (GET_CODE (pat) == PARALLEL)
5031 if (push_multiple_operation (pat, VOIDmode)
5032 || pop_multiple_operation (pat, VOIDmode))
5033 this_cycles = n_regs_to_save;
5037 enum insn_code icode = recog_memoized (insn);
5038 if (icode == CODE_FOR_link)
5040 else if (icode == CODE_FOR_unlink)
5042 else if (icode == CODE_FOR_mulsi3)
5045 if (this_cycles >= cycles)
5048 cycles -= this_cycles;
5053 emit_insn_before (gen_nop (), first_insn);
5058 /* Return an insn type for INSN that can be used by the caller for anomaly
5059 workarounds. This differs from plain get_attr_type in that it handles
5060 SEQUENCEs. */
5062 static enum attr_type
5063 type_for_anomaly (rtx insn)
5065 rtx pat = PATTERN (insn);
5066 if (GET_CODE (pat) == SEQUENCE)
5069 t = get_attr_type (XVECEXP (pat, 0, 1));
5072 t = get_attr_type (XVECEXP (pat, 0, 2));
5078 return get_attr_type (insn);
5081 /* Return true iff the address found in MEM is based on the register
5082 NP_REG and optionally has a positive offset. */
5084 harmless_null_pointer_p (rtx mem, int np_reg)
5086 mem = XEXP (mem, 0);
5087 if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
5088 mem = XEXP (mem, 0);
5089 if (REG_P (mem) && REGNO (mem) == np_reg)
5091 if (GET_CODE (mem) == PLUS
5092 && REG_P (XEXP (mem, 0)) && REGNO (XEXP (mem, 0)) == np_reg)
5094 mem = XEXP (mem, 1);
5095 if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
5101 /* Return nonzero if INSN contains any loads that may trap. */
5104 trapping_loads_p (rtx insn, int np_reg, bool after_np_branch)
5106 rtx pat = PATTERN (insn);
5107 rtx mem = SET_SRC (single_set (insn));
5109 if (!after_np_branch)
5111 return ((np_reg == -1 || !harmless_null_pointer_p (mem, np_reg))
5112 && may_trap_p (mem));
5115 /* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
5116 a three-insn bundle, see if one of them is a load and return that if so.
5117 Return NULL_RTX if the insn does not contain loads. */
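/* Sketch of the convention assumed by the loop below: gen_one_bundle gives
   the non-final slots of a bundle SImode and the final slot QImode, so
   starting from an SImode insn we can walk NEXT_INSN until we reach the
   QImode insn to scan every member of the bundle for a load.  */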
5119 find_load (rtx insn)
5121 if (!NONDEBUG_INSN_P (insn))
5123 if (get_attr_type (insn) == TYPE_MCLD)
5125 if (GET_MODE (insn) != SImode)
5128 insn = NEXT_INSN (insn);
5129 if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
5130 && get_attr_type (insn) == TYPE_MCLD)
5132 } while (GET_MODE (insn) != QImode);
5136 /* Determine whether PAT is an indirect call pattern. */
5138 indirect_call_p (rtx pat)
5140 if (GET_CODE (pat) == PARALLEL)
5141 pat = XVECEXP (pat, 0, 0);
5142 if (GET_CODE (pat) == SET)
5143 pat = SET_SRC (pat);
5144 gcc_assert (GET_CODE (pat) == CALL);
5145 pat = XEXP (pat, 0);
5146 gcc_assert (GET_CODE (pat) == MEM);
5147 pat = XEXP (pat, 0);
5152 /* During workaround_speculation, track whether we're in the shadow of a
5153 conditional branch that tests a P register for NULL. If so, we can omit
5154 emitting NOPs if we see a load from that P register, since a speculative
5155 access at address 0 isn't a problem, and the load is executed in all other
5156 cases anyway.
5157 Global for communication with note_np_check_stores through note_stores. */
5159 int np_check_regno = -1;
5160 bool np_after_branch = false;
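/* A rough example of the pattern being tracked (illustrative only; labels
   and registers are made up):

       cc = p2 == 0;
       if cc jump .Lskip;
       r0 = [p2];

   If the branch is predicted not taken but is actually taken (p2 == 0), the
   load may still issue speculatively; that speculative access is to address
   0, which is harmless, and on the not-taken path the load executes anyway,
   so no NOPs are needed for it.  */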
5162 /* Subroutine of workaround_speculation, called through note_stores. */
5164 note_np_check_stores (rtx x, const_rtx pat, void *data ATTRIBUTE_UNUSED)
5166 if (REG_P (x) && (REGNO (x) == REG_CC || REGNO (x) == np_check_regno))
5167 np_check_regno = -1;
5171 workaround_speculation (void)
5174 rtx last_condjump = NULL_RTX;
5175 int cycles_since_jump = INT_MAX;
5176 int delay_added = 0;
5178 if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
5179 && ! ENABLE_WA_INDIRECT_CALLS)
5182 /* First pass: find predicted-false branches; if something after them
5183 needs nops, insert them or change the branch to predict true. */
5184 for (insn = get_insns (); insn; insn = next)
5187 int delay_needed = 0;
5189 next = find_next_insn_start (insn);
5191 if (NOTE_P (insn) || BARRIER_P (insn))
5196 np_check_regno = -1;
5200 pat = PATTERN (insn);
5201 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
5202 || GET_CODE (pat) == ADDR_VEC || GET_CODE (pat) == ADDR_DIFF_VEC)
5205 if (GET_CODE (pat) == ASM_INPUT || asm_noperands (pat) >= 0)
5207 np_check_regno = -1;
5213 /* Is this a condjump based on a null pointer comparison we saw
5214 earlier? */
5215 if (np_check_regno != -1
5216 && recog_memoized (insn) == CODE_FOR_cbranchbi4)
5218 rtx op = XEXP (SET_SRC (PATTERN (insn)), 0);
5219 gcc_assert (GET_CODE (op) == EQ || GET_CODE (op) == NE);
5220 if (GET_CODE (op) == NE)
5221 np_after_branch = true;
5223 if (any_condjump_p (insn)
5224 && ! cbranch_predicted_taken_p (insn))
5226 last_condjump = insn;
5228 cycles_since_jump = 0;
5231 cycles_since_jump = INT_MAX;
5233 else if (CALL_P (insn))
5235 np_check_regno = -1;
5236 if (cycles_since_jump < INT_MAX)
5237 cycles_since_jump++;
5238 if (indirect_call_p (pat) && ENABLE_WA_INDIRECT_CALLS)
5243 else if (NONDEBUG_INSN_P (insn))
5245 rtx load_insn = find_load (insn);
5246 enum attr_type type = type_for_anomaly (insn);
5248 if (cycles_since_jump < INT_MAX)
5249 cycles_since_jump++;
5251 /* Detect a comparison of a P register with zero. If we later
5252 see a condjump based on it, we have found a null pointer
5253 check. */
5254 if (recog_memoized (insn) == CODE_FOR_compare_eq)
5256 rtx src = SET_SRC (PATTERN (insn));
5257 if (REG_P (XEXP (src, 0))
5258 && P_REGNO_P (REGNO (XEXP (src, 0)))
5259 && XEXP (src, 1) == const0_rtx)
5261 np_check_regno = REGNO (XEXP (src, 0));
5262 np_after_branch = false;
5265 np_check_regno = -1;
5268 if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
5270 if (trapping_loads_p (load_insn, np_check_regno,
5274 else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
5277 /* See if we need to forget about a null pointer comparison
5278 we found earlier. */
5279 if (recog_memoized (insn) != CODE_FOR_compare_eq)
5281 note_stores (PATTERN (insn), note_np_check_stores, NULL);
5282 if (np_check_regno != -1)
5284 if (find_regno_note (insn, REG_INC, np_check_regno))
5285 np_check_regno = -1;
5291 if (delay_needed > cycles_since_jump
5292 && (delay_needed - cycles_since_jump) > delay_added)
5296 rtx *op = recog_data.operand;
5298 delay_needed -= cycles_since_jump;
5300 extract_insn (last_condjump);
5303 pat1 = gen_cbranch_predicted_taken (op[0], op[1], op[2],
5305 cycles_since_jump = INT_MAX;
5309 /* Do not adjust cycles_since_jump in this case, so that
5310 we'll increase the number of NOPs for a subsequent insn
5311 if necessary. */
5312 pat1 = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
5313 GEN_INT (delay_needed));
5314 delay_added = delay_needed;
5316 PATTERN (last_condjump) = pat1;
5317 INSN_CODE (last_condjump) = recog (pat1, insn, &num_clobbers);
5321 cycles_since_jump = INT_MAX;
5326 /* Second pass: for predicted-true branches, see if anything at the
5327 branch destination needs extra nops. */
5328 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5330 int cycles_since_jump;
5332 && any_condjump_p (insn)
5333 && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
5334 || cbranch_predicted_taken_p (insn)))
5336 rtx target = JUMP_LABEL (insn);
5340 cycles_since_jump = 0;
5341 for (; target && cycles_since_jump < 3; target = next_tgt)
5345 next_tgt = find_next_insn_start (target);
5347 if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
5350 pat = PATTERN (target);
5351 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
5352 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
5353 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
5356 if (NONDEBUG_INSN_P (target))
5358 rtx load_insn = find_load (target);
5359 enum attr_type type = type_for_anomaly (target);
5360 int delay_needed = 0;
5361 if (cycles_since_jump < INT_MAX)
5362 cycles_since_jump++;
5364 if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
5366 if (trapping_loads_p (load_insn, -1, false))
5369 else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
5372 if (delay_needed > cycles_since_jump)
5374 rtx prev = prev_real_insn (label);
5375 delay_needed -= cycles_since_jump;
5377 fprintf (dump_file, "Adding %d nops after %d\n",
5378 delay_needed, INSN_UID (label));
5380 && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
5387 "Reducing nops on insn %d.\n",
5390 x = XVECEXP (x, 0, 1);
5391 v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
5392 XVECEXP (x, 0, 0) = GEN_INT (v);
5394 while (delay_needed-- > 0)
5395 emit_insn_after (gen_nop (), label);
5404 /* Called just before the final scheduling pass. If we need to insert NOPs
5405 later on to work around speculative loads, insert special placeholder
5406 insns that cause loads to be delayed for as many cycles as necessary
5407 (and possible). This reduces the number of NOPs we need to add.
5408 The dummy insns we generate are later removed by bfin_gen_bundles. */
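/* In rough terms (editorial note): for a predicted-not-taken branch the pass
   below emits a "stall 3" placeholder right after the branch, and for a
   predicted-taken branch it emits a "stall 1" placeholder at the branch
   target, so the second scheduling pass keeps loads out of the vulnerable
   slots; bfin_gen_bundles later deletes these TYPE_STALL placeholders.  */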
5410 add_sched_insns_for_speculation (void)
5414 if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
5415 && ! ENABLE_WA_INDIRECT_CALLS)
5418 /* First pass: find predicted-false branches; if something after them
5419 needs nops, insert them or change the branch to predict true. */
5420 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5424 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
5427 pat = PATTERN (insn);
5428 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
5429 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
5430 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
5435 if (any_condjump_p (insn)
5436 && !cbranch_predicted_taken_p (insn))
5438 rtx n = next_real_insn (insn);
5439 emit_insn_before (gen_stall (GEN_INT (3)), n);
5444 /* Second pass: for predicted-true branches, see if anything at the
5445 branch destination needs extra nops. */
5446 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5449 && any_condjump_p (insn)
5450 && (cbranch_predicted_taken_p (insn)))
5452 rtx target = JUMP_LABEL (insn);
5453 rtx next = next_real_insn (target);
5455 if (GET_CODE (PATTERN (next)) == UNSPEC_VOLATILE
5456 && get_attr_type (next) == TYPE_STALL)
5458 emit_insn_before (gen_stall (GEN_INT (1)), next);
5463 /* We use the machine specific reorg pass for emitting CSYNC instructions
5464 after conditional branches as needed.
5466 The Blackfin is unusual in that a code sequence like
5467 if cc jump label;
5468 r0 = [p0];
5469 may speculatively perform the load even if the condition isn't true. This
5470 happens for a branch that is predicted not taken, because the pipeline
5471 isn't flushed or stalled, so the early stages of the following instructions,
5472 which perform the memory reference, are allowed to execute before the
5473 jump condition is evaluated.
5474 Therefore, we must insert additional instructions in all places where this
5475 could lead to incorrect behavior. The manual recommends CSYNC, while
5476 VDSP seems to use NOPs (even though its corresponding compiler option is
5477 named CSYNC).
5479 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
5480 When optimizing for size, we turn the branch into a predicted taken one.
5481 This may be slower due to mispredicts, but saves code size. */
5486 /* We are freeing block_for_insn in the toplev to keep compatibility
5487 with old MDEP_REORGS that are not CFG based. Recompute it now. */
5488 compute_bb_for_insn ();
5490 if (bfin_flag_schedule_insns2)
5492 splitting_for_sched = 1;
5494 splitting_for_sched = 0;
5496 add_sched_insns_for_speculation ();
5498 timevar_push (TV_SCHED2);
5499 if (flag_selective_scheduling2
5500 && !maybe_skip_selective_scheduling ())
5501 run_selective_scheduling ();
5504 timevar_pop (TV_SCHED2);
5506 /* Examine the schedule and insert nops as necessary for 64-bit parallel
5507 instructions. */
5508 bfin_gen_bundles ();
5513 /* Doloop optimization */
5514 if (cfun->machine->has_hardware_loops)
5515 bfin_reorg_loops (dump_file);
5517 workaround_speculation ();
5519 if (bfin_flag_var_tracking)
5521 timevar_push (TV_VAR_TRACKING);
5522 variable_tracking_main ();
5523 reorder_var_tracking_notes ();
5524 timevar_pop (TV_VAR_TRACKING);
5527 df_finish_pass (false);
5529 workaround_rts_anomaly ();
5532 /* Handle interrupt_handler, exception_handler and nmi_handler function
5533 attributes; arguments as in struct attribute_spec.handler. */
5536 handle_int_attribute (tree *node, tree name,
5537 tree args ATTRIBUTE_UNUSED,
5538 int flags ATTRIBUTE_UNUSED,
5542 if (TREE_CODE (x) == FUNCTION_DECL)
5545 if (TREE_CODE (x) != FUNCTION_TYPE)
5547 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5549 *no_add_attrs = true;
5551 else if (funkind (x) != SUBROUTINE)
5552 error ("multiple function type attributes specified");
5557 /* Return 0 if the attributes for two types are incompatible, 1 if they
5558 are compatible, and 2 if they are nearly compatible (which causes a
5559 warning to be generated). */
5562 bfin_comp_type_attributes (const_tree type1, const_tree type2)
5564 e_funkind kind1, kind2;
5566 if (TREE_CODE (type1) != FUNCTION_TYPE)
5569 kind1 = funkind (type1);
5570 kind2 = funkind (type2);
5575 /* Check for mismatched modifiers */
5576 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
5577 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
5580 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
5581 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
5584 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
5585 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
5588 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
5589 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
5595 /* Handle a "longcall" or "shortcall" attribute; arguments as in
5596 struct attribute_spec.handler. */
5599 bfin_handle_longcall_attribute (tree *node, tree name,
5600 tree args ATTRIBUTE_UNUSED,
5601 int flags ATTRIBUTE_UNUSED,
5604 if (TREE_CODE (*node) != FUNCTION_TYPE
5605 && TREE_CODE (*node) != FIELD_DECL
5606 && TREE_CODE (*node) != TYPE_DECL)
5608 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5610 *no_add_attrs = true;
5613 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
5614 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
5615 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
5616 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
5618 warning (OPT_Wattributes,
5619 "can't apply both longcall and shortcall attributes to the same function");
5620 *no_add_attrs = true;
5626 /* Handle a "l1_text" attribute; arguments as in
5627 struct attribute_spec.handler. */
5630 bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5631 int ARG_UNUSED (flags), bool *no_add_attrs)
5635 if (TREE_CODE (decl) != FUNCTION_DECL)
5637 error ("%qE attribute only applies to functions",
5639 *no_add_attrs = true;
5642 /* The decl may have already been given a section attribute
5643 from a previous declaration. Ensure they match. */
5644 else if (DECL_SECTION_NAME (decl) != NULL_TREE
5645 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
5648 error ("section of %q+D conflicts with previous declaration",
5650 *no_add_attrs = true;
5653 DECL_SECTION_NAME (decl) = build_string (9, ".l1.text");
5658 /* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
5659 arguments as in struct attribute_spec.handler. */
5662 bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5663 int ARG_UNUSED (flags), bool *no_add_attrs)
5667 if (TREE_CODE (decl) != VAR_DECL)
5669 error ("%qE attribute only applies to variables",
5671 *no_add_attrs = true;
5673 else if (current_function_decl != NULL_TREE
5674 && !TREE_STATIC (decl))
5676 error ("%qE attribute cannot be specified for local variables",
5678 *no_add_attrs = true;
5682 const char *section_name;
5684 if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
5685 section_name = ".l1.data";
5686 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
5687 section_name = ".l1.data.A";
5688 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
5689 section_name = ".l1.data.B";
5693 /* The decl may have already been given a section attribute
5694 from a previous declaration. Ensure they match. */
5695 if (DECL_SECTION_NAME (decl) != NULL_TREE
5696 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
5699 error ("section of %q+D conflicts with previous declaration",
5701 *no_add_attrs = true;
5704 DECL_SECTION_NAME (decl)
5705 = build_string (strlen (section_name) + 1, section_name);
5711 /* Handle a "l2" attribute; arguments as in struct attribute_spec.handler. */
5714 bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
5715 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5720 if (TREE_CODE (decl) == FUNCTION_DECL)
5722 if (DECL_SECTION_NAME (decl) != NULL_TREE
5723 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
5726 error ("section of %q+D conflicts with previous declaration",
5728 *no_add_attrs = true;
5731 DECL_SECTION_NAME (decl) = build_string (9, ".l2.text");
5733 else if (TREE_CODE (decl) == VAR_DECL)
5735 if (DECL_SECTION_NAME (decl) != NULL_TREE
5736 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
5739 error ("section of %q+D conflicts with previous declaration",
5741 *no_add_attrs = true;
5744 DECL_SECTION_NAME (decl) = build_string (9, ".l2.data");
5750 /* Table of valid machine attributes. */
5751 static const struct attribute_spec bfin_attribute_table[] =
5753 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
5754 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute },
5755 { "exception_handler", 0, 0, false, true, true, handle_int_attribute },
5756 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute },
5757 { "nesting", 0, 0, false, true, true, NULL },
5758 { "kspisusp", 0, 0, false, true, true, NULL },
5759 { "saveall", 0, 0, false, true, true, NULL },
5760 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
5761 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
5762 { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute },
5763 { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute },
5764 { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute },
5765 { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute },
5766 { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute },
5767 { NULL, 0, 0, false, false, false, NULL }
5770 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
5771 tell the assembler to generate pointers to function descriptors in
5772 some cases. */
5775 bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
5777 if (TARGET_FDPIC && size == UNITS_PER_WORD)
5779 if (GET_CODE (value) == SYMBOL_REF
5780 && SYMBOL_REF_FUNCTION_P (value))
5782 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
5783 output_addr_const (asm_out_file, value);
5784 fputs (")\n", asm_out_file);
5789 /* We've set the unaligned SI op to NULL, so we always have to
5790 handle the unaligned case here. */
5791 assemble_integer_with_op ("\t.4byte\t", value);
5795 return default_assemble_integer (value, size, aligned_p);
5798 /* Output the assembler code for a thunk function. THUNK_DECL is the
5799 declaration for the thunk function itself, FUNCTION is the decl for
5800 the target function. DELTA is an immediate constant offset to be
5801 added to THIS. If VCALL_OFFSET is nonzero, the word at
5802 *(*this + vcall_offset) should be added to THIS. */
5805 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
5806 tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
5807 HOST_WIDE_INT vcall_offset, tree function)
5810 /* The this parameter is passed as the first argument. */
5811 rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);
5813 /* Adjust the this parameter by a fixed constant. */
5817 if (delta >= -64 && delta <= 63)
5819 xops[0] = GEN_INT (delta);
5820 output_asm_insn ("%1 += %0;", xops);
5822 else if (delta >= -128 && delta < -64)
5824 xops[0] = GEN_INT (delta + 64);
5825 output_asm_insn ("%1 += -64; %1 += %0;", xops);
5827 else if (delta > 63 && delta <= 126)
5829 xops[0] = GEN_INT (delta - 63);
5830 output_asm_insn ("%1 += 63; %1 += %0;", xops);
5834 xops[0] = GEN_INT (delta);
5835 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
5839 /* Adjust the this parameter by a value stored in the vtable. */
5842 rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
5843 rtx tmp = gen_rtx_REG (Pmode, REG_R3);
5847 output_asm_insn ("%2 = r0; %2 = [%2];", xops);
5849 /* Adjust the this parameter. */
5850 xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
5851 if (!memory_operand (xops[0], Pmode))
5853 rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
5854 xops[0] = GEN_INT (vcall_offset);
5856 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
5857 xops[0] = gen_rtx_MEM (Pmode, p2tmp);
5860 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
5863 xops[0] = XEXP (DECL_RTL (function), 0);
5864 if (1 || !flag_pic || (*targetm.binds_local_p) (function))
5865 output_asm_insn ("jump.l\t%P0", xops);
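/* For reference, a rough sketch of the thunk body emitted by the code above
   for a small positive DELTA (say 4) and a zero VCALL_OFFSET, assuming the
   target binds locally (the symbol name is illustrative):

       R0 += 4;
       jump.l _target_function;
 */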
5868 /* Codes for all the Blackfin builtins. */
5874 BFIN_BUILTIN_COMPOSE_2X16,
5875 BFIN_BUILTIN_EXTRACTLO,
5876 BFIN_BUILTIN_EXTRACTHI,
5878 BFIN_BUILTIN_SSADD_2X16,
5879 BFIN_BUILTIN_SSSUB_2X16,
5880 BFIN_BUILTIN_SSADDSUB_2X16,
5881 BFIN_BUILTIN_SSSUBADD_2X16,
5882 BFIN_BUILTIN_MULT_2X16,
5883 BFIN_BUILTIN_MULTR_2X16,
5884 BFIN_BUILTIN_NEG_2X16,
5885 BFIN_BUILTIN_ABS_2X16,
5886 BFIN_BUILTIN_MIN_2X16,
5887 BFIN_BUILTIN_MAX_2X16,
5889 BFIN_BUILTIN_SSADD_1X16,
5890 BFIN_BUILTIN_SSSUB_1X16,
5891 BFIN_BUILTIN_MULT_1X16,
5892 BFIN_BUILTIN_MULTR_1X16,
5893 BFIN_BUILTIN_NORM_1X16,
5894 BFIN_BUILTIN_NEG_1X16,
5895 BFIN_BUILTIN_ABS_1X16,
5896 BFIN_BUILTIN_MIN_1X16,
5897 BFIN_BUILTIN_MAX_1X16,
5899 BFIN_BUILTIN_SUM_2X16,
5900 BFIN_BUILTIN_DIFFHL_2X16,
5901 BFIN_BUILTIN_DIFFLH_2X16,
5903 BFIN_BUILTIN_SSADD_1X32,
5904 BFIN_BUILTIN_SSSUB_1X32,
5905 BFIN_BUILTIN_NORM_1X32,
5906 BFIN_BUILTIN_ROUND_1X32,
5907 BFIN_BUILTIN_NEG_1X32,
5908 BFIN_BUILTIN_ABS_1X32,
5909 BFIN_BUILTIN_MIN_1X32,
5910 BFIN_BUILTIN_MAX_1X32,
5911 BFIN_BUILTIN_MULT_1X32,
5912 BFIN_BUILTIN_MULT_1X32X32,
5913 BFIN_BUILTIN_MULT_1X32X32NS,
5915 BFIN_BUILTIN_MULHISILL,
5916 BFIN_BUILTIN_MULHISILH,
5917 BFIN_BUILTIN_MULHISIHL,
5918 BFIN_BUILTIN_MULHISIHH,
5920 BFIN_BUILTIN_LSHIFT_1X16,
5921 BFIN_BUILTIN_LSHIFT_2X16,
5922 BFIN_BUILTIN_SSASHIFT_1X16,
5923 BFIN_BUILTIN_SSASHIFT_2X16,
5924 BFIN_BUILTIN_SSASHIFT_1X32,
5926 BFIN_BUILTIN_CPLX_MUL_16,
5927 BFIN_BUILTIN_CPLX_MAC_16,
5928 BFIN_BUILTIN_CPLX_MSU_16,
5930 BFIN_BUILTIN_CPLX_MUL_16_S40,
5931 BFIN_BUILTIN_CPLX_MAC_16_S40,
5932 BFIN_BUILTIN_CPLX_MSU_16_S40,
5934 BFIN_BUILTIN_CPLX_SQU,
5936 BFIN_BUILTIN_LOADBYTES,
5941 #define def_builtin(NAME, TYPE, CODE) \
5943 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5947 /* Set up all builtin functions for this target. */
5949 bfin_init_builtins (void)
5951 tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
5952 tree void_ftype_void
5953 = build_function_type (void_type_node, void_list_node);
5954 tree short_ftype_short
5955 = build_function_type_list (short_integer_type_node, short_integer_type_node,
5957 tree short_ftype_int_int
5958 = build_function_type_list (short_integer_type_node, integer_type_node,
5959 integer_type_node, NULL_TREE);
5960 tree int_ftype_int_int
5961 = build_function_type_list (integer_type_node, integer_type_node,
5962 integer_type_node, NULL_TREE);
5964 = build_function_type_list (integer_type_node, integer_type_node,
5966 tree short_ftype_int
5967 = build_function_type_list (short_integer_type_node, integer_type_node,
5969 tree int_ftype_v2hi_v2hi
5970 = build_function_type_list (integer_type_node, V2HI_type_node,
5971 V2HI_type_node, NULL_TREE);
5972 tree v2hi_ftype_v2hi_v2hi
5973 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5974 V2HI_type_node, NULL_TREE);
5975 tree v2hi_ftype_v2hi_v2hi_v2hi
5976 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5977 V2HI_type_node, V2HI_type_node, NULL_TREE);
5978 tree v2hi_ftype_int_int
5979 = build_function_type_list (V2HI_type_node, integer_type_node,
5980 integer_type_node, NULL_TREE);
5981 tree v2hi_ftype_v2hi_int
5982 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5983 integer_type_node, NULL_TREE);
5984 tree int_ftype_short_short
5985 = build_function_type_list (integer_type_node, short_integer_type_node,
5986 short_integer_type_node, NULL_TREE);
5987 tree v2hi_ftype_v2hi
5988 = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
5989 tree short_ftype_v2hi
5990 = build_function_type_list (short_integer_type_node, V2HI_type_node,
5993 = build_function_type_list (integer_type_node,
5994 build_pointer_type (integer_type_node),
5997 /* Add the remaining builtins with somewhat more complicated types. */
5998 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
5999 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
6001 def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);
6003 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
6004 BFIN_BUILTIN_COMPOSE_2X16);
6005 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
6006 BFIN_BUILTIN_EXTRACTHI);
6007 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
6008 BFIN_BUILTIN_EXTRACTLO);
6010 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
6011 BFIN_BUILTIN_MIN_2X16);
6012 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
6013 BFIN_BUILTIN_MAX_2X16);
6015 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
6016 BFIN_BUILTIN_SSADD_2X16);
6017 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
6018 BFIN_BUILTIN_SSSUB_2X16);
6019 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
6020 BFIN_BUILTIN_SSADDSUB_2X16);
6021 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
6022 BFIN_BUILTIN_SSSUBADD_2X16);
6023 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
6024 BFIN_BUILTIN_MULT_2X16);
6025 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
6026 BFIN_BUILTIN_MULTR_2X16);
6027 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
6028 BFIN_BUILTIN_NEG_2X16);
6029 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
6030 BFIN_BUILTIN_ABS_2X16);
6032 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
6033 BFIN_BUILTIN_MIN_1X16);
6034 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
6035 BFIN_BUILTIN_MAX_1X16);
6037 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
6038 BFIN_BUILTIN_SSADD_1X16);
6039 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
6040 BFIN_BUILTIN_SSSUB_1X16);
6041 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
6042 BFIN_BUILTIN_MULT_1X16);
6043 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
6044 BFIN_BUILTIN_MULTR_1X16);
6045 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
6046 BFIN_BUILTIN_NEG_1X16);
6047 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
6048 BFIN_BUILTIN_ABS_1X16);
6049 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
6050 BFIN_BUILTIN_NORM_1X16);
6052 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
6053 BFIN_BUILTIN_SUM_2X16);
6054 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
6055 BFIN_BUILTIN_DIFFHL_2X16);
6056 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
6057 BFIN_BUILTIN_DIFFLH_2X16);
6059 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
6060 BFIN_BUILTIN_MULHISILL);
6061 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
6062 BFIN_BUILTIN_MULHISIHL);
6063 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
6064 BFIN_BUILTIN_MULHISILH);
6065 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
6066 BFIN_BUILTIN_MULHISIHH);
6068 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
6069 BFIN_BUILTIN_MIN_1X32);
6070 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
6071 BFIN_BUILTIN_MAX_1X32);
6073 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
6074 BFIN_BUILTIN_SSADD_1X32);
6075 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
6076 BFIN_BUILTIN_SSSUB_1X32);
6077 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
6078 BFIN_BUILTIN_NEG_1X32);
6079 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
6080 BFIN_BUILTIN_ABS_1X32);
6081 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
6082 BFIN_BUILTIN_NORM_1X32);
6083 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
6084 BFIN_BUILTIN_ROUND_1X32);
6085 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
6086 BFIN_BUILTIN_MULT_1X32);
6087 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
6088 BFIN_BUILTIN_MULT_1X32X32);
6089 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
6090 BFIN_BUILTIN_MULT_1X32X32NS);
6093 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
6094 BFIN_BUILTIN_SSASHIFT_1X16);
6095 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
6096 BFIN_BUILTIN_SSASHIFT_2X16);
6097 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
6098 BFIN_BUILTIN_LSHIFT_1X16);
6099 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
6100 BFIN_BUILTIN_LSHIFT_2X16);
6101 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
6102 BFIN_BUILTIN_SSASHIFT_1X32);
6104 /* Complex numbers. */
6105 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
6106 BFIN_BUILTIN_SSADD_2X16);
6107 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
6108 BFIN_BUILTIN_SSSUB_2X16);
6109 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
6110 BFIN_BUILTIN_CPLX_MUL_16);
6111 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
6112 BFIN_BUILTIN_CPLX_MAC_16);
6113 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
6114 BFIN_BUILTIN_CPLX_MSU_16);
6115 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
6116 BFIN_BUILTIN_CPLX_MUL_16_S40);
6117 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
6118 BFIN_BUILTIN_CPLX_MAC_16_S40);
6119 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
6120 BFIN_BUILTIN_CPLX_MSU_16_S40);
6121 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
6122 BFIN_BUILTIN_CPLX_SQU);
6124 /* "Unaligned" load. */
6125 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
6126 BFIN_BUILTIN_LOADBYTES);
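/* Usage sketch (user-level code, not part of this file), assuming the
   builtin names registered above; a saturating 2x16 vector add might be
   written as:

       typedef short __v2hi __attribute__ ((vector_size (4)));

       __v2hi add_sat (__v2hi a, __v2hi b)
       {
         return __builtin_bfin_add_fr2x16 (a, b);
       }
 */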
6131 struct builtin_description
6133 const enum insn_code icode;
6134 const char *const name;
6135 const enum bfin_builtins code;
6139 static const struct builtin_description bdesc_2arg[] =
6141 { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },
6143 { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
6144 { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
6145 { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
6146 { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
6147 { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },
6149 { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
6150 { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
6151 { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
6152 { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },
6154 { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
6155 { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
6156 { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
6157 { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },
6159 { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
6160 { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
6161 { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
6162 { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
6163 { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
6164 { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },
6166 { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
6167 { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
6168 { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
6169 { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
6170 { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },
6172 { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
6173 { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
6174 { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
6175 { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }
6179 static const struct builtin_description bdesc_1arg[] =
6181 { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },
6183 { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },
6185 { CODE_FOR_signbitshi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
6186 { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
6187 { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },
6189 { CODE_FOR_signbitssi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
6190 { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
6191 { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
6192 { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },
6194 { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
6195 { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
6196 { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
6197 { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
6200 /* Errors in the source file can cause expand_expr to return const0_rtx
6201 where we expect a vector. To avoid crashing, use one of the vector
6202 clear instructions. */
6204 safe_vector_operand (rtx x, enum machine_mode mode)
6206 if (x != const0_rtx)
6208 x = gen_reg_rtx (SImode);
6210 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
6211 return gen_lowpart (mode, x);
6214 /* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
6215 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
6218 bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
6222 tree arg0 = CALL_EXPR_ARG (exp, 0);
6223 tree arg1 = CALL_EXPR_ARG (exp, 1);
6224 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6225 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6226 enum machine_mode op0mode = GET_MODE (op0);
6227 enum machine_mode op1mode = GET_MODE (op1);
6228 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6229 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6230 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6232 if (VECTOR_MODE_P (mode0))
6233 op0 = safe_vector_operand (op0, mode0);
6234 if (VECTOR_MODE_P (mode1))
6235 op1 = safe_vector_operand (op1, mode1);
6238 || GET_MODE (target) != tmode
6239 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6240 target = gen_reg_rtx (tmode);
6242 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
6245 op0 = gen_lowpart (HImode, op0);
6247 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
6250 op1 = gen_lowpart (HImode, op1);
6252 /* In case the insn wants input operands in modes different from
6253 the result, abort. */
6254 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
6255 && (op1mode == mode1 || op1mode == VOIDmode));
6257 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6258 op0 = copy_to_mode_reg (mode0, op0);
6259 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6260 op1 = copy_to_mode_reg (mode1, op1);
6263 pat = GEN_FCN (icode) (target, op0, op1);
6265 pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
6273 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
6276 bfin_expand_unop_builtin (enum insn_code icode, tree exp,
6280 tree arg0 = CALL_EXPR_ARG (exp, 0);
6281 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6282 enum machine_mode op0mode = GET_MODE (op0);
6283 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6284 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6287 || GET_MODE (target) != tmode
6288 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6289 target = gen_reg_rtx (tmode);
6291 if (VECTOR_MODE_P (mode0))
6292 op0 = safe_vector_operand (op0, mode0);
6294 if (op0mode == SImode && mode0 == HImode)
6297 op0 = gen_lowpart (HImode, op0);
6299 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
6301 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6302 op0 = copy_to_mode_reg (mode0, op0);
6304 pat = GEN_FCN (icode) (target, op0);
6311 /* Expand an expression EXP that calls a built-in function,
6312 with result going to TARGET if that's convenient
6313 (and in mode MODE if that's convenient).
6314 SUBTARGET may be used as the target for computing one of EXP's operands.
6315 IGNORE is nonzero if the value is to be ignored. */
6318 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6319 rtx subtarget ATTRIBUTE_UNUSED,
6320 enum machine_mode mode ATTRIBUTE_UNUSED,
6321 int ignore ATTRIBUTE_UNUSED)
6324 enum insn_code icode;
6325 const struct builtin_description *d;
6326 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6327 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6328 tree arg0, arg1, arg2;
6329 rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
6330 enum machine_mode tmode, mode0;
6334 case BFIN_BUILTIN_CSYNC:
6335 emit_insn (gen_csync ());
6337 case BFIN_BUILTIN_SSYNC:
6338 emit_insn (gen_ssync ());
6341 case BFIN_BUILTIN_DIFFHL_2X16:
6342 case BFIN_BUILTIN_DIFFLH_2X16:
6343 case BFIN_BUILTIN_SUM_2X16:
6344 arg0 = CALL_EXPR_ARG (exp, 0);
6345 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6346 icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
6347 : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
6348 : CODE_FOR_ssaddhilov2hi3);
6349 tmode = insn_data[icode].operand[0].mode;
6350 mode0 = insn_data[icode].operand[1].mode;
6353 || GET_MODE (target) != tmode
6354 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6355 target = gen_reg_rtx (tmode);
6357 if (VECTOR_MODE_P (mode0))
6358 op0 = safe_vector_operand (op0, mode0);
6360 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6361 op0 = copy_to_mode_reg (mode0, op0);
6363 pat = GEN_FCN (icode) (target, op0, op0);
6369 case BFIN_BUILTIN_MULT_1X32X32:
6370 case BFIN_BUILTIN_MULT_1X32X32NS:
6371 arg0 = CALL_EXPR_ARG (exp, 0);
6372 arg1 = CALL_EXPR_ARG (exp, 1);
6373 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6374 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6376 || !register_operand (target, SImode))
6377 target = gen_reg_rtx (SImode);
6378 if (! register_operand (op0, SImode))
6379 op0 = copy_to_mode_reg (SImode, op0);
6380 if (! register_operand (op1, SImode))
6381 op1 = copy_to_mode_reg (SImode, op1);
6383 a1reg = gen_rtx_REG (PDImode, REG_A1);
6384 a0reg = gen_rtx_REG (PDImode, REG_A0);
6385 tmp1 = gen_lowpart (V2HImode, op0);
6386 tmp2 = gen_lowpart (V2HImode, op1);
6387 emit_insn (gen_flag_macinit1hi (a1reg,
6388 gen_lowpart (HImode, op0),
6389 gen_lowpart (HImode, op1),
6390 GEN_INT (MACFLAG_FU)));
6391 emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));
6393 if (fcode == BFIN_BUILTIN_MULT_1X32X32)
6394 emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
6395 const1_rtx, const1_rtx,
6396 const1_rtx, const0_rtx, a1reg,
6397 const0_rtx, GEN_INT (MACFLAG_NONE),
6398 GEN_INT (MACFLAG_M)));
6401 /* For saturating multiplication, there's exactly one special case
6402 to be handled: multiplying the smallest negative value with
6403 itself. Due to shift correction in fractional multiplies, this
6404 can overflow. Iff this happens, OP2 will contain 1, which, when
6405 added in 32 bits to the smallest negative, wraps to the largest
6406 positive, which is the result we want. */
6407 op2 = gen_reg_rtx (V2HImode);
6408 emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
6409 emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
6410 gen_lowpart (SImode, op2)));
6411 emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
6412 const1_rtx, const1_rtx,
6413 const1_rtx, const0_rtx, a1reg,
6414 const0_rtx, GEN_INT (MACFLAG_NONE),
6415 GEN_INT (MACFLAG_M)));
6416 op2 = gen_reg_rtx (SImode);
6417 emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
6419 emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
6420 const1_rtx, const0_rtx,
6421 a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
6422 emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
6423 emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
6424 if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
6425 emit_insn (gen_addsi3 (target, target, op2));
6428 case BFIN_BUILTIN_CPLX_MUL_16:
6429 case BFIN_BUILTIN_CPLX_MUL_16_S40:
6430 arg0 = CALL_EXPR_ARG (exp, 0);
6431 arg1 = CALL_EXPR_ARG (exp, 1);
6432 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6433 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6434 accvec = gen_reg_rtx (V2PDImode);
6435 icode = CODE_FOR_flag_macv2hi_parts;
6438 || GET_MODE (target) != V2HImode
6439 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
6440 target = gen_reg_rtx (tmode);
6441 if (! register_operand (op0, GET_MODE (op0)))
6442 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
6443 if (! register_operand (op1, GET_MODE (op1)))
6444 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
6446 if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
6447 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
6448 const0_rtx, const0_rtx,
6449 const1_rtx, GEN_INT (MACFLAG_W32)));
6451 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
6452 const0_rtx, const0_rtx,
6453 const1_rtx, GEN_INT (MACFLAG_NONE)));
6454 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
6455 const1_rtx, const1_rtx,
6456 const0_rtx, accvec, const1_rtx, const0_rtx,
6457 GEN_INT (MACFLAG_NONE), accvec));
6461 case BFIN_BUILTIN_CPLX_MAC_16:
6462 case BFIN_BUILTIN_CPLX_MSU_16:
6463 case BFIN_BUILTIN_CPLX_MAC_16_S40:
6464 case BFIN_BUILTIN_CPLX_MSU_16_S40:
6465 arg0 = CALL_EXPR_ARG (exp, 0);
6466 arg1 = CALL_EXPR_ARG (exp, 1);
6467 arg2 = CALL_EXPR_ARG (exp, 2);
6468 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6469 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6470 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6471 accvec = gen_reg_rtx (V2PDImode);
6472 icode = CODE_FOR_flag_macv2hi_parts;
6475 || GET_MODE (target) != V2HImode
6476 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
6477 target = gen_reg_rtx (tmode);
6478 if (! register_operand (op1, GET_MODE (op1)))
6479 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
6480 if (! register_operand (op2, GET_MODE (op2)))
6481 op2 = copy_to_mode_reg (GET_MODE (op2), op2);
6483 tmp1 = gen_reg_rtx (SImode);
6484 tmp2 = gen_reg_rtx (SImode);
6485 emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
6486 emit_move_insn (tmp2, gen_lowpart (SImode, op0));
6487 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
6488 emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
6489 if (fcode == BFIN_BUILTIN_CPLX_MAC_16
6490 || fcode == BFIN_BUILTIN_CPLX_MSU_16)
6491 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
6492 const0_rtx, const0_rtx,
6493 const1_rtx, accvec, const0_rtx,
6495 GEN_INT (MACFLAG_W32)));
6497 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
6498 const0_rtx, const0_rtx,
6499 const1_rtx, accvec, const0_rtx,
6501 GEN_INT (MACFLAG_NONE)));
6502 if (fcode == BFIN_BUILTIN_CPLX_MAC_16
6503 || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
6513 emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
6514 const1_rtx, const1_rtx,
6515 const0_rtx, accvec, tmp1, tmp2,
6516 GEN_INT (MACFLAG_NONE), accvec));
6520 case BFIN_BUILTIN_CPLX_SQU:
6521 arg0 = CALL_EXPR_ARG (exp, 0);
6522 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6523 accvec = gen_reg_rtx (V2PDImode);
6524 icode = CODE_FOR_flag_mulv2hi;
6525 tmp1 = gen_reg_rtx (V2HImode);
6526 tmp2 = gen_reg_rtx (V2HImode);
6529 || GET_MODE (target) != V2HImode
6530 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
6531 target = gen_reg_rtx (V2HImode);
6532 if (! register_operand (op0, GET_MODE (op0)))
6533 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
6535 emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));
6537 emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
6538 const0_rtx, const1_rtx,
6539 GEN_INT (MACFLAG_NONE)));
6541 emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
6543 emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
6544 const0_rtx, const1_rtx));
6552 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6553 if (d->code == fcode)
6554 return bfin_expand_binop_builtin (d->icode, exp, target,
6557 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6558 if (d->code == fcode)
6559 return bfin_expand_unop_builtin (d->icode, exp, target);
6564 #undef TARGET_INIT_BUILTINS
6565 #define TARGET_INIT_BUILTINS bfin_init_builtins
6567 #undef TARGET_EXPAND_BUILTIN
6568 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
6570 #undef TARGET_ASM_GLOBALIZE_LABEL
6571 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
6573 #undef TARGET_ASM_FILE_START
6574 #define TARGET_ASM_FILE_START output_file_start
6576 #undef TARGET_ATTRIBUTE_TABLE
6577 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
6579 #undef TARGET_COMP_TYPE_ATTRIBUTES
6580 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
6582 #undef TARGET_RTX_COSTS
6583 #define TARGET_RTX_COSTS bfin_rtx_costs
6585 #undef TARGET_ADDRESS_COST
6586 #define TARGET_ADDRESS_COST bfin_address_cost
6588 #undef TARGET_ASM_INTEGER
6589 #define TARGET_ASM_INTEGER bfin_assemble_integer
6591 #undef TARGET_MACHINE_DEPENDENT_REORG
6592 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
6594 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
6595 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
6597 #undef TARGET_ASM_OUTPUT_MI_THUNK
6598 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
6599 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
6600 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
6602 #undef TARGET_SCHED_ADJUST_COST
6603 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
6605 #undef TARGET_SCHED_ISSUE_RATE
6606 #define TARGET_SCHED_ISSUE_RATE bfin_issue_rate
6608 #undef TARGET_PROMOTE_FUNCTION_MODE
6609 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
6611 #undef TARGET_ARG_PARTIAL_BYTES
6612 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
6614 #undef TARGET_PASS_BY_REFERENCE
6615 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
6617 #undef TARGET_SETUP_INCOMING_VARARGS
6618 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
6620 #undef TARGET_STRUCT_VALUE_RTX
6621 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
6623 #undef TARGET_VECTOR_MODE_SUPPORTED_P
6624 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
6626 #undef TARGET_HANDLE_OPTION
6627 #define TARGET_HANDLE_OPTION bfin_handle_option
6629 #undef TARGET_DEFAULT_TARGET_FLAGS
6630 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
6632 #undef TARGET_SECONDARY_RELOAD
6633 #define TARGET_SECONDARY_RELOAD bfin_secondary_reload
6635 #undef TARGET_DELEGITIMIZE_ADDRESS
6636 #define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
6638 #undef TARGET_CANNOT_FORCE_CONST_MEM
6639 #define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem
6641 #undef TARGET_RETURN_IN_MEMORY
6642 #define TARGET_RETURN_IN_MEMORY bfin_return_in_memory
6644 #undef TARGET_LEGITIMATE_ADDRESS_P
6645 #define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p
6647 #undef TARGET_FRAME_POINTER_REQUIRED
6648 #define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required
6650 #undef TARGET_CAN_ELIMINATE
6651 #define TARGET_CAN_ELIMINATE bfin_can_eliminate
6653 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
6654 #define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
6655 #undef TARGET_TRAMPOLINE_INIT
6656 #define TARGET_TRAMPOLINE_INIT bfin_trampoline_init
6658 struct gcc_target targetm = TARGET_INITIALIZER;