1 /* Output routines for Sunplus S+CORE processor
2 Copyright (C) 2005, 2007 Free Software Foundation, Inc.
3 Contributed by Sunnorth.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
47 #include "target-def.h"
48 #include "integrate.h"
49 #include "langhooks.h"
53 #undef TARGET_ASM_FILE_START
54 #define TARGET_ASM_FILE_START score_asm_file_start
56 #undef TARGET_ASM_FILE_END
57 #define TARGET_ASM_FILE_END score_asm_file_end
59 #undef TARGET_ASM_FUNCTION_PROLOGUE
60 #define TARGET_ASM_FUNCTION_PROLOGUE score_function_prologue
62 #undef TARGET_ASM_FUNCTION_EPILOGUE
63 #define TARGET_ASM_FUNCTION_EPILOGUE score_function_epilogue
65 #undef TARGET_DEFAULT_TARGET_FLAGS
66 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
67 #undef TARGET_HANDLE_OPTION
68 #define TARGET_HANDLE_OPTION score_handle_option
70 #undef TARGET_SCHED_ISSUE_RATE
71 #define TARGET_SCHED_ISSUE_RATE score_issue_rate
73 #undef TARGET_ASM_SELECT_RTX_SECTION
74 #define TARGET_ASM_SELECT_RTX_SECTION score_select_rtx_section
76 #undef TARGET_IN_SMALL_DATA_P
77 #define TARGET_IN_SMALL_DATA_P score_in_small_data_p
79 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
80 #define TARGET_FUNCTION_OK_FOR_SIBCALL score_function_ok_for_sibcall
82 #undef TARGET_STRICT_ARGUMENT_NAMING
83 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
85 #undef TARGET_ASM_OUTPUT_MI_THUNK
86 #define TARGET_ASM_OUTPUT_MI_THUNK score_output_mi_thunk
88 #undef TARGET_PROMOTE_FUNCTION_ARGS
89 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
91 #undef TARGET_PROMOTE_FUNCTION_RETURN
92 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
94 #undef TARGET_PROMOTE_PROTOTYPES
95 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
97 #undef TARGET_MUST_PASS_IN_STACK
98 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
100 #undef TARGET_ARG_PARTIAL_BYTES
101 #define TARGET_ARG_PARTIAL_BYTES score_arg_partial_bytes
103 #undef TARGET_PASS_BY_REFERENCE
104 #define TARGET_PASS_BY_REFERENCE score_pass_by_reference
106 #undef TARGET_RETURN_IN_MEMORY
107 #define TARGET_RETURN_IN_MEMORY score_return_in_memory
109 #undef TARGET_RTX_COSTS
110 #define TARGET_RTX_COSTS score_rtx_costs
112 #undef TARGET_ADDRESS_COST
113 #define TARGET_ADDRESS_COST score_address_cost
/* NOTE(review): this chunk is a sampled listing; blank/brace lines of the
   original file are elided, so only visible code is documented below.  */
/* Head of the list of small-data symbols that may still need .extern
   directives at end of assembly -- presumably drained by
   score*_asm_file_end; confirm against the score3/score7 files.  */
115 struct extern_list *extern_head = 0;
/* Operands of the most recently expanded comparison, saved for the later
   branch/conditional-move expanders (see score_gen_cmp below).  */
116 rtx cmp_op0, cmp_op1;
118 /* default 0 = NO_REGS */
/* Maps constraint characters to register classes; entries never assigned
   stay 0, i.e. NO_REGS per the note above.  */
119 enum reg_class score_char_to_class[256];
121 /* Implement TARGET_RETURN_IN_MEMORY. In S+core,
122 small structures are returned in a register.
123 Objects with varying size must still be returned in memory. */
/* Per-CPU dispatch: score5/5u/7/7d share the score7 implementation,
   score3 has its own.  This same two-way dispatch pattern is repeated by
   nearly every wrapper in this file.
   NOTE(review): the listing elides the return type, braces, and any
   final fallthrough return (original lines 124/126/131+) -- confirm the
   fallthrough behavior in the full source.  */
125 score_return_in_memory (tree type, tree fndecl ATTRIBUTE_UNUSED)
127 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
128 return score7_return_in_memory (type, fndecl);
129 else if (TARGET_SCORE3)
130 return score3_return_in_memory (type, fndecl);
135 /* Return nonzero when an argument must be passed by reference. */
/* Implements TARGET_PASS_BY_REFERENCE (see the hook override above).
   CUM and NAMED are unused: the decision depends only on whether the
   type is variable-sized.  */
137 score_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
138 enum machine_mode mode, tree type,
139 bool named ATTRIBUTE_UNUSED)
141 /* If we have a variable-sized parameter, we have no choice. */
/* Delegates to the target's must_pass_in_stack hook, set above to
   must_pass_in_stack_var_size.  */
142 return targetm.calls.must_pass_in_stack (mode, type);
145 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. Generate rtl rather than asm text
146 in order to avoid duplicating too much logic from elsewhere. */
148 score_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
149 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
152 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
153 return score7_output_mi_thunk (file, thunk_fndecl, delta,
154 vcall_offset, function);
155 else if (TARGET_SCORE3)
156 return score3_output_mi_thunk (file, thunk_fndecl, delta,
157 vcall_offset, function);
161 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
163 score_function_ok_for_sibcall (ATTRIBUTE_UNUSED tree decl,
164 ATTRIBUTE_UNUSED tree exp)
169 /* Set up the stack and frame (if desired) for the function. */
171 score_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
173 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
174 return score7_function_prologue (file, size);
175 else if (TARGET_SCORE3)
176 return score3_function_prologue (file, size);
181 /* Do any necessary cleanup after a function to restore stack, frame,
184 score_function_epilogue (FILE *file,
185 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
187 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
188 return score7_function_epilogue (file, size);
189 else if (TARGET_SCORE3)
190 return score3_function_epilogue (file, size);
195 /* Implement TARGET_SCHED_ISSUE_RATE. */
197 score_issue_rate (void)
202 /* Choose the section to use for the constant rtx expression X that has
205 score_select_rtx_section (enum machine_mode mode, rtx x,
206 unsigned HOST_WIDE_INT align)
208 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
209 return score7_select_rtx_section (mode, x, align);
210 else if (TARGET_SCORE3)
211 return score3_select_rtx_section (mode, x, align);
216 /* Implement TARGET_IN_SMALL_DATA_P. */
218 score_in_small_data_p (tree decl)
220 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
221 return score7_in_small_data_p (decl);
222 else if (TARGET_SCORE3)
223 return score3_in_small_data_p (decl);
228 /* Implement TARGET_ASM_FILE_START. */
/* Emit a banner comment identifying the CPU variant, endianness and
   backend revision, then the standard file prologue, then ".set pic"
   for position-independent code.
   NOTE(review): the opening "if (TARGET_SCORE5)" guard (original lines
   231-232) and the PIC guard (presumably "if (flag_pic)", line 256) are
   elided from this listing -- confirm in the full source.  */
230 score_asm_file_start (void)
233 fprintf (asm_out_file, "# Sunplus S+core5 %s rev=%s\n",
234 TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
235 else if (TARGET_SCORE5U)
236 fprintf (asm_out_file, "# Sunplus S+core5u %s rev=%s\n",
237 TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
/* 7d is tested before 7 because TARGET_SCORE7D implies TARGET_SCORE7
   (see score_handle_option, which sets both mask bits for score7d).  */
238 else if (TARGET_SCORE7D)
239 fprintf (asm_out_file, "# Sunplus S+core7d %s rev=%s\n",
240 TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
241 else if (TARGET_SCORE7)
242 fprintf (asm_out_file, "# Sunplus S+core7 %s rev=%s\n",
243 TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
244 else if (TARGET_SCORE3D)
245 fprintf (asm_out_file, "# Sunplus S+core3d %s rev=%s\n",
246 TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
247 else if (TARGET_SCORE3)
248 fprintf (asm_out_file, "# Sunplus S+core3 %s rev=%s\n",
249 TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
/* Fallback when no CPU flag is recognized (the "else" at line 250 is
   elided here).  */
251 fprintf (asm_out_file, "# Sunplus S+core unknown %s rev=%s\n",
252 TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
254 default_file_start ();
257 fprintf (asm_out_file, "\t.set pic\n");
260 /* Implement TARGET_ASM_FILE_END. When using assembler macros, emit
261 .externs for any small-data variables that turned out to be external. */
263 score_asm_file_end (void)
265 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
266 return score7_asm_file_end ();
267 else if (TARGET_SCORE3)
268 return score3_asm_file_end ();
/* Union of every CPU-selection flag bit; used to clear any previously
   selected CPU before setting a new one, so -mscoreN options are
   mutually exclusive (last one wins).  */
273 #define MASK_ALL_CPU_BITS \
274 (MASK_SCORE5 | MASK_SCORE5U | MASK_SCORE7 | MASK_SCORE7D \
275 | MASK_SCORE3 | MASK_SCORE3D)
277 /* Implement TARGET_HANDLE_OPTION. */
/* NOTE(review): the switch statement and its "case OPT_*:" labels
   (and the returns) are elided from this listing; the first two
   clear/set pairs presumably correspond to the -mscore7d and -mscore3d
   shorthand options, and the strcmp chain to -march=<arg> -- confirm
   against score.opt in the full source.  */
279 score_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
/* score7d implies score7, so both bits are set.  */
284 target_flags &= ~(MASK_ALL_CPU_BITS);
285 target_flags |= MASK_SCORE7 | MASK_SCORE7D;
/* Likewise score3d implies score3.  */
289 target_flags &= ~(MASK_ALL_CPU_BITS);
290 target_flags |= MASK_SCORE3 | MASK_SCORE3D;
/* -march=<name>: pick exactly one architecture by name.  */
294 if (strcmp (arg, "score5") == 0)
296 target_flags &= ~(MASK_ALL_CPU_BITS);
297 target_flags |= MASK_SCORE5;
300 else if (strcmp (arg, "score5u") == 0)
302 target_flags &= ~(MASK_ALL_CPU_BITS);
303 target_flags |= MASK_SCORE5U;
306 else if (strcmp (arg, "score7") == 0)
308 target_flags &= ~(MASK_ALL_CPU_BITS);
309 target_flags |= MASK_SCORE7;
312 else if (strcmp (arg, "score7d") == 0)
314 target_flags &= ~(MASK_ALL_CPU_BITS);
315 target_flags |= MASK_SCORE7 | MASK_SCORE7D;
318 else if (strcmp (arg, "score3") == 0)
320 target_flags &= ~(MASK_ALL_CPU_BITS);
321 target_flags |= MASK_SCORE3;
324 else if (strcmp (arg, "score3d") == 0)
326 target_flags &= ~(MASK_ALL_CPU_BITS);
327 target_flags |= MASK_SCORE3 | MASK_SCORE3D;
338 /* Implement OVERRIDE_OPTIONS macro. */
340 score_override_options (void)
342 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
343 return score7_override_options ();
344 else if (TARGET_SCORE3)
345 return score3_override_options ();
/* Default: when no -mscore*/-march option selected a CPU, fall back to
   the score7 configuration.  */
347 return score7_override_options ();
350 /* Implement REGNO_REG_CLASS macro. */
352 score_reg_class (int regno)
354 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
355 return score7_reg_class (regno);
356 else if (TARGET_SCORE3)
357 return score3_reg_class (regno);
362 /* Implement PREFERRED_RELOAD_CLASS macro. */
364 score_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
366 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
367 return score7_preferred_reload_class (x, rclass);
368 else if (TARGET_SCORE3)
369 return score3_preferred_reload_class (x, rclass);
374 /* Implement SECONDARY_INPUT_RELOAD_CLASS
375 and SECONDARY_OUTPUT_RELOAD_CLASS macro. */
377 score_secondary_reload_class (enum reg_class rclass,
378 enum machine_mode mode ATTRIBUTE_UNUSED,
381 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
382 return score7_secondary_reload_class (rclass, mode, x);
383 else if (TARGET_SCORE3)
384 return score3_secondary_reload_class (rclass, mode, x);
389 /* Implement CONST_OK_FOR_LETTER_P macro. */
391 score_const_ok_for_letter_p (HOST_WIDE_INT value, char c)
393 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
394 return score7_const_ok_for_letter_p (value, c);
395 else if (TARGET_SCORE3)
396 return score3_const_ok_for_letter_p (value, c);
401 /* Implement EXTRA_CONSTRAINT macro. */
403 score_extra_constraint (rtx op, char c)
405 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
406 return score7_extra_constraint (op, c);
407 else if (TARGET_SCORE3)
408 return score3_extra_constraint (op, c);
413 /* Return truth value on whether or not a given hard register
414 can support a given mode. */
416 score_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
418 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
419 return score7_hard_regno_mode_ok (regno, mode);
420 else if (TARGET_SCORE3)
421 return score3_hard_regno_mode_ok (regno, mode);
426 /* Implement INITIAL_ELIMINATION_OFFSET. FROM is either the frame
427 pointer or argument pointer. TO is either the stack pointer or
428 hard frame pointer. */
430 score_initial_elimination_offset (int from,
431 int to ATTRIBUTE_UNUSED)
433 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
434 return score7_initial_elimination_offset (from, to);
435 else if (TARGET_SCORE3)
436 return score3_initial_elimination_offset (from, to);
441 /* Argument support functions. */
443 /* Initialize CUMULATIVE_ARGS for a function. */
445 score_init_cumulative_args (CUMULATIVE_ARGS *cum,
446 tree fntype ATTRIBUTE_UNUSED,
447 rtx libname ATTRIBUTE_UNUSED)
449 memset (cum, 0, sizeof (CUMULATIVE_ARGS));
452 /* Implement FUNCTION_ARG_ADVANCE macro. */
454 score_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
455 tree type, int named)
457 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
458 return score7_function_arg_advance (cum, mode, type, named);
459 else if (TARGET_SCORE3)
460 return score3_function_arg_advance (cum, mode, type, named);
465 /* Implement TARGET_ARG_PARTIAL_BYTES macro. */
467 score_arg_partial_bytes (CUMULATIVE_ARGS *cum,
468 enum machine_mode mode, tree type, bool named)
470 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
471 return score7_arg_partial_bytes (cum, mode, type, named);
472 else if (TARGET_SCORE3)
473 return score3_arg_partial_bytes (cum, mode, type, named);
478 /* Implement FUNCTION_ARG macro. */
480 score_function_arg (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
481 tree type, int named)
483 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
484 return score7_function_arg (cum, mode, type, named);
485 else if (TARGET_SCORE3)
486 return score3_function_arg (cum, mode, type, named);
491 /* Implement FUNCTION_VALUE and LIBCALL_VALUE. For normal calls,
492 VALTYPE is the return type and MODE is VOIDmode. For libcalls,
493 VALTYPE is null and MODE is the mode of the return value. */
495 score_function_value (tree valtype, tree func ATTRIBUTE_UNUSED,
496 enum machine_mode mode)
498 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
499 return score7_function_value (valtype, func, mode);
500 else if (TARGET_SCORE3)
501 return score3_function_value (valtype, func, mode);
506 /* Implement INITIALIZE_TRAMPOLINE macro. */
508 score_initialize_trampoline (rtx ADDR, rtx FUNC, rtx CHAIN)
510 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
511 return score7_initialize_trampoline (ADDR, FUNC, CHAIN);
512 else if (TARGET_SCORE3)
513 return score3_initialize_trampoline (ADDR, FUNC, CHAIN);
518 /* This function is used to implement REG_MODE_OK_FOR_BASE_P macro. */
520 score_regno_mode_ok_for_base_p (int regno, int strict)
522 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
523 return score7_regno_mode_ok_for_base_p (regno, strict);
524 else if (TARGET_SCORE3)
525 return score3_regno_mode_ok_for_base_p (regno, strict);
530 /* Implement GO_IF_LEGITIMATE_ADDRESS macro. */
532 score_address_p (enum machine_mode mode, rtx x, int strict)
534 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
535 return score7_address_p (mode, x, strict);
536 else if (TARGET_SCORE3)
537 return score3_address_p (mode, x, strict);
542 /* This function is used to implement LEGITIMIZE_ADDRESS. If *XLOC can
543 be legitimized in a way that the generic machinery might not expect,
544 put the new address in *XLOC and return true. */
546 score_legitimize_address (rtx *xloc)
548 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
549 return score7_legitimize_address (xloc);
550 else if (TARGET_SCORE3)
551 return score3_legitimize_address (xloc);
556 /* Return a number assessing the cost of moving a register in class
559 score_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
560 enum reg_class from, enum reg_class to)
562 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
563 return score7_register_move_cost (mode, from, to);
564 else if (TARGET_SCORE3)
565 return score3_register_move_cost (mode, from, to);
570 /* Implement TARGET_RTX_COSTS macro. */
572 score_rtx_costs (rtx x, int code, int outer_code, int *total,
573 bool speed ATTRIBUTE_UNUSED)
575 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
576 return score7_rtx_costs (x, code, outer_code, total);
577 else if (TARGET_SCORE3)
578 return score3_rtx_costs (x, code, outer_code, total);
583 /* Implement TARGET_ADDRESS_COST macro. */
585 score_address_cost (rtx addr,
586 bool speed ATTRIBUTE_UNUSED)
588 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
589 return score7_address_cost (addr);
590 else if (TARGET_SCORE3)
591 return score3_address_cost (addr);
596 /* Implement ASM_OUTPUT_EXTERNAL macro. */
598 score_output_external (FILE *file ATTRIBUTE_UNUSED,
599 tree decl, const char *name)
601 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
602 return score7_output_external (file, decl, name);
603 else if (TARGET_SCORE3)
604 return score3_output_external (file, decl, name);
609 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
610 back to a previous frame. */
612 score_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
614 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
615 return score7_return_addr (count, frame);
616 else if (TARGET_SCORE3)
617 return score3_return_addr (count, frame);
622 /* Implement PRINT_OPERAND macro. */
624 score_print_operand (FILE *file, rtx op, int c)
626 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
627 return score7_print_operand (file, op, c);
628 else if (TARGET_SCORE3)
629 return score3_print_operand (file, op, c);
634 /* Implement PRINT_OPERAND_ADDRESS macro. */
636 score_print_operand_address (FILE *file, rtx x)
638 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
639 return score7_print_operand_address (file, x);
640 else if (TARGET_SCORE3)
641 return score3_print_operand_address (file, x);
646 /* Implement SELECT_CC_MODE macro. */
648 score_select_cc_mode (enum rtx_code op, rtx x, rtx y)
650 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
651 return score7_select_cc_mode (op, x, y);
652 else if (TARGET_SCORE3)
653 return score3_select_cc_mode (op, x, y);
658 /* Return true if X is a symbolic constant that can be calculated in
659 the same way as a bare symbol. If it is, store the type of the
660 symbol in *SYMBOL_TYPE. */
662 score_symbolic_constant_p (rtx x, enum score_symbol_type *symbol_type)
664 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
665 return score7_symbolic_constant_p (x, symbol_type);
666 else if (TARGET_SCORE3)
667 return score3_symbolic_constant_p (x, symbol_type);
672 /* Generate the prologue instructions for entry into a S+core function. */
674 score_prologue (void)
676 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
677 return score7_prologue ();
678 else if (TARGET_SCORE3)
679 return score3_prologue ();
684 /* Generate the epilogue instructions in a S+core function. */
686 score_epilogue (int sibcall_p)
688 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
689 return score7_epilogue (sibcall_p);
690 else if (TARGET_SCORE3)
691 return score3_epilogue (sibcall_p);
697 score_gen_cmp (enum machine_mode mode)
699 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
700 return score7_gen_cmp (mode);
701 else if (TARGET_SCORE3)
702 return score3_gen_cmp (mode);
707 /* Both the call and sibcall patterns need to call this function. */
709 score_call (rtx *ops, bool sib)
711 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
712 return score7_call (ops, sib);
713 else if (TARGET_SCORE3)
714 return score3_call (ops, sib);
719 /* Both the call_value and sibcall_value patterns need to call this function. */
721 score_call_value (rtx *ops, bool sib)
723 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
724 return score7_call_value (ops, sib);
725 else if (TARGET_SCORE3)
726 return score3_call_value (ops, sib);
732 score_movsicc (rtx *ops)
734 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
735 return score7_movsicc (ops);
736 else if (TARGET_SCORE3)
737 return score3_movsicc (ops);
744 score_movdi (rtx *ops)
746 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
747 return score7_movdi (ops);
748 else if (TARGET_SCORE3)
749 return score3_movdi (ops);
755 score_zero_extract_andi (rtx *ops)
757 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
758 return score7_zero_extract_andi (ops);
759 else if (TARGET_SCORE3)
760 return score3_zero_extract_andi (ops);
765 /* Output asm insn for move. */
767 score_move (rtx *ops)
769 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
770 return score7_move (ops);
771 else if (TARGET_SCORE3)
772 return score3_move (ops);
777 /* Output asm insn for load. */
779 score_linsn (rtx *ops, enum score_mem_unit unit, bool sign)
781 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
782 return score7_linsn (ops, unit, sign);
783 else if (TARGET_SCORE3)
784 return score3_linsn (ops, unit, sign);
789 /* Output asm insn for store. */
791 score_sinsn (rtx *ops, enum score_mem_unit unit)
793 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
794 return score7_sinsn (ops, unit);
795 else if (TARGET_SCORE3)
796 return score3_sinsn (ops, unit);
801 /* Output asm insn for load immediate. */
803 score_limm (rtx *ops)
805 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
806 return score7_limm (ops);
807 else if (TARGET_SCORE3)
808 return score3_limm (ops);
814 /* Generate add insn. */
816 score_select_add_imm (rtx *ops, bool set_cc)
818 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
819 return score7_select_add_imm (ops, set_cc);
820 else if (TARGET_SCORE3)
821 return score3_select_add_imm (ops, set_cc);
826 /* Output arith insn. */
828 score_select (rtx *ops, const char *inst_pre,
829 bool commu, const char *letter, bool set_cc)
831 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
832 return score7_select (ops, inst_pre, commu, letter, set_cc);
833 else if (TARGET_SCORE3)
834 return score3_select (ops, inst_pre, commu, letter, set_cc);
839 /* Output switch case insn, only supported in score3. */
841 score_output_casesi (rtx *operands)
844 return score3_output_casesi (operands);
849 /* Output rpush insn, only supported in score3. */
851 score_rpush (rtx *operands)
854 return score3_rpush (operands);
859 /* Output rpop insn, only supported in score3. */
861 score_rpop (rtx *operands)
864 return score3_rpop (operands);
869 /* Emit lcb/lce insns. */
/* Expand an unaligned full-word load via the lcb/lce instruction pair.
   NOTE(review): the listing elides original lines 872-878, which
   presumably pull dst/src/len/off out of OPS and contain the
   early-return for the unsupported case -- confirm in the full source.  */
871 score_unaligned_load (rtx *ops)
/* Only a whole-word, byte-aligned bitfield reference is supported.  */
879 if (INTVAL (len) != BITS_PER_WORD
880 || (INTVAL (off) % BITS_PER_UNIT) != 0)
883 gcc_assert (GET_MODE_SIZE (GET_MODE (dst)) == GET_MODE_SIZE (SImode));
/* Load the source address into a fresh register, then emit the two-part
   unaligned load: lcb starts it, lce completes it into DST (presumably
   per the S+core ISA's cache-buffer load sequence -- confirm).  */
885 addr_reg = copy_addr_to_reg (XEXP (src, 0));
886 emit_insn (gen_move_lcb (addr_reg, addr_reg));
887 emit_insn (gen_move_lce (addr_reg, addr_reg, dst));
892 /* Emit scb/sce insns. */
/* Expand an unaligned full-word store via the scb/sce pair; mirror image
   of score_unaligned_load above.
   NOTE(review): lines 895-901 (operand extraction and the early return)
   are elided from this listing.  */
894 score_unaligned_store (rtx *ops)
/* Only a whole-word, byte-aligned bitfield reference is supported.  */
902 if (INTVAL(len) != BITS_PER_WORD
903 || (INTVAL(off) % BITS_PER_UNIT) != 0)
906 gcc_assert (GET_MODE_SIZE (GET_MODE (src)) == GET_MODE_SIZE (SImode));
/* scb begins the unaligned store of SRC, sce completes it.  */
908 addr_reg = copy_addr_to_reg (XEXP (dst, 0));
909 emit_insn (gen_move_scb (addr_reg, addr_reg, src));
910 emit_insn (gen_move_sce (addr_reg, addr_reg));
915 /* If length is short, generate move insns straight. */
/* Straight-line (unrolled) block move: load LENGTH/UNITS_PER_WORD words
   into pseudos, store them back, then mop up the tail bytes.
   NOTE(review): the listing elides "int i, reg_count; rtx *regs;" and,
   crucially, "length -= leftover;" (original line 924) -- which is what
   makes the adjust_address (..., length) calls at the bottom address the
   start of the tail correctly.  Confirm in the full source.  */
917 score_block_move_straight (rtx dst, rtx src, HOST_WIDE_INT length)
919 HOST_WIDE_INT leftover;
923 leftover = length % UNITS_PER_WORD;
925 reg_count = length / UNITS_PER_WORD;
927 regs = XALLOCAVEC (rtx, reg_count);
928 for (i = 0; i < reg_count; i++)
929 regs[i] = gen_reg_rtx (SImode);
931 /* Load from src to regs. */
/* Aligned source: plain word loads at increasing offsets.  */
932 if (MEM_ALIGN (src) >= BITS_PER_WORD)
934 HOST_WIDE_INT offset = 0;
935 for (i = 0; i < reg_count; offset += UNITS_PER_WORD, i++)
936 emit_move_insn (regs[i], adjust_address (src, SImode, offset));
/* Unaligned source: lcb begins, lcw streams the middle words, lce ends.
   After the loop i == reg_count - 1, so lce fills the last pseudo.  */
938 else if (reg_count >= 1)
940 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
942 emit_insn (gen_move_lcb (src_reg, src_reg));
943 for (i = 0; i < (reg_count - 1); i++)
944 emit_insn (gen_move_lcw (src_reg, src_reg, regs[i]));
945 emit_insn (gen_move_lce (src_reg, src_reg, regs[i]));
948 /* Store regs to dest. */
/* Aligned destination: plain word stores.  */
949 if (MEM_ALIGN (dst) >= BITS_PER_WORD)
951 HOST_WIDE_INT offset = 0;
952 for (i = 0; i < reg_count; offset += UNITS_PER_WORD, i++)
953 emit_move_insn (adjust_address (dst, SImode, offset), regs[i]);
/* Unaligned destination: scb/scw/sce mirror the load sequence.  */
955 else if (reg_count >= 1)
957 rtx dst_reg = copy_addr_to_reg (XEXP (dst, 0));
959 emit_insn (gen_move_scb (dst_reg, dst_reg, regs[0]));
960 for (i = 1; i < reg_count; i++)
961 emit_insn (gen_move_scw (dst_reg, dst_reg, regs[i]));
962 emit_insn (gen_move_sce (dst_reg, dst_reg));
965 /* Mop up any left-over bytes. */
/* (Elided guard "if (leftover > 0)" at original lines 966-967.)
   LENGTH has been rounded down to a word multiple by the elided
   "length -= leftover", so this addresses the tail.  */
968 src = adjust_address (src, BLKmode, length);
969 dst = adjust_address (dst, BLKmode, length);
970 move_by_pieces (dst, src, leftover,
971 MIN (MEM_ALIGN (src), MEM_ALIGN (dst)), 0);
975 /* Generate loop head when dst or src is unaligned. */
/* Copy exactly one word before the main loop so the loop itself can run
   with the lcw/scw streaming forms.
   NOTE(review): the if (src_unaligned) / else and if (dst_unaligned) /
   else guard lines are elided from this listing (original lines
   988-989, 992-993, 996-999); the pairing below is inferred -- confirm.  */
977 score_block_move_loop_head (rtx dst_reg, HOST_WIDE_INT dst_align,
978 rtx src_reg, HOST_WIDE_INT src_align,
979 HOST_WIDE_INT length)
981 bool src_unaligned = (src_align < BITS_PER_WORD);
982 bool dst_unaligned = (dst_align < BITS_PER_WORD);
984 rtx temp = gen_reg_rtx (SImode);
/* Callers only ever request a single-word head.  */
986 gcc_assert (length == UNITS_PER_WORD);
/* Unaligned source: start the streaming load (lcb) and fetch one word.  */
990 emit_insn (gen_move_lcb (src_reg, src_reg));
991 emit_insn (gen_move_lcw (src_reg, src_reg, temp));
/* Aligned source: post-increment word load (advance by 4).  */
994 emit_insn (gen_move_lw_a (src_reg,
995 src_reg, gen_int_mode (4, SImode), temp));
/* Unaligned destination: start the streaming store.  */
998 emit_insn (gen_move_scb (dst_reg, dst_reg, temp))
/* Aligned destination: post-increment word store.  */
1000 emit_insn (gen_move_sw_a (dst_reg,
1001 dst_reg, gen_int_mode (4, SImode), temp));
1004 /* Generate loop body, copy length bytes per iteration. */
/* One iteration of the copy loop: stream LENGTH/UNITS_PER_WORD words
   through pseudos, choosing streaming (lcw/scw) or post-increment
   (lw_a/sw_a) forms per side depending on alignment.
   NOTE(review): the "int i;" declaration and the
   if (src_unaligned)/else and if (dst_unaligned)/else guard lines are
   elided from this listing (original lines 1019-1020, 1024-1025,
   1031-1032, 1036-1037); the pairing below is inferred -- confirm.  */
1006 score_block_move_loop_body (rtx dst_reg, HOST_WIDE_INT dst_align,
1007 rtx src_reg, HOST_WIDE_INT src_align,
1008 HOST_WIDE_INT length)
1010 int reg_count = length / UNITS_PER_WORD;
1011 rtx *regs = XALLOCAVEC (rtx, reg_count);
1013 bool src_unaligned = (src_align < BITS_PER_WORD);
1014 bool dst_unaligned = (dst_align < BITS_PER_WORD);
1016 for (i = 0; i < reg_count; i++)
1017 regs[i] = gen_reg_rtx (SImode);
/* Unaligned source: streaming word loads (lcb was emitted by the head).  */
1021 for (i = 0; i < reg_count; i++)
1022 emit_insn (gen_move_lcw (src_reg, src_reg, regs[i]));
/* Aligned source: post-increment word loads, advancing src_reg by 4.  */
1026 for (i = 0; i < reg_count; i++)
1027 emit_insn (gen_move_lw_a (src_reg,
1028 src_reg, gen_int_mode (4, SImode), regs[i]));
/* Unaligned destination: streaming word stores.  */
1033 for (i = 0; i < reg_count; i++)
1034 emit_insn (gen_move_scw (dst_reg, dst_reg, regs[i]));
/* Aligned destination: post-increment word stores.  */
1038 for (i = 0; i < reg_count; i++)
1039 emit_insn (gen_move_sw_a (dst_reg,
1040 dst_reg, gen_int_mode (4, SImode), regs[i]));
1044 /* Generate loop foot, copy the leftover bytes. */
/* After the main loop: copy any whole leftover words via one more body,
   terminate a streaming store with sce, then move the sub-word tail with
   a halfword (if both sides are half-aligned) followed by single bytes.
   NOTE(review): several guard lines and declarations ("rtx temp;",
   the if (length ...) around the extra body, the if (dst_unaligned)
   around sce, and the if (leftover > 0) around the tail) are elided
   from this listing -- confirm pairings in the full source.  */
1046 score_block_move_loop_foot (rtx dst_reg, HOST_WIDE_INT dst_align,
1047 rtx src_reg, HOST_WIDE_INT src_align,
1048 HOST_WIDE_INT length)
1050 bool src_unaligned = (src_align < BITS_PER_WORD);
1051 bool dst_unaligned = (dst_align < BITS_PER_WORD);
1053 HOST_WIDE_INT leftover;
1055 leftover = length % UNITS_PER_WORD;
/* Copy the remaining whole words with one more loop-body expansion.  */
1059 score_block_move_loop_body (dst_reg, dst_align,
1060 src_reg, src_align, length);
/* Close out a streaming (scb/scw) store sequence.  */
1063 emit_insn (gen_move_sce (dst_reg, dst_reg));
/* A streaming lce/sce leaves the pointer one word past the data, so the
   first tail access must back up by 4 on the unaligned side(s).  */
1067 HOST_WIDE_INT src_adj = src_unaligned ? -4 : 0;
1068 HOST_WIDE_INT dst_adj = dst_unaligned ? -4 : 0;
1071 gcc_assert (leftover < UNITS_PER_WORD);
/* Halfword tail move when at least 2 bytes remain and both sides are
   halfword-aligned.  */
1073 if (leftover >= UNITS_PER_WORD / 2
1074 && src_align >= BITS_PER_WORD / 2
1075 && dst_align >= BITS_PER_WORD / 2)
1077 temp = gen_reg_rtx (HImode);
1078 emit_insn (gen_move_lhu_b (src_reg, src_reg,
1079 gen_int_mode (src_adj, SImode), temp));
1080 emit_insn (gen_move_sh_b (dst_reg, dst_reg,
1081 gen_int_mode (dst_adj, SImode), temp));
1082 leftover -= UNITS_PER_WORD / 2;
/* Subsequent byte accesses step forward past the halfword just moved.  */
1083 src_adj = UNITS_PER_WORD / 2;
1084 dst_adj = UNITS_PER_WORD / 2;
/* Byte-at-a-time for whatever is left (elided: the loop presumably
   decrements leftover and resets the adjustments -- lines 1094+).  */
1087 while (leftover > 0)
1089 temp = gen_reg_rtx (QImode);
1090 emit_insn (gen_move_lbu_b (src_reg, src_reg,
1091 gen_int_mode (src_adj, SImode), temp));
1092 emit_insn (gen_move_sb_b (dst_reg, dst_reg,
1093 gen_int_mode (dst_adj, SImode), temp));
/* Loop-based block moves copy between MIN_MOVE_REGS and MAX_MOVE_REGS
   words (3-4 words, i.e. 12-16 bytes on a 32-bit target) per iteration.  */
1101 #define MIN_MOVE_REGS 3
1102 #define MIN_MOVE_BYTES (MIN_MOVE_REGS * UNITS_PER_WORD)
1103 #define MAX_MOVE_REGS 4
1104 #define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD)
1106 /* The length is large, generate a loop if necessary.
1107 The loop consists of a loop head, body and foot. */
/* NOTE(review): this listing elides the guard around head_length
   (presumably "if (gen_loop_head)"), the body of the for-loop that
   searches for a bytes-per-iteration count giving >= 2 iterations, the
   "iteration < 2 -> straight move" bail-out guard, and the label emit/
   loop-back structure -- confirm in the full source.  */
1109 score_block_move_loop (rtx dst, rtx src, HOST_WIDE_INT length)
1111 HOST_WIDE_INT src_align = MEM_ALIGN (src);
1112 HOST_WIDE_INT dst_align = MEM_ALIGN (dst);
1113 HOST_WIDE_INT loop_mov_bytes;
1114 HOST_WIDE_INT iteration = 0;
1115 HOST_WIDE_INT head_length = 0, leftover;
1116 rtx label, src_reg, dst_reg, final_dst;
/* A one-word head is needed whenever either side is unaligned, so the
   loop proper can use the streaming insns.  */
1118 bool gen_loop_head = (src_align < BITS_PER_WORD
1119 || dst_align < BITS_PER_WORD)
1122 head_length += UNITS_PER_WORD;
/* Find the largest per-iteration byte count (16 down to 12) that yields
   a whole number of iterations worth doing.  */
1124 for (loop_mov_bytes = MAX_MOVE_BYTES;
1125 loop_mov_bytes >= MIN_MOVE_BYTES;
1126 loop_mov_bytes -= UNITS_PER_WORD)
1128 iteration = (length - head_length) / loop_mov_bytes;
/* Too few iterations to be worth a loop: emit a straight move instead
   (elided guard at original lines 1132-1133).  */
1134 score_block_move_straight (dst, src, length);
1138 leftover = (length - head_length) % loop_mov_bytes;
/* Materialize both addresses in registers; FINAL_DST is the stop
   address the loop compares against.  */
1141 src_reg = copy_addr_to_reg (XEXP (src, 0));
1142 dst_reg = copy_addr_to_reg (XEXP (dst, 0));
1143 final_dst = expand_simple_binop (Pmode, PLUS, dst_reg, GEN_INT (length),
1147 score_block_move_loop_head (dst_reg, dst_align,
1148 src_reg, src_align, head_length);
1150 label = gen_label_rtx ();
1153 score_block_move_loop_body (dst_reg, dst_align,
1154 src_reg, src_align, loop_mov_bytes);
/* Loop back until dst_reg reaches final_dst.  */
1156 emit_insn (gen_cmpsi (dst_reg, final_dst));
1157 emit_jump_insn (gen_bne (label));
1159 score_block_move_loop_foot (dst_reg, dst_align,
1160 src_reg, src_align, leftover);
1163 /* Generate block move, for misc.md: "movmemsi". */
/* Top-level movmemsi expander.  Returns whether the move was expanded
   (the return statements and the dst/src extraction from OPS at
   original lines 1166-1168 are elided from this listing -- confirm).  */
1165 score_block_move (rtx *ops)
1169 rtx length = ops[2];
/* NOTE(review): this guard appears to bail out (fail the expansion) for
   unaligned word-or-larger moves on little-endian targets, presumably
   because the lcb/lcw/lce streaming insns are big-endian only -- confirm
   against the score ISA docs.  */
1171 if (TARGET_LITTLE_ENDIAN
1172 && (MEM_ALIGN (src) < BITS_PER_WORD || MEM_ALIGN (dst) < BITS_PER_WORD)
1173 && INTVAL (length) >= UNITS_PER_WORD)
/* Only constant lengths are expanded inline.  */
1176 if (GET_CODE (length) == CONST_INT)
/* Short moves (<= 32 bytes with the default macros) are fully unrolled.  */
1178 if (INTVAL (length) <= 2 * MAX_MOVE_BYTES)
1180 score_block_move_straight (dst, src, INTVAL (length));
/* Longer moves get a loop, but not when the loop unroller would undo
   the size benefit.  */
1183 else if (optimize &&
1184 !(flag_unroll_loops || flag_unroll_all_loops))
1186 score_block_move_loop (dst, src, INTVAL (length));
/* The target hook vector, built from the TARGET_* overrides above.  */
1193 struct gcc_target targetm = TARGET_INITIALIZER;