/* Subroutines for assembler code output on the TMS320C[34]x
   Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004
   Free Software Foundation, Inc.

   Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
   and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */
/* Some output-actions in c4x.md need these.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "recog.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

static int c4x_leaf_function;

static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                        /* Reg        Modes           Saved.  */
  R0R1_REGS,            /* R0         QI, QF, HF      No.  */
  R0R1_REGS,            /* R1         QI, QF, HF      No.  */
  R2R3_REGS,            /* R2         QI, QF, HF      No.  */
  R2R3_REGS,            /* R3         QI, QF, HF      No.  */
  EXT_LOW_REGS,         /* R4         QI, QF, HF      QI.  */
  EXT_LOW_REGS,         /* R5         QI, QF, HF      QI.  */
  EXT_LOW_REGS,         /* R6         QI, QF, HF      QF.  */
  EXT_LOW_REGS,         /* R7         QI, QF, HF      QF.  */
  ADDR_REGS,            /* AR0        QI              No.  */
  ADDR_REGS,            /* AR1        QI              No.  */
  ADDR_REGS,            /* AR2        QI              No.  */
  ADDR_REGS,            /* AR3        QI              QI.  */
  ADDR_REGS,            /* AR4        QI              QI.  */
  ADDR_REGS,            /* AR5        QI              QI.  */
  ADDR_REGS,            /* AR6        QI              QI.  */
  ADDR_REGS,            /* AR7        QI              QI.  */
  DP_REG,               /* DP         QI              No.  */
  INDEX_REGS,           /* IR0        QI              No.  */
  INDEX_REGS,           /* IR1        QI              No.  */
  BK_REG,               /* BK         QI              QI.  */
  SP_REG,               /* SP         QI              No.  */
  ST_REG,               /* ST         CC              No.  */
  NO_REGS,              /* DIE/IE                     No.  */
  NO_REGS,              /* IIE/IF                     No.  */
  NO_REGS,              /* IIF/IOF                    No.  */
  INT_REGS,             /* RS         QI              No.  */
  INT_REGS,             /* RE         QI              No.  */
  RC_REG,               /* RC         QI              No.  */
  EXT_REGS,             /* R8         QI, QF, HF      QI.  */
  EXT_REGS,             /* R9         QI, QF, HF      No.  */
  EXT_REGS,             /* R10        QI, QF, HF      No.  */
  EXT_REGS,             /* R11        QI, QF, HF      No.  */
};
enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                        /* Reg        Modes           Saved.  */
  HFmode,               /* R0         QI, QF, HF      No.  */
  HFmode,               /* R1         QI, QF, HF      No.  */
  HFmode,               /* R2         QI, QF, HF      No.  */
  HFmode,               /* R3         QI, QF, HF      No.  */
  QFmode,               /* R4         QI, QF, HF      QI.  */
  QFmode,               /* R5         QI, QF, HF      QI.  */
  QImode,               /* R6         QI, QF, HF      QF.  */
  QImode,               /* R7         QI, QF, HF      QF.  */
  QImode,               /* AR0        QI              No.  */
  QImode,               /* AR1        QI              No.  */
  QImode,               /* AR2        QI              No.  */
  QImode,               /* AR3        QI              QI.  */
  QImode,               /* AR4        QI              QI.  */
  QImode,               /* AR5        QI              QI.  */
  QImode,               /* AR6        QI              QI.  */
  QImode,               /* AR7        QI              QI.  */
  VOIDmode,             /* DP         QI              No.  */
  QImode,               /* IR0        QI              No.  */
  QImode,               /* IR1        QI              No.  */
  QImode,               /* BK         QI              QI.  */
  VOIDmode,             /* SP         QI              No.  */
  VOIDmode,             /* ST         CC              No.  */
  VOIDmode,             /* DIE/IE                     No.  */
  VOIDmode,             /* IIE/IF                     No.  */
  VOIDmode,             /* IIF/IOF                    No.  */
  QImode,               /* RS         QI              No.  */
  QImode,               /* RE         QI              No.  */
  VOIDmode,             /* RC         QI              No.  */
  QFmode,               /* R8         QI, QF, HF      QI.  */
  HFmode,               /* R9         QI, QF, HF      No.  */
  HFmode,               /* R10        QI, QF, HF      No.  */
  HFmode,               /* R11        QI, QF, HF      No.  */
};
/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

rtx c4x_compare_op0;
rtx c4x_compare_op1;

int c4x_rpts_cycles = 0;        /* Max. cycles for RPTS.  */
int c4x_cpu_version = 40;       /* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;
tree naked_tree = NULL_TREE;
/* Forward declarations.  */
static bool c4x_handle_option (size_t, const char *, int);
static int c4x_isr_reg_used_p (unsigned int);
static int c4x_leaf_function_p (void);
static int c4x_naked_function_p (void);
static int c4x_immed_float_p (rtx);
static int c4x_a_register (rtx);
static int c4x_x_register (rtx);
static int c4x_immed_int_constant (rtx);
static int c4x_immed_float_constant (rtx);
static int c4x_K_constant (rtx);
static int c4x_N_constant (rtx);
static int c4x_O_constant (rtx);
static int c4x_R_indirect (rtx);
static int c4x_S_indirect (rtx);
static void c4x_S_address_parse (rtx, int *, int *, int *, int *);
static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
static void c4x_file_start (void);
static void c4x_file_end (void);
static void c4x_check_attribute (const char *, tree, tree, tree *);
static int c4x_r11_set_p (rtx);
static int c4x_rptb_valid_p (rtx, rtx);
static void c4x_reorg (void);
static int c4x_label_ref_used_p (rtx, rtx);
static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes (tree, tree *);
static void c4x_asm_named_section (const char *, unsigned int, tree);
static int c4x_adjust_cost (rtx, rtx, rtx, int);
static void c4x_globalize_label (FILE *, const char *);
static bool c4x_rtx_costs (rtx, int, int, int *);
static int c4x_address_cost (rtx);
static void c4x_init_libfuncs (void);
static void c4x_external_libcall (rtx);
static rtx c4x_struct_value_rtx (tree, int);
static tree c4x_gimplify_va_arg_expr (tree, tree, tree *, tree *);
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START c4x_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END c4x_file_end

#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall

/* Play safe, not the fastest code.  */
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_ALIASES | MASK_PARALLEL \
                                     | MASK_PARALLEL_MPY | MASK_RPTB)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION c4x_handle_option

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS c4x_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST c4x_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr

struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_HANDLE_OPTION.  */

static bool
c4x_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_m30: c4x_cpu_version = 30; return true;
    case OPT_m31: c4x_cpu_version = 31; return true;
    case OPT_m32: c4x_cpu_version = 32; return true;
    case OPT_m33: c4x_cpu_version = 33; return true;
    case OPT_m40: c4x_cpu_version = 40; return true;
    case OPT_m44: c4x_cpu_version = 44; return true;

    case OPT_mcpu_:
      if (arg[0] == 'c' || arg[0] == 'C')
        arg++;
      value = atoi (arg);
      switch (value)
        {
        case 30: case 31: case 32: case 33: case 40: case 44:
          c4x_cpu_version = value;
          return true;
        }
      return false;

    case OPT_mrpts_:
      c4x_rpts_cycles = value;
      return true;

    default:
      return true;
    }
}
/* Override command line options.
   Called once after all options have been parsed.
   Mostly we process the processor type and sometimes adjust other
   TARGET_ options.  */

void
c4x_override_options (void)
{
  /* Convert foo / 8.0 into foo * 0.125, etc.  */
  set_fast_math_flags (1);

  /* We should phase out the following at some stage.
     This provides compatibility with the old -mno-aliases option.  */
  if (! TARGET_ALIASES && ! flag_argument_noalias)
    flag_argument_noalias = 1;

  if (! TARGET_C3X)
    target_flags |= MASK_MPYI | MASK_DB;

  if (optimize < 2)
    target_flags &= ~(MASK_RPTB | MASK_PARALLEL);

  if (! TARGET_PARALLEL)
    target_flags &= ~MASK_PARALLEL_MPY;
}
/* This is called before c4x_override_options.  */

void
c4x_optimization_options (int level ATTRIBUTE_UNUSED,
                          int size ATTRIBUTE_UNUSED)
{
  /* Scheduling before register allocation can screw up global
     register allocation, especially for functions that use MPY||ADD
     instructions.  The benefit we get by scheduling before
     register allocation is probably marginal anyhow.  */
  flag_schedule_insns = 0;
}
/* Write an ASCII string.  */

#define C4X_ASCII_LIMIT 40

void
c4x_output_ascii (FILE *stream, const char *ptr, int len)
{
  char sbuf[C4X_ASCII_LIMIT + 1];
  int s, l, special, first = 1, onlys;

  fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)
    {
      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable - add to buffer.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
        {
          if (s < C4X_ASCII_LIMIT - 1)
            fprintf (stream, "\"%s\"", sbuf);

          if (TARGET_TI && l >= 80 && len > 1)
            fprintf (stream, "\n\t.byte\t");
        }

      fprintf (stream, "%d", *ptr);

      if (TARGET_TI && l >= 80 && len > 1)
        fprintf (stream, "\n\t.byte\t");
    }

  fprintf (stream, "\"%s\"", sbuf);
  fputc ('\n', stream);
}
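
/* A sketch of the resulting output: printable characters are collected
   into a quoted string and everything else is written as a decimal
   byte value, so the three characters 'H', 'i', '\n' come out roughly
   as

        .byte   "Hi",10

   (the exact separators depend on the TARGET_TI line-length handling
   above).  */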
int
c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  switch (mode)
    {
#if Pmode != QImode
    case Pmode:                 /* Pointer (24/32 bits).  */
#endif
    case QImode:                /* Integer (32 bits).  */
      return IS_INT_REGNO (regno);

    case QFmode:                /* Float, Double (32 bits).  */
    case HFmode:                /* Long Double (40 bits).  */
      return IS_EXT_REGNO (regno);

    case CCmode:                /* Condition Codes.  */
    case CC_NOOVmode:           /* Condition Codes.  */
      return IS_ST_REGNO (regno);

    case HImode:                /* Long Long (64 bits).  */
      /* We need two registers to store long longs.  Note that
         it is much easier to constrain the first register
         to start on an even boundary.  */
      return IS_INT_REGNO (regno)
        && IS_INT_REGNO (regno + 1)
        && (regno & 1) == 0;

    default:
      return 0;                 /* We don't support these modes.  */
    }
}
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */

int
c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
{
  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}
/* The TI C3x C compiler register argument runtime model uses 6 registers,
   AR2, R2, R3, RC, RS, RE.

   The first two floating point arguments (float, double, long double)
   that are found scanning from left to right are assigned to R2 and R3.

   The remaining integer (char, short, int, long) or pointer arguments
   are assigned to the remaining registers in the order AR2, R2, R3,
   RC, RS, RE when scanning left to right, except for the last named
   argument prior to an ellipsis denoting variable number of
   arguments.  We don't have to worry about the latter condition since
   function.c treats the last named argument as anonymous (unnamed).

   All arguments that cannot be passed in registers are pushed onto
   the stack in reverse order (right to left).  GCC handles that for us.

   c4x_init_cumulative_args() is called at the start, so we can parse
   the args to see how many floating point arguments and how many
   integer (or pointer) arguments there are.  c4x_function_arg() is
   then called (sometimes repeatedly) for each argument (parsed left
   to right) to obtain the register to pass the argument in, or zero
   if the argument is to be passed on the stack.  Once the compiler is
   happy, c4x_function_arg_advance() is called.

   Don't use R0 to pass arguments in, we use 0 to indicate a stack
   argument.  */

static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
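
/* A worked example (illustrative only, derived from the tables above):
   for the prototyped function

        void f (int a, float b, long c, double d, short e);

   the floats b and d are assigned R2 and R3, so maxfloats is 2, and
   the integer arguments a, c and e are then taken from
   c4x_int_reglist[2], i.e. passed in AR2, RC and RS respectively.  */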
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

void
c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
{
  tree param, next_param;

  cum->floats = cum->ints = 0;
  cum->init = 0;
  cum->var = 0;
  cum->args = 0;

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);

          fprintf (stderr, "fntype code = %s, ret code = %s",
                   tree_code_name[(int) TREE_CODE (fntype)],
                   tree_code_name[(int) TREE_CODE (ret_type)]);
        }
      else
        fprintf (stderr, "no fntype");

      if (libname)
        fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
        {
          enum machine_mode mode;

          /* If the last arg doesn't have void type then we have
             variable arguments.  */
          if (! next_param)
            cum->var = 1;

          if ((mode = TYPE_MODE (type)))
            {
              if (! targetm.calls.must_pass_in_stack (mode, type))
                {
                  /* Look for float, double, or long double argument.  */
                  if (mode == QFmode || mode == HFmode)
                    cum->floats++;
                  /* Look for integer, enumeral, boolean, char, or pointer
                     argument.  */
                  else if (mode == QImode || mode == Pmode)
                    cum->ints++;
                }
            }
          cum->args++;
        }
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
             cum->prototype ? ", prototype" : "",
             cum->var ? ", variable args" : "",
             cum->args);
}
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be
   available.)  */

void
c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                          tree type, int named)
{
  if (TARGET_DEBUG)
    fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
             GET_MODE_NAME (mode), named);
  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* Handle libcall arguments.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
}
/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

rtx
c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                  tree type, int named)
{
  int reg = 0;                  /* Default to passing argument on stack.  */

  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed
         in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
        6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
        cum->maxints = 6;

      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        {
          if (cum->floats < cum->maxfloats)
            reg = c4x_fp_reglist[cum->floats];
        }
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        {
          if (cum->ints < cum->maxints)
            reg = c4x_int_reglist[cum->maxfloats][cum->ints];
        }
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
         since we're only calling functions in libgcc.  Thus we could
         pass arguments for long longs in registers rather than on the
         stack.  In the meantime, use the odd TI format.  We make the
         assumption that we won't have more than two floating point
         args, six integer args, and that all the arguments are of the
         same mode.  */
      if (mode == QFmode || mode == HFmode)
        reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
        reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
               GET_MODE_NAME (mode), named);
      if (reg)
        fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
        fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  else
    return NULL_RTX;
}
/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs stuff can't handle.  */

static tree
c4x_gimplify_va_arg_expr (tree valist, tree type,
                          tree *pre_p ATTRIBUTE_UNUSED,
                          tree *post_p ATTRIBUTE_UNUSED)
{
  tree t;
  bool indirect;

  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
             build_int_cst (NULL_TREE, int_size_in_bytes (type)));
  t = fold_convert (build_pointer_type (type), t);
  t = build_fold_indirect_ref (t);

  if (indirect)
    t = build_fold_indirect_ref (t);

  return t;
}
static int
c4x_isr_reg_used_p (unsigned int regno)
{
  /* Don't save/restore FP or ST, we handle them separately.  */
  if (regno == FRAME_POINTER_REGNUM
      || IS_ST_REGNO (regno))
    return 0;

  /* We could be a little smarter about saving/restoring DP.
     We'll only save it for the big memory model or if
     we're paranoid. ;-)  */
  if (IS_DP_REGNO (regno))
    return ! TARGET_SMALL || TARGET_PARANOID;

  /* Only save/restore regs in leaf function that are used.  */
  if (c4x_leaf_function)
    return regs_ever_live[regno] && fixed_regs[regno] == 0;

  /* Only save/restore regs that are used by the ISR and regs
     that are likely to be used by functions the ISR calls
     if they are not fixed.  */
  return IS_EXT_REGNO (regno)
    || ((regs_ever_live[regno] || call_used_regs[regno])
        && fixed_regs[regno] == 0);
}
static int
c4x_leaf_function_p (void)
{
  /* A leaf function makes no calls, so we only need
     to save/restore the registers we actually use.
     For the global variable leaf_function to be set, we need
     to define LEAF_REGISTERS and all that it entails.
     Let's check ourselves....  */

  if (lookup_attribute ("leaf_pretend",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    {
      /* Use the leaf_pretend attribute at your own risk.  This is a hack
         to speed up ISRs that call a function infrequently where the
         overhead of saving and restoring the additional registers is not
         warranted.  You must save and restore the additional registers
         required by the called function.  Caveat emptor.  Here's enough
         rope....  */
      return 1;
    }

  if (leaf_function_p ())
    return 1;

  return 0;
}
static int
c4x_naked_function_p (void)
{
  tree type;

  type = TREE_TYPE (current_function_decl);
  return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
}
int
c4x_interrupt_function_p (void)
{
  const char *cfun_name;

  if (lookup_attribute ("interrupt",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Look for TI style c_intnn.  */
  cfun_name = current_function_name ();
  return cfun_name[0] == 'c'
    && cfun_name[1] == '_'
    && cfun_name[2] == 'i'
    && cfun_name[3] == 'n'
    && cfun_name[4] == 't'
    && ISDIGIT (cfun_name[5])
    && ISDIGIT (cfun_name[6]);
}
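
/* Under the TI naming convention checked above, a function such as
   c_int01 is treated as an interrupt handler even without an
   __attribute__ ((interrupt)) annotation.  */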
void
c4x_expand_prologue (void)
{
  unsigned int regno;
  int size = get_frame_size ();
  rtx insn;

  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (if >= -O2) and this
     is used so it won't needlessly push the frame pointer.  */
  int dont_push_ar3;

  /* For __naked__ function don't build a prologue.  */
  if (c4x_naked_function_p ())
    return;

  /* For __interrupt__ function build specific prologue.  */
  if (c4x_interrupt_function_p ())
    {
      c4x_leaf_function = c4x_leaf_function_p ();

      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      if (size)
        {
          insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                       gen_rtx_REG (QImode, SP_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          /* We require that an ISR uses fewer than 32768 words of
             local variables, otherwise we have to go to lots of
             effort to save a register, load it with the desired size,
             adjust the stack pointer, and then restore the modified
             register.  Frankly, I think it is a poor ISR that
             requires more than 32767 words of local temporary
             storage!  */
          if (size > 32767)
            error ("ISR %s requires %d words of local vars, max is 32767",
                   current_function_name (), size);

          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (c4x_isr_reg_used_p (regno))
            {
              if (regno == DP_REGNO)
                {
                  insn = emit_insn (gen_push_dp ());
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else
                {
                  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (IS_EXT_REGNO (regno))
                    {
                      insn = emit_insn (gen_pushqf
                                        (gen_rtx_REG (QFmode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
            }
        }
      /* We need to clear the repeat mode flag if the ISR is
         going to use a RPTB instruction or uses the RC, RS, or RE
         registers.  */
      if (regs_ever_live[RC_REGNO]
          || regs_ever_live[RS_REGNO]
          || regs_ever_live[RE_REGNO])
        {
          insn = emit_insn (gen_andn_st (GEN_INT (~0x100)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      /* Reload DP reg if we are paranoid about some turkey
         violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)
        {
          insn = emit_insn (gen_set_ldp_prologue
                            (gen_rtx_REG (QImode, DP_REGNO),
                             gen_rtx_SYMBOL_REF (QImode, "data_sec")));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                           gen_rtx_REG (QImode, SP_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              dont_push_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used, we don't need to push it.  */
              dont_push_ar3 = 1;
            }
        }
      else
        {
          /* If we use ar3, we need to push it.  */
          dont_push_ar3 = 0;
          if ((size != 0) || (current_function_args_size != 0))
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big, it will take multiple operations
             to increase the SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT (size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                             gen_rtx_REG (QImode, R1_REGNO),
                                             GEN_INT (-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT (size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO),
                                        GEN_INT (size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32767 words, so we can directly
             add the number.  */
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (regs_ever_live[regno] && ! call_used_regs[regno])
            {
              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_pushqi
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                  insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else if ((! dont_push_ar3) || (regno != AR3_REGNO))
                {
                  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }
    }
}
void
c4x_expand_epilogue (void)
{
  int regno;
  int jump = 0;
  int dont_pop_ar3;
  rtx insn;
  int size = get_frame_size ();

  /* For __naked__ function build no epilogue.  */
  if (c4x_naked_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For __interrupt__ function build specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
        {
          if (! c4x_isr_reg_used_p (regno))
            continue;
          if (regno == DP_REGNO)
            {
              insn = emit_insn (gen_pop_dp ());
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              /* We have to use unspec because the compiler will delete insns
                 that are not call-saved.  */
              if (IS_EXT_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode,
                                                               regno)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }
      if (size)
        {
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_popqi
                            (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn
                (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
                            gen_rtx_MEM (QImode,
                                         gen_rtx_PLUS
                                         (QImode, gen_rtx_REG (QImode,
                                                               AR3_REGNO),
                                          constm1_rtx))));
              RTX_FRAME_RELATED_P (insn) = 1;

              /* We already have the return value and the fp,
                 so we need to add those to the stack.  */
              size += 2;
              jump = 1;
              dont_pop_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used for anything, we don't need to
                 restore it.  */
              dont_pop_ar3 = 1;
            }
        }
      else
        {
          dont_pop_ar3 = 0;     /* If we use ar3, we need to pop it.  */
          if (size || current_function_args_size)
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      /* Now restore the saved registers, putting in the delayed branch
         where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        {
          if (regs_ever_live[regno] && ! call_used_regs[regno])
            {
              if (regno == AR3_REGNO && dont_pop_ar3)
                continue;

              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_popqi_unspec
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
              else
                {
                  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }

      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              /* Restore the old FP.  */
              insn = emit_insn
                (gen_movqi
                 (gen_rtx_REG (QImode, AR3_REGNO),
                  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big, it will take multiple operations
             to decrease the SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT (size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                             gen_rtx_REG (QImode, R3_REGNO),
                                             GEN_INT (-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT (size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO),
                                        GEN_INT (size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32768 words, so we can directly
             subtract the number.  */
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (jump)
        {
          insn = emit_jump_insn (gen_return_indirect_internal
                                 (gen_rtx_REG (QImode, R2_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else
        {
          insn = emit_jump_insn (gen_return_from_epilogue ());
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
}
int
c4x_null_epilogue_p (void)
{
  int regno;

  if (reload_completed
      && ! c4x_naked_function_p ()
      && ! c4x_interrupt_function_p ()
      && ! current_function_calls_alloca
      && ! current_function_args_size
      && ! (optimize < 2)
      && ! get_frame_size ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        if (regs_ever_live[regno] && ! call_used_regs[regno]
            && (regno != AR3_REGNO))
          return 0;
      return 1;
    }
  return 0;
}
int
c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
         address.  Undo this silliness.  */
      op1 = XEXP (op1, 1);
    }

  if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
        {
          /* Alias analysis seems to do a better job if we force
             constant addresses to memory after reload.  */
          emit_insn (gen_load_immed_address (op0, op1));
          return 1;
        }
      else
        {
          /* Stick symbol or label address into the constant pool.  */
          op1 = force_const_mem (Pmode, op1);
        }
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
         constants...  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
        op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
        {
          op0 = copy_rtx (op0);
          PUT_MODE (op0, QImode);
        }
      else
        abort ();

      emit_insn (gen_storeqf_int_clobber (op0, op1));
      return 1;
    }

  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
        op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
        {
          op1 = copy_rtx (op1);
          PUT_MODE (op1, QImode);
        }
      else
        abort ();

      emit_insn (gen_loadqf_int_clobber (op0, op1));
      return 1;
    }

  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return 1;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return 1;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  return 0;
}
void
c4x_emit_libcall (rtx libcall, enum rtx_code code,
                  enum machine_mode dmode, enum machine_mode smode,
                  int noperands, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  switch (noperands)
    {
    case 2:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
                                     operands[1], smode);
      equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
      break;

    case 3:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
                                     operands[1], smode, operands[2], smode);
      equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
      break;

    default:
      abort ();
    }

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}


void
c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
                   enum machine_mode mode, rtx *operands)
{
  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
}
void
c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
                        enum machine_mode mode, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
                                 operands[1], mode, operands[2], mode);
  equiv = gen_rtx_TRUNCATE (mode,
                            gen_rtx_LSHIFTRT (HImode,
                                              gen_rtx_MULT (HImode,
                                                            gen_rtx_fmt_e (code, HImode, operands[1]),
                                                            gen_rtx_fmt_e (code, HImode, operands[2])),
                                              GEN_INT (32)));
  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
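
/* The EQUIV rtx built above tells the optimizers that the libcall
   computes the high word of the widening multiply, roughly
   (ext (op1) * ext (op2)) >> 32 evaluated in HImode, so CSE can
   eliminate duplicate calls.  */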
int
c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
{
  rtx base = NULL_RTX;          /* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;          /* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;          /* Displacement.  */
  enum rtx_code code;

  code = GET_CODE (addr);
  switch (code)
    {
      /* Register indirect with auto increment/decrement.  We don't
         allow SP here---push_operand should recognize an operand
         being pushed on the stack.  */

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
        return 0;

    case POST_INC:
      base = XEXP (addr, 0);
      if (! REG_P (base))
        return 0;
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (mode != QImode && mode != QFmode)
          return 0;

        if (! REG_P (op0)
            || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
          return 0;
        base = XEXP (op1, 0);
        if (! REG_P (base))
          return 0;
        if (REGNO (base) != REGNO (op0))
          return 0;
        if (REG_P (XEXP (op1, 1)))
          indx = XEXP (op1, 1);
        else
          disp = XEXP (op1, 1);
      }
      break;

      /* Register indirect.  */
    case REG:
      base = addr;
      break;

      /* Register indirect with displacement or index.  */
    case PLUS:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);
        enum rtx_code code0 = GET_CODE (op0);

        switch (code0)
          {
          case REG:
            if (REG_P (op1))
              {
                base = op0;     /* Base + index.  */
                indx = op1;
                if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
                  {
                    base = op1;
                    indx = op0;
                  }
              }
            else
              {
                base = op0;     /* Base + displacement.  */
                disp = op1;
              }
            break;

          default:
            return 0;
          }
      }
      break;

      /* Direct addressing with DP register.  */
    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        /* HImode and HFmode direct memory references aren't truly
           offsettable (consider case at end of data page).  We
           probably get better code by loading a pointer and using an
           indirect memory reference.  */
        if (mode == HImode || mode == HFmode)
          return 0;

        if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
          return 0;

        if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
          return 1;

        if (GET_CODE (op1) == CONST)
          return 1;
        return 0;
      }

      /* Direct addressing with some work for the assembler...  */
    case CONST:
      /* Direct addressing.  */
    case LABEL_REF:
    case SYMBOL_REF:
      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
        return 1;
      /* These need to be converted to a LO_SUM (...).
         LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */
      return 0;

      /* Do not allow direct memory access to absolute addresses.
         This is more pain than it's worth, especially for the
         small memory model where we can't guarantee that
         this address is within the data page---we don't want
         to modify the DP register in the small memory model,
         even temporarily, since an interrupt can sneak in....  */
    case CONST_INT:
      return 0;

      /* Indirect indirect addressing.  */
    case MEM:
      return 0;

    case CONST_DOUBLE:
      fatal_insn ("using CONST_DOUBLE for address", addr);

    default:
      return 0;
    }

  /* Validate the base register.  */
  if (base)
    {
      /* Check that the address is offsettable for HImode and HFmode.  */
      if (indx && (mode == HImode || mode == HFmode))
        return 0;

      /* Handle DP based stuff.  */
      if (REGNO (base) == DP_REGNO)
        return 1;
      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
        return 0;
      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
        return 0;
    }

  /* Now validate the index register.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
        return 0;
      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
        return 0;
      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
        return 0;
    }

  /* Validate displacement.  */
  if (disp)
    {
      if (GET_CODE (disp) != CONST_INT)
        return 0;
      if (mode == HImode || mode == HFmode)
        {
          /* The offset displacement must be legitimate.  */
          if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
            return 0;
        }
      else
        {
          if (! IS_DISP8_CONST (INTVAL (disp)))
            return 0;
        }
      /* Can't add an index with a disp.  */
      if (indx)
        return 0;
    }
  return 1;
}
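
/* For reference, some addresses accepted above in QImode (nonstrict,
   illustrative): register indirect (reg ar0) giving *ar0, base plus
   displacement (plus (reg ar0) (const_int 5)) giving *+ar0(5), base
   plus index (plus (reg ar0) (reg ir0)) giving *+ar0(ir0), and direct
   addressing (lo_sum (reg dp) (symbol_ref)) giving @sym.  HImode and
   HFmode references must additionally be offsettable.  */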
rtx
c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)
    {
      if (mode == HImode || mode == HFmode)
        {
          /* We need to force the address into
             a register so that it is offsettable.  */
          rtx addr_reg = gen_reg_rtx (Pmode);
          emit_move_insn (addr_reg, orig);
          return addr_reg;
        }
      else
        {
          rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

          if (! TARGET_SMALL)
            emit_insn (gen_set_ldp (dp_reg, orig));

          return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
        }
    }

  return NULL_RTX;
}
/* Provide the costs of an addressing mode that contains ADDR.
   If ADDR is not a valid address, its cost is irrelevant.
   This is used in cse and loop optimization to determine
   if it is worthwhile storing a common address into a register.
   Unfortunately, the C4x address cost depends on other operands.  */

static int
c4x_address_cost (rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      return 1;

    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      return 1;

      /* These shouldn't be directly generated.  */
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 10;

    case LO_SUM:
      {
        rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
          return TARGET_SMALL ? 3 : 4;

        if (GET_CODE (op1) == CONST)
          {
            rtx offset = const0_rtx;

            op1 = eliminate_constant_term (op1, &offset);

            /* ??? These costs need rethinking...  */
            if (GET_CODE (op1) == LABEL_REF)
              return 3;

            if (GET_CODE (op1) != SYMBOL_REF)
              return 4;

            if (INTVAL (offset) == 0)
              return 3;

            return 4;
          }
        fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
      }
      break;

    case PLUS:
      {
        register rtx op0 = XEXP (addr, 0);
        register rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op0) != REG)
          break;

        switch (GET_CODE (op1))
          {
          default:
            break;

          case REG:
            /* This cost for REG+REG must be greater than the cost
               for REG if we want autoincrement addressing modes.  */
            return 2;

          case CONST_INT:
            /* The following tries to improve GIV combination
               in strength reduction but appears not to help.  */
            if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
              return 1;

            if (IS_DISP1_CONST (INTVAL (op1)))
              return 1;

            if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
              return 2;

            return 3;
          }
      }
    default:
      break;
    }

  return 4;
}
rtx
c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg;

  if (mode == CC_NOOVmode
      && (code == LE || code == GE || code == LT || code == GT))
    return NULL_RTX;

  cc_reg = gen_rtx_REG (mode, ST_REGNO);
  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
                          gen_rtx_COMPARE (mode, x, y)));
  return cc_reg;
}
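
/* For example, for a QImode test `a != b' this emits

        (set (reg:CC st) (compare:CC (reg a) (reg b)))

   and returns the ST register so the caller can construct the branch
   or scc pattern.  */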
char *
c4x_output_cbranch (const char *form, rtx seq)
{
  int delayed = 0;
  int annultrue = 0;
  int annulfalse = 0;
  rtx delay;
  char *cp;
  static char str[100];

  if (final_sequence)
    {
      delay = XVECEXP (final_sequence, 0, 1);
      delayed = ! INSN_ANNULLED_BRANCH_P (seq);
      annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
      annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
    }
  strcpy (str, form);
  cp = &str[strlen (str)];
  return str;
}
void
c4x_print_operand (FILE *file, rtx op, int letter)
{
  rtx op1;
  enum rtx_code code;

  switch (letter)
    {
    case '#':                   /* Delayed.  */
      if (final_sequence)
        fprintf (file, "d");
      return;
    }

  code = GET_CODE (op);
  switch (letter)
    {
    case 'A':                   /* Direct address.  */
      if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
        fprintf (file, "@");
      break;

    case 'H':                   /* Sethi.  */
      output_addr_const (file, op);
      return;

    case 'I':                   /* Reversed condition.  */
      code = reverse_condition (code);
      break;

    case 'L':                   /* Log 2 of constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%L inconsistency", op);
      fprintf (file, "%d", exact_log2 (INTVAL (op)));
      return;

    case 'N':                   /* Ones complement of small constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%N inconsistency", op);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
      return;

    case 'K':                   /* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL
          && code == MEM
          && GET_CODE (XEXP (op, 0)) == LO_SUM
          && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
          && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
        {
          op1 = XEXP (XEXP (op, 0), 1);
          if (GET_CODE (op1) == CONST_INT || GET_CODE (op1) == SYMBOL_REF)
            {
              fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
              output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
              fprintf (file, "\n");
            }
        }
      break;

    case 'M':                   /* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL        /* Only used in asm statements.  */
          && code == MEM
          && (GET_CODE (XEXP (op, 0)) == CONST
              || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
        {
          fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
          output_address (XEXP (op, 0));
          fprintf (file, "\n\t");
        }
      break;

    case 'O':                   /* Offset address.  */
      if (code == MEM && c4x_autoinc_operand (op, Pmode))
        break;
      else if (code == MEM)
        output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
      else if (code == REG)
        fprintf (file, "%s", reg_names[REGNO (op) + 1]);
      else
        fatal_insn ("c4x_print_operand: %%O inconsistency", op);
      return;

    case 'C':                   /* Call.  */
      break;

    case 'U':                   /* Call/callu.  */
      if (code != SYMBOL_REF)
        fprintf (file, "u");
      return;
    }

  switch (code)
    {
    case REG:
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && ! TARGET_TI)
        fprintf (file, "%s", float_reg_names[REGNO (op)]);
      else
        fprintf (file, "%s", reg_names[REGNO (op)]);
      break;

    case MEM:
      output_address (XEXP (op, 0));
      break;

    case CONST_DOUBLE:
      {
        char str[64];

        real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
                         sizeof (str), 0, 1);
        fprintf (file, "%s", str);
      }
      break;

    case CONST_INT:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
      break;

    case NE:
      fprintf (file, "ne");
      break;

    case EQ:
      fprintf (file, "eq");
      break;

    case GE:
      fprintf (file, "ge");
      break;

    case GT:
      fprintf (file, "gt");
      break;

    case LE:
      fprintf (file, "le");
      break;

    case LT:
      fprintf (file, "lt");
      break;

    case GEU:
      fprintf (file, "hs");
      break;

    case GTU:
      fprintf (file, "hi");
      break;

    case LEU:
      fprintf (file, "ls");
      break;

    case LTU:
      fprintf (file, "lo");
      break;

    case SYMBOL_REF:
      output_addr_const (file, op);
      break;

    case CONST:
      output_addr_const (file, XEXP (op, 0));
      break;

    default:
      fatal_insn ("c4x_print_operand: Bad operand case", op);
      break;
    }
}
void
c4x_print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "*%s", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_MODIFY:
      {
        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else
          fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
      }
      break;

    case PRE_MODIFY:
      {
        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else
          fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
      }
      break;

    case PRE_INC:
      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_DEC:
      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:                  /* Indirect with displacement.  */
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (REG_P (op0))
          {
            if (REG_P (op1))
              {
                if (IS_INDEX_REG (op0))
                  {
                    fprintf (file, "*+%s(%s)",
                             reg_names[REGNO (op1)],
                             reg_names[REGNO (op0)]);   /* Index + base.  */
                  }
                else
                  {
                    fprintf (file, "*+%s(%s)",
                             reg_names[REGNO (op0)],
                             reg_names[REGNO (op1)]);   /* Base + index.  */
                  }
              }
            else if (INTVAL (op1) < 0)
              {
                fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
                         reg_names[REGNO (op0)],
                         -INTVAL (op1));        /* Base - displacement.  */
              }
            else
              {
                fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
                         reg_names[REGNO (op0)],
                         INTVAL (op1));         /* Base + displacement.  */
              }
          }
        else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (REG_P (op0) && REGNO (op0) == DP_REGNO)
          c4x_print_operand_address (file, op1);
        else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      fprintf (file, "@");
      output_addr_const (file, addr);
      break;

      /* We shouldn't access CONST_INT addresses.  */
    case CONST_INT:

    default:
      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      break;
    }
}
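
/* Illustrative mapping from RTL to the indirect syntax emitted above:

        (pre_inc (reg ar0))                             -> *++ar0
        (post_dec (reg ar2))                            -> *ar2--
        (pre_modify (reg ar0)
                    (plus (reg ar0) (const_int -4)))    -> *--ar0(4)
        (post_modify (reg ar1)
                     (plus (reg ar1) (reg ir0)))        -> *ar1++(ir0)
*/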
/* Return nonzero if the floating point operand will fit
   in the immediate field.  */

static int
c4x_immed_float_p (rtx op)
{
  long convval[2];
  int exponent;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  if (GET_MODE (op) == HFmode)
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
  else
    {
      convval[1] = 0;
      REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
    }

  /* Sign extend exponent.  */
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
  if (exponent == -128)
    return 1;                   /* 0.0  */
  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
    return 0;                   /* Precision doesn't fit.  */
  return (exponent <= 7)        /* Positive exp.  */
    && (exponent >= -7);        /* Negative exp.  */
}
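
/* For example, constants such as 1.0, -1.5 or 0.03125 pass this test,
   since they need only a small exponent and a short mantissa, whereas
   0.1 fails because its mantissa does not terminate in binary.  */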
/* The last instruction in a repeat block cannot be a Bcond, DBcond,
   CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.

   None of the last four instructions from the bottom of the block can
   be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
   BcondAT or RETIcondD.

   This routine scans the four previous insns for a jump insn, and if
   one is found, returns 1 so that we bung in a nop instruction.
   This simple-minded strategy will add a nop when it may not
   be required, say when there is a JUMP_INSN near the end of the
   block that doesn't get converted into a delayed branch.

   Note that we cannot have a call insn, since we don't generate
   repeat loops with calls in them (although I suppose we could, but
   there's no benefit.)

   !!! FIXME.  The rptb_top insn may be sucked into a SEQUENCE.  */

int
c4x_rptb_nop_p (rtx insn)
{
  rtx start_label;
  int i;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  /* If there is a label at the end of the loop we must insert
     a NOP.  */
  do
    {
      insn = previous_insn (insn);
    } while (GET_CODE (insn) == NOTE
             || GET_CODE (insn) == USE
             || GET_CODE (insn) == CLOBBER);
  if (GET_CODE (insn) == CODE_LABEL)
    return 1;

  for (i = 0; i < 4; i++)
    {
      /* Search back for prev non-note and non-label insn.  */
      while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
             || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
        {
          if (insn == start_label)
            return i == 0;

          insn = previous_insn (insn);
        }

      /* If we have a jump instruction we should insert a NOP.  If we
         hit repeat block top we should only insert a NOP if the loop
         is empty.  */
      if (GET_CODE (insn) == JUMP_INSN)
        return 1;
      insn = previous_insn (insn);
    }
  return 0;
}
/* The C4x looping instruction needs to be emitted at the top of the
   loop.  Emitting the true RTL for a looping instruction at the top of
   the loop can cause problems with flow analysis.  So instead, a dummy
   doloop insn is emitted at the end of the loop.  This routine checks
   for the presence of this doloop insn and then searches back to the
   top of the loop, where it inserts the true looping insn (provided
   there are no instructions in the loop which would cause problems).
   Any additional labels can be emitted at this point.  In addition, if
   the desired loop count register was not allocated, this routine does
   nothing.

   Before we can create a repeat block looping instruction we have to
   verify that there are no jumps out of the loop and no jumps from
   outside into the loop.  This can happen in the basic block reorder
   pass.  The C4x cpu cannot handle this.  */

static int
c4x_label_ref_used_p (rtx x, rtx code_label)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == LABEL_REF)
    return INSN_UID (XEXP (x, 0)) == INSN_UID (code_label);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (c4x_label_ref_used_p (XEXP (x, i), code_label))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
            return 1;
    }
  return 0;
}
static int
c4x_rptb_valid_p (rtx insn, rtx start_label)
{
  rtx end = insn;
  rtx start;
  rtx tmp;

  /* Find the start label.  */
  for (; insn; insn = PREV_INSN (insn))
    if (insn == start_label)
      break;

  /* If not found, then we cannot use a rptb or rpts.  The label was
     probably moved by the basic block reorder pass.  */
  if (! insn)
    return 0;

  start = insn;
  /* If any jump inside this block targets a label outside it
     then we must fail.  */
  for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
        {
          for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN (tmp))
            if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))
              return 0;
        }
    }
  for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
        {
          for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN (tmp))
            if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))
              return 0;
        }
    }

  /* If any jump outside this block targets a label inside it
     then we must fail.  */
  for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
        {
          for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN (tmp))
            if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))
              return 0;
          for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN (tmp))
            if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))
              return 0;
        }
    }

  /* All checks OK.  */
  return 1;
}
static void
c4x_rptb_insert (rtx insn)
{
  rtx end_label;
  rtx start_label;
  rtx new_start_label;
  rtx count_reg;

  /* If the count register has not been allocated to RC, say if
     there is a movmem pattern in the loop, then do not insert a
     RPTB instruction.  Instead we emit a decrement and branch
     at the end of the loop.  */
  count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
  if (REGNO (count_reg) != RC_REGNO)
    return;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  if (! c4x_rptb_valid_p (insn, start_label))
    {
      /* We cannot use the rptb insn.  Replace it so reorg can use
         the delay slots of the jump insn.  */
      emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
      emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
      emit_insn_before (gen_bge (start_label), insn);
      LABEL_NUSES (start_label)++;
      delete_insn (insn);
      return;
    }

  end_label = gen_label_rtx ();
  LABEL_NUSES (end_label)++;
  emit_label_after (end_label, insn);

  new_start_label = gen_label_rtx ();
  LABEL_NUSES (new_start_label)++;

  for (; insn; insn = PREV_INSN (insn))
    {
      if (insn == start_label)
        break;
      if (GET_CODE (insn) == JUMP_INSN
          && JUMP_LABEL (insn) == start_label)
        redirect_jump (insn, new_start_label, 0);
    }
  if (! insn)
    fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);

  emit_label_after (new_start_label, insn);

  if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
    emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
  else
    emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
  if (LABEL_NUSES (start_label) == 0)
    delete_insn (start_label);
}
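
/* A sketch of the rewrite performed above: the dummy rptb_end jump at
   the bottom of the loop is paired with a real rptb_top (or rpts_top)
   insn emitted at a fresh start label, with a fresh end label emitted
   after the loop.  If RC was not allocated as the loop counter, the
   loop instead gets an explicit decrement, compare and bge back to
   the start label.  */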
/* We need to use direct addressing for large constants and addresses
   that cannot fit within an instruction.  We must check for these
   after the final jump optimization pass, since this may
   introduce a local_move insn for a SYMBOL_REF.  This pass
   must come before delayed branch slot filling since it can generate
   additional instructions.

   This function also fixes up RPTB style loops that didn't get RC
   allocated as the loop counter.  */

static void
c4x_reorg (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Look for insn.  */
      if (INSN_P (insn))
        {
          int insn_code_number;

          insn_code_number = recog_memoized (insn);

          if (insn_code_number < 0)
            continue;

          /* Insert the RTX for RPTB at the top of the loop
             and a label at the end of the loop.  */
          if (insn_code_number == CODE_FOR_rptb_end)
            c4x_rptb_insert (insn);

          /* We need to split the insn here.  Otherwise the calls to
             force_const_mem will not work for load_immed_address.  */
          {
            rtx old = insn;

            /* Don't split the insn if it has been deleted.  */
            if (! INSN_DELETED_P (old))
              insn = try_split (PATTERN (old), old, 1);

            /* When not optimizing, the old insn will still be left around
               with only the 'deleted' bit set.  Transform it into a note
               to avoid confusion of subsequent processing.  */
            if (INSN_DELETED_P (old))
              {
                PUT_CODE (old, NOTE);
                NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
                NOTE_SOURCE_FILE (old) = 0;
              }
          }
        }
    }
}
static int
c4x_a_register (rtx op)
{
  return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
}


static int
c4x_x_register (rtx op)
{
  return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
}


static int
c4x_immed_int_constant (rtx op)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  return GET_MODE (op) == VOIDmode
    || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
    || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
}


static int
c4x_immed_float_constant (rtx op)
{
  if (GET_CODE (op) != CONST_DOUBLE)
    return 0;

  /* Do not check if the CONST_DOUBLE is in memory.  If there is a MEM
     present this only means that a MEM rtx has been generated.  It does
     not mean the rtx is really in memory.  */

  return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
}


int
c4x_shiftable_constant (rtx op)
{
  int i;
  int mask;
  int val = INTVAL (op);

  for (i = 0; i < 16; i++)
    {
      if (val & (1 << i))
        break;
    }
  mask = ((0xffff >> i) << 16) | 0xffff;
  if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
                      : (val >> i) & mask))
    return i;
  return -1;
}


int
c4x_H_constant (rtx op)
{
  return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
}


int
c4x_I_constant (rtx op)
{
  return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
}


int
c4x_J_constant (rtx op)
{
  if (TARGET_C3X)
    return 0;
  return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
}


static int
c4x_K_constant (rtx op)
{
  if (TARGET_C3X || ! c4x_immed_int_constant (op))
    return 0;
  return IS_INT5_CONST (INTVAL (op));
}


int
c4x_L_constant (rtx op)
{
  return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
}


static int
c4x_N_constant (rtx op)
{
  return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
}


static int
c4x_O_constant (rtx op)
{
  return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
}
2580 /* The constraints do not have to check the register class,
2581 except when needed to discriminate between the constraints.
2582 The operand has been checked by the predicates to be valid. */
2584 /* ARx + 9-bit signed const or IRn
2585 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2586 We don't include the pre/post inc/dec forms here since
2587 they are handled by the <> constraints. */
2590 c4x_Q_constraint (rtx op)
2592 enum machine_mode mode = GET_MODE (op);
2594 if (GET_CODE (op) != MEM)
2597 switch (GET_CODE (op))
2604 rtx op0 = XEXP (op, 0);
2605 rtx op1 = XEXP (op, 1);
2613 if (GET_CODE (op1) != CONST_INT)
2616 /* HImode and HFmode must be offsettable. */
2617 if (mode == HImode || mode == HFmode)
2618 return IS_DISP8_OFF_CONST (INTVAL (op1));
2620 return IS_DISP8_CONST (INTVAL (op1));
2631 /* ARx + 5-bit unsigned const
2632 *ARx, *+ARx(n) for n < 32. */
2635 c4x_R_constraint (rtx op)
2637 enum machine_mode mode = GET_MODE (op);
2641 if (GET_CODE (op) != MEM)
2644 switch (GET_CODE (op))
2651 rtx op0 = XEXP (op, 0);
2652 rtx op1 = XEXP (op, 1);
2657 if (GET_CODE (op1) != CONST_INT)
2660 /* HImode and HFmode must be offsettable. */
2661 if (mode == HImode || mode == HFmode)
2662 return IS_UINT5_CONST (INTVAL (op1) + 1);
2664 return IS_UINT5_CONST (INTVAL (op1));
2676 c4x_R_indirect (rtx op)
2678 enum machine_mode mode = GET_MODE (op);
2680 if (TARGET_C3X || GET_CODE (op) != MEM)
2684 switch (GET_CODE (op))
2687 return IS_ADDR_OR_PSEUDO_REG (op);
2691 rtx op0 = XEXP (op, 0);
2692 rtx op1 = XEXP (op, 1);
2694 /* HImode and HFmode must be offsettable. */
2695 if (mode == HImode || mode == HFmode)
2696 return IS_ADDR_OR_PSEUDO_REG (op0)
2697 && GET_CODE (op1) == CONST_INT
2698 && IS_UINT5_CONST (INTVAL (op1) + 1);
2701 && IS_ADDR_OR_PSEUDO_REG (op0)
2702 && GET_CODE (op1) == CONST_INT
2703 && IS_UINT5_CONST (INTVAL (op1));
2714 /* ARx + 1-bit unsigned const or IRn
2715 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2716 We don't include the pre/post inc/dec forms here since
2717 they are handled by the <> constraints. */
2720 c4x_S_constraint (rtx op)
2722 enum machine_mode mode = GET_MODE (op);
2723 if (GET_CODE (op) != MEM)
2726 switch (GET_CODE (op))
2734 rtx op0 = XEXP (op, 0);
2735 rtx op1 = XEXP (op, 1);
2737 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2738 || (op0 != XEXP (op1, 0)))
2741 op0 = XEXP (op1, 0);
2742 op1 = XEXP (op1, 1);
2743 return REG_P (op0) && REG_P (op1);
2744 /* Pre or post_modify with a displacement of 0 or 1
2745 should not be generated. */
2751 rtx op0 = XEXP (op, 0);
2752 rtx op1 = XEXP (op, 1);
2760 if (GET_CODE (op1) != CONST_INT)
2763 /* HImode and HFmode must be offsettable. */
2764 if (mode == HImode || mode == HFmode)
2765 return IS_DISP1_OFF_CONST (INTVAL (op1));
2767 return IS_DISP1_CONST (INTVAL (op1));
2779 c4x_S_indirect (rtx op)
2781 enum machine_mode mode = GET_MODE (op);
2782 if (GET_CODE (op) != MEM)
2786 switch (GET_CODE (op))
2790 if (mode != QImode && mode != QFmode)
2797 return IS_ADDR_OR_PSEUDO_REG (op);
2802 rtx op0 = XEXP (op, 0);
2803 rtx op1 = XEXP (op, 1);
2805 if (mode != QImode && mode != QFmode)
2808 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2809 || (op0 != XEXP (op1, 0)))
2812 op0 = XEXP (op1, 0);
2813 op1 = XEXP (op1, 1);
2814 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2815 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2816 /* Pre or post_modify with a displacement of 0 or 1
2817 should not be generated. */
2822 rtx op0 = XEXP (op, 0);
2823 rtx op1 = XEXP (op, 1);
2827 /* HImode and HFmode must be offsettable. */
2828 if (mode == HImode || mode == HFmode)
2829 return IS_ADDR_OR_PSEUDO_REG (op0)
2830 && GET_CODE (op1) == CONST_INT
2831 && IS_DISP1_OFF_CONST (INTVAL (op1));
2834 return (IS_INDEX_OR_PSEUDO_REG (op1)
2835 && IS_ADDR_OR_PSEUDO_REG (op0))
2836 || (IS_ADDR_OR_PSEUDO_REG (op1)
2837 && IS_INDEX_OR_PSEUDO_REG (op0));
2839 return IS_ADDR_OR_PSEUDO_REG (op0)
2840 && GET_CODE (op1) == CONST_INT
2841 && IS_DISP1_CONST (INTVAL (op1));
2853 /* Direct memory operand. */
2856 c4x_T_constraint (rtx op)
2858 if (GET_CODE (op) != MEM)
2862 if (GET_CODE (op) != LO_SUM)
2864 /* Allow call operands. */
2865 return GET_CODE (op) == SYMBOL_REF
2866 && GET_MODE (op) == Pmode
2867 && SYMBOL_REF_FUNCTION_P (op);
2870 /* HImode and HFmode are not offsettable. */
2871 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2874 if ((GET_CODE (XEXP (op, 0)) == REG)
2875 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2876 return c4x_U_constraint (XEXP (op, 1));
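/* Added illustration: a direct-memory T operand has the LO_SUM shape
   checked above, e.g.

	(mem:QI (lo_sum:QI (reg:QI DP) (symbol_ref:QI "_var")))

   which corresponds to a direct access such as LDI @_var, R0 once the
   DP page pointer is valid.  The symbol name is made up for the
   example.  */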
2882 /* Symbolic operand. */
2885 c4x_U_constraint (rtx op)
2887 /* Don't allow direct addressing to an arbitrary constant. */
2888 return GET_CODE (op) == CONST
2889 || GET_CODE (op) == SYMBOL_REF
2890 || GET_CODE (op) == LABEL_REF;
2895 c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2897 if (GET_CODE (op) == MEM)
2899 enum rtx_code code = GET_CODE (XEXP (op, 0));
2905 || code == PRE_MODIFY
2906 || code == POST_MODIFY
2914 /* Match any operand. */
2917 any_operand (register rtx op ATTRIBUTE_UNUSED,
2918 enum machine_mode mode ATTRIBUTE_UNUSED)
2924 /* Nonzero if OP is a floating point value with value 0.0. */
2927 fp_zero_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2931 if (GET_CODE (op) != CONST_DOUBLE)
2933 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2934 return REAL_VALUES_EQUAL (r, dconst0);
2939 const_operand (register rtx op, register enum machine_mode mode)
2945 if (GET_CODE (op) != CONST_DOUBLE
2946 || GET_MODE (op) != mode
2947 || GET_MODE_CLASS (mode) != MODE_FLOAT)
2950 return c4x_immed_float_p (op);
2956 if (GET_CODE (op) != CONST_INT
2957 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
2958 || GET_MODE_CLASS (mode) != MODE_INT)
2961 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
2973 stik_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2975 return c4x_K_constant (op);
2980 not_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2982 return c4x_N_constant (op);
2987 reg_operand (rtx op, enum machine_mode mode)
2989 if (GET_CODE (op) == SUBREG
2990 && GET_MODE (op) == QFmode)
2992 return register_operand (op, mode);
2997 mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2999 /* Allow a (subreg:HF (reg:HI)) that may be generated for a union of an
3000 int and a long double. */
3001 if (GET_CODE (op) == SUBREG
3002 && (GET_MODE (op) == QFmode)
3003 && (GET_MODE (SUBREG_REG (op)) == QImode
3004 || GET_MODE (SUBREG_REG (op)) == HImode))
3011 reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3013 if (REG_P (op) || CONSTANT_P (op))
3020 not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3022 if (REG_P (op) || CONSTANT_P (op))
3024 if (GET_CODE (op) != MEM)
3027 switch (GET_CODE (op))
3034 rtx op0 = XEXP (op, 0);
3035 rtx op1 = XEXP (op, 1);
3040 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3046 rtx op0 = XEXP (op, 0);
3048 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3066 not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3068 if (REG_P (op) && REGNO (op) == RC_REGNO)
3074 /* Extended precision register R0-R1. */
3077 r0r1_reg_operand (rtx op, enum machine_mode mode)
3079 if (! reg_operand (op, mode))
3081 if (GET_CODE (op) == SUBREG)
3082 op = SUBREG_REG (op);
3083 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3087 /* Extended precision register R2-R3. */
3090 r2r3_reg_operand (rtx op, enum machine_mode mode)
3092 if (! reg_operand (op, mode))
3094 if (GET_CODE (op) == SUBREG)
3095 op = SUBREG_REG (op);
3096 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3100 /* Low extended precision register R0-R7. */
3103 ext_low_reg_operand (rtx op, enum machine_mode mode)
3105 if (! reg_operand (op, mode))
3107 if (GET_CODE (op) == SUBREG)
3108 op = SUBREG_REG (op);
3109 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3113 /* Extended precision register. */
3116 ext_reg_operand (rtx op, enum machine_mode mode)
3118 if (! reg_operand (op, mode))
3120 if (GET_CODE (op) == SUBREG)
3121 op = SUBREG_REG (op);
3124 return IS_EXT_OR_PSEUDO_REG (op);
3128 /* Standard precision register. */
3131 std_reg_operand (rtx op, enum machine_mode mode)
3133 if (! reg_operand (op, mode))
3135 if (GET_CODE (op) == SUBREG)
3136 op = SUBREG_REG (op);
3137 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3140 /* Standard precision or normal register. */
3143 std_or_reg_operand (rtx op, enum machine_mode mode)
3145 if (reload_in_progress)
3146 return std_reg_operand (op, mode);
3147 return reg_operand (op, mode);
3150 /* Address register. */
3153 addr_reg_operand (rtx op, enum machine_mode mode)
3155 if (! reg_operand (op, mode))
3157 return c4x_a_register (op);
3161 /* Index register. */
3164 index_reg_operand (rtx op, enum machine_mode mode)
3166 if (! reg_operand (op, mode))
3168 if (GET_CODE (op) == SUBREG)
3169 op = SUBREG_REG (op);
3170 return c4x_x_register (op);
3177 dp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3179 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3186 sp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3188 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3195 st_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3197 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3204 rc_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3206 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3211 call_address_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3213 return (REG_P (op) || symbolic_address_operand (op, mode));
3217 /* Symbolic address operand. */
3220 symbolic_address_operand (register rtx op,
3221 enum machine_mode mode ATTRIBUTE_UNUSED)
3223 switch (GET_CODE (op))
3235 /* Check dst operand of a move instruction. */
3238 dst_operand (rtx op, enum machine_mode mode)
3240 if (GET_CODE (op) == SUBREG
3241 && mixed_subreg_operand (op, mode))
3245 return reg_operand (op, mode);
3247 return nonimmediate_operand (op, mode);
3251 /* Check src operand of two operand arithmetic instructions. */
3254 src_operand (rtx op, enum machine_mode mode)
3256 if (GET_CODE (op) == SUBREG
3257 && mixed_subreg_operand (op, mode))
3261 return reg_operand (op, mode);
3263 if (mode == VOIDmode)
3264 mode = GET_MODE (op);
3266 if (GET_CODE (op) == CONST_INT)
3267 return (mode == QImode || mode == Pmode || mode == HImode)
3268 && c4x_I_constant (op);
3270 /* We don't like CONST_DOUBLE integers. */
3271 if (GET_CODE (op) == CONST_DOUBLE)
3272 return c4x_H_constant (op);
3274 /* Disallow symbolic addresses. Only the predicate
3275 symbolic_address_operand will match these. */
3276 if (GET_CODE (op) == SYMBOL_REF
3277 || GET_CODE (op) == LABEL_REF
3278 || GET_CODE (op) == CONST)
3281 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
3282 access to symbolic addresses. These operands will get forced
3283 into a register and the movqi expander will generate a
3284 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
3285 if (GET_CODE (op) == MEM
3286 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3287 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3288 || GET_CODE (XEXP (op, 0)) == CONST)))
3289 return ! TARGET_EXPOSE_LDP && ! TARGET_LOAD_DIRECT_MEMS
3290 && GET_MODE (op) == mode;
3292 return general_operand (op, mode);
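/* Added sketch of the case rejected above: with TARGET_EXPOSE_LDP the
   movqi expander splits a symbolic load into an explicit page load
   plus a direct access, conceptually

	(set (reg:QI DP) (high:QI (symbol_ref "_var")))
	(set (reg:QI R0) (mem:QI (lo_sum:QI (reg:QI DP)
					    (symbol_ref "_var"))))

   i.e. LDP @_var followed by LDI @_var, R0.  This paraphrases the
   comment above and is not a literal RTL dump.  */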
3297 src_hi_operand (rtx op, enum machine_mode mode)
3299 if (c4x_O_constant (op))
3301 return src_operand (op, mode);
3305 /* Check src operand of two operand logical instructions. */
3308 lsrc_operand (rtx op, enum machine_mode mode)
3310 if (mode == VOIDmode)
3311 mode = GET_MODE (op);
3313 if (mode != QImode && mode != Pmode)
3314 fatal_insn ("mode not QImode", op);
3316 if (GET_CODE (op) == CONST_INT)
3317 return c4x_L_constant (op) || c4x_J_constant (op);
3319 return src_operand (op, mode);
3323 /* Check src operand of two operand tricky instructions. */
3326 tsrc_operand (rtx op, enum machine_mode mode)
3328 if (mode == VOIDmode)
3329 mode = GET_MODE (op);
3331 if (mode != QImode && mode != Pmode)
3332 fatal_insn ("mode not QImode", op);
3334 if (GET_CODE (op) == CONST_INT)
3335 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3337 return src_operand (op, mode);
3341 /* Check src operand of two operand non immediate instructions. */
3344 nonimmediate_src_operand (rtx op, enum machine_mode mode)
3346 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3349 return src_operand (op, mode);
3353 /* Check logical src operand of two operand non immediate instructions. */
3356 nonimmediate_lsrc_operand (rtx op, enum machine_mode mode)
3358 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3361 return lsrc_operand (op, mode);
3366 reg_or_const_operand (rtx op, enum machine_mode mode)
3368 return reg_operand (op, mode) || const_operand (op, mode);
3372 /* Check for indirect operands allowable in parallel instruction. */
3375 par_ind_operand (rtx op, enum machine_mode mode)
3377 if (mode != VOIDmode && mode != GET_MODE (op))
3380 return c4x_S_indirect (op);
3384 /* Check for operands allowable in parallel instruction. */
3387 parallel_operand (rtx op, enum machine_mode mode)
3389 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3394 c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
3401 if (GET_CODE (op) != MEM)
3402 fatal_insn ("invalid indirect memory address", op);
3405 switch (GET_CODE (op))
3408 *base = REGNO (XEXP (op, 0));
3414 *base = REGNO (XEXP (op, 0));
3420 *base = REGNO (XEXP (op, 0));
3426 *base = REGNO (XEXP (op, 0));
3432 *base = REGNO (XEXP (op, 0));
3433 if (REG_P (XEXP (XEXP (op, 1), 1)))
3435 *index = REGNO (XEXP (XEXP (op, 1), 1));
3436 *disp = 0; /* ??? */
3439 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3444 *base = REGNO (XEXP (op, 0));
3445 if (REG_P (XEXP (XEXP (op, 1), 1)))
3447 *index = REGNO (XEXP (XEXP (op, 1), 1));
3448 *disp = 1; /* ??? */
3451 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3462 rtx op0 = XEXP (op, 0);
3463 rtx op1 = XEXP (op, 1);
3465 if (c4x_a_register (op0))
3467 if (c4x_x_register (op1))
3469 *base = REGNO (op0);
3470 *index = REGNO (op1);
3473 else if ((GET_CODE (op1) == CONST_INT
3474 && IS_DISP1_CONST (INTVAL (op1))))
3476 *base = REGNO (op0);
3477 *disp = INTVAL (op1);
3481 else if (c4x_x_register (op0) && c4x_a_register (op1))
3483 *base = REGNO (op1);
3484 *index = REGNO (op0);
3491 fatal_insn ("invalid indirect (S) memory address", op);
3497 c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
3508 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3511 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3512 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3514 if (store0 && store1)
3516 /* If we have two stores in parallel to the same address, then
3517 the C4x only executes one of the stores. This is unlikely to
3518 cause problems except when writing to a hardware device such
3519 as a FIFO since the second write will be lost. The user
3520 should flag the hardware location as being volatile so that
3521 we don't do this optimization. While it is unlikely that we
3522 have an aliased address if both locations are not marked
3523 volatile, it is probably safer to flag a potential conflict
3524 if either location is volatile. */
3525 if (! flag_argument_noalias)
3527 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3532 /* If we have a parallel load and a store to the same address, the load
3533 is performed first, so there is no conflict. Similarly, there is
3534 no conflict if we have parallel loads from the same address.
3536 /* Cannot use auto increment or auto decrement twice for same
3538 if (base0 == base1 && incdec0 && incdec1)
3541 /* It might be too confusing for GCC if we use a base register
3542 with a side effect and a memory reference using the same register
3544 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3547 /* We cannot optimize the case where op0 and op1 refer to the same
3549 if (base0 == base1 && disp0 == disp1 && index0 == index1)
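/* Example of the hazard this function guards against (added for
   illustration): in a parallel store pair such as

	STI	R0, *AR0
   ||	STI	R1, *AR0

   only one write takes effect, so the two addresses must be provably
   distinct (different base, displacement, or index) before the
   multipack pass may pair the stores.  */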
3557 /* Check for a while loop inside a decrement and branch loop. */
3560 c4x_label_conflict (rtx insn, rtx jump, rtx db)
3564 if (GET_CODE (insn) == CODE_LABEL)
3566 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3568 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3571 insn = PREV_INSN (insn);
3577 /* Validate combination of operands for parallel load/store instructions. */
3580 valid_parallel_load_store (rtx *operands,
3581 enum machine_mode mode ATTRIBUTE_UNUSED)
3583 rtx op0 = operands[0];
3584 rtx op1 = operands[1];
3585 rtx op2 = operands[2];
3586 rtx op3 = operands[3];
3588 if (GET_CODE (op0) == SUBREG)
3589 op0 = SUBREG_REG (op0);
3590 if (GET_CODE (op1) == SUBREG)
3591 op1 = SUBREG_REG (op1);
3592 if (GET_CODE (op2) == SUBREG)
3593 op2 = SUBREG_REG (op2);
3594 if (GET_CODE (op3) == SUBREG)
3595 op3 = SUBREG_REG (op3);
3597 /* The patterns should only allow ext_low_reg_operand() or
3598 par_ind_operand() operands. Thus of the 4 operands, only 2
3599 should be REGs and the other 2 should be MEMs. */
3601 /* This test prevents the multipack pass from using this pattern if
3602 op0 is used as an index or base register in op2 or op3, since
3603 this combination will require reloading. */
3604 if (GET_CODE (op0) == REG
3605 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3606 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3610 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3611 return (REGNO (op0) != REGNO (op2))
3612 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3613 && ! c4x_address_conflict (op1, op3, 0, 0);
3616 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3617 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3618 && ! c4x_address_conflict (op0, op2, 1, 1);
3621 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3622 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3623 && ! c4x_address_conflict (op1, op2, 0, 1);
3626 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3627 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3628 && ! c4x_address_conflict (op0, op3, 1, 0);
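/* Added illustration: one combination this predicate accepts is a
   parallel load/load such as

	LDI	*AR0, R0
   ||	LDI	*AR1, R1

   two REG destinations, two MEM sources, distinct destination
   registers, and no address conflict between the two reads.  */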
3635 valid_parallel_operands_4 (rtx *operands,
3636 enum machine_mode mode ATTRIBUTE_UNUSED)
3638 rtx op0 = operands[0];
3639 rtx op2 = operands[2];
3641 if (GET_CODE (op0) == SUBREG)
3642 op0 = SUBREG_REG (op0);
3643 if (GET_CODE (op2) == SUBREG)
3644 op2 = SUBREG_REG (op2);
3646 /* This test prevents the multipack pass from using this pattern if
3647 op0 is used as an index or base register in op2, since this combination
3648 will require reloading. */
3649 if (GET_CODE (op0) == REG
3650 && GET_CODE (op2) == MEM
3651 && reg_mentioned_p (op0, XEXP (op2, 0)))
3659 valid_parallel_operands_5 (rtx *operands,
3660 enum machine_mode mode ATTRIBUTE_UNUSED)
3663 rtx op0 = operands[0];
3664 rtx op1 = operands[1];
3665 rtx op2 = operands[2];
3666 rtx op3 = operands[3];
3668 if (GET_CODE (op0) == SUBREG)
3669 op0 = SUBREG_REG (op0);
3670 if (GET_CODE (op1) == SUBREG)
3671 op1 = SUBREG_REG (op1);
3672 if (GET_CODE (op2) == SUBREG)
3673 op2 = SUBREG_REG (op2);
3675 /* The patterns should only allow ext_low_reg_operand() or
3676 par_ind_operand() operands. Operands 1 and 2 may be commutative
3677 but only one of them can be a register. */
3678 if (GET_CODE (op1) == REG)
3680 if (GET_CODE (op2) == REG)
3686 /* This test prevents the multipack pass from using this pattern if
3687 op0 is used as an index or base register in op3, since this combination
3688 will require reloading. */
3689 if (GET_CODE (op0) == REG
3690 && GET_CODE (op3) == MEM
3691 && reg_mentioned_p (op0, XEXP (op3, 0)))
3699 valid_parallel_operands_6 (rtx *operands,
3700 enum machine_mode mode ATTRIBUTE_UNUSED)
3703 rtx op0 = operands[0];
3704 rtx op1 = operands[1];
3705 rtx op2 = operands[2];
3706 rtx op4 = operands[4];
3707 rtx op5 = operands[5];
3709 if (GET_CODE (op1) == SUBREG)
3710 op1 = SUBREG_REG (op1);
3711 if (GET_CODE (op2) == SUBREG)
3712 op2 = SUBREG_REG (op2);
3713 if (GET_CODE (op4) == SUBREG)
3714 op4 = SUBREG_REG (op4);
3715 if (GET_CODE (op5) == SUBREG)
3716 op5 = SUBREG_REG (op5);
3718 /* The patterns should only allow ext_low_reg_operand() or
3719 par_ind_operand() operands. Thus of the 4 input operands, only 2
3720 should be REGs and the other 2 should be MEMs. */
3722 if (GET_CODE (op1) == REG)
3724 if (GET_CODE (op2) == REG)
3726 if (GET_CODE (op4) == REG)
3728 if (GET_CODE (op5) == REG)
3731 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3732 Perhaps we should count the MEMs as well? */
3736 /* This test prevents the multipack pass from using this pattern if
3737 op0 is used as an index or base register in op4 or op5, since
3738 this combination will require reloading. */
3739 if (GET_CODE (op0) == REG
3740 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3741 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3748 /* Validate combination of src operands. Note that the operands have
3749 been screened by the src_operand predicate. We just have to check
3750 that the combination of operands is valid. If FORCE is set, ensure
3751 that the destination regno is valid if we have a 2 operand insn. */
3754 c4x_valid_operands (enum rtx_code code, rtx *operands,
3755 enum machine_mode mode ATTRIBUTE_UNUSED,
3761 enum rtx_code code1;
3762 enum rtx_code code2;
3765 /* FIXME, why can't we tighten the operands for IF_THEN_ELSE? */
3766 if (code == IF_THEN_ELSE)
3767 return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);
3769 if (code == COMPARE)
3782 if (GET_CODE (op0) == SUBREG)
3783 op0 = SUBREG_REG (op0);
3784 if (GET_CODE (op1) == SUBREG)
3785 op1 = SUBREG_REG (op1);
3786 if (GET_CODE (op2) == SUBREG)
3787 op2 = SUBREG_REG (op2);
3789 code1 = GET_CODE (op1);
3790 code2 = GET_CODE (op2);
3793 if (code1 == REG && code2 == REG)
3796 if (code1 == MEM && code2 == MEM)
3798 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3800 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3803 /* We cannot handle two MEMs or two CONSTS, etc. */
3812 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3817 if (! c4x_H_constant (op2))
3821 /* Any valid memory operand screened by src_operand is OK. */
3826 fatal_insn ("c4x_valid_operands: Internal error", op2);
3830 if (GET_CODE (op0) == SCRATCH)
3836 /* Check that we have a valid destination register for a two operand
3838 return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
3842 /* Check non-commutative operators. */
3843 if (code == ASHIFTRT || code == LSHIFTRT
3844 || code == ASHIFT || code == COMPARE)
3846 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3849 /* Assume MINUS is commutative since the subtract patterns
3850 also support the reverse subtract instructions. Since op1
3851 is not a register, and op2 is a register, op1 can only
3852 be a restricted memory operand for a shift instruction. */
3861 if (! c4x_H_constant (op1))
3865 /* Any valid memory operand screened by src_operand is OK. */
3874 if (GET_CODE (op0) == SCRATCH)
3880 /* Check that we have a valid destination register for a two operand
3882 return ! force || REGNO (op1) == REGNO (op0);
3885 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3892 int valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3895 /* If we are not optimizing then we have to let anything go and let
3896 reload fix things up. instantiate_decl in function.c can produce
3897 invalid insns by changing the offset of a memory operand from a
3898 valid one into an invalid one, when the second operand is also a
3899 memory operand. The alternative is not to allow two memory
3900 operands for an insn when not optimizing. The problem only rarely
3901 occurs, for example with the C-torture program DFcmp.c. */
3903 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
3908 legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3910 /* Compare only has 2 operands. */
3911 if (code == COMPARE)
3913 /* During RTL generation, force constants into pseudos so that
3914 they can get hoisted out of loops. This will tie up an extra
3915 register but can save an extra cycle. Only do this if loop
3916 optimization enabled. (We cannot pull this trick for add and
3917 sub instructions since the flow pass won't find
3918 autoincrements etc.) This allows us to generate compare
3919 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
3920 of LDI *AR0++, R0; CMPI 42, R0.
3922 Note that expand_binops will try to load an expensive constant
3923 into a register if it is used within a loop. Unfortunately,
3924 the cost mechanism doesn't allow us to look at the other
3925 operand to decide whether the constant is expensive. */
3927 if (! reload_in_progress
3930 && GET_CODE (operands[1]) == CONST_INT
3931 && rtx_cost (operands[1], code) > 1)
3932 operands[1] = force_reg (mode, operands[1]);
3934 if (! reload_in_progress
3935 && ! c4x_valid_operands (code, operands, mode, 0))
3936 operands[0] = force_reg (mode, operands[0]);
3940 /* We cannot do this for ADDI/SUBI insns since we will
3941 defeat the flow pass from finding autoincrement addressing
3943 if (! reload_in_progress
3944 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
3947 && GET_CODE (operands[2]) == CONST_INT
3948 && rtx_cost (operands[2], code) > 1)
3949 operands[2] = force_reg (mode, operands[2]);
3951 /* We can get better code on a C30 if we force constant shift counts
3952 into a register. This way they can get hoisted out of loops,
3953 tying up a register but saving an instruction. The downside is
3954 that they may get allocated to an address or index register, and
3955 thus we will get a pipeline conflict if there is a nearby
3956 indirect address using an address register.
3958 Note that expand_binops will not try to load an expensive constant
3959 into a register if it is used within a loop for a shift insn. */
3961 if (! reload_in_progress
3962 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
3964 /* If the operand combination is invalid, we force operand1 into a
3965 register, preventing reload from having to do this at a
3967 operands[1] = force_reg (mode, operands[1]);
3970 emit_move_insn (operands[0], operands[1]);
3971 operands[1] = copy_rtx (operands[0]);
3975 /* Just in case... */
3976 if (! c4x_valid_operands (code, operands, mode, 0))
3977 operands[2] = force_reg (mode, operands[2]);
3981 /* Right shifts require a negative shift count, but GCC expects
3982 a positive count, so we emit a NEG. */
3983 if ((code == ASHIFTRT || code == LSHIFTRT)
3984 && (GET_CODE (operands[2]) != CONST_INT))
3985 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
3988 /* When the shift count is greater than 32, the result
3989 can be implementation dependent. We truncate the result to
3990 fit in 5 bits so that we do not emit invalid code when
3991 optimizing---such as trying to generate lhu2 with 20021124-1.c. */
3992 if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
3993 && (GET_CODE (operands[2]) == CONST_INT))
3994 && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
3996 = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));
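/* Added worked example: for a QImode shift, GET_MODE_BITSIZE is 32
   on the c4x, so a constant count of 40 is truncated to 40 & 31 == 8,
   keeping the emitted count within the 5 bits the hardware decodes.  */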
4002 /* The following predicates are used for instruction scheduling. */
4005 group1_reg_operand (rtx op, enum machine_mode mode)
4007 if (mode != VOIDmode && mode != GET_MODE (op))
4009 if (GET_CODE (op) == SUBREG)
4010 op = SUBREG_REG (op);
4011 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4016 group1_mem_operand (rtx op, enum machine_mode mode)
4018 if (mode != VOIDmode && mode != GET_MODE (op))
4021 if (GET_CODE (op) == MEM)
4024 if (GET_CODE (op) == PLUS)
4026 rtx op0 = XEXP (op, 0);
4027 rtx op1 = XEXP (op, 1);
4029 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4030 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4033 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4041 /* Return true if OP is any one of the address registers. */
4044 arx_reg_operand (rtx op, enum machine_mode mode)
4046 if (mode != VOIDmode && mode != GET_MODE (op))
4048 if (GET_CODE (op) == SUBREG)
4049 op = SUBREG_REG (op);
4050 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4055 c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
4057 if (mode != VOIDmode && mode != GET_MODE (op))
4059 if (GET_CODE (op) == SUBREG)
4060 op = SUBREG_REG (op);
4061 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4066 c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
4068 if (mode != VOIDmode && mode != GET_MODE (op))
4071 if (GET_CODE (op) == MEM)
4074 switch (GET_CODE (op))
4083 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4087 if (REG_P (XEXP (op, 0)) && (! reload_completed
4088 || (REGNO (XEXP (op, 0)) == regno)))
4090 if (REG_P (XEXP (XEXP (op, 1), 1))
4091 && (! reload_completed
4092 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
4098 rtx op0 = XEXP (op, 0);
4099 rtx op1 = XEXP (op, 1);
4101 if ((REG_P (op0) && (! reload_completed
4102 || (REGNO (op0) == regno)))
4103 || (REG_P (op1) && (! reload_completed
4104 || (REGNO (op1) == regno))))
4118 ar0_reg_operand (rtx op, enum machine_mode mode)
4120 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4125 ar0_mem_operand (rtx op, enum machine_mode mode)
4127 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4132 ar1_reg_operand (rtx op, enum machine_mode mode)
4134 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4139 ar1_mem_operand (rtx op, enum machine_mode mode)
4141 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4146 ar2_reg_operand (rtx op, enum machine_mode mode)
4148 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4153 ar2_mem_operand (rtx op, enum machine_mode mode)
4155 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4160 ar3_reg_operand (rtx op, enum machine_mode mode)
4162 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4167 ar3_mem_operand (rtx op, enum machine_mode mode)
4169 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4174 ar4_reg_operand (rtx op, enum machine_mode mode)
4176 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4181 ar4_mem_operand (rtx op, enum machine_mode mode)
4183 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4188 ar5_reg_operand (rtx op, enum machine_mode mode)
4190 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4195 ar5_mem_operand (rtx op, enum machine_mode mode)
4197 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4202 ar6_reg_operand (rtx op, enum machine_mode mode)
4204 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4209 ar6_mem_operand (rtx op, enum machine_mode mode)
4211 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4216 ar7_reg_operand (rtx op, enum machine_mode mode)
4218 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4223 ar7_mem_operand (rtx op, enum machine_mode mode)
4225 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4230 ir0_reg_operand (rtx op, enum machine_mode mode)
4232 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4237 ir0_mem_operand (rtx op, enum machine_mode mode)
4239 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4244 ir1_reg_operand (rtx op, enum machine_mode mode)
4246 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4251 ir1_mem_operand (rtx op, enum machine_mode mode)
4253 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4257 /* This is similar to operand_subword but allows autoincrement
4261 c4x_operand_subword (rtx op, int i, int validate_address,
4262 enum machine_mode mode)
4264 if (mode != HImode && mode != HFmode)
4265 fatal_insn ("c4x_operand_subword: invalid mode", op);
4267 if (mode == HFmode && REG_P (op))
4268 fatal_insn ("c4x_operand_subword: invalid operand", op);
4270 if (GET_CODE (op) == MEM)
4272 enum rtx_code code = GET_CODE (XEXP (op, 0));
4273 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4274 enum machine_mode submode;
4279 else if (mode == HFmode)
4286 return gen_rtx_MEM (submode, XEXP (op, 0));
4292 /* We could handle these with some difficulty.
4293 e.g., *p-- => *(p-=2); *(p+1). */
4294 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4300 fatal_insn ("c4x_operand_subword: invalid address", op);
4302 /* Even though offsettable_address_p considers (MEM
4303 (LO_SUM)) to be offsettable, it is not safe if the
4304 address is at the end of the data page since we also have
4305 to fix up the associated HIGH part. In this case, where
4306 we are trying to split a HImode or HFmode memory
4307 reference, we would have to emit another insn to reload a
4308 new HIGH value. It's easier to disable LO_SUM memory references
4309 in HImode or HFmode and we probably get better code. */
4311 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4318 return operand_subword (op, i, validate_address, mode);
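/* Added illustration: splitting the HImode reference *AR0 yields the
   two QImode word references

	word 0:	*AR0
	word 1:	*+AR0(1)

   which is why HImode and HFmode operands must be offsettable and why
   the autoincrement forms are rejected above.  */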
4323 struct name_list *next;
4327 static struct name_list *global_head;
4328 static struct name_list *extern_head;
4331 /* Add NAME to list of global symbols and remove from external list if
4332 present on external list. */
4335 c4x_global_label (const char *name)
4337 struct name_list *p, *last;
4339 /* Do not insert duplicate names; linearly search through the list of
4344 if (strcmp (p->name, name) == 0)
4348 p = (struct name_list *) xmalloc (sizeof *p);
4349 p->next = global_head;
4353 /* Remove this name from ref list if present. */
4358 if (strcmp (p->name, name) == 0)
4361 last->next = p->next;
4363 extern_head = p->next;
4372 /* Add NAME to list of external symbols. */
4375 c4x_external_ref (const char *name)
4377 struct name_list *p;
4379 /* Do not insert duplicate names. */
4383 if (strcmp (p->name, name) == 0)
4388 /* Do not insert ref if global found. */
4392 if (strcmp (p->name, name) == 0)
4396 p = (struct name_list *) xmalloc (sizeof *p);
4397 p->next = extern_head;
4402 /* We need to have a data section we can identify so that we can set
4403 the DP register back to a data pointer in the small memory model.
4404 This is only required for ISRs if we are paranoid that someone
4405 may have quietly changed this register on the sly. */
4407 c4x_file_start (void)
4409 default_file_start ();
4410 fprintf (asm_out_file, "\t.version\t%d\n", c4x_cpu_version);
4411 fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
4418 struct name_list *p;
4420 /* Output all external names that are not global. */
4424 fprintf (asm_out_file, "\t.ref\t");
4425 assemble_name (asm_out_file, p->name);
4426 fprintf (asm_out_file, "\n");
4429 fprintf (asm_out_file, "\t.end\n");
4434 c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
4436 while (list != NULL_TREE
4437 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4438 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4439 list = TREE_CHAIN (list);
4441 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4447 c4x_insert_attributes (tree decl, tree *attributes)
4449 switch (TREE_CODE (decl))
4452 c4x_check_attribute ("section", code_tree, decl, attributes);
4453 c4x_check_attribute ("const", pure_tree, decl, attributes);
4454 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4455 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4456 c4x_check_attribute ("naked", naked_tree, decl, attributes);
4460 c4x_check_attribute ("section", data_tree, decl, attributes);
4468 /* Table of valid machine attributes. */
4469 const struct attribute_spec c4x_attribute_table[] =
4471 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4472 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4473 { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4474 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4475 { NULL, 0, 0, false, false, false, NULL }
4478 /* Handle an attribute requiring a FUNCTION_TYPE;
4479 arguments as in struct attribute_spec.handler. */
4481 c4x_handle_fntype_attribute (tree *node, tree name,
4482 tree args ATTRIBUTE_UNUSED,
4483 int flags ATTRIBUTE_UNUSED,
4486 if (TREE_CODE (*node) != FUNCTION_TYPE)
4488 warning ("%qs attribute only applies to functions",
4489 IDENTIFIER_POINTER (name));
4490 *no_add_attrs = true;
4497 /* !!! FIXME to emit RPTS correctly. */
4500 c4x_rptb_rpts_p (rtx insn, rtx op)
4502 /* The next insn should be our label marking where the
4503 repeat block starts. */
4504 insn = NEXT_INSN (insn);
4505 if (GET_CODE (insn) != CODE_LABEL)
4507 /* Some insns may have been shifted between the RPTB insn
4508 and the top label... They were probably destined to
4509 be moved out of the loop. For now, let's leave them
4510 where they are and print a warning. We should
4511 probably move these insns before the repeat block insn. */
4513 fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved\n",
4518 /* Skip any notes. */
4519 insn = next_nonnote_insn (insn);
4521 /* This should be our first insn in the loop. */
4522 if (! INSN_P (insn))
4525 /* Skip any notes. */
4526 insn = next_nonnote_insn (insn);
4528 if (! INSN_P (insn))
4531 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4537 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4541 /* Check if register r11 is used as the destination of an insn. */
4544 c4x_r11_set_p (rtx x)
4553 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4554 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4556 if (INSN_P (x) && (set = single_set (x)))
4559 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4562 fmt = GET_RTX_FORMAT (GET_CODE (x));
4563 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4567 if (c4x_r11_set_p (XEXP (x, i)))
4570 else if (fmt[i] == 'E')
4571 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4572 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4579 /* The c4x sometimes has a problem when the insn before the laj insn
4580 sets the r11 register. Check for this situation. */
4583 c4x_check_laj_p (rtx insn)
4585 insn = prev_nonnote_insn (insn);
4587 /* If this is the start of the function, no nop is needed. */
4591 /* If the previous insn is a code label, we have to insert a nop. This
4592 could be a jump or table jump. We can find the normal jumps by
4593 scanning the function but this will not find table jumps. */
4594 if (GET_CODE (insn) == CODE_LABEL)
4597 /* If the previous insn sets register r11 we have to insert a nop. */
4598 if (c4x_r11_set_p (insn))
4601 /* No nop needed. */
4606 /* Adjust the cost of a scheduling dependency. Return the new cost of
4607 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4608 A set of an address register followed by a use incurs a 2 cycle
4609 stall (reduced to a single cycle on the c40 using LDA), while
4610 a read of an address register followed by a use incurs a single cycle stall.
4612 #define SET_USE_COST 3
4613 #define SETLDA_USE_COST 2
4614 #define READ_USE_COST 2
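/* Added example of how these costs are applied (illustrative):

	ldi	*+ar3(2), ar0	; dep_insn sets AR0
	ldi	*ar0, r0	; insn uses AR0 for addressing

   Here the second load is charged SET_USE_COST (3), modelling the
   2-cycle address register stall; if the c40 LDA instruction is used
   for the first load instead, the charge drops to SETLDA_USE_COST (2).  */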
4617 c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
4619 /* Don't worry about this until we know what registers have been
4621 if (flag_schedule_insns == 0 && ! reload_completed)
4624 /* How do we handle dependencies where a read followed by another
4625 read causes a pipeline stall? For example, a read of ar0 followed
4626 by the use of ar0 for a memory reference. It looks like we
4627 need to extend the scheduler to handle this case. */
4629 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4630 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4631 so only deal with insns we know about. */
4632 if (recog_memoized (dep_insn) < 0)
4635 if (REG_NOTE_KIND (link) == 0)
4639 /* Data dependency; DEP_INSN writes a register that INSN reads some
4643 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4644 max = SET_USE_COST > max ? SET_USE_COST : max;
4645 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4646 max = READ_USE_COST > max ? READ_USE_COST : max;
4650 /* This could be significantly optimized. We should look
4651 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4652 insn uses ar0-ar7. We then test if the same register
4653 is used. The tricky bit is that some operands will
4654 use several registers... */
4655 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4656 max = SET_USE_COST > max ? SET_USE_COST : max;
4657 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4658 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4659 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4660 max = READ_USE_COST > max ? READ_USE_COST : max;
4662 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4663 max = SET_USE_COST > max ? SET_USE_COST : max;
4664 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4665 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4666 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4667 max = READ_USE_COST > max ? READ_USE_COST : max;
4669 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4670 max = SET_USE_COST > max ? SET_USE_COST : max;
4671 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4672 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4673 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4674 max = READ_USE_COST > max ? READ_USE_COST : max;
4676 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4677 max = SET_USE_COST > max ? SET_USE_COST : max;
4678 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4679 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4680 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4681 max = READ_USE_COST > max ? READ_USE_COST : max;
4683 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4684 max = SET_USE_COST > max ? SET_USE_COST : max;
4685 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4686 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4687 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4688 max = READ_USE_COST > max ? READ_USE_COST : max;
4690 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4691 max = SET_USE_COST > max ? SET_USE_COST : max;
4692 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4693 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4694 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4695 max = READ_USE_COST > max ? READ_USE_COST : max;
4697 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4698 max = SET_USE_COST > max ? SET_USE_COST : max;
4699 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4700 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4701 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4702 max = READ_USE_COST > max ? READ_USE_COST : max;
4704 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4705 max = SET_USE_COST > max ? SET_USE_COST : max;
4706 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4707 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4708 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4709 max = READ_USE_COST > max ? READ_USE_COST : max;
4711 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4712 max = SET_USE_COST > max ? SET_USE_COST : max;
4713 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4714 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4716 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4717 max = SET_USE_COST > max ? SET_USE_COST : max;
4718 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
4719 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4725 /* For other data dependencies, the default cost specified in the
4729 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
4731 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4734 /* For c4x anti dependencies, the cost is 0. */
4737 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
4739 /* Output dependency; DEP_INSN writes a register that INSN writes some
4742 /* For c4x output dependencies, the cost is 0. */
4750 c4x_init_builtins (void)
4752 tree endlink = void_list_node;
4754 lang_hooks.builtin_function ("fast_ftoi",
4757 tree_cons (NULL_TREE, double_type_node,
4759 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
4760 lang_hooks.builtin_function ("ansi_ftoi",
4763 tree_cons (NULL_TREE, double_type_node,
4765 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL,
4768 lang_hooks.builtin_function ("fast_imult",
4771 tree_cons (NULL_TREE, integer_type_node,
4772 tree_cons (NULL_TREE,
4775 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL,
4779 lang_hooks.builtin_function ("toieee",
4782 tree_cons (NULL_TREE, double_type_node,
4784 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL,
4786 lang_hooks.builtin_function ("frieee",
4789 tree_cons (NULL_TREE, double_type_node,
4791 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL,
4793 lang_hooks.builtin_function ("fast_invf",
4796 tree_cons (NULL_TREE, double_type_node,
4798 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL,
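/* Added usage sketch for the builtins registered above (variable
   names are made up; each call expands to the insn emitted by the
   matching case in c4x_expand_builtin below):

	int	i = fast_ftoi (3.5);	-- C4X_BUILTIN_FIX
	int	p = fast_imult (x, y);	-- C4X_BUILTIN_MPYI
	double	e = frieee (d);		-- C4X_BUILTIN_FRIEEE
*/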
4805 c4x_expand_builtin (tree exp, rtx target,
4806 rtx subtarget ATTRIBUTE_UNUSED,
4807 enum machine_mode mode ATTRIBUTE_UNUSED,
4808 int ignore ATTRIBUTE_UNUSED)
4810 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4811 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4812 tree arglist = TREE_OPERAND (exp, 1);
4818 case C4X_BUILTIN_FIX:
4819 arg0 = TREE_VALUE (arglist);
4820 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4821 if (! target || ! register_operand (target, QImode))
4822 target = gen_reg_rtx (QImode);
4823 emit_insn (gen_fixqfqi_clobber (target, r0));
4826 case C4X_BUILTIN_FIX_ANSI:
4827 arg0 = TREE_VALUE (arglist);
4828 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4829 if (! target || ! register_operand (target, QImode))
4830 target = gen_reg_rtx (QImode);
4831 emit_insn (gen_fix_truncqfqi2 (target, r0));
4834 case C4X_BUILTIN_MPYI:
4837 arg0 = TREE_VALUE (arglist);
4838 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4839 r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
4840 r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
4841 if (! target || ! register_operand (target, QImode))
4842 target = gen_reg_rtx (QImode);
4843 emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
4846 case C4X_BUILTIN_TOIEEE:
4849 arg0 = TREE_VALUE (arglist);
4850 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4851 if (! target || ! register_operand (target, QFmode))
4852 target = gen_reg_rtx (QFmode);
4853 emit_insn (gen_toieee (target, r0));
4856 case C4X_BUILTIN_FRIEEE:
4859 arg0 = TREE_VALUE (arglist);
4860 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4861 if (register_operand (r0, QFmode))
4863 r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
4864 emit_move_insn (r1, r0);
4867 if (! target || ! register_operand (target, QFmode))
4868 target = gen_reg_rtx (QFmode);
4869 emit_insn (gen_frieee (target, r0));
4872 case C4X_BUILTIN_RCPF:
4875 arg0 = TREE_VALUE (arglist);
4876 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4877 if (! target || ! register_operand (target, QFmode))
4878 target = gen_reg_rtx (QFmode);
4879 emit_insn (gen_rcpfqf_clobber (target, r0));
4886 c4x_init_libfuncs (void)
4888 set_optab_libfunc (smul_optab, QImode, "__mulqi3");
4889 set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
4890 set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
4891 set_optab_libfunc (smod_optab, QImode, "__modqi3");
4892 set_optab_libfunc (umod_optab, QImode, "__umodqi3");
4893 set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
4894 set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
4895 set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
4896 set_optab_libfunc (smul_optab, HImode, "__mulhi3");
4897 set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
4898 set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
4899 set_optab_libfunc (smod_optab, HImode, "__modhi3");
4900 set_optab_libfunc (umod_optab, HImode, "__umodhi3");
4901 set_optab_libfunc (ffs_optab, QImode, "__ffs");
4902 smulhi3_libfunc = init_one_libfunc ("__smulhi3_high");
4903 umulhi3_libfunc = init_one_libfunc ("__umulhi3_high");
4904 fix_truncqfhi2_libfunc = init_one_libfunc ("__fix_truncqfhi2");
4905 fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
4906 fix_trunchfhi2_libfunc = init_one_libfunc ("__fix_trunchfhi2");
4907 fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
4908 floathiqf2_libfunc = init_one_libfunc ("__floathiqf2");
4909 floatunshiqf2_libfunc = init_one_libfunc ("__ufloathiqf2");
4910 floathihf2_libfunc = init_one_libfunc ("__floathihf2");
4911 floatunshihf2_libfunc = init_one_libfunc ("__ufloathihf2");
4915 c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED,
4916 tree decl ATTRIBUTE_UNUSED)
4918 fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
4922 c4x_globalize_label (FILE *stream, const char *name)
4924 default_globalize_label (stream, name);
4925 c4x_global_label (name);
4928 #define SHIFT_CODE_P(C) \
4929 ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
4930 #define LOGICAL_CODE_P(C) \
4931 ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
4933 /* Compute a (partial) cost for rtx X. Return true if the complete
4934 cost has been computed, and false if subexpressions should be
4935 scanned. In either case, *TOTAL contains the cost result. */
4938 c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
4944 /* Some small integers are effectively free for the C40. We should
4945 also consider if we are using the small memory model. With
4946 the big memory model we require an extra insn for a constant
4947 loaded from memory. */
4951 if (c4x_J_constant (x))
4953 else if (! TARGET_C3X
4954 && outer_code == AND
4955 && (val == 255 || val == 65535))
4957 else if (! TARGET_C3X
4958 && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
4959 && (val == 16 || val == 24))
4961 else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
4963 else if (LOGICAL_CODE_P (outer_code)
4964 ? c4x_L_constant (x) : c4x_I_constant (x))
4977 if (c4x_H_constant (x))
4979 else if (GET_MODE (x) == QFmode)
4985 /* ??? Note that we return true, rather than false so that rtx_cost
4986 doesn't include the constant costs. Otherwise expand_mult will
4987 think that it is cheaper to synthesize a multiply rather than to
4988 use a multiply instruction. I think this is because the algorithm
4989 synth_mult doesn't take into account the loading of the operands,
4990 whereas the calculation of mult_cost does. */
4999 *total = COSTS_N_INSNS (1);
5003 *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
5004 || TARGET_MPYI ? 1 : 14);
5011 *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
5020 /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
5023 c4x_external_libcall (rtx fun)
5025 /* This is only needed to keep asm30 happy for ___divqf3 etc. */
5026 c4x_external_ref (XSTR (fun, 0));
5029 /* Worker function for TARGET_STRUCT_VALUE_RTX. */
5032 c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
5033 int incoming ATTRIBUTE_UNUSED)
5035 return gen_rtx_REG (Pmode, AR0_REGNO);