1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
49 #include "c4x-protos.h"
51 #include "target-def.h"
55 rtx fix_truncqfhi2_libfunc;
56 rtx fixuns_truncqfhi2_libfunc;
57 rtx fix_trunchfhi2_libfunc;
58 rtx fixuns_trunchfhi2_libfunc;
59 rtx floathiqf2_libfunc;
60 rtx floatunshiqf2_libfunc;
61 rtx floathihf2_libfunc;
62 rtx floatunshihf2_libfunc;
64 static int c4x_leaf_function;
66 static const char *float_reg_names[] = FLOAT_REGISTER_NAMES;
68 /* Array of the smallest class containing reg number REGNO, indexed by
69 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
70 registers are available and set the class to NO_REGS for registers
71 that the target switches say are unavailable. */
73 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
75 /* Reg Modes Saved. */
76 R0R1_REGS, /* R0 QI, QF, HF No. */
77 R0R1_REGS, /* R1 QI, QF, HF No. */
78 R2R3_REGS, /* R2 QI, QF, HF No. */
79 R2R3_REGS, /* R3 QI, QF, HF No. */
80 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
81 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
82 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
83 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
84 ADDR_REGS, /* AR0 QI No. */
85 ADDR_REGS, /* AR1 QI No. */
86 ADDR_REGS, /* AR2 QI No. */
87 ADDR_REGS, /* AR3 QI QI. */
88 ADDR_REGS, /* AR4 QI QI. */
89 ADDR_REGS, /* AR5 QI QI. */
90 ADDR_REGS, /* AR6 QI QI. */
91 ADDR_REGS, /* AR7 QI QI. */
92 DP_REG, /* DP QI No. */
93 INDEX_REGS, /* IR0 QI No. */
94 INDEX_REGS, /* IR1 QI No. */
95 BK_REG, /* BK QI QI. */
96 SP_REG, /* SP QI No. */
97 ST_REG, /* ST CC No. */
98 NO_REGS, /* DIE/IE No. */
99 NO_REGS, /* IIE/IF No. */
100 NO_REGS, /* IIF/IOF No. */
101 INT_REGS, /* RS QI No. */
102 INT_REGS, /* RE QI No. */
103 RC_REG, /* RC QI No. */
104 EXT_REGS, /* R8 QI, QF, HF QI. */
105 EXT_REGS, /* R9 QI, QF, HF No. */
106 EXT_REGS, /* R10 QI, QF, HF No. */
107 EXT_REGS, /* R11 QI, QF, HF No. */
110 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
112 /* Reg Modes Saved. */
113 HFmode, /* R0 QI, QF, HF No. */
114 HFmode, /* R1 QI, QF, HF No. */
115 HFmode, /* R2 QI, QF, HF No. */
116 HFmode, /* R3 QI, QF, HF No. */
117 QFmode, /* R4 QI, QF, HF QI. */
118 QFmode, /* R5 QI, QF, HF QI. */
119 QImode, /* R6 QI, QF, HF QF. */
120 QImode, /* R7 QI, QF, HF QF. */
121 QImode, /* AR0 QI No. */
122 QImode, /* AR1 QI No. */
123 QImode, /* AR2 QI No. */
124 QImode, /* AR3 QI QI. */
125 QImode, /* AR4 QI QI. */
126 QImode, /* AR5 QI QI. */
127 QImode, /* AR6 QI QI. */
128 QImode, /* AR7 QI QI. */
129 VOIDmode, /* DP QI No. */
130 QImode, /* IR0 QI No. */
131 QImode, /* IR1 QI No. */
132 QImode, /* BK QI QI. */
133 VOIDmode, /* SP QI No. */
134 VOIDmode, /* ST CC No. */
135 VOIDmode, /* DIE/IE No. */
136 VOIDmode, /* IIE/IF No. */
137 VOIDmode, /* IIF/IOF No. */
138 QImode, /* RS QI No. */
139 QImode, /* RE QI No. */
140 VOIDmode, /* RC QI No. */
141 QFmode, /* R8 QI, QF, HF QI. */
142 HFmode, /* R9 QI, QF, HF No. */
143 HFmode, /* R10 QI, QF, HF No. */
144 HFmode, /* R11 QI, QF, HF No. */
148 /* Test and compare insns in c4x.md store the information needed to
149 generate branch and scc insns here. */
151 struct rtx_def *c4x_compare_op0 = NULL_RTX;
152 struct rtx_def *c4x_compare_op1 = NULL_RTX;
154 const char *c4x_rpts_cycles_string;
155 int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
156 const char *c4x_cpu_version_string;
157 int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
159 /* Pragma definitions. */
161 static tree code_tree = NULL_TREE;
162 static tree data_tree = NULL_TREE;
163 static tree pure_tree = NULL_TREE;
164 static tree noreturn_tree = NULL_TREE;
165 static tree interrupt_tree = NULL_TREE;
167 /* Forward declarations */
168 static void c4x_add_gc_roots PARAMS ((void));
169 static int c4x_isr_reg_used_p PARAMS ((unsigned int));
170 static int c4x_leaf_function_p PARAMS ((void));
171 static int c4x_assembler_function_p PARAMS ((void));
172 static int c4x_immed_float_p PARAMS ((rtx));
173 static int c4x_a_register PARAMS ((rtx));
174 static int c4x_x_register PARAMS ((rtx));
175 static int c4x_immed_int_constant PARAMS ((rtx));
176 static int c4x_immed_float_constant PARAMS ((rtx));
177 static int c4x_K_constant PARAMS ((rtx));
178 static int c4x_N_constant PARAMS ((rtx));
179 static int c4x_O_constant PARAMS ((rtx));
180 static int c4x_R_indirect PARAMS ((rtx));
181 static int c4x_S_indirect PARAMS ((rtx));
182 static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
183 static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
184 enum machine_mode, int));
185 static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
186 static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
187 static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
188 static int c4x_parse_pragma PARAMS ((const char *, tree *, tree *));
189 static int c4x_r11_set_p PARAMS ((rtx));
190 static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
191 static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
192 static int c4x_valid_type_attribute_p PARAMS ((tree, tree, tree, tree));
193 static void c4x_insert_attributes PARAMS ((tree, tree *));
194 static void c4x_asm_named_section PARAMS ((const char *, unsigned int,
197 /* Initialize the GCC target structure. */
198 #undef TARGET_VALID_TYPE_ATTRIBUTE
199 #define TARGET_VALID_TYPE_ATTRIBUTE c4x_valid_type_attribute_p
201 #undef TARGET_INSERT_ATTRIBUTES
202 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
204 #undef TARGET_INIT_BUILTINS
205 #define TARGET_INIT_BUILTINS c4x_init_builtins
207 #undef TARGET_EXPAND_BUILTIN
208 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
210 struct gcc_target targetm = TARGET_INITIALIZER;
212 /* Called to register all of our global variables with the garbage
218 ggc_add_rtx_root (&c4x_compare_op0, 1);
219 ggc_add_rtx_root (&c4x_compare_op1, 1);
220 ggc_add_tree_root (&code_tree, 1);
221 ggc_add_tree_root (&data_tree, 1);
222 ggc_add_tree_root (&pure_tree, 1);
223 ggc_add_tree_root (&noreturn_tree, 1);
224 ggc_add_tree_root (&interrupt_tree, 1);
225 ggc_add_rtx_root (&smulhi3_libfunc, 1);
226 ggc_add_rtx_root (&umulhi3_libfunc, 1);
227 ggc_add_rtx_root (&fix_truncqfhi2_libfunc, 1);
228 ggc_add_rtx_root (&fixuns_truncqfhi2_libfunc, 1);
229 ggc_add_rtx_root (&fix_trunchfhi2_libfunc, 1);
230 ggc_add_rtx_root (&fixuns_trunchfhi2_libfunc, 1);
231 ggc_add_rtx_root (&floathiqf2_libfunc, 1);
232 ggc_add_rtx_root (&floatunshiqf2_libfunc, 1);
233 ggc_add_rtx_root (&floathihf2_libfunc, 1);
234 ggc_add_rtx_root (&floatunshihf2_libfunc, 1);
238 /* Override command line options.
239 Called once after all options have been parsed.
240 Mostly we process the processor
241 type and sometimes adjust other TARGET_ options. */
244 c4x_override_options ()
246 if (c4x_rpts_cycles_string)
247 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
252 c4x_cpu_version = 30;
254 c4x_cpu_version = 31;
256 c4x_cpu_version = 32;
258 c4x_cpu_version = 33;
260 c4x_cpu_version = 40;
262 c4x_cpu_version = 44;
264 c4x_cpu_version = 40;
266 /* -mcpu=xx overrides -m40 etc. */
267 if (c4x_cpu_version_string)
269 const char *p = c4x_cpu_version_string;
271 /* Also allow -mcpu=c30 etc. */
272 if (*p == 'c' || *p == 'C')
274 c4x_cpu_version = atoi (p);
277 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
278 C40_FLAG | C44_FLAG);
280 switch (c4x_cpu_version)
282 case 30: target_flags |= C30_FLAG; break;
283 case 31: target_flags |= C31_FLAG; break;
284 case 32: target_flags |= C32_FLAG; break;
285 case 33: target_flags |= C33_FLAG; break;
286 case 40: target_flags |= C40_FLAG; break;
287 case 44: target_flags |= C44_FLAG; break;
289 warning ("Unknown CPU version %d, using 40.\n", c4x_cpu_version);
290 c4x_cpu_version = 40;
291 target_flags |= C40_FLAG;
294 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
295 target_flags |= C3X_FLAG;
297 target_flags &= ~C3X_FLAG;
299 /* Convert foo / 8.0 into foo * 0.125, etc. */
300 set_fast_math_flags();
302 /* We should phase out the following at some stage.
303 This provides compatibility with the old -mno-aliases option. */
304 if (! TARGET_ALIASES && ! flag_argument_noalias)
305 flag_argument_noalias = 1;
307 /* Register global variables with the garbage collector. */
312 /* This is called before c4x_override_options. */
315 c4x_optimization_options (level, size)
316 int level ATTRIBUTE_UNUSED;
317 int size ATTRIBUTE_UNUSED;
319 /* Scheduling before register allocation can screw up global
320 register allocation, especially for functions that use MPY||ADD
321 instructions. The benefit we gain we get by scheduling before
322 register allocation is probably marginal anyhow. */
323 flag_schedule_insns = 0;
327 /* Write an ASCII string. */
329 #define C4X_ASCII_LIMIT 40
332 c4x_output_ascii (stream, ptr, len)
337 char sbuf[C4X_ASCII_LIMIT + 1];
338 int s, l, special, first = 1, onlys;
341 fprintf (stream, "\t.byte\t");
343 for (s = l = 0; len > 0; --len, ++ptr)
347 /* Escape " and \ with a \". */
348 special = *ptr == '\"' || *ptr == '\\';
350 /* If printable - add to buff. */
351 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
356 if (s < C4X_ASCII_LIMIT - 1)
371 fprintf (stream, "\"%s\"", sbuf);
373 if (TARGET_TI && l >= 80 && len > 1)
375 fprintf (stream, "\n\t.byte\t");
393 fprintf (stream, "%d", *ptr);
395 if (TARGET_TI && l >= 80 && len > 1)
397 fprintf (stream, "\n\t.byte\t");
408 fprintf (stream, "\"%s\"", sbuf);
411 fputc ('\n', stream);
416 c4x_hard_regno_mode_ok (regno, mode)
418 enum machine_mode mode;
423 case Pmode: /* Pointer (24/32 bits). */
425 case QImode: /* Integer (32 bits). */
426 return IS_INT_REGNO (regno);
428 case QFmode: /* Float, Double (32 bits). */
429 case HFmode: /* Long Double (40 bits). */
430 return IS_EXT_REGNO (regno);
432 case CCmode: /* Condition Codes. */
433 case CC_NOOVmode: /* Condition Codes. */
434 return IS_ST_REGNO (regno);
436 case HImode: /* Long Long (64 bits). */
437 /* We need two registers to store long longs. Note that
438 it is much easier to constrain the first register
439 to start on an even boundary. */
440 return IS_INT_REGNO (regno)
441 && IS_INT_REGNO (regno + 1)
445 return 0; /* We don't support these modes. */
451 /* Return non-zero if REGNO1 can be renamed to REGNO2. */
453 c4x_hard_regno_rename_ok (regno1, regno2)
457 /* We can not copy call saved registers from mode QI into QF or from
459 if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
461 if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
463 /* We cannot copy from an extended (40 bit) register to a standard
464 (32 bit) register because we only set the condition codes for
465 extended registers. */
466 if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
468 if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
473 /* The TI C3x C compiler register argument runtime model uses 6 registers,
474 AR2, R2, R3, RC, RS, RE.
476 The first two floating point arguments (float, double, long double)
477 that are found scanning from left to right are assigned to R2 and R3.
479 The remaining integer (char, short, int, long) or pointer arguments
480 are assigned to the remaining registers in the order AR2, R2, R3,
481 RC, RS, RE when scanning left to right, except for the last named
482 argument prior to an ellipsis denoting variable number of
483 arguments. We don't have to worry about the latter condition since
484 function.c treats the last named argument as anonymous (unnamed).
486 All arguments that cannot be passed in registers are pushed onto
487 the stack in reverse order (right to left). GCC handles that for us.
489 c4x_init_cumulative_args() is called at the start, so we can parse
490 the args to see how many floating point arguments and how many
491 integer (or pointer) arguments there are. c4x_function_arg() is
492 then called (sometimes repeatedly) for each argument (parsed left
493 to right) to obtain the register to pass the argument in, or zero
494 if the argument is to be passed on the stack. Once the compiler is
495 happy, c4x_function_arg_advance() is called.
497 Don't use R0 to pass arguments in, we use 0 to indicate a stack
500 static int c4x_int_reglist[3][6] =
502 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
503 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
504 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
507 static int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
510 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
511 function whose data type is FNTYPE.
512 For a library call, FNTYPE is 0. */
515 c4x_init_cumulative_args (cum, fntype, libname)
516 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
517 tree fntype; /* Tree ptr for function decl. */
518 rtx libname; /* SYMBOL_REF of library name or 0. */
520 tree param, next_param;
522 cum->floats = cum->ints = 0;
529 fprintf (stderr, "\nc4x_init_cumulative_args (");
532 tree ret_type = TREE_TYPE (fntype);
534 fprintf (stderr, "fntype code = %s, ret code = %s",
535 tree_code_name[(int) TREE_CODE (fntype)],
536 tree_code_name[(int) TREE_CODE (ret_type)]);
539 fprintf (stderr, "no fntype");
542 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
545 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
547 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
548 param; param = next_param)
552 next_param = TREE_CHAIN (param);
554 type = TREE_VALUE (param);
555 if (type && type != void_type_node)
557 enum machine_mode mode;
559 /* If the last arg doesn't have void type then we have
560 variable arguments. */
564 if ((mode = TYPE_MODE (type)))
566 if (! MUST_PASS_IN_STACK (mode, type))
568 /* Look for float, double, or long double argument. */
569 if (mode == QFmode || mode == HFmode)
571 /* Look for integer, enumeral, boolean, char, or pointer
573 else if (mode == QImode || mode == Pmode)
582 fprintf (stderr, "%s%s, args = %d)\n",
583 cum->prototype ? ", prototype" : "",
584 cum->var ? ", variable args" : "",
589 /* Update the data in CUM to advance over an argument
590 of mode MODE and data type TYPE.
591 (TYPE is null for libcalls where that information may not be available.) */
594 c4x_function_arg_advance (cum, mode, type, named)
595 CUMULATIVE_ARGS *cum; /* Current arg information. */
596 enum machine_mode mode; /* Current arg mode. */
597 tree type; /* Type of the arg or 0 if lib support. */
598 int named; /* Whether or not the argument was named. */
601 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
602 GET_MODE_NAME (mode), named);
606 && ! MUST_PASS_IN_STACK (mode, type))
608 /* Look for float, double, or long double argument. */
609 if (mode == QFmode || mode == HFmode)
611 /* Look for integer, enumeral, boolean, char, or pointer argument. */
612 else if (mode == QImode || mode == Pmode)
615 else if (! TARGET_MEMPARM && ! type)
617 /* Handle libcall arguments. */
618 if (mode == QFmode || mode == HFmode)
620 else if (mode == QImode || mode == Pmode)
627 /* Define where to put the arguments to a function. Value is zero to
628 push the argument on the stack, or a hard register in which to
631 MODE is the argument's machine mode.
632 TYPE is the data type of the argument (as a tree).
633 This is null for libcalls where that information may
635 CUM is a variable of type CUMULATIVE_ARGS which gives info about
636 the preceding args and about the function being called.
637 NAMED is nonzero if this argument is a named parameter
638 (otherwise it is an extra parameter matching an ellipsis). */
641 c4x_function_arg (cum, mode, type, named)
642 CUMULATIVE_ARGS *cum; /* Current arg information. */
643 enum machine_mode mode; /* Current arg mode. */
644 tree type; /* Type of the arg or 0 if lib support. */
645 int named; /* != 0 for normal args, == 0 for ... args. */
647 int reg = 0; /* Default to passing argument on stack. */
651 /* We can handle at most 2 floats in R2, R3. */
652 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
654 /* We can handle at most 6 integers minus number of floats passed
656 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
657 6 - cum->maxfloats : cum->ints;
659 /* If there is no prototype, assume all the arguments are integers. */
660 if (! cum->prototype)
663 cum->ints = cum->floats = 0;
667 /* This marks the last argument. We don't need to pass this through
669 if (type == void_type_node)
675 && ! MUST_PASS_IN_STACK (mode, type))
677 /* Look for float, double, or long double argument. */
678 if (mode == QFmode || mode == HFmode)
680 if (cum->floats < cum->maxfloats)
681 reg = c4x_fp_reglist[cum->floats];
683 /* Look for integer, enumeral, boolean, char, or pointer argument. */
684 else if (mode == QImode || mode == Pmode)
686 if (cum->ints < cum->maxints)
687 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
690 else if (! TARGET_MEMPARM && ! type)
692 /* We could use a different argument calling model for libcalls,
693 since we're only calling functions in libgcc. Thus we could
694 pass arguments for long longs in registers rather than on the
695 stack. In the meantime, use the odd TI format. We make the
696 assumption that we won't have more than two floating point
697 args, six integer args, and that all the arguments are of the
699 if (mode == QFmode || mode == HFmode)
700 reg = c4x_fp_reglist[cum->floats];
701 else if (mode == QImode || mode == Pmode)
702 reg = c4x_int_reglist[0][cum->ints];
707 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
708 GET_MODE_NAME (mode), named);
710 fprintf (stderr, ", reg=%s", reg_names[reg]);
712 fprintf (stderr, ", stack");
713 fprintf (stderr, ")\n");
716 return gen_rtx_REG (mode, reg);
723 c4x_va_start (stdarg_p, valist, nextarg)
728 nextarg = plus_constant (nextarg, stdarg_p ? 0 : UNITS_PER_WORD * 2);
730 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
734 /* C[34]x arguments grow in weird ways (downwards) that the standard
735 varargs stuff can't handle.. */
737 c4x_va_arg (valist, type)
742 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
743 build_int_2 (int_size_in_bytes (type), 0));
744 TREE_SIDE_EFFECTS (t) = 1;
746 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
751 c4x_isr_reg_used_p (regno)
754 /* Don't save/restore FP or ST, we handle them separately. */
755 if (regno == FRAME_POINTER_REGNUM
756 || IS_ST_REGNO (regno))
759 /* We could be a little smarter abut saving/restoring DP.
760 We'll only save if for the big memory model or if
761 we're paranoid. ;-) */
762 if (IS_DP_REGNO (regno))
763 return ! TARGET_SMALL || TARGET_PARANOID;
765 /* Only save/restore regs in leaf function that are used. */
766 if (c4x_leaf_function)
767 return regs_ever_live[regno] && fixed_regs[regno] == 0;
769 /* Only save/restore regs that are used by the ISR and regs
770 that are likely to be used by functions the ISR calls
771 if they are not fixed. */
772 return IS_EXT_REGNO (regno)
773 || ((regs_ever_live[regno] || call_used_regs[regno])
774 && fixed_regs[regno] == 0);
779 c4x_leaf_function_p ()
781 /* A leaf function makes no calls, so we only need
782 to save/restore the registers we actually use.
783 For the global variable leaf_function to be set, we need
784 to define LEAF_REGISTERS and all that it entails.
785 Let's check ourselves... */
787 if (lookup_attribute ("leaf_pretend",
788 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
791 /* Use the leaf_pretend attribute at your own risk. This is a hack
792 to speed up ISRs that call a function infrequently where the
793 overhead of saving and restoring the additional registers is not
794 warranted. You must save and restore the additional registers
795 required by the called function. Caveat emptor. Here's enough
798 if (leaf_function_p ())
806 c4x_assembler_function_p ()
810 type = TREE_TYPE (current_function_decl);
811 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
812 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
817 c4x_interrupt_function_p ()
819 if (lookup_attribute ("interrupt",
820 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
823 /* Look for TI style c_intnn. */
824 return current_function_name[0] == 'c'
825 && current_function_name[1] == '_'
826 && current_function_name[2] == 'i'
827 && current_function_name[3] == 'n'
828 && current_function_name[4] == 't'
829 && ISDIGIT (current_function_name[5])
830 && ISDIGIT (current_function_name[6]);
834 c4x_expand_prologue ()
837 int size = get_frame_size ();
840 /* In functions where ar3 is not used but frame pointers are still
841 specified, frame pointers are not adjusted (if >= -O2) and this
842 is used so it won't needlessly push the frame pointer. */
845 /* For __assembler__ function don't build a prologue. */
846 if (c4x_assembler_function_p ())
851 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
852 if (profile_block_flag == 2)
854 FUNCTION_BLOCK_PROFILER_EXIT
858 /* For __interrupt__ function build specific prologue. */
859 if (c4x_interrupt_function_p ())
861 c4x_leaf_function = c4x_leaf_function_p ();
863 insn = emit_insn (gen_push_st ());
864 RTX_FRAME_RELATED_P (insn) = 1;
867 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
868 RTX_FRAME_RELATED_P (insn) = 1;
869 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
870 gen_rtx_REG (QImode, SP_REGNO)));
871 RTX_FRAME_RELATED_P (insn) = 1;
872 /* We require that an ISR uses fewer than 32768 words of
873 local variables, otherwise we have to go to lots of
874 effort to save a register, load it with the desired size,
875 adjust the stack pointer, and then restore the modified
876 register. Frankly, I think it is a poor ISR that
877 requires more than 32767 words of local temporary
880 error ("ISR %s requires %d words of local vars, max is 32767.",
881 current_function_name, size);
883 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
884 gen_rtx_REG (QImode, SP_REGNO),
886 RTX_FRAME_RELATED_P (insn) = 1;
888 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
890 if (c4x_isr_reg_used_p (regno))
892 if (regno == DP_REGNO)
894 insn = emit_insn (gen_push_dp ());
895 RTX_FRAME_RELATED_P (insn) = 1;
899 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
900 RTX_FRAME_RELATED_P (insn) = 1;
901 if (IS_EXT_REGNO (regno))
903 insn = emit_insn (gen_pushqf
904 (gen_rtx_REG (QFmode, regno)));
905 RTX_FRAME_RELATED_P (insn) = 1;
910 /* We need to clear the repeat mode flag if the ISR is
911 going to use a RPTB instruction or uses the RC, RS, or RE
913 if (regs_ever_live[RC_REGNO]
914 || regs_ever_live[RS_REGNO]
915 || regs_ever_live[RE_REGNO])
917 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
918 RTX_FRAME_RELATED_P (insn) = 1;
921 /* Reload DP reg if we are paranoid about some turkey
922 violating small memory model rules. */
923 if (TARGET_SMALL && TARGET_PARANOID)
925 insn = emit_insn (gen_set_ldp_prologue
926 (gen_rtx_REG (QImode, DP_REGNO),
927 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
928 RTX_FRAME_RELATED_P (insn) = 1;
933 if (frame_pointer_needed)
936 || (current_function_args_size != 0)
939 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
940 RTX_FRAME_RELATED_P (insn) = 1;
941 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
942 gen_rtx_REG (QImode, SP_REGNO)));
943 RTX_FRAME_RELATED_P (insn) = 1;
948 /* Since ar3 is not used, we don't need to push it. */
954 /* If we use ar3, we need to push it. */
956 if ((size != 0) || (current_function_args_size != 0))
958 /* If we are omitting the frame pointer, we still have
959 to make space for it so the offsets are correct
960 unless we don't use anything on the stack at all. */
967 /* Local vars are too big, it will take multiple operations
971 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
972 GEN_INT(size >> 16)));
973 RTX_FRAME_RELATED_P (insn) = 1;
974 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
975 gen_rtx_REG (QImode, R1_REGNO),
977 RTX_FRAME_RELATED_P (insn) = 1;
981 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
982 GEN_INT(size & ~0xffff)));
983 RTX_FRAME_RELATED_P (insn) = 1;
985 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
986 gen_rtx_REG (QImode, R1_REGNO),
987 GEN_INT(size & 0xffff)));
988 RTX_FRAME_RELATED_P (insn) = 1;
989 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
990 gen_rtx_REG (QImode, SP_REGNO),
991 gen_rtx_REG (QImode, R1_REGNO)));
992 RTX_FRAME_RELATED_P (insn) = 1;
996 /* Local vars take up less than 32767 words, so we can directly
998 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
999 gen_rtx_REG (QImode, SP_REGNO),
1001 RTX_FRAME_RELATED_P (insn) = 1;
1004 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1006 if (regs_ever_live[regno] && ! call_used_regs[regno])
1008 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1010 if (TARGET_PRESERVE_FLOAT)
1012 insn = emit_insn (gen_pushqi
1013 (gen_rtx_REG (QImode, regno)));
1014 RTX_FRAME_RELATED_P (insn) = 1;
1016 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
1017 RTX_FRAME_RELATED_P (insn) = 1;
1019 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
1021 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
1022 RTX_FRAME_RELATED_P (insn) = 1;
1031 c4x_expand_epilogue()
1037 int size = get_frame_size ();
1039 /* For __assembler__ function build no epilogue. */
1040 if (c4x_assembler_function_p ())
1042 insn = emit_jump_insn (gen_return_from_epilogue ());
1043 RTX_FRAME_RELATED_P (insn) = 1;
1047 /* For __interrupt__ function build specific epilogue. */
1048 if (c4x_interrupt_function_p ())
1050 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1052 if (! c4x_isr_reg_used_p (regno))
1054 if (regno == DP_REGNO)
1056 insn = emit_insn (gen_pop_dp ());
1057 RTX_FRAME_RELATED_P (insn) = 1;
1061 /* We have to use unspec because the compiler will delete insns
1062 that are not call-saved. */
1063 if (IS_EXT_REGNO (regno))
1065 insn = emit_insn (gen_popqf_unspec
1066 (gen_rtx_REG (QFmode, regno)));
1067 RTX_FRAME_RELATED_P (insn) = 1;
1069 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1070 RTX_FRAME_RELATED_P (insn) = 1;
1075 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1076 gen_rtx_REG (QImode, SP_REGNO),
1078 RTX_FRAME_RELATED_P (insn) = 1;
1079 insn = emit_insn (gen_popqi
1080 (gen_rtx_REG (QImode, AR3_REGNO)));
1081 RTX_FRAME_RELATED_P (insn) = 1;
1083 insn = emit_insn (gen_pop_st ());
1084 RTX_FRAME_RELATED_P (insn) = 1;
1085 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1086 RTX_FRAME_RELATED_P (insn) = 1;
1090 if (frame_pointer_needed)
1093 || (current_function_args_size != 0)
1097 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1098 gen_rtx_MEM (QImode,
1100 (QImode, gen_rtx_REG (QImode,
1103 RTX_FRAME_RELATED_P (insn) = 1;
1105 /* We already have the return value and the fp,
1106 so we need to add those to the stack. */
1113 /* Since ar3 is not used for anything, we don't need to
1120 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1121 if (size || current_function_args_size)
1123 /* If we are ommitting the frame pointer, we still have
1124 to make space for it so the offsets are correct
1125 unless we don't use anything on the stack at all. */
1130 /* Now restore the saved registers, putting in the delayed branch
1132 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1134 if (regs_ever_live[regno] && ! call_used_regs[regno])
1136 if (regno == AR3_REGNO && dont_pop_ar3)
1139 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1141 insn = emit_insn (gen_popqf_unspec
1142 (gen_rtx_REG (QFmode, regno)));
1143 RTX_FRAME_RELATED_P (insn) = 1;
1144 if (TARGET_PRESERVE_FLOAT)
1146 insn = emit_insn (gen_popqi_unspec
1147 (gen_rtx_REG (QImode, regno)));
1148 RTX_FRAME_RELATED_P (insn) = 1;
1153 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1154 RTX_FRAME_RELATED_P (insn) = 1;
1159 if (frame_pointer_needed)
1162 || (current_function_args_size != 0)
1165 /* Restore the old FP. */
1168 (gen_rtx_REG (QImode, AR3_REGNO),
1169 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1171 RTX_FRAME_RELATED_P (insn) = 1;
1177 /* Local vars are too big, it will take multiple operations
1181 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1182 GEN_INT(size >> 16)));
1183 RTX_FRAME_RELATED_P (insn) = 1;
1184 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1185 gen_rtx_REG (QImode, R3_REGNO),
1187 RTX_FRAME_RELATED_P (insn) = 1;
1191 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1192 GEN_INT(size & ~0xffff)));
1193 RTX_FRAME_RELATED_P (insn) = 1;
1195 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1196 gen_rtx_REG (QImode, R3_REGNO),
1197 GEN_INT(size & 0xffff)));
1198 RTX_FRAME_RELATED_P (insn) = 1;
1199 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1200 gen_rtx_REG (QImode, SP_REGNO),
1201 gen_rtx_REG (QImode, R3_REGNO)));
1202 RTX_FRAME_RELATED_P (insn) = 1;
1206 /* Local vars take up less than 32768 words, so we can directly
1207 subtract the number. */
1208 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1209 gen_rtx_REG (QImode, SP_REGNO),
1211 RTX_FRAME_RELATED_P (insn) = 1;
1216 insn = emit_jump_insn (gen_return_indirect_internal
1217 (gen_rtx_REG (QImode, R2_REGNO)));
1218 RTX_FRAME_RELATED_P (insn) = 1;
1222 insn = emit_jump_insn (gen_return_from_epilogue ());
1223 RTX_FRAME_RELATED_P (insn) = 1;
1230 c4x_null_epilogue_p ()
1234 if (reload_completed
1235 && ! c4x_assembler_function_p ()
1236 && ! c4x_interrupt_function_p ()
1237 && ! current_function_calls_alloca
1238 && ! current_function_args_size
1239 && ! (profile_block_flag == 2)
1241 && ! get_frame_size ())
1243 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1244 if (regs_ever_live[regno] && ! call_used_regs[regno]
1245 && (regno != AR3_REGNO))
1254 c4x_emit_move_sequence (operands, mode)
1256 enum machine_mode mode;
1258 rtx op0 = operands[0];
1259 rtx op1 = operands[1];
1261 if (! reload_in_progress
1264 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1265 op1 = force_reg (mode, op1);
1267 if (GET_CODE (op1) == LO_SUM
1268 && GET_MODE (op1) == Pmode
1269 && dp_reg_operand (XEXP (op1, 0), mode))
1271 /* expand_increment will sometimes create a LO_SUM immediate
1273 op1 = XEXP (op1, 1);
1275 else if (symbolic_address_operand (op1, mode))
1277 if (TARGET_LOAD_ADDRESS)
1279 /* Alias analysis seems to do a better job if we force
1280 constant addresses to memory after reload. */
1281 emit_insn (gen_load_immed_address (op0, op1));
1286 /* Stick symbol or label address into the constant pool. */
1287 op1 = force_const_mem (Pmode, op1);
1290 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1292 /* We could be a lot smarter about loading some of these
1294 op1 = force_const_mem (mode, op1);
1297 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1298 and emit associated (HIGH (SYMREF)) if large memory model.
1299 c4x_legitimize_address could be used to do this,
1300 perhaps by calling validize_address. */
1301 if (TARGET_EXPOSE_LDP
1302 && ! (reload_in_progress || reload_completed)
1303 && GET_CODE (op1) == MEM
1304 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1306 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1308 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1309 op1 = change_address (op1, mode,
1310 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
1313 if (TARGET_EXPOSE_LDP
1314 && ! (reload_in_progress || reload_completed)
1315 && GET_CODE (op0) == MEM
1316 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1318 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1320 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1321 op0 = change_address (op0, mode,
1322 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
1325 if (GET_CODE (op0) == SUBREG
1326 && mixed_subreg_operand (op0, mode))
1328 /* We should only generate these mixed mode patterns
1329 during RTL generation. If we need do it later on
1330 then we'll have to emit patterns that won't clobber CC. */
1331 if (reload_in_progress || reload_completed)
1333 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1334 op0 = SUBREG_REG (op0);
1335 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1337 op0 = copy_rtx (op0);
1338 PUT_MODE (op0, QImode);
1344 emit_insn (gen_storeqf_int_clobber (op0, op1));
1350 if (GET_CODE (op1) == SUBREG
1351 && mixed_subreg_operand (op1, mode))
1353 /* We should only generate these mixed mode patterns
1354 during RTL generation. If we need do it later on
1355 then we'll have to emit patterns that won't clobber CC. */
1356 if (reload_in_progress || reload_completed)
1358 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1359 op1 = SUBREG_REG (op1);
1360 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1362 op1 = copy_rtx (op1);
1363 PUT_MODE (op1, QImode);
1369 emit_insn (gen_loadqf_int_clobber (op0, op1));
1376 && reg_operand (op0, mode)
1377 && const_int_operand (op1, mode)
1378 && ! IS_INT16_CONST (INTVAL (op1))
1379 && ! IS_HIGH_CONST (INTVAL (op1)))
1381 emit_insn (gen_loadqi_big_constant (op0, op1));
1386 && reg_operand (op0, mode)
1387 && const_int_operand (op1, mode))
1389 emit_insn (gen_loadhi_big_constant (op0, op1));
1393 /* Adjust operands in case we have modified them. */
1397 /* Emit normal pattern. */
1403 c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1406 enum machine_mode dmode;
1407 enum machine_mode smode;
1419 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1420 operands[1], smode);
1421 equiv = gen_rtx (code, dmode, operands[1]);
1425 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1426 operands[1], smode, operands[2], smode);
1427 equiv = gen_rtx (code, dmode, operands[1], operands[2]);
1434 insns = get_insns ();
1436 emit_libcall_block (insns, operands[0], ret, equiv);
1441 c4x_emit_libcall3 (libcall, code, mode, operands)
1444 enum machine_mode mode;
1447 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
1452 c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1455 enum machine_mode mode;
1463 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1464 operands[1], mode, operands[2], mode);
1465 equiv = gen_rtx_TRUNCATE (mode,
1466 gen_rtx_LSHIFTRT (HImode,
1467 gen_rtx_MULT (HImode,
1468 gen_rtx (code, HImode, operands[1]),
1469 gen_rtx (code, HImode, operands[2])),
1471 insns = get_insns ();
1473 emit_libcall_block (insns, operands[0], ret, equiv);
1477 /* Set the SYMBOL_REF_FLAG for a function decl. However, wo do not
1478 yet use this info. */
1480 c4x_encode_section_info (decl)
1484 if (TREE_CODE (TREE_TYPE (decl)) == FUNCTION_TYPE)
1485 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
1487 if (TREE_CODE (decl) == FUNCTION_DECL)
1488 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
1494 c4x_check_legit_addr (mode, addr, strict)
1495 enum machine_mode mode;
1499 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1500 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1501 rtx disp = NULL_RTX; /* Displacement. */
1504 code = GET_CODE (addr);
1507 /* Register indirect with auto increment/decrement. We don't
1508 allow SP here---push_operand should recognise an operand
1509 being pushed on the stack. */
1514 if (mode != QImode && mode != QFmode)
1518 base = XEXP (addr, 0);
1526 rtx op0 = XEXP (addr, 0);
1527 rtx op1 = XEXP (addr, 1);
1529 if (mode != QImode && mode != QFmode)
1533 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1535 base = XEXP (op1, 0);
1538 if (REG_P (XEXP (op1, 1)))
1539 indx = XEXP (op1, 1);
1541 disp = XEXP (op1, 1);
1545 /* Register indirect. */
1550 /* Register indirect with displacement or index. */
1553 rtx op0 = XEXP (addr, 0);
1554 rtx op1 = XEXP (addr, 1);
1555 enum rtx_code code0 = GET_CODE (op0);
1562 base = op0; /* Base + index. */
1564 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1572 base = op0; /* Base + displacement. */
1583 /* Direct addressing with DP register. */
1586 rtx op0 = XEXP (addr, 0);
1587 rtx op1 = XEXP (addr, 1);
1589 /* HImode and HFmode direct memory references aren't truly
1590 offsettable (consider case at end of data page). We
1591 probably get better code by loading a pointer and using an
1592 indirect memory reference. */
1593 if (mode == HImode || mode == HFmode)
1596 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1599 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1602 if (GET_CODE (op1) == CONST)
1608 /* Direct addressing with some work for the assembler... */
1610 /* Direct addressing. */
1613 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1615 /* These need to be converted to a LO_SUM (...).
1616 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1619 /* Do not allow direct memory access to absolute addresses.
1620 This is more pain than it's worth, especially for the
1621 small memory model where we can't guarantee that
1622 this address is within the data page---we don't want
1623 to modify the DP register in the small memory model,
1624 even temporarily, since an interrupt can sneak in.... */
1628 /* Indirect indirect addressing. */
1633 fatal_insn ("Using CONST_DOUBLE for address", addr);
1639 /* Validate the base register. */
1642 /* Check that the address is offsettable for HImode and HFmode. */
1643 if (indx && (mode == HImode || mode == HFmode))
1646 /* Handle DP based stuff. */
1647 if (REGNO (base) == DP_REGNO)
1649 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1651 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1655 /* Now validate the index register. */
1658 if (GET_CODE (indx) != REG)
1660 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1662 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1666 /* Validate displacement. */
1669 if (GET_CODE (disp) != CONST_INT)
1671 if (mode == HImode || mode == HFmode)
1673 /* The offset displacement must be legitimate. */
1674 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1679 if (! IS_DISP8_CONST (INTVAL (disp)))
1682 /* Can't add an index with a disp. */
1691 c4x_legitimize_address (orig, mode)
1692 rtx orig ATTRIBUTE_UNUSED;
1693 enum machine_mode mode ATTRIBUTE_UNUSED;
1695 if (GET_CODE (orig) == SYMBOL_REF
1696 || GET_CODE (orig) == LABEL_REF)
1698 if (mode == HImode || mode == HFmode)
1700 /* We need to force the address into
1701 a register so that it is offsettable. */
1702 rtx addr_reg = gen_reg_rtx (Pmode);
1703 emit_move_insn (addr_reg, orig);
1708 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1711 emit_insn (gen_set_ldp (dp_reg, orig));
1713 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1721 /* Provide the costs of an addressing mode that contains ADDR.
1722 If ADDR is not a valid address, its cost is irrelevant.
1723 This is used in cse and loop optimisation to determine
1724 if it is worthwhile storing a common address into a register.
1725 Unfortunately, the C4x address cost depends on other operands. */
1728 c4x_address_cost (addr)
1731 switch (GET_CODE (addr))
1742 /* These shouldn't be directly generated. */
1750 rtx op1 = XEXP (addr, 1);
1752 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1753 return TARGET_SMALL ? 3 : 4;
1755 if (GET_CODE (op1) == CONST)
1757 rtx offset = const0_rtx;
1759 op1 = eliminate_constant_term (op1, &offset);
1761 /* ??? These costs need rethinking... */
1762 if (GET_CODE (op1) == LABEL_REF)
1765 if (GET_CODE (op1) != SYMBOL_REF)
1768 if (INTVAL (offset) == 0)
1773 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1779 register rtx op0 = XEXP (addr, 0);
1780 register rtx op1 = XEXP (addr, 1);
1782 if (GET_CODE (op0) != REG)
1785 switch (GET_CODE (op1))
1791 /* This cost for REG+REG must be greater than the cost
1792 for REG if we want autoincrement addressing modes. */
1796 /* The following tries to improve GIV combination
1797 in strength reduce but appears not to help. */
1798 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1801 if (IS_DISP1_CONST (INTVAL (op1)))
1804 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
1819 c4x_gen_compare_reg (code, x, y)
1823 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1826 if (mode == CC_NOOVmode
1827 && (code == LE || code == GE || code == LT || code == GT))
1830 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1831 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1832 gen_rtx_COMPARE (mode, x, y)));
1837 c4x_output_cbranch (form, seq)
1846 static char str[100];
1850 delay = XVECEXP (final_sequence, 0, 1);
1851 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1852 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1853 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1856 cp = &str [strlen (str)];
1881 c4x_print_operand (file, op, letter)
1882 FILE *file; /* File to write to. */
1883 rtx op; /* Operand to print. */
1884 int letter; /* %<letter> or 0. */
1891 case '#': /* Delayed. */
1893 asm_fprintf (file, "d");
1897 code = GET_CODE (op);
1900 case 'A': /* Direct address. */
1901 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1902 asm_fprintf (file, "@");
1905 case 'H': /* Sethi. */
1906 output_addr_const (file, op);
1909 case 'I': /* Reversed condition. */
1910 code = reverse_condition (code);
1913 case 'L': /* Log 2 of constant. */
1914 if (code != CONST_INT)
1915 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1916 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1919 case 'N': /* Ones complement of small constant. */
1920 if (code != CONST_INT)
1921 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1922 fprintf (file, "%d", ~INTVAL (op));
1925 case 'K': /* Generate ldp(k) if direct address. */
1928 && GET_CODE (XEXP (op, 0)) == LO_SUM
1929 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1930 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1932 op1 = XEXP (XEXP (op, 0), 1);
1933 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1935 asm_fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1936 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1937 asm_fprintf (file, "\n");
1942 case 'M': /* Generate ldp(k) if direct address. */
1943 if (! TARGET_SMALL /* Only used in asm statements. */
1945 && (GET_CODE (XEXP (op, 0)) == CONST
1946 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1948 asm_fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1949 output_address (XEXP (op, 0));
1950 asm_fprintf (file, "\n\t");
1954 case 'O': /* Offset address. */
1955 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1957 else if (code == MEM)
1958 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1959 else if (code == REG)
1960 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1962 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1965 case 'C': /* Call. */
1968 case 'U': /* Call/callu. */
1969 if (code != SYMBOL_REF)
1970 asm_fprintf (file, "u");
1980 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1982 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1984 fprintf (file, "%s", reg_names[REGNO (op)]);
1988 output_address (XEXP (op, 0));
1996 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
1997 REAL_VALUE_TO_DECIMAL (r, "%20f", str);
1998 fprintf (file, "%s", str);
2003 fprintf (file, "%d", INTVAL (op));
2007 asm_fprintf (file, "ne");
2011 asm_fprintf (file, "eq");
2015 asm_fprintf (file, "ge");
2019 asm_fprintf (file, "gt");
2023 asm_fprintf (file, "le");
2027 asm_fprintf (file, "lt");
2031 asm_fprintf (file, "hs");
2035 asm_fprintf (file, "hi");
2039 asm_fprintf (file, "ls");
2043 asm_fprintf (file, "lo");
2047 output_addr_const (file, op);
2051 output_addr_const (file, XEXP (op, 0));
2058 fatal_insn ("c4x_print_operand: Bad operand case", op);
2065 c4x_print_operand_address (file, addr)
2069 switch (GET_CODE (addr))
2072 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2076 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2080 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2085 rtx op0 = XEXP (XEXP (addr, 1), 0);
2086 rtx op1 = XEXP (XEXP (addr, 1), 1);
2088 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2089 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2090 reg_names[REGNO (op1)]);
2091 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2092 fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
2094 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2095 fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
2097 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2098 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2099 reg_names[REGNO (op1)]);
2101 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2107 rtx op0 = XEXP (XEXP (addr, 1), 0);
2108 rtx op1 = XEXP (XEXP (addr, 1), 1);
2110 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2111 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2112 reg_names[REGNO (op1)]);
2113 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2114 fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
2116 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2117 fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
2119 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2120 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2121 reg_names[REGNO (op1)]);
2123 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2128 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2132 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2135 case PLUS: /* Indirect with displacement. */
2137 rtx op0 = XEXP (addr, 0);
2138 rtx op1 = XEXP (addr, 1);
2144 if (IS_INDEX_REG (op0))
2146 fprintf (file, "*+%s(%s)",
2147 reg_names[REGNO (op1)],
2148 reg_names[REGNO (op0)]); /* Index + base. */
2152 fprintf (file, "*+%s(%s)",
2153 reg_names[REGNO (op0)],
2154 reg_names[REGNO (op1)]); /* Base + index. */
2157 else if (INTVAL (op1) < 0)
2159 fprintf (file, "*-%s(%d)",
2160 reg_names[REGNO (op0)],
2161 -INTVAL (op1)); /* Base - displacement. */
2165 fprintf (file, "*+%s(%d)",
2166 reg_names[REGNO (op0)],
2167 INTVAL (op1)); /* Base + displacement. */
2171 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2177 rtx op0 = XEXP (addr, 0);
2178 rtx op1 = XEXP (addr, 1);
2180 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2181 c4x_print_operand_address (file, op1);
2183 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2190 fprintf (file, "@");
2191 output_addr_const (file, addr);
2194 /* We shouldn't access CONST_INT addresses. */
2198 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2204 /* Return nonzero if the floating point operand will fit
2205 in the immediate field. */
2208 c4x_immed_float_p (op)
2215 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2216 if (GET_MODE (op) == HFmode)
2217 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2220 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2224 /* Sign extend exponent. */
2225 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2226 if (exponent == -128)
2228 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2229 return 0; /* Precision doesn't fit. */
2230 return (exponent <= 7) /* Positive exp. */
2231 && (exponent >= -7); /* Negative exp. */
2235 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2236 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2238 None of the last four instructions from the bottom of the block can
2239 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2240 BcondAT or RETIcondD.
2242 This routine scans the four previous insns for a jump insn, and if
2243 one is found, returns 1 so that we bung in a nop instruction.
2244 This simple minded strategy will add a nop, when it may not
2245 be required. Say when there is a JUMP_INSN near the end of the
2246 block that doesn't get converted into a delayed branch.
2248 Note that we cannot have a call insn, since we don't generate
2249 repeat loops with calls in them (although I suppose we could, but
2250 there's no benefit.)
2252 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2255 c4x_rptb_nop_p (insn)
2261 /* Extract the start label from the jump pattern (rptb_end). */
2262 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2264 /* If there is a label at the end of the loop we must insert
2267 insn = previous_insn (insn);
2268 } while (GET_CODE (insn) == NOTE
2269 || GET_CODE (insn) == USE
2270 || GET_CODE (insn) == CLOBBER);
2271 if (GET_CODE (insn) == CODE_LABEL)
2274 for (i = 0; i < 4; i++)
2276 /* Search back for prev non-note and non-label insn. */
2277 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2278 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2280 if (insn == start_label)
2283 insn = previous_insn (insn);
2286 /* If we have a jump instruction we should insert a NOP. If we
2287 hit repeat block top we should only insert a NOP if the loop
2289 if (GET_CODE (insn) == JUMP_INSN)
2291 insn = previous_insn (insn);
2297 /* The C4x looping instruction needs to be emitted at the top of the
2298 loop. Emitting the true RTL for a looping instruction at the top of
2299 the loop can cause problems with flow analysis. So instead, a dummy
2300 doloop insn is emitted at the end of the loop. This routine checks
2301 for the presence of this doloop insn and then searches back to the
2302 top of the loop, where it inserts the true looping insn (provided
2303 there are no instructions in the loop which would cause problems).
2304 Any additional labels can be emitted at this point. In addition, if
2305 the desired loop count register was not allocated, this routine does
2308 Before we can create a repeat block looping instruction we have to
2309 verify that there are no jumps outside the loop and no jumps outside
2310 the loop go into this loop. This can happen in the basic blocks reorder
2311 pass. The C4x cpu can not handle this. */
2314 c4x_label_ref_used_p (x, code_label)
2324 code = GET_CODE (x);
2325 if (code == LABEL_REF)
2326 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2328 fmt = GET_RTX_FORMAT (code);
2329 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2333 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2336 else if (fmt[i] == 'E')
2337 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2338 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2346 c4x_rptb_valid_p (insn, start_label)
2347 rtx insn, start_label;
2353 /* Find the start label. */
2354 for (; insn; insn = PREV_INSN (insn))
2355 if (insn == start_label)
2358 /* Note found then we can not use a rptb or rpts. The label was
2359 probably moved by the basic block reorder pass. */
2364 /* If any jump jumps inside this block then we must fail. */
2365 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2367 if (GET_CODE (insn) == CODE_LABEL)
2369 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2370 if (GET_CODE (tmp) == JUMP_INSN
2371 && c4x_label_ref_used_p (tmp, insn))
2375 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2377 if (GET_CODE (insn) == CODE_LABEL)
2379 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2380 if (GET_CODE (tmp) == JUMP_INSN
2381 && c4x_label_ref_used_p (tmp, insn))
2385 /* If any jump jumps outside this block then we must fail. */
2386 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2388 if (GET_CODE (insn) == CODE_LABEL)
2390 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2391 if (GET_CODE (tmp) == JUMP_INSN
2392 && c4x_label_ref_used_p (tmp, insn))
2394 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2395 if (GET_CODE (tmp) == JUMP_INSN
2396 && c4x_label_ref_used_p (tmp, insn))
2401 /* All checks OK. */
2407 c4x_rptb_insert (insn)
2412 rtx new_start_label;
2415 /* If the count register has not been allocated to RC, say if
2416 there is a movstr pattern in the loop, then do not insert a
2417 RPTB instruction. Instead we emit a decrement and branch
2418 at the end of the loop. */
2419 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2420 if (REGNO (count_reg) != RC_REGNO)
2423 /* Extract the start label from the jump pattern (rptb_end). */
2424 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2426 if (! c4x_rptb_valid_p (insn, start_label))
2428 /* We can not use the rptb insn. Replace it so reorg can use
2429 the delay slots of the jump insn. */
2430 emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
2431 emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
2432 emit_insn_before (gen_bge (start_label), insn);
2433 LABEL_NUSES (start_label)++;
2438 end_label = gen_label_rtx ();
2439 LABEL_NUSES (end_label)++;
2440 emit_label_after (end_label, insn);
2442 new_start_label = gen_label_rtx ();
2443 LABEL_NUSES (new_start_label)++;
2445 for (; insn; insn = PREV_INSN (insn))
2447 if (insn == start_label)
2449 if (GET_CODE (insn) == JUMP_INSN &&
2450 JUMP_LABEL (insn) == start_label)
2451 redirect_jump (insn, new_start_label, 0);
2454 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2456 emit_label_after (new_start_label, insn);
2458 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2459 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2461 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2462 if (LABEL_NUSES (start_label) == 0)
2463 delete_insn (start_label);
2467 /* This function is a C4x special called immediately before delayed
2468 branch scheduling. We fix up RTPB style loops that didn't get RC
2469 allocated as the loop counter. */
2472 c4x_process_after_reload (first)
2477 for (insn = first; insn; insn = NEXT_INSN (insn))
2479 /* Look for insn. */
2482 int insn_code_number;
2485 insn_code_number = recog_memoized (insn);
2487 if (insn_code_number < 0)
2490 /* Insert the RTX for RPTB at the top of the loop
2491 and a label at the end of the loop. */
2492 if (insn_code_number == CODE_FOR_rptb_end)
2493 c4x_rptb_insert(insn);
2495 /* We need to split the insn here. Otherwise the calls to
2496 force_const_mem will not work for load_immed_address. */
2499 /* Don't split the insn if it has been deleted. */
2500 if (! INSN_DELETED_P (old))
2501 insn = try_split (PATTERN(old), old, 1);
2503 /* When not optimizing, the old insn will be still left around
2504 with only the 'deleted' bit set. Transform it into a note
2505 to avoid confusion of subsequent processing. */
2506 if (INSN_DELETED_P (old))
2508 PUT_CODE (old, NOTE);
2509 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2510 NOTE_SOURCE_FILE (old) = 0;
2521 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2529 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2534 c4x_immed_int_constant (op)
2537 if (GET_CODE (op) != CONST_INT)
2540 return GET_MODE (op) == VOIDmode
2541 || GET_MODE_CLASS (op) == MODE_INT
2542 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
2547 c4x_immed_float_constant (op)
2550 if (GET_CODE (op) != CONST_DOUBLE)
2553 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2554 present this only means that a MEM rtx has been generated. It does
2555 not mean the rtx is really in memory. */
2557 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2562 c4x_shiftable_constant (op)
2567 int val = INTVAL (op);
2569 for (i = 0; i < 16; i++)
2574 mask = ((0xffff >> i) << 16) | 0xffff;
2575 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2576 : (val >> i) & mask))
2586 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2594 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2604 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2612 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2614 return IS_INT5_CONST (INTVAL (op));
2622 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2630 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2638 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2642 /* The constraints do not have to check the register class,
2643 except when needed to discriminate between the constraints.
2644 The operand has been checked by the predicates to be valid. */
2646 /* ARx + 9-bit signed const or IRn
2647 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2648 We don't include the pre/post inc/dec forms here since
2649 they are handled by the <> constraints. */
2652 c4x_Q_constraint (op)
2655 enum machine_mode mode = GET_MODE (op);
2657 if (GET_CODE (op) != MEM)
2660 switch (GET_CODE (op))
2667 rtx op0 = XEXP (op, 0);
2668 rtx op1 = XEXP (op, 1);
2676 if (GET_CODE (op1) != CONST_INT)
2679 /* HImode and HFmode must be offsettable. */
2680 if (mode == HImode || mode == HFmode)
2681 return IS_DISP8_OFF_CONST (INTVAL (op1));
2683 return IS_DISP8_CONST (INTVAL (op1));
2694 /* ARx + 5-bit unsigned const
2695 *ARx, *+ARx(n) for n < 32. */
2698 c4x_R_constraint (op)
2701 enum machine_mode mode = GET_MODE (op);
2705 if (GET_CODE (op) != MEM)
2708 switch (GET_CODE (op))
2715 rtx op0 = XEXP (op, 0);
2716 rtx op1 = XEXP (op, 1);
2721 if (GET_CODE (op1) != CONST_INT)
2724 /* HImode and HFmode must be offsettable. */
2725 if (mode == HImode || mode == HFmode)
2726 return IS_UINT5_CONST (INTVAL (op1) + 1);
2728 return IS_UINT5_CONST (INTVAL (op1));
2743 enum machine_mode mode = GET_MODE (op);
2745 if (TARGET_C3X || GET_CODE (op) != MEM)
2749 switch (GET_CODE (op))
2752 return IS_ADDR_OR_PSEUDO_REG (op);
2756 rtx op0 = XEXP (op, 0);
2757 rtx op1 = XEXP (op, 1);
2759 /* HImode and HFmode must be offsettable. */
2760 if (mode == HImode || mode == HFmode)
2761 return IS_ADDR_OR_PSEUDO_REG (op0)
2762 && GET_CODE (op1) == CONST_INT
2763 && IS_UINT5_CONST (INTVAL (op1) + 1);
2766 && IS_ADDR_OR_PSEUDO_REG (op0)
2767 && GET_CODE (op1) == CONST_INT
2768 && IS_UINT5_CONST (INTVAL (op1));
2779 /* ARx + 1-bit unsigned const or IRn
2780 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2781 We don't include the pre/post inc/dec forms here since
2782 they are handled by the <> constraints. */
2785 c4x_S_constraint (op)
2788 enum machine_mode mode = GET_MODE (op);
2789 if (GET_CODE (op) != MEM)
2792 switch (GET_CODE (op))
2800 rtx op0 = XEXP (op, 0);
2801 rtx op1 = XEXP (op, 1);
2803 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2804 || (op0 != XEXP (op1, 0)))
2807 op0 = XEXP (op1, 0);
2808 op1 = XEXP (op1, 1);
2809 return REG_P (op0) && REG_P (op1);
2810 /* Pre or post_modify with a displacement of 0 or 1
2811 should not be generated. */
2817 rtx op0 = XEXP (op, 0);
2818 rtx op1 = XEXP (op, 1);
2826 if (GET_CODE (op1) != CONST_INT)
2829 /* HImode and HFmode must be offsettable. */
2830 if (mode == HImode || mode == HFmode)
2831 return IS_DISP1_OFF_CONST (INTVAL (op1));
2833 return IS_DISP1_CONST (INTVAL (op1));
2848 enum machine_mode mode = GET_MODE (op);
2849 if (GET_CODE (op) != MEM)
2853 switch (GET_CODE (op))
2857 if (mode != QImode && mode != QFmode)
2864 return IS_ADDR_OR_PSEUDO_REG (op);
2869 rtx op0 = XEXP (op, 0);
2870 rtx op1 = XEXP (op, 1);
2872 if (mode != QImode && mode != QFmode)
2875 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2876 || (op0 != XEXP (op1, 0)))
2879 op0 = XEXP (op1, 0);
2880 op1 = XEXP (op1, 1);
2881 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2882 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2883 /* Pre or post_modify with a displacement of 0 or 1
2884 should not be generated. */
2889 rtx op0 = XEXP (op, 0);
2890 rtx op1 = XEXP (op, 1);
2894 /* HImode and HFmode must be offsettable. */
2895 if (mode == HImode || mode == HFmode)
2896 return IS_ADDR_OR_PSEUDO_REG (op0)
2897 && GET_CODE (op1) == CONST_INT
2898 && IS_DISP1_OFF_CONST (INTVAL (op1));
2901 return (IS_INDEX_OR_PSEUDO_REG (op1)
2902 && IS_ADDR_OR_PSEUDO_REG (op0))
2903 || (IS_ADDR_OR_PSEUDO_REG (op1)
2904 && IS_INDEX_OR_PSEUDO_REG (op0));
2906 return IS_ADDR_OR_PSEUDO_REG (op0)
2907 && GET_CODE (op1) == CONST_INT
2908 && IS_DISP1_CONST (INTVAL (op1));
2920 /* Direct memory operand. */
2923 c4x_T_constraint (op)
2926 if (GET_CODE (op) != MEM)
2930 if (GET_CODE (op) != LO_SUM)
2932 /* Allow call operands. */
2933 return GET_CODE (op) == SYMBOL_REF
2934 && GET_MODE (op) == Pmode
2935 && SYMBOL_REF_FLAG (op);
2938 /* HImode and HFmode are not offsettable. */
2939 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2942 if ((GET_CODE (XEXP (op, 0)) == REG)
2943 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2944 return c4x_U_constraint (XEXP (op, 1));
2950 /* Symbolic operand. */
2953 c4x_U_constraint (op)
2956 /* Don't allow direct addressing to an arbitrary constant. */
2957 return GET_CODE (op) == CONST
2958 || GET_CODE (op) == SYMBOL_REF
2959 || GET_CODE (op) == LABEL_REF;
2964 c4x_autoinc_operand (op, mode)
2966 enum machine_mode mode ATTRIBUTE_UNUSED;
2968 if (GET_CODE (op) == MEM)
2970 enum rtx_code code = GET_CODE (XEXP (op, 0));
2976 || code == PRE_MODIFY
2977 || code == POST_MODIFY
2985 /* Match any operand. */
2988 any_operand (op, mode)
2989 register rtx op ATTRIBUTE_UNUSED;
2990 enum machine_mode mode ATTRIBUTE_UNUSED;
2996 /* Nonzero if OP is a floating point value with value 0.0. */
2999 fp_zero_operand (op, mode)
3001 enum machine_mode mode ATTRIBUTE_UNUSED;
3005 if (GET_CODE (op) != CONST_DOUBLE)
3007 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
3008 return REAL_VALUES_EQUAL (r, dconst0);
3013 const_operand (op, mode)
3015 register enum machine_mode mode;
3021 if (GET_CODE (op) != CONST_DOUBLE
3022 || GET_MODE (op) != mode
3023 || GET_MODE_CLASS (mode) != MODE_FLOAT)
3026 return c4x_immed_float_p (op);
3032 if (GET_CODE (op) == CONSTANT_P_RTX)
3035 if (GET_CODE (op) != CONST_INT
3036 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
3037 || GET_MODE_CLASS (mode) != MODE_INT)
3040 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
3052 stik_const_operand (op, mode)
3054 enum machine_mode mode ATTRIBUTE_UNUSED;
3056 return c4x_K_constant (op);
3061 not_const_operand (op, mode)
3063 enum machine_mode mode ATTRIBUTE_UNUSED;
3065 return c4x_N_constant (op);
3070 reg_operand (op, mode)
3072 enum machine_mode mode;
3074 if (GET_CODE (op) == SUBREG
3075 && GET_MODE (op) == QFmode)
3077 return register_operand (op, mode);
3082 mixed_subreg_operand (op, mode)
3084 enum machine_mode mode ATTRIBUTE_UNUSED;
3086 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3087 int and a long double. */
3088 if (GET_CODE (op) == SUBREG
3089 && (GET_MODE (op) == QFmode)
3090 && (GET_MODE (SUBREG_REG (op)) == QImode
3091 || GET_MODE (SUBREG_REG (op)) == HImode))
3098 reg_imm_operand (op, mode)
3100 enum machine_mode mode ATTRIBUTE_UNUSED;
3102 if (REG_P (op) || CONSTANT_P (op))
3109 not_modify_reg (op, mode)
3111 enum machine_mode mode ATTRIBUTE_UNUSED;
3113 if (REG_P (op) || CONSTANT_P (op))
3115 if (GET_CODE (op) != MEM)
3118 switch (GET_CODE (op))
3125 rtx op0 = XEXP (op, 0);
3126 rtx op1 = XEXP (op, 1);
3131 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3137 rtx op0 = XEXP (op, 0);
3139 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3157 not_rc_reg (op, mode)
3159 enum machine_mode mode ATTRIBUTE_UNUSED;
3161 if (REG_P (op) && REGNO (op) == RC_REGNO)
3167 /* Extended precision register R0-R1. */
3170 r0r1_reg_operand (op, mode)
3172 enum machine_mode mode;
3174 if (! reg_operand (op, mode))
3176 if (GET_CODE (op) == SUBREG)
3177 op = SUBREG_REG (op);
3178 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3182 /* Extended precision register R2-R3. */
3185 r2r3_reg_operand (op, mode)
3187 enum machine_mode mode;
3189 if (! reg_operand (op, mode))
3191 if (GET_CODE (op) == SUBREG)
3192 op = SUBREG_REG (op);
3193 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3197 /* Low extended precision register R0-R7. */
3200 ext_low_reg_operand (op, mode)
3202 enum machine_mode mode;
3204 if (! reg_operand (op, mode))
3206 if (GET_CODE (op) == SUBREG)
3207 op = SUBREG_REG (op);
3208 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3212 /* Extended precision register. */
3215 ext_reg_operand (op, mode)
3217 enum machine_mode mode;
3219 if (! reg_operand (op, mode))
3221 if (GET_CODE (op) == SUBREG)
3222 op = SUBREG_REG (op);
3225 return IS_EXT_OR_PSEUDO_REG (op);
3229 /* Standard precision register. */
3232 std_reg_operand (op, mode)
3234 enum machine_mode mode;
3236 if (! reg_operand (op, mode))
3238 if (GET_CODE (op) == SUBREG)
3239 op = SUBREG_REG (op);
3240 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3243 /* Standard precision or normal register. */
3246 std_or_reg_operand (op, mode)
3248 enum machine_mode mode;
3250 if (reload_in_progress)
3251 return std_reg_operand (op, mode);
3252 return reg_operand (op, mode);
3255 /* Address register. */
3258 addr_reg_operand (op, mode)
3260 enum machine_mode mode;
3262 if (! reg_operand (op, mode))
3264 return c4x_a_register (op);
3268 /* Index register. */
3271 index_reg_operand (op, mode)
3273 enum machine_mode mode;
3275 if (! reg_operand (op, mode))
3277 if (GET_CODE (op) == SUBREG)
3278 op = SUBREG_REG (op);
3279 return c4x_x_register (op);
3286 dp_reg_operand (op, mode)
3288 enum machine_mode mode ATTRIBUTE_UNUSED;
3290 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3297 sp_reg_operand (op, mode)
3299 enum machine_mode mode ATTRIBUTE_UNUSED;
3301 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3308 st_reg_operand (op, mode)
3310 enum machine_mode mode ATTRIBUTE_UNUSED;
3312 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3319 rc_reg_operand (op, mode)
3321 enum machine_mode mode ATTRIBUTE_UNUSED;
3323 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3328 call_address_operand (op, mode)
3330 enum machine_mode mode ATTRIBUTE_UNUSED;
3332 return (REG_P (op) || symbolic_address_operand (op, mode));
3336 /* Symbolic address operand. */
3339 symbolic_address_operand (op, mode)
3341 enum machine_mode mode ATTRIBUTE_UNUSED;
3343 switch (GET_CODE (op))
3355 /* Check dst operand of a move instruction. */
3358 dst_operand (op, mode)
3360 enum machine_mode mode;
3362 if (GET_CODE (op) == SUBREG
3363 && mixed_subreg_operand (op, mode))
3367 return reg_operand (op, mode);
3369 return nonimmediate_operand (op, mode);
3373 /* Check src operand of two operand arithmetic instructions. */
3376 src_operand (op, mode)
3378 enum machine_mode mode;
3380 if (GET_CODE (op) == SUBREG
3381 && mixed_subreg_operand (op, mode))
3385 return reg_operand (op, mode);
3387 if (mode == VOIDmode)
3388 mode = GET_MODE (op);
3390 if (GET_CODE (op) == CONST_INT)
3391 return (mode == QImode || mode == Pmode || mode == HImode)
3392 && c4x_I_constant (op);
3394 /* We don't like CONST_DOUBLE integers. */
3395 if (GET_CODE (op) == CONST_DOUBLE)
3396 return c4x_H_constant (op);
3398 /* Disallow symbolic addresses. Only the predicate
3399 symbolic_address_operand will match these. */
3400 if (GET_CODE (op) == SYMBOL_REF
3401 || GET_CODE (op) == LABEL_REF
3402 || GET_CODE (op) == CONST)
3405 /* If TARGET_LOAD_DIRECT_MEMS is non-zero, disallow direct memory
3406 access to symbolic addresses. These operands will get forced
3407 into a register and the movqi expander will generate a
3408 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is non-zero. */
3409 if (GET_CODE (op) == MEM
3410 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3411 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3412 || GET_CODE (XEXP (op, 0)) == CONST)))
3413 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3415 return general_operand (op, mode);
3420 src_hi_operand (op, mode)
3422 enum machine_mode mode;
3424 if (c4x_O_constant (op))
3426 return src_operand (op, mode);
3430 /* Check src operand of two operand logical instructions. */
3433 lsrc_operand (op, mode)
3435 enum machine_mode mode;
3437 if (mode == VOIDmode)
3438 mode = GET_MODE (op);
3440 if (mode != QImode && mode != Pmode)
3441 fatal_insn ("Mode not QImode", op);
3443 if (GET_CODE (op) == CONST_INT)
3444 return c4x_L_constant (op) || c4x_J_constant (op);
3446 return src_operand (op, mode);
3450 /* Check src operand of two operand tricky instructions. */
3453 tsrc_operand (op, mode)
3455 enum machine_mode mode;
3457 if (mode == VOIDmode)
3458 mode = GET_MODE (op);
3460 if (mode != QImode && mode != Pmode)
3461 fatal_insn ("Mode not QImode", op);
3463 if (GET_CODE (op) == CONST_INT)
3464 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3466 return src_operand (op, mode);
3471 reg_or_const_operand (op, mode)
3473 enum machine_mode mode;
3475 return reg_operand (op, mode) || const_operand (op, mode);
3479 /* Check for indirect operands allowable in parallel instruction. */
3482 par_ind_operand (op, mode)
3484 enum machine_mode mode;
3486 if (mode != VOIDmode && mode != GET_MODE (op))
3489 return c4x_S_indirect (op);
3493 /* Check for operands allowable in parallel instruction. */
3496 parallel_operand (op, mode)
3498 enum machine_mode mode;
3500 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3505 c4x_S_address_parse (op, base, incdec, index, disp)
3517 if (GET_CODE (op) != MEM)
3518 fatal_insn ("Invalid indirect memory address", op);
3521 switch (GET_CODE (op))
3524 *base = REGNO (XEXP (op, 0));
3530 *base = REGNO (XEXP (op, 0));
3536 *base = REGNO (XEXP (op, 0));
3542 *base = REGNO (XEXP (op, 0));
3548 *base = REGNO (XEXP (op, 0));
3549 if (REG_P (XEXP (XEXP (op, 1), 1)))
3551 *index = REGNO (XEXP (XEXP (op, 1), 1));
3552 *disp = 0; /* ??? */
3555 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3560 *base = REGNO (XEXP (op, 0));
3561 if (REG_P (XEXP (XEXP (op, 1), 1)))
3563 *index = REGNO (XEXP (XEXP (op, 1), 1));
3564 *disp = 1; /* ??? */
3567 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3578 rtx op0 = XEXP (op, 0);
3579 rtx op1 = XEXP (op, 1);
3581 if (c4x_a_register (op0))
3583 if (c4x_x_register (op1))
3585 *base = REGNO (op0);
3586 *index = REGNO (op1);
3589 else if ((GET_CODE (op1) == CONST_INT
3590 && IS_DISP1_CONST (INTVAL (op1))))
3592 *base = REGNO (op0);
3593 *disp = INTVAL (op1);
3597 else if (c4x_x_register (op0) && c4x_a_register (op1))
3599 *base = REGNO (op1);
3600 *index = REGNO (op0);
3607 fatal_insn ("Invalid indirect (S) memory address", op);
3613 c4x_address_conflict (op0, op1, store0, store1)
3628 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3631 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3632 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3634 if (store0 && store1)
3636 /* If we have two stores in parallel to the same address, then
3637 the C4x only executes one of the stores. This is unlikely to
3638 cause problems except when writing to a hardware device such
3639 as a FIFO since the second write will be lost. The user
3640 should flag the hardware location as being volatile so that
3641 we don't do this optimisation. While it is unlikely that we
3642 have an aliased address if both locations are not marked
3643 volatile, it is probably safer to flag a potential conflict
3644 if either location is volatile. */
3645 if (! flag_argument_noalias)
3647 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3652 /* If have a parallel load and a store to the same address, the load
3653 is performed first, so there is no conflict. Similarly, there is
3654 no conflict if have parallel loads from the same address. */
3656 /* Cannot use auto increment or auto decrement twice for same
3658 if (base0 == base1 && incdec0 && incdec0)
3661 /* It might be too confusing for GCC if we have use a base register
3662 with a side effect and a memory reference using the same register
3664 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3667 /* We can not optimize the case where op1 and op2 refer to the same
3669 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3677 /* Check for while loop inside a decrement and branch loop. */
3680 c4x_label_conflict (insn, jump, db)
3687 if (GET_CODE (insn) == CODE_LABEL)
3689 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3691 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3694 insn = PREV_INSN (insn);
3700 /* Validate combination of operands for parallel load/store instructions. */
3703 valid_parallel_load_store (operands, mode)
3705 enum machine_mode mode ATTRIBUTE_UNUSED;
3707 rtx op0 = operands[0];
3708 rtx op1 = operands[1];
3709 rtx op2 = operands[2];
3710 rtx op3 = operands[3];
3712 if (GET_CODE (op0) == SUBREG)
3713 op0 = SUBREG_REG (op0);
3714 if (GET_CODE (op1) == SUBREG)
3715 op1 = SUBREG_REG (op1);
3716 if (GET_CODE (op2) == SUBREG)
3717 op2 = SUBREG_REG (op2);
3718 if (GET_CODE (op3) == SUBREG)
3719 op3 = SUBREG_REG (op3);
3721 /* The patterns should only allow ext_low_reg_operand() or
3722 par_ind_operand() operands. Thus of the 4 operands, only 2
3723 should be REGs and the other 2 should be MEMs. */
3725 /* This test prevents the multipack pass from using this pattern if
3726 op0 is used as an index or base register in op2 or op3, since
3727 this combination will require reloading. */
3728 if (GET_CODE (op0) == REG
3729 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3730 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3734 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3735 return (REGNO (op0) != REGNO (op2))
3736 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3737 && ! c4x_address_conflict (op1, op3, 0, 0);
3740 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3741 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3742 && ! c4x_address_conflict (op0, op2, 1, 1);
3745 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3746 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3747 && ! c4x_address_conflict (op1, op2, 0, 1);
3750 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3751 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3752 && ! c4x_address_conflict (op0, op3, 1, 0);
3759 valid_parallel_operands_4 (operands, mode)
3761 enum machine_mode mode ATTRIBUTE_UNUSED;
3763 rtx op0 = operands[0];
3764 rtx op2 = operands[2];
3766 if (GET_CODE (op0) == SUBREG)
3767 op0 = SUBREG_REG (op0);
3768 if (GET_CODE (op2) == SUBREG)
3769 op2 = SUBREG_REG (op2);
3771 /* This test prevents the multipack pass from using this pattern if
3772 op0 is used as an index or base register in op2, since this combination
3773 will require reloading. */
3774 if (GET_CODE (op0) == REG
3775 && GET_CODE (op2) == MEM
3776 && reg_mentioned_p (op0, XEXP (op2, 0)))
3784 valid_parallel_operands_5 (operands, mode)
3786 enum machine_mode mode ATTRIBUTE_UNUSED;
3789 rtx op0 = operands[0];
3790 rtx op1 = operands[1];
3791 rtx op2 = operands[2];
3792 rtx op3 = operands[3];
3794 if (GET_CODE (op0) == SUBREG)
3795 op0 = SUBREG_REG (op0);
3796 if (GET_CODE (op1) == SUBREG)
3797 op1 = SUBREG_REG (op1);
3798 if (GET_CODE (op2) == SUBREG)
3799 op2 = SUBREG_REG (op2);
3801 /* The patterns should only allow ext_low_reg_operand() or
3802 par_ind_operand() operands. Operands 1 and 2 may be commutative
3803 but only one of them can be a register. */
3804 if (GET_CODE (op1) == REG)
3806 if (GET_CODE (op2) == REG)
3812 /* This test prevents the multipack pass from using this pattern if
3813 op0 is used as an index or base register in op3, since this combination
3814 will require reloading. */
3815 if (GET_CODE (op0) == REG
3816 && GET_CODE (op3) == MEM
3817 && reg_mentioned_p (op0, XEXP (op3, 0)))
3825 valid_parallel_operands_6 (operands, mode)
3827 enum machine_mode mode ATTRIBUTE_UNUSED;
3830 rtx op0 = operands[0];
3831 rtx op1 = operands[1];
3832 rtx op2 = operands[2];
3833 rtx op4 = operands[4];
3834 rtx op5 = operands[5];
3836 if (GET_CODE (op1) == SUBREG)
3837 op1 = SUBREG_REG (op1);
3838 if (GET_CODE (op2) == SUBREG)
3839 op2 = SUBREG_REG (op2);
3840 if (GET_CODE (op4) == SUBREG)
3841 op4 = SUBREG_REG (op4);
3842 if (GET_CODE (op5) == SUBREG)
3843 op5 = SUBREG_REG (op5);
3845 /* The patterns should only allow ext_low_reg_operand() or
3846 par_ind_operand() operands. Thus of the 4 input operands, only 2
3847 should be REGs and the other 2 should be MEMs. */
3849 if (GET_CODE (op1) == REG)
3851 if (GET_CODE (op2) == REG)
3853 if (GET_CODE (op4) == REG)
3855 if (GET_CODE (op5) == REG)
3858 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3859 Perhaps we should count the MEMs as well? */
3863 /* This test prevents the multipack pass from using this pattern if
3864 op0 is used as an index or base register in op4 or op5, since
3865 this combination will require reloading. */
3866 if (GET_CODE (op0) == REG
3867 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3868 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3875 /* Validate combination of src operands. Note that the operands have
3876 been screened by the src_operand predicate. We just have to check
3877 that the combination of operands is valid. If FORCE is set, ensure
3878 that the destination regno is valid if we have a 2 operand insn. */
3881 c4x_valid_operands (code, operands, mode, force)
3884 enum machine_mode mode ATTRIBUTE_UNUSED;
3889 enum rtx_code code1;
3890 enum rtx_code code2;
3892 if (code == COMPARE)
3903 if (GET_CODE (op1) == SUBREG)
3904 op1 = SUBREG_REG (op1);
3905 if (GET_CODE (op2) == SUBREG)
3906 op2 = SUBREG_REG (op2);
3908 code1 = GET_CODE (op1);
3909 code2 = GET_CODE (op2);
3911 if (code1 == REG && code2 == REG)
3914 if (code1 == MEM && code2 == MEM)
3916 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3918 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3929 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3934 if (! c4x_H_constant (op2))
3938 /* Any valid memory operand screened by src_operand is OK. */
3941 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3942 into a stack slot memory address comprising a PLUS and a
3948 fatal_insn ("c4x_valid_operands: Internal error", op2);
3952 /* Check that we have a valid destination register for a two operand
3954 return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
3957 /* We assume MINUS is commutative since the subtract patterns
3958 also support the reverse subtract instructions. Since op1
3959 is not a register, and op2 is a register, op1 can only
3960 be a restricted memory operand for a shift instruction. */
3961 if (code == ASHIFTRT || code == LSHIFTRT
3962 || code == ASHIFT || code == COMPARE)
3964 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3969 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3974 if (! c4x_H_constant (op1))
3978 /* Any valid memory operand screened by src_operand is OK. */
3986 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3987 into a stack slot memory address comprising a PLUS and a
3997 /* Check that we have a valid destination register for a two operand
3999 return ! force || REGNO (op1) == REGNO (operands[0]);
4003 int valid_operands (code, operands, mode)
4006 enum machine_mode mode;
4009 /* If we are not optimizing then we have to let anything go and let
4010 reload fix things up. instantiate_decl in function.c can produce
4011 invalid insns by changing the offset of a memory operand from a
4012 valid one into an invalid one, when the second operand is also a
4013 memory operand. The alternative is not to allow two memory
4014 operands for an insn when not optimizing. The problem only rarely
4015 occurs, for example with the C-torture program DFcmp.c. */
4017 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
4022 legitimize_operands (code, operands, mode)
4025 enum machine_mode mode;
4027 /* Compare only has 2 operands. */
4028 if (code == COMPARE)
4030 /* During RTL generation, force constants into pseudos so that
4031 they can get hoisted out of loops. This will tie up an extra
4032 register but can save an extra cycle. Only do this if loop
4033 optimisation enabled. (We cannot pull this trick for add and
4034 sub instructions since the flow pass won't find
4035 autoincrements etc.) This allows us to generate compare
4036 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4037 of LDI *AR0++, R0; CMPI 42, R0.
4039 Note that expand_binops will try to load an expensive constant
4040 into a register if it is used within a loop. Unfortunately,
4041 the cost mechanism doesn't allow us to look at the other
4042 operand to decide whether the constant is expensive. */
4044 if (! reload_in_progress
4047 && GET_CODE (operands[1]) == CONST_INT
4048 && preserve_subexpressions_p ()
4049 && rtx_cost (operands[1], code) > 1)
4050 operands[1] = force_reg (mode, operands[1]);
4052 if (! reload_in_progress
4053 && ! c4x_valid_operands (code, operands, mode, 0))
4054 operands[0] = force_reg (mode, operands[0]);
4058 /* We cannot do this for ADDI/SUBI insns since we will
4059 defeat the flow pass from finding autoincrement addressing
4061 if (! reload_in_progress
4062 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
4065 && GET_CODE (operands[2]) == CONST_INT
4066 && preserve_subexpressions_p ()
4067 && rtx_cost (operands[2], code) > 1)
4068 operands[2] = force_reg (mode, operands[2]);
4070 /* We can get better code on a C30 if we force constant shift counts
4071 into a register. This way they can get hoisted out of loops,
4072 tying up a register, but saving an instruction. The downside is
4073 that they may get allocated to an address or index register, and
4074 thus we will get a pipeline conflict if there is a nearby
4075 indirect address using an address register.
4077 Note that expand_binops will not try to load an expensive constant
4078 into a register if it is used within a loop for a shift insn. */
4080 if (! reload_in_progress
4081 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
4083 /* If the operand combination is invalid, we force operand1 into a
4084 register, preventing reload from having doing to do this at a
4086 operands[1] = force_reg (mode, operands[1]);
4089 emit_move_insn (operands[0], operands[1]);
4090 operands[1] = copy_rtx (operands[0]);
4094 /* Just in case... */
4095 if (! c4x_valid_operands (code, operands, mode, 0))
4096 operands[2] = force_reg (mode, operands[2]);
4100 /* Right shifts require a negative shift count, but GCC expects
4101 a positive count, so we emit a NEG. */
4102 if ((code == ASHIFTRT || code == LSHIFTRT)
4103 && (GET_CODE (operands[2]) != CONST_INT))
4104 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
4110 /* The following predicates are used for instruction scheduling. */
4113 group1_reg_operand (op, mode)
4115 enum machine_mode mode;
4117 if (mode != VOIDmode && mode != GET_MODE (op))
4119 if (GET_CODE (op) == SUBREG)
4120 op = SUBREG_REG (op);
4121 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4126 group1_mem_operand (op, mode)
4128 enum machine_mode mode;
4130 if (mode != VOIDmode && mode != GET_MODE (op))
4133 if (GET_CODE (op) == MEM)
4136 if (GET_CODE (op) == PLUS)
4138 rtx op0 = XEXP (op, 0);
4139 rtx op1 = XEXP (op, 1);
4141 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4142 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4145 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4153 /* Return true if any one of the address registers. */
4156 arx_reg_operand (op, mode)
4158 enum machine_mode mode;
4160 if (mode != VOIDmode && mode != GET_MODE (op))
4162 if (GET_CODE (op) == SUBREG)
4163 op = SUBREG_REG (op);
4164 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4169 c4x_arn_reg_operand (op, mode, regno)
4171 enum machine_mode mode;
4174 if (mode != VOIDmode && mode != GET_MODE (op))
4176 if (GET_CODE (op) == SUBREG)
4177 op = SUBREG_REG (op);
4178 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4183 c4x_arn_mem_operand (op, mode, regno)
4185 enum machine_mode mode;
4188 if (mode != VOIDmode && mode != GET_MODE (op))
4191 if (GET_CODE (op) == MEM)
4194 switch (GET_CODE (op))
4203 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4207 if (REG_P (XEXP (op, 0)) && (! reload_completed
4208 || (REGNO (XEXP (op, 0)) == regno)))
4210 if (REG_P (XEXP (XEXP (op, 1), 1))
4211 && (! reload_completed
4212 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
4218 rtx op0 = XEXP (op, 0);
4219 rtx op1 = XEXP (op, 1);
4221 if ((REG_P (op0) && (! reload_completed
4222 || (REGNO (op0) == regno)))
4223 || (REG_P (op1) && (! reload_completed
4224 || (REGNO (op1) == regno))))
4238 ar0_reg_operand (op, mode)
4240 enum machine_mode mode;
4242 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4247 ar0_mem_operand (op, mode)
4249 enum machine_mode mode;
4251 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4256 ar1_reg_operand (op, mode)
4258 enum machine_mode mode;
4260 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4265 ar1_mem_operand (op, mode)
4267 enum machine_mode mode;
4269 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4274 ar2_reg_operand (op, mode)
4276 enum machine_mode mode;
4278 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4283 ar2_mem_operand (op, mode)
4285 enum machine_mode mode;
4287 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4292 ar3_reg_operand (op, mode)
4294 enum machine_mode mode;
4296 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4301 ar3_mem_operand (op, mode)
4303 enum machine_mode mode;
4305 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4310 ar4_reg_operand (op, mode)
4312 enum machine_mode mode;
4314 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4319 ar4_mem_operand (op, mode)
4321 enum machine_mode mode;
4323 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4328 ar5_reg_operand (op, mode)
4330 enum machine_mode mode;
4332 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4337 ar5_mem_operand (op, mode)
4339 enum machine_mode mode;
4341 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4346 ar6_reg_operand (op, mode)
4348 enum machine_mode mode;
4350 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4355 ar6_mem_operand (op, mode)
4357 enum machine_mode mode;
4359 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4364 ar7_reg_operand (op, mode)
4366 enum machine_mode mode;
4368 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4373 ar7_mem_operand (op, mode)
4375 enum machine_mode mode;
4377 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4382 ir0_reg_operand (op, mode)
4384 enum machine_mode mode;
4386 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4391 ir0_mem_operand (op, mode)
4393 enum machine_mode mode;
4395 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4400 ir1_reg_operand (op, mode)
4402 enum machine_mode mode;
4404 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4409 ir1_mem_operand (op, mode)
4411 enum machine_mode mode;
4413 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4417 /* This is similar to operand_subword but allows autoincrement
4421 c4x_operand_subword (op, i, validate_address, mode)
4424 int validate_address;
4425 enum machine_mode mode;
4427 if (mode != HImode && mode != HFmode)
4428 fatal_insn ("c4x_operand_subword: invalid mode", op);
4430 if (mode == HFmode && REG_P (op))
4431 fatal_insn ("c4x_operand_subword: invalid operand", op);
4433 if (GET_CODE (op) == MEM)
4435 enum rtx_code code = GET_CODE (XEXP (op, 0));
4436 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4437 enum machine_mode submode;
4442 else if (mode == HFmode)
4449 return gen_rtx_MEM (submode, XEXP (op, 0));
4455 /* We could handle these with some difficulty.
4456 e.g., *p-- => *(p-=2); *(p+1). */
4457 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4463 fatal_insn ("c4x_operand_subword: invalid address", op);
4465 /* Even though offsettable_address_p considers (MEM
4466 (LO_SUM)) to be offsettable, it is not safe if the
4467 address is at the end of the data page since we also have
4468 to fix up the associated high PART. In this case where
4469 we are trying to split a HImode or HFmode memory
4470 reference, we would have to emit another insn to reload a
4471 new HIGH value. It's easier to disable LO_SUM memory references
4472 in HImode or HFmode and we probably get better code. */
4474 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4481 return operand_subword (op, i, validate_address, mode);
4484 /* Handle machine specific pragmas for compatibility with existing
4485 compilers for the C3x/C4x.
4488 ----------------------------------------------------------
4489 CODE_SECTION(symbol,"section") section("section")
4490 DATA_SECTION(symbol,"section") section("section")
4491 FUNC_CANNOT_INLINE(function)
4492 FUNC_EXT_CALLED(function)
4493 FUNC_IS_PURE(function) const
4494 FUNC_IS_SYSTEM(function)
4495 FUNC_NEVER_RETURNS(function) noreturn
4496 FUNC_NO_GLOBAL_ASG(function)
4497 FUNC_NO_IND_ASG(function)
4498 INTERRUPT(function) interrupt
4502 /* Parse a C4x pragma, of the form ( function [, "section"] ) \n.
4503 FUNC is loaded with the IDENTIFIER_NODE of the function, SECT with
4504 the STRING_CST node of the string. If SECT is null, then this
4505 pragma doesn't take a section string. Returns 0 for a good pragma,
4506 -1 for a malformed pragma. */
4507 #define BAD(msgid, arg) do { warning (msgid, arg); return -1; } while (0)
4509 static int (*c_lex_func) (tree *);
4512 c4x_init_pragma (get_token)
4513 int (*get_token) PARAMS ((tree *));
4515 c_lex_func = get_token;
4520 c4x_parse_pragma (name, func, sect)
4527 if (c_lex_func (&x) != CPP_OPEN_PAREN)
4528 BAD ("missing '(' after '#pragma %s' - ignored", name);
4530 if (c_lex_func (&f) != CPP_NAME)
4531 BAD ("missing function name in '#pragma %s' - ignored", name);
4535 if (c_lex_func (&x) != CPP_COMMA)
4536 BAD ("malformed '#pragma %s' - ignored", name);
4537 if (c_lex_func (&s) != CPP_STRING)
4538 BAD ("missing section name in '#pragma %s' - ignored", name);
4542 if (c_lex_func (&x) != CPP_CLOSE_PAREN)
4543 BAD ("missing ')' for '#pragma %s' - ignored", name);
4545 if (c_lex_func (&x) != CPP_EOF)
4546 warning ("junk at end of '#pragma %s'", name);
4553 c4x_pr_CODE_SECTION (pfile)
4554 cpp_reader *pfile ATTRIBUTE_UNUSED;
4558 if (c4x_parse_pragma ("CODE_SECTION", &func, §))
4560 code_tree = chainon (code_tree,
4561 build_tree_list (func,
4562 build_tree_list (NULL_TREE, sect)));
4566 c4x_pr_DATA_SECTION (pfile)
4567 cpp_reader *pfile ATTRIBUTE_UNUSED;
4571 if (c4x_parse_pragma ("DATA_SECTION", &func, §))
4573 data_tree = chainon (data_tree,
4574 build_tree_list (func,
4575 build_tree_list (NULL_TREE, sect)));
4579 c4x_pr_FUNC_IS_PURE (pfile)
4580 cpp_reader *pfile ATTRIBUTE_UNUSED;
4584 if (c4x_parse_pragma ("FUNC_IS_PURE", &func, 0))
4586 pure_tree = chainon (pure_tree, build_tree_list (func, NULL_TREE));
4590 c4x_pr_FUNC_NEVER_RETURNS (pfile)
4591 cpp_reader *pfile ATTRIBUTE_UNUSED;
4595 if (c4x_parse_pragma ("FUNC_NEVER_RETURNS", &func, 0))
4597 noreturn_tree = chainon (noreturn_tree, build_tree_list (func, NULL_TREE));
4601 c4x_pr_INTERRUPT (pfile)
4602 cpp_reader *pfile ATTRIBUTE_UNUSED;
4606 if (c4x_parse_pragma ("INTERRUPT", &func, 0))
4608 interrupt_tree = chainon (interrupt_tree, build_tree_list (func, NULL_TREE));
4611 /* Used for FUNC_CANNOT_INLINE, FUNC_EXT_CALLED, FUNC_IS_SYSTEM,
4612 FUNC_NO_GLOBAL_ASG, and FUNC_NO_IND_ASG. */
4614 c4x_pr_ignored (pfile)
4615 cpp_reader *pfile ATTRIBUTE_UNUSED;
4621 struct name_list *next;
4625 static struct name_list *global_head;
4626 static struct name_list *extern_head;
4629 /* Add NAME to list of global symbols and remove from external list if
4630 present on external list. */
4633 c4x_global_label (name)
4636 struct name_list *p, *last;
4638 /* Do not insert duplicate names, so linearly search through list of
4643 if (strcmp (p->name, name) == 0)
4647 p = (struct name_list *) permalloc (sizeof *p);
4648 p->next = global_head;
4652 /* Remove this name from ref list if present. */
4657 if (strcmp (p->name, name) == 0)
4660 last->next = p->next;
4662 extern_head = p->next;
4671 /* Add NAME to list of external symbols. */
4674 c4x_external_ref (name)
4677 struct name_list *p;
4679 /* Do not insert duplicate names. */
4683 if (strcmp (p->name, name) == 0)
4688 /* Do not insert ref if global found. */
4692 if (strcmp (p->name, name) == 0)
4696 p = (struct name_list *) permalloc (sizeof *p);
4697 p->next = extern_head;
4707 struct name_list *p;
4709 /* Output all external names that are not global. */
4713 fprintf (fp, "\t.ref\t");
4714 assemble_name (fp, p->name);
4718 fprintf (fp, "\t.end\n");
4723 c4x_check_attribute (attrib, list, decl, attributes)
4725 tree list, decl, *attributes;
4727 while (list != NULL_TREE
4728 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4729 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4730 list = TREE_CHAIN (list);
4732 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4738 c4x_insert_attributes (decl, attributes)
4739 tree decl, *attributes;
4741 switch (TREE_CODE (decl))
4744 c4x_check_attribute ("section", code_tree, decl, attributes);
4745 c4x_check_attribute ("const", pure_tree, decl, attributes);
4746 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4747 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4751 c4x_check_attribute ("section", data_tree, decl, attributes);
4760 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine
4761 specific attribute for TYPE. The attributes in ATTRIBUTES have
4762 previously been assigned to TYPE. */
4765 c4x_valid_type_attribute_p (type, attributes, identifier, args)
4767 tree attributes ATTRIBUTE_UNUSED;
4769 tree args ATTRIBUTE_UNUSED;
4771 if (TREE_CODE (type) != FUNCTION_TYPE)
4774 if (is_attribute_p ("interrupt", identifier))
4777 if (is_attribute_p ("assembler", identifier))
4780 if (is_attribute_p ("leaf_pretend", identifier))
4787 /* !!! FIXME to emit RPTS correctly. */
4790 c4x_rptb_rpts_p (insn, op)
4793 /* The next insn should be our label marking where the
4794 repeat block starts. */
4795 insn = NEXT_INSN (insn);
4796 if (GET_CODE (insn) != CODE_LABEL)
4798 /* Some insns may have been shifted between the RPTB insn
4799 and the top label... They were probably destined to
4800 be moved out of the loop. For now, let's leave them
4801 where they are and print a warning. We should
4802 probably move these insns before the repeat block insn. */
4804 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4809 /* Skip any notes. */
4810 insn = next_nonnote_insn (insn);
4812 /* This should be our first insn in the loop. */
4813 if (! INSN_P (insn))
4816 /* Skip any notes. */
4817 insn = next_nonnote_insn (insn);
4819 if (! INSN_P (insn))
4822 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4828 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4832 /* Check if register r11 is used as the destination of an insn. */
4845 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4846 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4848 if (INSN_P (x) && (set = single_set (x)))
4851 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4854 fmt = GET_RTX_FORMAT (GET_CODE (x));
4855 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4859 if (c4x_r11_set_p (XEXP (x, i)))
4862 else if (fmt[i] == 'E')
4863 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4864 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4871 /* The c4x sometimes has a problem when the insn before the laj insn
4872 sets the r11 register. Check for this situation. */
4875 c4x_check_laj_p (insn)
4878 insn = prev_nonnote_insn (insn);
4880 /* If this is the start of the function no nop is needed. */
4884 /* If the previous insn is a code label we have to insert a nop. This
4885 could be a jump or table jump. We can find the normal jumps by
4886 scanning the function but this will not find table jumps. */
4887 if (GET_CODE (insn) == CODE_LABEL)
4890 /* If the previous insn sets register r11 we have to insert a nop. */
4891 if (c4x_r11_set_p (insn))
4894 /* No nop needed. */
4899 /* Adjust the cost of a scheduling dependency. Return the new cost of
4900 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4901 A set of an address register followed by a use occurs a 2 cycle
4902 stall (reduced to a single cycle on the c40 using LDA), while
4903 a read of an address register followed by a use occurs a single cycle. */
4905 #define SET_USE_COST 3
4906 #define SETLDA_USE_COST 2
4907 #define READ_USE_COST 2
4911 c4x_adjust_cost (insn, link, dep_insn, cost)
4917 /* Don't worry about this until we know what registers have been
4919 if (flag_schedule_insns == 0 && ! reload_completed)
4922 /* How do we handle dependencies where a read followed by another
4923 read causes a pipeline stall? For example, a read of ar0 followed
4924 by the use of ar0 for a memory reference. It looks like we
4925 need to extend the scheduler to handle this case. */
4927 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4928 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4929 so only deal with insns we know about. */
4930 if (recog_memoized (dep_insn) < 0)
4933 if (REG_NOTE_KIND (link) == 0)
4937 /* Data dependency; DEP_INSN writes a register that INSN reads some
4941 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4942 max = SET_USE_COST > max ? SET_USE_COST : max;
4943 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4944 max = READ_USE_COST > max ? READ_USE_COST : max;
4948 /* This could be significantly optimized. We should look
4949 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4950 insn uses ar0-ar7. We then test if the same register
4951 is used. The tricky bit is that some operands will
4952 use several registers... */
4953 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4954 max = SET_USE_COST > max ? SET_USE_COST : max;
4955 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4956 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4957 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4958 max = READ_USE_COST > max ? READ_USE_COST : max;
4960 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4961 max = SET_USE_COST > max ? SET_USE_COST : max;
4962 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4963 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4964 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4965 max = READ_USE_COST > max ? READ_USE_COST : max;
4967 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4968 max = SET_USE_COST > max ? SET_USE_COST : max;
4969 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4970 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4971 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4972 max = READ_USE_COST > max ? READ_USE_COST : max;
4974 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4975 max = SET_USE_COST > max ? SET_USE_COST : max;
4976 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4977 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4978 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4979 max = READ_USE_COST > max ? READ_USE_COST : max;
4981 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4982 max = SET_USE_COST > max ? SET_USE_COST : max;
4983 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4984 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4985 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4986 max = READ_USE_COST > max ? READ_USE_COST : max;
4988 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4989 max = SET_USE_COST > max ? SET_USE_COST : max;
4990 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4991 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4992 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4993 max = READ_USE_COST > max ? READ_USE_COST : max;
4995 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4996 max = SET_USE_COST > max ? SET_USE_COST : max;
4997 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4998 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4999 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
5000 max = READ_USE_COST > max ? READ_USE_COST : max;
5002 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
5003 max = SET_USE_COST > max ? SET_USE_COST : max;
5004 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
5005 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5006 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
5007 max = READ_USE_COST > max ? READ_USE_COST : max;
5009 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
5010 max = SET_USE_COST > max ? SET_USE_COST : max;
5011 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
5012 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5014 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
5015 max = SET_USE_COST > max ? SET_USE_COST : max;
5016 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
5017 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5023 /* For other data dependencies, the default cost specified in the
5027 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
5029 /* Anti dependency; DEP_INSN reads a register that INSN writes some
5032 /* For c4x anti dependencies, the cost is 0. */
5035 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
5037 /* Output dependency; DEP_INSN writes a register that INSN writes some
5040 /* For c4x output dependencies, the cost is 0. */
5048 c4x_init_builtins ()
5050 tree endlink = void_list_node;
5052 builtin_function ("fast_ftoi",
5055 tree_cons (NULL_TREE, double_type_node, endlink)),
5056 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL);
5057 builtin_function ("ansi_ftoi",
5060 tree_cons (NULL_TREE, double_type_node, endlink)),
5061 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL);
5063 builtin_function ("fast_imult",
5066 tree_cons (NULL_TREE, integer_type_node,
5067 tree_cons (NULL_TREE,
5068 integer_type_node, endlink))),
5069 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL);
5072 builtin_function ("toieee",
5075 tree_cons (NULL_TREE, double_type_node, endlink)),
5076 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL);
5077 builtin_function ("frieee",
5080 tree_cons (NULL_TREE, double_type_node, endlink)),
5081 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL);
5082 builtin_function ("fast_invf",
5085 tree_cons (NULL_TREE, double_type_node, endlink)),
5086 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL);
5092 c4x_expand_builtin (exp, target, subtarget, mode, ignore)
5095 rtx subtarget ATTRIBUTE_UNUSED;
5096 enum machine_mode mode ATTRIBUTE_UNUSED;
5097 int ignore ATTRIBUTE_UNUSED;
5099 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5100 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5101 tree arglist = TREE_OPERAND (exp, 1);
5107 case C4X_BUILTIN_FIX:
5108 arg0 = TREE_VALUE (arglist);
5109 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5110 r0 = protect_from_queue (r0, 0);
5111 if (! target || ! register_operand (target, QImode))
5112 target = gen_reg_rtx (QImode);
5113 emit_insn (gen_fixqfqi_clobber (target, r0));
5116 case C4X_BUILTIN_FIX_ANSI:
5117 arg0 = TREE_VALUE (arglist);
5118 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5119 r0 = protect_from_queue (r0, 0);
5120 if (! target || ! register_operand (target, QImode))
5121 target = gen_reg_rtx (QImode);
5122 emit_insn (gen_fix_truncqfqi2 (target, r0));
5125 case C4X_BUILTIN_MPYI:
5128 arg0 = TREE_VALUE (arglist);
5129 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5130 r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
5131 r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
5132 r0 = protect_from_queue (r0, 0);
5133 r1 = protect_from_queue (r1, 0);
5134 if (! target || ! register_operand (target, QImode))
5135 target = gen_reg_rtx (QImode);
5136 emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
5139 case C4X_BUILTIN_TOIEEE:
5142 arg0 = TREE_VALUE (arglist);
5143 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5144 r0 = protect_from_queue (r0, 0);
5145 if (! target || ! register_operand (target, QFmode))
5146 target = gen_reg_rtx (QFmode);
5147 emit_insn (gen_toieee (target, r0));
5150 case C4X_BUILTIN_FRIEEE:
5153 arg0 = TREE_VALUE (arglist);
5154 if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
5155 put_var_into_stack (arg0);
5156 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5157 r0 = protect_from_queue (r0, 0);
5158 if (register_operand (r0, QFmode))
5160 r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
5161 emit_move_insn (r1, r0);
5164 if (! target || ! register_operand (target, QFmode))
5165 target = gen_reg_rtx (QFmode);
5166 emit_insn (gen_frieee (target, r0));
5169 case C4X_BUILTIN_RCPF:
5172 arg0 = TREE_VALUE (arglist);
5173 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5174 r0 = protect_from_queue (r0, 0);
5175 if (! target || ! register_operand (target, QFmode))
5176 target = gen_reg_rtx (QFmode);
5177 emit_insn (gen_rcpfqf_clobber (target, r0));
5184 c4x_asm_named_section (name, flags, align)
5186 unsigned int flags ATTRIBUTE_UNUSED;
5187 unsigned int align ATTRIBUTE_UNUSED;
5189 fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);