1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
51 #include "c4x-protos.h"
53 #include "target-def.h"
/* SYMBOL_REFs for the library helpers used for float <-> long (HImode)
   conversions.  They are created elsewhere and registered as GC roots
   in c4x_add_gc_roots below.  */
57 rtx fix_truncqfhi2_libfunc;
58 rtx fixuns_truncqfhi2_libfunc;
59 rtx fix_trunchfhi2_libfunc;
60 rtx fixuns_trunchfhi2_libfunc;
61 rtx floathiqf2_libfunc;
62 rtx floatunshiqf2_libfunc;
63 rtx floathihf2_libfunc;
64 rtx floatunshihf2_libfunc;
/* Non-zero when the current ISR qualifies as a leaf function; set in
   c4x_expand_prologue from c4x_leaf_function_p () and consulted by
   c4x_isr_reg_used_p to trim the save/restore set.  */
66 static int c4x_leaf_function;
68 static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
70 /* Array of the smallest class containing reg number REGNO, indexed by
71 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
72 registers are available and set the class to NO_REGS for registers
73 that the target switches say are unavailable. */
75 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
/* NOTE(review): the initializer's opening/closing braces are not
   visible in this chunk (original lines 76 and 110-111 elided).  */
77 /* Reg Modes Saved. */
78 R0R1_REGS, /* R0 QI, QF, HF No. */
79 R0R1_REGS, /* R1 QI, QF, HF No. */
80 R2R3_REGS, /* R2 QI, QF, HF No. */
81 R2R3_REGS, /* R3 QI, QF, HF No. */
82 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
83 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
84 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
85 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
86 ADDR_REGS, /* AR0 QI No. */
87 ADDR_REGS, /* AR1 QI No. */
88 ADDR_REGS, /* AR2 QI No. */
89 ADDR_REGS, /* AR3 QI QI. */
90 ADDR_REGS, /* AR4 QI QI. */
91 ADDR_REGS, /* AR5 QI QI. */
92 ADDR_REGS, /* AR6 QI QI. */
93 ADDR_REGS, /* AR7 QI QI. */
94 DP_REG, /* DP QI No. */
95 INDEX_REGS, /* IR0 QI No. */
96 INDEX_REGS, /* IR1 QI No. */
97 BK_REG, /* BK QI QI. */
98 SP_REG, /* SP QI No. */
99 ST_REG, /* ST CC No. */
100 NO_REGS, /* DIE/IE No. */
101 NO_REGS, /* IIE/IF No. */
102 NO_REGS, /* IIF/IOF No. */
103 INT_REGS, /* RS QI No. */
104 INT_REGS, /* RE QI No. */
105 RC_REG, /* RC QI No. */
106 EXT_REGS, /* R8 QI, QF, HF QI. */
107 EXT_REGS, /* R9 QI, QF, HF No. */
108 EXT_REGS, /* R10 QI, QF, HF No. */
109 EXT_REGS, /* R11 QI, QF, HF No. */
/* Mode in which each hard register should be saved across calls by
   caller-save; VOIDmode marks registers that are never saved this way.
   Indexed by REGNO, parallel to c4x_regclass_map above.  */
112 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
/* NOTE(review): initializer braces elided in this chunk (original
   line 113 and the closing lines are not visible).  */
114 /* Reg Modes Saved. */
115 HFmode, /* R0 QI, QF, HF No. */
116 HFmode, /* R1 QI, QF, HF No. */
117 HFmode, /* R2 QI, QF, HF No. */
118 HFmode, /* R3 QI, QF, HF No. */
119 QFmode, /* R4 QI, QF, HF QI. */
120 QFmode, /* R5 QI, QF, HF QI. */
121 QImode, /* R6 QI, QF, HF QF. */
122 QImode, /* R7 QI, QF, HF QF. */
123 QImode, /* AR0 QI No. */
124 QImode, /* AR1 QI No. */
125 QImode, /* AR2 QI No. */
126 QImode, /* AR3 QI QI. */
127 QImode, /* AR4 QI QI. */
128 QImode, /* AR5 QI QI. */
129 QImode, /* AR6 QI QI. */
130 QImode, /* AR7 QI QI. */
131 VOIDmode, /* DP QI No. */
132 QImode, /* IR0 QI No. */
133 QImode, /* IR1 QI No. */
134 QImode, /* BK QI QI. */
135 VOIDmode, /* SP QI No. */
136 VOIDmode, /* ST CC No. */
137 VOIDmode, /* DIE/IE No. */
138 VOIDmode, /* IIE/IF No. */
139 VOIDmode, /* IIF/IOF No. */
140 QImode, /* RS QI No. */
141 QImode, /* RE QI No. */
142 VOIDmode, /* RC QI No. */
143 QFmode, /* R8 QI, QF, HF QI. */
144 HFmode, /* R9 QI, QF, HF No. */
145 HFmode, /* R10 QI, QF, HF No. */
146 HFmode, /* R11 QI, QF, HF No. */
150 /* Test and compare insns in c4x.md store the information needed to
151 generate branch and scc insns here. */
/* Operands of the most recent compare, recorded by the test/compare
   patterns in c4x.md for later branch/scc expansion.  GC roots.  */
153 struct rtx_def *c4x_compare_op0 = NULL_RTX;
154 struct rtx_def *c4x_compare_op1 = NULL_RTX;
/* Raw -mrpts= string and its parsed value (see c4x_override_options).  */
156 const char *c4x_rpts_cycles_string;
157 int c4x_rpts_cycles = 0;	/* Max. cycles for RPTS. */
/* Raw -mcpu= string and its parsed value (see c4x_override_options).  */
158 const char *c4x_cpu_version_string;
159 int c4x_cpu_version = 40;	/* CPU version C30/31/32/33/40/44. */
161 /* Pragma definitions. */
/* Per-pragma lists of function names, collected by c4x_parse_pragma and
   consulted when attributes are inserted.  GC roots.  */
163 static tree code_tree = NULL_TREE;
164 static tree data_tree = NULL_TREE;
165 static tree pure_tree = NULL_TREE;
166 static tree noreturn_tree = NULL_TREE;
167 static tree interrupt_tree = NULL_TREE;
169 /* Forward declarations */
/* Prototypes for the file-local helpers defined below.  */
170 static void c4x_add_gc_roots PARAMS ((void));
171 static int c4x_isr_reg_used_p PARAMS ((unsigned int));
172 static int c4x_leaf_function_p PARAMS ((void));
173 static int c4x_assembler_function_p PARAMS ((void));
174 static int c4x_immed_float_p PARAMS ((rtx));
175 static int c4x_a_register PARAMS ((rtx));
176 static int c4x_x_register PARAMS ((rtx));
177 static int c4x_immed_int_constant PARAMS ((rtx));
178 static int c4x_immed_float_constant PARAMS ((rtx));
179 static int c4x_K_constant PARAMS ((rtx));
180 static int c4x_N_constant PARAMS ((rtx));
181 static int c4x_O_constant PARAMS ((rtx));
182 static int c4x_R_indirect PARAMS ((rtx));
183 static int c4x_S_indirect PARAMS ((rtx));
184 static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
185 static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
186 enum machine_mode, int));
187 static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
188 static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
189 static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
190 static int c4x_parse_pragma PARAMS ((const char *, tree *, tree *));
191 static int c4x_r11_set_p PARAMS ((rtx));
192 static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
193 static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
194 static tree c4x_handle_fntype_attribute PARAMS ((tree *, tree, tree, int, bool *));
/* Tentative declaration; the table itself is referenced through
   TARGET_ATTRIBUTE_TABLE below.  */
195 const struct attribute_spec c4x_attribute_table[];
196 static void c4x_insert_attributes PARAMS ((tree, tree *));
197 static void c4x_asm_named_section PARAMS ((const char *, unsigned int));
198 static int c4x_adjust_cost PARAMS ((rtx, rtx, rtx, int));
200 /* Initialize the GCC target structure. */
/* Hook this back end's callbacks into the target vector.  */
201 #undef TARGET_ATTRIBUTE_TABLE
202 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
204 #undef TARGET_INSERT_ATTRIBUTES
205 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
207 #undef TARGET_INIT_BUILTINS
208 #define TARGET_INIT_BUILTINS c4x_init_builtins
210 #undef TARGET_EXPAND_BUILTIN
211 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
213 #undef TARGET_SCHED_ADJUST_COST
214 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
216 struct gcc_target targetm = TARGET_INITIALIZER;
218 /* Called to register all of our global variables with the garbage
/* Body of c4x_add_gc_roots: register every file-scope rtx/tree above
   with the garbage collector so they survive collection.
   NOTE(review): the function header (original lines 219-223) is not
   visible in this chunk.  */
224 ggc_add_rtx_root (&c4x_compare_op0, 1);
225 ggc_add_rtx_root (&c4x_compare_op1, 1);
226 ggc_add_tree_root (&code_tree, 1);
227 ggc_add_tree_root (&data_tree, 1);
228 ggc_add_tree_root (&pure_tree, 1);
229 ggc_add_tree_root (&noreturn_tree, 1);
230 ggc_add_tree_root (&interrupt_tree, 1);
231 ggc_add_rtx_root (&smulhi3_libfunc, 1);
232 ggc_add_rtx_root (&umulhi3_libfunc, 1);
233 ggc_add_rtx_root (&fix_truncqfhi2_libfunc, 1);
234 ggc_add_rtx_root (&fixuns_truncqfhi2_libfunc, 1);
235 ggc_add_rtx_root (&fix_trunchfhi2_libfunc, 1);
236 ggc_add_rtx_root (&fixuns_trunchfhi2_libfunc, 1);
237 ggc_add_rtx_root (&floathiqf2_libfunc, 1);
238 ggc_add_rtx_root (&floatunshiqf2_libfunc, 1);
239 ggc_add_rtx_root (&floathihf2_libfunc, 1);
240 ggc_add_rtx_root (&floatunshihf2_libfunc, 1);
244 /* Override command line options.
245 Called once after all options have been parsed.
246 Mostly we process the processor
247 type and sometimes adjust other TARGET_ options. */
250 c4x_override_options ()
251 {
252 if (c4x_rpts_cycles_string)
253 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
/* NOTE(review): the conditions selecting each CPU version below
   (original lines 254-271, presumably testing the -m30..-m44 target
   flags) are elided in this chunk -- confirm against full source.  */
258 c4x_cpu_version = 30;
260 c4x_cpu_version = 31;
262 c4x_cpu_version = 32;
264 c4x_cpu_version = 33;
266 c4x_cpu_version = 40;
268 c4x_cpu_version = 44;
270 c4x_cpu_version = 40;
272 /* -mcpu=xx overrides -m40 etc. */
273 if (c4x_cpu_version_string)
275 const char *p = c4x_cpu_version_string;
277 /* Also allow -mcpu=c30 etc. */
278 if (*p == 'c' || *p == 'C')
280 c4x_cpu_version = atoi (p);
/* Re-derive the CPU flag bits from the (possibly overridden)
   numeric version.  */
283 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
284 C40_FLAG | C44_FLAG);
286 switch (c4x_cpu_version)
288 case 30: target_flags |= C30_FLAG; break;
289 case 31: target_flags |= C31_FLAG; break;
290 case 32: target_flags |= C32_FLAG; break;
291 case 33: target_flags |= C33_FLAG; break;
292 case 40: target_flags |= C40_FLAG; break;
293 case 44: target_flags |= C44_FLAG; break;
/* Default case: unknown version, fall back to the C40.  */
295 warning ("Unknown CPU version %d, using 40.\n", c4x_cpu_version);
296 c4x_cpu_version = 40;
297 target_flags |= C40_FLAG;
/* C30/31/32/33 are all members of the C3x family.  */
300 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
301 target_flags |= C3X_FLAG;
303 target_flags &= ~C3X_FLAG;
305 /* Convert foo / 8.0 into foo * 0.125, etc. */
306 set_fast_math_flags();
308 /* We should phase out the following at some stage.
309 This provides compatibility with the old -mno-aliases option. */
310 if (! TARGET_ALIASES && ! flag_argument_noalias)
311 flag_argument_noalias = 1;
313 /* Register global variables with the garbage collector. */
318 /* This is called before c4x_override_options. */
/* Override per-optimization-level defaults; LEVEL and SIZE are unused
   because the adjustment below applies unconditionally.  */
321 c4x_optimization_options (level, size)
322 int level ATTRIBUTE_UNUSED;
323 int size ATTRIBUTE_UNUSED;
325 /* Scheduling before register allocation can screw up global
326 register allocation, especially for functions that use MPY||ADD
327 instructions. The benefit we gain we get by scheduling before
328 register allocation is probably marginal anyhow. */
329 flag_schedule_insns = 0;
333 /* Write an ASCII string. */
/* Longest printable run emitted inside one quoted ".byte" string.  */
335 #define C4X_ASCII_LIMIT 40
/* Emit LEN bytes at PTR to STREAM as assembler ".byte" directives,
   using quoted strings for printable runs and decimal values
   otherwise.  For TARGET_TI output, lines are broken once their
   length L reaches 80 columns.
   NOTE(review): several body lines are elided in this chunk.  */
338 c4x_output_ascii (stream, ptr, len)
343 char sbuf[C4X_ASCII_LIMIT + 1];
344 int s, l, special, first = 1, onlys;
347 fprintf (stream, "\t.byte\t");
349 for (s = l = 0; len > 0; --len, ++ptr)
353 /* Escape " and \ with a \". */
354 special = *ptr == '\"' || *ptr == '\\';
356 /* If printable - add to buff. */
357 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
362 if (s < C4X_ASCII_LIMIT - 1)
377 fprintf (stream, "\"%s\"", sbuf);
379 if (TARGET_TI && l >= 80 && len > 1)
381 fprintf (stream, "\n\t.byte\t");
/* Non-printable byte: emit its decimal value.  */
399 fprintf (stream, "%d", *ptr);
401 if (TARGET_TI && l >= 80 && len > 1)
403 fprintf (stream, "\n\t.byte\t");
/* Flush any remaining buffered printable characters.  */
414 fprintf (stream, "\"%s\"", sbuf);
417 fputc ('\n', stream);
/* Return non-zero if hard register REGNO can hold a value of MODE
   (backs the HARD_REGNO_MODE_OK macro).  */
422 c4x_hard_regno_mode_ok (regno, mode)
424 enum machine_mode mode;
429 case Pmode: /* Pointer (24/32 bits). */
431 case QImode: /* Integer (32 bits). */
432 return IS_INT_REGNO (regno);
434 case QFmode: /* Float, Double (32 bits). */
435 case HFmode: /* Long Double (40 bits). */
436 return IS_EXT_REGNO (regno);
438 case CCmode: /* Condition Codes. */
439 case CC_NOOVmode: /* Condition Codes. */
440 return IS_ST_REGNO (regno);
442 case HImode: /* Long Long (64 bits). */
443 /* We need two registers to store long longs. Note that
444 it is much easier to constrain the first register
445 to start on an even boundary. */
446 return IS_INT_REGNO (regno)
447 && IS_INT_REGNO (regno + 1)
451 return 0; /* We don't support these modes. */
457 /* Return non-zero if REGNO1 can be renamed to REGNO2. */
/* Return non-zero if REGNO1 may be renamed to REGNO2 by the
   register-rename pass; the elided lines presumably return 0 for
   the rejecting cases below and 1 otherwise.  */
459 c4x_hard_regno_rename_ok (regno1, regno2)
463 /* We can not copy call saved registers from mode QI into QF or from
465 if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
467 if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
469 /* We cannot copy from an extended (40 bit) register to a standard
470 (32 bit) register because we only set the condition codes for
471 extended registers. */
472 if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
474 if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
479 /* The TI C3x C compiler register argument runtime model uses 6 registers,
480 AR2, R2, R3, RC, RS, RE.
482 The first two floating point arguments (float, double, long double)
483 that are found scanning from left to right are assigned to R2 and R3.
485 The remaining integer (char, short, int, long) or pointer arguments
486 are assigned to the remaining registers in the order AR2, R2, R3,
487 RC, RS, RE when scanning left to right, except for the last named
488 argument prior to an ellipsis denoting variable number of
489 arguments. We don't have to worry about the latter condition since
490 function.c treats the last named argument as anonymous (unnamed).
492 All arguments that cannot be passed in registers are pushed onto
493 the stack in reverse order (right to left). GCC handles that for us.
495 c4x_init_cumulative_args() is called at the start, so we can parse
496 the args to see how many floating point arguments and how many
497 integer (or pointer) arguments there are. c4x_function_arg() is
498 then called (sometimes repeatedly) for each argument (parsed left
499 to right) to obtain the register to pass the argument in, or zero
500 if the argument is to be passed on the stack. Once the compiler is
501 happy, c4x_function_arg_advance() is called.
503 Don't use R0 to pass arguments in, we use 0 to indicate a stack
/* Integer/pointer argument registers, row-indexed by the number of
   floating point registers already consumed (0, 1 or 2); a 0 entry
   means "pass on the stack".  */
506 static const int c4x_int_reglist[3][6] =
508 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
509 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
510 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
513 static int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
516 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
517 function whose data type is FNTYPE.
518 For a library call, FNTYPE is 0. */
/* Set up CUM for a call to a function of type FNTYPE (0 for a
   libcall, in which case LIBNAME is the SYMBOL_REF of the callee):
   count the float and integer/pointer args that can go in registers.
   NOTE(review): several body lines (braces, debug-guard conditions)
   are elided in this chunk.  */
521 c4x_init_cumulative_args (cum, fntype, libname)
522 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
523 tree fntype; /* Tree ptr for function decl. */
524 rtx libname; /* SYMBOL_REF of library name or 0. */
526 tree param, next_param;
528 cum->floats = cum->ints = 0;
/* Debug trace of the function type being scanned.  */
535 fprintf (stderr, "\nc4x_init_cumulative_args (");
538 tree ret_type = TREE_TYPE (fntype);
540 fprintf (stderr, "fntype code = %s, ret code = %s",
541 tree_code_name[(int) TREE_CODE (fntype)],
542 tree_code_name[(int) TREE_CODE (ret_type)]);
545 fprintf (stderr, "no fntype");
548 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
551 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
/* Walk the declared parameter types, classifying each one.  */
553 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
554 param; param = next_param)
558 next_param = TREE_CHAIN (param);
560 type = TREE_VALUE (param);
561 if (type && type != void_type_node)
563 enum machine_mode mode;
565 /* If the last arg doesn't have void type then we have
566 variable arguments. */
570 if ((mode = TYPE_MODE (type)))
572 if (! MUST_PASS_IN_STACK (mode, type))
574 /* Look for float, double, or long double argument. */
575 if (mode == QFmode || mode == HFmode)
577 /* Look for integer, enumeral, boolean, char, or pointer
579 else if (mode == QImode || mode == Pmode)
588 fprintf (stderr, "%s%s, args = %d)\n",
589 cum->prototype ? ", prototype" : "",
590 cum->var ? ", variable args" : "",
595 /* Update the data in CUM to advance over an argument
596 of mode MODE and data type TYPE.
597 (TYPE is null for libcalls where that information may not be available.) */
/* Advance CUM past an argument of MODE/TYPE (FUNCTION_ARG_ADVANCE);
   the elided lines presumably increment cum->floats / cum->ints.  */
600 c4x_function_arg_advance (cum, mode, type, named)
601 CUMULATIVE_ARGS *cum; /* Current arg information. */
602 enum machine_mode mode; /* Current arg mode. */
603 tree type; /* Type of the arg or 0 if lib support. */
604 int named; /* Whether or not the argument was named. */
607 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
608 GET_MODE_NAME (mode), named);
612 && ! MUST_PASS_IN_STACK (mode, type))
614 /* Look for float, double, or long double argument. */
615 if (mode == QFmode || mode == HFmode)
617 /* Look for integer, enumeral, boolean, char, or pointer argument. */
618 else if (mode == QImode || mode == Pmode)
621 else if (! TARGET_MEMPARM && ! type)
623 /* Handle libcall arguments. */
624 if (mode == QFmode || mode == HFmode)
626 else if (mode == QImode || mode == Pmode)
633 /* Define where to put the arguments to a function. Value is zero to
634 push the argument on the stack, or a hard register in which to
637 MODE is the argument's machine mode.
638 TYPE is the data type of the argument (as a tree).
639 This is null for libcalls where that information may
641 CUM is a variable of type CUMULATIVE_ARGS which gives info about
642 the preceding args and about the function being called.
643 NAMED is nonzero if this argument is a named parameter
644 (otherwise it is an extra parameter matching an ellipsis). */
647 c4x_function_arg (cum, mode, type, named)
648 CUMULATIVE_ARGS *cum; /* Current arg information. */
649 enum machine_mode mode; /* Current arg mode. */
650 tree type; /* Type of the arg or 0 if lib support. */
651 int named; /* != 0 for normal args, == 0 for ... args. */
653 int reg = 0; /* Default to passing argument on stack. */
/* Clamp the register budget: at most 2 floats (R2, R3) and at most
   6 - maxfloats integers.  */
657 /* We can handle at most 2 floats in R2, R3. */
658 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
660 /* We can handle at most 6 integers minus number of floats passed
662 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
663 6 - cum->maxfloats : cum->ints;
665 /* If there is no prototype, assume all the arguments are integers. */
666 if (! cum->prototype)
669 cum->ints = cum->floats = 0;
673 /* This marks the last argument. We don't need to pass this through
675 if (type == void_type_node)
681 && ! MUST_PASS_IN_STACK (mode, type))
683 /* Look for float, double, or long double argument. */
684 if (mode == QFmode || mode == HFmode)
686 if (cum->floats < cum->maxfloats)
687 reg = c4x_fp_reglist[cum->floats];
689 /* Look for integer, enumeral, boolean, char, or pointer argument. */
690 else if (mode == QImode || mode == Pmode)
692 if (cum->ints < cum->maxints)
693 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
696 else if (! TARGET_MEMPARM && ! type)
698 /* We could use a different argument calling model for libcalls,
699 since we're only calling functions in libgcc. Thus we could
700 pass arguments for long longs in registers rather than on the
701 stack. In the meantime, use the odd TI format. We make the
702 assumption that we won't have more than two floating point
703 args, six integer args, and that all the arguments are of the
705 if (mode == QFmode || mode == HFmode)
706 reg = c4x_fp_reglist[cum->floats];
707 else if (mode == QImode || mode == Pmode)
708 reg = c4x_int_reglist[0][cum->ints];
/* Debug trace of the decision made above.  */
713 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
714 GET_MODE_NAME (mode), named);
716 fprintf (stderr, ", reg=%s", reg_names[reg]);
718 fprintf (stderr, ", stack");
719 fprintf (stderr, ")\n");
/* reg == 0 (stack) is handled by the elided path; otherwise return
   the chosen hard register.  */
722 return gen_rtx_REG (mode, reg);
/* Implement va_start: skip two words past NEXTARG for non-stdarg
   (old-style varargs) functions, then defer to the generic expander.  */
729 c4x_va_start (stdarg_p, valist, nextarg)
734 nextarg = plus_constant (nextarg, stdarg_p ? 0 : UNITS_PER_WORD * 2);
736 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
740 /* C[34]x arguments grow in weird ways (downwards) that the standard
741 varargs stuff can't handle.. */
/* Implement va_arg: arguments grow downwards, so pre-decrement the
   va_list by the rounded size of TYPE and return the new address.  */
743 c4x_va_arg (valist, type)
748 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
749 build_int_2 (int_size_in_bytes (type), 0));
750 TREE_SIDE_EFFECTS (t) = 1;
752 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
/* Return non-zero if REGNO must be saved/restored by an interrupt
   service routine's prologue/epilogue.  */
757 c4x_isr_reg_used_p (regno)
760 /* Don't save/restore FP or ST, we handle them separately. */
761 if (regno == FRAME_POINTER_REGNUM
762 || IS_ST_REGNO (regno))
765 /* We could be a little smarter abut saving/restoring DP.
766 We'll only save if for the big memory model or if
767 we're paranoid. ;-) */
768 if (IS_DP_REGNO (regno))
769 return ! TARGET_SMALL || TARGET_PARANOID;
771 /* Only save/restore regs in leaf function that are used. */
772 if (c4x_leaf_function)
773 return regs_ever_live[regno] && fixed_regs[regno] == 0;
775 /* Only save/restore regs that are used by the ISR and regs
776 that are likely to be used by functions the ISR calls
777 if they are not fixed. */
778 return IS_EXT_REGNO (regno)
779 || ((regs_ever_live[regno] || call_used_regs[regno])
780 && fixed_regs[regno] == 0);
/* Return non-zero if the current function is (or pretends to be, via
   the leaf_pretend attribute) a leaf function.  */
785 c4x_leaf_function_p ()
787 /* A leaf function makes no calls, so we only need
788 to save/restore the registers we actually use.
789 For the global variable leaf_function to be set, we need
790 to define LEAF_REGISTERS and all that it entails.
791 Let's check ourselves... */
793 if (lookup_attribute ("leaf_pretend",
794 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
797 /* Use the leaf_pretend attribute at your own risk. This is a hack
798 to speed up ISRs that call a function infrequently where the
799 overhead of saving and restoring the additional registers is not
800 warranted. You must save and restore the additional registers
801 required by the called function. Caveat emptor. Here's enough
804 if (leaf_function_p ())
/* Return non-zero if the current function carries the "assembler" or
   "naked" attribute, i.e. gets no compiler-generated prologue/epilogue.  */
812 c4x_assembler_function_p ()
816 type = TREE_TYPE (current_function_decl);
817 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
818 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
/* Return non-zero if the current function is an interrupt handler,
   either via the "interrupt" attribute or a TI-style c_intNN name.  */
823 c4x_interrupt_function_p ()
825 if (lookup_attribute ("interrupt",
826 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
829 /* Look for TI style c_intnn. */
830 return current_function_name[0] == 'c'
831 && current_function_name[1] == '_'
832 && current_function_name[2] == 'i'
833 && current_function_name[3] == 'n'
834 && current_function_name[4] == 't'
835 && ISDIGIT (current_function_name[5])
836 && ISDIGIT (current_function_name[6]);
/* Emit the RTL prologue: nothing for "assembler"/"naked" functions, a
   full register save for ISRs, otherwise push AR3 (the frame pointer)
   when needed, bump SP by the frame size, and push the call-saved
   registers this function uses.  Every frame insn is marked
   RTX_FRAME_RELATED_P for unwind info.
   NOTE(review): many structural lines (braces, else branches) are
   elided in this chunk.  */
840 c4x_expand_prologue ()
843 int size = get_frame_size ();
846 /* In functions where ar3 is not used but frame pointers are still
847 specified, frame pointers are not adjusted (if >= -O2) and this
848 is used so it won't needlessly push the frame pointer. */
851 /* For __assembler__ function don't build a prologue. */
852 if (c4x_assembler_function_p ())
857 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
858 if (profile_block_flag == 2)
860 FUNCTION_BLOCK_PROFILER_EXIT
864 /* For __interrupt__ function build specific prologue. */
865 if (c4x_interrupt_function_p ())
867 c4x_leaf_function = c4x_leaf_function_p ();
/* Save the status register first, then set up AR3 as frame pointer.  */
869 insn = emit_insn (gen_push_st ());
870 RTX_FRAME_RELATED_P (insn) = 1;
873 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
874 RTX_FRAME_RELATED_P (insn) = 1;
875 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
876 gen_rtx_REG (QImode, SP_REGNO)));
877 RTX_FRAME_RELATED_P (insn) = 1;
878 /* We require that an ISR uses fewer than 32768 words of
879 local variables, otherwise we have to go to lots of
880 effort to save a register, load it with the desired size,
881 adjust the stack pointer, and then restore the modified
882 register. Frankly, I think it is a poor ISR that
883 requires more than 32767 words of local temporary
886 error ("ISR %s requires %d words of local vars, max is 32767.",
887 current_function_name, size);
889 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
890 gen_rtx_REG (QImode, SP_REGNO),
892 RTX_FRAME_RELATED_P (insn) = 1;
/* Save every register the ISR may clobber (see c4x_isr_reg_used_p);
   extended registers also get their upper bits saved via pushqf.  */
894 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
896 if (c4x_isr_reg_used_p (regno))
898 if (regno == DP_REGNO)
900 insn = emit_insn (gen_push_dp ());
901 RTX_FRAME_RELATED_P (insn) = 1;
905 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
906 RTX_FRAME_RELATED_P (insn) = 1;
907 if (IS_EXT_REGNO (regno))
909 insn = emit_insn (gen_pushqf
910 (gen_rtx_REG (QFmode, regno)));
911 RTX_FRAME_RELATED_P (insn) = 1;
916 /* We need to clear the repeat mode flag if the ISR is
917 going to use a RPTB instruction or uses the RC, RS, or RE
919 if (regs_ever_live[RC_REGNO]
920 || regs_ever_live[RS_REGNO]
921 || regs_ever_live[RE_REGNO])
923 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
924 RTX_FRAME_RELATED_P (insn) = 1;
927 /* Reload DP reg if we are paranoid about some turkey
928 violating small memory model rules. */
929 if (TARGET_SMALL && TARGET_PARANOID)
931 insn = emit_insn (gen_set_ldp_prologue
932 (gen_rtx_REG (QImode, DP_REGNO),
933 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
934 RTX_FRAME_RELATED_P (insn) = 1;
/* Normal (non-ISR) function prologue.  */
939 if (frame_pointer_needed)
942 || (current_function_args_size != 0)
945 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
946 RTX_FRAME_RELATED_P (insn) = 1;
947 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
948 gen_rtx_REG (QImode, SP_REGNO)));
949 RTX_FRAME_RELATED_P (insn) = 1;
954 /* Since ar3 is not used, we don't need to push it. */
960 /* If we use ar3, we need to push it. */
962 if ((size != 0) || (current_function_args_size != 0))
964 /* If we are omitting the frame pointer, we still have
965 to make space for it so the offsets are correct
966 unless we don't use anything on the stack at all. */
973 /* Local vars are too big, it will take multiple operations
/* Build the >= 32768-word frame size in R1 (high then low half),
   then add it to SP.  */
977 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
978 GEN_INT(size >> 16)));
979 RTX_FRAME_RELATED_P (insn) = 1;
980 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
981 gen_rtx_REG (QImode, R1_REGNO),
983 RTX_FRAME_RELATED_P (insn) = 1;
987 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
988 GEN_INT(size & ~0xffff)));
989 RTX_FRAME_RELATED_P (insn) = 1;
991 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
992 gen_rtx_REG (QImode, R1_REGNO),
993 GEN_INT(size & 0xffff)));
994 RTX_FRAME_RELATED_P (insn) = 1;
995 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
996 gen_rtx_REG (QImode, SP_REGNO),
997 gen_rtx_REG (QImode, R1_REGNO)));
998 RTX_FRAME_RELATED_P (insn) = 1;
1002 /* Local vars take up less than 32767 words, so we can directly
1004 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
1005 gen_rtx_REG (QImode, SP_REGNO),
1007 RTX_FRAME_RELATED_P (insn) = 1;
/* Push the call-saved registers this function actually uses.  */
1010 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1012 if (regs_ever_live[regno] && ! call_used_regs[regno])
1014 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1016 if (TARGET_PRESERVE_FLOAT)
1018 insn = emit_insn (gen_pushqi
1019 (gen_rtx_REG (QImode, regno)));
1020 RTX_FRAME_RELATED_P (insn) = 1;
1022 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
1023 RTX_FRAME_RELATED_P (insn) = 1;
1025 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
1027 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
1028 RTX_FRAME_RELATED_P (insn) = 1;
/* Emit the RTL epilogue, mirroring c4x_expand_prologue: pop the saved
   registers in reverse order, unwind SP/AR3, and emit the appropriate
   return insn.
   NOTE(review): many structural lines (braces, else branches) are
   elided in this chunk.  */
1037 c4x_expand_epilogue()
1043 int size = get_frame_size ();
1045 /* For __assembler__ function build no epilogue. */
1046 if (c4x_assembler_function_p ())
1048 insn = emit_jump_insn (gen_return_from_epilogue ());
1049 RTX_FRAME_RELATED_P (insn) = 1;
1053 /* For __interrupt__ function build specific epilogue. */
1054 if (c4x_interrupt_function_p ())
/* Restore in reverse order of the prologue's saves.  */
1056 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1058 if (! c4x_isr_reg_used_p (regno))
1060 if (regno == DP_REGNO)
1062 insn = emit_insn (gen_pop_dp ());
1063 RTX_FRAME_RELATED_P (insn) = 1;
1067 /* We have to use unspec because the compiler will delete insns
1068 that are not call-saved. */
1069 if (IS_EXT_REGNO (regno))
1071 insn = emit_insn (gen_popqf_unspec
1072 (gen_rtx_REG (QFmode, regno)));
1073 RTX_FRAME_RELATED_P (insn) = 1;
1075 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1076 RTX_FRAME_RELATED_P (insn) = 1;
/* Unwind the ISR frame: drop locals, restore AR3 and ST, return.  */
1081 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1082 gen_rtx_REG (QImode, SP_REGNO),
1084 RTX_FRAME_RELATED_P (insn) = 1;
1085 insn = emit_insn (gen_popqi
1086 (gen_rtx_REG (QImode, AR3_REGNO)));
1087 RTX_FRAME_RELATED_P (insn) = 1;
1089 insn = emit_insn (gen_pop_st ());
1090 RTX_FRAME_RELATED_P (insn) = 1;
1091 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1092 RTX_FRAME_RELATED_P (insn) = 1;
/* Normal (non-ISR) function epilogue.  */
1096 if (frame_pointer_needed)
1099 || (current_function_args_size != 0)
/* Fetch the return address into R2 through the frame pointer.  */
1103 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1104 gen_rtx_MEM (QImode,
1106 (QImode, gen_rtx_REG (QImode,
1109 RTX_FRAME_RELATED_P (insn) = 1;
1111 /* We already have the return value and the fp,
1112 so we need to add those to the stack. */
1119 /* Since ar3 is not used for anything, we don't need to
1126 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1127 if (size || current_function_args_size)
1129 /* If we are ommitting the frame pointer, we still have
1130 to make space for it so the offsets are correct
1131 unless we don't use anything on the stack at all. */
1136 /* Now restore the saved registers, putting in the delayed branch
1138 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1140 if (regs_ever_live[regno] && ! call_used_regs[regno])
1142 if (regno == AR3_REGNO && dont_pop_ar3)
1145 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1147 insn = emit_insn (gen_popqf_unspec
1148 (gen_rtx_REG (QFmode, regno)));
1149 RTX_FRAME_RELATED_P (insn) = 1;
1150 if (TARGET_PRESERVE_FLOAT)
1152 insn = emit_insn (gen_popqi_unspec
1153 (gen_rtx_REG (QImode, regno)));
1154 RTX_FRAME_RELATED_P (insn) = 1;
1159 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1160 RTX_FRAME_RELATED_P (insn) = 1;
1165 if (frame_pointer_needed)
1168 || (current_function_args_size != 0)
1171 /* Restore the old FP. */
1174 (gen_rtx_REG (QImode, AR3_REGNO),
1175 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1177 RTX_FRAME_RELATED_P (insn) = 1;
1183 /* Local vars are too big, it will take multiple operations
/* Build the >= 32768-word frame size in R3 (mirrors the prologue's
   use of R1), then subtract it from SP.  */
1187 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1188 GEN_INT(size >> 16)));
1189 RTX_FRAME_RELATED_P (insn) = 1;
1190 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1191 gen_rtx_REG (QImode, R3_REGNO),
1193 RTX_FRAME_RELATED_P (insn) = 1;
1197 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1198 GEN_INT(size & ~0xffff)));
1199 RTX_FRAME_RELATED_P (insn) = 1;
1201 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1202 gen_rtx_REG (QImode, R3_REGNO),
1203 GEN_INT(size & 0xffff)));
1204 RTX_FRAME_RELATED_P (insn) = 1;
1205 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1206 gen_rtx_REG (QImode, SP_REGNO),
1207 gen_rtx_REG (QImode, R3_REGNO)));
1208 RTX_FRAME_RELATED_P (insn) = 1;
1212 /* Local vars take up less than 32768 words, so we can directly
1213 subtract the number. */
1214 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1215 gen_rtx_REG (QImode, SP_REGNO),
1217 RTX_FRAME_RELATED_P (insn) = 1;
/* Return either indirectly through R2 (return address loaded above)
   or via the plain epilogue return.  */
1222 insn = emit_jump_insn (gen_return_indirect_internal
1223 (gen_rtx_REG (QImode, R2_REGNO)));
1224 RTX_FRAME_RELATED_P (insn) = 1;
1228 insn = emit_jump_insn (gen_return_from_epilogue ());
1229 RTX_FRAME_RELATED_P (insn) = 1;
/* Return non-zero if the current function needs no epilogue at all:
   reload complete, not assembler/interrupt, no alloca, no incoming
   args, no block profiling, empty frame, and no call-saved register
   (other than AR3) live.  */
1236 c4x_null_epilogue_p ()
1240 if (reload_completed
1241 && ! c4x_assembler_function_p ()
1242 && ! c4x_interrupt_function_p ()
1243 && ! current_function_calls_alloca
1244 && ! current_function_args_size
1245 && ! (profile_block_flag == 2)
1247 && ! get_frame_size ())
1249 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1250 if (regs_ever_live[regno] && ! call_used_regs[regno]
1251 && (regno != AR3_REGNO))
/* Expand a move of OPERANDS[1] into OPERANDS[0] in mode MODE,
   massaging operands that the C4x cannot move directly: symbolic
   addresses, large constants, HFmode constants, and mixed-mode
   subregs.  Emits any auxiliary insns (DP loads, constant-pool
   references) needed before the final move pattern.  */
1260 c4x_emit_move_sequence (operands, mode)
1262 enum machine_mode mode;
1264 rtx op0 = operands[0];
1265 rtx op1 = operands[1];
/* Outside reload we may create new pseudos, so awkward source
   operands can simply be forced into a register first.  */
1267 if (! reload_in_progress
1270 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1271 op1 = force_reg (mode, op1);
1273 if (GET_CODE (op1) == LO_SUM
1274 && GET_MODE (op1) == Pmode
1275 && dp_reg_operand (XEXP (op1, 0), mode))
1277 /* expand_increment will sometimes create a LO_SUM immediate
1279 op1 = XEXP (op1, 1);
1281 else if (symbolic_address_operand (op1, mode))
1283 if (TARGET_LOAD_ADDRESS)
1285 /* Alias analysis seems to do a better job if we force
1286 constant addresses to memory after reload. */
1287 emit_insn (gen_load_immed_address (op0, op1));
1292 /* Stick symbol or label address into the constant pool. */
1293 op1 = force_const_mem (Pmode, op1);
1296 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1298 /* We could be a lot smarter about loading some of these
1300 op1 = force_const_mem (mode, op1);
1303 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1304 and emit associated (HIGH (SYMREF)) if large memory model.
1305 c4x_legitimize_address could be used to do this,
1306 perhaps by calling validize_address. */
1307 if (TARGET_EXPOSE_LDP
1308 && ! (reload_in_progress || reload_completed)
1309 && GET_CODE (op1) == MEM
1310 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1312 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1314 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1315 op1 = change_address (op1, mode,
1316 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
/* Same transformation for a symbolic destination address.  */
1319 if (TARGET_EXPOSE_LDP
1320 && ! (reload_in_progress || reload_completed)
1321 && GET_CODE (op0) == MEM
1322 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1324 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1326 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1327 op0 = change_address (op0, mode,
1328 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
/* Handle a mixed-mode (float/int) subreg destination.  */
1331 if (GET_CODE (op0) == SUBREG
1332 && mixed_subreg_operand (op0, mode))
1334 /* We should only generate these mixed mode patterns
1335 during RTL generation. If we need do it later on
1336 then we'll have to emit patterns that won't clobber CC. */
1337 if (reload_in_progress || reload_completed)
1339 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1340 op0 = SUBREG_REG (op0);
1341 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1343 op0 = copy_rtx (op0);
1344 PUT_MODE (op0, QImode);
1350 emit_insn (gen_storeqf_int_clobber (op0, op1));
/* Handle a mixed-mode (float/int) subreg source, mirroring the
   destination case above.  */
1356 if (GET_CODE (op1) == SUBREG
1357 && mixed_subreg_operand (op1, mode))
1359 /* We should only generate these mixed mode patterns
1360 during RTL generation. If we need do it later on
1361 then we'll have to emit patterns that won't clobber CC. */
1362 if (reload_in_progress || reload_completed)
1364 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1365 op1 = SUBREG_REG (op1);
1366 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1368 op1 = copy_rtx (op1);
1369 PUT_MODE (op1, QImode);
1375 emit_insn (gen_loadqf_int_clobber (op0, op1));
/* QImode integer constants that fit neither the 16-bit immediate
   nor the "high" form need a multi-insn load sequence.  */
1382 && reg_operand (op0, mode)
1383 && const_int_operand (op1, mode)
1384 && ! IS_INT16_CONST (INTVAL (op1))
1385 && ! IS_HIGH_CONST (INTVAL (op1)))
1387 emit_insn (gen_loadqi_big_constant (op0, op1));
/* HImode integer constants likewise get a dedicated big-constant
   load pattern.  */
1392 && reg_operand (op0, mode)
1393 && const_int_operand (op1, mode))
1395 emit_insn (gen_loadhi_big_constant (op0, op1));
1399 /* Adjust operands in case we have modified them. */
1403 /* Emit normal pattern. */
/* Emit a call to library function LIBCALL returning a DMODE value
   from NOPERANDS source operands of mode SMODE, and attach an
   RTX of code CODE as the REG_EQUAL equivalent so that CSE can
   treat the libcall like the corresponding arithmetic operation.  */
1409 c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1412 enum machine_mode dmode;
1413 enum machine_mode smode;
/* Unary case: one source operand.  */
1425 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1426 operands[1], smode);
1427 equiv = gen_rtx (code, dmode, operands[1]);
/* Binary case: two source operands.  */
1431 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1432 operands[1], smode, operands[2], smode);
1433 equiv = gen_rtx (code, dmode, operands[1], operands[2]);
/* Package the emitted insns as a libcall block copying the result
   into operands[0] with EQUIV as its known value.  */
1440 insns = get_insns ();
1442 emit_libcall_block (insns, operands[0], ret, equiv);
/* Convenience wrapper for the common three-operand libcall where
   destination and sources share the same mode.  */
1447 c4x_emit_libcall3 (libcall, code, mode, operands)
1450 enum machine_mode mode;
1453 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
/* Emit a libcall computing the high part of a widening multiply:
   the equivalent RTX is (truncate (lshiftrt (mult (code op1)
   (code op2)) ...)), where CODE is the sign/zero extension used.
   NOTE(review): the shift-count operand of the LSHIFTRT is on a
   line not visible here — confirm against full source.  */
1458 c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1461 enum machine_mode mode;
1469 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1470 operands[1], mode, operands[2], mode);
1471 equiv = gen_rtx_TRUNCATE (mode,
1472 gen_rtx_LSHIFTRT (HImode,
1473 gen_rtx_MULT (HImode,
1474 gen_rtx (code, HImode, operands[1]),
1475 gen_rtx (code, HImode, operands[2])),
1477 insns = get_insns ();
1479 emit_libcall_block (insns, operands[0], ret, equiv);
1483 /* Set the SYMBOL_REF_FLAG for a function decl. However, we do not
1484 yet use this info. */
/* Mark the SYMBOL_REF of DECL's rtl when DECL is (or has the type
   of) a function, so later code can recognise function symbols.  */
1486 c4x_encode_section_info (decl)
1490 if (TREE_CODE (TREE_TYPE (decl)) == FUNCTION_TYPE)
1491 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
1493 if (TREE_CODE (decl) == FUNCTION_DECL)
1494 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
/* Return nonzero if ADDR is a legitimate memory address for MODE.
   STRICT nonzero means hard registers must satisfy the base/index
   register-class checks; otherwise pseudos are also accepted.
   Decomposes ADDR into base register, index register, and constant
   displacement, then validates each part.  */
1500 c4x_check_legit_addr (mode, addr, strict)
1501 enum machine_mode mode;
1505 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1506 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1507 rtx disp = NULL_RTX; /* Displacement. */
1510 code = GET_CODE (addr);
1513 /* Register indirect with auto increment/decrement. We don't
1514 allow SP here---push_operand should recognise an operand
1515 being pushed on the stack. */
/* Auto-inc/dec forms are only valid for single-word modes.  */
1520 if (mode != QImode && mode != QFmode)
1524 base = XEXP (addr, 0);
/* PRE/POST_MODIFY: address register updated by a PLUS or MINUS of
   either an index register or a constant displacement.  */
1532 rtx op0 = XEXP (addr, 0);
1533 rtx op1 = XEXP (addr, 1);
1535 if (mode != QImode && mode != QFmode)
1539 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1541 base = XEXP (op1, 0);
1544 if (REG_P (XEXP (op1, 1)))
1545 indx = XEXP (op1, 1);
1547 disp = XEXP (op1, 1);
1551 /* Register indirect. */
1556 /* Register indirect with displacement or index. */
1559 rtx op0 = XEXP (addr, 0);
1560 rtx op1 = XEXP (addr, 1);
1561 enum rtx_code code0 = GET_CODE (op0);
1568 base = op0; /* Base + index. */
/* If the operands arrived as index+base, swap them so BASE is the
   address register and INDX the index register.  */
1570 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1578 base = op0; /* Base + displacement. */
1589 /* Direct addressing with DP register. */
1592 rtx op0 = XEXP (addr, 0);
1593 rtx op1 = XEXP (addr, 1);
1595 /* HImode and HFmode direct memory references aren't truly
1596 offsettable (consider case at end of data page). We
1597 probably get better code by loading a pointer and using an
1598 indirect memory reference. */
1599 if (mode == HImode || mode == HFmode)
1602 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1605 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1608 if (GET_CODE (op1) == CONST)
1614 /* Direct addressing with some work for the assembler... */
1616 /* Direct addressing. */
/* Bare symbolic addresses are accepted pre-reload (they get turned
   into LO_SUM form later), unless the user asked for explicit LDP
   insns or the mode is not offsettable.  */
1619 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1621 /* These need to be converted to a LO_SUM (...).
1622 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1625 /* Do not allow direct memory access to absolute addresses.
1626 This is more pain than it's worth, especially for the
1627 small memory model where we can't guarantee that
1628 this address is within the data page---we don't want
1629 to modify the DP register in the small memory model,
1630 even temporarily, since an interrupt can sneak in.... */
1634 /* Indirect indirect addressing. */
1639 fatal_insn ("Using CONST_DOUBLE for address", addr);
1645 /* Validate the base register. */
1648 /* Check that the address is offsettable for HImode and HFmode. */
1649 if (indx && (mode == HImode || mode == HFmode))
1652 /* Handle DP based stuff. */
1653 if (REGNO (base) == DP_REGNO)
1655 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1657 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1661 /* Now validate the index register. */
1664 if (GET_CODE (indx) != REG)
1666 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1668 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1672 /* Validate displacement. */
1675 if (GET_CODE (disp) != CONST_INT)
1677 if (mode == HImode || mode == HFmode)
1679 /* The offset displacement must be legitimate. */
1680 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1685 if (! IS_DISP8_CONST (INTVAL (disp)))
1688 /* Can't add an index with a disp. */
/* Try to rewrite address ORIG into a legitimate form for MODE.
   Symbolic addresses are converted to (LO_SUM dp_reg sym) after
   emitting an insn to set the DP register; HImode/HFmode references
   are instead forced into a register so they are offsettable.  */
1697 c4x_legitimize_address (orig, mode)
1698 rtx orig ATTRIBUTE_UNUSED;
1699 enum machine_mode mode ATTRIBUTE_UNUSED;
1701 if (GET_CODE (orig) == SYMBOL_REF
1702 || GET_CODE (orig) == LABEL_REF)
1704 if (mode == HImode || mode == HFmode)
1706 /* We need to force the address into
1707 a register so that it is offsettable. */
1708 rtx addr_reg = gen_reg_rtx (Pmode);
1709 emit_move_insn (addr_reg, orig)
1714 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1717 emit_insn (gen_set_ldp (dp_reg, orig));
1719 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1727 /* Provide the costs of an addressing mode that contains ADDR.
1728 If ADDR is not a valid address, its cost is irrelevant.
1729 This is used in cse and loop optimisation to determine
1730 if it is worthwhile storing a common address into a register.
1731 Unfortunately, the C4x address cost depends on other operands. */
/* Return a relative cost estimate for address ADDR, used by cse/loop
   to decide whether a common address is worth hoisting into a
   register.  Lower is cheaper; register-indirect is the baseline.  */
1734 c4x_address_cost (addr)
1737 switch (GET_CODE (addr))
1748 /* These shouldn't be directly generated. */
/* LO_SUM: symbolic direct address; cost depends on memory model.  */
1756 rtx op1 = XEXP (addr, 1);
1758 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1759 return TARGET_SMALL ? 3 : 4;
1761 if (GET_CODE (op1) == CONST)
1763 rtx offset = const0_rtx;
1765 op1 = eliminate_constant_term (op1, &offset);
1767 /* ??? These costs need rethinking... */
1768 if (GET_CODE (op1) == LABEL_REF)
1771 if (GET_CODE (op1) != SYMBOL_REF)
1774 if (INTVAL (offset) == 0)
1779 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
/* PLUS: base register plus register or constant displacement.  */
1785 register rtx op0 = XEXP (addr, 0);
1786 register rtx op1 = XEXP (addr, 1);
1788 if (GET_CODE (op0) != REG)
1791 switch (GET_CODE (op1))
1797 /* This cost for REG+REG must be greater than the cost
1798 for REG if we want autoincrement addressing modes. */
1802 /* The following tries to improve GIV combination
1803 in strength reduce but appears not to help. */
1804 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1807 if (IS_DISP1_CONST (INTVAL (op1)))
1810 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
/* Emit a comparison of X and Y for condition CODE and return the
   status (ST) register rtx holding the result, in the CC mode
   selected by SELECT_CC_MODE.  Signed inequalities cannot use the
   no-overflow CC mode, hence the early bail-out below.  */
1825 c4x_gen_compare_reg (code, x, y)
1829 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1832 if (mode == CC_NOOVmode
1833 && (code == LE || code == GE || code == LT || code == GT))
1836 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1837 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1838 gen_rtx_COMPARE (mode, x, y)));
/* Build the assembler template for a conditional branch insn SEQ
   using format string FORM, accounting for delay slots and annulled
   branches.  Returns a pointer into the static buffer STR.
   NOTE(review): most of this function's body is not visible here.  */
1843 c4x_output_cbranch (form, seq)
1852 static char str[100];
/* When output as a SEQUENCE, element 1 is the delay-slot insn.  */
1856 delay = XVECEXP (final_sequence, 0, 1);
1857 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1858 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1859 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1862 cp = &str [strlen (str)];
/* Print operand OP to FILE for assembler output, honouring the
   %-letter modifier LETTER (0 for plain output).  Letters select
   target-specific formats: direct-address prefix, reversed
   conditions, ldp generation, register halves, etc.  */
1887 c4x_print_operand (file, op, letter)
1888 FILE *file; /* File to write to. */
1889 rtx op; /* Operand to print. */
1890 int letter; /* %<letter> or 0. */
1897 case '#': /* Delayed. */
1899 asm_fprintf (file, "d");
1903 code = GET_CODE (op);
1906 case 'A': /* Direct address. */
1907 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1908 asm_fprintf (file, "@");
1911 case 'H': /* Sethi. */
1912 output_addr_const (file, op);
1915 case 'I': /* Reversed condition. */
1916 code = reverse_condition (code);
1919 case 'L': /* Log 2 of constant. */
1920 if (code != CONST_INT)
1921 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1922 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1925 case 'N': /* Ones complement of small constant. */
1926 if (code != CONST_INT)
1927 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1928 fprintf (file, "%d", ~INTVAL (op));
1931 case 'K': /* Generate ldp(k) if direct address. */
/* A MEM of (LO_SUM dp_reg sym) is a direct reference needing a
   data-page load before use.  */
1934 && GET_CODE (XEXP (op, 0)) == LO_SUM
1935 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1936 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO
1938 op1 = XEXP (XEXP (op, 0), 1);
1939 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1941 asm_fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1942 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1943 asm_fprintf (file, "\n");
1948 case 'M': /* Generate ldp(k) if direct address. */
1949 if (! TARGET_SMALL /* Only used in asm statements. */
1951 && (GET_CODE (XEXP (op, 0)) == CONST
1952 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1954 asm_fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1955 output_address (XEXP (op, 0));
1956 asm_fprintf (file, "\n\t");
1960 case 'O': /* Offset address. */
1961 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1963 else if (code == MEM)
1964 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
/* For a REG, the "offset" is the next register in the pair.  */
1965 else if (code == REG)
1966 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1968 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1971 case 'C': /* Call. */
1974 case 'U': /* Call/callu. */
1975 if (code != SYMBOL_REF)
1976 asm_fprintf (file, "u");
/* Default: print according to operand's rtx code.  Float registers
   have distinct assembler names from their integer aliases.  */
1986 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1988 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1990 fprintf (file, "%s", reg_names[REGNO (op)]);
1994 output_address (XEXP (op, 0));
/* CONST_DOUBLE: print the decimal value of the float constant.  */
2002 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2003 REAL_VALUE_TO_DECIMAL (r, "%20f", str);
2004 fprintf (file, "%s", str);
2009 fprintf (file, "%d", INTVAL (op));
/* Condition codes map to the C4x condition mnemonics.  */
2013 asm_fprintf (file, "ne");
2017 asm_fprintf (file, "eq");
2021 asm_fprintf (file, "ge");
2025 asm_fprintf (file, "gt");
2029 asm_fprintf (file, "le");
2033 asm_fprintf (file, "lt");
2037 asm_fprintf (file, "hs");
2041 asm_fprintf (file, "hi");
2045 asm_fprintf (file, "ls");
2049 asm_fprintf (file, "lo");
2053 output_addr_const (file, op);
2057 output_addr_const (file, XEXP (op, 0));
2064 fatal_insn ("c4x_print_operand: Bad operand case", op);
/* Print memory address ADDR to FILE in C4x assembler syntax:
   *ARn forms for register indirect, *++/-- for auto-modification,
   *+ARn(disp|IRn) for displacement/index, and @sym for direct.  */
2071 c4x_print_operand_address (file, addr)
2075 switch (GET_CODE (addr))
2078 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2082 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2086 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
/* POST_MODIFY: *ARn++(disp) / *ARn--(disp) / *ARn++(IRm) forms.  */
2091 rtx op0 = XEXP (XEXP (addr, 1), 0);
2092 rtx op1 = XEXP (XEXP (addr, 1), 1);
2094 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2095 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2096 reg_names[REGNO (op1)]);
2097 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2098 fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
2100 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2101 fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
2103 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2104 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2105 reg_names[REGNO (op1)]);
2107 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
/* PRE_MODIFY: *++ARn(disp) / *--ARn(disp) / *++ARn(IRm) forms.  */
2113 rtx op0 = XEXP (XEXP (addr, 1), 0);
2114 rtx op1 = XEXP (XEXP (addr, 1), 1);
2116 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2117 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2118 reg_names[REGNO (op1)]);
2119 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2120 fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
2122 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2123 fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
2125 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2126 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2127 reg_names[REGNO (op1)]);
2129 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2134 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2138 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2141 case PLUS: /* Indirect with displacement. */
2143 rtx op0 = XEXP (addr, 0);
2144 rtx op1 = XEXP (addr, 1);
2150 if (IS_INDEX_REG (op0))
2152 fprintf (file, "*+%s(%s)",
2153 reg_names[REGNO (op1)],
2154 reg_names[REGNO (op0)]); /* Index + base. */
2158 fprintf (file, "*+%s(%s)",
2159 reg_names[REGNO (op0)],
2160 reg_names[REGNO (op1)]); /* Base + index. */
2163 else if (INTVAL (op1) < 0)
2165 fprintf (file, "*-%s(%d)",
2166 reg_names[REGNO (op0)],
2167 -INTVAL (op1)); /* Base - displacement. */
2171 fprintf (file, "*+%s(%d)",
2172 reg_names[REGNO (op0)],
2173 INTVAL (op1)); /* Base + displacement. */
2177 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
/* LO_SUM: direct address relative to the DP register.  */
2183 rtx op0 = XEXP (addr, 0);
2184 rtx op1 = XEXP (addr, 1);
2186 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2187 c4x_print_operand_address (file, op1);
2189 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
/* Symbolic direct address.  */
2196 fprintf (file, "@");
2197 output_addr_const (file, addr);
2200 /* We shouldn't access CONST_INT addresses. */
2204 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2210 /* Return nonzero if the floating point operand will fit
2211 in the immediate field. */
/* Return nonzero if float constant OP fits the C4x short immediate
   float field: a 4-bit exponent (-7..7, with -128 reserved for
   zero) and limited mantissa precision.  */
2214 c4x_immed_float_p (op)
2221 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2222 if (GET_MODE (op) == HFmode)
2223 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2226 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2230 /* Sign extend exponent. */
2231 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
/* Exponent -128 encodes zero in the C4x float format.  */
2232 if (exponent == -128)
2234 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2235 return 0; /* Precision doesn't fit. */
2236 return (exponent <= 7) /* Positive exp. */
2237 && (exponent >= -7); /* Negative exp. */
2241 /* The last instruction in a repeat block cannot be a Bcond, DBcond,
2242 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2244 None of the last four instructions from the bottom of the block can
2245 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2246 BcondAT or RETIcondD.
2248 This routine scans the four previous insns for a jump insn, and if
2249 one is found, returns 1 so that we bung in a nop instruction.
2250 This simple minded strategy will add a nop, when it may not
2251 be required. Say when there is a JUMP_INSN near the end of the
2252 block that doesn't get converted into a delayed branch.
2254 Note that we cannot have a call insn, since we don't generate
2255 repeat loops with calls in them (although I suppose we could, but
2256 there's no benefit.)
2258 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
/* Return nonzero if a NOP must be emitted before the rptb_end insn
   INSN, i.e. when one of the last few insns of the repeat block is
   a jump (forbidden near the bottom of a repeat block — see the
   hardware restrictions documented above).  */
2261 c4x_rptb_nop_p (insn)
2267 /* Extract the start label from the jump pattern (rptb_end). */
2268 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2270 /* If there is a label at the end of the loop we must insert
/* Skip backwards over notes/uses/clobbers to the last real insn.  */
2273 insn = previous_insn (insn);
2274 } while (GET_CODE (insn) == NOTE
2275 || GET_CODE (insn) == USE
2276 || GET_CODE (insn) == CLOBBER);
2277 if (GET_CODE (insn) == CODE_LABEL)
/* Examine the previous four real insns for a jump.  */
2280 for (i = 0; i < 4; i++)
2282 /* Search back for prev non-note and non-label insn. */
2283 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2284 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2286 if (insn == start_label)
2289 insn = previous_insn (insn);
2292 /* If we have a jump instruction we should insert a NOP. If we
2293 hit repeat block top we should only insert a NOP if the loop
2295 if (GET_CODE (insn) == JUMP_INSN)
2297 insn = previous_insn (insn);
2303 /* The C4x looping instruction needs to be emitted at the top of the
2304 loop. Emitting the true RTL for a looping instruction at the top of
2305 the loop can cause problems with flow analysis. So instead, a dummy
2306 doloop insn is emitted at the end of the loop. This routine checks
2307 for the presence of this doloop insn and then searches back to the
2308 top of the loop, where it inserts the true looping insn (provided
2309 there are no instructions in the loop which would cause problems).
2310 Any additional labels can be emitted at this point. In addition, if
2311 the desired loop count register was not allocated, this routine does
2314 Before we can create a repeat block looping instruction we have to
2315 verify that there are no jumps outside the loop and no jumps outside
2316 the loop go into this loop. This can happen in the basic blocks reorder
2317 pass. The C4x cpu can not handle this. */
/* Recursively scan rtx X for a LABEL_REF to CODE_LABEL; return
   nonzero if one is found.  Used to detect jumps into/out of a
   candidate repeat block.  */
2320 c4x_label_ref_used_p (x, code_label)
2330 code = GET_CODE (x);
2331 if (code == LABEL_REF)
2332 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
/* Walk all sub-expressions ('e') and vectors ('E') of X.  */
2334 fmt = GET_RTX_FORMAT (code);
2335 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2339 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2342 else if (fmt[i] == 'E')
2343 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2344 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
/* Return nonzero if the loop ending at rptb_end insn INSN and
   starting at START_LABEL may use a RPTB/RPTS instruction: the
   start label must precede INSN and no jump may enter or leave
   the block (basic-block reordering can create such jumps).  */
2352 c4x_rptb_valid_p (insn, start_label)
2353 rtx insn, start_label;
2359 /* Find the start label. */
2360 for (; insn; insn = PREV_INSN (insn))
2361 if (insn == start_label)
2364 /* Note found then we can not use a rptb or rpts. The label was
2365 probably moved by the basic block reorder pass. */
2370 /* If any jump jumps inside this block then we must fail. */
/* Check labels before the block for jumps from inside the block.  */
2371 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2373 if (GET_CODE (insn) == CODE_LABEL)
2375 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2376 if (GET_CODE (tmp) == JUMP_INSN
2377 && c4x_label_ref_used_p (tmp, insn))
/* Check labels after the block for jumps from inside the block.  */
2381 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2383 if (GET_CODE (insn) == CODE_LABEL)
2385 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2386 if (GET_CODE (tmp) == JUMP_INSN
2387 && c4x_label_ref_used_p (tmp, insn))
2391 /* If any jump jumps outside this block then we must fail. */
/* Check labels inside the block for jumps from outside the block.  */
2392 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2394 if (GET_CODE (insn) == CODE_LABEL)
2396 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2397 if (GET_CODE (tmp) == JUMP_INSN
2398 && c4x_label_ref_used_p (tmp, insn))
2400 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2401 if (GET_CODE (tmp) == JUMP_INSN
2402 && c4x_label_ref_used_p (tmp, insn))
2407 /* All checks OK. */
/* Given the dummy rptb_end insn INSN at the bottom of a loop,
   insert the real RPTB (or RPTS) looping insn at the top of the
   loop, or fall back to an explicit decrement-and-branch sequence
   when RC was not allocated or the block is not RPTB-safe.  */
2413 c4x_rptb_insert (insn)
2418 rtx new_start_label;
2421 /* If the count register has not been allocated to RC, say if
2422 there is a movstr pattern in the loop, then do not insert a
2423 RPTB instruction. Instead we emit a decrement and branch
2424 at the end of the loop. */
2425 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2426 if (REGNO (count_reg) != RC_REGNO)
2429 /* Extract the start label from the jump pattern (rptb_end). */
2430 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2432 if (! c4x_rptb_valid_p (insn, start_label))
2434 /* We can not use the rptb insn. Replace it so reorg can use
2435 the delay slots of the jump insn. */
2436 emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
2437 emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
2438 emit_insn_before (gen_bge (start_label), insn);
2439 LABEL_NUSES (start_label)++;
/* Emit a fresh end label after the rptb_end insn.  */
2444 end_label = gen_label_rtx ();
2445 LABEL_NUSES (end_label)++;
2446 emit_label_after (end_label, insn);
2448 new_start_label = gen_label_rtx ();
2449 LABEL_NUSES (new_start_label)++;
/* Walk back to the original start label, redirecting any jumps to
   it onto the new start label.  */
2451 for (; insn; insn = PREV_INSN (insn))
2453 if (insn == start_label)
2455 if (GET_CODE (insn) == JUMP_INSN &&
2456 JUMP_LABEL (insn) == start_label)
2457 redirect_jump (insn, new_start_label, 0);
2460 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2462 emit_label_after (new_start_label, insn);
/* Prefer a single-insn repeat (RPTS) when the loop qualifies.  */
2464 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2465 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2467 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2468 if (LABEL_NUSES (start_label) == 0)
2469 delete_insn (start_label);
2473 /* This function is a C4x special called immediately before delayed
2474 branch scheduling. We fix up RTPB style loops that didn't get RC
2475 allocated as the loop counter. */
/* Target-specific pass run just before delayed-branch scheduling:
   turn dummy rptb_end insns into real RPTB loops and split insns
   so that load_immed_address's force_const_mem calls work.  */
2478 c4x_process_after_reload (first)
2483 for (insn = first; insn; insn = NEXT_INSN (insn))
2485 /* Look for insn. */
2488 int insn_code_number;
2491 insn_code_number = recog_memoized (insn);
2493 if (insn_code_number < 0)
2496 /* Insert the RTX for RPTB at the top of the loop
2497 and a label at the end of the loop. */
2498 if (insn_code_number == CODE_FOR_rptb_end)
2499 c4x_rptb_insert(insn);
2501 /* We need to split the insn here. Otherwise the calls to
2502 force_const_mem will not work for load_immed_address. */
2505 /* Don't split the insn if it has been deleted. */
2506 if (! INSN_DELETED_P (old))
2507 insn = try_split (PATTERN(old), old, 1);
2509 /* When not optimizing, the old insn will be still left around
2510 with only the 'deleted' bit set. Transform it into a note
2511 to avoid confusion of subsequent processing. */
2512 if (INSN_DELETED_P (old))
2514 PUT_CODE (old, NOTE);
2515 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2516 NOTE_SOURCE_FILE (old) = 0;
/* NOTE(review): the function headers for these two predicates are
   not visible here — presumably c4x_a_register / c4x_x_register;
   confirm against full source.  */
/* True for an address register (AR0-AR7) or a pseudo.  */
2527 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
/* True for an index register (IR0,IR1) or a pseudo.  */
2535 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
/* Return nonzero if OP is an integer CONST_INT usable as an
   immediate (VOIDmode or an integer-class mode).  */
2540 c4x_immed_int_constant (op)
2543 if (GET_CODE (op) != CONST_INT)
2546 return GET_MODE (op) == VOIDmode
2547 || GET_MODE_CLASS (op) == MODE_INT
2548 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT
/* Return nonzero if OP is a QFmode or HFmode CONST_DOUBLE, i.e. a
   candidate for an immediate float operand.  */
2553 c4x_immed_float_constant (op)
2556 if (GET_CODE (op) != CONST_DOUBLE)
2559 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2560 present this only means that a MEM rtx has been generated. It does
2561 not mean the rtx is really in memory. */
2563 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
/* Return a shift count such that (INTVAL (op) >> count) fits a
   16-bit signed immediate, scanning counts 0..15; used to build a
   constant with a load-and-shift pair.
   NOTE(review): the success/failure return lines are not visible
   here — confirm exact return convention against full source.  */
2568 c4x_shiftable_constant (op)
2573 int val = INTVAL (op);
2575 for (i = 0; i < 16; i++)
/* Mask of the bits that survive shifting right by I.  */
2580 mask = ((0xffff >> i) << 16) | 0xffff;
2581 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2582 : (val >> i) & mask))
/* The following are the bodies of the one-line constant predicates
   (F, H/I, J, K, L, N, O constraint letters); their header lines are
   not visible here — confirm names against full source.  */
2592 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2600 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2610 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
/* 5-bit signed constants are a C4x-only (non-C3x) feature.  */
2618 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2620 return IS_INT5_CONST (INTVAL (op));
2628 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2636 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2644 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2648 /* The constraints do not have to check the register class,
2649 except when needed to discriminate between the constraints.
2650 The operand has been checked by the predicates to be valid. */
2652 /* ARx + 9-bit signed const or IRn
2653 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2654 We don't include the pre/post inc/dec forms here since
2655 they are handled by the <> constraints. */
/* 'Q' constraint: memory operand addressed as *ARx or ARx plus a
   9-bit signed displacement or index register (see comment above).
   HImode/HFmode additionally require the address be offsettable.  */
2658 c4x_Q_constraint (op)
2661 enum machine_mode mode = GET_MODE (op);
2663 if (GET_CODE (op) != MEM)
/* Dispatch on the form of the address inside the MEM.  */
2666 switch (GET_CODE (op))
2673 rtx op0 = XEXP (op, 0);
2674 rtx op1 = XEXP (op, 1);
2682 if (GET_CODE (op1) != CONST_INT)
2685 /* HImode and HFmode must be offsettable. */
2686 if (mode == HImode || mode == HFmode)
2687 return IS_DISP8_OFF_CONST (INTVAL (op1));
2689 return IS_DISP8_CONST (INTVAL (op1));
2700 /* ARx + 5-bit unsigned const
2701 *ARx, *+ARx(n) for n < 32. */
/* 'R' constraint: memory operand addressed as *ARx or ARx plus a
   5-bit unsigned displacement (see comment above).  */
2704 c4x_R_constraint (op)
2707 enum machine_mode mode = GET_MODE (op);
2711 if (GET_CODE (op) != MEM)
2714 switch (GET_CODE (op))
2721 rtx op0 = XEXP (op, 0);
2722 rtx op1 = XEXP (op, 1);
2727 if (GET_CODE (op1) != CONST_INT)
2730 /* HImode and HFmode must be offsettable. */
/* +1 because the second word of the pair must also be in range.  */
2731 if (mode == HImode || mode == HFmode)
2732 return IS_UINT5_CONST (INTVAL (op1) + 1);
2734 return IS_UINT5_CONST (INTVAL (op1));
/* NOTE(review): this function's header line is not visible here —
   presumably c4x_R_indirect, the non-strict (pseudo-accepting)
   variant of the 'R' constraint; confirm against full source.
   C3x is excluded: the 5-bit unsigned displacement form is C4x-only.  */
2749 enum machine_mode mode = GET_MODE (op);
2751 if (TARGET_C3X || GET_CODE (op) != MEM)
2755 switch (GET_CODE (op))
2758 return IS_ADDR_OR_PSEUDO_REG (op);
2762 rtx op0 = XEXP (op, 0);
2763 rtx op1 = XEXP (op, 1);
2765 /* HImode and HFmode must be offsettable. */
2766 if (mode == HImode || mode == HFmode)
2767 return IS_ADDR_OR_PSEUDO_REG (op0)
2768 && GET_CODE (op1) == CONST_INT
2769 && IS_UINT5_CONST (INTVAL (op1) + 1);
2772 && IS_ADDR_OR_PSEUDO_REG (op0)
2773 && GET_CODE (op1) == CONST_INT
2774 && IS_UINT5_CONST (INTVAL (op1));
2785 /* ARx + 1-bit unsigned const or IRn
2786 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2787 We don't include the pre/post inc/dec forms here since
2788 they are handled by the <> constraints. */
/* 'S' constraint: memory operand addressed as *ARx or ARx plus a
   1-bit displacement or index register (see comment above).  */
2791 c4x_S_constraint (op)
2794 enum machine_mode mode = GET_MODE (op);
2795 if (GET_CODE (op) != MEM)
2798 switch (GET_CODE (op))
/* PRE/POST_MODIFY: the modification must be ARx +/- IRn with the
   same base register on both sides.  */
2806 rtx op0 = XEXP (op, 0);
2807 rtx op1 = XEXP (op, 1);
2809 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2810 || (op0 != XEXP (op1, 0)))
2813 op0 = XEXP (op1, 0);
2814 op1 = XEXP (op1, 1);
2815 return REG_P (op0) && REG_P (op1);
2816 /* Pre or post_modify with a displacement of 0 or 1
2817 should not be generated. */
2823 rtx op0 = XEXP (op, 0);
2824 rtx op1 = XEXP (op, 1);
2832 if (GET_CODE (op1) != CONST_INT)
2835 /* HImode and HFmode must be offsettable. */
2836 if (mode == HImode || mode == HFmode)
2837 return IS_DISP1_OFF_CONST (INTVAL (op1));
2839 return IS_DISP1_CONST (INTVAL (op1));
/* NOTE(review): this function's header line is not visible here —
   presumably c4x_S_indirect, the non-strict (pseudo-accepting)
   variant of the 'S' constraint; confirm against full source.  */
2854 enum machine_mode mode = GET_MODE (op);
2855 if (GET_CODE (op) != MEM)
2859 switch (GET_CODE (op))
/* Auto-inc/dec only valid for single-word modes.  */
2863 if (mode != QImode && mode != QFmode)
2870 return IS_ADDR_OR_PSEUDO_REG (op);
/* PRE/POST_MODIFY: ARx +/- IRn with matching base register.  */
2875 rtx op0 = XEXP (op, 0);
2876 rtx op1 = XEXP (op, 1);
2878 if (mode != QImode && mode != QFmode)
2881 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2882 || (op0 != XEXP (op1, 0)))
2885 op0 = XEXP (op1, 0);
2886 op1 = XEXP (op1, 1);
2887 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2888 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2889 /* Pre or post_modify with a displacement of 0 or 1
2890 should not be generated. */
2895 rtx op0 = XEXP (op, 0);
2896 rtx op1 = XEXP (op, 1);
2900 /* HImode and HFmode must be offsettable. */
2901 if (mode == HImode || mode == HFmode)
2902 return IS_ADDR_OR_PSEUDO_REG (op0)
2903 && GET_CODE (op1) == CONST_INT
2904 && IS_DISP1_OFF_CONST (INTVAL (op1));
/* Base+index in either operand order.  */
2907 return (IS_INDEX_OR_PSEUDO_REG (op1)
2908 && IS_ADDR_OR_PSEUDO_REG (op0))
2909 || (IS_ADDR_OR_PSEUDO_REG (op1)
2910 && IS_INDEX_OR_PSEUDO_REG (op0));
2912 return IS_ADDR_OR_PSEUDO_REG (op0)
2913 && GET_CODE (op1) == CONST_INT
2914 && IS_DISP1_CONST (INTVAL (op1));
2926 /* Direct memory operand. */
/* 'T' constraint: direct (DP-relative) memory operand, i.e. a MEM
   whose address is a flagged SYMBOL_REF (call operand) or a
   (LO_SUM dp_reg sym) whose symbolic part satisfies the 'U'
   constraint.  HImode/HFmode are rejected: direct addresses are
   not offsettable (see c4x_check_legit_addr).  */
2929 c4x_T_constraint (op)
2932 if (GET_CODE (op) != MEM)
2936 if (GET_CODE (op) != LO_SUM)
2938 /* Allow call operands. */
2939 return GET_CODE (op) == SYMBOL_REF
2940 && GET_MODE (op) == Pmode
2941 && SYMBOL_REF_FLAG (op);
2944 /* HImode and HFmode are not offsettable. */
/* BUG FIX: was "GET_CODE (op) == HFmode" — comparing an rtx code
   against a machine mode, so HFmode direct references were never
   rejected here.  Use GET_MODE, matching the HImode test.  */
2945 if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
2948 if ((GET_CODE (XEXP (op, 0)) == REG)
2949 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2950 return c4x_U_constraint (XEXP (op, 1));
2956 /* Symbolic operand. */
/* 'U' constraint: symbolic operand (CONST, SYMBOL_REF, or
   LABEL_REF) suitable for direct addressing.  */
2959 c4x_U_constraint (op)
2962 /* Don't allow direct addressing to an arbitrary constant. */
2963 return GET_CODE (op) == CONST
2964 || GET_CODE (op) == SYMBOL_REF
2965 || GET_CODE (op) == LABEL_REF;
/* Return nonzero if OP is a MEM whose address uses one of the
   auto-modification forms (pre/post inc/dec/modify).  */
2970 c4x_autoinc_operand (op, mode)
2972 enum machine_mode mode ATTRIBUTE_UNUSED;
2974 if (GET_CODE (op) == MEM)
2976 enum rtx_code code = GET_CODE (XEXP (op, 0));
2982 || code == PRE_MODIFY
2983 || code == POST_MODIFY
/* Predicate that accepts any operand unconditionally.  */
2994 any_operand (op, mode)
2995 register rtx op ATTRIBUTE_UNUSED;
2996 enum machine_mode mode ATTRIBUTE_UNUSED;
3002 /* Nonzero if OP is a floating point value with value 0.0. */
3005 fp_zero_operand (op, mode)
3007 enum machine_mode mode ATTRIBUTE_UNUSED;
3011 if (GET_CODE (op) != CONST_DOUBLE)
3013 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
3014 return REAL_VALUES_EQUAL (r, dconst0);
/* Predicate: OP is a constant of MODE that the C4x can use as an
   immediate — a short-float CONST_DOUBLE for float modes, or a
   CONST_INT fitting the 16-bit or "high" immediate forms for
   integer modes.  */
3019 const_operand (op, mode)
3021 register enum machine_mode mode;
3027 if (GET_CODE (op) != CONST_DOUBLE
3028 || GET_MODE (op) != mode
3029 || GET_MODE_CLASS (mode) != MODE_FLOAT)
3032 return c4x_immed_float_p (op);
/* Integer-mode case (builtin constant_p markers accepted).  */
3038 if (GET_CODE (op) == CONSTANT_P_RTX)
3041 if (GET_CODE (op) != CONST_INT
3042 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
3043 || GET_MODE_CLASS (mode) != MODE_INT)
3046 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
/* Predicate: OP is a constant valid for the STIK instruction
   (the 'K' constraint).  */
3058 stik_const_operand (op, mode)
3060 enum machine_mode mode ATTRIBUTE_UNUSED;
3062 return c4x_K_constant (op);
/* Predicate: OP is a constant whose ones-complement fits an
   immediate (the 'N' constraint).  */
3067 not_const_operand (op, mode)
3069 enum machine_mode mode ATTRIBUTE_UNUSED;
3071 return c4x_N_constant (op);
/* Predicate: register operand, rejecting QFmode subregs (which
   need the mixed-mode patterns instead).  */
3076 reg_operand (op, mode)
3078 enum machine_mode mode;
3080 if (GET_CODE (op) == SUBREG
3081 && GET_MODE (op) == QFmode)
3083 return register_operand (op, mode);
/* Predicate: OP is a QFmode SUBREG of a QImode or HImode register,
   as generated for a union of an int and a long double.  */
3088 mixed_subreg_operand (op, mode)
3090 enum machine_mode mode ATTRIBUTE_UNUSED;
3092 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3093 int and a long double. */
3094 if (GET_CODE (op) == SUBREG
3095 && (GET_MODE (op) == QFmode)
3096 && (GET_MODE (SUBREG_REG (op)) == QImode
3097 || GET_MODE (SUBREG_REG (op)) == HImode))
/* Predicate: OP is a register or a constant.  */
3104 reg_imm_operand (op, mode)
3106 enum machine_mode mode ATTRIBUTE_UNUSED;
3108 if (REG_P (op) || CONSTANT_P (op))
/* Predicate: OP is an operand whose address does not auto-modify a
   register — registers, constants, and plain/offset MEMs.  */
3115 not_modify_reg (op, mode)
3117 enum machine_mode mode ATTRIBUTE_UNUSED;
3119 if (REG_P (op) || CONSTANT_P (op))
3121 if (GET_CODE (op) != MEM)
3124 switch (GET_CODE (op))
3131 rtx op0 = XEXP (op, 0);
3132 rtx op1 = XEXP (op, 1);
3137 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
/* LO_SUM case: DP-relative direct address is acceptable.  */
3143 rtx op0 = XEXP (op, 0);
3145 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
/* Predicate: OP is anything except the RC (repeat count) register,
   which the RPTB machinery reserves.  */
3163 not_rc_reg (op, mode)
3165 enum machine_mode mode ATTRIBUTE_UNUSED;
3167 if (REG_P (op) && REGNO (op) == RC_REGNO)
3173 /* Extended precision register R0-R1. */
3176 r0r1_reg_operand (op, mode)
3178 enum machine_mode mode;
3180 if (! reg_operand (op, mode))
3182 if (GET_CODE (op) == SUBREG)
3183 op = SUBREG_REG (op);
3184 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3188 /* Extended precision register R2-R3. */
3191 r2r3_reg_operand (op, mode)
3193 enum machine_mode mode;
3195 if (! reg_operand (op, mode))
3197 if (GET_CODE (op) == SUBREG)
3198 op = SUBREG_REG (op);
3199 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3203 /* Low extended precision register R0-R7. */
3206 ext_low_reg_operand (op, mode)
3208 enum machine_mode mode;
3210 if (! reg_operand (op, mode))
3212 if (GET_CODE (op) == SUBREG)
3213 op = SUBREG_REG (op);
3214 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3218 /* Extended precision register. */
3221 ext_reg_operand (op, mode)
3223 enum machine_mode mode;
3225 if (! reg_operand (op, mode))
3227 if (GET_CODE (op) == SUBREG)
3228 op = SUBREG_REG (op);
3231 return IS_EXT_OR_PSEUDO_REG (op);
3235 /* Standard precision register. */
3238 std_reg_operand (op, mode)
3240 enum machine_mode mode;
3242 if (! reg_operand (op, mode))
3244 if (GET_CODE (op) == SUBREG)
3245 op = SUBREG_REG (op);
3246 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3249 /* Standard precision or normal register. */
3252 std_or_reg_operand (op, mode)
3254 enum machine_mode mode;
3256 if (reload_in_progress)
3257 return std_reg_operand (op, mode);
3258 return reg_operand (op, mode);
3261 /* Address register. */
3264 addr_reg_operand (op, mode)
3266 enum machine_mode mode;
3268 if (! reg_operand (op, mode))
3270 return c4x_a_register (op);
3274 /* Index register. */
3277 index_reg_operand (op, mode)
3279 enum machine_mode mode;
3281 if (! reg_operand (op, mode))
3283 if (GET_CODE (op) == SUBREG)
3284 op = SUBREG_REG (op);
3285 return c4x_x_register (op);
3292 dp_reg_operand (op, mode)
3294 enum machine_mode mode ATTRIBUTE_UNUSED;
3296 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3303 sp_reg_operand (op, mode)
3305 enum machine_mode mode ATTRIBUTE_UNUSED;
3307 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3314 st_reg_operand (op, mode)
3316 enum machine_mode mode ATTRIBUTE_UNUSED;
3318 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3325 rc_reg_operand (op, mode)
3327 enum machine_mode mode ATTRIBUTE_UNUSED;
3329 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
/* NOTE(review): fragmentary extract — missing lines between the visible
   ones.  These predicates classify move/arithmetic source and destination
   operands for the C3x/C4x instruction patterns.  */
/* call_address_operand: a register or a symbolic address.  */
3334 call_address_operand (op, mode)
3336 enum machine_mode mode ATTRIBUTE_UNUSED;
3338 return (REG_P (op) || symbolic_address_operand (op, mode));
3342 /* Symbolic address operand. */
3345 symbolic_address_operand (op, mode)
3347 enum machine_mode mode ATTRIBUTE_UNUSED;
3349 switch (GET_CODE (op))
3361 /* Check dst operand of a move instruction. */
/* Mixed float-over-int subregs must live in a register (see
   mixed_subreg_operand); anything else may be a non-immediate operand.  */
3364 dst_operand (op, mode)
3366 enum machine_mode mode;
3368 if (GET_CODE (op) == SUBREG
3369 && mixed_subreg_operand (op, mode))
3373 return reg_operand (op, mode);
3375 return nonimmediate_operand (op, mode);
3379 /* Check src operand of two operand arithmetic instructions. */
3382 src_operand (op, mode)
3384 enum machine_mode mode;
3386 if (GET_CODE (op) == SUBREG
3387 && mixed_subreg_operand (op, mode))
3391 return reg_operand (op, mode);
3393 if (mode == VOIDmode)
3394 mode = GET_MODE (op);
/* Integer immediates must fit the I constraint and be in an integer-ish
   mode.  */
3396 if (GET_CODE (op) == CONST_INT)
3397 return (mode == QImode || mode == Pmode || mode == HImode)
3398 && c4x_I_constant (op);
3400 /* We don't like CONST_DOUBLE integers. */
3401 if (GET_CODE (op) == CONST_DOUBLE)
3402 return c4x_H_constant (op);
3404 /* Disallow symbolic addresses. Only the predicate
3405 symbolic_address_operand will match these. */
3406 if (GET_CODE (op) == SYMBOL_REF
3407 || GET_CODE (op) == LABEL_REF
3408 || GET_CODE (op) == CONST)
3411 /* If TARGET_LOAD_DIRECT_MEMS is non-zero, disallow direct memory
3412 access to symbolic addresses. These operands will get forced
3413 into a register and the movqi expander will generate a
3414 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is non-zero. */
3415 if (GET_CODE (op) == MEM
3416 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3417 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3418 || GET_CODE (XEXP (op, 0)) == CONST)))
3419 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3421 return general_operand (op, mode);
/* src_hi_operand: src_operand extended to accept O-class constants.  */
3426 src_hi_operand (op, mode)
3428 enum machine_mode mode;
3430 if (c4x_O_constant (op))
3432 return src_operand (op, mode);
3436 /* Check src operand of two operand logical instructions. */
/* Logical ops take unsigned 16-bit (L) or "J" immediates only.  */
3439 lsrc_operand (op, mode)
3441 enum machine_mode mode;
3443 if (mode == VOIDmode)
3444 mode = GET_MODE (op);
3446 if (mode != QImode && mode != Pmode)
3447 fatal_insn ("Mode not QImode", op);
3449 if (GET_CODE (op) == CONST_INT)
3450 return c4x_L_constant (op) || c4x_J_constant (op);
3452 return src_operand (op, mode);
3456 /* Check src operand of two operand tricky instructions. */
3459 tsrc_operand (op, mode)
3461 enum machine_mode mode;
3463 if (mode == VOIDmode)
3464 mode = GET_MODE (op);
3466 if (mode != QImode && mode != Pmode)
3467 fatal_insn ("Mode not QImode", op);
3469 if (GET_CODE (op) == CONST_INT)
3470 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3472 return src_operand (op, mode);
/* reg_or_const_operand: union of the two simpler predicates.  */
3477 reg_or_const_operand (op, mode)
3479 enum machine_mode mode;
3481 return reg_operand (op, mode) || const_operand (op, mode);
3485 /* Check for indirect operands allowable in parallel instruction. */
3488 par_ind_operand (op, mode)
3490 enum machine_mode mode;
3492 if (mode != VOIDmode && mode != GET_MODE (op))
3495 return c4x_S_indirect (op);
3499 /* Check for operands allowable in parallel instruction. */
3502 parallel_operand (op, mode)
3504 enum machine_mode mode;
3506 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
/* c4x_S_address_parse: decompose an S-class indirect MEM address into
   BASE register number, an INCDEC flag, INDEX register number, and DISP
   displacement (all returned through pointers).  NOTE(review): this
   extract omits the case labels of the switch and several closing braces;
   the groupings below are inferred from line adjacency — verify against
   the full source.  */
3511 c4x_S_address_parse (op, base, incdec, index, disp)
3523 if (GET_CODE (op) != MEM)
3524 fatal_insn ("Invalid indirect memory address", op);
/* Dispatch on the address form inside the MEM.  */
3527 switch (GET_CODE (op))
3530 *base = REGNO (XEXP (op, 0));
3536 *base = REGNO (XEXP (op, 0));
3542 *base = REGNO (XEXP (op, 0));
3548 *base = REGNO (XEXP (op, 0));
/* Modify-style address: second operand may supply a register index or a
   constant displacement.  */
3554 *base = REGNO (XEXP (op, 0));
3555 if (REG_P (XEXP (XEXP (op, 1), 1)))
3557 *index = REGNO (XEXP (XEXP (op, 1), 1));
3558 *disp = 0; /* ??? */
3561 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3566 *base = REGNO (XEXP (op, 0));
3567 if (REG_P (XEXP (XEXP (op, 1), 1)))
3569 *index = REGNO (XEXP (XEXP (op, 1), 1));
3570 *disp = 1; /* ??? */
3573 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
/* Plain PLUS address: base+index or base+displacement, in either
   operand order.  */
3584 rtx op0 = XEXP (op, 0);
3585 rtx op1 = XEXP (op, 1);
3587 if (c4x_a_register (op0))
3589 if (c4x_x_register (op1))
3591 *base = REGNO (op0);
3592 *index = REGNO (op1);
3595 else if ((GET_CODE (op1) == CONST_INT
3596 && IS_DISP1_CONST (INTVAL (op1))))
3598 *base = REGNO (op0);
3599 *disp = INTVAL (op1);
3603 else if (c4x_x_register (op0) && c4x_a_register (op1))
3605 *base = REGNO (op1);
3606 *index = REGNO (op0);
/* Anything unrecognized is a hard internal error.  */
3613 fatal_insn ("Invalid indirect (S) memory address", op);
/* c4x_address_conflict: decide whether two memory operands OP0/OP1 (with
   STORE0/STORE1 indicating which are stores) may conflict when packed
   into one parallel instruction.  NOTE(review): fragmentary extract —
   declarations and several return statements are missing.  */
3619 c4x_address_conflict (op0, op1, store0, store1)
3634 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3637 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3638 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3640 if (store0 && store1)
3642 /* If we have two stores in parallel to the same address, then
3643 the C4x only executes one of the stores. This is unlikely to
3644 cause problems except when writing to a hardware device such
3645 as a FIFO since the second write will be lost. The user
3646 should flag the hardware location as being volatile so that
3647 we don't do this optimisation. While it is unlikely that we
3648 have an aliased address if both locations are not marked
3649 volatile, it is probably safer to flag a potential conflict
3650 if either location is volatile. */
3651 if (! flag_argument_noalias)
3653 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3658 /* If have a parallel load and a store to the same address, the load
3659 is performed first, so there is no conflict. Similarly, there is
3660 no conflict if have parallel loads from the same address. */
3662 /* Cannot use auto increment or auto decrement twice for same
/* NOTE(review): `incdec0 && incdec0` below looks like a typo for
   `incdec0 && incdec1` given the comment above — confirm and fix in the
   full source before relying on this check.  */
3664 if (base0 == base1 && incdec0 && incdec0)
3667 /* It might be too confusing for GCC if we have use a base register
3668 with a side effect and a memory reference using the same register
3670 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3673 /* We can not optimize the case where op1 and op2 refer to the same
3675 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3683 /* Check for while loop inside a decrement and branch loop. */
/* c4x_label_conflict: scan backwards from INSN for a CODE_LABEL matching
   JUMP or DB to detect a nested-loop label clash.  */
3686 c4x_label_conflict (insn, jump, db)
3693 if (GET_CODE (insn) == CODE_LABEL)
3695 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3697 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3700 insn = PREV_INSN (insn);
3706 /* Validate combination of operands for parallel load/store instructions. */
/* Returns nonzero when OPERANDS[0..3] form a legal parallel load/store
   pair: two REGs and two MEMs whose addresses do not conflict.
   NOTE(review): fragmentary extract — braces/returns missing.  */
3709 valid_parallel_load_store (operands, mode)
3711 enum machine_mode mode ATTRIBUTE_UNUSED;
3713 rtx op0 = operands[0];
3714 rtx op1 = operands[1];
3715 rtx op2 = operands[2];
3716 rtx op3 = operands[3];
/* Strip SUBREGs so the REG/MEM tests below see the underlying rtx.  */
3718 if (GET_CODE (op0) == SUBREG)
3719 op0 = SUBREG_REG (op0);
3720 if (GET_CODE (op1) == SUBREG)
3721 op1 = SUBREG_REG (op1);
3722 if (GET_CODE (op2) == SUBREG)
3723 op2 = SUBREG_REG (op2);
3724 if (GET_CODE (op3) == SUBREG)
3725 op3 = SUBREG_REG (op3);
3727 /* The patterns should only allow ext_low_reg_operand() or
3728 par_ind_operand() operands. Thus of the 4 operands, only 2
3729 should be REGs and the other 2 should be MEMs. */
3731 /* This test prevents the multipack pass from using this pattern if
3732 op0 is used as an index or base register in op2 or op3, since
3733 this combination will require reloading. */
3734 if (GET_CODE (op0) == REG
3735 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3736 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
/* Four cases: (load,load), (store,store), (load,store), (store,load);
   each requires distinct registers and non-conflicting addresses.  */
3740 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3741 return (REGNO (op0) != REGNO (op2))
3742 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3743 && ! c4x_address_conflict (op1, op3, 0, 0);
3746 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3747 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3748 && ! c4x_address_conflict (op0, op2, 1, 1);
3751 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3752 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3753 && ! c4x_address_conflict (op1, op2, 0, 1);
3756 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3757 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3758 && ! c4x_address_conflict (op0, op3, 1, 0);
/* valid_parallel_operands_4/5/6: validate operand combinations for the
   4-, 5- and 6-operand parallel instruction patterns.  Common theme:
   strip SUBREGs, forbid more than one REG among the commutative inputs,
   and reject cases where the destination register appears inside a MEM
   address (which would force a reload).  NOTE(review): fragmentary
   extract — braces and some returns are on missing lines.  */
3765 valid_parallel_operands_4 (operands, mode)
3767 enum machine_mode mode ATTRIBUTE_UNUSED;
3769 rtx op0 = operands[0];
3770 rtx op2 = operands[2];
3772 if (GET_CODE (op0) == SUBREG)
3773 op0 = SUBREG_REG (op0);
3774 if (GET_CODE (op2) == SUBREG)
3775 op2 = SUBREG_REG (op2);
3777 /* This test prevents the multipack pass from using this pattern if
3778 op0 is used as an index or base register in op2, since this combination
3779 will require reloading. */
3780 if (GET_CODE (op0) == REG
3781 && GET_CODE (op2) == MEM
3782 && reg_mentioned_p (op0, XEXP (op2, 0)))
3790 valid_parallel_operands_5 (operands, mode)
3792 enum machine_mode mode ATTRIBUTE_UNUSED;
3795 rtx op0 = operands[0];
3796 rtx op1 = operands[1];
3797 rtx op2 = operands[2];
3798 rtx op3 = operands[3];
3800 if (GET_CODE (op0) == SUBREG)
3801 op0 = SUBREG_REG (op0);
3802 if (GET_CODE (op1) == SUBREG)
3803 op1 = SUBREG_REG (op1);
3804 if (GET_CODE (op2) == SUBREG)
3805 op2 = SUBREG_REG (op2);
3807 /* The patterns should only allow ext_low_reg_operand() or
3808 par_ind_operand() operands. Operands 1 and 2 may be commutative
3809 but only one of them can be a register. */
3810 if (GET_CODE (op1) == REG)
3812 if (GET_CODE (op2) == REG)
3818 /* This test prevents the multipack pass from using this pattern if
3819 op0 is used as an index or base register in op3, since this combination
3820 will require reloading. */
3821 if (GET_CODE (op0) == REG
3822 && GET_CODE (op3) == MEM
3823 && reg_mentioned_p (op0, XEXP (op3, 0)))
3831 valid_parallel_operands_6 (operands, mode)
3833 enum machine_mode mode ATTRIBUTE_UNUSED;
3836 rtx op0 = operands[0];
3837 rtx op1 = operands[1];
3838 rtx op2 = operands[2];
3839 rtx op4 = operands[4];
3840 rtx op5 = operands[5];
3842 if (GET_CODE (op1) == SUBREG)
3843 op1 = SUBREG_REG (op1);
3844 if (GET_CODE (op2) == SUBREG)
3845 op2 = SUBREG_REG (op2);
3846 if (GET_CODE (op4) == SUBREG)
3847 op4 = SUBREG_REG (op4);
3848 if (GET_CODE (op5) == SUBREG)
3849 op5 = SUBREG_REG (op5);
3851 /* The patterns should only allow ext_low_reg_operand() or
3852 par_ind_operand() operands. Thus of the 4 input operands, only 2
3853 should be REGs and the other 2 should be MEMs. */
/* Count the REGs among the four inputs (accumulation lines not visible
   in this extract).  */
3855 if (GET_CODE (op1) == REG)
3857 if (GET_CODE (op2) == REG)
3859 if (GET_CODE (op4) == REG)
3861 if (GET_CODE (op5) == REG)
3864 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3865 Perhaps we should count the MEMs as well? */
3869 /* This test prevents the multipack pass from using this pattern if
3870 op0 is used as an index or base register in op4 or op5, since
3871 this combination will require reloading. */
3872 if (GET_CODE (op0) == REG
3873 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3874 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3881 /* Validate combination of src operands. Note that the operands have
3882 been screened by the src_operand predicate. We just have to check
3883 that the combination of operands is valid. If FORCE is set, ensure
3884 that the destination regno is valid if we have a 2 operand insn. */
/* NOTE(review): fragmentary extract — case labels, braces, and several
   returns are on missing lines; the comments note only the visible
   decision points.  */
3887 c4x_valid_operands (code, operands, mode, force)
3890 enum machine_mode mode ATTRIBUTE_UNUSED;
3895 enum rtx_code code1;
3896 enum rtx_code code2;
/* COMPARE insns index their source operands differently (adjustment
   lines not visible here).  */
3898 if (code == COMPARE)
3909 if (GET_CODE (op1) == SUBREG)
3910 op1 = SUBREG_REG (op1);
3911 if (GET_CODE (op2) == SUBREG)
3912 op2 = SUBREG_REG (op2);
3914 code1 = GET_CODE (op1);
3915 code2 = GET_CODE (op2);
3917 if (code1 == REG && code2 == REG)
/* Two MEMs: both must be S-indirect, or both R-indirect.  */
3920 if (code1 == MEM && code2 == MEM)
3922 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3924 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
/* op2 constant with op1 in memory.  */
3935 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3940 if (! c4x_H_constant (op2))
3944 /* Any valid memory operand screened by src_operand is OK. */
3947 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3948 into a stack slot memory address comprising a PLUS and a
3954 fatal_insn ("c4x_valid_operands: Internal error", op2);
3958 /* Check that we have a valid destination register for a two operand
/* With FORCE, two-operand insns need dest == op1 (COMPARE excepted).  */
3960 return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
3963 /* We assume MINUS is commutative since the subtract patterns
3964 also support the reverse subtract instructions. Since op1
3965 is not a register, and op2 is a register, op1 can only
3966 be a restricted memory operand for a shift instruction. */
3967 if (code == ASHIFTRT || code == LSHIFTRT
3968 || code == ASHIFT || code == COMPARE)
3970 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
/* Mirror of the op2-constant cases above, with roles swapped.  */
3975 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3980 if (! c4x_H_constant (op1))
3984 /* Any valid memory operand screened by src_operand is OK. */
3992 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3993 into a stack slot memory address comprising a PLUS and a
4003 /* Check that we have a valid destination register for a two operand
4005 return ! force || REGNO (op1) == REGNO (operands[0]);
/* valid_operands: md-file entry point; skips the check entirely when not
   optimizing (see comment below).  */
4009 int valid_operands (code, operands, mode)
4012 enum machine_mode mode;
4015 /* If we are not optimizing then we have to let anything go and let
4016 reload fix things up. instantiate_decl in function.c can produce
4017 invalid insns by changing the offset of a memory operand from a
4018 valid one into an invalid one, when the second operand is also a
4019 memory operand. The alternative is not to allow two memory
4020 operands for an insn when not optimizing. The problem only rarely
4021 occurs, for example with the C-torture program DFcmp.c. */
4023 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
/* legitimize_operands: massage OPERANDS for CODE/MODE so that
   c4x_valid_operands accepts them — forcing expensive constants into
   registers, copying op1 into the destination when required, and
   negating shift counts for right shifts.  NOTE(review): fragmentary
   extract; several conditions continue on missing lines.  */
4028 legitimize_operands (code, operands, mode)
4031 enum machine_mode mode;
4033 /* Compare only has 2 operands. */
4034 if (code == COMPARE)
4036 /* During RTL generation, force constants into pseudos so that
4037 they can get hoisted out of loops. This will tie up an extra
4038 register but can save an extra cycle. Only do this if loop
4039 optimisation enabled. (We cannot pull this trick for add and
4040 sub instructions since the flow pass won't find
4041 autoincrements etc.) This allows us to generate compare
4042 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4043 of LDI *AR0++, R0; CMPI 42, R0.
4045 Note that expand_binops will try to load an expensive constant
4046 into a register if it is used within a loop. Unfortunately,
4047 the cost mechanism doesn't allow us to look at the other
4048 operand to decide whether the constant is expensive. */
4050 if (! reload_in_progress
4053 && GET_CODE (operands[1]) == CONST_INT
4054 && preserve_subexpressions_p ()
4055 && rtx_cost (operands[1], code) > 1)
4056 operands[1] = force_reg (mode, operands[1]);
4058 if (! reload_in_progress
4059 && ! c4x_valid_operands (code, operands, mode, 0))
4060 operands[0] = force_reg (mode, operands[0]);
4064 /* We cannot do this for ADDI/SUBI insns since we will
4065 defeat the flow pass from finding autoincrement addressing
4067 if (! reload_in_progress
4068 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
4071 && GET_CODE (operands[2]) == CONST_INT
4072 && preserve_subexpressions_p ()
4073 && rtx_cost (operands[2], code) > 1)
4074 operands[2] = force_reg (mode, operands[2]);
4076 /* We can get better code on a C30 if we force constant shift counts
4077 into a register. This way they can get hoisted out of loops,
4078 tying up a register, but saving an instruction. The downside is
4079 that they may get allocated to an address or index register, and
4080 thus we will get a pipeline conflict if there is a nearby
4081 indirect address using an address register.
4083 Note that expand_binops will not try to load an expensive constant
4084 into a register if it is used within a loop for a shift insn. */
4086 if (! reload_in_progress
4087 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
4089 /* If the operand combination is invalid, we force operand1 into a
4090 register, preventing reload from having doing to do this at a
4092 operands[1] = force_reg (mode, operands[1]);
/* Two-operand insns require dest == src1: emit the copy explicitly.  */
4095 emit_move_insn (operands[0], operands[1]);
4096 operands[1] = copy_rtx (operands[0]);
4100 /* Just in case... */
4101 if (! c4x_valid_operands (code, operands, mode, 0))
4102 operands[2] = force_reg (mode, operands[2]);
4106 /* Right shifts require a negative shift count, but GCC expects
4107 a positive count, so we emit a NEG. */
4108 if ((code == ASHIFTRT || code == LSHIFTRT)
4109 && (GET_CODE (operands[2]) != CONST_INT))
4110 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
4116 /* The following predicates are used for instruction scheduling. */
/* group1_reg_operand: before reload any REG qualifies; after reload the
   hard reg must be in "group 1".  */
4119 group1_reg_operand (op, mode)
4121 enum machine_mode mode;
4123 if (mode != VOIDmode && mode != GET_MODE (op))
4125 if (GET_CODE (op) == SUBREG)
4126 op = SUBREG_REG (op);
4127 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
/* group1_mem_operand: MEM whose address uses a group-1 register, either
   directly or as a PLUS operand.  */
4132 group1_mem_operand (op, mode)
4134 enum machine_mode mode;
4136 if (mode != VOIDmode && mode != GET_MODE (op))
4139 if (GET_CODE (op) == MEM)
4142 if (GET_CODE (op) == PLUS)
4144 rtx op0 = XEXP (op, 0);
4145 rtx op1 = XEXP (op, 1);
4147 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4148 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4151 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4159 /* Return true if any one of the address registers. */
4162 arx_reg_operand (op, mode)
4164 enum machine_mode mode;
4166 if (mode != VOIDmode && mode != GET_MODE (op))
4168 if (GET_CODE (op) == SUBREG)
4169 op = SUBREG_REG (op);
4170 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
/* c4x_arn_reg_operand: true when OP is (or, before reload, may become)
   the specific hard register REGNO.  Shared helper for arN_reg_operand.  */
4175 c4x_arn_reg_operand (op, mode, regno)
4177 enum machine_mode mode;
4180 if (mode != VOIDmode && mode != GET_MODE (op))
4182 if (GET_CODE (op) == SUBREG)
4183 op = SUBREG_REG (op);
4184 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
/* c4x_arn_mem_operand: MEM whose address involves register REGNO, in any
   of the indirect addressing forms (switch cases on missing lines).  */
4189 c4x_arn_mem_operand (op, mode, regno)
4191 enum machine_mode mode;
4194 if (mode != VOIDmode && mode != GET_MODE (op))
4197 if (GET_CODE (op) == MEM)
4200 switch (GET_CODE (op))
4209 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4213 if (REG_P (XEXP (op, 0)) && (! reload_completed
4214 || (REGNO (XEXP (op, 0)) == regno)))
4216 if (REG_P (XEXP (XEXP (op, 1), 1))
4217 && (! reload_completed
4218 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
4224 rtx op0 = XEXP (op, 0);
4225 rtx op1 = XEXP (op, 1);
4227 if ((REG_P (op0) && (! reload_completed
4228 || (REGNO (op0) == regno)))
4229 || (REG_P (op1) && (! reload_completed
4230 || (REGNO (op1) == regno))))
/* Thin wrappers binding c4x_arn_reg_operand / c4x_arn_mem_operand to
   each address register AR0-AR7 and index register IR0-IR1.  Used as
   scheduling predicates in c4x.md.  */
4244 ar0_reg_operand (op, mode)
4246 enum machine_mode mode;
4248 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4253 ar0_mem_operand (op, mode)
4255 enum machine_mode mode;
4257 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4262 ar1_reg_operand (op, mode)
4264 enum machine_mode mode;
4266 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4271 ar1_mem_operand (op, mode)
4273 enum machine_mode mode;
4275 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4280 ar2_reg_operand (op, mode)
4282 enum machine_mode mode;
4284 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4289 ar2_mem_operand (op, mode)
4291 enum machine_mode mode;
4293 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4298 ar3_reg_operand (op, mode)
4300 enum machine_mode mode;
4302 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4307 ar3_mem_operand (op, mode)
4309 enum machine_mode mode;
4311 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4316 ar4_reg_operand (op, mode)
4318 enum machine_mode mode;
4320 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4325 ar4_mem_operand (op, mode)
4327 enum machine_mode mode;
4329 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4334 ar5_reg_operand (op, mode)
4336 enum machine_mode mode;
4338 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4343 ar5_mem_operand (op, mode)
4345 enum machine_mode mode;
4347 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4352 ar6_reg_operand (op, mode)
4354 enum machine_mode mode;
4356 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4361 ar6_mem_operand (op, mode)
4363 enum machine_mode mode;
4365 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4370 ar7_reg_operand (op, mode)
4372 enum machine_mode mode;
4374 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4379 ar7_mem_operand (op, mode)
4381 enum machine_mode mode;
4383 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4388 ir0_reg_operand (op, mode)
4390 enum machine_mode mode;
4392 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4397 ir0_mem_operand (op, mode)
4399 enum machine_mode mode;
4401 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4406 ir1_reg_operand (op, mode)
4408 enum machine_mode mode;
4410 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4415 ir1_mem_operand (op, mode)
4417 enum machine_mode mode;
4419 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4423 /* This is similar to operand_subword but allows autoincrement
/* c4x_operand_subword: extract word I of a two-word (HImode/HFmode)
   operand OP, tolerating autoincrement MEM addresses that the generic
   operand_subword rejects.  Falls through to operand_subword for the
   non-MEM cases.  NOTE(review): fragmentary extract — the case labels
   and the submode selection for word I are on missing lines.  */
4427 c4x_operand_subword (op, i, validate_address, mode)
4430 int validate_address;
4431 enum machine_mode mode;
4433 if (mode != HImode && mode != HFmode)
4434 fatal_insn ("c4x_operand_subword: invalid mode", op);
/* HFmode registers cannot be split this way.  */
4436 if (mode == HFmode && REG_P (op))
4437 fatal_insn ("c4x_operand_subword: invalid operand", op);
4439 if (GET_CODE (op) == MEM)
4441 enum rtx_code code = GET_CODE (XEXP (op, 0));
4442 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4443 enum machine_mode submode;
4448 else if (mode == HFmode)
4455 return gen_rtx_MEM (submode, XEXP (op, 0));
4461 /* We could handle these with some difficulty.
4462 e.g., *p-- => *(p-=2); *(p+1). */
4463 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4469 fatal_insn ("c4x_operand_subword: invalid address", op);
4471 /* Even though offsettable_address_p considers (MEM
4472 (LO_SUM)) to be offsettable, it is not safe if the
4473 address is at the end of the data page since we also have
4474 to fix up the associated high PART. In this case where
4475 we are trying to split a HImode or HFmode memory
4476 reference, we would have to emit another insn to reload a
4477 new HIGH value. It's easier to disable LO_SUM memory references
4478 in HImode or HFmode and we probably get better code. */
4480 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4487 return operand_subword (op, i, validate_address, mode);
4490 /* Handle machine specific pragmas for compatibility with existing
4491 compilers for the C3x/C4x.
4494 ----------------------------------------------------------
4495 CODE_SECTION(symbol,"section") section("section")
4496 DATA_SECTION(symbol,"section") section("section")
4497 FUNC_CANNOT_INLINE(function)
4498 FUNC_EXT_CALLED(function)
4499 FUNC_IS_PURE(function) const
4500 FUNC_IS_SYSTEM(function)
4501 FUNC_NEVER_RETURNS(function) noreturn
4502 FUNC_NO_GLOBAL_ASG(function)
4503 FUNC_NO_IND_ASG(function)
4504 INTERRUPT(function) interrupt
4508 /* Parse a C4x pragma, of the form ( function [, "section"] ) \n.
4509 FUNC is loaded with the IDENTIFIER_NODE of the function, SECT with
4510 the STRING_CST node of the string. If SECT is null, then this
4511 pragma doesn't take a section string. Returns 0 for a good pragma,
4512 -1 for a malformed pragma. */
4513 #define BAD(msgid, arg) do { warning (msgid, arg); return -1; } while (0)
/* Lexer callback installed by c4x_init_pragma; returns the next token.  */
4515 static int (*c_lex_func) (tree *);
4518 c4x_init_pragma (get_token)
4519 int (*get_token) PARAMS ((tree *));
4521 c_lex_func = get_token;
/* NOTE(review): fragmentary extract — the token-by-token parse below is
   missing intermediate lines (e.g. the optional-section branch).  */
4526 c4x_parse_pragma (name, func, sect)
4533 if (c_lex_func (&x) != CPP_OPEN_PAREN)
4534 BAD ("missing '(' after '#pragma %s' - ignored", name);
4536 if (c_lex_func (&f) != CPP_NAME)
4537 BAD ("missing function name in '#pragma %s' - ignored", name);
4541 if (c_lex_func (&x) != CPP_COMMA)
4542 BAD ("malformed '#pragma %s' - ignored", name);
4543 if (c_lex_func (&s) != CPP_STRING)
4544 BAD ("missing section name in '#pragma %s' - ignored", name);
4548 if (c_lex_func (&x) != CPP_CLOSE_PAREN)
4549 BAD ("missing ')' for '#pragma %s' - ignored", name);
/* Trailing junk after the close paren is warned about, not fatal.  */
4551 if (c_lex_func (&x) != CPP_EOF)
4552 warning ("junk at end of '#pragma %s'", name);
/* Pragma handlers: each parses its pragma and records the function (and
   optional section) on a per-pragma tree list consulted later by
   c4x_insert_attributes.  */
4559 c4x_pr_CODE_SECTION (pfile)
4560 cpp_reader *pfile ATTRIBUTE_UNUSED;
4564 if (c4x_parse_pragma ("CODE_SECTION", &func, &sect))
4566 code_tree = chainon (code_tree,
4567 build_tree_list (func,
4568 build_tree_list (NULL_TREE, sect)));
4572 c4x_pr_DATA_SECTION (pfile)
4573 cpp_reader *pfile ATTRIBUTE_UNUSED;
4577 if (c4x_parse_pragma ("DATA_SECTION", &func, &sect))
4579 data_tree = chainon (data_tree,
4580 build_tree_list (func,
4581 build_tree_list (NULL_TREE, sect)));
4585 c4x_pr_FUNC_IS_PURE (pfile)
4586 cpp_reader *pfile ATTRIBUTE_UNUSED;
4590 if (c4x_parse_pragma ("FUNC_IS_PURE", &func, 0))
4592 pure_tree = chainon (pure_tree, build_tree_list (func, NULL_TREE));
4596 c4x_pr_FUNC_NEVER_RETURNS (pfile)
4597 cpp_reader *pfile ATTRIBUTE_UNUSED;
4601 if (c4x_parse_pragma ("FUNC_NEVER_RETURNS", &func, 0))
4603 noreturn_tree = chainon (noreturn_tree, build_tree_list (func, NULL_TREE));
4607 c4x_pr_INTERRUPT (pfile)
4608 cpp_reader *pfile ATTRIBUTE_UNUSED;
4612 if (c4x_parse_pragma ("INTERRUPT", &func, 0))
4614 interrupt_tree = chainon (interrupt_tree, build_tree_list (func, NULL_TREE));
4617 /* Used for FUNC_CANNOT_INLINE, FUNC_EXT_CALLED, FUNC_IS_SYSTEM,
4618 FUNC_NO_GLOBAL_ASG, and FUNC_NO_IND_ASG. */
4620 c4x_pr_ignored (pfile)
4621 cpp_reader *pfile ATTRIBUTE_UNUSED;
/* Singly-linked list node used to track global and external symbol
   names for assembler output.  */
4627 struct name_list *next;
/* Heads of the global-symbol and external-reference lists.  */
4631 static struct name_list *global_head;
4632 static struct name_list *extern_head;
4635 /* Add NAME to list of global symbols and remove from external list if
4636 present on external list. */
/* NOTE(review): fragmentary extract — loop headers and braces are on
   missing lines; duplicate entries are skipped via linear search.  */
4639 c4x_global_label (name)
4642 struct name_list *p, *last;
4644 /* Do not insert duplicate names, so linearly search through list of
4649 if (strcmp (p->name, name) == 0)
/* permalloc: allocation that survives for the whole compilation.  */
4653 p = (struct name_list *) permalloc (sizeof *p);
4654 p->next = global_head;
4658 /* Remove this name from ref list if present. */
4663 if (strcmp (p->name, name) == 0)
4666 last->next = p->next;
4668 extern_head = p->next;
4677 /* Add NAME to list of external symbols. */
4680 c4x_external_ref (name)
4683 struct name_list *p;
4685 /* Do not insert duplicate names. */
4689 if (strcmp (p->name, name) == 0)
4694 /* Do not insert ref if global found. */
4698 if (strcmp (p->name, name) == 0)
4702 p = (struct name_list *) permalloc (sizeof *p);
4703 p->next = extern_head;
/* End-of-file assembler output: emit .ref for each external that is not
   defined globally, then the .end directive.  */
4713 struct name_list *p;
4715 /* Output all external names that are not global. */
4719 fprintf (fp, "\t.ref\t");
4720 assemble_name (fp, p->name);
4724 fprintf (fp, "\t.end\n");
/* c4x_check_attribute: if DECL's name appears on LIST (built by the
   pragma handlers above), prepend attribute ATTRIB to *ATTRIBUTES.  */
4729 c4x_check_attribute (attrib, list, decl, attributes)
4731 tree list, decl, *attributes;
/* NOTE(review): this compares IDENTIFIER_POINTERs with != — relies on
   identifier interning for equality; confirm intent in full source.  */
4733 while (list != NULL_TREE
4734 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4735 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4736 list = TREE_CHAIN (list);
4738 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
/* c4x_insert_attributes: target hook mapping the recorded pragma lists
   onto GCC attributes, keyed by declaration kind.  */
4744 c4x_insert_attributes (decl, attributes)
4745 tree decl, *attributes;
4747 switch (TREE_CODE (decl))
4750 c4x_check_attribute ("section", code_tree, decl, attributes);
4751 c4x_check_attribute ("const", pure_tree, decl, attributes);
4752 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4753 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4757 c4x_check_attribute ("section", data_tree, decl, attributes);
4765 /* Table of valid machine attributes. */
4766 const struct attribute_spec c4x_attribute_table[] =
4768 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4769 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4770 /* FIXME: code elsewhere in this file treats "naked" as a synonym of
4771 "interrupt"; should it be accepted here? */
4772 { "assembler", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4773 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4774 { NULL, 0, 0, false, false, false, NULL }
4777 /* Handle an attribute requiring a FUNCTION_TYPE;
4778 arguments as in struct attribute_spec.handler. */
4780 c4x_handle_fntype_attribute (node, name, args, flags, no_add_attrs)
4783 tree args ATTRIBUTE_UNUSED;
4784 int flags ATTRIBUTE_UNUSED;
/* Warn and drop the attribute when applied to a non-function type.  */
4787 if (TREE_CODE (*node) != FUNCTION_TYPE)
4789 warning ("`%s' attribute only applies to functions",
4790 IDENTIFIER_POINTER (name));
4791 *no_add_attrs = true;
4798 /* !!! FIXME to emit RPTS correctly. */
4801 c4x_rptb_rpts_p (insn, op)
4804 /* The next insn should be our label marking where the
4805 repeat block starts. */
4806 insn = NEXT_INSN (insn);
4807 if (GET_CODE (insn) != CODE_LABEL)
4809 /* Some insns may have been shifted between the RPTB insn
4810 and the top label... They were probably destined to
4811 be moved out of the loop. For now, let's leave them
4812 where they are and print a warning. We should
4813 probably move these insns before the repeat block insn. */
4815 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4820 /* Skip any notes. */
4821 insn = next_nonnote_insn (insn);
4823 /* This should be our first insn in the loop. */
4824 if (! INSN_P (insn))
4827 /* Skip any notes. */
4828 insn = next_nonnote_insn (insn);
4830 if (! INSN_P (insn))
4833 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4839 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4843 /* Check if register r11 is used as the destination of an insn. */
/* Recursively walk rtx X and report whether it (or, for an insn, its
   pattern's single_set destination) sets register R11.  For a SEQUENCE
   (e.g. a filled delay slot) only the last element is examined.
   NOTE(review): the function header and the return statements are elided
   in this excerpt; from the recursive calls it evidently returns nonzero
   when an R11 set is found -- confirm in the full file.  */
4856 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4857 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
/* For a real insn with a single SET, look at the destination register.  */
4859 if (INSN_P (x) && (set = single_set (x)))
4862 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
/* Otherwise recurse over every operand and vector element of X.  */
4865 fmt = GET_RTX_FORMAT (GET_CODE (x));
4866 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4870 if (c4x_r11_set_p (XEXP (x, i)))
4873 else if (fmt[i] == 'E')
4874 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4875 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4882 /* The c4x sometimes has a problem when the insn before the laj insn
4883 sets the r11 register. Check for this situation. */
/* Decide whether a NOP must be inserted before a LAJ insn: look at the
   previous non-note insn and report the hazard if it is a code label
   (a jump may land there) or if it sets R11.  NOTE(review): the return
   statements are elided in this excerpt; the visible comments indicate
   nonzero means "insert a nop" -- confirm in the full file.  */
4886 c4x_check_laj_p (insn)
4889 insn = prev_nonnote_insn (insn);
4891 /* If this is the start of the function no nop is needed. */
4895 /* If the previous insn is a code label we have to insert a nop. This
4896 could be a jump or table jump. We can find the normal jumps by
4897 scanning the function but this will not find table jumps. */
4898 if (GET_CODE (insn) == CODE_LABEL)
4901 /* If the previous insn sets register r11 we have to insert a nop. */
4902 if (c4x_r11_set_p (insn))
4905 /* No nop needed. */
4910 /* Adjust the cost of a scheduling dependency. Return the new cost of
4911 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4912 A set of an address register followed by a use occurs a 2 cycle
4913 stall (reduced to a single cycle on the c40 using LDA), while
4914 a read of an address register followed by a use occurs a single cycle. */
/* Stall costs (in cycles + 1) used below: plain set of an address
   register, set via LDA, and read of an address register, respectively,
   each followed by a use of that register.  */
4916 #define SET_USE_COST 3
4917 #define SETLDA_USE_COST 2
4918 #define READ_USE_COST 2
/* TARGET_SCHED_ADJUST_COST hook.  NOTE(review): the parameter
   declarations, the `max' initialisation, and the final return are elided
   in this excerpt; the visible pattern accumulates the worst-case stall
   in `max' -- confirm the returned expression in the full file.  */
4921 c4x_adjust_cost (insn, link, dep_insn, cost)
4927 /* Don't worry about this until we know what registers have been
4929 if (flag_schedule_insns == 0 && ! reload_completed)
4932 /* How do we handle dependencies where a read followed by another
4933 read causes a pipeline stall? For example, a read of ar0 followed
4934 by the use of ar0 for a memory reference. It looks like we
4935 need to extend the scheduler to handle this case. */
4937 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4938 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4939 so only deal with insns we know about. */
4940 if (recog_memoized (dep_insn) < 0)
/* REG_NOTE_KIND == 0 is a true (read-after-write) data dependency.  */
4943 if (REG_NOTE_KIND (link) == 0)
4947 /* Data dependency; DEP_INSN writes a register that INSN reads some
/* Group test first: if DEP_INSN sets any group-1 register and INSN uses
   one, charge the full set/use stall.  */
4951 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4952 max = SET_USE_COST > max ? SET_USE_COST : max;
4953 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4954 max = READ_USE_COST > max ? READ_USE_COST : max;
4958 /* This could be significantly optimized. We should look
4959 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4960 insn uses ar0-ar7. We then test if the same register
4961 is used. The tricky bit is that some operands will
4962 use several registers... */
/* Per-register tests: for each address register ar0-ar7 (and index
   registers ir0/ir1 below), take the worst of the set/use, LDA-set/use
   and read/use stall costs.  */
4963 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4964 max = SET_USE_COST > max ? SET_USE_COST : max;
4965 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4966 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4967 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4968 max = READ_USE_COST > max ? READ_USE_COST : max;
4970 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4971 max = SET_USE_COST > max ? SET_USE_COST : max;
4972 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4973 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4974 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4975 max = READ_USE_COST > max ? READ_USE_COST : max;
4977 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4978 max = SET_USE_COST > max ? SET_USE_COST : max;
4979 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4980 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4981 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4982 max = READ_USE_COST > max ? READ_USE_COST : max;
4984 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4985 max = SET_USE_COST > max ? SET_USE_COST : max;
4986 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4987 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4988 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4989 max = READ_USE_COST > max ? READ_USE_COST : max;
4991 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4992 max = SET_USE_COST > max ? SET_USE_COST : max;
4993 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4994 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4995 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4996 max = READ_USE_COST > max ? READ_USE_COST : max;
4998 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4999 max = SET_USE_COST > max ? SET_USE_COST : max;
5000 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
5001 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5002 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
5003 max = READ_USE_COST > max ? READ_USE_COST : max;
5005 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
5006 max = SET_USE_COST > max ? SET_USE_COST : max;
5007 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
5008 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5009 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
5010 max = READ_USE_COST > max ? READ_USE_COST : max;
5012 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
5013 max = SET_USE_COST > max ? SET_USE_COST : max;
5014 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
5015 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5016 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
5017 max = READ_USE_COST > max ? READ_USE_COST : max;
/* Index registers ir0/ir1: no read/use variant is tested here, matching
   the set of attributes visible in this excerpt.  */
5019 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
5020 max = SET_USE_COST > max ? SET_USE_COST : max;
5021 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
5022 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5024 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
5025 max = SET_USE_COST > max ? SET_USE_COST : max;
5026 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
5027 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5033 /* For other data dependencies, the default cost specified in the
5037 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
5039 /* Anti dependency; DEP_INSN reads a register that INSN writes some
5042 /* For c4x anti dependencies, the cost is 0. */
5045 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
5047 /* Output dependency; DEP_INSN writes a register that INSN writes some
5050 /* For c4x output dependencies, the cost is 0. */
/* TARGET_INIT_BUILTINS hook: register the C[34]x machine-specific
   builtins (fast_ftoi, ansi_ftoi, fast_imult, toieee, frieee, fast_invf),
   each keyed by a C4X_BUILTIN_* code for c4x_expand_builtin below.
   NOTE(review): the return-type lines of each builtin's function type are
   elided in this excerpt; only the argument lists are visible.  */
5058 c4x_init_builtins ()
5060 tree endlink = void_list_node;
/* double -> int conversions, fast (truncating hardware FIX) vs ANSI.  */
5062 builtin_function ("fast_ftoi",
5065 tree_cons (NULL_TREE, double_type_node, endlink)),
5066 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL);
5067 builtin_function ("ansi_ftoi",
5070 tree_cons (NULL_TREE, double_type_node, endlink)),
5071 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL);
/* (int, int) -> int 24-bit fast multiply.  */
5073 builtin_function ("fast_imult",
5076 tree_cons (NULL_TREE, integer_type_node,
5077 tree_cons (NULL_TREE,
5078 integer_type_node, endlink))),
5079 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL);
/* Conversions between native C4x float format and IEEE, plus fast
   reciprocal; each takes a double argument.  */
5082 builtin_function ("toieee",
5085 tree_cons (NULL_TREE, double_type_node, endlink)),
5086 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL);
5087 builtin_function ("frieee",
5090 tree_cons (NULL_TREE, double_type_node, endlink)),
5091 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL);
5092 builtin_function ("fast_invf",
5095 tree_cons (NULL_TREE, double_type_node, endlink)),
5096 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL);
/* TARGET_EXPAND_BUILTIN hook: expand a call EXP to one of the machine
   builtins registered in c4x_init_builtins into RTL, emitting the
   corresponding named insn pattern and returning the result in TARGET
   (allocated as a fresh register when TARGET is absent or not a suitable
   register operand).  NOTE(review): the switch braces, `return target;'
   statements and local declarations are elided in this excerpt.  */
5102 c4x_expand_builtin (exp, target, subtarget, mode, ignore)
5105 rtx subtarget ATTRIBUTE_UNUSED;
5106 enum machine_mode mode ATTRIBUTE_UNUSED;
5107 int ignore ATTRIBUTE_UNUSED;
/* Recover the FUNCTION_DECL, its C4X_BUILTIN_* code and the argument
   list from the CALL_EXPR.  */
5109 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5110 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5111 tree arglist = TREE_OPERAND (exp, 1);
/* fast_ftoi: float -> int via the FIX insn with clobber.  */
5117 case C4X_BUILTIN_FIX:
5118 arg0 = TREE_VALUE (arglist);
5119 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5120 r0 = protect_from_queue (r0, 0);
5121 if (! target || ! register_operand (target, QImode))
5122 target = gen_reg_rtx (QImode);
5123 emit_insn (gen_fixqfqi_clobber (target, r0));
/* ansi_ftoi: float -> int with ANSI truncation semantics.  */
5126 case C4X_BUILTIN_FIX_ANSI:
5127 arg0 = TREE_VALUE (arglist);
5128 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5129 r0 = protect_from_queue (r0, 0);
5130 if (! target || ! register_operand (target, QImode))
5131 target = gen_reg_rtx (QImode);
5132 emit_insn (gen_fix_truncqfqi2 (target, r0));
/* fast_imult: 24-bit integer multiply.  */
5135 case C4X_BUILTIN_MPYI:
5138 arg0 = TREE_VALUE (arglist);
5139 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5140 r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
5141 r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
5142 r0 = protect_from_queue (r0, 0);
5143 r1 = protect_from_queue (r1, 0);
5144 if (! target || ! register_operand (target, QImode))
5145 target = gen_reg_rtx (QImode);
5146 emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
/* toieee: convert native C4x float to IEEE representation.  */
5149 case C4X_BUILTIN_TOIEEE:
5152 arg0 = TREE_VALUE (arglist);
5153 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5154 r0 = protect_from_queue (r0, 0);
5155 if (! target || ! register_operand (target, QFmode))
5156 target = gen_reg_rtx (QFmode);
5157 emit_insn (gen_toieee (target, r0));
/* frieee: convert IEEE float to native format.  The operand is forced
   into memory (stack slot) first; presumably the frieee pattern needs a
   memory operand -- confirm against the md file.  */
5160 case C4X_BUILTIN_FRIEEE:
5163 arg0 = TREE_VALUE (arglist);
5164 if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
5165 put_var_into_stack (arg0);
5166 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5167 r0 = protect_from_queue (r0, 0);
5168 if (register_operand (r0, QFmode))
5170 r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
5171 emit_move_insn (r1, r0);
5174 if (! target || ! register_operand (target, QFmode))
5175 target = gen_reg_rtx (QFmode);
5176 emit_insn (gen_frieee (target, r0));
/* fast_invf: fast floating-point reciprocal.  */
5179 case C4X_BUILTIN_RCPF:
5182 arg0 = TREE_VALUE (arglist);
5183 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5184 r0 = protect_from_queue (r0, 0);
5185 if (! target || ! register_operand (target, QFmode))
5186 target = gen_reg_rtx (QFmode);
5187 emit_insn (gen_rcpfqf_clobber (target, r0));
/* TARGET_ASM_NAMED_SECTION hook: emit the TI assembler directive that
   switches to section NAME.  FLAGS (SECTION_* bits) are ignored -- the
   .sect directive carries no attribute information.  */
5194 c4x_asm_named_section (name, flags)
5196 unsigned int flags ATTRIBUTE_UNUSED;
5198 fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);