1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
49 #include "c4x-protos.h"
51 #include "target-def.h"
/* RTX handles for libgcc routines implementing float <-> 64-bit (HImode)
   integer conversions that the C3x/C4x cannot do inline.  Presumably
   initialized during target setup -- the initializer is not visible in
   this copy of the file (TODO: confirm).  */
55 rtx fix_truncqfhi2_libfunc;
56 rtx fixuns_truncqfhi2_libfunc;
57 rtx fix_trunchfhi2_libfunc;
58 rtx fixuns_trunchfhi2_libfunc;
59 rtx floathiqf2_libfunc;
60 rtx floatunshiqf2_libfunc;
61 rtx floathihf2_libfunc;
62 rtx floatunshihf2_libfunc;
/* Nonzero when the current function has been determined to be a leaf
   (set via c4x_leaf_function_p during prologue expansion; consulted by
   c4x_isr_reg_used_p to limit ISR save/restore).  */
64 static int c4x_leaf_function;
/* Register names used when printing float (QFmode/HFmode) operands.  */
66 static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
68 /* Array of the smallest class containing reg number REGNO, indexed by
69 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
70 registers are available and set the class to NO_REGS for registers
71 that the target switches say are unavailable. */
/* NOTE(review): the initializer braces for this array appear to be
   missing from this copy of the file -- lines have been elided.  */
73 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
75 /* Reg Modes Saved. */
76 R0R1_REGS, /* R0 QI, QF, HF No. */
77 R0R1_REGS, /* R1 QI, QF, HF No. */
78 R2R3_REGS, /* R2 QI, QF, HF No. */
79 R2R3_REGS, /* R3 QI, QF, HF No. */
80 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
81 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
82 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
83 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
84 ADDR_REGS, /* AR0 QI No. */
85 ADDR_REGS, /* AR1 QI No. */
86 ADDR_REGS, /* AR2 QI No. */
87 ADDR_REGS, /* AR3 QI QI. */
88 ADDR_REGS, /* AR4 QI QI. */
89 ADDR_REGS, /* AR5 QI QI. */
90 ADDR_REGS, /* AR6 QI QI. */
91 ADDR_REGS, /* AR7 QI QI. */
92 DP_REG, /* DP QI No. */
93 INDEX_REGS, /* IR0 QI No. */
94 INDEX_REGS, /* IR1 QI No. */
95 BK_REG, /* BK QI QI. */
96 SP_REG, /* SP QI No. */
97 ST_REG, /* ST CC No. */
98 NO_REGS, /* DIE/IE No. */
99 NO_REGS, /* IIE/IF No. */
100 NO_REGS, /* IIF/IOF No. */
101 INT_REGS, /* RS QI No. */
102 INT_REGS, /* RE QI No. */
103 RC_REG, /* RC QI No. */
104 EXT_REGS, /* R8 QI, QF, HF QI. */
105 EXT_REGS, /* R9 QI, QF, HF No. */
106 EXT_REGS, /* R10 QI, QF, HF No. */
107 EXT_REGS, /* R11 QI, QF, HF No. */
/* Mode in which each hard register should be saved across calls by
   caller-save, indexed by REGNO.  VOIDmode marks registers that are
   never caller-saved.  Parallels c4x_regclass_map above.
   NOTE(review): initializer braces appear elided in this copy.  */
110 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
112 /* Reg Modes Saved. */
113 HFmode, /* R0 QI, QF, HF No. */
114 HFmode, /* R1 QI, QF, HF No. */
115 HFmode, /* R2 QI, QF, HF No. */
116 HFmode, /* R3 QI, QF, HF No. */
117 QFmode, /* R4 QI, QF, HF QI. */
118 QFmode, /* R5 QI, QF, HF QI. */
119 QImode, /* R6 QI, QF, HF QF. */
120 QImode, /* R7 QI, QF, HF QF. */
121 QImode, /* AR0 QI No. */
122 QImode, /* AR1 QI No. */
123 QImode, /* AR2 QI No. */
124 QImode, /* AR3 QI QI. */
125 QImode, /* AR4 QI QI. */
126 QImode, /* AR5 QI QI. */
127 QImode, /* AR6 QI QI. */
128 QImode, /* AR7 QI QI. */
129 VOIDmode, /* DP QI No. */
130 QImode, /* IR0 QI No. */
131 QImode, /* IR1 QI No. */
132 QImode, /* BK QI QI. */
133 VOIDmode, /* SP QI No. */
134 VOIDmode, /* ST CC No. */
135 VOIDmode, /* DIE/IE No. */
136 VOIDmode, /* IIE/IF No. */
137 VOIDmode, /* IIF/IOF No. */
138 QImode, /* RS QI No. */
139 QImode, /* RE QI No. */
140 VOIDmode, /* RC QI No. */
141 QFmode, /* R8 QI, QF, HF QI. */
142 HFmode, /* R9 QI, QF, HF No. */
143 HFmode, /* R10 QI, QF, HF No. */
144 HFmode, /* R11 QI, QF, HF No. */
148 /* Test and compare insns in c4x.md store the information needed to
149 generate branch and scc insns here. */
/* Command-line option state.  The *_string variables hold the raw
   -m option arguments; the ints are the parsed values.  */
154 const char *c4x_rpts_cycles_string;
155 int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
156 const char *c4x_cpu_version_string;
157 int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
159 /* Pragma definitions. */
/* Lists of function names collected from the corresponding #pragmas;
   consulted when attributes are inserted (see c4x_insert_attributes).  */
161 tree code_tree = NULL_TREE;
162 tree data_tree = NULL_TREE;
163 tree pure_tree = NULL_TREE;
164 tree noreturn_tree = NULL_TREE;
165 tree interrupt_tree = NULL_TREE;
167 /* Forward declarations */
/* Static helpers local to this file; PARAMS is the old-style prototype
   compatibility macro.  */
168 static int c4x_isr_reg_used_p PARAMS ((unsigned int));
169 static int c4x_leaf_function_p PARAMS ((void));
170 static int c4x_assembler_function_p PARAMS ((void));
171 static int c4x_immed_float_p PARAMS ((rtx));
172 static int c4x_a_register PARAMS ((rtx));
173 static int c4x_x_register PARAMS ((rtx));
174 static int c4x_immed_int_constant PARAMS ((rtx));
175 static int c4x_immed_float_constant PARAMS ((rtx));
176 static int c4x_K_constant PARAMS ((rtx));
177 static int c4x_N_constant PARAMS ((rtx));
178 static int c4x_O_constant PARAMS ((rtx));
179 static int c4x_R_indirect PARAMS ((rtx));
180 static int c4x_S_indirect PARAMS ((rtx));
181 static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
182 static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
183 enum machine_mode, int));
184 static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
185 static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
186 static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
187 static int c4x_r11_set_p PARAMS ((rtx));
188 static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
189 static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
190 static tree c4x_handle_fntype_attribute PARAMS ((tree *, tree, tree, int, bool *));
/* Tentative declaration; the table itself is defined later in the file.  */
191 const struct attribute_spec c4x_attribute_table[];
192 static void c4x_insert_attributes PARAMS ((tree, tree *));
193 static void c4x_asm_named_section PARAMS ((const char *, unsigned int));
194 static int c4x_adjust_cost PARAMS ((rtx, rtx, rtx, int));
195 static void c4x_encode_section_info PARAMS ((tree, int));
197 /* Initialize the GCC target structure. */
/* The TI assembler emits 32-bit words with .word; there are no smaller
   addressable units, so the aligned HI/SI ops are disabled.  */
198 #undef TARGET_ASM_BYTE_OP
199 #define TARGET_ASM_BYTE_OP "\t.word\t"
200 #undef TARGET_ASM_ALIGNED_HI_OP
201 #define TARGET_ASM_ALIGNED_HI_OP NULL
202 #undef TARGET_ASM_ALIGNED_SI_OP
203 #define TARGET_ASM_ALIGNED_SI_OP NULL
205 #undef TARGET_ATTRIBUTE_TABLE
206 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
208 #undef TARGET_INSERT_ATTRIBUTES
209 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
211 #undef TARGET_INIT_BUILTINS
212 #define TARGET_INIT_BUILTINS c4x_init_builtins
214 #undef TARGET_EXPAND_BUILTIN
215 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
217 #undef TARGET_SCHED_ADJUST_COST
218 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
220 #undef TARGET_ENCODE_SECTION_INFO
221 #define TARGET_ENCODE_SECTION_INFO c4x_encode_section_info
/* The single global target hook vector, built from the macros above.  */
223 struct gcc_target targetm = TARGET_INITIALIZER;
225 /* Override command line options.
226 Called once after all options have been parsed.
227 Mostly we process the processor
228 type and sometimes adjust other TARGET_ options. */
/* NOTE(review): several conditional lines selecting the CPU version from
   target flags appear to be elided from this copy of the file.  */
231 c4x_override_options ()
/* Parse -mrpts=N.  atoi yields 0 on malformed input, which disables RPTS.  */
233 if (c4x_rpts_cycles_string)
234 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
/* Map the -m30/-m31/... target flags to a numeric CPU version;
   default to 40 if none given (conditions elided here).  */
239 c4x_cpu_version = 30;
241 c4x_cpu_version = 31;
243 c4x_cpu_version = 32;
245 c4x_cpu_version = 33;
247 c4x_cpu_version = 40;
249 c4x_cpu_version = 44;
251 c4x_cpu_version = 40;
253 /* -mcpu=xx overrides -m40 etc. */
254 if (c4x_cpu_version_string)
256 const char *p = c4x_cpu_version_string;
258 /* Also allow -mcpu=c30 etc. */
259 if (*p == 'c' || *p == 'C')
261 c4x_cpu_version = atoi (p);
/* Re-derive the flag bits from the (possibly overridden) version.  */
264 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
265 C40_FLAG | C44_FLAG);
267 switch (c4x_cpu_version)
269 case 30: target_flags |= C30_FLAG; break;
270 case 31: target_flags |= C31_FLAG; break;
271 case 32: target_flags |= C32_FLAG; break;
272 case 33: target_flags |= C33_FLAG; break;
273 case 40: target_flags |= C40_FLAG; break;
274 case 44: target_flags |= C44_FLAG; break;
/* Unknown version: warn and fall back to the C40.  */
276 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
277 c4x_cpu_version = 40;
278 target_flags |= C40_FLAG;
/* C30..C33 are all members of the C3x family.  */
281 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
282 target_flags |= C3X_FLAG;
284 target_flags &= ~C3X_FLAG;
286 /* Convert foo / 8.0 into foo * 0.125, etc. */
287 set_fast_math_flags (1);
289 /* We should phase out the following at some stage.
290 This provides compatibility with the old -mno-aliases option. */
291 if (! TARGET_ALIASES && ! flag_argument_noalias)
292 flag_argument_noalias = 1;
296 /* This is called before c4x_override_options. */
/* Adjust default optimization flags for this target; LEVEL and SIZE
   (the -O level and -Os flag) are currently unused.  */
299 c4x_optimization_options (level, size)
300 int level ATTRIBUTE_UNUSED;
301 int size ATTRIBUTE_UNUSED;
303 /* Scheduling before register allocation can screw up global
304 register allocation, especially for functions that use MPY||ADD
305 instructions. The benefit we gain we get by scheduling before
306 register allocation is probably marginal anyhow. */
307 flag_schedule_insns = 0;
311 /* Write an ASCII string. */
/* Maximum number of characters emitted per quoted string chunk.  */
313 #define C4X_ASCII_LIMIT 40
/* Emit LEN bytes at PTR to STREAM as assembler .byte directives,
   using quoted strings for printable runs and numeric values otherwise.
   NOTE(review): parameter declarations and parts of the loop body are
   elided in this copy; behavior notes below are from the visible code.  */
316 c4x_output_ascii (stream, ptr, len)
321 char sbuf[C4X_ASCII_LIMIT + 1];
322 int s, l, special, first = 1, onlys;
325 fprintf (stream, "\t.byte\t");
327 for (s = l = 0; len > 0; --len, ++ptr)
331 /* Escape " and \ with a \". */
332 special = *ptr == '\"' || *ptr == '\\';
334 /* If printable - add to buff. */
335 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
340 if (s < C4X_ASCII_LIMIT - 1)
/* Flush the accumulated printable run as a quoted string.  */
355 fprintf (stream, "\"%s\"", sbuf);
/* The TI assembler limits lines to 80 columns; start a new directive.  */
357 if (TARGET_TI && l >= 80 && len > 1)
359 fprintf (stream, "\n\t.byte\t");
/* Non-printable byte: emit its decimal value.  */
377 fprintf (stream, "%d", *ptr);
379 if (TARGET_TI && l >= 80 && len > 1)
381 fprintf (stream, "\n\t.byte\t");
/* Flush any trailing buffered characters.  */
392 fprintf (stream, "\"%s\"", sbuf);
395 fputc ('\n', stream);
/* Return nonzero if hard register REGNO can hold a value of mode MODE.
   Implements HARD_REGNO_MODE_OK for this target.  */
400 c4x_hard_regno_mode_ok (regno, mode)
402 enum machine_mode mode;
407 case Pmode: /* Pointer (24/32 bits). */
409 case QImode: /* Integer (32 bits). */
410 return IS_INT_REGNO (regno);
412 case QFmode: /* Float, Double (32 bits). */
413 case HFmode: /* Long Double (40 bits). */
414 return IS_EXT_REGNO (regno);
416 case CCmode: /* Condition Codes. */
417 case CC_NOOVmode: /* Condition Codes. */
418 return IS_ST_REGNO (regno);
420 case HImode: /* Long Long (64 bits). */
421 /* We need two registers to store long longs. Note that
422 it is much easier to constrain the first register
423 to start on an even boundary. */
424 return IS_INT_REGNO (regno)
425 && IS_INT_REGNO (regno + 1)
429 return 0; /* We don't support these modes. */
435 /* Return non-zero if REGNO1 can be renamed to REGNO2. */
437 c4x_hard_regno_rename_ok (regno1, regno2)
441 /* We can not copy call saved registers from mode QI into QF or from
443 if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
445 if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
447 /* We cannot copy from an extended (40 bit) register to a standard
448 (32 bit) register because we only set the condition codes for
449 extended registers. */
450 if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
452 if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
457 /* The TI C3x C compiler register argument runtime model uses 6 registers,
458 AR2, R2, R3, RC, RS, RE.
460 The first two floating point arguments (float, double, long double)
461 that are found scanning from left to right are assigned to R2 and R3.
463 The remaining integer (char, short, int, long) or pointer arguments
464 are assigned to the remaining registers in the order AR2, R2, R3,
465 RC, RS, RE when scanning left to right, except for the last named
466 argument prior to an ellipsis denoting variable number of
467 arguments. We don't have to worry about the latter condition since
468 function.c treats the last named argument as anonymous (unnamed).
470 All arguments that cannot be passed in registers are pushed onto
471 the stack in reverse order (right to left). GCC handles that for us.
473 c4x_init_cumulative_args() is called at the start, so we can parse
474 the args to see how many floating point arguments and how many
475 integer (or pointer) arguments there are. c4x_function_arg() is
476 then called (sometimes repeatedly) for each argument (parsed left
477 to right) to obtain the register to pass the argument in, or zero
478 if the argument is to be passed on the stack. Once the compiler is
479 happy, c4x_function_arg_advance() is called.
481 Don't use R0 to pass arguments in, we use 0 to indicate a stack
/* Integer-argument registers, indexed by [number of float args][slot];
   float args consume R2/R3 slots, so the rows shift accordingly.  */
484 static const int c4x_int_reglist[3][6] =
486 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
487 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
488 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
/* Floating-point argument registers, in assignment order.  */
491 static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
494 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
495 function whose data type is FNTYPE.
496 For a library call, FNTYPE is 0. */
/* Pre-scans the prototype's argument list to count float vs. integer
   arguments so c4x_function_arg can pick registers later.  Debug
   output to stderr appears to be conditional on a flag elided here.  */
499 c4x_init_cumulative_args (cum, fntype, libname)
500 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
501 tree fntype; /* Tree ptr for function decl. */
502 rtx libname; /* SYMBOL_REF of library name or 0. */
504 tree param, next_param;
506 cum->floats = cum->ints = 0;
513 fprintf (stderr, "\nc4x_init_cumulative_args (");
516 tree ret_type = TREE_TYPE (fntype);
518 fprintf (stderr, "fntype code = %s, ret code = %s",
519 tree_code_name[(int) TREE_CODE (fntype)],
520 tree_code_name[(int) TREE_CODE (ret_type)]);
523 fprintf (stderr, "no fntype");
526 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
/* A prototype exists iff the type has an argument list.  */
529 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
/* Walk the declared parameter types, counting register candidates.  */
531 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
532 param; param = next_param)
536 next_param = TREE_CHAIN (param);
538 type = TREE_VALUE (param);
539 if (type && type != void_type_node)
541 enum machine_mode mode;
543 /* If the last arg doesn't have void type then we have
544 variable arguments. */
548 if ((mode = TYPE_MODE (type)))
550 if (! MUST_PASS_IN_STACK (mode, type))
552 /* Look for float, double, or long double argument. */
553 if (mode == QFmode || mode == HFmode)
555 /* Look for integer, enumeral, boolean, char, or pointer
557 else if (mode == QImode || mode == Pmode)
566 fprintf (stderr, "%s%s, args = %d)\n",
567 cum->prototype ? ", prototype" : "",
568 cum->var ? ", variable args" : "",
573 /* Update the data in CUM to advance over an argument
574 of mode MODE and data type TYPE.
575 (TYPE is null for libcalls where that information may not be available.) */
/* Bumps the float or int counters in CUM depending on the argument's
   mode; the increment statements themselves are elided in this copy.  */
578 c4x_function_arg_advance (cum, mode, type, named)
579 CUMULATIVE_ARGS *cum; /* Current arg information. */
580 enum machine_mode mode; /* Current arg mode. */
581 tree type; /* Type of the arg or 0 if lib support. */
582 int named; /* Whether or not the argument was named. */
585 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
586 GET_MODE_NAME (mode), named);
590 && ! MUST_PASS_IN_STACK (mode, type))
592 /* Look for float, double, or long double argument. */
593 if (mode == QFmode || mode == HFmode)
595 /* Look for integer, enumeral, boolean, char, or pointer argument. */
596 else if (mode == QImode || mode == Pmode)
599 else if (! TARGET_MEMPARM && ! type)
601 /* Handle libcall arguments. */
602 if (mode == QFmode || mode == HFmode)
604 else if (mode == QImode || mode == Pmode)
611 /* Define where to put the arguments to a function. Value is zero to
612 push the argument on the stack, or a hard register in which to
615 MODE is the argument's machine mode.
616 TYPE is the data type of the argument (as a tree).
617 This is null for libcalls where that information may
619 CUM is a variable of type CUMULATIVE_ARGS which gives info about
620 the preceding args and about the function being called.
621 NAMED is nonzero if this argument is a named parameter
622 (otherwise it is an extra parameter matching an ellipsis). */
625 c4x_function_arg (cum, mode, type, named)
626 CUMULATIVE_ARGS *cum; /* Current arg information. */
627 enum machine_mode mode; /* Current arg mode. */
628 tree type; /* Type of the arg or 0 if lib support. */
629 int named; /* != 0 for normal args, == 0 for ... args. */
631 int reg = 0; /* Default to passing argument on stack. */
635 /* We can handle at most 2 floats in R2, R3. */
636 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
638 /* We can handle at most 6 integers minus number of floats passed
640 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
641 6 - cum->maxfloats : cum->ints;
643 /* If there is no prototype, assume all the arguments are integers. */
644 if (! cum->prototype)
647 cum->ints = cum->floats = 0;
651 /* This marks the last argument. We don't need to pass this through
653 if (type == void_type_node)
659 && ! MUST_PASS_IN_STACK (mode, type))
661 /* Look for float, double, or long double argument. */
662 if (mode == QFmode || mode == HFmode)
664 if (cum->floats < cum->maxfloats)
665 reg = c4x_fp_reglist[cum->floats];
667 /* Look for integer, enumeral, boolean, char, or pointer argument. */
668 else if (mode == QImode || mode == Pmode)
670 if (cum->ints < cum->maxints)
671 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
674 else if (! TARGET_MEMPARM && ! type)
676 /* We could use a different argument calling model for libcalls,
677 since we're only calling functions in libgcc. Thus we could
678 pass arguments for long longs in registers rather than on the
679 stack. In the meantime, use the odd TI format. We make the
680 assumption that we won't have more than two floating point
681 args, six integer args, and that all the arguments are of the
683 if (mode == QFmode || mode == HFmode)
684 reg = c4x_fp_reglist[cum->floats];
685 else if (mode == QImode || mode == Pmode)
686 reg = c4x_int_reglist[0][cum->ints];
/* Optional debug trace of the chosen register (or stack).  */
691 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
692 GET_MODE_NAME (mode), named);
694 fprintf (stderr, ", reg=%s", reg_names[reg]);
696 fprintf (stderr, ", stack");
697 fprintf (stderr, ")\n");
/* Nonzero REG means pass in that hard register (path for reg == 0,
   i.e. the stack, is elided in this copy).  */
700 return gen_rtx_REG (mode, reg);
/* Implement va_start: for old-style (non-stdarg) varargs skip the two
   words occupied by the saved return address and frame pointer --
   TODO confirm what the two skipped words hold; args grow downward
   on this target.  */
707 c4x_va_start (stdarg_p, valist, nextarg)
712 nextarg = plus_constant (nextarg, stdarg_p ? 0 : UNITS_PER_WORD * 2);
714 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
718 /* C[34]x arguments grow in weird ways (downwards) that the standard
719 varargs stuff can't handle.. */
/* Implement va_arg: pre-decrement the arg pointer by the argument's
   size (arguments are fetched walking downward) and return the
   resulting address.  */
721 c4x_va_arg (valist, type)
726 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
727 build_int_2 (int_size_in_bytes (type), 0));
728 TREE_SIDE_EFFECTS (t) = 1;
730 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
/* Return nonzero if register REGNO must be saved/restored by an
   interrupt service routine's prologue/epilogue.  */
735 c4x_isr_reg_used_p (regno)
738 /* Don't save/restore FP or ST, we handle them separately. */
739 if (regno == FRAME_POINTER_REGNUM
740 || IS_ST_REGNO (regno))
743 /* We could be a little smarter abut saving/restoring DP.
744 We'll only save if for the big memory model or if
745 we're paranoid. ;-) */
746 if (IS_DP_REGNO (regno))
747 return ! TARGET_SMALL || TARGET_PARANOID;
749 /* Only save/restore regs in leaf function that are used. */
750 if (c4x_leaf_function)
751 return regs_ever_live[regno] && fixed_regs[regno] == 0;
753 /* Only save/restore regs that are used by the ISR and regs
754 that are likely to be used by functions the ISR calls
755 if they are not fixed. */
756 return IS_EXT_REGNO (regno)
757 || ((regs_ever_live[regno] || call_used_regs[regno])
758 && fixed_regs[regno] == 0);
/* Return nonzero if the current function is (or pretends to be) a leaf
   function, i.e. makes no calls.  */
763 c4x_leaf_function_p ()
765 /* A leaf function makes no calls, so we only need
766 to save/restore the registers we actually use.
767 For the global variable leaf_function to be set, we need
768 to define LEAF_REGISTERS and all that it entails.
769 Let's check ourselves... */
771 if (lookup_attribute ("leaf_pretend",
772 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
775 /* Use the leaf_pretend attribute at your own risk. This is a hack
776 to speed up ISRs that call a function infrequently where the
777 overhead of saving and restoring the additional registers is not
778 warranted. You must save and restore the additional registers
779 required by the called function. Caveat emptor. Here's enough
782 if (leaf_function_p ())
/* Return nonzero if the current function carries the "assembler" or
   "naked" attribute, meaning no prologue/epilogue should be emitted.  */
790 c4x_assembler_function_p ()
794 type = TREE_TYPE (current_function_decl);
795 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
796 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
/* Return nonzero if the current function is an interrupt handler,
   either via the "interrupt" attribute or a TI-style "c_intNN" name.  */
801 c4x_interrupt_function_p ()
803 if (lookup_attribute ("interrupt",
804 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
807 /* Look for TI style c_intnn. */
808 return current_function_name[0] == 'c'
809 && current_function_name[1] == '_'
810 && current_function_name[2] == 'i'
811 && current_function_name[3] == 'n'
812 && current_function_name[4] == 't'
813 && ISDIGIT (current_function_name[5])
814 && ISDIGIT (current_function_name[6]);
/* Emit RTL for the function prologue: save registers, establish the
   frame pointer (AR3), and allocate SIZE words of locals on the stack.
   Special-cases assembler/naked functions (no prologue) and interrupt
   handlers (save everything c4x_isr_reg_used_p reports).
   NOTE(review): various conditions and closing braces are elided in
   this copy; structure comments below follow the visible code.  */
818 c4x_expand_prologue ()
821 int size = get_frame_size ();
824 /* In functions where ar3 is not used but frame pointers are still
825 specified, frame pointers are not adjusted (if >= -O2) and this
826 is used so it won't needlessly push the frame pointer. */
829 /* For __assembler__ function don't build a prologue. */
830 if (c4x_assembler_function_p ())
835 /* For __interrupt__ function build specific prologue. */
836 if (c4x_interrupt_function_p ())
838 c4x_leaf_function = c4x_leaf_function_p ();
/* ISR: save status register first, then the frame pointer.  */
840 insn = emit_insn (gen_push_st ());
841 RTX_FRAME_RELATED_P (insn) = 1;
844 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
845 RTX_FRAME_RELATED_P (insn) = 1;
846 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
847 gen_rtx_REG (QImode, SP_REGNO)));
848 RTX_FRAME_RELATED_P (insn) = 1;
849 /* We require that an ISR uses fewer than 32768 words of
850 local variables, otherwise we have to go to lots of
851 effort to save a register, load it with the desired size,
852 adjust the stack pointer, and then restore the modified
853 register. Frankly, I think it is a poor ISR that
854 requires more than 32767 words of local temporary
857 error ("ISR %s requires %d words of local vars, max is 32767",
858 current_function_name, size);
860 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
861 gen_rtx_REG (QImode, SP_REGNO),
863 RTX_FRAME_RELATED_P (insn) = 1;
/* Save every register the ISR may clobber; extended registers get
   both their integer (QI) and float (QF) halves pushed.  */
865 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
867 if (c4x_isr_reg_used_p (regno))
869 if (regno == DP_REGNO)
871 insn = emit_insn (gen_push_dp ());
872 RTX_FRAME_RELATED_P (insn) = 1;
876 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
877 RTX_FRAME_RELATED_P (insn) = 1;
878 if (IS_EXT_REGNO (regno))
880 insn = emit_insn (gen_pushqf
881 (gen_rtx_REG (QFmode, regno)));
882 RTX_FRAME_RELATED_P (insn) = 1;
887 /* We need to clear the repeat mode flag if the ISR is
888 going to use a RPTB instruction or uses the RC, RS, or RE
890 if (regs_ever_live[RC_REGNO]
891 || regs_ever_live[RS_REGNO]
892 || regs_ever_live[RE_REGNO])
894 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
895 RTX_FRAME_RELATED_P (insn) = 1;
898 /* Reload DP reg if we are paranoid about some turkey
899 violating small memory model rules. */
900 if (TARGET_SMALL && TARGET_PARANOID)
902 insn = emit_insn (gen_set_ldp_prologue
903 (gen_rtx_REG (QImode, DP_REGNO),
904 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
905 RTX_FRAME_RELATED_P (insn) = 1;
/* Normal (non-ISR) function: establish the frame pointer if needed.  */
910 if (frame_pointer_needed)
913 || (current_function_args_size != 0)
916 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
917 RTX_FRAME_RELATED_P (insn) = 1;
918 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
919 gen_rtx_REG (QImode, SP_REGNO)));
920 RTX_FRAME_RELATED_P (insn) = 1;
925 /* Since ar3 is not used, we don't need to push it. */
931 /* If we use ar3, we need to push it. */
933 if ((size != 0) || (current_function_args_size != 0))
935 /* If we are omitting the frame pointer, we still have
936 to make space for it so the offsets are correct
937 unless we don't use anything on the stack at all. */
944 /* Local vars are too big, it will take multiple operations
/* Large frame (>= 32768 words): build the size in R1 in pieces,
   then add it to SP.  */
948 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
949 GEN_INT(size >> 16)));
950 RTX_FRAME_RELATED_P (insn) = 1;
951 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
952 gen_rtx_REG (QImode, R1_REGNO),
954 RTX_FRAME_RELATED_P (insn) = 1;
958 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
959 GEN_INT(size & ~0xffff)));
960 RTX_FRAME_RELATED_P (insn) = 1;
962 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
963 gen_rtx_REG (QImode, R1_REGNO),
964 GEN_INT(size & 0xffff)));
965 RTX_FRAME_RELATED_P (insn) = 1;
966 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
967 gen_rtx_REG (QImode, SP_REGNO),
968 gen_rtx_REG (QImode, R1_REGNO)));
969 RTX_FRAME_RELATED_P (insn) = 1;
973 /* Local vars take up less than 32767 words, so we can directly
975 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
976 gen_rtx_REG (QImode, SP_REGNO),
978 RTX_FRAME_RELATED_P (insn) = 1;
/* Save call-saved registers that this function uses.  Float
   call-saved registers are pushed as QF (and also as QI when
   -mpreserve-float, to keep all 40 bits).  */
981 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
983 if (regs_ever_live[regno] && ! call_used_regs[regno])
985 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
987 if (TARGET_PRESERVE_FLOAT)
989 insn = emit_insn (gen_pushqi
990 (gen_rtx_REG (QImode, regno)));
991 RTX_FRAME_RELATED_P (insn) = 1;
993 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
994 RTX_FRAME_RELATED_P (insn) = 1;
996 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
998 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
999 RTX_FRAME_RELATED_P (insn) = 1;
/* Emit RTL for the function epilogue: deallocate locals, restore saved
   registers (mirror image of c4x_expand_prologue), and emit the return.
   NOTE(review): conditions and braces are elided in this copy; the
   comments below annotate only the visible code.  */
1008 c4x_expand_epilogue()
1014 int size = get_frame_size ();
1016 /* For __assembler__ function build no epilogue. */
1017 if (c4x_assembler_function_p ())
1019 insn = emit_jump_insn (gen_return_from_epilogue ());
1020 RTX_FRAME_RELATED_P (insn) = 1;
1024 /* For __interrupt__ function build specific epilogue. */
1025 if (c4x_interrupt_function_p ())
/* Pop the ISR-saved registers in reverse order of the prologue.  */
1027 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1029 if (! c4x_isr_reg_used_p (regno))
1031 if (regno == DP_REGNO)
1033 insn = emit_insn (gen_pop_dp ());
1034 RTX_FRAME_RELATED_P (insn) = 1;
1038 /* We have to use unspec because the compiler will delete insns
1039 that are not call-saved. */
1040 if (IS_EXT_REGNO (regno))
1042 insn = emit_insn (gen_popqf_unspec
1043 (gen_rtx_REG (QFmode, regno)));
1044 RTX_FRAME_RELATED_P (insn) = 1;
1046 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1047 RTX_FRAME_RELATED_P (insn) = 1;
/* Deallocate ISR locals, restore the frame pointer and ST, and
   return from interrupt.  */
1052 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1053 gen_rtx_REG (QImode, SP_REGNO),
1055 RTX_FRAME_RELATED_P (insn) = 1;
1056 insn = emit_insn (gen_popqi
1057 (gen_rtx_REG (QImode, AR3_REGNO)));
1058 RTX_FRAME_RELATED_P (insn) = 1;
1060 insn = emit_insn (gen_pop_st ());
1061 RTX_FRAME_RELATED_P (insn) = 1;
1062 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1063 RTX_FRAME_RELATED_P (insn) = 1;
1067 if (frame_pointer_needed)
1070 || (current_function_args_size != 0)
/* Fetch the return address into R2 before tearing down the frame.  */
1074 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1075 gen_rtx_MEM (QImode,
1077 (QImode, gen_rtx_REG (QImode,
1080 RTX_FRAME_RELATED_P (insn) = 1;
1082 /* We already have the return value and the fp,
1083 so we need to add those to the stack. */
1090 /* Since ar3 is not used for anything, we don't need to
1097 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1098 if (size || current_function_args_size)
1100 /* If we are ommitting the frame pointer, we still have
1101 to make space for it so the offsets are correct
1102 unless we don't use anything on the stack at all. */
1107 /* Now restore the saved registers, putting in the delayed branch
/* Restore call-saved registers in reverse order; floats via QF pops
   (plus a QI pop under -mpreserve-float, mirroring the prologue).  */
1109 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1111 if (regs_ever_live[regno] && ! call_used_regs[regno])
1113 if (regno == AR3_REGNO && dont_pop_ar3)
1116 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1118 insn = emit_insn (gen_popqf_unspec
1119 (gen_rtx_REG (QFmode, regno)));
1120 RTX_FRAME_RELATED_P (insn) = 1;
1121 if (TARGET_PRESERVE_FLOAT)
1123 insn = emit_insn (gen_popqi_unspec
1124 (gen_rtx_REG (QImode, regno)));
1125 RTX_FRAME_RELATED_P (insn) = 1;
1130 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1131 RTX_FRAME_RELATED_P (insn) = 1;
1136 if (frame_pointer_needed)
1139 || (current_function_args_size != 0)
1142 /* Restore the old FP. */
1145 (gen_rtx_REG (QImode, AR3_REGNO),
1146 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1148 RTX_FRAME_RELATED_P (insn) = 1;
1154 /* Local vars are too big, it will take multiple operations
/* Large frame: rebuild the size in R3 piecewise, then subtract
   from SP (mirror of the prologue's R1 sequence).  */
1158 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1159 GEN_INT(size >> 16)));
1160 RTX_FRAME_RELATED_P (insn) = 1;
1161 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1162 gen_rtx_REG (QImode, R3_REGNO),
1164 RTX_FRAME_RELATED_P (insn) = 1;
1168 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1169 GEN_INT(size & ~0xffff)));
1170 RTX_FRAME_RELATED_P (insn) = 1;
1172 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1173 gen_rtx_REG (QImode, R3_REGNO),
1174 GEN_INT(size & 0xffff)));
1175 RTX_FRAME_RELATED_P (insn) = 1;
1176 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1177 gen_rtx_REG (QImode, SP_REGNO),
1178 gen_rtx_REG (QImode, R3_REGNO)));
1179 RTX_FRAME_RELATED_P (insn) = 1;
1183 /* Local vars take up less than 32768 words, so we can directly
1184 subtract the number. */
1185 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1186 gen_rtx_REG (QImode, SP_REGNO),
1188 RTX_FRAME_RELATED_P (insn) = 1;
/* Return indirectly through R2 (holds the saved return address) when
   the frame was torn down; otherwise a normal return.  */
1193 insn = emit_jump_insn (gen_return_indirect_internal
1194 (gen_rtx_REG (QImode, R2_REGNO)));
1195 RTX_FRAME_RELATED_P (insn) = 1;
1199 insn = emit_jump_insn (gen_return_from_epilogue ());
1200 RTX_FRAME_RELATED_P (insn) = 1;
/* Return nonzero if the current function needs no epilogue at all:
   after reload, not an assembler/naked or interrupt function, no
   alloca, no incoming args on the stack, no frame, and no call-saved
   registers (other than AR3) to restore.  */
1207 c4x_null_epilogue_p ()
1211 if (reload_completed
1212 && ! c4x_assembler_function_p ()
1213 && ! c4x_interrupt_function_p ()
1214 && ! current_function_calls_alloca
1215 && ! current_function_args_size
1217 && ! get_frame_size ())
1219 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1220 if (regs_ever_live[regno] && ! call_used_regs[regno]
1221 && (regno != AR3_REGNO))
/* Expand a move of OPERANDS[1] into OPERANDS[0] in mode MODE, rewriting
   operands that the machine cannot handle directly (symbolic addresses,
   large constants, mixed-mode subregs) before emitting the final move
   pattern.  Returns through the elided tail (not visible here).  */
1230 c4x_emit_move_sequence (operands, mode)
1232 enum machine_mode mode;
1234 rtx op0 = operands[0];
1235 rtx op1 = operands[1];
/* Before reload, force awkward source operands into a register.  */
1237 if (! reload_in_progress
1240 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1241 op1 = force_reg (mode, op1);
1243 if (GET_CODE (op1) == LO_SUM
1244 && GET_MODE (op1) == Pmode
1245 && dp_reg_operand (XEXP (op1, 0), mode))
1247 /* expand_increment will sometimes create a LO_SUM immediate
1249 op1 = XEXP (op1, 1);
1251 else if (symbolic_address_operand (op1, mode))
1253 if (TARGET_LOAD_ADDRESS)
1255 /* Alias analysis seems to do a better job if we force
1256 constant addresses to memory after reload. */
1257 emit_insn (gen_load_immed_address (op0, op1));
1262 /* Stick symbol or label address into the constant pool. */
1263 op1 = force_const_mem (Pmode, op1);
1266 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1268 /* We could be a lot smarter about loading some of these
1270 op1 = force_const_mem (mode, op1);
1273 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1274 and emit associated (HIGH (SYMREF)) if large memory model.
1275 c4x_legitimize_address could be used to do this,
1276 perhaps by calling validize_address. */
1277 if (TARGET_EXPOSE_LDP
1278 && ! (reload_in_progress || reload_completed)
1279 && GET_CODE (op1) == MEM
1280 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1282 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1284 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1285 op1 = change_address (op1, mode,
1286 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
/* Same LDP exposure for a symbolic destination address.  */
1289 if (TARGET_EXPOSE_LDP
1290 && ! (reload_in_progress || reload_completed)
1291 && GET_CODE (op0) == MEM
1292 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1294 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1296 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1297 op0 = change_address (op0, mode,
1298 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
/* Mixed int/float subreg destination: strip or retype the subreg and
   use a CC-clobbering store pattern after reload.  */
1301 if (GET_CODE (op0) == SUBREG
1302 && mixed_subreg_operand (op0, mode))
1304 /* We should only generate these mixed mode patterns
1305 during RTL generation. If we need do it later on
1306 then we'll have to emit patterns that won't clobber CC. */
1307 if (reload_in_progress || reload_completed)
1309 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1310 op0 = SUBREG_REG (op0);
1311 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1313 op0 = copy_rtx (op0);
1314 PUT_MODE (op0, QImode);
1320 emit_insn (gen_storeqf_int_clobber (op0, op1));
/* Mirror case for a mixed int/float subreg source.  */
1326 if (GET_CODE (op1) == SUBREG
1327 && mixed_subreg_operand (op1, mode))
1329 /* We should only generate these mixed mode patterns
1330 during RTL generation. If we need do it later on
1331 then we'll have to emit patterns that won't clobber CC. */
1332 if (reload_in_progress || reload_completed)
1334 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1335 op1 = SUBREG_REG (op1);
1336 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1338 op1 = copy_rtx (op1)
1339 PUT_MODE (op1, QImode);
1345 emit_insn (gen_loadqf_int_clobber (op0, op1));
/* Constants too wide for a 16-bit immediate need a two-insn load.  */
1352 && reg_operand (op0, mode)
1353 && const_int_operand (op1, mode)
1354 && ! IS_INT16_CONST (INTVAL (op1))
1355 && ! IS_HIGH_CONST (INTVAL (op1))
1357 emit_insn (gen_loadqi_big_constant (op0, op1));
1362 && reg_operand (op0, mode)
1363 && const_int_operand (op1, mode))
1365 emit_insn (gen_loadhi_big_constant (op0, op1));
1369 /* Adjust operands in case we have modified them. */
1373 /* Emit normal pattern. */
/* Emit a library call to LIBCALL with NOPERANDS operands, producing the
   result in OPERANDS[0] (destination mode DMODE, source mode SMODE) and
   attaching an equivalent rtx built with CODE for later CSE via
   emit_libcall_block.  NOTE(review): listing elided; the switch on
   noperands and surrounding braces are not visible here.  */
1379 c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1382 enum machine_mode dmode;
1383 enum machine_mode smode;
/* Unary case: one source operand.  */
1395 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1396 operands[1], smode);
1397 equiv = gen_rtx (code, dmode, operands[1]);
/* Binary case: two source operands.  */
1401 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1402 operands[1], smode, operands[2], smode);
1403 equiv = gen_rtx (code, dmode, operands[1], operands[2]);
1410 insns = get_insns ();
1412 emit_libcall_block (insns, operands[0], ret, equiv);
/* Convenience wrapper: emit a three-operand library call where source
   and destination share the same machine mode MODE.  */
1417 c4x_emit_libcall3 (libcall, code, mode, operands)
1420 enum machine_mode mode;
1423 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
/* Emit a library call for a widening multiply-high: the equivalent rtx
   records (truncate (lshiftrt (mult (code op1) (code op2)) ...)) in
   HImode so CSE can recognize the value.  NOTE(review): listing elided;
   the shift count operand of the LSHIFTRT is not visible here.  */
1428 c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1431 enum machine_mode mode;
1439 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1440 operands[1], mode, operands[2], mode);
1441 equiv = gen_rtx_TRUNCATE (mode,
1442 gen_rtx_LSHIFTRT (HImode,
1443 gen_rtx_MULT (HImode,
1444 gen_rtx (code, HImode, operands[1]),
1445 gen_rtx (code, HImode, operands[2])),
1447 insns = get_insns ();
1449 emit_libcall_block (insns, operands[0], ret, equiv);
1453 /* Set the SYMBOL_REF_FLAG for a function decl. However, we do not
1454 yet use this info. */
/* Target hook: mark the SYMBOL_REF of a FUNCTION_DECL by setting
   SYMBOL_REF_FLAG (used to distinguish function symbols, e.g. by the
   T constraint).  FIRST is unused.  */
1457 c4x_encode_section_info (decl, first)
1459 int first ATTRIBUTE_UNUSED;
1461 if (TREE_CODE (decl) == FUNCTION_DECL)
1462 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
/* Return nonzero if ADDR is a legitimate address for MODE.  Decomposes
   ADDR into base register, index register, and displacement, then
   validates each part; STRICT selects hard-register checking (post
   reload) versus allowing pseudos.  Backs GO_IF_LEGITIMATE_ADDRESS.
   NOTE(review): listing elided -- several case labels and returns are
   missing between the numbered lines.  */
1467 c4x_check_legit_addr (mode, addr, strict)
1468 enum machine_mode mode;
1472 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1473 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1474 rtx disp = NULL_RTX; /* Displacement. */
1477 code = GET_CODE (addr);
1480 /* Register indirect with auto increment/decrement. We don't
1481 allow SP here---push_operand should recognise an operand
1482 being pushed on the stack. */
/* Auto-inc/dec only supported for single-word QImode/QFmode.  */
1487 if (mode != QImode && mode != QFmode)
1491 base = XEXP (addr, 0);
1499 rtx op0 = XEXP (addr, 0);
1500 rtx op1 = XEXP (addr, 1);
1502 if (mode != QImode && mode != QFmode)
1506 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1508 base = XEXP (op1, 0);
1511 if (REG_P (XEXP (op1, 1)))
1512 indx = XEXP (op1, 1);
1514 disp = XEXP (op1, 1);
1518 /* Register indirect. */
1523 /* Register indirect with displacement or index. */
1526 rtx op0 = XEXP (addr, 0);
1527 rtx op1 = XEXP (addr, 1);
1528 enum rtx_code code0 = GET_CODE (op0);
1535 base = op0; /* Base + index. */
1537 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1545 base = op0; /* Base + displacement. */
1556 /* Direct addressing with DP register. */
1559 rtx op0 = XEXP (addr, 0);
1560 rtx op1 = XEXP (addr, 1);
1562 /* HImode and HFmode direct memory references aren't truly
1563 offsettable (consider case at end of data page). We
1564 probably get better code by loading a pointer and using an
1565 indirect memory reference. */
1566 if (mode == HImode || mode == HFmode)
1569 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1572 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1575 if (GET_CODE (op1) == CONST)
1581 /* Direct addressing with some work for the assembler... */
1583 /* Direct addressing. */
1586 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1588 /* These need to be converted to a LO_SUM (...).
1589 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1592 /* Do not allow direct memory access to absolute addresses.
1593 This is more pain than it's worth, especially for the
1594 small memory model where we can't guarantee that
1595 this address is within the data page---we don't want
1596 to modify the DP register in the small memory model,
1597 even temporarily, since an interrupt can sneak in.... */
1601 /* Indirect indirect addressing. */
1606 fatal_insn ("using CONST_DOUBLE for address", addr);
1612 /* Validate the base register. */
1615 /* Check that the address is offsettable for HImode and HFmode. */
1616 if (indx && (mode == HImode || mode == HFmode))
1619 /* Handle DP based stuff. */
1620 if (REGNO (base) == DP_REGNO)
1622 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1624 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1628 /* Now validate the index register. */
1631 if (GET_CODE (indx) != REG)
1633 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1635 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1639 /* Validate displacement. */
1642 if (GET_CODE (disp) != CONST_INT)
1644 if (mode == HImode || mode == HFmode)
1646 /* The offset displacement must be legitimate. */
1647 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1652 if (! IS_DISP8_CONST (INTVAL (disp)))
1655 /* Can't add an index with a disp. */
/* Attempt to turn ORIG into a legitimate address for MODE.  Symbolic
   addresses are forced into a register for HImode/HFmode (to make them
   offsettable) or rewritten as a DP-relative LO_SUM after emitting the
   LDP setup insn.  NOTE(review): listing elided; the return for the
   register-forcing path and the fall-through return are not visible.  */
1664 c4x_legitimize_address (orig, mode)
1665 rtx orig ATTRIBUTE_UNUSED;
1666 enum machine_mode mode ATTRIBUTE_UNUSED;
1668 if (GET_CODE (orig) == SYMBOL_REF
1669 || GET_CODE (orig) == LABEL_REF)
1671 if (mode == HImode || mode == HFmode)
1673 /* We need to force the address into
1674 a register so that it is offsettable. */
1675 rtx addr_reg = gen_reg_rtx (Pmode);
1676 emit_move_insn (addr_reg, orig);
1681 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1684 emit_insn (gen_set_ldp (dp_reg, orig));
1686 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1694 /* Provide the costs of an addressing mode that contains ADDR.
1695 If ADDR is not a valid address, its cost is irrelevant.
1696 This is used in cse and loop optimisation to determine
1697 if it is worthwhile storing a common address into a register.
1698 Unfortunately, the C4x address cost depends on other operands. */
/* Return a relative cost estimate for address ADDR (see block comment
   above).  Cheaper costs encourage cse/loop to keep the address form;
   REG+REG is deliberately costed above plain REG so auto-increment
   modes remain attractive.  NOTE(review): listing elided; most return
   statements and case labels are not visible.  */
1701 c4x_address_cost (addr)
1704 switch (GET_CODE (addr))
1715 /* These shouldn't be directly generated. */
1723 rtx op1 = XEXP (addr, 1);
1725 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1726 return TARGET_SMALL ? 3 : 4;
1728 if (GET_CODE (op1) == CONST)
1730 rtx offset = const0_rtx;
1732 op1 = eliminate_constant_term (op1, &offset);
1734 /* ??? These costs need rethinking... */
1735 if (GET_CODE (op1) == LABEL_REF)
1738 if (GET_CODE (op1) != SYMBOL_REF)
1741 if (INTVAL (offset) == 0)
1746 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1752 register rtx op0 = XEXP (addr, 0);
1753 register rtx op1 = XEXP (addr, 1);
1755 if (GET_CODE (op0) != REG)
1758 switch (GET_CODE (op1))
1764 /* This cost for REG+REG must be greater than the cost
1765 for REG if we want autoincrement addressing modes. */
1769 /* The following tries to improve GIV combination
1770 in strength reduce but appears not to help. */
1771 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1774 if (IS_DISP1_CONST (INTVAL (op1)))
1777 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
/* Emit a compare of X and Y for condition CODE and return the status
   (ST) register holding the result, in the CC mode chosen by
   SELECT_CC_MODE.  NOTE(review): listing elided; the early-return for
   the unsupported CC_NOOVmode signed comparisons is not visible.  */
1792 c4x_gen_compare_reg (code, x, y)
1796 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1799 if (mode == CC_NOOVmode
1800 && (code == LE || code == GE || code == LT || code == GT))
1803 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1804 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1805 gen_rtx_COMPARE (mode, x, y)));
/* Build the assembler template for a conditional branch in a static
   buffer, decorating FORM according to whether the delay slot insn SEQ
   is delayed, annulled-true, or annulled-false.  NOTE(review): heavily
   elided; only the classification of the delay slot is visible here.  */
1810 c4x_output_cbranch (form, seq)
1819 static char str[100];
1823 delay = XVECEXP (final_sequence, 0, 1);
1824 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1825 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1826 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1829 cp = &str [strlen (str)];
/* Print operand OP to FILE for the assembler, honouring the %LETTER
   modifier (e.g. %A direct-address prefix, %I reversed condition,
   %K/%M emit an ldp/ldpk when a direct address needs the DP page
   loaded, %O the word-offset address, condition mnemonics, etc.).
   NOTE(review): listing elided; case labels, breaks, and the default
   operand-printing dispatch are partly missing.  */
1854 c4x_print_operand (file, op, letter)
1855 FILE *file; /* File to write to. */
1856 rtx op; /* Operand to print. */
1857 int letter; /* %<letter> or 0. */
1864 case '#': /* Delayed. */
1866 asm_fprintf (file, "d");
1870 code = GET_CODE (op);
1873 case 'A': /* Direct address. */
1874 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1875 asm_fprintf (file, "@");
1878 case 'H': /* Sethi. */
1879 output_addr_const (file, op);
1882 case 'I': /* Reversed condition. */
1883 code = reverse_condition (code);
1886 case 'L': /* Log 2 of constant. */
1887 if (code != CONST_INT)
1888 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1889 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1892 case 'N': /* Ones complement of small constant. */
1893 if (code != CONST_INT)
1894 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1895 fprintf (file, "%d", ~INTVAL (op));
1898 case 'K': /* Generate ldp(k) if direct address. */
1901 && GET_CODE (XEXP (op, 0)) == LO_SUM
1902 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1903 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1905 op1 = XEXP (XEXP (op, 0), 1);
1906 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1908 asm_fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1909 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1910 asm_fprintf (file, "\n");
1915 case 'M': /* Generate ldp(k) if direct address. */
1916 if (! TARGET_SMALL /* Only used in asm statements. */
1918 && (GET_CODE (XEXP (op, 0)) == CONST
1919 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1921 asm_fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1922 output_address (XEXP (op, 0));
1923 asm_fprintf (file, "\n\t");
1927 case 'O': /* Offset address. */
1928 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1930 else if (code == MEM)
1931 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1932 else if (code == REG)
1933 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1935 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1938 case 'C': /* Call. */
1941 case 'U': /* Call/callu. */
1942 if (code != SYMBOL_REF)
1943 asm_fprintf (file, "u");
/* Default printing of the operand: registers pick the float or
   integer name table depending on mode class.  */
1953 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1955 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1957 fprintf (file, "%s", reg_names[REGNO (op)]);
1961 output_address (XEXP (op, 0));
1969 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
1970 REAL_VALUE_TO_DECIMAL (r, "%20f", str);
1971 fprintf (file, "%s", str);
1976 fprintf (file, "%d", INTVAL (op));
1980 asm_fprintf (file, "ne");
1984 asm_fprintf (file, "eq");
1988 asm_fprintf (file, "ge");
1992 asm_fprintf (file, "gt");
1996 asm_fprintf (file, "le");
2000 asm_fprintf (file, "lt");
2004 asm_fprintf (file, "hs");
2008 asm_fprintf (file, "hi");
2012 asm_fprintf (file, "ls");
2016 asm_fprintf (file, "lo");
2020 output_addr_const (file, op);
2024 output_addr_const (file, XEXP (op, 0));
2031 fatal_insn ("c4x_print_operand: Bad operand case", op);
/* Print memory address ADDR to FILE in C4x assembler syntax:
   *ARn, *++ARn / *ARn++ auto-inc forms, *+ARn(disp)/*-ARn(disp)
   displaced forms, *+ARn(IRm) indexed forms, and @symbol for direct
   (DP-relative) references.  NOTE(review): listing elided; case labels
   and breaks are partly missing between the numbered lines.  */
2038 c4x_print_operand_address (file, addr)
2042 switch (GET_CODE (addr))
2045 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2049 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2053 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
/* POST_MODIFY: operate then step the base by a register or constant.  */
2058 rtx op0 = XEXP (XEXP (addr, 1), 0);
2059 rtx op1 = XEXP (XEXP (addr, 1), 1);
2061 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2062 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2063 reg_names[REGNO (op1)]);
2064 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2065 fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
2067 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2068 fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
2070 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2071 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2072 reg_names[REGNO (op1)]);
2074 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
/* PRE_MODIFY: step the base first, then use it.  */
2080 rtx op0 = XEXP (XEXP (addr, 1), 0);
2081 rtx op1 = XEXP (XEXP (addr, 1), 1);
2083 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2084 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2085 reg_names[REGNO (op1)]);
2086 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2087 fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
2089 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2090 fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
2092 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2093 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2094 reg_names[REGNO (op1)]);
2096 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2101 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2105 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2108 case PLUS: /* Indirect with displacement. */
2110 rtx op0 = XEXP (addr, 0);
2111 rtx op1 = XEXP (addr, 1);
2117 if (IS_INDEX_REG (op0))
2119 fprintf (file, "*+%s(%s)",
2120 reg_names[REGNO (op1)],
2121 reg_names[REGNO (op0)]); /* Index + base. */
2125 fprintf (file, "*+%s(%s)",
2126 reg_names[REGNO (op0)],
2127 reg_names[REGNO (op1)]); /* Base + index. */
2130 else if (INTVAL (op1) < 0)
2132 fprintf (file, "*-%s(%d)",
2133 reg_names[REGNO (op0)],
2134 -INTVAL (op1)); /* Base - displacement. */
2138 fprintf (file, "*+%s(%d)",
2139 reg_names[REGNO (op0)],
2140 INTVAL (op1)); /* Base + displacement. */
2144 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
/* LO_SUM: DP-relative direct address; print only the symbolic part.  */
2150 rtx op0 = XEXP (addr, 0);
2151 rtx op1 = XEXP (addr, 1);
2153 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2154 c4x_print_operand_address (file, op1);
2156 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2163 fprintf (file, "@");
2164 output_addr_const (file, addr);
2167 /* We shouldn't access CONST_INT addresses. */
2171 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2177 /* Return nonzero if the floating point operand will fit
2178 in the immediate field. */
/* Return nonzero if CONST_DOUBLE OP fits the C4x short floating-point
   immediate field: 4-bit exponent (-7..7, with -128 reserved for zero)
   and limited mantissa precision.  */
2181 c4x_immed_float_p (op)
2188 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2189 if (GET_MODE (op) == HFmode)
2190 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2193 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2197 /* Sign extend exponent. */
2198 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2199 if (exponent == -128)
/* Low mantissa bits must be zero or the value would be truncated.  */
2201 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2202 return 0; /* Precision doesn't fit. */
2203 return (exponent <= 7) /* Positive exp. */
2204 && (exponent >= -7); /* Negative exp. */
2208 /* The last instruction in a repeat block cannot be a Bcond, DBcond,
2209 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2211 None of the last four instructions from the bottom of the block can
2212 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2213 BcondAT or RETIcondD.
2215 This routine scans the four previous insns for a jump insn, and if
2216 one is found, returns 1 so that we bung in a nop instruction.
2217 This simple minded strategy will add a nop, when it may not
2218 be required. Say when there is a JUMP_INSN near the end of the
2219 block that doesn't get converted into a delayed branch.
2221 Note that we cannot have a call insn, since we don't generate
2222 repeat loops with calls in them (although I suppose we could, but
2223 there's no benefit.)
2225 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
/* Return nonzero if a NOP must be emitted before the rptb_end insn INSN
   (see block comment above): scans the four preceding real insns for a
   jump, and also requires a NOP when a label lands at the very end of
   the loop.  NOTE(review): listing elided; the returns and loop braces
   are partly missing.  */
2228 c4x_rptb_nop_p (insn)
2234 /* Extract the start label from the jump pattern (rptb_end). */
2235 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2237 /* If there is a label at the end of the loop we must insert
2240 insn = previous_insn (insn);
2241 } while (GET_CODE (insn) == NOTE
2242 || GET_CODE (insn) == USE
2243 || GET_CODE (insn) == CLOBBER);
2244 if (GET_CODE (insn) == CODE_LABEL)
2247 for (i = 0; i < 4; i++)
2249 /* Search back for prev non-note and non-label insn. */
2250 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2251 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2253 if (insn == start_label)
2256 insn = previous_insn (insn);
2259 /* If we have a jump instruction we should insert a NOP. If we
2260 hit repeat block top we should only insert a NOP if the loop
2262 if (GET_CODE (insn) == JUMP_INSN)
2264 insn = previous_insn (insn);
2270 /* The C4x looping instruction needs to be emitted at the top of the
2271 loop. Emitting the true RTL for a looping instruction at the top of
2272 the loop can cause problems with flow analysis. So instead, a dummy
2273 doloop insn is emitted at the end of the loop. This routine checks
2274 for the presence of this doloop insn and then searches back to the
2275 top of the loop, where it inserts the true looping insn (provided
2276 there are no instructions in the loop which would cause problems).
2277 Any additional labels can be emitted at this point. In addition, if
2278 the desired loop count register was not allocated, this routine does
2281 Before we can create a repeat block looping instruction we have to
2282 verify that there are no jumps outside the loop and no jumps outside
2283 the loop go into this loop. This can happen in the basic blocks reorder
2284 pass. The C4x cpu can not handle this. */
/* Recursively walk rtx X and return nonzero if it contains a LABEL_REF
   to CODE_LABEL (compared by INSN_UID).  Used to detect jumps into or
   out of a candidate repeat block.  */
2287 c4x_label_ref_used_p (x, code_label)
2297 code = GET_CODE (x);
2298 if (code == LABEL_REF)
2299 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
/* Walk sub-expressions ('e') and vectors ('E') of this rtx.  */
2301 fmt = GET_RTX_FORMAT (code);
2302 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2306 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2309 else if (fmt[i] == 'E')
2310 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2311 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
/* Return nonzero if the loop ending at the rptb_end insn INSN and
   starting at START_LABEL may use a hardware RPTB/RPTS instruction:
   no jump may enter the block from outside and none may leave it
   (the basic-block reorder pass can create such jumps, which the
   C4x repeat hardware cannot handle).  NOTE(review): listing elided;
   returns and brace lines are partly missing.  */
2319 c4x_rptb_valid_p (insn, start_label)
2320 rtx insn, start_label;
2326 /* Find the start label. */
2327 for (; insn; insn = PREV_INSN (insn))
2328 if (insn == start_label)
2331 /* Note found then we can not use a rptb or rpts. The label was
2332 probably moved by the basic block reorder pass. */
2337 /* If any jump jumps inside this block then we must fail. */
2338 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2340 if (GET_CODE (insn) == CODE_LABEL)
2342 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2343 if (GET_CODE (tmp) == JUMP_INSN
2344 && c4x_label_ref_used_p (tmp, insn))
2348 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2350 if (GET_CODE (insn) == CODE_LABEL)
2352 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2353 if (GET_CODE (tmp) == JUMP_INSN
2354 && c4x_label_ref_used_p (tmp, insn))
2358 /* If any jump jumps outside this block then we must fail. */
2359 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2361 if (GET_CODE (insn) == CODE_LABEL)
2363 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2364 if (GET_CODE (tmp) == JUMP_INSN
2365 && c4x_label_ref_used_p (tmp, insn))
2367 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2368 if (GET_CODE (tmp) == JUMP_INSN
2369 && c4x_label_ref_used_p (tmp, insn))
2374 /* All checks OK. */
/* Given the dummy rptb_end jump insn INSN at the bottom of a loop,
   insert the real looping insn (rptb_top or rpts_top) at the loop top,
   or fall back to an explicit decrement-compare-branch when RC was not
   allocated or the block is invalid for hardware repeat.
   NOTE(review): listing elided; early returns and braces are partly
   missing between the numbered lines.  */
2380 c4x_rptb_insert (insn)
2385 rtx new_start_label;
2388 /* If the count register has not been allocated to RC, say if
2389 there is a movstr pattern in the loop, then do not insert a
2390 RPTB instruction. Instead we emit a decrement and branch
2391 at the end of the loop. */
2392 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2393 if (REGNO (count_reg) != RC_REGNO)
2396 /* Extract the start label from the jump pattern (rptb_end). */
2397 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2399 if (! c4x_rptb_valid_p (insn, start_label))
2401 /* We can not use the rptb insn. Replace it so reorg can use
2402 the delay slots of the jump insn. */
2403 emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
2404 emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
2405 emit_insn_before (gen_bge (start_label), insn);
2406 LABEL_NUSES (start_label)++;
/* Hardware repeat: create fresh start/end labels around the body.  */
2411 end_label = gen_label_rtx ();
2412 LABEL_NUSES (end_label)++;
2413 emit_label_after (end_label, insn);
2415 new_start_label = gen_label_rtx ();
2416 LABEL_NUSES (new_start_label)++;
/* Walk back to the old start label, retargeting any jump to it.  */
2418 for (; insn; insn = PREV_INSN (insn))
2420 if (insn == start_label)
2422 if (GET_CODE (insn) == JUMP_INSN &&
2423 JUMP_LABEL (insn) == start_label)
2424 redirect_jump (insn, new_start_label, 0);
2427 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2429 emit_label_after (new_start_label, insn);
2431 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2432 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2434 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2435 if (LABEL_NUSES (start_label) == 0)
2436 delete_insn (start_label);
2440 /* This function is a C4x special called immediately before delayed
2441 branch scheduling. We fix up RTPB style loops that didn't get RC
2442 allocated as the loop counter. */
/* Machine-dependent pass run after reload, before delayed-branch
   scheduling: materialize RPTB loops for rptb_end insns and split all
   insns so force_const_mem works for load_immed_address.  FIRST is the
   first insn of the function.  NOTE(review): listing elided; loop
   braces and the declaration of 'old' are not visible.  */
2445 c4x_process_after_reload (first)
2450 for (insn = first; insn; insn = NEXT_INSN (insn))
2452 /* Look for insn. */
2455 int insn_code_number;
2458 insn_code_number = recog_memoized (insn);
2460 if (insn_code_number < 0)
2463 /* Insert the RTX for RPTB at the top of the loop
2464 and a label at the end of the loop. */
2465 if (insn_code_number == CODE_FOR_rptb_end)
2466 c4x_rptb_insert(insn);
2468 /* We need to split the insn here. Otherwise the calls to
2469 force_const_mem will not work for load_immed_address. */
2472 /* Don't split the insn if it has been deleted. */
2473 if (! INSN_DELETED_P (old))
2474 insn = try_split (PATTERN(old), old, 1);
2476 /* When not optimizing, the old insn will be still left around
2477 with only the 'deleted' bit set. Transform it into a note
2478 to avoid confusion of subsequent processing. */
2479 if (INSN_DELETED_P (old))
2481 PUT_CODE (old, NOTE);
2482 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2483 NOTE_SOURCE_FILE (old) = 0;
2494 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2502 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
/* Return nonzero if OP is a CONST_INT usable as an integer immediate
   (VOIDmode or an integer-class mode).  */
2507 c4x_immed_int_constant (op)
2510 if (GET_CODE (op) != CONST_INT)
2513 return GET_MODE (op) == VOIDmode
2514 || GET_MODE_CLASS (op) == MODE_INT
2515 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
/* Return nonzero if OP is a CONST_DOUBLE in QFmode or HFmode, i.e. a
   candidate floating-point immediate.  */
2520 c4x_immed_float_constant (op)
2523 if (GET_CODE (op) != CONST_DOUBLE)
2526 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2527 present this only means that a MEM rtx has been generated. It does
2528 not mean the rtx is really in memory. */
2530 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
/* Look for a shift count (0..15) such that INTVAL (OP) shifted right by
   it becomes a 16-bit signed constant, so the value can be loaded with
   a short immediate plus a shift.  NOTE(review): listing elided; the
   returns inside and after the loop are not visible.  */
2535 c4x_shiftable_constant (op)
2540 int val = INTVAL (op);
2542 for (i = 0; i < 16; i++)
2547 mask = ((0xffff >> i) << 16) | 0xffff;
2548 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2549 : (val >> i) & mask))
2559 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2567 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2577 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2585 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2587 return IS_INT5_CONST (INTVAL (op));
2595 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2603 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2611 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2615 /* The constraints do not have to check the register class,
2616 except when needed to discriminate between the constraints.
2617 The operand has been checked by the predicates to be valid. */
2619 /* ARx + 9-bit signed const or IRn
2620 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2621 We don't include the pre/post inc/dec forms here since
2622 they are handled by the <> constraints. */
/* Constraint Q: memory operand with base + 8-bit signed displacement or
   index register (see comment above).  NOTE(review): listing elided;
   some case labels and returns are missing.  */
2625 c4x_Q_constraint (op)
2628 enum machine_mode mode = GET_MODE (op);
2630 if (GET_CODE (op) != MEM)
2633 switch (GET_CODE (op))
2640 rtx op0 = XEXP (op, 0);
2641 rtx op1 = XEXP (op, 1);
2649 if (GET_CODE (op1) != CONST_INT)
2652 /* HImode and HFmode must be offsettable. */
2653 if (mode == HImode || mode == HFmode)
2654 return IS_DISP8_OFF_CONST (INTVAL (op1));
2656 return IS_DISP8_CONST (INTVAL (op1));
2667 /* ARx + 5-bit unsigned const
2668 *ARx, *+ARx(n) for n < 32. */
/* Constraint R: memory operand with base + 5-bit unsigned displacement
   (see comment above).  NOTE(review): listing elided; some case labels
   and returns are missing.  */
2671 c4x_R_constraint (op)
2674 enum machine_mode mode = GET_MODE (op);
2678 if (GET_CODE (op) != MEM)
2681 switch (GET_CODE (op))
2688 rtx op0 = XEXP (op, 0);
2689 rtx op1 = XEXP (op, 1);
2694 if (GET_CODE (op1) != CONST_INT)
2697 /* HImode and HFmode must be offsettable. */
2698 if (mode == HImode || mode == HFmode)
2699 return IS_UINT5_CONST (INTVAL (op1) + 1);
2701 return IS_UINT5_CONST (INTVAL (op1));
2716 enum machine_mode mode = GET_MODE (op);
2718 if (TARGET_C3X || GET_CODE (op) != MEM)
2722 switch (GET_CODE (op))
2725 return IS_ADDR_OR_PSEUDO_REG (op);
2729 rtx op0 = XEXP (op, 0);
2730 rtx op1 = XEXP (op, 1);
2732 /* HImode and HFmode must be offsettable. */
2733 if (mode == HImode || mode == HFmode)
2734 return IS_ADDR_OR_PSEUDO_REG (op0)
2735 && GET_CODE (op1) == CONST_INT
2736 && IS_UINT5_CONST (INTVAL (op1) + 1);
2739 && IS_ADDR_OR_PSEUDO_REG (op0)
2740 && GET_CODE (op1) == CONST_INT
2741 && IS_UINT5_CONST (INTVAL (op1));
2752 /* ARx + 1-bit unsigned const or IRn
2753 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2754 We don't include the pre/post inc/dec forms here since
2755 they are handled by the <> constraints. */
/* Constraint S: memory operand with base + 1-bit displacement or index
   register, including PRE/POST_MODIFY by register (see comment above).
   NOTE(review): listing elided; some case labels and returns are
   missing.  */
2758 c4x_S_constraint (op)
2761 enum machine_mode mode = GET_MODE (op);
2762 if (GET_CODE (op) != MEM)
2765 switch (GET_CODE (op))
/* PRE/POST_MODIFY: base must be stepped by a register.  */
2773 rtx op0 = XEXP (op, 0);
2774 rtx op1 = XEXP (op, 1);
2776 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2777 || (op0 != XEXP (op1, 0)))
2780 op0 = XEXP (op1, 0);
2781 op1 = XEXP (op1, 1);
2782 return REG_P (op0) && REG_P (op1);
2783 /* Pre or post_modify with a displacement of 0 or 1
2784 should not be generated. */
2790 rtx op0 = XEXP (op, 0);
2791 rtx op1 = XEXP (op, 1);
2799 if (GET_CODE (op1) != CONST_INT)
2802 /* HImode and HFmode must be offsettable. */
2803 if (mode == HImode || mode == HFmode)
2804 return IS_DISP1_OFF_CONST (INTVAL (op1));
2806 return IS_DISP1_CONST (INTVAL (op1));
2821 enum machine_mode mode = GET_MODE (op);
2822 if (GET_CODE (op) != MEM)
2826 switch (GET_CODE (op))
2830 if (mode != QImode && mode != QFmode)
2837 return IS_ADDR_OR_PSEUDO_REG (op);
2842 rtx op0 = XEXP (op, 0);
2843 rtx op1 = XEXP (op, 1);
2845 if (mode != QImode && mode != QFmode)
2848 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2849 || (op0 != XEXP (op1, 0)))
2852 op0 = XEXP (op1, 0);
2853 op1 = XEXP (op1, 1);
2854 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2855 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2856 /* Pre or post_modify with a displacement of 0 or 1
2857 should not be generated. */
2862 rtx op0 = XEXP (op, 0);
2863 rtx op1 = XEXP (op, 1);
2867 /* HImode and HFmode must be offsettable. */
2868 if (mode == HImode || mode == HFmode)
2869 return IS_ADDR_OR_PSEUDO_REG (op0)
2870 && GET_CODE (op1) == CONST_INT
2871 && IS_DISP1_OFF_CONST (INTVAL (op1));
2874 return (IS_INDEX_OR_PSEUDO_REG (op1)
2875 && IS_ADDR_OR_PSEUDO_REG (op0))
2876 || (IS_ADDR_OR_PSEUDO_REG (op1)
2877 && IS_INDEX_OR_PSEUDO_REG (op0));
2879 return IS_ADDR_OR_PSEUDO_REG (op0)
2880 && GET_CODE (op1) == CONST_INT
2881 && IS_DISP1_CONST (INTVAL (op1));
2893 /* Direct memory operand. */
/* Constraint T: direct (DP-relative) memory operand -- a MEM whose
   address is either a flagged function SYMBOL_REF or a LO_SUM of the
   DP register with a symbolic operand.  NOTE(review): listing elided;
   intermediate XEXP unwrapping lines are missing.  */
2896 c4x_T_constraint (op)
2899 if (GET_CODE (op) != MEM)
2903 if (GET_CODE (op) != LO_SUM)
2905 /* Allow call operands. */
2906 return GET_CODE (op) == SYMBOL_REF
2907 && GET_MODE (op) == Pmode
2908 && SYMBOL_REF_FLAG (op);
2911 /* HImode and HFmode are not offsettable. */
2912 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2915 if ((GET_CODE (XEXP (op, 0)) == REG)
2916 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2917 return c4x_U_constraint (XEXP (op, 1));
2923 /* Symbolic operand. */
/* Constraint U: symbolic operand (CONST, SYMBOL_REF, or LABEL_REF) --
   but not an arbitrary constant, to bar direct addressing of absolute
   addresses.  */
2926 c4x_U_constraint (op)
2929 /* Don't allow direct addressing to an arbitrary constant. */
2930 return GET_CODE (op) == CONST
2931 || GET_CODE (op) == SYMBOL_REF
2932 || GET_CODE (op) == LABEL_REF;
/* Return nonzero if OP is a MEM whose address uses an auto-modify
   addressing code (PRE/POST_MODIFY and friends).  MODE is ignored.
   NOTE(review): listing elided; the remaining code tests and the
   returns are not visible.  */
2937 c4x_autoinc_operand (op, mode)
2939 enum machine_mode mode ATTRIBUTE_UNUSED;
2941 if (GET_CODE (op) == MEM)
2943 enum rtx_code code = GET_CODE (XEXP (op, 0));
2949 || code == PRE_MODIFY
2950 || code == POST_MODIFY
2958 /* Match any operand. */
2961 any_operand (op, mode)
2962 register rtx op ATTRIBUTE_UNUSED;
2963 enum machine_mode mode ATTRIBUTE_UNUSED;
2969 /* Nonzero if OP is a floating point value with value 0.0. */
/* Nonzero if OP is a CONST_DOUBLE equal to floating-point 0.0.
   MODE is ignored.  */
2972 fp_zero_operand (op, mode)
2974 enum machine_mode mode ATTRIBUTE_UNUSED;
2978 if (GET_CODE (op) != CONST_DOUBLE)
2980 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2981 return REAL_VALUES_EQUAL (r, dconst0);
/* Predicate: constant operand valid as an immediate in MODE.  Float
   modes require a CONST_DOUBLE that fits the short float immediate;
   integer modes require a CONST_INT that is a 16-bit or HIGH constant.
   NOTE(review): listing elided; the switch on mode and several returns
   are not visible.  */
2986 const_operand (op, mode)
2988 register enum machine_mode mode;
2994 if (GET_CODE (op) != CONST_DOUBLE
2995 || GET_MODE (op) != mode
2996 || GET_MODE_CLASS (mode) != MODE_FLOAT)
2999 return c4x_immed_float_p (op);
3005 if (GET_CODE (op) == CONSTANT_P_RTX)
3008 if (GET_CODE (op) != CONST_INT
3009 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
3010 || GET_MODE_CLASS (mode) != MODE_INT)
3013 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
/* Predicate: constant encodable by the STIK instruction (delegates to
   the K constraint check).  MODE is ignored.  */
3025 stik_const_operand (op, mode)
3027 enum machine_mode mode ATTRIBUTE_UNUSED;
3029 return c4x_K_constant (op);
/* Predicate: constant whose ones-complement is loadable (delegates to
   the N constraint check).  MODE is ignored.  */
3034 not_const_operand (op, mode)
3036 enum machine_mode mode ATTRIBUTE_UNUSED;
3038 return c4x_N_constant (op);
/* Predicate: register operand, with special handling for QFmode
   SUBREGs (rejected -- see mixed_subreg_operand) before deferring to
   the generic register_operand.  NOTE(review): listing elided; the
   return inside the SUBREG branch is not visible.  */
3043 reg_operand (op, mode)
3045 enum machine_mode mode;
3047 if (GET_CODE (op) == SUBREG
3048 && GET_MODE (op) == QFmode)
3050 return register_operand (op, mode);
/* Predicate: a QFmode SUBREG of a QImode or HImode register -- the
   mixed-mode form produced for a union of an int and a long double,
   which needs special move patterns.  MODE is ignored.
   NOTE(review): listing elided; the returns are not visible.  */
3055 mixed_subreg_operand (op, mode)
3057 enum machine_mode mode ATTRIBUTE_UNUSED;
3059 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3060 int and a long double. */
3061 if (GET_CODE (op) == SUBREG
3062 && (GET_MODE (op) == QFmode)
3063 && (GET_MODE (SUBREG_REG (op)) == QImode
3064 || GET_MODE (SUBREG_REG (op)) == HImode))
3071 reg_imm_operand (op, mode)
3073 enum machine_mode mode ATTRIBUTE_UNUSED;
3075 if (REG_P (op) || CONSTANT_P (op))
/* Predicate: operand that does not auto-modify an address register --
   a register, a constant, or a MEM whose address has no side effect.
   NOTE(review): listing elided; several case labels and returns are
   not visible.  */
3082 not_modify_reg (op, mode)
3084 enum machine_mode mode ATTRIBUTE_UNUSED;
3086 if (REG_P (op) || CONSTANT_P (op))
3088 if (GET_CODE (op) != MEM)
3091 switch (GET_CODE (op))
3098 rtx op0 = XEXP (op, 0);
3099 rtx op1 = XEXP (op, 1);
3104 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3110 rtx op0 = XEXP (op, 0);
3112 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
/* Predicate: any operand except the RC (repeat count) hard register.
   NOTE(review): listing elided; the return statements are not
   visible.  */
3130 not_rc_reg (op, mode)
3132 enum machine_mode mode ATTRIBUTE_UNUSED;
3134 if (REG_P (op) && REGNO (op) == RC_REGNO)
3140 /* Extended precision register R0-R1. */
/* Predicate: extended-precision register R0 or R1 (or a pseudo that
   could be allocated to one), looking through SUBREGs.  */
3143 r0r1_reg_operand (op, mode)
3145 enum machine_mode mode;
3147 if (! reg_operand (op, mode))
3149 if (GET_CODE (op) == SUBREG)
3150 op = SUBREG_REG (op);
3151 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3155 /* Extended precision register R2-R3. */
/* Predicate: extended-precision register R2 or R3 (or a pseudo),
   looking through SUBREGs.  */
3158 r2r3_reg_operand (op, mode)
3160 enum machine_mode mode;
3162 if (! reg_operand (op, mode))
3164 if (GET_CODE (op) == SUBREG)
3165 op = SUBREG_REG (op);
3166 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3170 /* Low extended precision register R0-R7. */
/* Predicate: low extended-precision register R0-R7 (or a pseudo),
   looking through SUBREGs.  */
3173 ext_low_reg_operand (op, mode)
3175 enum machine_mode mode;
3177 if (! reg_operand (op, mode))
3179 if (GET_CODE (op) == SUBREG)
3180 op = SUBREG_REG (op);
3181 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3185 /* Extended precision register. */
3188 ext_reg_operand (op, mode)
3190 enum machine_mode mode;
3192 if (! reg_operand (op, mode))
3194 if (GET_CODE (op) == SUBREG)
3195 op = SUBREG_REG (op);
3198 return IS_EXT_OR_PSEUDO_REG (op);
3202 /* Standard precision register. */
3205 std_reg_operand (op, mode)
3207 enum machine_mode mode;
3209 if (! reg_operand (op, mode))
3211 if (GET_CODE (op) == SUBREG)
3212 op = SUBREG_REG (op);
3213 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3216 /* Standard precision or normal register. */
/* During reload only standard-precision registers are acceptable;
   otherwise any register operand will do.  */
3219 std_or_reg_operand (op, mode)
3221 enum machine_mode mode;
3223 if (reload_in_progress)
3224 return std_reg_operand (op, mode);
3225 return reg_operand (op, mode);

3228 /* Address register. */
3231 addr_reg_operand (op, mode)
3233 enum machine_mode mode;
3235 if (! reg_operand (op, mode))
3237 return c4x_a_register (op);

3241 /* Index register. */
3244 index_reg_operand (op, mode)
3246 enum machine_mode mode;
3248 if (! reg_operand (op, mode))
3250 if (GET_CODE (op) == SUBREG)
3251 op = SUBREG_REG (op);
3252 return c4x_x_register (op);
/* Single-register predicates for the special registers: data-page
   pointer (DP), stack pointer (SP), status (ST) and repeat count (RC).
   Pseudos are accepted before reload.  */
3259 dp_reg_operand (op, mode)
3261 enum machine_mode mode ATTRIBUTE_UNUSED;
3263 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);

3270 sp_reg_operand (op, mode)
3272 enum machine_mode mode ATTRIBUTE_UNUSED;
3274 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);

3281 st_reg_operand (op, mode)
3283 enum machine_mode mode ATTRIBUTE_UNUSED;
3285 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);

3292 rc_reg_operand (op, mode)
3294 enum machine_mode mode ATTRIBUTE_UNUSED;
3296 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);

/* Valid call target: a register (indirect call) or a symbolic address.  */
3301 call_address_operand (op, mode)
3303 enum machine_mode mode ATTRIBUTE_UNUSED;
3305 return (REG_P (op) || symbolic_address_operand (op, mode));
3309 /* Symbolic address operand. */
/* Dispatches on the rtx code; the accepted cases (SYMBOL_REF etc.)
   were sampled away from this view.  */
3312 symbolic_address_operand (op, mode)
3314 enum machine_mode mode ATTRIBUTE_UNUSED;
3316 switch (GET_CODE (op))

3328 /* Check dst operand of a move instruction. */
/* A mixed QF/integer SUBREG destination must stay in a register;
   everything else falls through to nonimmediate_operand.  */
3331 dst_operand (op, mode)
3333 enum machine_mode mode;
3335 if (GET_CODE (op) == SUBREG
3336 && mixed_subreg_operand (op, mode))
3340 return reg_operand (op, mode);
3342 return nonimmediate_operand (op, mode);
3346 /* Check src operand of two operand arithmetic instructions. */
/* Accepts registers, small immediates (I-class for integer modes),
   H-class float constants, and general operands — but explicitly
   rejects symbolic addresses (those must match
   symbolic_address_operand) and, depending on TARGET_LOAD_DIRECT_MEMS,
   direct memory references to symbols.  */
3349 src_operand (op, mode)
3351 enum machine_mode mode;
3353 if (GET_CODE (op) == SUBREG
3354 && mixed_subreg_operand (op, mode))
3358 return reg_operand (op, mode);
3360 if (mode == VOIDmode)
3361 mode = GET_MODE (op);
3363 if (GET_CODE (op) == CONST_INT)
3364 return (mode == QImode || mode == Pmode || mode == HImode)
3365 && c4x_I_constant (op);
3367 /* We don't like CONST_DOUBLE integers. */
3368 if (GET_CODE (op) == CONST_DOUBLE)
3369 return c4x_H_constant (op);
3371 /* Disallow symbolic addresses. Only the predicate
3372 symbolic_address_operand will match these. */
3373 if (GET_CODE (op) == SYMBOL_REF
3374 || GET_CODE (op) == LABEL_REF
3375 || GET_CODE (op) == CONST)
3378 /* If TARGET_LOAD_DIRECT_MEMS is non-zero, disallow direct memory
3379 access to symbolic addresses. These operands will get forced
3380 into a register and the movqi expander will generate a
3381 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is non-zero. */
3382 if (GET_CODE (op) == MEM
3383 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3384 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3385 || GET_CODE (XEXP (op, 0)) == CONST)))
3386 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3388 return general_operand (op, mode);

/* Like src_operand but additionally accepts O-class constants
   (used for HImode sources).  */
3393 src_hi_operand (op, mode)
3395 enum machine_mode mode;
3397 if (c4x_O_constant (op))
3399 return src_operand (op, mode);
3403 /* Check src operand of two operand logical instructions. */
/* Logical ops only exist for QImode/Pmode; immediates must be
   L- or J-class constants.  */
3406 lsrc_operand (op, mode)
3408 enum machine_mode mode;
3410 if (mode == VOIDmode)
3411 mode = GET_MODE (op);
3413 if (mode != QImode && mode != Pmode)
3414 fatal_insn ("mode not QImode", op);
3416 if (GET_CODE (op) == CONST_INT)
3417 return c4x_L_constant (op) || c4x_J_constant (op);
3419 return src_operand (op, mode);

3423 /* Check src operand of two operand tricky instructions. */
/* Same as lsrc_operand but N-class constants are also allowed.  */
3426 tsrc_operand (op, mode)
3428 enum machine_mode mode;
3430 if (mode == VOIDmode)
3431 mode = GET_MODE (op);
3433 if (mode != QImode && mode != Pmode)
3434 fatal_insn ("mode not QImode", op);
3436 if (GET_CODE (op) == CONST_INT)
3437 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3439 return src_operand (op, mode);
3443 /* Check src operand of two operand non immedidate instructions. */
/* Rejects all constants up front, then defers to src_operand.  */
3446 nonimmediate_src_operand (op, mode)
3448 enum machine_mode mode;
3450 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3453 return src_operand (op, mode);

3457 /* Check logical src operand of two operand non immedidate instructions. */
3460 nonimmediate_lsrc_operand (op, mode)
3462 enum machine_mode mode;
3464 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3467 return lsrc_operand (op, mode);

/* Register or constant operand.  */
3472 reg_or_const_operand (op, mode)
3474 enum machine_mode mode;
3476 return reg_operand (op, mode) || const_operand (op, mode);

3480 /* Check for indirect operands allowable in parallel instruction. */
/* Only the restricted S-class indirect addressing modes may be used
   inside a parallel instruction pair.  */
3483 par_ind_operand (op, mode)
3485 enum machine_mode mode;
3487 if (mode != VOIDmode && mode != GET_MODE (op))
3490 return c4x_S_indirect (op);

3494 /* Check for operands allowable in parallel instruction. */
3497 parallel_operand (op, mode)
3499 enum machine_mode mode;
3501 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
/* Decompose the address of S-class indirect MEM OP into its parts:
   *BASE (base register number), *INCDEC (non-zero if the address has
   an auto-inc/dec side effect), *INDEX (index register number or -1)
   and *DISP (constant displacement).  Aborts on anything that is not
   a valid S-class address.  The switch cases (POST_INC, PRE_DEC,
   POST_MODIFY, PRE_MODIFY, PLUS, ...) were partially sampled away.  */
3506 c4x_S_address_parse (op, base, incdec, index, disp)
3518 if (GET_CODE (op) != MEM)
3519 fatal_insn ("invalid indirect memory address", op);
3522 switch (GET_CODE (op))
3525 *base = REGNO (XEXP (op, 0));
3531 *base = REGNO (XEXP (op, 0));
3537 *base = REGNO (XEXP (op, 0));
3543 *base = REGNO (XEXP (op, 0));
/* {PRE,POST}_MODIFY: second operand is a PLUS whose second operand is
   either an index register or a constant step.  */
3549 *base = REGNO (XEXP (op, 0));
3550 if (REG_P (XEXP (XEXP (op, 1), 1)))
3552 *index = REGNO (XEXP (XEXP (op, 1), 1));
3553 *disp = 0; /* ??? */
3556 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3561 *base = REGNO (XEXP (op, 0));
3562 if (REG_P (XEXP (XEXP (op, 1), 1)))
3564 *index = REGNO (XEXP (XEXP (op, 1), 1));
3565 *disp = 1; /* ??? */
3568 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
/* Plain PLUS: base+index or base+small-displacement, in either
   operand order.  */
3579 rtx op0 = XEXP (op, 0);
3580 rtx op1 = XEXP (op, 1);
3582 if (c4x_a_register (op0))
3584 if (c4x_x_register (op1))
3586 *base = REGNO (op0);
3587 *index = REGNO (op1);
3590 else if ((GET_CODE (op1) == CONST_INT
3591 && IS_DISP1_CONST (INTVAL (op1))))
3593 *base = REGNO (op0);
3594 *disp = INTVAL (op1);
3598 else if (c4x_x_register (op0) && c4x_a_register (op1))
3600 *base = REGNO (op1);
3601 *index = REGNO (op0);
3608 fatal_insn ("invalid indirect (S) memory address", op);
/* Return non-zero if memory references OP0 and OP1 (store flags
   STORE0/STORE1) may conflict when executed in parallel.
   FIX: the auto-inc/dec clash test read `incdec0 && incdec0`,
   comparing the first operand's side-effect flag with itself, so a
   pair using auto-increment on both operands with the same base was
   never flagged.  Per the comment's intent it must test BOTH flags:
   `incdec0 && incdec1`.  */
3614 c4x_address_conflict (op0, op1, store0, store1)
3629 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3632 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3633 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3635 if (store0 && store1)
3637 /* If we have two stores in parallel to the same address, then
3638 the C4x only executes one of the stores. This is unlikely to
3639 cause problems except when writing to a hardware device such
3640 as a FIFO since the second write will be lost. The user
3641 should flag the hardware location as being volatile so that
3642 we don't do this optimisation. While it is unlikely that we
3643 have an aliased address if both locations are not marked
3644 volatile, it is probably safer to flag a potential conflict
3645 if either location is volatile. */
3646 if (! flag_argument_noalias)
3648 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3653 /* If have a parallel load and a store to the same address, the load
3654 is performed first, so there is no conflict. Similarly, there is
3655 no conflict if have parallel loads from the same address. */
3657 /* Cannot use auto increment or auto decrement twice for same
3659 if (base0 == base1 && incdec0 && incdec1)
3662 /* It might be too confusing for GCC if we have use a base register
3663 with a side effect and a memory reference using the same register
3665 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3668 /* We can not optimize the case where op1 and op2 refer to the same
3670 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3678 /* Check for while loop inside a decrement and branch loop. */
/* Walks backwards from INSN looking for the labels of JUMP and DB;
   which one is found first decides whether the loops are nested in a
   conflicting way.  The return statements were sampled away.  */
3681 c4x_label_conflict (insn, jump, db)
3688 if (GET_CODE (insn) == CODE_LABEL)
3690 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3692 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3695 insn = PREV_INSN (insn);
3701 /* Validate combination of operands for parallel load/store instructions. */
/* operands[0..3] form two move operations executed in parallel.
   After stripping SUBREGs, exactly two operands must be REGs and two
   MEMs; the REG destinations must differ and the MEM addresses must
   not conflict (c4x_address_conflict).  */
3704 valid_parallel_load_store (operands, mode)
3706 enum machine_mode mode ATTRIBUTE_UNUSED;
3708 rtx op0 = operands[0];
3709 rtx op1 = operands[1];
3710 rtx op2 = operands[2];
3711 rtx op3 = operands[3];
3713 if (GET_CODE (op0) == SUBREG)
3714 op0 = SUBREG_REG (op0);
3715 if (GET_CODE (op1) == SUBREG)
3716 op1 = SUBREG_REG (op1);
3717 if (GET_CODE (op2) == SUBREG)
3718 op2 = SUBREG_REG (op2);
3719 if (GET_CODE (op3) == SUBREG)
3720 op3 = SUBREG_REG (op3);
3722 /* The patterns should only allow ext_low_reg_operand() or
3723 par_ind_operand() operands. Thus of the 4 operands, only 2
3724 should be REGs and the other 2 should be MEMs. */
3726 /* This test prevents the multipack pass from using this pattern if
3727 op0 is used as an index or base register in op2 or op3, since
3728 this combination will require reloading. */
3729 if (GET_CODE (op0) == REG
3730 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3731 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
/* load/load case: two register destinations, two memory sources.  */
3735 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3736 return (REGNO (op0) != REGNO (op2))
3737 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3738 && ! c4x_address_conflict (op1, op3, 0, 0);
/* store/store case.  */
3741 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3742 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3743 && ! c4x_address_conflict (op0, op2, 1, 1);
/* load/store case.  */
3746 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3747 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3748 && ! c4x_address_conflict (op1, op2, 0, 1);
/* store/load case.  */
3751 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3752 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3753 && ! c4x_address_conflict (op0, op3, 1, 0);
/* Validate a 4-operand parallel combination: reject the case where
   destination op0 is also used as a base/index register in memory
   operand op2, which would force a reload.  (The final return was
   sampled away.)  */
3760 valid_parallel_operands_4 (operands, mode)
3762 enum machine_mode mode ATTRIBUTE_UNUSED;
3764 rtx op0 = operands[0];
3765 rtx op2 = operands[2];
3767 if (GET_CODE (op0) == SUBREG)
3768 op0 = SUBREG_REG (op0);
3769 if (GET_CODE (op2) == SUBREG)
3770 op2 = SUBREG_REG (op2);
3772 /* This test prevents the multipack pass from using this pattern if
3773 op0 is used as an index or base register in op2, since this combination
3774 will require reloading. */
3775 if (GET_CODE (op0) == REG
3776 && GET_CODE (op2) == MEM
3777 && reg_mentioned_p (op0, XEXP (op2, 0)))
/* Validate a 5-operand parallel combination: at most one of the
   commutative source operands op1/op2 may be a register, and the
   destination op0 must not appear in the address of memory operand
   op3.  */
3785 valid_parallel_operands_5 (operands, mode)
3787 enum machine_mode mode ATTRIBUTE_UNUSED;
3790 rtx op0 = operands[0];
3791 rtx op1 = operands[1];
3792 rtx op2 = operands[2];
3793 rtx op3 = operands[3];
3795 if (GET_CODE (op0) == SUBREG)
3796 op0 = SUBREG_REG (op0);
3797 if (GET_CODE (op1) == SUBREG)
3798 op1 = SUBREG_REG (op1);
3799 if (GET_CODE (op2) == SUBREG)
3800 op2 = SUBREG_REG (op2);
3802 /* The patterns should only allow ext_low_reg_operand() or
3803 par_ind_operand() operands. Operands 1 and 2 may be commutative
3804 but only one of them can be a register. */
3805 if (GET_CODE (op1) == REG)
3807 if (GET_CODE (op2) == REG)
3813 /* This test prevents the multipack pass from using this pattern if
3814 op0 is used as an index or base register in op3, since this combination
3815 will require reloading. */
3816 if (GET_CODE (op0) == REG
3817 && GET_CODE (op3) == MEM
3818 && reg_mentioned_p (op0, XEXP (op3, 0)))
/* Validate a 6-operand parallel combination: of the four input
   operands (1, 2, 4, 5) only two may be registers, and destination
   op0 must not appear in the addresses of memory operands op4/op5.  */
3826 valid_parallel_operands_6 (operands, mode)
3828 enum machine_mode mode ATTRIBUTE_UNUSED;
3831 rtx op0 = operands[0];
3832 rtx op1 = operands[1];
3833 rtx op2 = operands[2];
3834 rtx op4 = operands[4];
3835 rtx op5 = operands[5];
3837 if (GET_CODE (op1) == SUBREG)
3838 op1 = SUBREG_REG (op1);
3839 if (GET_CODE (op2) == SUBREG)
3840 op2 = SUBREG_REG (op2);
3841 if (GET_CODE (op4) == SUBREG)
3842 op4 = SUBREG_REG (op4);
3843 if (GET_CODE (op5) == SUBREG)
3844 op5 = SUBREG_REG (op5);
3846 /* The patterns should only allow ext_low_reg_operand() or
3847 par_ind_operand() operands. Thus of the 4 input operands, only 2
3848 should be REGs and the other 2 should be MEMs. */
3850 if (GET_CODE (op1) == REG)
3852 if (GET_CODE (op2) == REG)
3854 if (GET_CODE (op4) == REG)
3856 if (GET_CODE (op5) == REG)
3859 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3860 Perhaps we should count the MEMs as well? */
3864 /* This test prevents the multipack pass from using this pattern if
3865 op0 is used as an index or base register in op4 or op5, since
3866 this combination will require reloading. */
3867 if (GET_CODE (op0) == REG
3868 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3869 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3876 /* Validate combination of src operands. Note that the operands have
3877 been screened by the src_operand predicate. We just have to check
3878 that the combination of operands is valid. If FORCE is set, ensure
3879 that the destination regno is valid if we have a 2 operand insn. */
/* Strips SUBREGs from both sources, then dispatches on the
   (code1, code2) pair: REG/REG is always fine; MEM/MEM needs both to
   be S- or both R-class indirect; constants are constrained per
   class; finally for FORCE the destination must equal op1 for
   two-operand insns.  Many case labels are missing from this view.  */
3882 c4x_valid_operands (code, operands, mode, force)
3885 enum machine_mode mode ATTRIBUTE_UNUSED;
3890 enum rtx_code code1;
3891 enum rtx_code code2;
3893 if (code == COMPARE)
3904 if (GET_CODE (op1) == SUBREG)
3905 op1 = SUBREG_REG (op1);
3906 if (GET_CODE (op2) == SUBREG)
3907 op2 = SUBREG_REG (op2);
3909 code1 = GET_CODE (op1);
3910 code2 = GET_CODE (op2);
3912 if (code1 == REG && code2 == REG)
3915 if (code1 == MEM && code2 == MEM)
3917 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3919 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3930 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3935 if (! c4x_H_constant (op2))
3939 /* Any valid memory operand screened by src_operand is OK. */
3942 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3943 into a stack slot memory address comprising a PLUS and a
3949 fatal_insn ("c4x_valid_operands: Internal error", op2);
3953 /* Check that we have a valid destination register for a two operand
3955 return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
3958 /* We assume MINUS is commutative since the subtract patterns
3959 also support the reverse subtract instructions. Since op1
3960 is not a register, and op2 is a register, op1 can only
3961 be a restricted memory operand for a shift instruction. */
3962 if (code == ASHIFTRT || code == LSHIFTRT
3963 || code == ASHIFT || code == COMPARE)
3965 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3970 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3975 if (! c4x_H_constant (op1))
3979 /* Any valid memory operand screened by src_operand is OK. */
3987 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3988 into a stack slot memory address comprising a PLUS and a
3998 /* Check that we have a valid destination register for a two operand
4000 return ! force || REGNO (op1) == REGNO (operands[0]);
/* Public wrapper around c4x_valid_operands; when not optimizing,
   accept everything and let reload clean up (see comment below).  */
4004 int valid_operands (code, operands, mode)
4007 enum machine_mode mode;
4010 /* If we are not optimizing then we have to let anything go and let
4011 reload fix things up. instantiate_decl in function.c can produce
4012 invalid insns by changing the offset of a memory operand from a
4013 valid one into an invalid one, when the second operand is also a
4014 memory operand. The alternative is not to allow two memory
4015 operands for an insn when not optimizing. The problem only rarely
4016 occurs, for example with the C-torture program DFcmp.c. */
4018 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
/* Massage OPERANDS for rtx CODE in MODE so they form a valid insn:
   force expensive constants into registers (so loop optimization can
   hoist them), copy op1 into op0 when a 2-operand form requires it,
   and negate the shift count for right shifts (hardware expects a
   negative count).  */
4023 legitimize_operands (code, operands, mode)
4026 enum machine_mode mode;
4028 /* Compare only has 2 operands. */
4029 if (code == COMPARE)
4031 /* During RTL generation, force constants into pseudos so that
4032 they can get hoisted out of loops. This will tie up an extra
4033 register but can save an extra cycle. Only do this if loop
4034 optimisation enabled. (We cannot pull this trick for add and
4035 sub instructions since the flow pass won't find
4036 autoincrements etc.) This allows us to generate compare
4037 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4038 of LDI *AR0++, R0; CMPI 42, R0.
4040 Note that expand_binops will try to load an expensive constant
4041 into a register if it is used within a loop. Unfortunately,
4042 the cost mechanism doesn't allow us to look at the other
4043 operand to decide whether the constant is expensive. */
4045 if (! reload_in_progress
4048 && GET_CODE (operands[1]) == CONST_INT
4049 && preserve_subexpressions_p ()
4050 && rtx_cost (operands[1], code) > 1)
4051 operands[1] = force_reg (mode, operands[1]);
4053 if (! reload_in_progress
4054 && ! c4x_valid_operands (code, operands, mode, 0))
4055 operands[0] = force_reg (mode, operands[0]);
4059 /* We cannot do this for ADDI/SUBI insns since we will
4060 defeat the flow pass from finding autoincrement addressing
4062 if (! reload_in_progress
4063 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
4066 && GET_CODE (operands[2]) == CONST_INT
4067 && preserve_subexpressions_p ()
4068 && rtx_cost (operands[2], code) > 1)
4069 operands[2] = force_reg (mode, operands[2]);
4071 /* We can get better code on a C30 if we force constant shift counts
4072 into a register. This way they can get hoisted out of loops,
4073 tying up a register, but saving an instruction. The downside is
4074 that they may get allocated to an address or index register, and
4075 thus we will get a pipeline conflict if there is a nearby
4076 indirect address using an address register.
4078 Note that expand_binops will not try to load an expensive constant
4079 into a register if it is used within a loop for a shift insn. */
4081 if (! reload_in_progress
4082 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
4084 /* If the operand combination is invalid, we force operand1 into a
4085 register, preventing reload from having doing to do this at a
4087 operands[1] = force_reg (mode, operands[1]);
/* Two-operand form: move op1 into the destination and use the
   destination as the first source.  */
4090 emit_move_insn (operands[0], operands[1]);
4091 operands[1] = copy_rtx (operands[0]);
4095 /* Just in case... */
4096 if (! c4x_valid_operands (code, operands, mode, 0))
4097 operands[2] = force_reg (mode, operands[2]);
4101 /* Right shifts require a negative shift count, but GCC expects
4102 a positive count, so we emit a NEG. */
4103 if ((code == ASHIFTRT || code == LSHIFTRT)
4104 && (GET_CODE (operands[2]) != CONST_INT))
4105 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
4111 /* The following predicates are used for instruction scheduling. */
/* Group-1 register (any pseudo acceptable before reload completes).  */
4114 group1_reg_operand (op, mode)
4116 enum machine_mode mode;
4118 if (mode != VOIDmode && mode != GET_MODE (op))
4120 if (GET_CODE (op) == SUBREG)
4121 op = SUBREG_REG (op);
4122 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));

/* Memory operand whose address involves a group-1 register (directly,
   or as either term of a PLUS).  */
4127 group1_mem_operand (op, mode)
4129 enum machine_mode mode;
4131 if (mode != VOIDmode && mode != GET_MODE (op))
4134 if (GET_CODE (op) == MEM)
4137 if (GET_CODE (op) == PLUS)
4139 rtx op0 = XEXP (op, 0);
4140 rtx op1 = XEXP (op, 1);
4142 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4143 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4146 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))

4154 /* Return true if any one of the address registers. */
4157 arx_reg_operand (op, mode)
4159 enum machine_mode mode;
4161 if (mode != VOIDmode && mode != GET_MODE (op))
4163 if (GET_CODE (op) == SUBREG)
4164 op = SUBREG_REG (op);
4165 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
/* True if OP is (after SUBREG stripping) the specific register REGNO;
   before reload any register is accepted.  Helper behind the
   arN_reg_operand wrappers.  */
4170 c4x_arn_reg_operand (op, mode, regno)
4172 enum machine_mode mode;
4175 if (mode != VOIDmode && mode != GET_MODE (op))
4177 if (GET_CODE (op) == SUBREG)
4178 op = SUBREG_REG (op);
4179 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

/* True if OP is a MEM whose address uses register REGNO (as plain
   base, in an auto-modify form, or in a PLUS).  Helper behind the
   arN_mem_operand wrappers.  */
4184 c4x_arn_mem_operand (op, mode, regno)
4186 enum machine_mode mode;
4189 if (mode != VOIDmode && mode != GET_MODE (op))
4192 if (GET_CODE (op) == MEM)
4195 switch (GET_CODE (op))
4204 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
/* {PRE,POST}_MODIFY: check both the base and the modify index.  */
4208 if (REG_P (XEXP (op, 0)) && (! reload_completed
4209 || (REGNO (XEXP (op, 0)) == regno)))
4211 if (REG_P (XEXP (XEXP (op, 1), 1))
4212 && (! reload_completed
4213 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
4219 rtx op0 = XEXP (op, 0);
4220 rtx op1 = XEXP (op, 1);
4222 if ((REG_P (op0) && (! reload_completed
4223 || (REGNO (op0) == regno)))
4224 || (REG_P (op1) && (! reload_completed
4225 || (REGNO (op1) == regno))))
/* Trivial per-register wrappers around c4x_arn_reg_operand /
   c4x_arn_mem_operand for address registers AR0-AR7 and index
   registers IR0-IR1, used by the scheduler's insn attributes.  */
4239 ar0_reg_operand (op, mode)
4241 enum machine_mode mode;
4243 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4248 ar0_mem_operand (op, mode)
4250 enum machine_mode mode;
4252 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4257 ar1_reg_operand (op, mode)
4259 enum machine_mode mode;
4261 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4266 ar1_mem_operand (op, mode)
4268 enum machine_mode mode;
4270 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4275 ar2_reg_operand (op, mode)
4277 enum machine_mode mode;
4279 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4284 ar2_mem_operand (op, mode)
4286 enum machine_mode mode;
4288 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4293 ar3_reg_operand (op, mode)
4295 enum machine_mode mode;
4297 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4302 ar3_mem_operand (op, mode)
4304 enum machine_mode mode;
4306 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4311 ar4_reg_operand (op, mode)
4313 enum machine_mode mode;
4315 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4320 ar4_mem_operand (op, mode)
4322 enum machine_mode mode;
4324 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4329 ar5_reg_operand (op, mode)
4331 enum machine_mode mode;
4333 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4338 ar5_mem_operand (op, mode)
4340 enum machine_mode mode;
4342 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4347 ar6_reg_operand (op, mode)
4349 enum machine_mode mode;
4351 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4356 ar6_mem_operand (op, mode)
4358 enum machine_mode mode;
4360 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4365 ar7_reg_operand (op, mode)
4367 enum machine_mode mode;
4369 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4374 ar7_mem_operand (op, mode)
4376 enum machine_mode mode;
4378 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4383 ir0_reg_operand (op, mode)
4385 enum machine_mode mode;
4387 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4392 ir0_mem_operand (op, mode)
4394 enum machine_mode mode;
4396 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4401 ir1_reg_operand (op, mode)
4403 enum machine_mode mode;
4405 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4410 ir1_mem_operand (op, mode)
4412 enum machine_mode mode;
4414 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4418 /* This is similar to operand_subword but allows autoincrement
/* Extract subword I of a two-word (HImode/HFmode) operand OP.
   Auto-inc/dec MEM addresses are handled specially; several invalid
   situations abort via fatal_insn.  Falls back to the generic
   operand_subword at the end.  */
4422 c4x_operand_subword (op, i, validate_address, mode)
4425 int validate_address;
4426 enum machine_mode mode;
4428 if (mode != HImode && mode != HFmode)
4429 fatal_insn ("c4x_operand_subword: invalid mode", op);
4431 if (mode == HFmode && REG_P (op))
4432 fatal_insn ("c4x_operand_subword: invalid operand", op);
4434 if (GET_CODE (op) == MEM)
4436 enum rtx_code code = GET_CODE (XEXP (op, 0));
4437 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4438 enum machine_mode submode;
4443 else if (mode == HFmode)
/* Auto-modify addresses: reuse the same side-effecting address for
   the subword reference.  */
4450 return gen_rtx_MEM (submode, XEXP (op, 0));
4456 /* We could handle these with some difficulty.
4457 e.g., *p-- => *(p-=2); *(p+1). */
4458 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4464 fatal_insn ("c4x_operand_subword: invalid address", op);
4466 /* Even though offsettable_address_p considers (MEM
4467 (LO_SUM)) to be offsettable, it is not safe if the
4468 address is at the end of the data page since we also have
4469 to fix up the associated high PART. In this case where
4470 we are trying to split a HImode or HFmode memory
4471 reference, we would have to emit another insn to reload a
4472 new HIGH value. It's easier to disable LO_SUM memory references
4473 in HImode or HFmode and we probably get better code. */
4475 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4482 return operand_subword (op, i, validate_address, mode);
/* Singly-linked list node for symbol names (the struct's opening
   lines were sampled away; `next` chains nodes, and a `name` field is
   referenced by the list routines below).  */
4487 struct name_list *next;
/* Heads of the global-symbol and external-reference name lists.  */
4491 static struct name_list *global_head;
4492 static struct name_list *extern_head;
4495 /* Add NAME to list of global symbols and remove from external list if
4496 present on external list. */
4499 c4x_global_label (name)
4502 struct name_list *p, *last;
4504 /* Do not insert duplicate names, so linearly search through list of
/* existing names (rest of comment sampled away).  */
4509 if (strcmp (p->name, name) == 0)
/* permalloc: allocation lives for the entire compilation.  */
4513 p = (struct name_list *) permalloc (sizeof *p);
4514 p->next = global_head;
4518 /* Remove this name from ref list if present. */
4523 if (strcmp (p->name, name) == 0)
4526 last->next = p->next;
4528 extern_head = p->next;

4537 /* Add NAME to list of external symbols. */
4540 c4x_external_ref (name)
4543 struct name_list *p;
4545 /* Do not insert duplicate names. */
4549 if (strcmp (p->name, name) == 0)
4554 /* Do not insert ref if global found. */
4558 if (strcmp (p->name, name) == 0)
4562 p = (struct name_list *) permalloc (sizeof *p);
4563 p->next = extern_head;
/* End-of-assembly-file output (enclosing function header was sampled
   away — presumably the ASM_FILE_END hook; verify in full source):
   emits a `.ref` directive for each recorded external name and a
   final `.end`.  */
4573 struct name_list *p;
4575 /* Output all external names that are not global. */
4579 fprintf (fp, "\t.ref\t");
4580 assemble_name (fp, p->name);
4584 fprintf (fp, "\t.end\n");
/* If DECL's name appears in LIST (a chain keyed by identifier), cons
   the attribute ATTRIB with the associated value onto *ATTRIBUTES.  */
4589 c4x_check_attribute (attrib, list, decl, attributes)
4591 tree list, decl, *attributes;
4593 while (list != NULL_TREE
4594 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4595 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4596 list = TREE_CHAIN (list);
4598 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),

/* Target hook: attach pragma-registered attributes (section, const,
   noreturn, interrupt) to function and data declarations.  */
4604 c4x_insert_attributes (decl, attributes)
4605 tree decl, *attributes;
4607 switch (TREE_CODE (decl))
4610 c4x_check_attribute ("section", code_tree, decl, attributes);
4611 c4x_check_attribute ("const", pure_tree, decl, attributes);
4612 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4613 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4617 c4x_check_attribute ("section", data_tree, decl, attributes);
4625 /* Table of valid machine attributes. */
/* All three attributes require a FUNCTION_TYPE node and take no
   arguments; the table is NULL-terminated.  */
4626 const struct attribute_spec c4x_attribute_table[] =
4628 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4629 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4630 /* FIXME: code elsewhere in this file treats "naked" as a synonym of
4631 "interrupt"; should it be accepted here? */
4632 { "assembler", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4633 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4634 { NULL, 0, 0, false, false, false, NULL }
4637 /* Handle an attribute requiring a FUNCTION_TYPE;
4638 arguments as in struct attribute_spec.handler. */
/* Warns and suppresses the attribute when applied to a non-function.  */
4640 c4x_handle_fntype_attribute (node, name, args, flags, no_add_attrs)
4643 tree args ATTRIBUTE_UNUSED;
4644 int flags ATTRIBUTE_UNUSED;
4647 if (TREE_CODE (*node) != FUNCTION_TYPE)
4649 warning ("`%s' attribute only applies to functions",
4650 IDENTIFIER_POINTER (name));
4651 *no_add_attrs = true;
4658 /* !!! FIXME to emit RPTS correctly. */
/* Decide whether an RPTB loop can instead be emitted as the
   single-instruction RPTS form: the loop body must consist of exactly
   one insn between the top label and the rptb_end, and the count OP
   must be a constant within the TARGET_RPTS_CYCLES limit.  */
4661 c4x_rptb_rpts_p (insn, op)
4664 /* The next insn should be our label marking where the
4665 repeat block starts. */
4666 insn = NEXT_INSN (insn);
4667 if (GET_CODE (insn) != CODE_LABEL)
4669 /* Some insns may have been shifted between the RPTB insn
4670 and the top label... They were probably destined to
4671 be moved out of the loop. For now, let's leave them
4672 where they are and print a warning. We should
4673 probably move these insns before the repeat block insn. */
4675 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4680 /* Skip any notes. */
4681 insn = next_nonnote_insn (insn);
4683 /* This should be our first insn in the loop. */
4684 if (! INSN_P (insn))
4687 /* Skip any notes. */
4688 insn = next_nonnote_insn (insn);
4690 if (! INSN_P (insn))
/* The very next insn must already be the loop end for RPTS.  */
4693 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4699 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4703 /* Check if register r11 is used as the destination of an insn. */
/* Recursively walks the rtx X (following GET_RTX_FORMAT) looking for
   a SET whose destination is register R11; for a SEQUENCE, only the
   last element is inspected.  */
4716 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4717 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4719 if (INSN_P (x) && (set = single_set (x)))
4722 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4725 fmt = GET_RTX_FORMAT (GET_CODE (x));
4726 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4730 if (c4x_r11_set_p (XEXP (x, i)))
4733 else if (fmt[i] == 'E')
4734 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4735 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4742 /* The c4x sometimes has a problem when the insn before the laj insn
4743 sets the r11 register. Check for this situation. */
/* Returns whether a NOP must be inserted before the LAJ at INSN.  */
4746 c4x_check_laj_p (insn)
4749 insn = prev_nonnote_insn (insn);
4751 /* If this is the start of the function no nop is needed. */
4755 /* If the previous insn is a code label we have to insert a nop. This
4756 could be a jump or table jump. We can find the normal jumps by
4757 scanning the function but this will not find table jumps. */
4758 if (GET_CODE (insn) == CODE_LABEL)
4761 /* If the previous insn sets register r11 we have to insert a nop. */
4762 if (c4x_r11_set_p (insn))
4765 /* No nop needed. */
4770 /* Adjust the cost of a scheduling dependency. Return the new cost of
4771 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4772 A set of an address register followed by a use occurs a 2 cycle
4773 stall (reduced to a single cycle on the c40 using LDA), while
4774 a read of an address register followed by a use occurs a single cycle. */
/* Cost constants used by c4x_adjust_cost: value is stall cycles + 1.  */
4776 #define SET_USE_COST 3
4777 #define SETLDA_USE_COST 2
4778 #define READ_USE_COST 2
/* Scheduler hook: adjust the cost of the dependence LINK of INSN on
   DEP_INSN.  COST is the default cost computed by the scheduler; the
   function raises it when DEP_INSN writes (or reads) an address
   register (AR0-AR7, IR0-IR1) that INSN then uses for addressing,
   modelling the C4x address-pipeline stalls.
   NOTE(review): the return-type line, K&R parameter declarations,
   braces and return statements are not visible in this excerpt --
   confirm against the full file.  */
4781 c4x_adjust_cost (insn, link, dep_insn, cost)
4787   /* Don't worry about this until we know what registers have been
/* Bail out early when insn scheduling is disabled and register
   allocation has not yet happened.  */
4789   if (flag_schedule_insns == 0 && ! reload_completed)
4792   /* How do we handle dependencies where a read followed by another
4793      read causes a pipeline stall?  For example, a read of ar0 followed
4794      by the use of ar0 for a memory reference.  It looks like we
4795      need to extend the scheduler to handle this case.  */
4797   /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4798      (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4799      so only deal with insns we know about.  */
4800   if (recog_memoized (dep_insn) < 0)
/* A REG_NOTE_KIND of 0 marks a true (read-after-write) data
   dependence.  */
4803   if (REG_NOTE_KIND (link) == 0)
4807       /* Data dependency; DEP_INSN writes a register that INSN reads some
/* Group-level tests first: any group-1 setter followed by any
   group-1 user, and likewise for reads.  MAX accumulates the worst
   penalty found; NOTE(review): its declaration/initialisation is not
   visible in this excerpt.  */
4811       if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4812 	max = SET_USE_COST > max ? SET_USE_COST : max;
4813       if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4814 	max = READ_USE_COST > max ? READ_USE_COST : max;
4818       /* This could be significantly optimized. We should look
4819 	 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4820 	 insn uses ar0-ar7.  We then test if the same register
4821 	 is used.  The tricky bit is that some operands will
4822 	 use several registers...  */
/* Per-register tests for AR0-AR7: for each register, a plain set,
   a set via LDA, and a read each impose their own penalty when the
   same register is subsequently used.  */
4823       if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4824 	max = SET_USE_COST > max ? SET_USE_COST : max;
4825       if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4826 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4827       if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4828 	max = READ_USE_COST > max ? READ_USE_COST : max;
4830       if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4831 	max = SET_USE_COST > max ? SET_USE_COST : max;
4832       if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4833 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4834       if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4835 	max = READ_USE_COST > max ? READ_USE_COST : max;
4837       if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4838 	max = SET_USE_COST > max ? SET_USE_COST : max;
4839       if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4840 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4841       if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4842 	max = READ_USE_COST > max ? READ_USE_COST : max;
4844       if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4845 	max = SET_USE_COST > max ? SET_USE_COST : max;
4846       if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4847 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4848       if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4849 	max = READ_USE_COST > max ? READ_USE_COST : max;
4851       if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4852 	max = SET_USE_COST > max ? SET_USE_COST : max;
4853       if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4854 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4855       if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4856 	max = READ_USE_COST > max ? READ_USE_COST : max;
4858       if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4859 	max = SET_USE_COST > max ? SET_USE_COST : max;
4860       if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4861 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4862       if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4863 	max = READ_USE_COST > max ? READ_USE_COST : max;
4865       if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4866 	max = SET_USE_COST > max ? SET_USE_COST : max;
4867       if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4868 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4869       if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4870 	max = READ_USE_COST > max ? READ_USE_COST : max;
4872       if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4873 	max = SET_USE_COST > max ? SET_USE_COST : max;
4874       if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4875 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4876       if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4877 	max = READ_USE_COST > max ? READ_USE_COST : max;
/* Index registers IR0/IR1: only set and set-via-LDA penalties are
   modelled here -- there are no readir* tests in this excerpt.  */
4879       if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4880 	max = SET_USE_COST > max ? SET_USE_COST : max;
4881       if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4882 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4884       if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4885 	max = SET_USE_COST > max ? SET_USE_COST : max;
4886       if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
4887 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4893       /* For other data dependencies, the default cost specified in the
4897   else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
4899       /* Anti dependency; DEP_INSN reads a register that INSN writes some
4902       /* For c4x anti dependencies, the cost is 0.  */
4905   else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
4907       /* Output dependency; DEP_INSN writes a register that INSN writes some
4910       /* For c4x output dependencies, the cost is 0.  */
/* Register the C4x machine-specific builtin functions with the front
   end.  Each maps a source-level name to a C4X_BUILTIN_* code that
   c4x_expand_builtin later expands to a machine insn.
   NOTE(review): the return-type line, braces and some of the
   type-construction lines are not visible in this excerpt.  */
4918 c4x_init_builtins ()
4920   tree endlink = void_list_node;
/* fast_ftoi (double): expanded via C4X_BUILTIN_FIX.  */
4922   builtin_function ("fast_ftoi",
4925 				      tree_cons (NULL_TREE, double_type_node, endlink)),
4926 		    C4X_BUILTIN_FIX, BUILT_IN_MD, NULL);
/* ansi_ftoi (double): expanded via C4X_BUILTIN_FIX_ANSI.  */
4927   builtin_function ("ansi_ftoi",
4930 				      tree_cons (NULL_TREE, double_type_node, endlink)),
4931 		    C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL);
/* fast_imult (int, int): expanded via C4X_BUILTIN_MPYI.  */
4933     builtin_function ("fast_imult",
4936 					tree_cons (NULL_TREE, integer_type_node,
4937 						   tree_cons (NULL_TREE,
4938 							      integer_type_node, endlink))),
4939 		      C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL);
/* toieee/frieee (double): IEEE format conversions, expanded via
   C4X_BUILTIN_TOIEEE and C4X_BUILTIN_FRIEEE respectively.  */
4942       builtin_function ("toieee",
4945 					tree_cons (NULL_TREE, double_type_node, endlink)),
4946 			C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL);
4947       builtin_function ("frieee",
4950 					tree_cons (NULL_TREE, double_type_node, endlink)),
4951 			C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL);
/* fast_invf (double): reciprocal, expanded via C4X_BUILTIN_RCPF.  */
4952       builtin_function ("fast_invf",
4955 					tree_cons (NULL_TREE, double_type_node, endlink)),
4956 			C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL);
/* Expand a call EXP to one of the machine-specific builtins registered
   by c4x_init_builtins.  TARGET is a suggestion for the result rtx;
   a fresh register is allocated when TARGET is absent or not a
   register of the required mode.  Returns the rtx holding the result.
   NOTE(review): the return-type line, local declarations (arg0, arg1,
   r0, r1), the switch head, break statements and braces are not
   visible in this excerpt.  */
4962 c4x_expand_builtin (exp, target, subtarget, mode, ignore)
4965      rtx subtarget ATTRIBUTE_UNUSED;
4966      enum machine_mode mode ATTRIBUTE_UNUSED;
4967      int ignore ATTRIBUTE_UNUSED;
/* Recover the FUNCTION_DECL and its builtin code from the CALL_EXPR.  */
4969   tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4970   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4971   tree arglist = TREE_OPERAND (exp, 1);
/* fast_ftoi: emit the fast (clobbering) float->int FIX pattern.  */
4977     case C4X_BUILTIN_FIX:
4978       arg0 = TREE_VALUE (arglist);
4979       r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4980       r0 = protect_from_queue (r0, 0);
4981       if (! target || ! register_operand (target, QImode))
4982 	target = gen_reg_rtx (QImode);
4983       emit_insn (gen_fixqfqi_clobber (target, r0));
/* ansi_ftoi: emit the ANSI-conforming truncating conversion.  */
4986     case C4X_BUILTIN_FIX_ANSI:
4987       arg0 = TREE_VALUE (arglist);
4988       r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4989       r0 = protect_from_queue (r0, 0);
4990       if (! target || ! register_operand (target, QImode))
4991 	target = gen_reg_rtx (QImode);
4992       emit_insn (gen_fix_truncqfqi2 (target, r0));
/* fast_imult: 24-bit integer multiply (two int arguments).  */
4995     case C4X_BUILTIN_MPYI:
4998       arg0 = TREE_VALUE (arglist);
4999       arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5000       r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
5001       r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
5002       r0 = protect_from_queue (r0, 0);
5003       r1 = protect_from_queue (r1, 0);
5004       if (! target || ! register_operand (target, QImode))
5005 	target = gen_reg_rtx (QImode);
5006       emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
/* toieee: convert native C4x float format to IEEE.  */
5009     case C4X_BUILTIN_TOIEEE:
5012       arg0 = TREE_VALUE (arglist);
5013       r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5014       r0 = protect_from_queue (r0, 0);
5015       if (! target || ! register_operand (target, QFmode))
5016 	target = gen_reg_rtx (QFmode);
5017       emit_insn (gen_toieee (target, r0));
/* frieee: convert IEEE format to native C4x float.  A register
   operand is spilled to a stack slot first -- presumably because the
   frieee pattern wants a memory source; confirm against c4x.md.  */
5020     case C4X_BUILTIN_FRIEEE:
5023       arg0 = TREE_VALUE (arglist);
5024       if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
5025 	put_var_into_stack (arg0);
5026       r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5027       r0 = protect_from_queue (r0, 0);
5028       if (register_operand (r0, QFmode))
5030 	  r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
5031 	  emit_move_insn (r1, r0);
5034       if (! target || ! register_operand (target, QFmode))
5035 	target = gen_reg_rtx (QFmode);
5036       emit_insn (gen_frieee (target, r0));
/* fast_invf: fast floating-point reciprocal (clobbering pattern).  */
5039     case C4X_BUILTIN_RCPF:
5042       arg0 = TREE_VALUE (arglist);
5043       r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5044       r0 = protect_from_queue (r0, 0);
5045       if (! target || ! register_operand (target, QFmode))
5046 	target = gen_reg_rtx (QFmode);
5047       emit_insn (gen_rcpfqf_clobber (target, r0));
/* Output assembler code to switch to section NAME by emitting a
   ".sect" directive.  FLAGS (section attributes) is ignored.
   NOTE(review): the return-type line, the declaration of NAME and the
   braces are not visible in this excerpt.  */
5054 c4x_asm_named_section (name, flags)
5056      unsigned int flags ATTRIBUTE_UNUSED;
5058   fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);