1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
51 #include "target-def.h"
/* SYMBOL_REF rtxes for the fixed<->float conversion library routines
   (QF/HF <-> HImode).  Presumably initialized elsewhere in this file
   -- the initialization is not visible in this excerpt.  */
55 rtx fix_truncqfhi2_libfunc;
56 rtx fixuns_truncqfhi2_libfunc;
57 rtx fix_trunchfhi2_libfunc;
58 rtx fixuns_trunchfhi2_libfunc;
59 rtx floathiqf2_libfunc;
60 rtx floatunshiqf2_libfunc;
61 rtx floathihf2_libfunc;
62 rtx floatunshihf2_libfunc;
/* Nonzero when the current function is treated as a leaf for ISR
   register save/restore purposes; set from c4x_leaf_function_p ()
   in c4x_expand_prologue and tested in c4x_isr_reg_used_p.  */
64 static int c4x_leaf_function;
66 static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
68 /* Array of the smallest class containing reg number REGNO, indexed by
69 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
70 registers are available and set the class to NO_REGS for registers
71 that the target switches say are unavailable. */
/* NOTE(review): the opening/closing braces of this initializer are not
   visible in this excerpt (source lines elided); entries preserved
   verbatim.  */
73 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
75 /* Reg Modes Saved. */
76 R0R1_REGS, /* R0 QI, QF, HF No. */
77 R0R1_REGS, /* R1 QI, QF, HF No. */
78 R2R3_REGS, /* R2 QI, QF, HF No. */
79 R2R3_REGS, /* R3 QI, QF, HF No. */
80 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
81 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
82 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
83 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
84 ADDR_REGS, /* AR0 QI No. */
85 ADDR_REGS, /* AR1 QI No. */
86 ADDR_REGS, /* AR2 QI No. */
87 ADDR_REGS, /* AR3 QI QI. */
88 ADDR_REGS, /* AR4 QI QI. */
89 ADDR_REGS, /* AR5 QI QI. */
90 ADDR_REGS, /* AR6 QI QI. */
91 ADDR_REGS, /* AR7 QI QI. */
92 DP_REG, /* DP QI No. */
93 INDEX_REGS, /* IR0 QI No. */
94 INDEX_REGS, /* IR1 QI No. */
95 BK_REG, /* BK QI QI. */
96 SP_REG, /* SP QI No. */
97 ST_REG, /* ST CC No. */
98 NO_REGS, /* DIE/IE No. */
99 NO_REGS, /* IIE/IF No. */
100 NO_REGS, /* IIF/IOF No. */
101 INT_REGS, /* RS QI No. */
102 INT_REGS, /* RE QI No. */
103 RC_REG, /* RC QI No. */
104 EXT_REGS, /* R8 QI, QF, HF QI. */
105 EXT_REGS, /* R9 QI, QF, HF No. */
106 EXT_REGS, /* R10 QI, QF, HF No. */
107 EXT_REGS, /* R11 QI, QF, HF No. */
/* Mode in which each hard register should be saved across calls,
   indexed by REGNO; VOIDmode marks registers that are not saved.
   Parallels c4x_regclass_map above.  NOTE(review): initializer braces
   are elided in this excerpt; entries preserved verbatim.  */
110 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
112 /* Reg Modes Saved. */
113 HFmode, /* R0 QI, QF, HF No. */
114 HFmode, /* R1 QI, QF, HF No. */
115 HFmode, /* R2 QI, QF, HF No. */
116 HFmode, /* R3 QI, QF, HF No. */
117 QFmode, /* R4 QI, QF, HF QI. */
118 QFmode, /* R5 QI, QF, HF QI. */
119 QImode, /* R6 QI, QF, HF QF. */
120 QImode, /* R7 QI, QF, HF QF. */
121 QImode, /* AR0 QI No. */
122 QImode, /* AR1 QI No. */
123 QImode, /* AR2 QI No. */
124 QImode, /* AR3 QI QI. */
125 QImode, /* AR4 QI QI. */
126 QImode, /* AR5 QI QI. */
127 QImode, /* AR6 QI QI. */
128 QImode, /* AR7 QI QI. */
129 VOIDmode, /* DP QI No. */
130 QImode, /* IR0 QI No. */
131 QImode, /* IR1 QI No. */
132 QImode, /* BK QI QI. */
133 VOIDmode, /* SP QI No. */
134 VOIDmode, /* ST CC No. */
135 VOIDmode, /* DIE/IE No. */
136 VOIDmode, /* IIE/IF No. */
137 VOIDmode, /* IIF/IOF No. */
138 QImode, /* RS QI No. */
139 QImode, /* RE QI No. */
140 VOIDmode, /* RC QI No. */
141 QFmode, /* R8 QI, QF, HF QI. */
142 HFmode, /* R9 QI, QF, HF No. */
143 HFmode, /* R10 QI, QF, HF No. */
144 HFmode, /* R11 QI, QF, HF No. */
148 /* Test and compare insns in c4x.md store the information needed to
149 generate branch and scc insns here. */
/* Command-line option strings and their parsed values; the strings
   are converted with atoi () in c4x_override_options.  */
154 const char *c4x_rpts_cycles_string;
155 int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
156 const char *c4x_cpu_version_string;
157 int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
159 /* Pragma definitions. */
/* Trees accumulated by the CODE_SECTION/DATA_SECTION etc. pragmas;
   consulted when attributes are inserted (see c4x_insert_attributes,
   declared below).  */
161 tree code_tree = NULL_TREE;
162 tree data_tree = NULL_TREE;
163 tree pure_tree = NULL_TREE;
164 tree noreturn_tree = NULL_TREE;
165 tree interrupt_tree = NULL_TREE;
167 /* Forward declarations for the static helpers defined later in this
   file (old-style PARAMS macro, pre-C89-prototype convention).  */
168 static int c4x_isr_reg_used_p PARAMS ((unsigned int));
169 static int c4x_leaf_function_p PARAMS ((void));
170 static int c4x_assembler_function_p PARAMS ((void));
171 static int c4x_immed_float_p PARAMS ((rtx));
172 static int c4x_a_register PARAMS ((rtx));
173 static int c4x_x_register PARAMS ((rtx));
174 static int c4x_immed_int_constant PARAMS ((rtx));
175 static int c4x_immed_float_constant PARAMS ((rtx));
176 static int c4x_K_constant PARAMS ((rtx));
177 static int c4x_N_constant PARAMS ((rtx));
178 static int c4x_O_constant PARAMS ((rtx));
179 static int c4x_R_indirect PARAMS ((rtx));
180 static int c4x_S_indirect PARAMS ((rtx));
181 static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
182 static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
183 enum machine_mode, int));
184 static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
185 static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
186 static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
187 static int c4x_r11_set_p PARAMS ((rtx));
188 static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
189 static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
190 static tree c4x_handle_fntype_attribute PARAMS ((tree *, tree, tree, int, bool *));
191 const struct attribute_spec c4x_attribute_table[];
192 static void c4x_insert_attributes PARAMS ((tree, tree *));
193 static void c4x_asm_named_section PARAMS ((const char *, unsigned int));
194 static int c4x_adjust_cost PARAMS ((rtx, rtx, rtx, int));
195 static void c4x_encode_section_info PARAMS ((tree, int));
196 static void c4x_globalize_label PARAMS ((FILE *, const char *));
198 /* Initialize the GCC target structure. */
/* Bytes are emitted with the assembler's .word directive; the HI/SI
   aligned ops are disabled (NULL) -- presumably because the target is
   word-addressed, TODO confirm against c4x.h.  */
199 #undef TARGET_ASM_BYTE_OP
200 #define TARGET_ASM_BYTE_OP "\t.word\t"
201 #undef TARGET_ASM_ALIGNED_HI_OP
202 #define TARGET_ASM_ALIGNED_HI_OP NULL
203 #undef TARGET_ASM_ALIGNED_SI_OP
204 #define TARGET_ASM_ALIGNED_SI_OP NULL
206 #undef TARGET_ATTRIBUTE_TABLE
207 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
209 #undef TARGET_INSERT_ATTRIBUTES
210 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
212 #undef TARGET_INIT_BUILTINS
213 #define TARGET_INIT_BUILTINS c4x_init_builtins
215 #undef TARGET_EXPAND_BUILTIN
216 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
218 #undef TARGET_SCHED_ADJUST_COST
219 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
221 #undef TARGET_ENCODE_SECTION_INFO
222 #define TARGET_ENCODE_SECTION_INFO c4x_encode_section_info
224 #undef TARGET_ASM_GLOBALIZE_LABEL
225 #define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
/* The single instance of the target hook vector for this back end.  */
227 struct gcc_target targetm = TARGET_INITIALIZER;
229 /* Override command line options.
230 Called once after all options have been parsed.
231 Mostly we process the processor
232 type and sometimes adjust other TARGET_ options. */
/* NOTE(review): several lines of this function (braces, the -m30/-m31
   etc. conditions guarding the version assignments, and else branches)
   are elided in this excerpt; the visible code is preserved verbatim.  */
235 c4x_override_options ()
237 if (c4x_rpts_cycles_string)
238 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
243 c4x_cpu_version = 30;
245 c4x_cpu_version = 31;
247 c4x_cpu_version = 32;
249 c4x_cpu_version = 33;
251 c4x_cpu_version = 40;
253 c4x_cpu_version = 44;
255 c4x_cpu_version = 40;
257 /* -mcpu=xx overrides -m40 etc. */
258 if (c4x_cpu_version_string)
260 const char *p = c4x_cpu_version_string;
262 /* Also allow -mcpu=c30 etc. */
263 if (*p == 'c' || *p == 'C')
265 c4x_cpu_version = atoi (p);
268 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
269 C40_FLAG | C44_FLAG);
271 switch (c4x_cpu_version)
273 case 30: target_flags |= C30_FLAG; break;
274 case 31: target_flags |= C31_FLAG; break;
275 case 32: target_flags |= C32_FLAG; break;
276 case 33: target_flags |= C33_FLAG; break;
277 case 40: target_flags |= C40_FLAG; break;
278 case 44: target_flags |= C44_FLAG; break;
/* Default case: unrecognized -mcpu value falls back to the C40.  */
280 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
281 c4x_cpu_version = 40;
282 target_flags |= C40_FLAG;
/* Any C3x family member implies the generic C3X flag.  */
285 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
286 target_flags |= C3X_FLAG;
288 target_flags &= ~C3X_FLAG;
290 /* Convert foo / 8.0 into foo * 0.125, etc. */
291 set_fast_math_flags (1);
293 /* We should phase out the following at some stage.
294 This provides compatibility with the old -mno-aliases option. */
295 if (! TARGET_ALIASES && ! flag_argument_noalias)
296 flag_argument_noalias = 1;
298 /* We're C4X floating point, not IEEE floating point. */
299 memset (real_format_for_mode, 0, sizeof real_format_for_mode);
300 real_format_for_mode[QFmode - QFmode] = &c4x_single_format;
301 real_format_for_mode[HFmode - QFmode] = &c4x_extended_format;
305 /* This is called before c4x_override_options. */
308 c4x_optimization_options (level, size)
309 int level ATTRIBUTE_UNUSED;
310 int size ATTRIBUTE_UNUSED;
312 /* Scheduling before register allocation can screw up global
313 register allocation, especially for functions that use MPY||ADD
314 instructions. The benefit we gain by scheduling before
315 register allocation is probably marginal anyhow. */
316 flag_schedule_insns = 0;
320 /* Write an ASCII string. */
/* Maximum number of printable characters buffered into one quoted
   ".byte" string before it is flushed.  */
322 #define C4X_ASCII_LIMIT 40
/* Emit LEN bytes at PTR to STREAM as assembler .byte directives,
   using quoted strings for printable runs and decimal values
   otherwise.  NOTE(review): several statements (buffer accumulation,
   flush logic, loop braces) are elided in this excerpt.  */
325 c4x_output_ascii (stream, ptr, len)
330 char sbuf[C4X_ASCII_LIMIT + 1];
331 int s, l, special, first = 1, onlys;
334 fprintf (stream, "\t.byte\t");
336 for (s = l = 0; len > 0; --len, ++ptr)
340 /* Escape " and \ with a \". */
341 special = *ptr == '\"' || *ptr == '\\';
343 /* If printable - add to buff. */
344 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
349 if (s < C4X_ASCII_LIMIT - 1)
364 fprintf (stream, "\"%s\"", sbuf);
/* The TI assembler line length is limited; start a new directive
   once the output line reaches 80 columns.  */
366 if (TARGET_TI && l >= 80 && len > 1)
368 fprintf (stream, "\n\t.byte\t");
386 fprintf (stream, "%d", *ptr);
388 if (TARGET_TI && l >= 80 && len > 1)
390 fprintf (stream, "\n\t.byte\t");
/* Flush any remaining buffered printable characters.  */
401 fprintf (stream, "\"%s\"", sbuf);
404 fputc ('\n', stream);
/* Return nonzero if hard register REGNO can hold a value of mode MODE
   (implements HARD_REGNO_MODE_OK).  NOTE(review): the switch header
   and some braces are elided in this excerpt.  */
409 c4x_hard_regno_mode_ok (regno, mode)
411 enum machine_mode mode;
416 case Pmode: /* Pointer (24/32 bits). */
418 case QImode: /* Integer (32 bits). */
419 return IS_INT_REGNO (regno);
421 case QFmode: /* Float, Double (32 bits). */
422 case HFmode: /* Long Double (40 bits). */
423 return IS_EXT_REGNO (regno);
425 case CCmode: /* Condition Codes. */
426 case CC_NOOVmode: /* Condition Codes. */
427 return IS_ST_REGNO (regno);
429 case HImode: /* Long Long (64 bits). */
430 /* We need two registers to store long longs. Note that
431 it is much easier to constrain the first register
432 to start on an even boundary. */
433 return IS_INT_REGNO (regno)
434 && IS_INT_REGNO (regno + 1)
438 return 0; /* We don't support these modes. */
444 /* Return nonzero if REGNO1 can be renamed to REGNO2. */
/* NOTE(review): the return statements following each condition are
   elided in this excerpt; visible code preserved verbatim.  */
446 c4x_hard_regno_rename_ok (regno1, regno2)
450 /* We can not copy call saved registers from mode QI into QF or from
452 if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
454 if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
456 /* We cannot copy from an extended (40 bit) register to a standard
457 (32 bit) register because we only set the condition codes for
458 extended registers. */
459 if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
461 if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
466 /* The TI C3x C compiler register argument runtime model uses 6 registers,
467 AR2, R2, R3, RC, RS, RE.
469 The first two floating point arguments (float, double, long double)
470 that are found scanning from left to right are assigned to R2 and R3.
472 The remaining integer (char, short, int, long) or pointer arguments
473 are assigned to the remaining registers in the order AR2, R2, R3,
474 RC, RS, RE when scanning left to right, except for the last named
475 argument prior to an ellipsis denoting variable number of
476 arguments. We don't have to worry about the latter condition since
477 function.c treats the last named argument as anonymous (unnamed).
479 All arguments that cannot be passed in registers are pushed onto
480 the stack in reverse order (right to left). GCC handles that for us.
482 c4x_init_cumulative_args() is called at the start, so we can parse
483 the args to see how many floating point arguments and how many
484 integer (or pointer) arguments there are. c4x_function_arg() is
485 then called (sometimes repeatedly) for each argument (parsed left
486 to right) to obtain the register to pass the argument in, or zero
487 if the argument is to be passed on the stack. Once the compiler is
488 happy, c4x_function_arg_advance() is called.
490 Don't use R0 to pass arguments in, we use 0 to indicate a stack
493 static const int c4x_int_reglist[3][6] =
/* Integer argument registers, indexed by [number of float args
   already assigned][integer argument index].  */
495 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
496 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
497 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
/* Floating-point argument registers, in assignment order.  */
500 static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
503 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
504 function whose data type is FNTYPE.
505 For a library call, FNTYPE is 0. */
/* NOTE(review): debug-output guards (presumably TARGET_DEBUG) and the
   counters incremented in the scan loop are elided in this excerpt.  */
508 c4x_init_cumulative_args (cum, fntype, libname)
509 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
510 tree fntype; /* Tree ptr for function decl. */
511 rtx libname; /* SYMBOL_REF of library name or 0. */
513 tree param, next_param;
515 cum->floats = cum->ints = 0;
522 fprintf (stderr, "\nc4x_init_cumulative_args (");
525 tree ret_type = TREE_TYPE (fntype);
527 fprintf (stderr, "fntype code = %s, ret code = %s",
528 tree_code_name[(int) TREE_CODE (fntype)],
529 tree_code_name[(int) TREE_CODE (ret_type)]);
532 fprintf (stderr, "no fntype");
535 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
538 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
/* Scan the prototype's argument list, counting float and integer
   arguments so c4x_function_arg can pick registers later.  */
540 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
541 param; param = next_param)
545 next_param = TREE_CHAIN (param);
547 type = TREE_VALUE (param);
548 if (type && type != void_type_node)
550 enum machine_mode mode;
552 /* If the last arg doesn't have void type then we have
553 variable arguments. */
557 if ((mode = TYPE_MODE (type)))
559 if (! MUST_PASS_IN_STACK (mode, type))
561 /* Look for float, double, or long double argument. */
562 if (mode == QFmode || mode == HFmode)
564 /* Look for integer, enumeral, boolean, char, or pointer
566 else if (mode == QImode || mode == Pmode)
575 fprintf (stderr, "%s%s, args = %d)\n",
576 cum->prototype ? ", prototype" : "",
577 cum->var ? ", variable args" : "",
582 /* Update the data in CUM to advance over an argument
583 of mode MODE and data type TYPE.
584 (TYPE is null for libcalls where that information may not be available.) */
/* NOTE(review): the statements incrementing cum->floats / cum->ints
   after each mode test are elided in this excerpt.  */
587 c4x_function_arg_advance (cum, mode, type, named)
588 CUMULATIVE_ARGS *cum; /* Current arg information. */
589 enum machine_mode mode; /* Current arg mode. */
590 tree type; /* Type of the arg or 0 if lib support. */
591 int named; /* Whether or not the argument was named. */
594 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
595 GET_MODE_NAME (mode), named);
599 && ! MUST_PASS_IN_STACK (mode, type))
601 /* Look for float, double, or long double argument. */
602 if (mode == QFmode || mode == HFmode)
604 /* Look for integer, enumeral, boolean, char, or pointer argument. */
605 else if (mode == QImode || mode == Pmode)
608 else if (! TARGET_MEMPARM && ! type)
610 /* Handle libcall arguments. */
611 if (mode == QFmode || mode == HFmode)
613 else if (mode == QImode || mode == Pmode)
620 /* Define where to put the arguments to a function. Value is zero to
621 push the argument on the stack, or a hard register in which to
624 MODE is the argument's machine mode.
625 TYPE is the data type of the argument (as a tree).
626 This is null for libcalls where that information may
628 CUM is a variable of type CUMULATIVE_ARGS which gives info about
629 the preceding args and about the function being called.
630 NAMED is nonzero if this argument is a named parameter
631 (otherwise it is an extra parameter matching an ellipsis). */
/* NOTE(review): guard conditions and some braces are elided in this
   excerpt; visible code preserved verbatim.  */
634 c4x_function_arg (cum, mode, type, named)
635 CUMULATIVE_ARGS *cum; /* Current arg information. */
636 enum machine_mode mode; /* Current arg mode. */
637 tree type; /* Type of the arg or 0 if lib support. */
638 int named; /* != 0 for normal args, == 0 for ... args. */
640 int reg = 0; /* Default to passing argument on stack. */
644 /* We can handle at most 2 floats in R2, R3. */
645 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
647 /* We can handle at most 6 integers minus number of floats passed
649 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
650 6 - cum->maxfloats : cum->ints;
652 /* If there is no prototype, assume all the arguments are integers. */
653 if (! cum->prototype)
656 cum->ints = cum->floats = 0;
660 /* This marks the last argument. We don't need to pass this through
662 if (type == void_type_node)
668 && ! MUST_PASS_IN_STACK (mode, type))
670 /* Look for float, double, or long double argument. */
671 if (mode == QFmode || mode == HFmode)
673 if (cum->floats < cum->maxfloats)
674 reg = c4x_fp_reglist[cum->floats];
676 /* Look for integer, enumeral, boolean, char, or pointer argument. */
677 else if (mode == QImode || mode == Pmode)
679 if (cum->ints < cum->maxints)
680 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
683 else if (! TARGET_MEMPARM && ! type)
685 /* We could use a different argument calling model for libcalls,
686 since we're only calling functions in libgcc. Thus we could
687 pass arguments for long longs in registers rather than on the
688 stack. In the meantime, use the odd TI format. We make the
689 assumption that we won't have more than two floating point
690 args, six integer args, and that all the arguments are of the
692 if (mode == QFmode || mode == HFmode)
693 reg = c4x_fp_reglist[cum->floats];
694 else if (mode == QImode || mode == Pmode)
695 reg = c4x_int_reglist[0][cum->ints];
700 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
701 GET_MODE_NAME (mode), named);
703 fprintf (stderr, ", reg=%s", reg_names[reg]);
705 fprintf (stderr, ", stack");
706 fprintf (stderr, ")\n");
709 return gen_rtx_REG (mode, reg);
714 /* C[34]x arguments grow in weird ways (downwards) that the standard
715 varargs stuff can't handle. */
/* Pre-decrement the va_list pointer by the argument size and expand
   the resulting address to an rtx.  */
717 c4x_va_arg (valist, type)
722 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
723 build_int_2 (int_size_in_bytes (type), 0));
724 TREE_SIDE_EFFECTS (t) = 1;
726 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
/* Return nonzero if hard register REGNO must be saved/restored by an
   interrupt service routine's prologue/epilogue.  */
731 c4x_isr_reg_used_p (regno)
734 /* Don't save/restore FP or ST, we handle them separately. */
735 if (regno == FRAME_POINTER_REGNUM
736 || IS_ST_REGNO (regno))
739 /* We could be a little smarter about saving/restoring DP.
740 We'll only save if for the big memory model or if
741 we're paranoid. ;-) */
742 if (IS_DP_REGNO (regno))
743 return ! TARGET_SMALL || TARGET_PARANOID;
745 /* Only save/restore regs in leaf function that are used. */
746 if (c4x_leaf_function)
747 return regs_ever_live[regno] && fixed_regs[regno] == 0;
749 /* Only save/restore regs that are used by the ISR and regs
750 that are likely to be used by functions the ISR calls
751 if they are not fixed. */
752 return IS_EXT_REGNO (regno)
753 || ((regs_ever_live[regno] || call_used_regs[regno])
754 && fixed_regs[regno] == 0);
/* Return nonzero if the current function can be treated as a leaf for
   ISR save/restore purposes, either via the leaf_pretend attribute or
   an actual leaf check.  NOTE(review): the return statements after
   each test are elided in this excerpt.  */
759 c4x_leaf_function_p ()
761 /* A leaf function makes no calls, so we only need
762 to save/restore the registers we actually use.
763 For the global variable leaf_function to be set, we need
764 to define LEAF_REGISTERS and all that it entails.
765 Let's check ourselves... */
767 if (lookup_attribute ("leaf_pretend",
768 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
771 /* Use the leaf_pretend attribute at your own risk. This is a hack
772 to speed up ISRs that call a function infrequently where the
773 overhead of saving and restoring the additional registers is not
774 warranted. You must save and restore the additional registers
775 required by the called function. Caveat emptor. Here's enough
778 if (leaf_function_p ())
/* Return nonzero if the current function carries the "assembler" or
   "naked" attribute, i.e. no prologue/epilogue should be emitted.  */
786 c4x_assembler_function_p ()
790 type = TREE_TYPE (current_function_decl);
791 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
792 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
/* Return nonzero if the current function is an interrupt handler,
   either via the "interrupt" attribute or a TI-style c_intNN name.  */
797 c4x_interrupt_function_p ()
799 if (lookup_attribute ("interrupt",
800 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
803 /* Look for TI style c_intnn. */
804 return current_function_name[0] == 'c'
805 && current_function_name[1] == '_'
806 && current_function_name[2] == 'i'
807 && current_function_name[3] == 'n'
808 && current_function_name[4] == 't'
809 && ISDIGIT (current_function_name[5])
810 && ISDIGIT (current_function_name[6]);
/* Emit the RTL for the function prologue: special-cased for
   __assembler__ (none) and __interrupt__ functions, otherwise the
   normal frame setup (push FP, adjust SP, save call-saved regs).
   NOTE(review): many guard conditions, braces and else branches are
   elided in this excerpt; visible code preserved verbatim.  */
814 c4x_expand_prologue ()
817 int size = get_frame_size ();
820 /* In functions where ar3 is not used but frame pointers are still
821 specified, frame pointers are not adjusted (if >= -O2) and this
822 is used so it won't needlessly push the frame pointer. */
825 /* For __assembler__ function don't build a prologue. */
826 if (c4x_assembler_function_p ())
831 /* For __interrupt__ function build specific prologue. */
832 if (c4x_interrupt_function_p ())
834 c4x_leaf_function = c4x_leaf_function_p ();
836 insn = emit_insn (gen_push_st ());
837 RTX_FRAME_RELATED_P (insn) = 1;
840 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
841 RTX_FRAME_RELATED_P (insn) = 1;
842 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
843 gen_rtx_REG (QImode, SP_REGNO)));
844 RTX_FRAME_RELATED_P (insn) = 1;
845 /* We require that an ISR uses fewer than 32768 words of
846 local variables, otherwise we have to go to lots of
847 effort to save a register, load it with the desired size,
848 adjust the stack pointer, and then restore the modified
849 register. Frankly, I think it is a poor ISR that
850 requires more than 32767 words of local temporary
853 error ("ISR %s requires %d words of local vars, max is 32767",
854 current_function_name, size);
856 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
857 gen_rtx_REG (QImode, SP_REGNO),
859 RTX_FRAME_RELATED_P (insn) = 1;
/* Save every register the ISR (or its callees) may clobber.  */
861 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
863 if (c4x_isr_reg_used_p (regno))
865 if (regno == DP_REGNO)
867 insn = emit_insn (gen_push_dp ());
868 RTX_FRAME_RELATED_P (insn) = 1;
872 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
873 RTX_FRAME_RELATED_P (insn) = 1;
874 if (IS_EXT_REGNO (regno))
876 insn = emit_insn (gen_pushqf
877 (gen_rtx_REG (QFmode, regno)));
878 RTX_FRAME_RELATED_P (insn) = 1;
883 /* We need to clear the repeat mode flag if the ISR is
884 going to use a RPTB instruction or uses the RC, RS, or RE
886 if (regs_ever_live[RC_REGNO]
887 || regs_ever_live[RS_REGNO]
888 || regs_ever_live[RE_REGNO])
890 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
891 RTX_FRAME_RELATED_P (insn) = 1;
894 /* Reload DP reg if we are paranoid about some turkey
895 violating small memory model rules. */
896 if (TARGET_SMALL && TARGET_PARANOID)
898 insn = emit_insn (gen_set_ldp_prologue
899 (gen_rtx_REG (QImode, DP_REGNO),
900 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
901 RTX_FRAME_RELATED_P (insn) = 1;
/* Normal (non-ISR, non-assembler) function prologue.  */
906 if (frame_pointer_needed)
909 || (current_function_args_size != 0)
912 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
913 RTX_FRAME_RELATED_P (insn) = 1;
914 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
915 gen_rtx_REG (QImode, SP_REGNO)));
916 RTX_FRAME_RELATED_P (insn) = 1;
921 /* Since ar3 is not used, we don't need to push it. */
927 /* If we use ar3, we need to push it. */
929 if ((size != 0) || (current_function_args_size != 0))
931 /* If we are omitting the frame pointer, we still have
932 to make space for it so the offsets are correct
933 unless we don't use anything on the stack at all. */
940 /* Local vars are too big, it will take multiple operations
944 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
945 GEN_INT(size >> 16)));
946 RTX_FRAME_RELATED_P (insn) = 1;
947 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
948 gen_rtx_REG (QImode, R1_REGNO),
950 RTX_FRAME_RELATED_P (insn) = 1;
954 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
955 GEN_INT(size & ~0xffff)));
956 RTX_FRAME_RELATED_P (insn) = 1;
958 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
959 gen_rtx_REG (QImode, R1_REGNO),
960 GEN_INT(size & 0xffff)));
961 RTX_FRAME_RELATED_P (insn) = 1;
962 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
963 gen_rtx_REG (QImode, SP_REGNO),
964 gen_rtx_REG (QImode, R1_REGNO)));
965 RTX_FRAME_RELATED_P (insn) = 1;
969 /* Local vars take up less than 32767 words, so we can directly
971 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
972 gen_rtx_REG (QImode, SP_REGNO),
974 RTX_FRAME_RELATED_P (insn) = 1;
/* Save the call-saved registers this function uses.  */
977 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
979 if (regs_ever_live[regno] && ! call_used_regs[regno])
981 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
983 if (TARGET_PRESERVE_FLOAT)
985 insn = emit_insn (gen_pushqi
986 (gen_rtx_REG (QImode, regno)));
987 RTX_FRAME_RELATED_P (insn) = 1;
989 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
990 RTX_FRAME_RELATED_P (insn) = 1;
992 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
994 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
995 RTX_FRAME_RELATED_P (insn) = 1;
/* Emit the RTL for the function epilogue: the mirror of
   c4x_expand_prologue (restore registers in reverse order, unwind the
   frame, emit the return).  NOTE(review): many guard conditions,
   braces and else branches are elided in this excerpt; visible code
   preserved verbatim.  */
1004 c4x_expand_epilogue()
1010 int size = get_frame_size ();
1012 /* For __assembler__ function build no epilogue. */
1013 if (c4x_assembler_function_p ())
1015 insn = emit_jump_insn (gen_return_from_epilogue ());
1016 RTX_FRAME_RELATED_P (insn) = 1;
1020 /* For __interrupt__ function build specific epilogue. */
1021 if (c4x_interrupt_function_p ())
1023 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1025 if (! c4x_isr_reg_used_p (regno))
1027 if (regno == DP_REGNO)
1029 insn = emit_insn (gen_pop_dp ());
1030 RTX_FRAME_RELATED_P (insn) = 1;
1034 /* We have to use unspec because the compiler will delete insns
1035 that are not call-saved. */
1036 if (IS_EXT_REGNO (regno))
1038 insn = emit_insn (gen_popqf_unspec
1039 (gen_rtx_REG (QFmode, regno)));
1040 RTX_FRAME_RELATED_P (insn) = 1;
1042 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1043 RTX_FRAME_RELATED_P (insn) = 1;
1048 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1049 gen_rtx_REG (QImode, SP_REGNO),
1051 RTX_FRAME_RELATED_P (insn) = 1;
1052 insn = emit_insn (gen_popqi
1053 (gen_rtx_REG (QImode, AR3_REGNO)));
1054 RTX_FRAME_RELATED_P (insn) = 1;
1056 insn = emit_insn (gen_pop_st ());
1057 RTX_FRAME_RELATED_P (insn) = 1;
1058 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1059 RTX_FRAME_RELATED_P (insn) = 1;
/* Normal function epilogue.  */
1063 if (frame_pointer_needed)
1066 || (current_function_args_size != 0)
1070 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1071 gen_rtx_MEM (QImode,
1073 (QImode, gen_rtx_REG (QImode,
1076 RTX_FRAME_RELATED_P (insn) = 1;
1078 /* We already have the return value and the fp,
1079 so we need to add those to the stack. */
1086 /* Since ar3 is not used for anything, we don't need to
1093 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1094 if (size || current_function_args_size)
1096 /* If we are omitting the frame pointer, we still have
1097 to make space for it so the offsets are correct
1098 unless we don't use anything on the stack at all. */
1103 /* Now restore the saved registers, putting in the delayed branch
1105 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1107 if (regs_ever_live[regno] && ! call_used_regs[regno])
1109 if (regno == AR3_REGNO && dont_pop_ar3)
1112 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1114 insn = emit_insn (gen_popqf_unspec
1115 (gen_rtx_REG (QFmode, regno)));
1116 RTX_FRAME_RELATED_P (insn) = 1;
1117 if (TARGET_PRESERVE_FLOAT)
1119 insn = emit_insn (gen_popqi_unspec
1120 (gen_rtx_REG (QImode, regno)));
1121 RTX_FRAME_RELATED_P (insn) = 1;
1126 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1127 RTX_FRAME_RELATED_P (insn) = 1;
1132 if (frame_pointer_needed)
1135 || (current_function_args_size != 0)
1138 /* Restore the old FP. */
1141 (gen_rtx_REG (QImode, AR3_REGNO),
1142 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1144 RTX_FRAME_RELATED_P (insn) = 1;
1150 /* Local vars are too big, it will take multiple operations
1154 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1155 GEN_INT(size >> 16)));
1156 RTX_FRAME_RELATED_P (insn) = 1;
1157 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1158 gen_rtx_REG (QImode, R3_REGNO),
1160 RTX_FRAME_RELATED_P (insn) = 1;
1164 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1165 GEN_INT(size & ~0xffff)));
1166 RTX_FRAME_RELATED_P (insn) = 1;
1168 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1169 gen_rtx_REG (QImode, R3_REGNO),
1170 GEN_INT(size & 0xffff)));
1171 RTX_FRAME_RELATED_P (insn) = 1;
1172 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1173 gen_rtx_REG (QImode, SP_REGNO),
1174 gen_rtx_REG (QImode, R3_REGNO)));
1175 RTX_FRAME_RELATED_P (insn) = 1;
1179 /* Local vars take up less than 32768 words, so we can directly
1180 subtract the number. */
1181 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1182 gen_rtx_REG (QImode, SP_REGNO),
1184 RTX_FRAME_RELATED_P (insn) = 1;
1189 insn = emit_jump_insn (gen_return_indirect_internal
1190 (gen_rtx_REG (QImode, R2_REGNO)));
1191 RTX_FRAME_RELATED_P (insn) = 1;
1195 insn = emit_jump_insn (gen_return_from_epilogue ());
1196 RTX_FRAME_RELATED_P (insn) = 1;
/* Return nonzero if the current function needs no epilogue at all:
   reload is done, it is a plain function with no frame, no incoming
   args and no call-saved registers (other than AR3) live.
   NOTE(review): the return statements are elided in this excerpt.  */
1203 c4x_null_epilogue_p ()
1207 if (reload_completed
1208 && ! c4x_assembler_function_p ()
1209 && ! c4x_interrupt_function_p ()
1210 && ! current_function_calls_alloca
1211 && ! current_function_args_size
1213 && ! get_frame_size ())
1215 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1216 if (regs_ever_live[regno] && ! call_used_regs[regno]
1217 && (regno != AR3_REGNO))
/* Massage OPERANDS for a move of mode MODE before the normal move
   pattern is emitted: force awkward constants to registers or the
   constant pool, expose LDP (DP-register) setup for symbolic memory
   addresses, and rewrite mixed-mode SUBREGs.  NOTE(review): several
   conditions, returns and braces are elided in this excerpt.  */
1226 c4x_emit_move_sequence (operands, mode)
1228 enum machine_mode mode;
1230 rtx op0 = operands[0];
1231 rtx op1 = operands[1];
1233 if (! reload_in_progress
1236 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1237 op1 = force_reg (mode, op1);
1239 if (GET_CODE (op1) == LO_SUM
1240 && GET_MODE (op1) == Pmode
1241 && dp_reg_operand (XEXP (op1, 0), mode))
1243 /* expand_increment will sometimes create a LO_SUM immediate
1245 op1 = XEXP (op1, 1);
1247 else if (symbolic_address_operand (op1, mode))
1249 if (TARGET_LOAD_ADDRESS)
1251 /* Alias analysis seems to do a better job if we force
1252 constant addresses to memory after reload. */
1253 emit_insn (gen_load_immed_address (op0, op1));
1258 /* Stick symbol or label address into the constant pool. */
1259 op1 = force_const_mem (Pmode, op1);
1262 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1264 /* We could be a lot smarter about loading some of these
1266 op1 = force_const_mem (mode, op1);
1269 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1270 and emit associated (HIGH (SYMREF)) if large memory model.
1271 c4x_legitimize_address could be used to do this,
1272 perhaps by calling validize_address. */
1273 if (TARGET_EXPOSE_LDP
1274 && ! (reload_in_progress || reload_completed)
1275 && GET_CODE (op1) == MEM
1276 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1278 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1280 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1281 op1 = change_address (op1, mode,
1282 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
/* Same transformation for a symbolic destination address.  */
1285 if (TARGET_EXPOSE_LDP
1286 && ! (reload_in_progress || reload_completed)
1287 && GET_CODE (op0) == MEM
1288 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1290 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1292 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1293 op0 = change_address (op0, mode,
1294 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
1297 if (GET_CODE (op0) == SUBREG
1298 && mixed_subreg_operand (op0, mode))
1300 /* We should only generate these mixed mode patterns
1301 during RTL generation. If we need do it later on
1302 then we'll have to emit patterns that won't clobber CC. */
1303 if (reload_in_progress || reload_completed)
1305 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1306 op0 = SUBREG_REG (op0);
1307 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1309 op0 = copy_rtx (op0);
1310 PUT_MODE (op0, QImode);
1316 emit_insn (gen_storeqf_int_clobber (op0, op1));
1322 if (GET_CODE (op1) == SUBREG
1323 && mixed_subreg_operand (op1, mode))
1325 /* We should only generate these mixed mode patterns
1326 during RTL generation. If we need do it later on
1327 then we'll have to emit patterns that won't clobber CC. */
1328 if (reload_in_progress || reload_completed)
1330 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1331 op1 = SUBREG_REG (op1);
1332 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1334 op1 = copy_rtx (op1);
1335 PUT_MODE (op1, QImode);
1341 emit_insn (gen_loadqf_int_clobber (op0, op1));
1348 && reg_operand (op0, mode)
1349 && const_int_operand (op1, mode)
1350 && ! IS_INT16_CONST (INTVAL (op1))
1351 && ! IS_HIGH_CONST (INTVAL (op1)))
1353 emit_insn (gen_loadqi_big_constant (op0, op1));
1358 && reg_operand (op0, mode)
1359 && const_int_operand (op1, mode))
1361 emit_insn (gen_loadhi_big_constant (op0, op1));
1365 /* Adjust operands in case we have modified them. */
1369 /* Emit normal pattern. */
/* Emit a library call LIBCALL taking NOPERANDS-1 source operands of
   mode SMODE and producing a DMODE result in OPERANDS[0], wrapped in a
   libcall block whose equivalent RTL is (CODE ...) so the optimizers
   can CSE the call.  */
1375 c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1378 enum machine_mode dmode;
1379 enum machine_mode smode;
/* One-source-operand (unary) case.  */
1391 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1392 operands[1], smode);
1393 equiv = gen_rtx (code, dmode, operands[1]);
/* Two-source-operand (binary) case.  */
1397 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1398 operands[1], smode, operands[2], smode);
1399 equiv = gen_rtx (code, dmode, operands[1], operands[2]);
1406 insns = get_insns ();
1408 emit_libcall_block (insns, operands[0], ret, equiv);
/* Convenience wrapper: three-operand libcall where source and
   destination share MODE.  */
1413 c4x_emit_libcall3 (libcall, code, mode, operands)
1416 enum machine_mode mode;
1419 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
/* Emit a widening-multiply-high libcall: the equivalent RTL is the
   truncated high part of an HImode product of the (CODE-extended)
   operands, i.e. (truncate (lshiftrt (mult (code op1) (code op2)))).  */
1424 c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1427 enum machine_mode mode;
1435 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1436 operands[1], mode, operands[2], mode);
1437 equiv = gen_rtx_TRUNCATE (mode,
1438 gen_rtx_LSHIFTRT (HImode,
1439 gen_rtx_MULT (HImode,
1440 gen_rtx (code, HImode, operands[1]),
1441 gen_rtx (code, HImode, operands[2])),
1443 insns = get_insns ();
1445 emit_libcall_block (insns, operands[0], ret, equiv);
1449 /* Set the SYMBOL_REF_FLAG for a function decl. However, we do not
1450 yet use this info. */
1453 c4x_encode_section_info (decl, first)
1455 int first ATTRIBUTE_UNUSED;
/* Mark the symbol of every function so later code (e.g. call operand
   checks that test SYMBOL_REF_FLAG) can identify function symbols.  */
1457 if (TREE_CODE (decl) == FUNCTION_DECL)
1458 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
/* Worker for GO_IF_LEGITIMATE_ADDRESS: return nonzero if ADDR is a
   valid MODE address.  Decomposes ADDR into BASE (AR0-AR7), INDX
   (IR0/IR1) and DISP (constant displacement), then validates each
   part.  STRICT means hard-register checks only (post-reload);
   otherwise pseudos are also accepted.  */
1463 c4x_check_legit_addr (mode, addr, strict)
1464 enum machine_mode mode;
1468 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1469 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1470 rtx disp = NULL_RTX; /* Displacement. */
1473 code = GET_CODE (addr);
1476 /* Register indirect with auto increment/decrement. We don't
1477 allow SP here---push_operand should recognize an operand
1478 being pushed on the stack. */
/* Auto inc/dec addressing is only valid for single-word modes.  */
1483 if (mode != QImode && mode != QFmode)
1487 base = XEXP (addr, 0);
/* PRE/POST_MODIFY: base must match the modified register; the
   adjustment is either a register (index) or a constant (disp).  */
1495 rtx op0 = XEXP (addr, 0);
1496 rtx op1 = XEXP (addr, 1);
1498 if (mode != QImode && mode != QFmode)
1502 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1504 base = XEXP (op1, 0);
1507 if (REG_P (XEXP (op1, 1)))
1508 indx = XEXP (op1, 1);
1510 disp = XEXP (op1, 1);
1514 /* Register indirect. */
1519 /* Register indirect with displacement or index. */
1522 rtx op0 = XEXP (addr, 0);
1523 rtx op1 = XEXP (addr, 1);
1524 enum rtx_code code0 = GET_CODE (op0);
1531 base = op0; /* Base + index. */
/* Exactly one of the pair must be the index register.  */
1533 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1541 base = op0; /* Base + displacement. */
1552 /* Direct addressing with DP register. */
1555 rtx op0 = XEXP (addr, 0);
1556 rtx op1 = XEXP (addr, 1);
1558 /* HImode and HFmode direct memory references aren't truly
1559 offsettable (consider case at end of data page). We
1560 probably get better code by loading a pointer and using an
1561 indirect memory reference. */
1562 if (mode == HImode || mode == HFmode)
1565 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1568 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF)
1571 if (GET_CODE (op1) == CONST)
1577 /* Direct addressing with some work for the assembler... */
1579 /* Direct addressing. */
1582 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1584 /* These need to be converted to a LO_SUM (...).
1585 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1588 /* Do not allow direct memory access to absolute addresses.
1589 This is more pain than it's worth, especially for the
1590 small memory model where we can't guarantee that
1591 this address is within the data page---we don't want
1592 to modify the DP register in the small memory model,
1593 even temporarily, since an interrupt can sneak in.... */
1597 /* Indirect indirect addressing. */
1602 fatal_insn ("using CONST_DOUBLE for address", addr);
1608 /* Validate the base register. */
1611 /* Check that the address is offsettable for HImode and HFmode. */
1612 if (indx && (mode == HImode || mode == HFmode))
1615 /* Handle DP based stuff. */
1616 if (REGNO (base) == DP_REGNO)
1618 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1620 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1624 /* Now validate the index register. */
1627 if (GET_CODE (indx) != REG)
1629 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1631 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1635 /* Validate displacement. */
1638 if (GET_CODE (disp) != CONST_INT)
1640 if (mode == HImode || mode == HFmode)
1642 /* The offset displacement must be legitimate. */
1643 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1648 if (! IS_DISP8_CONST (INTVAL (disp)))
1651 /* Can't add an index with a disp. */
/* Worker for LEGITIMIZE_ADDRESS: turn a symbolic address ORIG into a
   legitimate MODE address.  HImode/HFmode addresses are forced into a
   register (so they become offsettable); other symbolic addresses get
   an explicit DP load plus a (LO_SUM DP sym) reference.
   NOTE(review): both parameters are marked ATTRIBUTE_UNUSED yet are
   clearly used below -- the annotations look stale; confirm.  */
1660 c4x_legitimize_address (orig, mode)
1661 rtx orig ATTRIBUTE_UNUSED;
1662 enum machine_mode mode ATTRIBUTE_UNUSED;
1664 if (GET_CODE (orig) == SYMBOL_REF
1665 || GET_CODE (orig) == LABEL_REF)
1667 if (mode == HImode || mode == HFmode)
1669 /* We need to force the address into
1670 a register so that it is offsettable. */
1671 rtx addr_reg = gen_reg_rtx (Pmode);
1672 emit_move_insn (addr_reg, orig);
1677 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1680 emit_insn (gen_set_ldp (dp_reg, orig));
1682 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1690 /* Provide the costs of an addressing mode that contains ADDR.
1691 If ADDR is not a valid address, its cost is irrelevant.
1692 This is used in cse and loop optimisation to determine
1693 if it is worthwhile storing a common address into a register.
1694 Unfortunately, the C4x address cost depends on other operands. */
1697 c4x_address_cost (addr)
1700 switch (GET_CODE (addr))
1711 /* These shouldn't be directly generated. */
/* LO_SUM: cost depends on whether the symbol lives in the small
   memory model's single data page.  */
1719 rtx op1 = XEXP (addr, 1);
1721 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1722 return TARGET_SMALL ? 3 : 4;
1724 if (GET_CODE (op1) == CONST)
1726 rtx offset = const0_rtx;
1728 op1 = eliminate_constant_term (op1, &offset);
1730 /* ??? These costs need rethinking... */
1731 if (GET_CODE (op1) == LABEL_REF)
1734 if (GET_CODE (op1) != SYMBOL_REF)
1737 if (INTVAL (offset) == 0)
1742 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
/* PLUS: base register plus register index or constant displacement.  */
1748 register rtx op0 = XEXP (addr, 0);
1749 register rtx op1 = XEXP (addr, 1);
1751 if (GET_CODE (op0) != REG)
1754 switch (GET_CODE (op1))
1760 /* This cost for REG+REG must be greater than the cost
1761 for REG if we want autoincrement addressing modes. */
1765 /* The following tries to improve GIV combination
1766 in strength reduce but appears not to help. */
1767 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1770 if (IS_DISP1_CONST (INTVAL (op1)))
1773 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
/* Emit a compare of X and Y for condition CODE and return the status
   (ST) register holding the result.  Signed inequality tests in
   CC_NOOVmode are rejected (handled by the early return path).  */
1788 c4x_gen_compare_reg (code, x, y)
1792 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1795 if (mode == CC_NOOVmode
1796 && (code == LE || code == GE || code == LT || code == GT))
1799 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1800 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1801 gen_rtx_COMPARE (mode, x, y)));
/* Build the assembler template for a conditional branch SEQ using the
   skeleton FORM, choosing the delayed/annulled variants from the
   branch's delay-slot annotations.  Returns a pointer into a static
   buffer -- not reentrant, and the template must be consumed before
   the next call.  */
1806 c4x_output_cbranch (form, seq)
1815 static char str[100];
1819 delay = XVECEXP (final_sequence, 0, 1);
1820 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1821 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1822 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
/* Append the suffix at the current end of the accumulated string.  */
1825 cp = &str [strlen (str)];
/* Worker for PRINT_OPERAND: print operand OP to FILE, modified by the
   operand-code LETTER (%A, %H, %I, %L, %N, %K, %M, %O, %C, %U, ...).
   NOTE(review): INTVAL yields HOST_WIDE_INT; the "%d" formats below
   assume it fits in int -- true on 32-bit hosts of the era, but worth
   confirming on 64-bit hosts.  */
1850 c4x_print_operand (file, op, letter)
1851 FILE *file; /* File to write to. */
1852 rtx op; /* Operand to print. */
1853 int letter; /* %<letter> or 0. */
1860 case '#': /* Delayed. */
1862 fprintf (file, "d");
1866 code = GET_CODE (op);
1869 case 'A': /* Direct address. */
1870 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1871 fprintf (file, "@");
1874 case 'H': /* Sethi. */
1875 output_addr_const (file, op);
1878 case 'I': /* Reversed condition. */
1879 code = reverse_condition (code);
1882 case 'L': /* Log 2 of constant. */
1883 if (code != CONST_INT)
1884 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1885 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1888 case 'N': /* Ones complement of small constant. */
1889 if (code != CONST_INT)
1890 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1891 fprintf (file, "%d", ~INTVAL (op));
1894 case 'K': /* Generate ldp(k) if direct address. */
/* Only emit the DP load when the MEM address is (LO_SUM DP sym).  */
1897 && GET_CODE (XEXP (op, 0)) == LO_SUM
1898 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1899 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1901 op1 = XEXP (XEXP (op, 0), 1);
1902 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1904 fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1905 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1906 fprintf (file, "\n");
1911 case 'M': /* Generate ldp(k) if direct address. */
1912 if (! TARGET_SMALL /* Only used in asm statements. */
1914 && (GET_CODE (XEXP (op, 0)) == CONST
1915 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1917 fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1918 output_address (XEXP (op, 0));
1919 fprintf (file, "\n\t");
1923 case 'O': /* Offset address. */
1924 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1926 else if (code == MEM)
1927 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1928 else if (code == REG)
/* For a register pair, the offset word lives in the next register.  */
1929 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1931 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1934 case 'C': /* Call. */
1937 case 'U': /* Call/callu. */
1938 if (code != SYMBOL_REF)
1939 fprintf (file, "u");
/* Plain operand: registers print float names in float modes.  */
1949 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1951 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1953 fprintf (file, "%s", reg_names[REGNO (op)]);
1957 output_address (XEXP (op, 0));
/* Floating-point constants are printed in decimal.  */
1965 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
1966 REAL_VALUE_TO_DECIMAL (r, str, -1);
1967 fprintf (file, "%s", str);
1972 fprintf (file, "%d", INTVAL (op));
/* Condition codes map to the C4x condition mnemonics.  */
1976 fprintf (file, "ne");
1980 fprintf (file, "eq");
1984 fprintf (file, "ge");
1988 fprintf (file, "gt");
1992 fprintf (file, "le");
1996 fprintf (file, "lt");
2000 fprintf (file, "hs");
2004 fprintf (file, "hi");
2008 fprintf (file, "ls");
2012 fprintf (file, "lo");
2016 output_addr_const (file, op);
2020 output_addr_const (file, XEXP (op, 0));
2027 fatal_insn ("c4x_print_operand: Bad operand case", op);
/* Worker for PRINT_OPERAND_ADDRESS: print ADDR to FILE using the C4x
   assembler's indirect-addressing syntax (*ARn, *++ARn(d), *ARn++(IRm),
   @sym, etc.).
   NOTE(review): the "%d" formats take INTVAL (HOST_WIDE_INT); assumed
   to fit in int -- confirm on 64-bit hosts.  */
2034 c4x_print_operand_address (file, addr)
2038 switch (GET_CODE (addr))
2041 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2045 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2049 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
/* POST_MODIFY: base op0 adjusted after use by register or constant.  */
2054 rtx op0 = XEXP (XEXP (addr, 1), 0);
2055 rtx op1 = XEXP (XEXP (addr, 1), 1);
2057 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2058 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2059 reg_names[REGNO (op1)]);
2060 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2061 fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
2063 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2064 fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
2066 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2067 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2068 reg_names[REGNO (op1)]);
2070 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
/* PRE_MODIFY: base op0 adjusted before use by register or constant.  */
2076 rtx op0 = XEXP (XEXP (addr, 1), 0);
2077 rtx op1 = XEXP (XEXP (addr, 1), 1);
2079 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2080 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2081 reg_names[REGNO (op1)]);
2082 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2083 fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
2085 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2086 fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
2088 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2089 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2090 reg_names[REGNO (op1)]);
2092 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2097 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2101 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2104 case PLUS: /* Indirect with displacement. */
2106 rtx op0 = XEXP (addr, 0);
2107 rtx op1 = XEXP (addr, 1);
2113 if (IS_INDEX_REG (op0))
2115 fprintf (file, "*+%s(%s)",
2116 reg_names[REGNO (op1)],
2117 reg_names[REGNO (op0)]); /* Index + base. */
2121 fprintf (file, "*+%s(%s)",
2122 reg_names[REGNO (op0)],
2123 reg_names[REGNO (op1)]); /* Base + index. */
2126 else if (INTVAL (op1) < 0)
2128 fprintf (file, "*-%s(%d)",
2129 reg_names[REGNO (op0)],
2130 -INTVAL (op1)); /* Base - displacement. */
2134 fprintf (file, "*+%s(%d)",
2135 reg_names[REGNO (op0)],
2136 INTVAL (op1)); /* Base + displacement. */
2140 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
/* LO_SUM: direct reference relative to the DP register.  */
2146 rtx op0 = XEXP (addr, 0);
2147 rtx op1 = XEXP (addr, 1);
2149 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2150 c4x_print_operand_address (file, op1);
2152 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2159 fprintf (file, "@");
2160 output_addr_const (file, addr);
2163 /* We shouldn't access CONST_INT addresses. */
2167 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2173 /* Return nonzero if the floating point operand will fit
2174 in the immediate field. */
2177 c4x_immed_float_p (op)
2184 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2185 if (GET_MODE (op) == HFmode)
2186 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2189 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2193 /* Sign extend exponent. */
2194 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
/* Exponent -128 is the C4x encoding of 0.0, which always fits.  */
2195 if (exponent == -128)
/* The short-immediate format has fewer mantissa bits: any bits set
   below the immediate's precision mean the value cannot fit.  */
2197 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2198 return 0; /* Precision doesn't fit. */
2199 return (exponent <= 7) /* Positive exp. */
2200 && (exponent >= -7); /* Negative exp. */
2204 /* The last instruction in a repeat block cannot be a Bcond, DBcond,
2205 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2207 None of the last four instructions from the bottom of the block can
2208 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2209 BcondAT or RETIcondD.
2211 This routine scans the four previous insns for a jump insn, and if
2212 one is found, returns 1 so that we bung in a nop instruction.
2213 This simple minded strategy will add a nop, when it may not
2214 be required. Say when there is a JUMP_INSN near the end of the
2215 block that doesn't get converted into a delayed branch.
2217 Note that we cannot have a call insn, since we don't generate
2218 repeat loops with calls in them (although I suppose we could, but
2219 there's no benefit.)
2221 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2224 c4x_rptb_nop_p (insn)
2230 /* Extract the start label from the jump pattern (rptb_end). */
2231 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2233 /* If there is a label at the end of the loop we must insert
2236 insn = previous_insn (insn);
2237 } while (GET_CODE (insn) == NOTE
2238 || GET_CODE (insn) == USE
2239 || GET_CODE (insn) == CLOBBER);
2240 if (GET_CODE (insn) == CODE_LABEL)
/* Check the last four real insns of the block for a jump.  */
2243 for (i = 0; i < 4; i++)
2245 /* Search back for prev non-note and non-label insn. */
2246 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2247 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2249 if (insn == start_label)
2252 insn = previous_insn (insn);
2255 /* If we have a jump instruction we should insert a NOP. If we
2256 hit repeat block top we should only insert a NOP if the loop
2258 if (GET_CODE (insn) == JUMP_INSN)
2260 insn = previous_insn (insn);
2266 /* The C4x looping instruction needs to be emitted at the top of the
2267 loop. Emitting the true RTL for a looping instruction at the top of
2268 the loop can cause problems with flow analysis. So instead, a dummy
2269 doloop insn is emitted at the end of the loop. This routine checks
2270 for the presence of this doloop insn and then searches back to the
2271 top of the loop, where it inserts the true looping insn (provided
2272 there are no instructions in the loop which would cause problems).
2273 Any additional labels can be emitted at this point. In addition, if
2274 the desired loop count register was not allocated, this routine does
2277 Before we can create a repeat block looping instruction we have to
2278 verify that there are no jumps outside the loop and no jumps outside
2279 the loop go into this loop. This can happen in the basic blocks reorder
2280 pass. The C4x cpu can not handle this. */
/* Recursively walk rtx X; return nonzero if it contains a LABEL_REF
   to CODE_LABEL.  */
2283 c4x_label_ref_used_p (x, code_label)
2293 code = GET_CODE (x);
2294 if (code == LABEL_REF)
2295 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
/* Recurse into every rtx ('e') and rtx-vector ('E') operand.  */
2297 fmt = GET_RTX_FORMAT (code);
2298 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2302 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2305 else if (fmt[i] == 'E')
2306 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2307 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
/* Return nonzero if the loop ending at INSN and starting at
   START_LABEL can safely become an RPTB/RPTS block: the start label
   must be found, and no jump may cross the block boundary in either
   direction.  */
2315 c4x_rptb_valid_p (insn, start_label)
2316 rtx insn, start_label;
2322 /* Find the start label. */
2323 for (; insn; insn = PREV_INSN (insn))
2324 if (insn == start_label)
2327 /* Not found then we can not use a rptb or rpts. The label was
2328 probably moved by the basic block reorder pass. */
2333 /* If any jump jumps inside this block then we must fail. */
2334 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2336 if (GET_CODE (insn) == CODE_LABEL)
2338 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2339 if (GET_CODE (tmp) == JUMP_INSN
2340 && c4x_label_ref_used_p (tmp, insn))
2344 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2346 if (GET_CODE (insn) == CODE_LABEL)
2348 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2349 if (GET_CODE (tmp) == JUMP_INSN
2350 && c4x_label_ref_used_p (tmp, insn))
2354 /* If any jump jumps outside this block then we must fail. */
2355 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2357 if (GET_CODE (insn) == CODE_LABEL)
2359 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2360 if (GET_CODE (tmp) == JUMP_INSN
2361 && c4x_label_ref_used_p (tmp, insn))
2363 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2364 if (GET_CODE (tmp) == JUMP_INSN
2365 && c4x_label_ref_used_p (tmp, insn))
2370 /* All checks OK. */
/* Given the dummy rptb_end jump INSN at the bottom of a doloop, insert
   the real RPTB (or RPTS) instruction and a fresh start label at the
   top of the loop, redirecting existing back-branches.  Falls back to
   an explicit decrement/compare/branch sequence when the count is not
   in RC or when the block shape rules out a repeat block.  */
2376 c4x_rptb_insert (insn)
2381 rtx new_start_label;
2384 /* If the count register has not been allocated to RC, say if
2385 there is a movstr pattern in the loop, then do not insert a
2386 RPTB instruction. Instead we emit a decrement and branch
2387 at the end of the loop. */
2388 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2389 if (REGNO (count_reg) != RC_REGNO)
2392 /* Extract the start label from the jump pattern (rptb_end). */
2393 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2395 if (! c4x_rptb_valid_p (insn, start_label))
2397 /* We can not use the rptb insn. Replace it so reorg can use
2398 the delay slots of the jump insn. */
2399 emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
2400 emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
2401 emit_insn_before (gen_bge (start_label), insn);
2402 LABEL_NUSES (start_label)++;
2407 end_label = gen_label_rtx ();
2408 LABEL_NUSES (end_label)++;
2409 emit_label_after (end_label, insn);
2411 new_start_label = gen_label_rtx ();
2412 LABEL_NUSES (new_start_label)++;
/* Walk back to the original start label, retargeting any jump that
   branched to it onto the new start label.  */
2414 for (; insn; insn = PREV_INSN (insn))
2416 if (insn == start_label)
2418 if (GET_CODE (insn) == JUMP_INSN &&
2419 JUMP_LABEL (insn) == start_label)
2420 redirect_jump (insn, new_start_label, 0);
2423 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2425 emit_label_after (new_start_label, insn);
/* RPTS is only legal for single-instruction loops; check first.  */
2427 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2428 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn)
2430 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2431 if (LABEL_NUSES (start_label) == 0)
2432 delete_insn (start_label);
2436 /* This function is a C4x special called immediately before delayed
2437 branch scheduling. We fix up RPTB style loops that didn't get RC
2438 allocated as the loop counter. */
2441 c4x_process_after_reload (first)
2446 for (insn = first; insn; insn = NEXT_INSN (insn))
2448 /* Look for insn. */
2451 int insn_code_number;
2454 insn_code_number = recog_memoized (insn);
2456 if (insn_code_number < 0)
2459 /* Insert the RTX for RPTB at the top of the loop
2460 and a label at the end of the loop. */
2461 if (insn_code_number == CODE_FOR_rptb_end)
2462 c4x_rptb_insert(insn);
2464 /* We need to split the insn here. Otherwise the calls to
2465 force_const_mem will not work for load_immed_address. */
2468 /* Don't split the insn if it has been deleted. */
2469 if (! INSN_DELETED_P (old))
2470 insn = try_split (PATTERN(old), old, 1);
2472 /* When not optimizing, the old insn will be still left around
2473 with only the 'deleted' bit set. Transform it into a note
2474 to avoid confusion of subsequent processing. */
2475 if (INSN_DELETED_P (old))
2477 PUT_CODE (old, NOTE);
2478 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2479 NOTE_SOURCE_FILE (old) = 0;
2490 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2498 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
/* Return nonzero if OP is a CONST_INT usable as an integer immediate
   (VOIDmode or an integer-class mode).
   NOTE(review): GET_MODE_CLASS (op) below takes an rtx where a mode is
   expected -- this looks like it should be
   GET_MODE_CLASS (GET_MODE (op)); confirm before relying on it.  */
2503 c4x_immed_int_constant (op)
2506 if (GET_CODE (op) != CONST_INT)
2509 return GET_MODE (op) == VOIDmode
2510 || GET_MODE_CLASS (op) == MODE_INT
2511 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
/* Return nonzero if OP is a CONST_DOUBLE in one of the target's
   floating modes (QFmode or HFmode).  */
2516 c4x_immed_float_constant (op)
2519 if (GET_CODE (op) != CONST_DOUBLE)
2522 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2523 present this only means that a MEM rtx has been generated. It does
2524 not mean the rtx is really in memory. */
2526 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
/* Return data about whether CONST_INT OP can be synthesized as a
   16-bit immediate plus a left shift: try each shift count 0..15 and
   test whether the shifted-down value fits in 16 bits.
   NOTE(review): `1 << 31` relies on implementation-defined signed
   shift into the sign bit (UB in modern C); the expression tests the
   sign bit of VAL to choose sign-filling vs mask-and-test.  */
2531 c4x_shiftable_constant (op)
2536 int val = INTVAL (op);
2538 for (i = 0; i < 16; i++)
2543 mask = ((0xffff >> i) << 16) | 0xffff;
2544 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2545 : (val >> i) & mask))
/* The one-line bodies below belong to the family of immediate-constant
   predicates (float / 16-bit / 8-bit / 5-bit / unsigned-16 / high
   constants); their K&R headers are above each return in the full
   file.  */
2555 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2563 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2573 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2581 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2583 return IS_INT5_CONST (INTVAL (op));
2591 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2599 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2607 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2611 /* The constraints do not have to check the register class,
2612 except when needed to discriminate between the constraints.
2613 The operand has been checked by the predicates to be valid. */
2615 /* ARx + 9-bit signed const or IRn
2616 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2617 We don't include the pre/post inc/dec forms here since
2618 they are handled by the <> constraints. */
/* 'Q' memory constraint: base register with optional 8-bit signed
   displacement or index register.  */
2621 c4x_Q_constraint (op)
2624 enum machine_mode mode = GET_MODE (op);
2626 if (GET_CODE (op) != MEM)
2629 switch (GET_CODE (op))
2636 rtx op0 = XEXP (op, 0);
2637 rtx op1 = XEXP (op, 1);
2645 if (GET_CODE (op1) != CONST_INT)
2648 /* HImode and HFmode must be offsettable. */
2649 if (mode == HImode || mode == HFmode)
2650 return IS_DISP8_OFF_CONST (INTVAL (op1));
2652 return IS_DISP8_CONST (INTVAL (op1));
2663 /* ARx + 5-bit unsigned const
2664 *ARx, *+ARx(n) for n < 32. */
/* 'R' memory constraint: base register with a 5-bit unsigned
   displacement.  */
2667 c4x_R_constraint (op)
2670 enum machine_mode mode = GET_MODE (op);
2674 if (GET_CODE (op) != MEM)
2677 switch (GET_CODE (op))
2684 rtx op0 = XEXP (op, 0);
2685 rtx op1 = XEXP (op, 1);
2690 if (GET_CODE (op1) != CONST_INT)
2693 /* HImode and HFmode must be offsettable. */
/* +1 so that disp and disp+1 (the second word) both fit in 5 bits.  */
2694 if (mode == HImode || mode == HFmode)
2695 return IS_UINT5_CONST (INTVAL (op1) + 1);
2697 return IS_UINT5_CONST (INTVAL (op1));
/* Body of the R-style indirect-operand check (presumably
   c4x_R_indirect; the K&R header lies above this point in the full
   file): accepts *ARx or *+ARx(n) with a 5-bit unsigned displacement,
   C4x only.  */
2712 enum machine_mode mode = GET_MODE (op);
2714 if (TARGET_C3X || GET_CODE (op) != MEM)
2718 switch (GET_CODE (op))
2721 return IS_ADDR_OR_PSEUDO_REG (op);
2725 rtx op0 = XEXP (op, 0);
2726 rtx op1 = XEXP (op, 1);
2728 /* HImode and HFmode must be offsettable. */
/* +1 so the second word of the two-word access also fits.  */
2729 if (mode == HImode || mode == HFmode)
2730 return IS_ADDR_OR_PSEUDO_REG (op0)
2731 && GET_CODE (op1) == CONST_INT
2732 && IS_UINT5_CONST (INTVAL (op1) + 1);
2735 && IS_ADDR_OR_PSEUDO_REG (op0)
2736 && GET_CODE (op1) == CONST_INT
2737 && IS_UINT5_CONST (INTVAL (op1));
2748 /* ARx + 1-bit unsigned const or IRn
2749 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2750 We don't include the pre/post inc/dec forms here since
2751 they are handled by the <> constraints. */
/* 'S' memory constraint: base register with displacement of at most 1
   or an index register; also accepts reg+reg PRE/POST_MODIFY.  */
2754 c4x_S_constraint (op)
2757 enum machine_mode mode = GET_MODE (op);
2758 if (GET_CODE (op) != MEM)
2761 switch (GET_CODE (op))
/* PRE/POST_MODIFY: the adjustment must be reg +/- reg on the same
   base register.  */
2769 rtx op0 = XEXP (op, 0);
2770 rtx op1 = XEXP (op, 1);
2772 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2773 || (op0 != XEXP (op1, 0)))
2776 op0 = XEXP (op1, 0);
2777 op1 = XEXP (op1, 1);
2778 return REG_P (op0) && REG_P (op1);
2779 /* Pre or post_modify with a displacement of 0 or 1
2780 should not be generated. */
2786 rtx op0 = XEXP (op, 0);
2787 rtx op1 = XEXP (op, 1);
2795 if (GET_CODE (op1) != CONST_INT)
2798 /* HImode and HFmode must be offsettable. */
2799 if (mode == HImode || mode == HFmode)
2800 return IS_DISP1_OFF_CONST (INTVAL (op1));
2802 return IS_DISP1_CONST (INTVAL (op1));
/* Body of the S-style indirect-operand check (presumably
   c4x_S_indirect; the K&R header lies above this point in the full
   file): register indirect with index register or displacement of at
   most 1, including PRE/POST_MODIFY forms for QImode/QFmode.  */
2817 enum machine_mode mode = GET_MODE (op);
2818 if (GET_CODE (op) != MEM)
2822 switch (GET_CODE (op))
/* Auto inc/dec forms: single-word modes only.  */
2826 if (mode != QImode && mode != QFmode)
2833 return IS_ADDR_OR_PSEUDO_REG (op);
/* PRE/POST_MODIFY: must be base +/- index on the same register.  */
2838 rtx op0 = XEXP (op, 0);
2839 rtx op1 = XEXP (op, 1);
2841 if (mode != QImode && mode != QFmode)
2844 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2845 || (op0 != XEXP (op1, 0)))
2848 op0 = XEXP (op1, 0);
2849 op1 = XEXP (op1, 1);
2850 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2851 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2852 /* Pre or post_modify with a displacement of 0 or 1
2853 should not be generated. */
2858 rtx op0 = XEXP (op, 0);
2859 rtx op1 = XEXP (op, 1);
2863 /* HImode and HFmode must be offsettable. */
2864 if (mode == HImode || mode == HFmode)
2865 return IS_ADDR_OR_PSEUDO_REG (op0)
2866 && GET_CODE (op1) == CONST_INT
2867 && IS_DISP1_OFF_CONST (INTVAL (op1));
/* base+index in either operand order.  */
2870 return (IS_INDEX_OR_PSEUDO_REG (op1)
2871 && IS_ADDR_OR_PSEUDO_REG (op0))
2872 || (IS_ADDR_OR_PSEUDO_REG (op1)
2873 && IS_INDEX_OR_PSEUDO_REG (op0));
2875 return IS_ADDR_OR_PSEUDO_REG (op0)
2876 && GET_CODE (op1) == CONST_INT
2877 && IS_DISP1_CONST (INTVAL (op1));
2889 /* Direct memory operand. */
/* 'T' constraint: a MEM whose address is either a flagged function
   SYMBOL_REF (call operand) or a DP-relative (LO_SUM DP sym)
   reference.  */
2892 c4x_T_constraint (op)
2895 if (GET_CODE (op) != MEM)
2899 if (GET_CODE (op) != LO_SUM)
2901 /* Allow call operands. */
2902 return GET_CODE (op) == SYMBOL_REF
2903 && GET_MODE (op) == Pmode
2904 && SYMBOL_REF_FLAG (op);
2907 /* HImode and HFmode are not offsettable. */
/* NOTE(review): `GET_CODE (op) == HFmode` compares an rtx code against
   a machine mode -- almost certainly a typo for GET_MODE (op); as
   written the HFmode half of this check can never reject.  Confirm
   and fix.  */
2908 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2911 if ((GET_CODE (XEXP (op, 0)) == REG)
2912 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2913 return c4x_U_constraint (XEXP (op, 1));
2919 /* Symbolic operand. */
/* 'U' constraint: CONST, SYMBOL_REF or LABEL_REF -- i.e. a symbolic
   expression suitable for DP-relative direct addressing.  */
2922 c4x_U_constraint (op)
2925 /* Don't allow direct addressing to an arbitrary constant. */
2926 return GET_CODE (op) == CONST
2927 || GET_CODE (op) == SYMBOL_REF
2928 || GET_CODE (op) == LABEL_REF;
/* Return nonzero if OP is a MEM using any auto-increment/decrement or
   modify addressing form.  */
2933 c4x_autoinc_operand (op, mode)
2935 enum machine_mode mode ATTRIBUTE_UNUSED;
2937 if (GET_CODE (op) == MEM)
2939 enum rtx_code code = GET_CODE (XEXP (op, 0));
2945 || code == PRE_MODIFY
2946 || code == POST_MODIFY
2954 /* Match any operand. */
/* Predicate that accepts any operand unconditionally.  */
2957 any_operand (op, mode)
2958 register rtx op ATTRIBUTE_UNUSED;
2959 enum machine_mode mode ATTRIBUTE_UNUSED;
2965 /* Nonzero if OP is a floating point value with value 0.0. */
2968 fp_zero_operand (op, mode)
2970 enum machine_mode mode ATTRIBUTE_UNUSED;
2974 if (GET_CODE (op) != CONST_DOUBLE)
2976 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2977 return REAL_VALUES_EQUAL (r, dconst0);
/* Nonzero if OP is a constant of MODE that a single insn can load:
   an immediate-field float for float modes, or a HIGH/16-bit constant
   for integer modes.  */
2982 const_operand (op, mode)
2984 register enum machine_mode mode;
2990 if (GET_CODE (op) != CONST_DOUBLE
2991 || GET_MODE (op) != mode
2992 || GET_MODE_CLASS (mode) != MODE_FLOAT)
2995 return c4x_immed_float_p (op);
3001 if (GET_CODE (op) == CONSTANT_P_RTX)
3004 if (GET_CODE (op) != CONST_INT
3005 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
3006 || GET_MODE_CLASS (mode) != MODE_INT)
3009 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
/* Nonzero if OP is a 'K' (STIK 5-bit immediate) constant.  */
3021 stik_const_operand (op, mode)
3023 enum machine_mode mode ATTRIBUTE_UNUSED;
3025 return c4x_K_constant (op);
/* Nonzero if OP is an 'N' (one's-complemented 16-bit) constant.  */
3030 not_const_operand (op, mode)
3032 enum machine_mode mode ATTRIBUTE_UNUSED;
3034 return c4x_N_constant (op);
/* Like register_operand, but with special handling for QFmode
   subregs.  */
3039 reg_operand (op, mode)
3041 enum machine_mode mode;
3043 if (GET_CODE (op) == SUBREG
3044 && GET_MODE (op) == QFmode)
3046 return register_operand (op, mode);
/* Nonzero if OP is a float-mode subreg of an integer-mode register,
   as created for unions of int/long double members.
   NOTE(review): the comment below says subreg:HF but the code tests
   QFmode on the subreg -- presumably both float modes arise; confirm
   which is intended.  */
3051 mixed_subreg_operand (op, mode)
3053 enum machine_mode mode ATTRIBUTE_UNUSED;
3055 /* Allow (subreg:HF (reg:HI)) that may be generated for a union of an
3056 int and a long double. */
3057 if (GET_CODE (op) == SUBREG
3058 && (GET_MODE (op) == QFmode)
3059 && (GET_MODE (SUBREG_REG (op)) == QImode
3060 || GET_MODE (SUBREG_REG (op)) == HImode))
/* Nonzero if OP is a register or any constant.  */
3067 reg_imm_operand (op, mode)
3069 enum machine_mode mode ATTRIBUTE_UNUSED;
3071 if (REG_P (op) || CONSTANT_P (op))
/* Nonzero if OP is an operand whose addressing does not side-effect a
   register, i.e. anything except the auto-modify MEM forms.  */
3078 not_modify_reg (op, mode)
3080 enum machine_mode mode ATTRIBUTE_UNUSED;
3082 if (REG_P (op) || CONSTANT_P (op))
3084 if (GET_CODE (op) != MEM)
3087 switch (GET_CODE (op))
3094 rtx op0 = XEXP (op, 0);
3095 rtx op1 = XEXP (op, 1);
3100 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3106 rtx op0 = XEXP (op, 0);
3108 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
/* Nonzero if OP is not the RC (repeat count) register.  */
3126 not_rc_reg (op, mode)
3128 enum machine_mode mode ATTRIBUTE_UNUSED;
3130 if (REG_P (op) && REGNO (op) == RC_REGNO)
3136 /* Extended precision register R0-R1. */
3139 r0r1_reg_operand (op, mode)
3141 enum machine_mode mode;
3143 if (! reg_operand (op, mode))
/* Look through a subreg to the underlying hard/pseudo register.  */
3145 if (GET_CODE (op) == SUBREG)
3146 op = SUBREG_REG (op);
3147 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3151 /* Extended precision register R2-R3. */
3154 r2r3_reg_operand (op, mode)
3156 enum machine_mode mode;
3158 if (! reg_operand (op, mode))
3160 if (GET_CODE (op) == SUBREG)
3161 op = SUBREG_REG (op);
3162 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3166 /* Low extended precision register R0-R7. */
3169 ext_low_reg_operand (op, mode)
3171 enum machine_mode mode;
3173 if (! reg_operand (op, mode))
3175 if (GET_CODE (op) == SUBREG)
3176 op = SUBREG_REG (op);
3177 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3181 /* Extended precision register. */
3184 ext_reg_operand (op, mode)
3186 enum machine_mode mode;
3188 if (! reg_operand (op, mode))
3190 if (GET_CODE (op) == SUBREG)
3191 op = SUBREG_REG (op);
3194 return IS_EXT_OR_PSEUDO_REG (op);
3198 /* Standard precision register. */
3201 std_reg_operand (op, mode)
3203 enum machine_mode mode;
3205 if (! reg_operand (op, mode))
3207 if (GET_CODE (op) == SUBREG)
3208 op = SUBREG_REG (op);
3209 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3212 /* Standard precision or normal register. */
/* During reload, insist on a standard-precision register; otherwise
   any register will do.  */
3215 std_or_reg_operand (op, mode)
3217 enum machine_mode mode;
3219 if (reload_in_progress)
3220 return std_reg_operand (op, mode);
3221 return reg_operand (op, mode);
3224 /* Address register. */
3227 addr_reg_operand (op, mode)
3229 enum machine_mode mode;
3231 if (! reg_operand (op, mode))
3233 return c4x_a_register (op);
3237 /* Index register. */
3240 index_reg_operand (op, mode)
3242 enum machine_mode mode;
3244 if (! reg_operand (op, mode))
3246 if (GET_CODE (op) == SUBREG)
3247 op = SUBREG_REG (op);
3248 return c4x_x_register (op);
3255 dp_reg_operand (op, mode)
3257 enum machine_mode mode ATTRIBUTE_UNUSED;
3259 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3266 sp_reg_operand (op, mode)
3268 enum machine_mode mode ATTRIBUTE_UNUSED;
3270 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3277 st_reg_operand (op, mode)
3279 enum machine_mode mode ATTRIBUTE_UNUSED;
3281 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3288 rc_reg_operand (op, mode)
3290 enum machine_mode mode ATTRIBUTE_UNUSED;
3292 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3297 call_address_operand (op, mode)
3299 enum machine_mode mode ATTRIBUTE_UNUSED;
3301 return (REG_P (op) || symbolic_address_operand (op, mode));
3305 /* Symbolic address operand. */
3308 symbolic_address_operand (op, mode)
3310 enum machine_mode mode ATTRIBUTE_UNUSED;
3312 switch (GET_CODE (op))
3324 /* Check dst operand of a move instruction. */
3327 dst_operand (op, mode)
3329 enum machine_mode mode;
3331 if (GET_CODE (op) == SUBREG
3332 && mixed_subreg_operand (op, mode))
3336 return reg_operand (op, mode);
3338 return nonimmediate_operand (op, mode);
3342 /* Check src operand of two operand arithmetic instructions. */
3345 src_operand (op, mode)
3347 enum machine_mode mode;
3349 if (GET_CODE (op) == SUBREG
3350 && mixed_subreg_operand (op, mode))
3354 return reg_operand (op, mode);
3356 if (mode == VOIDmode)
3357 mode = GET_MODE (op);
3359 if (GET_CODE (op) == CONST_INT)
3360 return (mode == QImode || mode == Pmode || mode == HImode)
3361 && c4x_I_constant (op);
3363 /* We don't like CONST_DOUBLE integers. */
3364 if (GET_CODE (op) == CONST_DOUBLE)
3365 return c4x_H_constant (op);
3367 /* Disallow symbolic addresses. Only the predicate
3368 symbolic_address_operand will match these. */
3369 if (GET_CODE (op) == SYMBOL_REF
3370 || GET_CODE (op) == LABEL_REF
3371 || GET_CODE (op) == CONST)
3374 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
3375 access to symbolic addresses. These operands will get forced
3376 into a register and the movqi expander will generate a
3377 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
3378 if (GET_CODE (op) == MEM
3379 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3380 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3381 || GET_CODE (XEXP (op, 0)) == CONST)))
3382 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3384 return general_operand (op, mode);
3389 src_hi_operand (op, mode)
3391 enum machine_mode mode;
3393 if (c4x_O_constant (op))
3395 return src_operand (op, mode);
3399 /* Check src operand of two operand logical instructions. */
3402 lsrc_operand (op, mode)
3404 enum machine_mode mode;
3406 if (mode == VOIDmode)
3407 mode = GET_MODE (op);
3409 if (mode != QImode && mode != Pmode)
3410 fatal_insn ("mode not QImode", op);
3412 if (GET_CODE (op) == CONST_INT)
3413 return c4x_L_constant (op) || c4x_J_constant (op);
3415 return src_operand (op, mode);
3419 /* Check src operand of two operand tricky instructions. */
3422 tsrc_operand (op, mode)
3424 enum machine_mode mode;
3426 if (mode == VOIDmode)
3427 mode = GET_MODE (op);
3429 if (mode != QImode && mode != Pmode)
3430 fatal_insn ("mode not QImode", op);
3432 if (GET_CODE (op) == CONST_INT)
3433 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3435 return src_operand (op, mode);
3439 /* Check src operand of two operand non immedidate instructions. */
3442 nonimmediate_src_operand (op, mode)
3444 enum machine_mode mode;
3446 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3449 return src_operand (op, mode);
3453 /* Check logical src operand of two operand non immedidate instructions. */
3456 nonimmediate_lsrc_operand (op, mode)
3458 enum machine_mode mode;
3460 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3463 return lsrc_operand (op, mode);
3468 reg_or_const_operand (op, mode)
3470 enum machine_mode mode;
3472 return reg_operand (op, mode) || const_operand (op, mode);
3476 /* Check for indirect operands allowable in parallel instruction. */
3479 par_ind_operand (op, mode)
3481 enum machine_mode mode;
3483 if (mode != VOIDmode && mode != GET_MODE (op))
3486 return c4x_S_indirect (op);
3490 /* Check for operands allowable in parallel instruction. */
3493 parallel_operand (op, mode)
3495 enum machine_mode mode;
3497 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3502 c4x_S_address_parse (op, base, incdec, index, disp)
3514 if (GET_CODE (op) != MEM)
3515 fatal_insn ("invalid indirect memory address", op);
3518 switch (GET_CODE (op))
3521 *base = REGNO (XEXP (op, 0));
3527 *base = REGNO (XEXP (op, 0));
3533 *base = REGNO (XEXP (op, 0));
3539 *base = REGNO (XEXP (op, 0));
3545 *base = REGNO (XEXP (op, 0));
3546 if (REG_P (XEXP (XEXP (op, 1), 1)))
3548 *index = REGNO (XEXP (XEXP (op, 1), 1));
3549 *disp = 0; /* ??? */
3552 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3557 *base = REGNO (XEXP (op, 0));
3558 if (REG_P (XEXP (XEXP (op, 1), 1)))
3560 *index = REGNO (XEXP (XEXP (op, 1), 1));
3561 *disp = 1; /* ??? */
3564 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3575 rtx op0 = XEXP (op, 0);
3576 rtx op1 = XEXP (op, 1);
3578 if (c4x_a_register (op0))
3580 if (c4x_x_register (op1))
3582 *base = REGNO (op0);
3583 *index = REGNO (op1);
3586 else if ((GET_CODE (op1) == CONST_INT
3587 && IS_DISP1_CONST (INTVAL (op1))))
3589 *base = REGNO (op0);
3590 *disp = INTVAL (op1);
3594 else if (c4x_x_register (op0) && c4x_a_register (op1))
3596 *base = REGNO (op1);
3597 *index = REGNO (op0);
3604 fatal_insn ("invalid indirect (S) memory address", op);
3610 c4x_address_conflict (op0, op1, store0, store1)
3625 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3628 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3629 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3631 if (store0 && store1)
3633 /* If we have two stores in parallel to the same address, then
3634 the C4x only executes one of the stores. This is unlikely to
3635 cause problems except when writing to a hardware device such
3636 as a FIFO since the second write will be lost. The user
3637 should flag the hardware location as being volatile so that
3638 we don't do this optimisation. While it is unlikely that we
3639 have an aliased address if both locations are not marked
3640 volatile, it is probably safer to flag a potential conflict
3641 if either location is volatile. */
3642 if (! flag_argument_noalias)
3644 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3649 /* If have a parallel load and a store to the same address, the load
3650 is performed first, so there is no conflict. Similarly, there is
3651 no conflict if have parallel loads from the same address. */
3653 /* Cannot use auto increment or auto decrement twice for same
3655 if (base0 == base1 && incdec0 && incdec0)
3658 /* It might be too confusing for GCC if we have use a base register
3659 with a side effect and a memory reference using the same register
3661 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3664 /* We can not optimize the case where op1 and op2 refer to the same
3666 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3674 /* Check for while loop inside a decrement and branch loop. */
3677 c4x_label_conflict (insn, jump, db)
3684 if (GET_CODE (insn) == CODE_LABEL)
3686 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3688 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3691 insn = PREV_INSN (insn);
3697 /* Validate combination of operands for parallel load/store instructions. */
3700 valid_parallel_load_store (operands, mode)
3702 enum machine_mode mode ATTRIBUTE_UNUSED;
3704 rtx op0 = operands[0];
3705 rtx op1 = operands[1];
3706 rtx op2 = operands[2];
3707 rtx op3 = operands[3];
3709 if (GET_CODE (op0) == SUBREG)
3710 op0 = SUBREG_REG (op0);
3711 if (GET_CODE (op1) == SUBREG)
3712 op1 = SUBREG_REG (op1);
3713 if (GET_CODE (op2) == SUBREG)
3714 op2 = SUBREG_REG (op2);
3715 if (GET_CODE (op3) == SUBREG)
3716 op3 = SUBREG_REG (op3);
3718 /* The patterns should only allow ext_low_reg_operand() or
3719 par_ind_operand() operands. Thus of the 4 operands, only 2
3720 should be REGs and the other 2 should be MEMs. */
3722 /* This test prevents the multipack pass from using this pattern if
3723 op0 is used as an index or base register in op2 or op3, since
3724 this combination will require reloading. */
3725 if (GET_CODE (op0) == REG
3726 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3727 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3731 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3732 return (REGNO (op0) != REGNO (op2))
3733 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3734 && ! c4x_address_conflict (op1, op3, 0, 0);
3737 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3738 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3739 && ! c4x_address_conflict (op0, op2, 1, 1);
3742 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3743 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3744 && ! c4x_address_conflict (op1, op2, 0, 1);
3747 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3748 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3749 && ! c4x_address_conflict (op0, op3, 1, 0);
3756 valid_parallel_operands_4 (operands, mode)
3758 enum machine_mode mode ATTRIBUTE_UNUSED;
3760 rtx op0 = operands[0];
3761 rtx op2 = operands[2];
3763 if (GET_CODE (op0) == SUBREG)
3764 op0 = SUBREG_REG (op0);
3765 if (GET_CODE (op2) == SUBREG)
3766 op2 = SUBREG_REG (op2);
3768 /* This test prevents the multipack pass from using this pattern if
3769 op0 is used as an index or base register in op2, since this combination
3770 will require reloading. */
3771 if (GET_CODE (op0) == REG
3772 && GET_CODE (op2) == MEM
3773 && reg_mentioned_p (op0, XEXP (op2, 0)))
3781 valid_parallel_operands_5 (operands, mode)
3783 enum machine_mode mode ATTRIBUTE_UNUSED;
3786 rtx op0 = operands[0];
3787 rtx op1 = operands[1];
3788 rtx op2 = operands[2];
3789 rtx op3 = operands[3];
3791 if (GET_CODE (op0) == SUBREG)
3792 op0 = SUBREG_REG (op0);
3793 if (GET_CODE (op1) == SUBREG)
3794 op1 = SUBREG_REG (op1);
3795 if (GET_CODE (op2) == SUBREG)
3796 op2 = SUBREG_REG (op2);
3798 /* The patterns should only allow ext_low_reg_operand() or
3799 par_ind_operand() operands. Operands 1 and 2 may be commutative
3800 but only one of them can be a register. */
3801 if (GET_CODE (op1) == REG)
3803 if (GET_CODE (op2) == REG)
3809 /* This test prevents the multipack pass from using this pattern if
3810 op0 is used as an index or base register in op3, since this combination
3811 will require reloading. */
3812 if (GET_CODE (op0) == REG
3813 && GET_CODE (op3) == MEM
3814 && reg_mentioned_p (op0, XEXP (op3, 0)))
3822 valid_parallel_operands_6 (operands, mode)
3824 enum machine_mode mode ATTRIBUTE_UNUSED;
3827 rtx op0 = operands[0];
3828 rtx op1 = operands[1];
3829 rtx op2 = operands[2];
3830 rtx op4 = operands[4];
3831 rtx op5 = operands[5];
3833 if (GET_CODE (op1) == SUBREG)
3834 op1 = SUBREG_REG (op1);
3835 if (GET_CODE (op2) == SUBREG)
3836 op2 = SUBREG_REG (op2);
3837 if (GET_CODE (op4) == SUBREG)
3838 op4 = SUBREG_REG (op4);
3839 if (GET_CODE (op5) == SUBREG)
3840 op5 = SUBREG_REG (op5);
3842 /* The patterns should only allow ext_low_reg_operand() or
3843 par_ind_operand() operands. Thus of the 4 input operands, only 2
3844 should be REGs and the other 2 should be MEMs. */
3846 if (GET_CODE (op1) == REG)
3848 if (GET_CODE (op2) == REG)
3850 if (GET_CODE (op4) == REG)
3852 if (GET_CODE (op5) == REG)
3855 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3856 Perhaps we should count the MEMs as well? */
3860 /* This test prevents the multipack pass from using this pattern if
3861 op0 is used as an index or base register in op4 or op5, since
3862 this combination will require reloading. */
3863 if (GET_CODE (op0) == REG
3864 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3865 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3872 /* Validate combination of src operands.  Note that the operands have
3873 been screened by the src_operand predicate.  We just have to check
3874 that the combination of operands is valid.  If FORCE is set, ensure
3875 that the destination regno is valid if we have a 2 operand insn.  */
/* NOTE(review): this definition is only partially present here — the
   return type, operand extraction, several switch arms, and closing
   braces appear to have been elided.  The surviving lines are kept
   verbatim; reconstruct from the original c4x.c before compiling.  */
3878 c4x_valid_operands (code, operands, mode, force)
3881 enum machine_mode mode ATTRIBUTE_UNUSED;
3886 enum rtx_code code1;
3887 enum rtx_code code2;
/* COMPARE uses operands[0]/[1] as its sources; other codes use
   operands[1]/[2] (the branch selecting them is elided here).  */
3889 if (code == COMPARE)
3900 if (GET_CODE (op1) == SUBREG)
3901 op1 = SUBREG_REG (op1);
3902 if (GET_CODE (op2) == SUBREG)
3903 op2 = SUBREG_REG (op2);
3905 code1 = GET_CODE (op1);
3906 code2 = GET_CODE (op2);
3908 if (code1 == REG && code2 == REG)
/* Two memory operands: both must be restricted (S or R) indirect.  */
3911 if (code1 == MEM && code2 == MEM)
3913 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3915 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3926 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3931 if (! c4x_H_constant (op2))
3935 /* Any valid memory operand screened by src_operand is OK.  */
3938 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3939 into a stack slot memory address comprising a PLUS and a
3945 fatal_insn ("c4x_valid_operands: Internal error", op2);
3949 /* Check that we have a valid destination register for a two operand
3951 return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
3954 /* We assume MINUS is commutative since the subtract patterns
3955 also support the reverse subtract instructions.  Since op1
3956 is not a register, and op2 is a register, op1 can only
3957 be a restricted memory operand for a shift instruction.  */
3958 if (code == ASHIFTRT || code == LSHIFTRT
3959 || code == ASHIFT || code == COMPARE)
3961 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3966 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3971 if (! c4x_H_constant (op1))
3975 /* Any valid memory operand screened by src_operand is OK.  */
3983 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3984 into a stack slot memory address comprising a PLUS and a
3994 /* Check that we have a valid destination register for a two operand
3996 return ! force || REGNO (op1) == REGNO (operands[0]);
4000 int valid_operands (code, operands, mode)
4003 enum machine_mode mode;
4006 /* If we are not optimizing then we have to let anything go and let
4007 reload fix things up. instantiate_decl in function.c can produce
4008 invalid insns by changing the offset of a memory operand from a
4009 valid one into an invalid one, when the second operand is also a
4010 memory operand. The alternative is not to allow two memory
4011 operands for an insn when not optimizing. The problem only rarely
4012 occurs, for example with the C-torture program DFcmp.c. */
4014 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
/* NOTE(review): this definition is only partially present — the return
   type, parameter declarations, several guard conditions (around
   original lines 4042-4043 and 4060-4061), braces, and the final
   return appear elided.  Surviving lines kept verbatim; restore from
   the original c4x.c before compiling.  */
4019 legitimize_operands (code, operands, mode)
4022 enum machine_mode mode;
4024 /* Compare only has 2 operands.  */
4025 if (code == COMPARE)
4027 /* During RTL generation, force constants into pseudos so that
4028 they can get hoisted out of loops.  This will tie up an extra
4029 register but can save an extra cycle.  Only do this if loop
4030 optimisation enabled.  (We cannot pull this trick for add and
4031 sub instructions since the flow pass won't find
4032 autoincrements etc.)  This allows us to generate compare
4033 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4034 of LDI *AR0++, R0; CMPI 42, R0.
4036 Note that expand_binops will try to load an expensive constant
4037 into a register if it is used within a loop.  Unfortunately,
4038 the cost mechanism doesn't allow us to look at the other
4039 operand to decide whether the constant is expensive.  */
4041 if (! reload_in_progress
4044 && GET_CODE (operands[1]) == CONST_INT
4045 && preserve_subexpressions_p ()
4046 && rtx_cost (operands[1], code) > 1)
4047 operands[1] = force_reg (mode, operands[1]);
4049 if (! reload_in_progress
4050 && ! c4x_valid_operands (code, operands, mode, 0))
4051 operands[0] = force_reg (mode, operands[0]);
4055 /* We cannot do this for ADDI/SUBI insns since we will
4056 defeat the flow pass from finding autoincrement addressing
4058 if (! reload_in_progress
4059 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
4062 && GET_CODE (operands[2]) == CONST_INT
4063 && preserve_subexpressions_p ()
4064 && rtx_cost (operands[2], code) > 1)
4065 operands[2] = force_reg (mode, operands[2]);
4067 /* We can get better code on a C30 if we force constant shift counts
4068 into a register.  This way they can get hoisted out of loops,
4069 tying up a register, but saving an instruction.  The downside is
4070 that they may get allocated to an address or index register, and
4071 thus we will get a pipeline conflict if there is a nearby
4072 indirect address using an address register.
4074 Note that expand_binops will not try to load an expensive constant
4075 into a register if it is used within a loop for a shift insn.  */
4077 if (! reload_in_progress
4078 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
4080 /* If the operand combination is invalid, we force operand1 into a
4081 register, preventing reload from having doing to do this at a
4083 operands[1] = force_reg (mode, operands[1]);
4086 emit_move_insn (operands[0], operands[1]);
4087 operands[1] = copy_rtx (operands[0]);
4091 /* Just in case...  */
4092 if (! c4x_valid_operands (code, operands, mode, 0))
4093 operands[2] = force_reg (mode, operands[2]);
4097 /* Right shifts require a negative shift count, but GCC expects
4098 a positive count, so we emit a NEG.  */
4099 if ((code == ASHIFTRT || code == LSHIFTRT)
4100 && (GET_CODE (operands[2]) != CONST_INT))
4101 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
4107 /* The following predicates are used for instruction scheduling. */
4110 group1_reg_operand (op, mode)
4112 enum machine_mode mode;
4114 if (mode != VOIDmode && mode != GET_MODE (op))
4116 if (GET_CODE (op) == SUBREG)
4117 op = SUBREG_REG (op);
4118 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4123 group1_mem_operand (op, mode)
4125 enum machine_mode mode;
4127 if (mode != VOIDmode && mode != GET_MODE (op))
4130 if (GET_CODE (op) == MEM)
4133 if (GET_CODE (op) == PLUS)
4135 rtx op0 = XEXP (op, 0);
4136 rtx op1 = XEXP (op, 1);
4138 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4139 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4142 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4150 /* Return true if any one of the address registers. */
4153 arx_reg_operand (op, mode)
4155 enum machine_mode mode;
4157 if (mode != VOIDmode && mode != GET_MODE (op))
4159 if (GET_CODE (op) == SUBREG)
4160 op = SUBREG_REG (op);
4161 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4166 c4x_arn_reg_operand (op, mode, regno)
4168 enum machine_mode mode;
4171 if (mode != VOIDmode && mode != GET_MODE (op))
4173 if (GET_CODE (op) == SUBREG)
4174 op = SUBREG_REG (op);
4175 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4180 c4x_arn_mem_operand (op, mode, regno)
4182 enum machine_mode mode;
4185 if (mode != VOIDmode && mode != GET_MODE (op))
4188 if (GET_CODE (op) == MEM)
4191 switch (GET_CODE (op))
4200 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4204 if (REG_P (XEXP (op, 0)) && (! reload_completed
4205 || (REGNO (XEXP (op, 0)) == regno)))
4207 if (REG_P (XEXP (XEXP (op, 1), 1))
4208 && (! reload_completed
4209 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
4215 rtx op0 = XEXP (op, 0);
4216 rtx op1 = XEXP (op, 1);
4218 if ((REG_P (op0) && (! reload_completed
4219 || (REGNO (op0) == regno)))
4220 || (REG_P (op1) && (! reload_completed
4221 || (REGNO (op1) == regno))))
4235 ar0_reg_operand (op, mode)
4237 enum machine_mode mode;
4239 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4244 ar0_mem_operand (op, mode)
4246 enum machine_mode mode;
4248 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4253 ar1_reg_operand (op, mode)
4255 enum machine_mode mode;
4257 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4262 ar1_mem_operand (op, mode)
4264 enum machine_mode mode;
4266 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4271 ar2_reg_operand (op, mode)
4273 enum machine_mode mode;
4275 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4280 ar2_mem_operand (op, mode)
4282 enum machine_mode mode;
4284 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4289 ar3_reg_operand (op, mode)
4291 enum machine_mode mode;
4293 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4298 ar3_mem_operand (op, mode)
4300 enum machine_mode mode;
4302 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4307 ar4_reg_operand (op, mode)
4309 enum machine_mode mode;
4311 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4316 ar4_mem_operand (op, mode)
4318 enum machine_mode mode;
4320 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4325 ar5_reg_operand (op, mode)
4327 enum machine_mode mode;
4329 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4334 ar5_mem_operand (op, mode)
4336 enum machine_mode mode;
4338 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4343 ar6_reg_operand (op, mode)
4345 enum machine_mode mode;
4347 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4352 ar6_mem_operand (op, mode)
4354 enum machine_mode mode;
4356 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4361 ar7_reg_operand (op, mode)
4363 enum machine_mode mode;
4365 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4370 ar7_mem_operand (op, mode)
4372 enum machine_mode mode;
4374 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4379 ir0_reg_operand (op, mode)
4381 enum machine_mode mode;
4383 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4388 ir0_mem_operand (op, mode)
4390 enum machine_mode mode;
4392 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4397 ir1_reg_operand (op, mode)
4399 enum machine_mode mode;
4401 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4406 ir1_mem_operand (op, mode)
4408 enum machine_mode mode;
4410 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4414 /* This is similar to operand_subword but allows autoincrement
/* NOTE(review): this definition is only partially present — the return
   type, parameter declarations, switch-case labels, else-branches, and
   braces appear elided.  Surviving lines kept verbatim; restore from
   the original c4x.c before compiling.  */
4418 c4x_operand_subword (op, i, validate_address, mode)
4421 int validate_address;
4422 enum machine_mode mode;
4424 if (mode != HImode && mode != HFmode)
4425 fatal_insn ("c4x_operand_subword: invalid mode", op);
4427 if (mode == HFmode && REG_P (op))
4428 fatal_insn ("c4x_operand_subword: invalid operand", op);
4430 if (GET_CODE (op) == MEM)
4432 enum rtx_code code = GET_CODE (XEXP (op, 0));
4433 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4434 enum machine_mode submode;
4439 else if (mode == HFmode)
4446 return gen_rtx_MEM (submode, XEXP (op, 0));
4452 /* We could handle these with some difficulty.
4453 e.g., *p-- => *(p-=2); *(p+1).  */
4454 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4460 fatal_insn ("c4x_operand_subword: invalid address", op);
4462 /* Even though offsettable_address_p considers (MEM
4463 (LO_SUM)) to be offsettable, it is not safe if the
4464 address is at the end of the data page since we also have
4465 to fix up the associated high PART.  In this case where
4466 we are trying to split a HImode or HFmode memory
4467 reference, we would have to emit another insn to reload a
4468 new HIGH value.  It's easier to disable LO_SUM memory references
4469 in HImode or HFmode and we probably get better code.  */
4471 fatal_insn ("c4x_operand_subword: address not offsettable", op);
/* Fall back to the generic subword extraction for everything else.  */
4478 return operand_subword (op, i, validate_address, mode);
/* Singly-linked list node used to record symbol names emitted as
   .global (global_head) or .ref (extern_head) directives.  */

struct name_list
{
  struct name_list *next;
  const char *name;
};

/* Symbols declared global in this translation unit.  */
static struct name_list *global_head;

/* Symbols referenced but not (yet) known to be global.  */
static struct name_list *extern_head;
4491 /* Add NAME to list of global symbols and remove from external list if
4492 present on external list. */
4495 c4x_global_label (name)
4498 struct name_list *p, *last;
4500 /* Do not insert duplicate names, so linearly search through list of
4505 if (strcmp (p->name, name) == 0)
4509 p = (struct name_list *) xmalloc (sizeof *p);
4510 p->next = global_head;
4514 /* Remove this name from ref list if present. */
4519 if (strcmp (p->name, name) == 0)
4522 last->next = p->next;
4524 extern_head = p->next;
4533 /* Add NAME to list of external symbols. */
4536 c4x_external_ref (name)
4539 struct name_list *p;
4541 /* Do not insert duplicate names. */
4545 if (strcmp (p->name, name) == 0)
4550 /* Do not insert ref if global found. */
4554 if (strcmp (p->name, name) == 0)
4558 p = (struct name_list *) xmalloc (sizeof *p);
4559 p->next = extern_head;
/* NOTE(review): these lines are the body fragment of the assembler
   file-end routine (it walks extern_head emitting .ref directives for
   names not on the global list, then emits .end); its header and loop
   scaffolding were elided — restore from the original c4x.c.  */
4569 struct name_list *p;
4571 /* Output all external names that are not global.  */
4575 fprintf (fp, "\t.ref\t");
4576 assemble_name (fp, p->name);
4580 fprintf (fp, "\t.end\n");
4585 c4x_check_attribute (attrib, list, decl, attributes)
4587 tree list, decl, *attributes;
4589 while (list != NULL_TREE
4590 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4591 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4592 list = TREE_CHAIN (list);
4594 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4600 c4x_insert_attributes (decl, attributes)
4601 tree decl, *attributes;
4603 switch (TREE_CODE (decl))
4606 c4x_check_attribute ("section", code_tree, decl, attributes);
4607 c4x_check_attribute ("const", pure_tree, decl, attributes);
4608 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4609 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4613 c4x_check_attribute ("section", data_tree, decl, attributes);
4621 /* Table of valid machine attributes. */
4622 const struct attribute_spec c4x_attribute_table[] =
4624 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4625 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4626 /* FIXME: code elsewhere in this file treats "naked" as a synonym of
4627 "interrupt"; should it be accepted here? */
4628 { "assembler", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4629 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4630 { NULL, 0, 0, false, false, false, NULL }
4633 /* Handle an attribute requiring a FUNCTION_TYPE;
4634 arguments as in struct attribute_spec.handler. */
4636 c4x_handle_fntype_attribute (node, name, args, flags, no_add_attrs)
4639 tree args ATTRIBUTE_UNUSED;
4640 int flags ATTRIBUTE_UNUSED;
4643 if (TREE_CODE (*node) != FUNCTION_TYPE)
4645 warning ("`%s' attribute only applies to functions",
4646 IDENTIFIER_POINTER (name));
4647 *no_add_attrs = true;
4654 /* !!! FIXME to emit RPTS correctly. */
4657 c4x_rptb_rpts_p (insn, op)
4660 /* The next insn should be our label marking where the
4661 repeat block starts. */
4662 insn = NEXT_INSN (insn);
4663 if (GET_CODE (insn) != CODE_LABEL)
4665 /* Some insns may have been shifted between the RPTB insn
4666 and the top label... They were probably destined to
4667 be moved out of the loop. For now, let's leave them
4668 where they are and print a warning. We should
4669 probably move these insns before the repeat block insn. */
4671 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4676 /* Skip any notes. */
4677 insn = next_nonnote_insn (insn);
4679 /* This should be our first insn in the loop. */
4680 if (! INSN_P (insn))
4683 /* Skip any notes. */
4684 insn = next_nonnote_insn (insn);
4686 if (! INSN_P (insn))
4689 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4695 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4699 /* Check if register r11 is used as the destination of an insn. */
4712 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4713 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4715 if (INSN_P (x) && (set = single_set (x)))
4718 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4721 fmt = GET_RTX_FORMAT (GET_CODE (x));
4722 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4726 if (c4x_r11_set_p (XEXP (x, i)))
4729 else if (fmt[i] == 'E')
4730 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4731 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4738 /* The c4x sometimes has a problem when the insn before the laj insn
4739 sets the r11 register. Check for this situation. */
4742 c4x_check_laj_p (insn)
4745 insn = prev_nonnote_insn (insn);
4747 /* If this is the start of the function no nop is needed. */
4751 /* If the previous insn is a code label we have to insert a nop. This
4752 could be a jump or table jump. We can find the normal jumps by
4753 scanning the function but this will not find table jumps. */
4754 if (GET_CODE (insn) == CODE_LABEL)
4757 /* If the previous insn sets register r11 we have to insert a nop. */
4758 if (c4x_r11_set_p (insn))
4761 /* No nop needed. */
4766 /* Adjust the cost of a scheduling dependency. Return the new cost of
4767 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4768 A set of an address register followed by a use occurs a 2 cycle
4769 stall (reduced to a single cycle on the c40 using LDA), while
4770 a read of an address register followed by a use occurs a single cycle. */

/* Scheduling cost constants, in cycles, charged between a dependent
   insn pair (issue cycle included):
   SET_USE_COST    - write of an address register followed by a use;
   SETLDA_USE_COST - same, but the write was an LDA (one cycle less,
                     per the comment above this only helps on the c40);
   READ_USE_COST   - read of an address register followed by a use.  */
4772 #define SET_USE_COST 3
4773 #define SETLDA_USE_COST 2
4774 #define READ_USE_COST 2

/* TARGET_SCHED_ADJUST_COST hook: adjust the scheduler's cost estimate
   for the dependence of INSN on DEP_INSN through note LINK; COST is
   the default cost.  The surviving body raises the cost to
   SET_USE_COST / SETLDA_USE_COST / READ_USE_COST whenever DEP_INSN
   sets (or LDA-sets, or reads) one of the address registers ar0-ar7
   or index registers ir0-ir1 that INSN uses, as determined by the
   per-register insn attributes generated from c4x.md.
   NOTE(review): this chunk of the file is fragmentary -- declarations,
   braces, `return' statements and several comment terminators are
   missing between the surviving lines (some comments below therefore
   run on and swallow code lines).  Recover the missing lines from the
   original gcc/config/c4x/c4x.c before attempting to build.  */
4777 c4x_adjust_cost (insn, link, dep_insn, cost)
4783 /* Don't worry about this until we know what registers have been
4785 if (flag_schedule_insns == 0 && ! reload_completed)
4788 /* How do we handle dependencies where a read followed by another
4789 read causes a pipeline stall? For example, a read of ar0 followed
4790 by the use of ar0 for a memory reference. It looks like we
4791 need to extend the scheduler to handle this case. */
4793 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4794 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4795 so only deal with insns we know about. */
4796 if (recog_memoized (dep_insn) < 0)
/* REG_NOTE_KIND 0 denotes a true (read-after-write) data dependence.  */
4799 if (REG_NOTE_KIND (link) == 0)
4803 /* Data dependency; DEP_INSN writes a register that INSN reads some
4807 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4808 max = SET_USE_COST > max ? SET_USE_COST : max;
4809 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4810 max = READ_USE_COST > max ? READ_USE_COST : max;
/* Per-register matrix for ar0-ar7: for each address register, charge
   SET_USE_COST for a plain set followed by a use, SETLDA_USE_COST for
   an LDA set followed by a use, and READ_USE_COST for a read followed
   by a use; `max' accumulates the worst case over all registers.  */
4819 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4820 max = SET_USE_COST > max ? SET_USE_COST : max;
4821 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4822 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4823 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4824 max = READ_USE_COST > max ? READ_USE_COST : max;
4826 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4827 max = SET_USE_COST > max ? SET_USE_COST : max;
4828 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4829 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4830 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4831 max = READ_USE_COST > max ? READ_USE_COST : max;
4833 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4834 max = SET_USE_COST > max ? SET_USE_COST : max;
4835 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4836 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4837 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4838 max = READ_USE_COST > max ? READ_USE_COST : max;
4840 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4841 max = SET_USE_COST > max ? SET_USE_COST : max;
4842 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4843 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4844 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4845 max = READ_USE_COST > max ? READ_USE_COST : max;
4847 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4848 max = SET_USE_COST > max ? SET_USE_COST : max;
4849 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4850 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4851 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4852 max = READ_USE_COST > max ? READ_USE_COST : max;
4854 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4855 max = SET_USE_COST > max ? SET_USE_COST : max;
4856 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4857 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4858 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4859 max = READ_USE_COST > max ? READ_USE_COST : max;
4861 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4862 max = SET_USE_COST > max ? SET_USE_COST : max;
4863 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4864 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4865 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4866 max = READ_USE_COST > max ? READ_USE_COST : max;
4868 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4869 max = SET_USE_COST > max ? SET_USE_COST : max;
4870 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4871 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4872 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4873 max = READ_USE_COST > max ? READ_USE_COST : max;
/* Index registers ir0/ir1: only the set and LDA-set forms are
   checked here (no read-followed-by-use case, unlike ar0-ar7).  */
4875 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4876 max = SET_USE_COST > max ? SET_USE_COST : max;
4877 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4878 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4880 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4881 max = SET_USE_COST > max ? SET_USE_COST : max;
4882 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
4883 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4889 /* For other data dependencies, the default cost specified in the
4893 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
4895 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4898 /* For c4x anti dependencies, the cost is 0. */
4901 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
4903 /* Output dependency; DEP_INSN writes a register that INSN writes some
4906 /* For c4x output dependencies, the cost is 0. */
/* Register the C4x machine-specific builtin functions with the front
   end.  Each builtin_function call binds a source-level name (e.g.
   "fast_ftoi") to a C4X_BUILTIN_* code, which c4x_expand_builtin
   later expands to the corresponding machine insn.
   NOTE(review): this chunk is fragmentary -- the return type, braces
   and the build_function_type wrappers around the tree_cons argument
   lists are missing between the surviving lines; recover them from
   the original gcc/config/c4x/c4x.c.  */
4914 c4x_init_builtins ()
4916 tree endlink = void_list_node;
/* fast_ftoi: double -> int conversion (C4X_BUILTIN_FIX).  */
4918 builtin_function ("fast_ftoi",
4921 tree_cons (NULL_TREE, double_type_node, endlink)),
4922 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
/* ansi_ftoi: double -> int conversion with ANSI semantics
   (C4X_BUILTIN_FIX_ANSI).  */
4923 builtin_function ("ansi_ftoi",
4926 tree_cons (NULL_TREE, double_type_node, endlink)),
4927 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL, NULL_TREE);
/* fast_imult: int x int multiply (C4X_BUILTIN_MPYI); the name
   suggests a fast/restricted-range variant -- confirm against the
   expander.  */
4929 builtin_function ("fast_imult",
4932 tree_cons (NULL_TREE, integer_type_node,
4933 tree_cons (NULL_TREE,
4934 integer_type_node, endlink))),
4935 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL, NULL_TREE);
/* toieee / frieee: double -> double conversions; the names suggest
   conversion to and from IEEE floating-point format (the C4x uses a
   non-IEEE native float format) -- confirm against the C4x manual.  */
4938 builtin_function ("toieee",
4941 tree_cons (NULL_TREE, double_type_node, endlink)),
4942 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL, NULL_TREE);
4943 builtin_function ("frieee",
4946 tree_cons (NULL_TREE, double_type_node, endlink)),
4947 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL, NULL_TREE);
/* fast_invf: double -> double reciprocal approximation
   (C4X_BUILTIN_RCPF).  */
4948 builtin_function ("fast_invf",
4951 tree_cons (NULL_TREE, double_type_node, endlink)),
4952 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL, NULL_TREE);
/* TARGET_EXPAND_BUILTIN hook: expand a call EXP to one of the
   machine builtins registered by c4x_init_builtins into RTL,
   returning the result in TARGET (allocating a fresh register when
   TARGET is absent or not a suitable register operand).  Each case
   expands the argument(s) with expand_expr, flushes any pending
   queued increments via protect_from_queue, and emits the named
   insn pattern for that builtin.
   NOTE(review): this chunk is fragmentary -- the `switch' opener,
   the declarations of arg0/arg1/r0/r1, the `return target;' lines
   and the closing braces are missing between the surviving lines;
   recover them from the original gcc/config/c4x/c4x.c.  */
4958 c4x_expand_builtin (exp, target, subtarget, mode, ignore)
4961 rtx subtarget ATTRIBUTE_UNUSED;
4962 enum machine_mode mode ATTRIBUTE_UNUSED;
4963 int ignore ATTRIBUTE_UNUSED;
4965 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4966 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4967 tree arglist = TREE_OPERAND (exp, 1);
/* fast_ftoi: QFmode argument, QImode result via fixqfqi_clobber.  */
4973 case C4X_BUILTIN_FIX:
4974 arg0 = TREE_VALUE (arglist);
4975 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4976 r0 = protect_from_queue (r0, 0);
4977 if (! target || ! register_operand (target, QImode))
4978 target = gen_reg_rtx (QImode);
4979 emit_insn (gen_fixqfqi_clobber (target, r0));
/* ansi_ftoi: same operands, but the fix_truncqfqi2 pattern.  */
4982 case C4X_BUILTIN_FIX_ANSI:
4983 arg0 = TREE_VALUE (arglist);
4984 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4985 r0 = protect_from_queue (r0, 0);
4986 if (! target || ! register_operand (target, QImode))
4987 target = gen_reg_rtx (QImode);
4988 emit_insn (gen_fix_truncqfqi2 (target, r0));
/* fast_imult: two QImode arguments, expanded to the 24-bit multiply
   pattern mulqi3_24_clobber.  */
4991 case C4X_BUILTIN_MPYI:
4994 arg0 = TREE_VALUE (arglist);
4995 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4996 r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
4997 r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
4998 r0 = protect_from_queue (r0, 0);
4999 r1 = protect_from_queue (r1, 0);
5000 if (! target || ! register_operand (target, QImode))
5001 target = gen_reg_rtx (QImode);
5002 emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
/* toieee: QFmode -> QFmode via the toieee pattern.  */
5005 case C4X_BUILTIN_TOIEEE:
5008 arg0 = TREE_VALUE (arglist);
5009 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5010 r0 = protect_from_queue (r0, 0);
5011 if (! target || ! register_operand (target, QFmode))
5012 target = gen_reg_rtx (QFmode);
5013 emit_insn (gen_toieee (target, r0));
/* frieee: unlike the other cases, the operand must end up in memory:
   a VAR_DECL/PARM_DECL argument is forced onto the stack with
   put_var_into_stack, and a register operand is spilled to a fresh
   stack slot before emitting the frieee pattern.  */
5016 case C4X_BUILTIN_FRIEEE:
5019 arg0 = TREE_VALUE (arglist);
5020 if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
5021 put_var_into_stack (arg0);
5022 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5023 r0 = protect_from_queue (r0, 0);
5024 if (register_operand (r0, QFmode))
5026 r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
5027 emit_move_insn (r1, r0);
5030 if (! target || ! register_operand (target, QFmode))
5031 target = gen_reg_rtx (QFmode);
5032 emit_insn (gen_frieee (target, r0));
/* fast_invf: QFmode reciprocal via rcpfqf_clobber.  */
5035 case C4X_BUILTIN_RCPF:
5038 arg0 = TREE_VALUE (arglist);
5039 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5040 r0 = protect_from_queue (r0, 0);
5041 if (! target || ! register_operand (target, QFmode))
5042 target = gen_reg_rtx (QFmode);
5043 emit_insn (gen_rcpfqf_clobber (target, r0));
/* TARGET_ASM_NAMED_SECTION hook: switch the assembler output to the
   named section NAME.  The TI C4x assembler uses a quoted `.sect'
   directive rather than the usual `.section'; the section FLAGS are
   ignored.
   NOTE(review): fragmentary -- the return type, the declaration of
   NAME, and the surrounding braces are missing from this chunk.  */
5050 c4x_asm_named_section (name, flags)
5052 unsigned int flags ATTRIBUTE_UNUSED;
5054 fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
/* TARGET_ASM_GLOBALIZE_LABEL hook: make label NAME global.  Emits
   the generic globalization directive via default_globalize_label,
   then calls the backend helper c4x_global_label (defined elsewhere
   in this file; presumably records NAME for later per-label output
   -- confirm at its definition).
   NOTE(review): fragmentary -- return type, parameter declarations
   and braces are missing from this chunk.  */
5058 c4x_globalize_label (stream, name)
5062 default_globalize_label (stream, name);
5063 c4x_global_label (name);