1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
49 #include "c4x-protos.h"
51 #include "target-def.h"
/* SYMBOL_REF rtx's for the float<->integer conversion library helpers
   that have no direct machine instruction.  They are only declared here;
   their initialization is not visible in this extract.  */
55 rtx fix_truncqfhi2_libfunc;
56 rtx fixuns_truncqfhi2_libfunc;
57 rtx fix_trunchfhi2_libfunc;
58 rtx fixuns_trunchfhi2_libfunc;
59 rtx floathiqf2_libfunc;
60 rtx floatunshiqf2_libfunc;
61 rtx floathihf2_libfunc;
62 rtx floatunshihf2_libfunc;
/* Nonzero while compiling a function that was detected (or pretends,
   via the "leaf_pretend" attribute) to be a leaf; set in
   c4x_expand_prologue and consulted by c4x_isr_reg_used_p.  */
64 static int c4x_leaf_function;
66 static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
68 /* Array of the smallest class containing reg number REGNO, indexed by
69 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
70 registers are available and set the class to NO_REGS for registers
71 that the target switches say are unavailable. */
/* NOTE(review): this extract omits the initializer's brace delimiters
   (original lines not shown); verify against the full file.  */
73 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
75 /* Reg Modes Saved. */
76 R0R1_REGS, /* R0 QI, QF, HF No. */
77 R0R1_REGS, /* R1 QI, QF, HF No. */
78 R2R3_REGS, /* R2 QI, QF, HF No. */
79 R2R3_REGS, /* R3 QI, QF, HF No. */
80 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
81 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
82 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
83 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
84 ADDR_REGS, /* AR0 QI No. */
85 ADDR_REGS, /* AR1 QI No. */
86 ADDR_REGS, /* AR2 QI No. */
87 ADDR_REGS, /* AR3 QI QI. */
88 ADDR_REGS, /* AR4 QI QI. */
89 ADDR_REGS, /* AR5 QI QI. */
90 ADDR_REGS, /* AR6 QI QI. */
91 ADDR_REGS, /* AR7 QI QI. */
92 DP_REG, /* DP QI No. */
93 INDEX_REGS, /* IR0 QI No. */
94 INDEX_REGS, /* IR1 QI No. */
95 BK_REG, /* BK QI QI. */
96 SP_REG, /* SP QI No. */
97 ST_REG, /* ST CC No. */
98 NO_REGS, /* DIE/IE No. */
99 NO_REGS, /* IIE/IF No. */
100 NO_REGS, /* IIF/IOF No. */
101 INT_REGS, /* RS QI No. */
102 INT_REGS, /* RE QI No. */
103 RC_REG, /* RC QI No. */
104 EXT_REGS, /* R8 QI, QF, HF QI. */
105 EXT_REGS, /* R9 QI, QF, HF No. */
106 EXT_REGS, /* R10 QI, QF, HF No. */
107 EXT_REGS, /* R11 QI, QF, HF No. */
/* Mode to use when the caller-save machinery spills each hard register,
   indexed by REGNO.  VOIDmode marks registers that are never
   caller-saved.  Parallels c4x_regclass_map entry for entry.  */
110 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
112 /* Reg Modes Saved. */
113 HFmode, /* R0 QI, QF, HF No. */
114 HFmode, /* R1 QI, QF, HF No. */
115 HFmode, /* R2 QI, QF, HF No. */
116 HFmode, /* R3 QI, QF, HF No. */
117 QFmode, /* R4 QI, QF, HF QI. */
118 QFmode, /* R5 QI, QF, HF QI. */
119 QImode, /* R6 QI, QF, HF QF. */
120 QImode, /* R7 QI, QF, HF QF. */
121 QImode, /* AR0 QI No. */
122 QImode, /* AR1 QI No. */
123 QImode, /* AR2 QI No. */
124 QImode, /* AR3 QI QI. */
125 QImode, /* AR4 QI QI. */
126 QImode, /* AR5 QI QI. */
127 QImode, /* AR6 QI QI. */
128 QImode, /* AR7 QI QI. */
129 VOIDmode, /* DP QI No. */
130 QImode, /* IR0 QI No. */
131 QImode, /* IR1 QI No. */
132 QImode, /* BK QI QI. */
133 VOIDmode, /* SP QI No. */
134 VOIDmode, /* ST CC No. */
135 VOIDmode, /* DIE/IE No. */
136 VOIDmode, /* IIE/IF No. */
137 VOIDmode, /* IIF/IOF No. */
138 QImode, /* RS QI No. */
139 QImode, /* RE QI No. */
140 VOIDmode, /* RC QI No. */
141 QFmode, /* R8 QI, QF, HF QI. */
142 HFmode, /* R9 QI, QF, HF No. */
143 HFmode, /* R10 QI, QF, HF No. */
144 HFmode, /* R11 QI, QF, HF No. */
148 /* Test and compare insns in c4x.md store the information needed to
149 generate branch and scc insns here. */
/* Raw -mrpts= string and its parsed value (see c4x_override_options).  */
154 const char *c4x_rpts_cycles_string;
155 int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
/* Raw -mcpu= string and its parsed value (see c4x_override_options).  */
156 const char *c4x_cpu_version_string;
157 int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
159 /* Pragma definitions. */
/* Lists of decls named by CODE_SECTION/DATA_SECTION/FUNC_IS_PURE/etc.
   pragmas; consulted when attributes are inserted (not visible here).  */
161 tree code_tree = NULL_TREE;
162 tree data_tree = NULL_TREE;
163 tree pure_tree = NULL_TREE;
164 tree noreturn_tree = NULL_TREE;
165 tree interrupt_tree = NULL_TREE;
167 /* Forward declarations */
/* Static helpers local to this file; PARAMS is the old-style prototype
   wrapper used throughout GCC of this era.  */
168 static int c4x_isr_reg_used_p PARAMS ((unsigned int));
169 static int c4x_leaf_function_p PARAMS ((void));
170 static int c4x_assembler_function_p PARAMS ((void));
171 static int c4x_immed_float_p PARAMS ((rtx));
172 static int c4x_a_register PARAMS ((rtx));
173 static int c4x_x_register PARAMS ((rtx));
174 static int c4x_immed_int_constant PARAMS ((rtx));
175 static int c4x_immed_float_constant PARAMS ((rtx));
176 static int c4x_K_constant PARAMS ((rtx));
177 static int c4x_N_constant PARAMS ((rtx));
178 static int c4x_O_constant PARAMS ((rtx));
179 static int c4x_R_indirect PARAMS ((rtx));
180 static int c4x_S_indirect PARAMS ((rtx));
181 static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
182 static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
183 enum machine_mode, int));
184 static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
185 static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
186 static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
187 static int c4x_r11_set_p PARAMS ((rtx));
188 static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
189 static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
190 static tree c4x_handle_fntype_attribute PARAMS ((tree *, tree, tree, int, bool *));
191 const struct attribute_spec c4x_attribute_table[];
192 static void c4x_insert_attributes PARAMS ((tree, tree *));
193 static void c4x_asm_named_section PARAMS ((const char *, unsigned int));
194 static int c4x_adjust_cost PARAMS ((rtx, rtx, rtx, int));
195 static void c4x_encode_section_info PARAMS ((tree, int));
197 /* Initialize the GCC target structure. */
/* Override selected targetm hooks before expanding TARGET_INITIALIZER;
   the unset hooks keep their defaults from target-def.h.  */
198 #undef TARGET_ASM_BYTE_OP
199 #define TARGET_ASM_BYTE_OP "\t.word\t"
200 #undef TARGET_ASM_ALIGNED_HI_OP
201 #define TARGET_ASM_ALIGNED_HI_OP NULL
202 #undef TARGET_ASM_ALIGNED_SI_OP
203 #define TARGET_ASM_ALIGNED_SI_OP NULL
205 #undef TARGET_ATTRIBUTE_TABLE
206 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
208 #undef TARGET_INSERT_ATTRIBUTES
209 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
211 #undef TARGET_INIT_BUILTINS
212 #define TARGET_INIT_BUILTINS c4x_init_builtins
214 #undef TARGET_EXPAND_BUILTIN
215 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
217 #undef TARGET_SCHED_ADJUST_COST
218 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
220 #undef TARGET_ENCODE_SECTION_INFO
221 #define TARGET_ENCODE_SECTION_INFO c4x_encode_section_info
/* The one and only target vector instance for this backend.  */
223 struct gcc_target targetm = TARGET_INITIALIZER;
225 /* Override command line options.
226 Called once after all options have been parsed.
227 Mostly we process the processor
228 type and sometimes adjust other TARGET_ options. */
/* NOTE(review): this extract skips several original lines (the -m30/-m31
   etc. flag tests guarding the c4x_cpu_version assignments below and
   some braces); verify the control flow against the full file.  */
231 c4x_override_options ()
233 if (c4x_rpts_cycles_string)
234 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
239 c4x_cpu_version = 30;
241 c4x_cpu_version = 31;
243 c4x_cpu_version = 32;
245 c4x_cpu_version = 33;
247 c4x_cpu_version = 40;
249 c4x_cpu_version = 44;
251 c4x_cpu_version = 40;
253 /* -mcpu=xx overrides -m40 etc. */
254 if (c4x_cpu_version_string)
256 const char *p = c4x_cpu_version_string;
258 /* Also allow -mcpu=c30 etc. */
259 if (*p == 'c' || *p == 'C')
261 c4x_cpu_version = atoi (p);
/* Re-derive the CPU flag bits from the (possibly overridden) version.  */
264 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
265 C40_FLAG | C44_FLAG);
267 switch (c4x_cpu_version)
269 case 30: target_flags |= C30_FLAG; break;
270 case 31: target_flags |= C31_FLAG; break;
271 case 32: target_flags |= C32_FLAG; break;
272 case 33: target_flags |= C33_FLAG; break;
273 case 40: target_flags |= C40_FLAG; break;
274 case 44: target_flags |= C44_FLAG; break;
/* Unknown version: warn and fall back to the C40.  */
276 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
277 c4x_cpu_version = 40;
278 target_flags |= C40_FLAG;
/* All C3x family members share the C3X flag.  */
281 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
282 target_flags |= C3X_FLAG;
284 target_flags &= ~C3X_FLAG;
286 /* Convert foo / 8.0 into foo * 0.125, etc. */
287 set_fast_math_flags (1);
289 /* We should phase out the following at some stage.
290 This provides compatibility with the old -mno-aliases option. */
291 if (! TARGET_ALIASES && ! flag_argument_noalias)
292 flag_argument_noalias = 1;
296 /* This is called before c4x_override_options. */
/* Adjust optimization defaults for -Olevel/-Os.  LEVEL and SIZE are
   accepted for the OPTIMIZATION_OPTIONS interface but unused: the only
   adjustment is unconditional.  */
299 c4x_optimization_options (level, size)
300 int level ATTRIBUTE_UNUSED;
301 int size ATTRIBUTE_UNUSED;
303 /* Scheduling before register allocation can screw up global
304 register allocation, especially for functions that use MPY||ADD
305 instructions. The benefit we gain we get by scheduling before
306 register allocation is probably marginal anyhow. */
307 flag_schedule_insns = 0;
311 /* Write an ASCII string. */
/* Emit LEN bytes at PTR to STREAM as assembler .byte directives,
   batching printable characters into quoted strings of at most
   C4X_ASCII_LIMIT characters and emitting the rest numerically.
   NOTE(review): several original lines (buffer flushing, the
   non-printable branch header) are missing from this extract.  */
313 #define C4X_ASCII_LIMIT 40
316 c4x_output_ascii (stream, ptr, len)
321 char sbuf[C4X_ASCII_LIMIT + 1];
322 int s, l, special, first = 1, onlys;
325 fprintf (stream, "\t.byte\t");
327 for (s = l = 0; len > 0; --len, ++ptr)
331 /* Escape " and \ with a \". */
332 special = *ptr == '\"' || *ptr == '\\';
334 /* If printable - add to buff. */
335 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
340 if (s < C4X_ASCII_LIMIT - 1)
355 fprintf (stream, "\"%s\"", sbuf);
/* TI assembler: keep output lines under ~80 columns.  */
357 if (TARGET_TI && l >= 80 && len > 1)
359 fprintf (stream, "\n\t.byte\t");
377 fprintf (stream, "%d", *ptr);
379 if (TARGET_TI && l >= 80 && len > 1)
381 fprintf (stream, "\n\t.byte\t");
/* Flush any remaining buffered printable characters.  */
392 fprintf (stream, "\"%s\"", sbuf);
395 fputc ('\n', stream);
/* Return nonzero if hard register REGNO can hold a value of mode MODE
   (backs the HARD_REGNO_MODE_OK macro).  */
400 c4x_hard_regno_mode_ok (regno, mode)
402 enum machine_mode mode;
407 case Pmode: /* Pointer (24/32 bits). */
409 case QImode: /* Integer (32 bits). */
410 return IS_INT_REGNO (regno);
412 case QFmode: /* Float, Double (32 bits). */
413 case HFmode: /* Long Double (40 bits). */
414 return IS_EXT_REGNO (regno);
416 case CCmode: /* Condition Codes. */
417 case CC_NOOVmode: /* Condition Codes. */
418 return IS_ST_REGNO (regno);
420 case HImode: /* Long Long (64 bits). */
421 /* We need two registers to store long longs. Note that
422 it is much easier to constrain the first register
423 to start on an even boundary. */
424 return IS_INT_REGNO (regno)
425 && IS_INT_REGNO (regno + 1)
429 return 0; /* We don't support these modes. */
435 /* Return non-zero if REGNO1 can be renamed to REGNO2. */
437 c4x_hard_regno_rename_ok (regno1, regno2)
441 /* We can not copy call saved registers from mode QI into QF or from
443 if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
445 if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
447 /* We cannot copy from an extended (40 bit) register to a standard
448 (32 bit) register because we only set the condition codes for
449 extended registers. */
450 if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
452 if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
457 /* The TI C3x C compiler register argument runtime model uses 6 registers,
458 AR2, R2, R3, RC, RS, RE.
460 The first two floating point arguments (float, double, long double)
461 that are found scanning from left to right are assigned to R2 and R3.
463 The remaining integer (char, short, int, long) or pointer arguments
464 are assigned to the remaining registers in the order AR2, R2, R3,
465 RC, RS, RE when scanning left to right, except for the last named
466 argument prior to an ellipsis denoting variable number of
467 arguments. We don't have to worry about the latter condition since
468 function.c treats the last named argument as anonymous (unnamed).
470 All arguments that cannot be passed in registers are pushed onto
471 the stack in reverse order (right to left). GCC handles that for us.
473 c4x_init_cumulative_args() is called at the start, so we can parse
474 the args to see how many floating point arguments and how many
475 integer (or pointer) arguments there are. c4x_function_arg() is
476 then called (sometimes repeatedly) for each argument (parsed left
477 to right) to obtain the register to pass the argument in, or zero
478 if the argument is to be passed on the stack. Once the compiler is
479 happy, c4x_function_arg_advance() is called.
481 Don't use R0 to pass arguments in, we use 0 to indicate a stack
/* Integer-argument registers, indexed first by the number of float args
   already assigned to R2/R3 (0, 1 or 2) and then by integer arg index;
   0 terminates each row (argument goes on the stack).  */
484 static const int c4x_int_reglist[3][6] =
486 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
487 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
488 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
/* Float-argument registers, in assignment order.  */
491 static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
494 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
495 function whose data type is FNTYPE.
496 For a library call, FNTYPE is 0. */
/* Pre-scans the parameter list to count float vs. integer register
   candidates; the debug fprintf calls appear to be guarded by a
   TARGET_DEBUG-style condition not visible in this extract.  */
499 c4x_init_cumulative_args (cum, fntype, libname)
500 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
501 tree fntype; /* Tree ptr for function decl. */
502 rtx libname; /* SYMBOL_REF of library name or 0. */
504 tree param, next_param;
506 cum->floats = cum->ints = 0;
513 fprintf (stderr, "\nc4x_init_cumulative_args (");
516 tree ret_type = TREE_TYPE (fntype);
518 fprintf (stderr, "fntype code = %s, ret code = %s",
519 tree_code_name[(int) TREE_CODE (fntype)],
520 tree_code_name[(int) TREE_CODE (ret_type)]);
523 fprintf (stderr, "no fntype");
526 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
/* A prototype exists iff the function type carries argument types.  */
529 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
531 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
532 param; param = next_param)
536 next_param = TREE_CHAIN (param);
538 type = TREE_VALUE (param);
539 if (type && type != void_type_node)
541 enum machine_mode mode;
543 /* If the last arg doesn't have void type then we have
544 variable arguments. */
548 if ((mode = TYPE_MODE (type)))
550 if (! MUST_PASS_IN_STACK (mode, type))
552 /* Look for float, double, or long double argument. */
553 if (mode == QFmode || mode == HFmode)
555 /* Look for integer, enumeral, boolean, char, or pointer
557 else if (mode == QImode || mode == Pmode)
566 fprintf (stderr, "%s%s, args = %d)\n",
567 cum->prototype ? ", prototype" : "",
568 cum->var ? ", variable args" : "",
573 /* Update the data in CUM to advance over an argument
574 of mode MODE and data type TYPE.
575 (TYPE is null for libcalls where that information may not be available.) */
578 c4x_function_arg_advance (cum, mode, type, named)
579 CUMULATIVE_ARGS *cum; /* Current arg information. */
580 enum machine_mode mode; /* Current arg mode. */
581 tree type; /* Type of the arg or 0 if lib support. */
582 int named; /* Whether or not the argument was named. */
585 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
586 GET_MODE_NAME (mode), named);
590 && ! MUST_PASS_IN_STACK (mode, type))
592 /* Look for float, double, or long double argument. */
593 if (mode == QFmode || mode == HFmode)
595 /* Look for integer, enumeral, boolean, char, or pointer argument. */
596 else if (mode == QImode || mode == Pmode)
599 else if (! TARGET_MEMPARM && ! type)
601 /* Handle libcall arguments. */
602 if (mode == QFmode || mode == HFmode)
604 else if (mode == QImode || mode == Pmode)
611 /* Define where to put the arguments to a function. Value is zero to
612 push the argument on the stack, or a hard register in which to
615 MODE is the argument's machine mode.
616 TYPE is the data type of the argument (as a tree).
617 This is null for libcalls where that information may
619 CUM is a variable of type CUMULATIVE_ARGS which gives info about
620 the preceding args and about the function being called.
621 NAMED is nonzero if this argument is a named parameter
622 (otherwise it is an extra parameter matching an ellipsis). */
625 c4x_function_arg (cum, mode, type, named)
626 CUMULATIVE_ARGS *cum; /* Current arg information. */
627 enum machine_mode mode; /* Current arg mode. */
628 tree type; /* Type of the arg or 0 if lib support. */
629 int named; /* != 0 for normal args, == 0 for ... args. */
631 int reg = 0; /* Default to passing argument on stack. */
635 /* We can handle at most 2 floats in R2, R3. */
636 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
638 /* We can handle at most 6 integers minus number of floats passed
640 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
641 6 - cum->maxfloats : cum->ints;
643 /* If there is no prototype, assume all the arguments are integers. */
644 if (! cum->prototype)
647 cum->ints = cum->floats = 0;
651 /* This marks the last argument. We don't need to pass this through
653 if (type == void_type_node)
659 && ! MUST_PASS_IN_STACK (mode, type))
661 /* Look for float, double, or long double argument. */
662 if (mode == QFmode || mode == HFmode)
664 if (cum->floats < cum->maxfloats)
665 reg = c4x_fp_reglist[cum->floats];
667 /* Look for integer, enumeral, boolean, char, or pointer argument. */
668 else if (mode == QImode || mode == Pmode)
670 if (cum->ints < cum->maxints)
671 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
674 else if (! TARGET_MEMPARM && ! type)
676 /* We could use a different argument calling model for libcalls,
677 since we're only calling functions in libgcc. Thus we could
678 pass arguments for long longs in registers rather than on the
679 stack. In the meantime, use the odd TI format. We make the
680 assumption that we won't have more than two floating point
681 args, six integer args, and that all the arguments are of the
683 if (mode == QFmode || mode == HFmode)
684 reg = c4x_fp_reglist[cum->floats];
685 else if (mode == QImode || mode == Pmode)
686 reg = c4x_int_reglist[0][cum->ints];
691 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
692 GET_MODE_NAME (mode), named);
694 fprintf (stderr, ", reg=%s", reg_names[reg]);
696 fprintf (stderr, ", stack");
697 fprintf (stderr, ")\n");
/* reg == 0 means the caller pushes the argument on the stack;
   presumably a guard for that case is among the lines this extract
   omits — verify against the full file.  */
700 return gen_rtx_REG (mode, reg);
705 /* C[34]x arguments grow in weird ways (downwards) that the standard
706 varargs stuff can't handle.. */
/* Implement va_arg: pre-decrement VALIST by the size of TYPE and
   return the resulting address as an rtx.  */
708 c4x_va_arg (valist, type)
713 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
714 build_int_2 (int_size_in_bytes (type), 0));
715 TREE_SIDE_EFFECTS (t) = 1;
717 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
/* Return nonzero if an interrupt service routine's prologue/epilogue
   must save/restore hard register REGNO.  */
722 c4x_isr_reg_used_p (regno)
725 /* Don't save/restore FP or ST, we handle them separately. */
726 if (regno == FRAME_POINTER_REGNUM
727 || IS_ST_REGNO (regno))
730 /* We could be a little smarter abut saving/restoring DP.
731 We'll only save if for the big memory model or if
732 we're paranoid. ;-) */
733 if (IS_DP_REGNO (regno))
734 return ! TARGET_SMALL || TARGET_PARANOID;
736 /* Only save/restore regs in leaf function that are used. */
737 if (c4x_leaf_function)
738 return regs_ever_live[regno] && fixed_regs[regno] == 0;
740 /* Only save/restore regs that are used by the ISR and regs
741 that are likely to be used by functions the ISR calls
742 if they are not fixed. */
743 return IS_EXT_REGNO (regno)
744 || ((regs_ever_live[regno] || call_used_regs[regno])
745 && fixed_regs[regno] == 0);
/* Return nonzero if the current function is (or claims to be, via the
   "leaf_pretend" attribute) a leaf function.  */
750 c4x_leaf_function_p ()
752 /* A leaf function makes no calls, so we only need
753 to save/restore the registers we actually use.
754 For the global variable leaf_function to be set, we need
755 to define LEAF_REGISTERS and all that it entails.
756 Let's check ourselves... */
758 if (lookup_attribute ("leaf_pretend",
759 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
762 /* Use the leaf_pretend attribute at your own risk. This is a hack
763 to speed up ISRs that call a function infrequently where the
764 overhead of saving and restoring the additional registers is not
765 warranted. You must save and restore the additional registers
766 required by the called function. Caveat emptor. Here's enough
769 if (leaf_function_p ())
/* Return nonzero if the current function carries the "assembler" or
   "naked" attribute, i.e. its body is hand-written and it must get no
   compiler-generated prologue/epilogue.  */
777 c4x_assembler_function_p ()
781 type = TREE_TYPE (current_function_decl);
782 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
783 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
/* Return nonzero if the current function is an interrupt handler,
   either via the "interrupt" attribute or the TI naming convention
   c_intNN (e.g. c_int01).  */
788 c4x_interrupt_function_p ()
790 if (lookup_attribute ("interrupt",
791 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
794 /* Look for TI style c_intnn. */
795 return current_function_name[0] == 'c'
796 && current_function_name[1] == '_'
797 && current_function_name[2] == 'i'
798 && current_function_name[3] == 'n'
799 && current_function_name[4] == 't'
800 && ISDIGIT (current_function_name[5])
801 && ISDIGIT (current_function_name[6]);
/* Emit RTL for the function prologue.  Three cases: "assembler"/"naked"
   functions get nothing, interrupt functions get a full ISR save
   sequence, and ordinary functions get FP push + frame allocation +
   call-saved register pushes.  NOTE(review): this extract omits a
   number of original lines (several if/else headers and braces);
   verify the exact control flow against the full file.  */
805 c4x_expand_prologue ()
808 int size = get_frame_size ();
811 /* In functions where ar3 is not used but frame pointers are still
812 specified, frame pointers are not adjusted (if >= -O2) and this
813 is used so it won't needlessly push the frame pointer. */
816 /* For __assembler__ function don't build a prologue. */
817 if (c4x_assembler_function_p ())
822 /* For __interrupt__ function build specific prologue. */
823 if (c4x_interrupt_function_p ())
825 c4x_leaf_function = c4x_leaf_function_p ();
827 insn = emit_insn (gen_push_st ());
828 RTX_FRAME_RELATED_P (insn) = 1;
831 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
832 RTX_FRAME_RELATED_P (insn) = 1;
833 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
834 gen_rtx_REG (QImode, SP_REGNO)));
835 RTX_FRAME_RELATED_P (insn) = 1;
836 /* We require that an ISR uses fewer than 32768 words of
837 local variables, otherwise we have to go to lots of
838 effort to save a register, load it with the desired size,
839 adjust the stack pointer, and then restore the modified
840 register. Frankly, I think it is a poor ISR that
841 requires more than 32767 words of local temporary
844 error ("ISR %s requires %d words of local vars, max is 32767",
845 current_function_name, size);
847 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
848 gen_rtx_REG (QImode, SP_REGNO),
850 RTX_FRAME_RELATED_P (insn) = 1;
/* Push every register the ISR save-set requires; DP has its own
   push pattern, and extended regs need a QF push as well to keep
   the full 40 bits.  */
852 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
854 if (c4x_isr_reg_used_p (regno))
856 if (regno == DP_REGNO)
858 insn = emit_insn (gen_push_dp ());
859 RTX_FRAME_RELATED_P (insn) = 1;
863 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
864 RTX_FRAME_RELATED_P (insn) = 1;
865 if (IS_EXT_REGNO (regno))
867 insn = emit_insn (gen_pushqf
868 (gen_rtx_REG (QFmode, regno)));
869 RTX_FRAME_RELATED_P (insn) = 1;
874 /* We need to clear the repeat mode flag if the ISR is
875 going to use a RPTB instruction or uses the RC, RS, or RE
877 if (regs_ever_live[RC_REGNO]
878 || regs_ever_live[RS_REGNO]
879 || regs_ever_live[RE_REGNO])
881 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
882 RTX_FRAME_RELATED_P (insn) = 1;
885 /* Reload DP reg if we are paranoid about some turkey
886 violating small memory model rules. */
887 if (TARGET_SMALL && TARGET_PARANOID)
889 insn = emit_insn (gen_set_ldp_prologue
890 (gen_rtx_REG (QImode, DP_REGNO),
891 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
892 RTX_FRAME_RELATED_P (insn) = 1;
/* Non-interrupt, non-naked function: standard prologue.  */
897 if (frame_pointer_needed)
900 || (current_function_args_size != 0)
903 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
904 RTX_FRAME_RELATED_P (insn) = 1;
905 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
906 gen_rtx_REG (QImode, SP_REGNO)));
907 RTX_FRAME_RELATED_P (insn) = 1;
912 /* Since ar3 is not used, we don't need to push it. */
918 /* If we use ar3, we need to push it. */
920 if ((size != 0) || (current_function_args_size != 0))
922 /* If we are omitting the frame pointer, we still have
923 to make space for it so the offsets are correct
924 unless we don't use anything on the stack at all. */
931 /* Local vars are too big, it will take multiple operations
935 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
936 GEN_INT(size >> 16)));
937 RTX_FRAME_RELATED_P (insn) = 1;
938 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
939 gen_rtx_REG (QImode, R1_REGNO),
941 RTX_FRAME_RELATED_P (insn) = 1;
945 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
946 GEN_INT(size & ~0xffff)));
947 RTX_FRAME_RELATED_P (insn) = 1;
949 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
950 gen_rtx_REG (QImode, R1_REGNO),
951 GEN_INT(size & 0xffff)));
952 RTX_FRAME_RELATED_P (insn) = 1;
953 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
954 gen_rtx_REG (QImode, SP_REGNO),
955 gen_rtx_REG (QImode, R1_REGNO)));
956 RTX_FRAME_RELATED_P (insn) = 1;
960 /* Local vars take up less than 32767 words, so we can directly
962 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
963 gen_rtx_REG (QImode, SP_REGNO),
965 RTX_FRAME_RELATED_P (insn) = 1;
/* Push the call-saved registers this function actually uses.
   Float call-saved registers are pushed as QF (and additionally as
   QI under -mpreserve-float to keep all 40 bits).  */
968 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
970 if (regs_ever_live[regno] && ! call_used_regs[regno])
972 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
974 if (TARGET_PRESERVE_FLOAT)
976 insn = emit_insn (gen_pushqi
977 (gen_rtx_REG (QImode, regno)));
978 RTX_FRAME_RELATED_P (insn) = 1;
980 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
981 RTX_FRAME_RELATED_P (insn) = 1;
983 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
985 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
986 RTX_FRAME_RELATED_P (insn) = 1;
/* Emit RTL for the function epilogue, mirroring c4x_expand_prologue:
   naked functions just return, interrupt functions pop the ISR save-set
   in reverse, and ordinary functions pop call-saved registers,
   deallocate the frame and return.  NOTE(review): this extract omits
   several original lines (if/else headers, braces); verify the exact
   control flow against the full file.  */
995 c4x_expand_epilogue()
1001 int size = get_frame_size ();
1003 /* For __assembler__ function build no epilogue. */
1004 if (c4x_assembler_function_p ())
1006 insn = emit_jump_insn (gen_return_from_epilogue ());
1007 RTX_FRAME_RELATED_P (insn) = 1;
1011 /* For __interrupt__ function build specific epilogue. */
1012 if (c4x_interrupt_function_p ())
1014 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1016 if (! c4x_isr_reg_used_p (regno))
1018 if (regno == DP_REGNO)
1020 insn = emit_insn (gen_pop_dp ());
1021 RTX_FRAME_RELATED_P (insn) = 1;
1025 /* We have to use unspec because the compiler will delete insns
1026 that are not call-saved. */
1027 if (IS_EXT_REGNO (regno))
1029 insn = emit_insn (gen_popqf_unspec
1030 (gen_rtx_REG (QFmode, regno)));
1031 RTX_FRAME_RELATED_P (insn) = 1;
1033 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1034 RTX_FRAME_RELATED_P (insn) = 1;
1039 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1040 gen_rtx_REG (QImode, SP_REGNO),
1042 RTX_FRAME_RELATED_P (insn) = 1;
1043 insn = emit_insn (gen_popqi
1044 (gen_rtx_REG (QImode, AR3_REGNO)));
1045 RTX_FRAME_RELATED_P (insn) = 1;
1047 insn = emit_insn (gen_pop_st ());
1048 RTX_FRAME_RELATED_P (insn) = 1;
1049 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1050 RTX_FRAME_RELATED_P (insn) = 1;
/* Non-interrupt, non-naked function: standard epilogue.  */
1054 if (frame_pointer_needed)
1057 || (current_function_args_size != 0)
/* Fetch the return address into R2 through the frame pointer.  */
1061 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1062 gen_rtx_MEM (QImode,
1064 (QImode, gen_rtx_REG (QImode,
1067 RTX_FRAME_RELATED_P (insn) = 1;
1069 /* We already have the return value and the fp,
1070 so we need to add those to the stack. */
1077 /* Since ar3 is not used for anything, we don't need to
1084 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1085 if (size || current_function_args_size)
1087 /* If we are omitting the frame pointer, we still have
1088 to make space for it so the offsets are correct
1089 unless we don't use anything on the stack at all. */
1094 /* Now restore the saved registers, putting in the delayed branch
1096 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1098 if (regs_ever_live[regno] && ! call_used_regs[regno])
1100 if (regno == AR3_REGNO && dont_pop_ar3)
1103 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1105 insn = emit_insn (gen_popqf_unspec
1106 (gen_rtx_REG (QFmode, regno)));
1107 RTX_FRAME_RELATED_P (insn) = 1;
1108 if (TARGET_PRESERVE_FLOAT)
1110 insn = emit_insn (gen_popqi_unspec
1111 (gen_rtx_REG (QImode, regno)));
1112 RTX_FRAME_RELATED_P (insn) = 1;
1117 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1118 RTX_FRAME_RELATED_P (insn) = 1;
1123 if (frame_pointer_needed)
1126 || (current_function_args_size != 0)
1129 /* Restore the old FP. */
1132 (gen_rtx_REG (QImode, AR3_REGNO),
1133 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1135 RTX_FRAME_RELATED_P (insn) = 1;
1141 /* Local vars are too big, it will take multiple operations
1145 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1146 GEN_INT(size >> 16)));
1147 RTX_FRAME_RELATED_P (insn) = 1;
1148 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1149 gen_rtx_REG (QImode, R3_REGNO),
1151 RTX_FRAME_RELATED_P (insn) = 1;
1155 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1156 GEN_INT(size & ~0xffff)));
1157 RTX_FRAME_RELATED_P (insn) = 1;
1159 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1160 gen_rtx_REG (QImode, R3_REGNO),
1161 GEN_INT(size & 0xffff)));
1162 RTX_FRAME_RELATED_P (insn) = 1;
1163 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1164 gen_rtx_REG (QImode, SP_REGNO),
1165 gen_rtx_REG (QImode, R3_REGNO)));
1166 RTX_FRAME_RELATED_P (insn) = 1;
1170 /* Local vars take up less than 32768 words, so we can directly
1171 subtract the number. */
1172 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1173 gen_rtx_REG (QImode, SP_REGNO),
1175 RTX_FRAME_RELATED_P (insn) = 1;
1180 insn = emit_jump_insn (gen_return_indirect_internal
1181 (gen_rtx_REG (QImode, R2_REGNO)));
1182 RTX_FRAME_RELATED_P (insn) = 1;
1186 insn = emit_jump_insn (gen_return_from_epilogue ());
1187 RTX_FRAME_RELATED_P (insn) = 1;
/* Return nonzero if the current function needs no epilogue at all:
   after reload, a plain (non-ISR, non-naked) function with no frame,
   no alloca, no stack args, and no call-saved registers live except
   possibly AR3.  */
1194 c4x_null_epilogue_p ()
1198 if (reload_completed
1199 && ! c4x_assembler_function_p ()
1200 && ! c4x_interrupt_function_p ()
1201 && ! current_function_calls_alloca
1202 && ! current_function_args_size
1204 && ! get_frame_size ())
1206 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1207 if (regs_ever_live[regno] && ! call_used_regs[regno]
1208 && (regno != AR3_REGNO))
/* Expand a move of mode MODE between OPERANDS[0] (dest) and
   OPERANDS[1] (src), legitimizing addresses and constants that the
   machine cannot handle directly, then fall through to the normal
   move pattern.  NOTE(review): several original lines (some condition
   headers and the final emit) are missing from this extract; verify
   against the full file.  */
1217 c4x_emit_move_sequence (operands, mode)
1219 enum machine_mode mode;
1221 rtx op0 = operands[0];
1222 rtx op1 = operands[1];
1224 if (! reload_in_progress
1227 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1228 op1 = force_reg (mode, op1);
1230 if (GET_CODE (op1) == LO_SUM
1231 && GET_MODE (op1) == Pmode
1232 && dp_reg_operand (XEXP (op1, 0), mode))
1234 /* expand_increment will sometimes create a LO_SUM immediate
1236 op1 = XEXP (op1, 1);
1238 else if (symbolic_address_operand (op1, mode))
1240 if (TARGET_LOAD_ADDRESS)
1242 /* Alias analysis seems to do a better job if we force
1243 constant addresses to memory after reload. */
1244 emit_insn (gen_load_immed_address (op0, op1));
1249 /* Stick symbol or label address into the constant pool. */
1250 op1 = force_const_mem (Pmode, op1);
1253 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1255 /* We could be a lot smarter about loading some of these
1257 op1 = force_const_mem (mode, op1);
1260 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1261 and emit associated (HIGH (SYMREF)) if large memory model.
1262 c4x_legitimize_address could be used to do this,
1263 perhaps by calling validize_address. */
1264 if (TARGET_EXPOSE_LDP
1265 && ! (reload_in_progress || reload_completed)
1266 && GET_CODE (op1) == MEM
1267 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1269 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1271 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1272 op1 = change_address (op1, mode,
1273 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
/* Same LDP exposure for a symbolic destination address.  */
1276 if (TARGET_EXPOSE_LDP
1277 && ! (reload_in_progress || reload_completed)
1278 && GET_CODE (op0) == MEM
1279 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1281 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1283 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1284 op0 = change_address (op0, mode,
1285 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
1288 if (GET_CODE (op0) == SUBREG
1289 && mixed_subreg_operand (op0, mode))
1291 /* We should only generate these mixed mode patterns
1292 during RTL generation. If we need do it later on
1293 then we'll have to emit patterns that won't clobber CC. */
1294 if (reload_in_progress || reload_completed)
1296 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1297 op0 = SUBREG_REG (op0);
1298 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1300 op0 = copy_rtx (op0);
1301 PUT_MODE (op0, QImode);
1307 emit_insn (gen_storeqf_int_clobber (op0, op1));
1313 if (GET_CODE (op1) == SUBREG
1314 && mixed_subreg_operand (op1, mode))
1316 /* We should only generate these mixed mode patterns
1317 during RTL generation. If we need do it later on
1318 then we'll have to emit patterns that won't clobber CC. */
1319 if (reload_in_progress || reload_completed)
1321 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1322 op1 = SUBREG_REG (op1);
1323 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1325 op1 = copy_rtx (op1);
1326 PUT_MODE (op1, QImode);
1332 emit_insn (gen_loadqf_int_clobber (op0, op1));
/* Constants too wide for a 16-bit immediate need a two-insn load.  */
1339 && reg_operand (op0, mode)
1340 && const_int_operand (op1, mode)
1341 && ! IS_INT16_CONST (INTVAL (op1))
1342 && ! IS_HIGH_CONST (INTVAL (op1)))
1344 emit_insn (gen_loadqi_big_constant (op0, op1));
1349 && reg_operand (op0, mode)
1350 && const_int_operand (op1, mode))
1352 emit_insn (gen_loadhi_big_constant (op0, op1));
1356 /* Adjust operands in case we have modified them. */
1360 /* Emit normal pattern. */
/* Emit a call to LIBCALL taking NOPERANDS operands of mode SMODE and
   returning a DMODE value, recording a REG_EQUAL-style equivalence
   (CODE applied to the operands) via emit_libcall_block so CSE can
   work on the result.  c4x_emit_libcall3 is the three-operand
   wrapper; c4x_emit_libcall_mulhi records the high half of a widening
   multiply as the equivalence.
   NOTE(review): sampled excerpt -- lines are missing (gaps in the
   embedded numbering); not compilable as shown.  */
1366 c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1369 enum machine_mode dmode;
1370 enum machine_mode smode;
1382 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1383 operands[1], smode);
1384 equiv = gen_rtx (code, dmode, operands[1]);
1388 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1389 operands[1], smode, operands[2], smode);
1390 equiv = gen_rtx (code, dmode, operands[1], operands[2]);
1397 insns = get_insns ();
1399 emit_libcall_block (insns, operands[0], ret, equiv);
1404 c4x_emit_libcall3 (libcall, code, mode, operands)
1407 enum machine_mode mode;
1410 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
1415 c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1418 enum machine_mode mode;
1426 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1427 operands[1], mode, operands[2], mode);
1428 equiv = gen_rtx_TRUNCATE (mode,
1429 gen_rtx_LSHIFTRT (HImode,
1430 gen_rtx_MULT (HImode,
1431 gen_rtx (code, HImode, operands[1]),
1432 gen_rtx (code, HImode, operands[2])),
1434 insns = get_insns ();
1436 emit_libcall_block (insns, operands[0], ret, equiv);
1440 /* Set the SYMBOL_REF_FLAG for a function decl. However, we do not
1441 yet use this info. */
1444 c4x_encode_section_info (decl, first)
1446 int first ATTRIBUTE_UNUSED;
/* Mark the SYMBOL_REF of every function so predicates (e.g. the %U
   call/callu output code) can distinguish function symbols.  */
1448 if (TREE_CODE (decl) == FUNCTION_DECL)
1449 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
/* GO_IF_LEGITIMATE_ADDRESS worker: return nonzero if ADDR is a valid
   memory address for MODE.  Decomposes ADDR into base (AR0-AR7),
   index (IR0/IR1) and displacement parts, then validates each,
   rejecting non-offsettable forms for the two-word HImode/HFmode.
   STRICT selects hard-register checking (after reload).
   NOTE(review): sampled excerpt -- case labels, braces and some
   returns are missing; restore the full switch before editing.  */
1454 c4x_check_legit_addr (mode, addr, strict)
1455 enum machine_mode mode;
1459 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1460 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1461 rtx disp = NULL_RTX; /* Displacement. */
1464 code = GET_CODE (addr);
1467 /* Register indirect with auto increment/decrement. We don't
1468 allow SP here---push_operand should recognise an operand
1469 being pushed on the stack. */
1474 if (mode != QImode && mode != QFmode)
1478 base = XEXP (addr, 0);
1486 rtx op0 = XEXP (addr, 0);
1487 rtx op1 = XEXP (addr, 1);
1489 if (mode != QImode && mode != QFmode)
1493 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1495 base = XEXP (op1, 0);
1498 if (REG_P (XEXP (op1, 1)))
1499 indx = XEXP (op1, 1);
1501 disp = XEXP (op1, 1);
1505 /* Register indirect. */
1510 /* Register indirect with displacement or index. */
1513 rtx op0 = XEXP (addr, 0);
1514 rtx op1 = XEXP (addr, 1);
1515 enum rtx_code code0 = GET_CODE (op0);
1522 base = op0; /* Base + index. */
1524 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1532 base = op0; /* Base + displacement. */
1543 /* Direct addressing with DP register. */
1546 rtx op0 = XEXP (addr, 0);
1547 rtx op1 = XEXP (addr, 1);
1549 /* HImode and HFmode direct memory references aren't truly
1550 offsettable (consider case at end of data page). We
1551 probably get better code by loading a pointer and using an
1552 indirect memory reference. */
1553 if (mode == HImode || mode == HFmode)
1556 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1559 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1562 if (GET_CODE (op1) == CONST)
1568 /* Direct addressing with some work for the assembler... */
1570 /* Direct addressing. */
1573 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1575 /* These need to be converted to a LO_SUM (...).
1576 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1579 /* Do not allow direct memory access to absolute addresses.
1580 This is more pain than it's worth, especially for the
1581 small memory model where we can't guarantee that
1582 this address is within the data page---we don't want
1583 to modify the DP register in the small memory model,
1584 even temporarily, since an interrupt can sneak in.... */
1588 /* Indirect indirect addressing. */
1593 fatal_insn ("using CONST_DOUBLE for address", addr);
1599 /* Validate the base register. */
1602 /* Check that the address is offsettable for HImode and HFmode. */
1603 if (indx && (mode == HImode || mode == HFmode))
1606 /* Handle DP based stuff. */
1607 if (REGNO (base) == DP_REGNO)
1609 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1611 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1615 /* Now validate the index register. */
1618 if (GET_CODE (indx) != REG)
1620 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1622 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1626 /* Validate displacement. */
1629 if (GET_CODE (disp) != CONST_INT)
1631 if (mode == HImode || mode == HFmode)
1633 /* The offset displacement must be legitimate. */
1634 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1639 if (! IS_DISP8_CONST (INTVAL (disp)))
1642 /* Can't add an index with a disp. */
/* LEGITIMIZE_ADDRESS worker: rewrite a SYMBOL_REF/LABEL_REF address.
   For HImode/HFmode the symbol is forced into a register so the
   address is offsettable; otherwise a set_ldp of the DP register is
   emitted and a (LO_SUM DP sym) address returned.
   NOTE(review): sampled excerpt; some returns/braces are missing.  */
1651 c4x_legitimize_address (orig, mode)
1652 rtx orig ATTRIBUTE_UNUSED;
1653 enum machine_mode mode ATTRIBUTE_UNUSED;
1655 if (GET_CODE (orig) == SYMBOL_REF
1656 || GET_CODE (orig) == LABEL_REF)
1658 if (mode == HImode || mode == HFmode)
1660 /* We need to force the address into
1661 a register so that it is offsettable. */
1662 rtx addr_reg = gen_reg_rtx (Pmode);
1663 emit_move_insn (addr_reg, orig);
1668 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1671 emit_insn (gen_set_ldp (dp_reg, orig));
1673 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1681 /* Provide the costs of an addressing mode that contains ADDR.
1682 If ADDR is not a valid address, its cost is irrelevant.
1683 This is used in cse and loop optimisation to determine
1684 if it is worthwhile storing a common address into a register.
1685 Unfortunately, the C4x address cost depends on other operands. */
1688 c4x_address_cost (addr)
1691 switch (GET_CODE (addr))
1702 /* These shouldn't be directly generated. */
/* LO_SUM: cost a DP-relative direct reference; symbols are cheaper
   in the small memory model.  */
1710 rtx op1 = XEXP (addr, 1);
1712 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1713 return TARGET_SMALL ? 3 : 4;
1715 if (GET_CODE (op1) == CONST)
1717 rtx offset = const0_rtx;
1719 op1 = eliminate_constant_term (op1, &offset);
1721 /* ??? These costs need rethinking... */
1722 if (GET_CODE (op1) == LABEL_REF)
1725 if (GET_CODE (op1) != SYMBOL_REF)
1728 if (INTVAL (offset) == 0)
1733 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
/* PLUS: register base plus register index or constant displacement.  */
1739 register rtx op0 = XEXP (addr, 0);
1740 register rtx op1 = XEXP (addr, 1);
1742 if (GET_CODE (op0) != REG)
1745 switch (GET_CODE (op1))
1751 /* This cost for REG+REG must be greater than the cost
1752 for REG if we want autoincrement addressing modes. */
1756 /* The following tries to improve GIV combination
1757 in strength reduce but appears not to help. */
1758 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1761 if (IS_DISP1_CONST (INTVAL (op1)))
1764 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
/* Emit a compare of X and Y into the status (ST) register and return
   the CC register rtx, choosing the CC mode with SELECT_CC_MODE.
   NOTE(review): sampled excerpt; the early-return for unusable
   CC_NOOVmode comparisons (line 1787) is truncated.  */
1779 c4x_gen_compare_reg (code, x, y)
1783 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1786 if (mode == CC_NOOVmode
1787 && (code == LE || code == GE || code == LT || code == GT))
1790 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1791 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1792 gen_rtx_COMPARE (mode, x, y)));
/* Assemble a conditional branch, inspecting final_sequence to decide
   between delayed (d-suffix) and annulled forms.
   NOTE(review): body heavily truncated in this listing.  */
1797 c4x_output_cbranch (form, seq)
1806 static char str[100];
1810 delay = XVECEXP (final_sequence, 0, 1);
1811 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1812 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1813 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1816 cp = &str [strlen (str)];
/* PRINT_OPERAND worker: print operand OP to FILE for the %<letter>
   output modifier.  See each case for the modifier's meaning.
   NOTE(review): sampled excerpt -- many case labels/breaks are
   missing from this listing.  */
1841 c4x_print_operand (file, op, letter)
1842 FILE *file; /* File to write to. */
1843 rtx op; /* Operand to print. */
1844 int letter; /* %<letter> or 0. */
1851 case '#': /* Delayed. */
1853 fprintf (file, "d");
1857 code = GET_CODE (op);
1860 case 'A': /* Direct address. */
1861 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1862 fprintf (file, "@");
1865 case 'H': /* Sethi. */
1866 output_addr_const (file, op);
1869 case 'I': /* Reversed condition. */
1870 code = reverse_condition (code);
1873 case 'L': /* Log 2 of constant. */
1874 if (code != CONST_INT)
1875 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1876 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1879 case 'N': /* Ones complement of small constant. */
1880 if (code != CONST_INT)
1881 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1882 fprintf (file, "%d", ~INTVAL (op));
1885 case 'K': /* Generate ldp(k) if direct address. */
1888 && GET_CODE (XEXP (op, 0)) == LO_SUM
1889 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1890 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1892 op1 = XEXP (XEXP (op, 0), 1);
1893 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1895 fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1896 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1897 fprintf (file, "\n");
1902 case 'M': /* Generate ldp(k) if direct address. */
1903 if (! TARGET_SMALL /* Only used in asm statements. */
1905 && (GET_CODE (XEXP (op, 0)) == CONST
1906 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1908 fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1909 output_address (XEXP (op, 0));
1910 fprintf (file, "\n\t");
1914 case 'O': /* Offset address. */
1915 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1917 else if (code == MEM)
1918 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1919 else if (code == REG)
1920 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1922 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1925 case 'C': /* Call. */
1928 case 'U': /* Call/callu. */
1929 if (code != SYMBOL_REF)
1930 fprintf (file, "u");
/* Default (no modifier) handling: registers print the float or int
   register name depending on the operand's mode class.  */
1940 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1942 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1944 fprintf (file, "%s", reg_names[REGNO (op)]);
1948 output_address (XEXP (op, 0));
1956 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
1957 REAL_VALUE_TO_DECIMAL (r, "%20f", str);
1958 fprintf (file, "%s", str);
1963 fprintf (file, "%d", INTVAL (op));
/* Comparison codes print the C[34]x condition mnemonics.  */
1967 fprintf (file, "ne");
1971 fprintf (file, "eq");
1975 fprintf (file, "ge");
1979 fprintf (file, "gt");
1983 fprintf (file, "le");
1987 fprintf (file, "lt");
1991 fprintf (file, "hs");
1995 fprintf (file, "hi");
1999 fprintf (file, "ls");
2003 fprintf (file, "lo");
2007 output_addr_const (file, op);
2011 output_addr_const (file, XEXP (op, 0));
2018 fatal_insn ("c4x_print_operand: Bad operand case", op);
/* PRINT_OPERAND_ADDRESS worker: print a memory address in C[34]x
   assembler syntax (*ARn, *++ARn(d), *ARn++(IRm), @sym, etc.).
   NOTE(review): sampled excerpt -- case labels and breaks are
   missing from this listing.  */
2025 c4x_print_operand_address (file, addr)
2029 switch (GET_CODE (addr))
2032 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2036 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2040 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
/* POST_MODIFY: *ARn++(d) / *ARn--(d) / *ARn++(IRm) forms.  */
2045 rtx op0 = XEXP (XEXP (addr, 1), 0);
2046 rtx op1 = XEXP (XEXP (addr, 1), 1);
2048 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2049 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2050 reg_names[REGNO (op1)]);
2051 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2052 fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
2054 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2055 fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
2057 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2058 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2059 reg_names[REGNO (op1)]);
2061 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
/* PRE_MODIFY: *++ARn(d) / *--ARn(d) / *++ARn(IRm) forms.  */
2067 rtx op0 = XEXP (XEXP (addr, 1), 0);
2068 rtx op1 = XEXP (XEXP (addr, 1), 1);
2070 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2071 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2072 reg_names[REGNO (op1)]);
2073 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2074 fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
2076 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2077 fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
2079 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2080 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2081 reg_names[REGNO (op1)]);
2083 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2088 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2092 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2095 case PLUS: /* Indirect with displacement. */
2097 rtx op0 = XEXP (addr, 0);
2098 rtx op1 = XEXP (addr, 1);
2104 if (IS_INDEX_REG (op0))
2106 fprintf (file, "*+%s(%s)",
2107 reg_names[REGNO (op1)],
2108 reg_names[REGNO (op0)]); /* Index + base. */
2112 fprintf (file, "*+%s(%s)",
2113 reg_names[REGNO (op0)],
2114 reg_names[REGNO (op1)]); /* Base + index. */
2117 else if (INTVAL (op1) < 0)
2119 fprintf (file, "*-%s(%d)",
2120 reg_names[REGNO (op0)],
2121 -INTVAL (op1)); /* Base - displacement. */
2125 fprintf (file, "*+%s(%d)",
2126 reg_names[REGNO (op0)],
2127 INTVAL (op1)); /* Base + displacement. */
2131 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
/* LO_SUM: DP-relative direct address; print the symbolic part.  */
2137 rtx op0 = XEXP (addr, 0);
2138 rtx op1 = XEXP (addr, 1);
2140 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2141 c4x_print_operand_address (file, op1);
2143 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2150 fprintf (file, "@");
2151 output_addr_const (file, addr);
2154 /* We shouldn't access CONST_INT addresses. */
2158 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2164 /* Return nonzero if the floating point operand will fit
2165 in the immediate field. */
2168 c4x_immed_float_p (op)
2175 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2176 if (GET_MODE (op) == HFmode)
2177 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2180 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2184 /* Sign extend exponent. */
2185 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
/* Exponent of -128 encodes zero on the C[34]x float format.  */
2186 if (exponent == -128)
2188 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2189 return 0; /* Precision doesn't fit. */
2190 return (exponent <= 7) /* Positive exp. */
2191 && (exponent >= -7); /* Negative exp. */
2195 /* The last instruction in a repeat block cannot be a Bcond, DBcond,
2196 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2198 None of the last four instructions from the bottom of the block can
2199 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2200 BcondAT or RETIcondD.
2202 This routine scans the four previous insns for a jump insn, and if
2203 one is found, returns 1 so that we bung in a nop instruction.
2204 This simple minded strategy will add a nop, when it may not
2205 be required. Say when there is a JUMP_INSN near the end of the
2206 block that doesn't get converted into a delayed branch.
2208 Note that we cannot have a call insn, since we don't generate
2209 repeat loops with calls in them (although I suppose we could, but
2210 there's no benefit.)
2212 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2215 c4x_rptb_nop_p (insn)
2221 /* Extract the start label from the jump pattern (rptb_end). */
2222 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2224 /* If there is a label at the end of the loop we must insert
2227 insn = previous_insn (insn);
2228 } while (GET_CODE (insn) == NOTE
2229 || GET_CODE (insn) == USE
2230 || GET_CODE (insn) == CLOBBER);
2231 if (GET_CODE (insn) == CODE_LABEL)
/* Scan the last four real insns of the block for a jump.  */
2234 for (i = 0; i < 4; i++)
2236 /* Search back for prev non-note and non-label insn. */
2237 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2238 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2240 if (insn == start_label)
2243 insn = previous_insn (insn);
2246 /* If we have a jump instruction we should insert a NOP. If we
2247 hit repeat block top we should only insert a NOP if the loop
2249 if (GET_CODE (insn) == JUMP_INSN)
2251 insn = previous_insn (insn);
2257 /* The C4x looping instruction needs to be emitted at the top of the
2258 loop. Emitting the true RTL for a looping instruction at the top of
2259 the loop can cause problems with flow analysis. So instead, a dummy
2260 doloop insn is emitted at the end of the loop. This routine checks
2261 for the presence of this doloop insn and then searches back to the
2262 top of the loop, where it inserts the true looping insn (provided
2263 there are no instructions in the loop which would cause problems).
2264 Any additional labels can be emitted at this point. In addition, if
2265 the desired loop count register was not allocated, this routine does
2268 Before we can create a repeat block looping instruction we have to
2269 verify that there are no jumps outside the loop and no jumps outside
2270 the loop go into this loop. This can happen in the basic blocks reorder
2271 pass. The C4x cpu can not handle this. */
/* Recursively walk rtx X and return nonzero if it contains a
   LABEL_REF to CODE_LABEL.  */
2274 c4x_label_ref_used_p (x, code_label)
2284 code = GET_CODE (x);
2285 if (code == LABEL_REF)
2286 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2288 fmt = GET_RTX_FORMAT (code);
2289 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2293 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2296 else if (fmt[i] == 'E')
2297 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2298 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
/* Return nonzero if the loop ending at INSN with top label
   START_LABEL can safely use RPTB/RPTS: the start label must be
   found, no outside jump may target a label inside the block, and no
   inside jump may target a label outside it.
   NOTE(review): sampled excerpt; some returns/braces are missing.  */
2306 c4x_rptb_valid_p (insn, start_label)
2307 rtx insn, start_label;
2313 /* Find the start label. */
2314 for (; insn; insn = PREV_INSN (insn))
2315 if (insn == start_label)
2318 /* Note found then we can not use a rptb or rpts. The label was
2319 probably moved by the basic block reorder pass. */
2324 /* If any jump jumps inside this block then we must fail. */
2325 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2327 if (GET_CODE (insn) == CODE_LABEL)
2329 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2330 if (GET_CODE (tmp) == JUMP_INSN
2331 && c4x_label_ref_used_p (tmp, insn))
2335 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2337 if (GET_CODE (insn) == CODE_LABEL)
2339 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2340 if (GET_CODE (tmp) == JUMP_INSN
2341 && c4x_label_ref_used_p (tmp, insn))
2345 /* If any jump jumps outside this block then we must fail. */
2346 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2348 if (GET_CODE (insn) == CODE_LABEL)
2350 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2351 if (GET_CODE (tmp) == JUMP_INSN
2352 && c4x_label_ref_used_p (tmp, insn))
2354 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2355 if (GET_CODE (tmp) == JUMP_INSN
2356 && c4x_label_ref_used_p (tmp, insn))
2361 /* All checks OK. */
/* Given the dummy rptb_end jump INSN at the loop bottom, insert the
   real RPTB (or RPTS) instruction at the loop top.  Falls back to an
   explicit decrement/compare/branch when RC was not allocated as the
   count register or when c4x_rptb_valid_p rejects the block.
   NOTE(review): sampled excerpt; braces and returns are missing.  */
2367 c4x_rptb_insert (insn)
2372 rtx new_start_label;
2375 /* If the count register has not been allocated to RC, say if
2376 there is a movstr pattern in the loop, then do not insert a
2377 RPTB instruction. Instead we emit a decrement and branch
2378 at the end of the loop. */
2379 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2380 if (REGNO (count_reg) != RC_REGNO)
2383 /* Extract the start label from the jump pattern (rptb_end). */
2384 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2386 if (! c4x_rptb_valid_p (insn, start_label))
2388 /* We can not use the rptb insn. Replace it so reorg can use
2389 the delay slots of the jump insn. */
2390 emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
2391 emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
2392 emit_insn_before (gen_bge (start_label), insn);
2393 LABEL_NUSES (start_label)++;
2398 end_label = gen_label_rtx ();
2399 LABEL_NUSES (end_label)++;
2400 emit_label_after (end_label, insn);
2402 new_start_label = gen_label_rtx ();
2403 LABEL_NUSES (new_start_label)++;
/* Walk back to the start label, retargeting any jump that used the
   old label so the old one can be deleted if it becomes unused.  */
2405 for (; insn; insn = PREV_INSN (insn))
2407 if (insn == start_label)
2409 if (GET_CODE (insn) == JUMP_INSN &&
2410 JUMP_LABEL (insn) == start_label)
2411 redirect_jump (insn, new_start_label, 0);
2414 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2416 emit_label_after (new_start_label, insn);
2418 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2419 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2421 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2422 if (LABEL_NUSES (start_label) == 0)
2423 delete_insn (start_label);
2427 /* This function is a C4x special called immediately before delayed
2428 branch scheduling. We fix up RPTB style loops that didn't get RC
2429 allocated as the loop counter. */
2432 c4x_process_after_reload (first)
2437 for (insn = first; insn; insn = NEXT_INSN (insn))
2439 /* Look for insn. */
2442 int insn_code_number;
2445 insn_code_number = recog_memoized (insn);
2447 if (insn_code_number < 0)
2450 /* Insert the RTX for RPTB at the top of the loop
2451 and a label at the end of the loop. */
2452 if (insn_code_number == CODE_FOR_rptb_end)
2453 c4x_rptb_insert(insn);
2455 /* We need to split the insn here. Otherwise the calls to
2456 force_const_mem will not work for load_immed_address. */
2459 /* Don't split the insn if it has been deleted. */
2460 if (! INSN_DELETED_P (old))
2461 insn = try_split (PATTERN(old), old, 1);
2463 /* When not optimizing, the old insn will be still left around
2464 with only the 'deleted' bit set. Transform it into a note
2465 to avoid confusion of subsequent processing. */
2466 if (INSN_DELETED_P (old))
2468 PUT_CODE (old, NOTE);
2469 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2470 NOTE_SOURCE_FILE (old) = 0;
/* Fragments of the constant/register predicate group (the function
   headers for several of these returns were sampled out of this
   listing).  Lines 2481/2489 are presumably the tails of
   c4x_a_register / c4x_x_register -- TODO confirm against the full
   source.  */
2481 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2489 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
/* Nonzero if OP is a CONST_INT usable as an integer immediate.  */
2494 c4x_immed_int_constant (op)
2497 if (GET_CODE (op) != CONST_INT)
2500 return GET_MODE (op) == VOIDmode
/* BUG?(review): GET_MODE_CLASS normally takes a machine mode, not an
   rtx; expected GET_MODE_CLASS (GET_MODE (op)) -- verify whether this
   is a listing artifact or a real defect.  */
2501 || GET_MODE_CLASS (op) == MODE_INT
2502 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
/* Nonzero if OP is a CONST_DOUBLE float immediate (QFmode/HFmode).  */
2507 c4x_immed_float_constant (op)
2510 if (GET_CODE (op) != CONST_DOUBLE)
2513 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2514 present this only means that a MEM rtx has been generated. It does
2515 not mean the rtx is really in memory. */
2517 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
/* Nonzero if OP can be synthesized as a 16-bit constant plus a left
   shift of up to 16 bits.  */
2522 c4x_shiftable_constant (op)
2527 int val = INTVAL (op);
2529 for (i = 0; i < 16; i++)
2534 mask = ((0xffff >> i) << 16) | 0xffff;
2535 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2536 : (val >> i) & mask))
/* Tails of the letter-constraint helpers (c4x_H/I/J/K/L/N/O constant
   checks); their headers were sampled out of this listing.  */
2546 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2554 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2564 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2572 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2574 return IS_INT5_CONST (INTVAL (op));
2582 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2590 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2598 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2602 /* The constraints do not have to check the register class,
2603 except when needed to discriminate between the constraints.
2604 The operand has been checked by the predicates to be valid. */
2606 /* ARx + 9-bit signed const or IRn
2607 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2608 We don't include the pre/post inc/dec forms here since
2609 they are handled by the <> constraints. */
2612 c4x_Q_constraint (op)
2615 enum machine_mode mode = GET_MODE (op);
2617 if (GET_CODE (op) != MEM)
/* NOTE(review): the switch below is on the *address* inside the MEM
   in the full source; this sampled listing omits the XEXP step.  */
2620 switch (GET_CODE (op))
2627 rtx op0 = XEXP (op, 0);
2628 rtx op1 = XEXP (op, 1);
2636 if (GET_CODE (op1) != CONST_INT)
2639 /* HImode and HFmode must be offsettable. */
2640 if (mode == HImode || mode == HFmode)
2641 return IS_DISP8_OFF_CONST (INTVAL (op1));
2643 return IS_DISP8_CONST (INTVAL (op1));
2654 /* ARx + 5-bit unsigned const
2655 *ARx, *+ARx(n) for n < 32. */
2658 c4x_R_constraint (op)
2661 enum machine_mode mode = GET_MODE (op);
2665 if (GET_CODE (op) != MEM)
2668 switch (GET_CODE (op))
2675 rtx op0 = XEXP (op, 0);
2676 rtx op1 = XEXP (op, 1);
2681 if (GET_CODE (op1) != CONST_INT)
2684 /* HImode and HFmode must be offsettable. */
2685 if (mode == HImode || mode == HFmode)
2686 return IS_UINT5_CONST (INTVAL (op1) + 1);
2688 return IS_UINT5_CONST (INTVAL (op1));
/* Pseudo-register variant of the R constraint (C4x only); the
   function header was sampled out of this listing -- presumably
   c4x_R_indirect, TODO confirm.  */
2703 enum machine_mode mode = GET_MODE (op);
2705 if (TARGET_C3X || GET_CODE (op) != MEM)
2709 switch (GET_CODE (op))
2712 return IS_ADDR_OR_PSEUDO_REG (op);
2716 rtx op0 = XEXP (op, 0);
2717 rtx op1 = XEXP (op, 1);
2719 /* HImode and HFmode must be offsettable. */
2720 if (mode == HImode || mode == HFmode)
2721 return IS_ADDR_OR_PSEUDO_REG (op0)
2722 && GET_CODE (op1) == CONST_INT
2723 && IS_UINT5_CONST (INTVAL (op1) + 1);
2726 && IS_ADDR_OR_PSEUDO_REG (op0)
2727 && GET_CODE (op1) == CONST_INT
2728 && IS_UINT5_CONST (INTVAL (op1));
2739 /* ARx + 1-bit unsigned const or IRn
2740 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2741 We don't include the pre/post inc/dec forms here since
2742 they are handled by the <> constraints. */
2745 c4x_S_constraint (op)
2748 enum machine_mode mode = GET_MODE (op);
2749 if (GET_CODE (op) != MEM)
2752 switch (GET_CODE (op))
/* PRE/POST_MODIFY: only reg +/- reg side effects qualify here.  */
2760 rtx op0 = XEXP (op, 0);
2761 rtx op1 = XEXP (op, 1);
2763 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2764 || (op0 != XEXP (op1, 0)))
2767 op0 = XEXP (op1, 0);
2768 op1 = XEXP (op1, 1);
2769 return REG_P (op0) && REG_P (op1);
2770 /* Pre or post_modify with a displacement of 0 or 1
2771 should not be generated. */
2777 rtx op0 = XEXP (op, 0);
2778 rtx op1 = XEXP (op, 1);
2786 if (GET_CODE (op1) != CONST_INT)
2789 /* HImode and HFmode must be offsettable. */
2790 if (mode == HImode || mode == HFmode)
2791 return IS_DISP1_OFF_CONST (INTVAL (op1));
2793 return IS_DISP1_CONST (INTVAL (op1));
/* Pseudo-register variant of the S constraint; the function header
   was sampled out of this listing -- presumably c4x_S_indirect, TODO
   confirm.  */
2808 enum machine_mode mode = GET_MODE (op);
2809 if (GET_CODE (op) != MEM)
2813 switch (GET_CODE (op))
2817 if (mode != QImode && mode != QFmode)
2824 return IS_ADDR_OR_PSEUDO_REG (op);
2829 rtx op0 = XEXP (op, 0);
2830 rtx op1 = XEXP (op, 1);
2832 if (mode != QImode && mode != QFmode)
2835 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2836 || (op0 != XEXP (op1, 0)))
2839 op0 = XEXP (op1, 0);
2840 op1 = XEXP (op1, 1);
2841 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2842 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2843 /* Pre or post_modify with a displacement of 0 or 1
2844 should not be generated. */
2849 rtx op0 = XEXP (op, 0);
2850 rtx op1 = XEXP (op, 1);
2854 /* HImode and HFmode must be offsettable. */
2855 if (mode == HImode || mode == HFmode)
2856 return IS_ADDR_OR_PSEUDO_REG (op0)
2857 && GET_CODE (op1) == CONST_INT
2858 && IS_DISP1_OFF_CONST (INTVAL (op1));
2861 return (IS_INDEX_OR_PSEUDO_REG (op1)
2862 && IS_ADDR_OR_PSEUDO_REG (op0))
2863 || (IS_ADDR_OR_PSEUDO_REG (op1)
2864 && IS_INDEX_OR_PSEUDO_REG (op0));
2866 return IS_ADDR_OR_PSEUDO_REG (op0)
2867 && GET_CODE (op1) == CONST_INT
2868 && IS_DISP1_CONST (INTVAL (op1));
2880 /* Direct memory operand. */
2883 c4x_T_constraint (op)
2886 if (GET_CODE (op) != MEM)
2890 if (GET_CODE (op) != LO_SUM)
2892 /* Allow call operands. */
2893 return GET_CODE (op) == SYMBOL_REF
2894 && GET_MODE (op) == Pmode
2895 && SYMBOL_REF_FLAG (op);
2898 /* HImode and HFmode are not offsettable. */
/* BUG(review): "GET_CODE (op) == HFmode" compares an rtx code against
   a machine mode and can never be the intended test; it should almost
   certainly read GET_MODE (op) == HFmode.  Fix in the full source.  */
2899 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2902 if ((GET_CODE (XEXP (op, 0)) == REG)
2903 && (REGNO (XEXP (op, 0)) == DP_REGNO)
2904 return c4x_U_constraint (XEXP (op, 1));
2910 /* Symbolic operand. */
2913 c4x_U_constraint (op)
2916 /* Don't allow direct addressing to an arbitrary constant. */
2917 return GET_CODE (op) == CONST
2918 || GET_CODE (op) == SYMBOL_REF
2919 || GET_CODE (op) == LABEL_REF;
/* Nonzero if OP is a MEM whose address uses an autoincrement-style
   side effect (pre/post inc/dec/modify).  */
2924 c4x_autoinc_operand (op, mode)
2926 enum machine_mode mode ATTRIBUTE_UNUSED;
2928 if (GET_CODE (op) == MEM)
2930 enum rtx_code code = GET_CODE (XEXP (op, 0));
2936 || code == PRE_MODIFY
2937 || code == POST_MODIFY
2945 /* Match any operand. */
2945 /* Match any operand. */
2948 any_operand (op, mode)
2949 register rtx op ATTRIBUTE_UNUSED;
2950 enum machine_mode mode ATTRIBUTE_UNUSED;
2956 /* Nonzero if OP is a floating point value with value 0.0. */
2959 fp_zero_operand (op, mode)
2961 enum machine_mode mode ATTRIBUTE_UNUSED;
2965 if (GET_CODE (op) != CONST_DOUBLE)
2967 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2968 return REAL_VALUES_EQUAL (r, dconst0);
2973 const_operand (op, mode)
2975 register enum machine_mode mode;
2981 if (GET_CODE (op) != CONST_DOUBLE
2982 || GET_MODE (op) != mode
2983 || GET_MODE_CLASS (mode) != MODE_FLOAT)
2986 return c4x_immed_float_p (op);
2992 if (GET_CODE (op) == CONSTANT_P_RTX)
2995 if (GET_CODE (op) != CONST_INT
2996 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
2997 || GET_MODE_CLASS (mode) != MODE_INT)
3000 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
3012 stik_const_operand (op, mode)
3014 enum machine_mode mode ATTRIBUTE_UNUSED;
3016 return c4x_K_constant (op);
3021 not_const_operand (op, mode)
3023 enum machine_mode mode ATTRIBUTE_UNUSED;
3025 return c4x_N_constant (op);
3030 reg_operand (op, mode)
3032 enum machine_mode mode;
3034 if (GET_CODE (op) == SUBREG
3035 && GET_MODE (op) == QFmode)
3037 return register_operand (op, mode);
/* Nonzero if OP is a float-mode subreg of an integer register, the
   kind produced for a union of an int and a long double; such moves
   need the special int<->float clobber patterns.  */
3042 mixed_subreg_operand (op, mode)
3044 enum machine_mode mode ATTRIBUTE_UNUSED;
3046 /* Allow (subreg:HF (reg:HI)) that may be generated for a union of an
3047 int and a long double. */
3048 if (GET_CODE (op) == SUBREG
3049 && (GET_MODE (op) == QFmode)
3050 && (GET_MODE (SUBREG_REG (op)) == QImode
3051 || GET_MODE (SUBREG_REG (op)) == HImode))
/* Predicate: register or immediate constant.  */
3058 reg_imm_operand (op, mode)
3060 enum machine_mode mode ATTRIBUTE_UNUSED;
3062 if (REG_P (op) || CONSTANT_P (op))
/* Predicate: operand with no autoincrement-style side effect on an
   address register (regs, constants, and plain/offset MEMs).  */
3069 not_modify_reg (op, mode)
3071 enum machine_mode mode ATTRIBUTE_UNUSED;
3073 if (REG_P (op) || CONSTANT_P (op))
3075 if (GET_CODE (op) != MEM)
3078 switch (GET_CODE (op))
3085 rtx op0 = XEXP (op, 0);
3086 rtx op1 = XEXP (op, 1);
3091 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3097 rtx op0 = XEXP (op, 0);
3099 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
/* Predicate: any operand except the RC (repeat count) register.  */
3117 not_rc_reg (op, mode)
3119 enum machine_mode mode ATTRIBUTE_UNUSED;
3121 if (REG_P (op) && REGNO (op) == RC_REGNO)
3127 /* Extended precision register R0-R1. */
3130 r0r1_reg_operand (op, mode)
3132 enum machine_mode mode;
3134 if (! reg_operand (op, mode))
3136 if (GET_CODE (op) == SUBREG)
3137 op = SUBREG_REG (op);
3138 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3142 /* Extended precision register R2-R3. */
3145 r2r3_reg_operand (op, mode)
3147 enum machine_mode mode;
3149 if (! reg_operand (op, mode))
3151 if (GET_CODE (op) == SUBREG)
3152 op = SUBREG_REG (op);
3153 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3157 /* Low extended precision register R0-R7. */
3160 ext_low_reg_operand (op, mode)
3162 enum machine_mode mode;
3164 if (! reg_operand (op, mode))
3166 if (GET_CODE (op) == SUBREG)
3167 op = SUBREG_REG (op);
3168 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3172 /* Extended precision register. */
3175 ext_reg_operand (op, mode)
3177 enum machine_mode mode;
3179 if (! reg_operand (op, mode))
3181 if (GET_CODE (op) == SUBREG)
3182 op = SUBREG_REG (op);
3185 return IS_EXT_OR_PSEUDO_REG (op);
3189 /* Standard precision register. */
3192 std_reg_operand (op, mode)
3194 enum machine_mode mode;
3196 if (! reg_operand (op, mode))
3198 if (GET_CODE (op) == SUBREG)
3199 op = SUBREG_REG (op);
3200 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3203 /* Standard precision or normal register. */
3206 std_or_reg_operand (op, mode)
3208 enum machine_mode mode;
/* During reload, insist on a standard-precision register so spills
   pick a class the instruction can actually use.  */
3210 if (reload_in_progress)
3211 return std_reg_operand (op, mode);
3212 return reg_operand (op, mode);
3215 /* Address register. */
3218 addr_reg_operand (op, mode)
3220 enum machine_mode mode;
3222 if (! reg_operand (op, mode))
3224 return c4x_a_register (op);
3228 /* Index register. */
3231 index_reg_operand (op, mode)
3233 enum machine_mode mode;
3235 if (! reg_operand (op, mode))
3237 if (GET_CODE (op) == SUBREG)
3238 op = SUBREG_REG (op);
3239 return c4x_x_register (op);
/* Data-page pointer register (DP).  */
3246 dp_reg_operand (op, mode)
3248 enum machine_mode mode ATTRIBUTE_UNUSED;
3250 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
/* Stack pointer register (SP).  */
3257 sp_reg_operand (op, mode)
3259 enum machine_mode mode ATTRIBUTE_UNUSED;
3261 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
/* Status register (ST).  */
3268 st_reg_operand (op, mode)
3270 enum machine_mode mode ATTRIBUTE_UNUSED;
3272 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
/* Repeat count register (RC).  */
3279 rc_reg_operand (op, mode)
3281 enum machine_mode mode ATTRIBUTE_UNUSED;
3283 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3288 call_address_operand (op, mode)
3290 enum machine_mode mode ATTRIBUTE_UNUSED;
3292 return (REG_P (op) || symbolic_address_operand (op, mode));
3296 /* Symbolic address operand. */
3299 symbolic_address_operand (op, mode)
3301 enum machine_mode mode ATTRIBUTE_UNUSED;
3303 switch (GET_CODE (op))
3315 /* Check dst operand of a move instruction. */
3318 dst_operand (op, mode)
3320 enum machine_mode mode;
3322 if (GET_CODE (op) == SUBREG
3323 && mixed_subreg_operand (op, mode))
3327 return reg_operand (op, mode);
3329 return nonimmediate_operand (op, mode);
3333 /* Check src operand of two operand arithmetic instructions. */
3336 src_operand (op, mode)
3338 enum machine_mode mode;
3340 if (GET_CODE (op) == SUBREG
3341 && mixed_subreg_operand (op, mode))
3345 return reg_operand (op, mode);
3347 if (mode == VOIDmode)
3348 mode = GET_MODE (op);
3350 if (GET_CODE (op) == CONST_INT)
3351 return (mode == QImode || mode == Pmode || mode == HImode)
3352 && c4x_I_constant (op);
3354 /* We don't like CONST_DOUBLE integers. */
3355 if (GET_CODE (op) == CONST_DOUBLE)
3356 return c4x_H_constant (op);
3358 /* Disallow symbolic addresses. Only the predicate
3359 symbolic_address_operand will match these. */
3360 if (GET_CODE (op) == SYMBOL_REF
3361 || GET_CODE (op) == LABEL_REF
3362 || GET_CODE (op) == CONST)
3365 /* If TARGET_LOAD_DIRECT_MEMS is non-zero, disallow direct memory
3366 access to symbolic addresses. These operands will get forced
3367 into a register and the movqi expander will generate a
3368 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is non-zero. */
3369 if (GET_CODE (op) == MEM
3370 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3371 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3372 || GET_CODE (XEXP (op, 0)) == CONST)))
3373 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3375 return general_operand (op, mode);
3380 src_hi_operand (op, mode)
3382 enum machine_mode mode;
3384 if (c4x_O_constant (op))
3386 return src_operand (op, mode);
3390 /* Check src operand of two operand logical instructions. */
3393 lsrc_operand (op, mode)
3395 enum machine_mode mode;
3397 if (mode == VOIDmode)
3398 mode = GET_MODE (op);
3400 if (mode != QImode && mode != Pmode)
3401 fatal_insn ("mode not QImode", op);
3403 if (GET_CODE (op) == CONST_INT)
3404 return c4x_L_constant (op) || c4x_J_constant (op);
3406 return src_operand (op, mode);
3410 /* Check src operand of two operand tricky instructions. */
3413 tsrc_operand (op, mode)
3415 enum machine_mode mode;
3417 if (mode == VOIDmode)
3418 mode = GET_MODE (op);
3420 if (mode != QImode && mode != Pmode)
3421 fatal_insn ("mode not QImode", op);
3423 if (GET_CODE (op) == CONST_INT)
3424 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3426 return src_operand (op, mode);
3430 /* Check src operand of two operand non immedidate instructions. */
3433 nonimmediate_src_operand (op, mode)
3435 enum machine_mode mode;
3437 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3440 return src_operand (op, mode);
3444 /* Check logical src operand of two operand non immedidate instructions. */
3447 nonimmediate_lsrc_operand (op, mode)
3449 enum machine_mode mode;
3451 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3454 return lsrc_operand (op, mode);
3459 reg_or_const_operand (op, mode)
3461 enum machine_mode mode;
3463 return reg_operand (op, mode) || const_operand (op, mode);
3467 /* Check for indirect operands allowable in parallel instruction. */
3470 par_ind_operand (op, mode)
3472 enum machine_mode mode;
3474 if (mode != VOIDmode && mode != GET_MODE (op))
3477 return c4x_S_indirect (op);
3481 /* Check for operands allowable in parallel instruction. */
3484 parallel_operand (op, mode)
3486 enum machine_mode mode;
3488 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
/* Decompose the S-class indirect memory address OP into BASE and INDEX
   register numbers, an auto-increment/decrement flag INCDEC, and a
   constant displacement DISP.
   NOTE(review): this listing is elided -- the return type/parameter
   declarations, the switch case labels (presumably PRE_DEC, POST_DEC,
   PRE_INC, POST_INC, PRE_MODIFY, POST_MODIFY, REG, PLUS -- TODO confirm)
   and several `return`/brace lines from the original are missing.  */
3493 c4x_S_address_parse (op, base, incdec, index, disp)
3505 if (GET_CODE (op) != MEM)
3506 fatal_insn ("invalid indirect memory address", op);
3509 switch (GET_CODE (op))
3512 *base = REGNO (XEXP (op, 0));
3518 *base = REGNO (XEXP (op, 0));
3524 *base = REGNO (XEXP (op, 0));
3530 *base = REGNO (XEXP (op, 0));
/* The following case handles a modify address with either a register
   or constant increment.  */
3536 *base = REGNO (XEXP (op, 0));
3537 if (REG_P (XEXP (XEXP (op, 1), 1)))
3539 *index = REGNO (XEXP (XEXP (op, 1), 1));
3540 *disp = 0; /* ??? */
3543 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3548 *base = REGNO (XEXP (op, 0));
3549 if (REG_P (XEXP (XEXP (op, 1), 1)))
3551 *index = REGNO (XEXP (XEXP (op, 1), 1));
3552 *disp = 1; /* ??? */
3555 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
/* PLUS: base plus either an index register or a constant
   displacement, in either operand order.  */
3566 rtx op0 = XEXP (op, 0);
3567 rtx op1 = XEXP (op, 1);
3569 if (c4x_a_register (op0))
3571 if (c4x_x_register (op1))
3573 *base = REGNO (op0);
3574 *index = REGNO (op1);
3577 else if ((GET_CODE (op1) == CONST_INT
3578 && IS_DISP1_CONST (INTVAL (op1))))
3580 *base = REGNO (op0);
3581 *disp = INTVAL (op1);
3585 else if (c4x_x_register (op0) && c4x_a_register (op1))
3587 *base = REGNO (op1);
3588 *index = REGNO (op0);
/* Anything else is not a valid S-class indirect address.  */
3595 fatal_insn ("invalid indirect (S) memory address", op);
3601 c4x_address_conflict (op0, op1, store0, store1)
3616 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3619 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3620 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3622 if (store0 && store1)
3624 /* If we have two stores in parallel to the same address, then
3625 the C4x only executes one of the stores. This is unlikely to
3626 cause problems except when writing to a hardware device such
3627 as a FIFO since the second write will be lost. The user
3628 should flag the hardware location as being volatile so that
3629 we don't do this optimisation. While it is unlikely that we
3630 have an aliased address if both locations are not marked
3631 volatile, it is probably safer to flag a potential conflict
3632 if either location is volatile. */
3633 if (! flag_argument_noalias)
3635 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3640 /* If have a parallel load and a store to the same address, the load
3641 is performed first, so there is no conflict. Similarly, there is
3642 no conflict if have parallel loads from the same address. */
3644 /* Cannot use auto increment or auto decrement twice for same
3646 if (base0 == base1 && incdec0 && incdec0)
3649 /* It might be too confusing for GCC if we have use a base register
3650 with a side effect and a memory reference using the same register
3652 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3655 /* We can not optimize the case where op1 and op2 refer to the same
3657 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3665 /* Check for while loop inside a decrement and branch loop. */
3668 c4x_label_conflict (insn, jump, db)
3675 if (GET_CODE (insn) == CODE_LABEL)
3677 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3679 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3682 insn = PREV_INSN (insn);
3688 /* Validate combination of operands for parallel load/store instructions. */
3691 valid_parallel_load_store (operands, mode)
3693 enum machine_mode mode ATTRIBUTE_UNUSED;
3695 rtx op0 = operands[0];
3696 rtx op1 = operands[1];
3697 rtx op2 = operands[2];
3698 rtx op3 = operands[3];
3700 if (GET_CODE (op0) == SUBREG)
3701 op0 = SUBREG_REG (op0);
3702 if (GET_CODE (op1) == SUBREG)
3703 op1 = SUBREG_REG (op1);
3704 if (GET_CODE (op2) == SUBREG)
3705 op2 = SUBREG_REG (op2);
3706 if (GET_CODE (op3) == SUBREG)
3707 op3 = SUBREG_REG (op3);
3709 /* The patterns should only allow ext_low_reg_operand() or
3710 par_ind_operand() operands. Thus of the 4 operands, only 2
3711 should be REGs and the other 2 should be MEMs. */
3713 /* This test prevents the multipack pass from using this pattern if
3714 op0 is used as an index or base register in op2 or op3, since
3715 this combination will require reloading. */
3716 if (GET_CODE (op0) == REG
3717 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3718 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3722 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3723 return (REGNO (op0) != REGNO (op2))
3724 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3725 && ! c4x_address_conflict (op1, op3, 0, 0);
3728 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3729 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3730 && ! c4x_address_conflict (op0, op2, 1, 1);
3733 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3734 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3735 && ! c4x_address_conflict (op1, op2, 0, 1);
3738 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3739 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3740 && ! c4x_address_conflict (op0, op3, 1, 0);
3747 valid_parallel_operands_4 (operands, mode)
3749 enum machine_mode mode ATTRIBUTE_UNUSED;
3751 rtx op0 = operands[0];
3752 rtx op2 = operands[2];
3754 if (GET_CODE (op0) == SUBREG)
3755 op0 = SUBREG_REG (op0);
3756 if (GET_CODE (op2) == SUBREG)
3757 op2 = SUBREG_REG (op2);
3759 /* This test prevents the multipack pass from using this pattern if
3760 op0 is used as an index or base register in op2, since this combination
3761 will require reloading. */
3762 if (GET_CODE (op0) == REG
3763 && GET_CODE (op2) == MEM
3764 && reg_mentioned_p (op0, XEXP (op2, 0)))
3772 valid_parallel_operands_5 (operands, mode)
3774 enum machine_mode mode ATTRIBUTE_UNUSED;
3777 rtx op0 = operands[0];
3778 rtx op1 = operands[1];
3779 rtx op2 = operands[2];
3780 rtx op3 = operands[3];
3782 if (GET_CODE (op0) == SUBREG)
3783 op0 = SUBREG_REG (op0);
3784 if (GET_CODE (op1) == SUBREG)
3785 op1 = SUBREG_REG (op1);
3786 if (GET_CODE (op2) == SUBREG)
3787 op2 = SUBREG_REG (op2);
3789 /* The patterns should only allow ext_low_reg_operand() or
3790 par_ind_operand() operands. Operands 1 and 2 may be commutative
3791 but only one of them can be a register. */
3792 if (GET_CODE (op1) == REG)
3794 if (GET_CODE (op2) == REG)
3800 /* This test prevents the multipack pass from using this pattern if
3801 op0 is used as an index or base register in op3, since this combination
3802 will require reloading. */
3803 if (GET_CODE (op0) == REG
3804 && GET_CODE (op3) == MEM
3805 && reg_mentioned_p (op0, XEXP (op3, 0)))
3813 valid_parallel_operands_6 (operands, mode)
3815 enum machine_mode mode ATTRIBUTE_UNUSED;
3818 rtx op0 = operands[0];
3819 rtx op1 = operands[1];
3820 rtx op2 = operands[2];
3821 rtx op4 = operands[4];
3822 rtx op5 = operands[5];
3824 if (GET_CODE (op1) == SUBREG)
3825 op1 = SUBREG_REG (op1);
3826 if (GET_CODE (op2) == SUBREG)
3827 op2 = SUBREG_REG (op2);
3828 if (GET_CODE (op4) == SUBREG)
3829 op4 = SUBREG_REG (op4);
3830 if (GET_CODE (op5) == SUBREG)
3831 op5 = SUBREG_REG (op5);
3833 /* The patterns should only allow ext_low_reg_operand() or
3834 par_ind_operand() operands. Thus of the 4 input operands, only 2
3835 should be REGs and the other 2 should be MEMs. */
3837 if (GET_CODE (op1) == REG)
3839 if (GET_CODE (op2) == REG)
3841 if (GET_CODE (op4) == REG)
3843 if (GET_CODE (op5) == REG)
3846 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3847 Perhaps we should count the MEMs as well? */
3851 /* This test prevents the multipack pass from using this pattern if
3852 op0 is used as an index or base register in op4 or op5, since
3853 this combination will require reloading. */
3854 if (GET_CODE (op0) == REG
3855 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3856 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3863 /* Validate combination of src operands. Note that the operands have
3864 been screened by the src_operand predicate. We just have to check
3865 that the combination of operands is valid. If FORCE is set, ensure
3866 that the destination regno is valid if we have a 2 operand insn. */
/* NOTE(review): this listing is elided -- the parameter declarations
   for `code' and `operands', several branches of the constant/memory
   handling, and most braces are missing from the original.  */
3869 c4x_valid_operands (code, operands, mode, force)
3872 enum machine_mode mode ATTRIBUTE_UNUSED;
3877 enum rtx_code code1;
3878 enum rtx_code code2;
/* COMPARE has no destination, so the operands to validate start at
   index 0 rather than 1 -- presumably handled just below; elided.  */
3880 if (code == COMPARE)
3891 if (GET_CODE (op1) == SUBREG)
3892 op1 = SUBREG_REG (op1);
3893 if (GET_CODE (op2) == SUBREG)
3894 op2 = SUBREG_REG (op2);
3896 code1 = GET_CODE (op1);
3897 code2 = GET_CODE (op2);
3899 if (code1 == REG && code2 == REG)
3902 if (code1 == MEM && code2 == MEM)
/* Two memory operands are only valid if both are S-class indirect,
   or both are R-class indirect.  */
3904 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3906 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
/* Case: op2 constant, op1 memory.  */
3917 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3922 if (! c4x_H_constant (op2))
3926 /* Any valid memory operand screened by src_operand is OK. */
3929 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3930 into a stack slot memory address comprising a PLUS and a
/* constant -- continuation elided.  */
3936 fatal_insn ("c4x_valid_operands: Internal error", op2);
3940 /* Check that we have a valid destination register for a two operand
/* instruction (elided continuation of comment).  */
3942 return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
3945 /* We assume MINUS is commutative since the subtract patterns
3946 also support the reverse subtract instructions. Since op1
3947 is not a register, and op2 is a register, op1 can only
3948 be a restricted memory operand for a shift instruction. */
3949 if (code == ASHIFTRT || code == LSHIFTRT
3950 || code == ASHIFT || code == COMPARE)
3952 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
/* Case: op1 constant, op2 memory (mirror of the above).  */
3957 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3962 if (! c4x_H_constant (op1))
3966 /* Any valid memory operand screened by src_operand is OK. */
3974 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3975 into a stack slot memory address comprising a PLUS and a
/* constant -- continuation elided.  */
3985 /* Check that we have a valid destination register for a two operand
/* instruction (elided continuation of comment).  */
3987 return ! force || REGNO (op1) == REGNO (operands[0]);
3991 int valid_operands (code, operands, mode)
3994 enum machine_mode mode;
3997 /* If we are not optimizing then we have to let anything go and let
3998 reload fix things up. instantiate_decl in function.c can produce
3999 invalid insns by changing the offset of a memory operand from a
4000 valid one into an invalid one, when the second operand is also a
4001 memory operand. The alternative is not to allow two memory
4002 operands for an insn when not optimizing. The problem only rarely
4003 occurs, for example with the C-torture program DFcmp.c. */
4005 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
/* Massage OPERANDS so that they form a valid combination for CODE,
   forcing constants and invalid operands into registers as needed.
   NOTE(review): this listing is elided -- the return type, parameter
   declarations, several condition lines (e.g. the `optimize' tests at
   original lines 4033-4034 and 4051-4052), and closing braces are
   missing from the original.  */
4010 legitimize_operands (code, operands, mode)
4013 enum machine_mode mode;
4015 /* Compare only has 2 operands. */
4016 if (code == COMPARE)
4018 /* During RTL generation, force constants into pseudos so that
4019 they can get hoisted out of loops. This will tie up an extra
4020 register but can save an extra cycle. Only do this if loop
4021 optimisation enabled. (We cannot pull this trick for add and
4022 sub instructions since the flow pass won't find
4023 autoincrements etc.) This allows us to generate compare
4024 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4025 of LDI *AR0++, R0; CMPI 42, R0.
4027 Note that expand_binops will try to load an expensive constant
4028 into a register if it is used within a loop. Unfortunately,
4029 the cost mechanism doesn't allow us to look at the other
4030 operand to decide whether the constant is expensive. */
4032 if (! reload_in_progress
4035 && GET_CODE (operands[1]) == CONST_INT
4036 && preserve_subexpressions_p ()
4037 && rtx_cost (operands[1], code) > 1)
4038 operands[1] = force_reg (mode, operands[1]);
4040 if (! reload_in_progress
4041 && ! c4x_valid_operands (code, operands, mode, 0))
4042 operands[0] = force_reg (mode, operands[0]);
4046 /* We cannot do this for ADDI/SUBI insns since we will
4047 defeat the flow pass from finding autoincrement addressing
/* opportunities (elided continuation of comment).  */
4049 if (! reload_in_progress
4050 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
4053 && GET_CODE (operands[2]) == CONST_INT
4054 && preserve_subexpressions_p ()
4055 && rtx_cost (operands[2], code) > 1)
4056 operands[2] = force_reg (mode, operands[2]);
4058 /* We can get better code on a C30 if we force constant shift counts
4059 into a register. This way they can get hoisted out of loops,
4060 tying up a register, but saving an instruction. The downside is
4061 that they may get allocated to an address or index register, and
4062 thus we will get a pipeline conflict if there is a nearby
4063 indirect address using an address register.
4065 Note that expand_binops will not try to load an expensive constant
4066 into a register if it is used within a loop for a shift insn. */
4068 if (! reload_in_progress
4069 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
4071 /* If the operand combination is invalid, we force operand1 into a
4072 register, preventing reload from having doing to do this at a
/* later stage (elided continuation of comment).  */
4074 operands[1] = force_reg (mode, operands[1]);
/* TARGET_FORCE branch: copy operand1 into the destination so that a
   two-operand insn can be used -- surrounding condition elided.  */
4077 emit_move_insn (operands[0], operands[1]);
4078 operands[1] = copy_rtx (operands[0]);
4082 /* Just in case... */
4083 if (! c4x_valid_operands (code, operands, mode, 0))
4084 operands[2] = force_reg (mode, operands[2]);
4088 /* Right shifts require a negative shift count, but GCC expects
4089 a positive count, so we emit a NEG. */
4090 if ((code == ASHIFTRT || code == LSHIFTRT)
4091 && (GET_CODE (operands[2]) != CONST_INT))
4092 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
4098 /* The following predicates are used for instruction scheduling. */
4101 group1_reg_operand (op, mode)
4103 enum machine_mode mode;
4105 if (mode != VOIDmode && mode != GET_MODE (op))
4107 if (GET_CODE (op) == SUBREG)
4108 op = SUBREG_REG (op);
4109 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4114 group1_mem_operand (op, mode)
4116 enum machine_mode mode;
4118 if (mode != VOIDmode && mode != GET_MODE (op))
4121 if (GET_CODE (op) == MEM)
4124 if (GET_CODE (op) == PLUS)
4126 rtx op0 = XEXP (op, 0);
4127 rtx op1 = XEXP (op, 1);
4129 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4130 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4133 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4141 /* Return true if any one of the address registers. */
4144 arx_reg_operand (op, mode)
4146 enum machine_mode mode;
4148 if (mode != VOIDmode && mode != GET_MODE (op))
4150 if (GET_CODE (op) == SUBREG)
4151 op = SUBREG_REG (op);
4152 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4157 c4x_arn_reg_operand (op, mode, regno)
4159 enum machine_mode mode;
4162 if (mode != VOIDmode && mode != GET_MODE (op))
4164 if (GET_CODE (op) == SUBREG)
4165 op = SUBREG_REG (op);
4166 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
/* Return non-zero if OP is a memory reference whose address uses (or,
   before reload, might use) register REGNO, either as base, index, or
   inside an auto-modify address.  Helper for the arN/irN mem predicates.
   NOTE(review): this listing is elided -- the `static int' return type,
   `rtx op;'/`unsigned int regno;' declarations, the switch case labels
   (presumably PRE_DEC/POST_DEC/.../PLUS -- TODO confirm) and several
   `return' lines are missing from the original.  */
4171 c4x_arn_mem_operand (op, mode, regno)
4173 enum machine_mode mode;
4176 if (mode != VOIDmode && mode != GET_MODE (op))
4179 if (GET_CODE (op) == MEM)
4182 switch (GET_CODE (op))
/* Simple register address.  */
4191 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
/* Auto-modify address: check both the base register and, for
   register-modify forms, the modifying register.  */
4195 if (REG_P (XEXP (op, 0)) && (! reload_completed
4196 || (REGNO (XEXP (op, 0)) == regno)))
4198 if (REG_P (XEXP (XEXP (op, 1), 1))
4199 && (! reload_completed
4200 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
/* PLUS: check both base and index.  */
4206 rtx op0 = XEXP (op, 0);
4207 rtx op1 = XEXP (op, 1);
4209 if ((REG_P (op0) && (! reload_completed
4210 || (REGNO (op0) == regno)))
4211 || (REG_P (op1) && (! reload_completed
4212 || (REGNO (op1) == regno))))
4226 ar0_reg_operand (op, mode)
4228 enum machine_mode mode;
4230 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4235 ar0_mem_operand (op, mode)
4237 enum machine_mode mode;
4239 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4244 ar1_reg_operand (op, mode)
4246 enum machine_mode mode;
4248 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4253 ar1_mem_operand (op, mode)
4255 enum machine_mode mode;
4257 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4262 ar2_reg_operand (op, mode)
4264 enum machine_mode mode;
4266 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4271 ar2_mem_operand (op, mode)
4273 enum machine_mode mode;
4275 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4280 ar3_reg_operand (op, mode)
4282 enum machine_mode mode;
4284 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4289 ar3_mem_operand (op, mode)
4291 enum machine_mode mode;
4293 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4298 ar4_reg_operand (op, mode)
4300 enum machine_mode mode;
4302 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4307 ar4_mem_operand (op, mode)
4309 enum machine_mode mode;
4311 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4316 ar5_reg_operand (op, mode)
4318 enum machine_mode mode;
4320 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4325 ar5_mem_operand (op, mode)
4327 enum machine_mode mode;
4329 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4334 ar6_reg_operand (op, mode)
4336 enum machine_mode mode;
4338 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4343 ar6_mem_operand (op, mode)
4345 enum machine_mode mode;
4347 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4352 ar7_reg_operand (op, mode)
4354 enum machine_mode mode;
4356 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4361 ar7_mem_operand (op, mode)
4363 enum machine_mode mode;
4365 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4370 ir0_reg_operand (op, mode)
4372 enum machine_mode mode;
4374 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4379 ir0_mem_operand (op, mode)
4381 enum machine_mode mode;
4383 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4388 ir1_reg_operand (op, mode)
4390 enum machine_mode mode;
4392 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4397 ir1_mem_operand (op, mode)
4399 enum machine_mode mode;
4401 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4405 /* This is similar to operand_subword but allows autoincrement
/* addressing (continuation of comment elided).  Returns word I of the
   HImode/HFmode operand OP as a new rtx.
   NOTE(review): this listing is elided -- the `rtx' return type,
   `rtx op;' declaration, the submode selection for the HImode case,
   the autoincrement/offset handling between lines 4437 and 4469, and
   several braces are missing from the original.  */
4409 c4x_operand_subword (op, i, validate_address, mode)
4412 int validate_address;
4413 enum machine_mode mode;
4415 if (mode != HImode && mode != HFmode)
4416 fatal_insn ("c4x_operand_subword: invalid mode", op);
/* HFmode registers cannot be split into subwords.  */
4418 if (mode == HFmode && REG_P (op))
4419 fatal_insn ("c4x_operand_subword: invalid operand", op);
4421 if (GET_CODE (op) == MEM)
4423 enum rtx_code code = GET_CODE (XEXP (op, 0));
4424 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4425 enum machine_mode submode;
/* Select the single-word submode (QImode or QFmode) -- the HImode
   branch is elided here.  */
4430 else if (mode == HFmode)
/* For autoincrement addresses, both subwords reference the same
   (side-effecting) address.  */
4437 return gen_rtx_MEM (submode, XEXP (op, 0));
4443 /* We could handle these with some difficulty.
4444 e.g., *p-- => *(p-=2); *(p+1). */
4445 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4451 fatal_insn ("c4x_operand_subword: invalid address", op);
4453 /* Even though offsettable_address_p considers (MEM
4454 (LO_SUM)) to be offsettable, it is not safe if the
4455 address is at the end of the data page since we also have
4456 to fix up the associated high PART. In this case where
4457 we are trying to split a HImode or HFmode memory
4458 reference, we would have to emit another insn to reload a
4459 new HIGH value. It's easier to disable LO_SUM memory references
4460 in HImode or HFmode and we probably get better code. */
4462 fatal_insn ("c4x_operand_subword: address not offsettable", op);
/* Non-MEM operands are handled by the generic routine.  */
4469 return operand_subword (op, i, validate_address, mode);
/* Singly-linked list of symbol names, used to track which symbols have
   been emitted as .global and which still need a .ref directive.  */

struct name_list
{
  struct name_list *next;
  const char *name;
};

static struct name_list *global_head;
static struct name_list *extern_head;
4482 /* Add NAME to list of global symbols and remove from external list if
4483 present on external list. */
/* NOTE(review): this listing is elided -- the `void' return type, the
   `const char *name;' declaration, the loop frames for both list
   traversals, the `p->name = name; global_head = p;' insertion lines,
   and closing braces are missing from the original.  */
4486 c4x_global_label (name)
4489 struct name_list *p, *last;
4491 /* Do not insert duplicate names, so linearly search through list of
/* existing names (continuation of comment elided).  */
4496 if (strcmp (p->name, name) == 0)
/* Not found: allocate a node and push it onto the global list.  */
4500 p = (struct name_list *) xmalloc (sizeof *p);
4501 p->next = global_head;
4505 /* Remove this name from ref list if present. */
4510 if (strcmp (p->name, name) == 0)
/* Unlink: mid-list node vs. head of the extern list.  */
4513 last->next = p->next;
4515 extern_head = p->next;
4524 /* Add NAME to list of external symbols. */
/* NOTE(review): this listing is elided -- the `void' return type, the
   `const char *name;' declaration, the loop frames for both list
   traversals, the `p->name = name; extern_head = p;' insertion lines,
   and closing braces are missing from the original.  */
4527 c4x_external_ref (name)
4530 struct name_list *p;
4532 /* Do not insert duplicate names. */
4536 if (strcmp (p->name, name) == 0)
4541 /* Do not insert ref if global found. */
4545 if (strcmp (p->name, name) == 0)
/* Not present on either list: push a new node onto the extern list.  */
4549 p = (struct name_list *) xmalloc (sizeof *p);
4550 p->next = extern_head;
/* NOTE(review): fragment of the end-of-file output routine (presumably
   c4x_file_end or similar -- its header line is not visible in this
   listing).  Emits a .ref directive for every recorded external symbol
   and terminates the assembly file with .end.  The loop frame and the
   trailing newline output are elided.  */
4560 struct name_list *p;
4562 /* Output all external names that are not global. */
4566 fprintf (fp, "\t.ref\t");
4567 assemble_name (fp, p->name);
4571 fprintf (fp, "\t.end\n");
/* If DECL's name appears on the pragma-built LIST, prepend the
   attribute ATTRIB (with the value recorded on LIST) to *ATTRIBUTES.
   NOTE(review): this listing is elided -- the `static void' return
   type, the `const char *attrib;' declaration, the early return when
   the name is not found on LIST, and the tail of the tree_cons call
   are missing from the original.  */
4576 c4x_check_attribute (attrib, list, decl, attributes)
4578 tree list, decl, *attributes;
/* Walk LIST looking for an entry whose purpose matches DECL's name;
   relies on IDENTIFIER_POINTER pointer equality for interned names.  */
4580 while (list != NULL_TREE
4581 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4582 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4583 list = TREE_CHAIN (list);
4585 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
/* Target hook: attach pragma-specified attributes to DECL.  Functions
   pick up section/const/noreturn/interrupt pragmas; data declarations
   pick up section pragmas.
   NOTE(review): this listing is elided -- the `static void' return
   type, the switch case labels (presumably FUNCTION_DECL and VAR_DECL
   -- TODO confirm), `break' statements, and the default case are
   missing from the original.  */
4591 c4x_insert_attributes (decl, attributes)
4592 tree decl, *attributes;
4594 switch (TREE_CODE (decl))
/* Function declarations.  */
4597 c4x_check_attribute ("section", code_tree, decl, attributes);
4598 c4x_check_attribute ("const", pure_tree, decl, attributes);
4599 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4600 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
/* Data declarations.  */
4604 c4x_check_attribute ("section", data_tree, decl, attributes);
4612 /* Table of valid machine attributes. */
4613 const struct attribute_spec c4x_attribute_table[] =
/* The table is terminated by the all-NULL sentinel entry; all three
   attributes apply to FUNCTION_TYPE nodes and take no arguments.  */
4615 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4616 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4617 /* FIXME: code elsewhere in this file treats "naked" as a synonym of
4618 "interrupt"; should it be accepted here? */
4619 { "assembler", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4620 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4621 { NULL, 0, 0, false, false, false, NULL }
4624 /* Handle an attribute requiring a FUNCTION_TYPE;
4625 arguments as in struct attribute_spec.handler. */
4627 c4x_handle_fntype_attribute (node, name, args, flags, no_add_attrs)
4630 tree args ATTRIBUTE_UNUSED;
4631 int flags ATTRIBUTE_UNUSED;
4634 if (TREE_CODE (*node) != FUNCTION_TYPE)
4636 warning ("`%s' attribute only applies to functions",
4637 IDENTIFIER_POINTER (name));
4638 *no_add_attrs = true;
4645 /* !!! FIXME to emit RPTS correctly. */
/* Return non-zero if the repeat block starting at INSN can be emitted
   as a single-instruction RPTS loop with repeat count OP.
   NOTE(review): this listing is elided -- the `int' return type, the
   `rtx insn; rtx op;' declarations, several `return 0;' lines, and the
   checks between the rptb_end test and the final return (presumably
   verifying the loop body is a single instruction -- TODO confirm)
   are missing from the original.  */
4648 c4x_rptb_rpts_p (insn, op)
4651 /* The next insn should be our label marking where the
4652 repeat block starts. */
4653 insn = NEXT_INSN (insn);
4654 if (GET_CODE (insn) != CODE_LABEL)
4656 /* Some insns may have been shifted between the RPTB insn
4657 and the top label... They were probably destined to
4658 be moved out of the loop. For now, let's leave them
4659 where they are and print a warning. We should
4660 probably move these insns before the repeat block insn. */
4662 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4667 /* Skip any notes. */
4668 insn = next_nonnote_insn (insn);
4670 /* This should be our first insn in the loop. */
4671 if (! INSN_P (insn))
4674 /* Skip any notes. */
4675 insn = next_nonnote_insn (insn);
4677 if (! INSN_P (insn))
/* The second insn must be the rptb_end marker, i.e. the loop body is
   exactly one instruction long.  */
4680 if (recog_memoized (insn) != CODE_FOR_rptb_end)
/* RPTS is only worthwhile when the repeat count is a constant and the
   TARGET_RPTS_CYCLES heuristic accepts it.  */
4686 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4690 /* Check if register r11 is used as the destination of an insn. */
/* NOTE(review): the function header (presumably `static int
   c4x_r11_set_p (x)' with `rtx x;' -- TODO confirm), the local
   declarations of `set', `fmt', `i', `j', the `if (x == 0) return 0;'
   guard, and the final `return 0;' are elided from this listing.
   Recursively walks the rtx X looking for a SET whose destination is
   register R11.  */
4703 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
/* For a delay-slot SEQUENCE, examine the last insn in the sequence.  */
4704 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
/* For a single-set insn, look directly at the SET destination.  */
4706 if (INSN_P (x) && (set = single_set (x)))
4709 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
/* Otherwise recurse over all sub-expressions of X.  */
4712 fmt = GET_RTX_FORMAT (GET_CODE (x));
4713 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4717 if (c4x_r11_set_p (XEXP (x, i)))
4720 else if (fmt[i] == 'E')
4721 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4722 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4729 /* The c4x sometimes has a problem when the insn before the laj insn
4730 sets the r11 register. Check for this situation. */
/* Returns non-zero if a NOP must be inserted before the LAJ insn.
   NOTE(review): this listing is elided -- the `int' return type, the
   `rtx insn;' declaration, the `if (insn == 0) return 0;' guard after
   prev_nonnote_insn, the `return 1;' lines, the final `return 0;',
   and the braces are missing from the original.  */
4733 c4x_check_laj_p (insn)
4736 insn = prev_nonnote_insn (insn);
4738 /* If this is the start of the function no nop is needed. */
4742 /* If the previous insn is a code label we have to insert a nop. This
4743 could be a jump or table jump. We can find the normal jumps by
4744 scanning the function but this will not find table jumps. */
4745 if (GET_CODE (insn) == CODE_LABEL)
4748 /* If the previous insn sets register r11 we have to insert a nop. */
4749 if (c4x_r11_set_p (insn))
4752 /* No nop needed. */
4757 /* Adjust the cost of a scheduling dependency. Return the new cost of
4758 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4759 A set of an address register followed by a use occurs a 2 cycle
4760 stall (reduced to a single cycle on the c40 using LDA), while
4761 a read of an address register followed by a use occurs a single cycle. */
/* Cost constants for c4x_adjust_cost below: cycles between a set (plain
   or via LDA) or read of an address register and a subsequent use.  */
4763 #define SET_USE_COST 3
4764 #define SETLDA_USE_COST 2
4765 #define READ_USE_COST 2
4768 c4x_adjust_cost (insn, link, dep_insn, cost)
/* NOTE(review): interior fragment of the scheduler dependence-cost hook
   (presumably c4x_adjust_cost -- the enclosing definition begins before
   this excerpt and several original lines, including braces and returns,
   are not visible here; confirm against the full file).  The visible
   logic raises a running `max' to the worst-case pipeline-stall cost for
   each set-then-use, LDA-set-then-use, or read-then-use pairing of the
   auxiliary (ar0-ar7) and index (ir0-ir1) address registers.  */
4774 /* Don't worry about this until we know what registers have been
4776 if (flag_schedule_insns == 0 && ! reload_completed)
4779 /* How do we handle dependencies where a read followed by another
4780 read causes a pipeline stall? For example, a read of ar0 followed
4781 by the use of ar0 for a memory reference. It looks like we
4782 need to extend the scheduler to handle this case. */
4784 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4785 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4786 so only deal with insns we know about. */
4787 if (recog_memoized (dep_insn) < 0)
/* A REG_NOTE_KIND of 0 denotes a true (read-after-write) data
   dependence between DEP_INSN and INSN.  */
4790 if (REG_NOTE_KIND (link) == 0)
4794 /* Data dependency; DEP_INSN writes a register that INSN reads some
/* Group-1 registers first: the penalty differs depending on whether
   the producer sets the register or merely reads one of the arN.  */
4798 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4799 max = SET_USE_COST > max ? SET_USE_COST : max;
4800 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4801 max = READ_USE_COST > max ? READ_USE_COST : max;
4805 /* This could be significantly optimized. We should look
4806 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4807 insn uses ar0-ar7. We then test if the same register
4808 is used. The tricky bit is that some operands will
4809 use several registers... */
/* For each auxiliary register ar0..ar7 three producer cases are
   costed: a plain set (SET_USE_COST), a set via an LDA instruction
   (SETLDA_USE_COST), and a read (READ_USE_COST).  The eight register
   groups below are identical except for the register number.  */
4810 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4811 max = SET_USE_COST > max ? SET_USE_COST : max;
4812 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4813 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4814 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4815 max = READ_USE_COST > max ? READ_USE_COST : max;
4817 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4818 max = SET_USE_COST > max ? SET_USE_COST : max;
4819 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4820 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4821 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4822 max = READ_USE_COST > max ? READ_USE_COST : max;
4824 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4825 max = SET_USE_COST > max ? SET_USE_COST : max;
4826 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4827 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4828 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4829 max = READ_USE_COST > max ? READ_USE_COST : max;
4831 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4832 max = SET_USE_COST > max ? SET_USE_COST : max;
4833 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4834 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4835 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4836 max = READ_USE_COST > max ? READ_USE_COST : max;
4838 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4839 max = SET_USE_COST > max ? SET_USE_COST : max;
4840 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4841 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4842 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4843 max = READ_USE_COST > max ? READ_USE_COST : max;
4845 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4846 max = SET_USE_COST > max ? SET_USE_COST : max;
4847 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4848 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4849 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4850 max = READ_USE_COST > max ? READ_USE_COST : max;
4852 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4853 max = SET_USE_COST > max ? SET_USE_COST : max;
4854 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4855 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4856 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4857 max = READ_USE_COST > max ? READ_USE_COST : max;
4859 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4860 max = SET_USE_COST > max ? SET_USE_COST : max;
4861 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4862 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4863 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4864 max = READ_USE_COST > max ? READ_USE_COST : max;
/* Index registers ir0/ir1: only set and LDA-set penalties are costed
   here; no read-then-use case appears for these two registers.  */
4866 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4867 max = SET_USE_COST > max ? SET_USE_COST : max;
4868 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4869 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4871 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4872 max = SET_USE_COST > max ? SET_USE_COST : max;
4873 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
4874 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4880 /* For other data dependencies, the default cost specified in the
/* Anti (write-after-read) and output (write-after-write) dependences
   carry no penalty on this target, per the comments below.  */
4884 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
4886 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4889 /* For c4x anti dependencies, the cost is 0. */
4892 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
4894 /* Output dependency; DEP_INSN writes a register that INSN writes some
4897 /* For c4x output dependencies, the cost is 0. */
/* Register the c4x machine-specific builtin functions with the
   front end.  NOTE(review): this excerpt is missing several original
   lines (the return type, braces, and the build_function_type calls
   that supply each builtin's type) -- verify against the full file.  */
4905 c4x_init_builtins ()
4907 tree endlink = void_list_node;
/* fast_ftoi / ansi_ftoi: float-to-int conversion builtins; "fast"
   presumably uses the quick c4x rounding, "ansi" the C-conformant
   truncation -- confirm against the expanders in c4x.md.  */
4909 builtin_function ("fast_ftoi",
4912 tree_cons (NULL_TREE, double_type_node, endlink)),
4913 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
4914 builtin_function ("ansi_ftoi",
4917 tree_cons (NULL_TREE, double_type_node, endlink)),
4918 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL, NULL_TREE);
/* fast_imult: (int, int) -> int multiply mapped to the 24-bit MPYI
   pattern (see gen_mulqi3_24_clobber in the expander below).  */
4920 builtin_function ("fast_imult",
4923 tree_cons (NULL_TREE, integer_type_node,
4924 tree_cons (NULL_TREE,
4925 integer_type_node, endlink))),
4926 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL, NULL_TREE);
/* toieee / frieee: convert between the native c4x float format and
   IEEE format; fast_invf: fast reciprocal approximation.  */
4929 builtin_function ("toieee",
4932 tree_cons (NULL_TREE, double_type_node, endlink)),
4933 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL, NULL_TREE);
4934 builtin_function ("frieee",
4937 tree_cons (NULL_TREE, double_type_node, endlink)),
4938 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL, NULL_TREE);
4939 builtin_function ("fast_invf",
4942 tree_cons (NULL_TREE, double_type_node, endlink)),
4943 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL, NULL_TREE);
/* Expand a call to one of the c4x builtins registered above into RTL.
   EXP is the CALL_EXPR; TARGET is a suggested result rtx (may be
   overridden with a fresh register when unusable).  NOTE(review): this
   excerpt omits several original lines -- local declarations, the
   switch statement, the break/return statements and closing brace --
   so the control flow shown here is incomplete.  */
4949 c4x_expand_builtin (exp, target, subtarget, mode, ignore)
4952 rtx subtarget ATTRIBUTE_UNUSED;
4953 enum machine_mode mode ATTRIBUTE_UNUSED;
4954 int ignore ATTRIBUTE_UNUSED;
/* Recover the FUNCTION_DECL, its builtin code, and the argument list
   from the CALL_EXPR tree.  */
4956 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4957 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4958 tree arglist = TREE_OPERAND (exp, 1);
/* fast_ftoi: expand via the fixqfqi pattern that clobbers extra regs.  */
4964 case C4X_BUILTIN_FIX:
4965 arg0 = TREE_VALUE (arglist);
4966 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4967 r0 = protect_from_queue (r0, 0);
4968 if (! target || ! register_operand (target, QImode))
4969 target = gen_reg_rtx (QImode);
4970 emit_insn (gen_fixqfqi_clobber (target, r0));
/* ansi_ftoi: ANSI-conformant truncating conversion.  */
4973 case C4X_BUILTIN_FIX_ANSI:
4974 arg0 = TREE_VALUE (arglist);
4975 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4976 r0 = protect_from_queue (r0, 0);
4977 if (! target || ! register_operand (target, QImode))
4978 target = gen_reg_rtx (QImode);
4979 emit_insn (gen_fix_truncqfqi2 (target, r0));
/* fast_imult: 24-bit integer multiply; takes two QImode operands.  */
4982 case C4X_BUILTIN_MPYI:
4985 arg0 = TREE_VALUE (arglist);
4986 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4987 r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
4988 r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
4989 r0 = protect_from_queue (r0, 0);
4990 r1 = protect_from_queue (r1, 0);
4991 if (! target || ! register_operand (target, QImode))
4992 target = gen_reg_rtx (QImode);
4993 emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
/* toieee: convert native c4x float to IEEE representation.  */
4996 case C4X_BUILTIN_TOIEEE:
4999 arg0 = TREE_VALUE (arglist);
5000 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5001 r0 = protect_from_queue (r0, 0);
5002 if (! target || ! register_operand (target, QFmode))
5003 target = gen_reg_rtx (QFmode);
5004 emit_insn (gen_toieee (target, r0));
/* frieee: convert IEEE float to native format.  The operand must be
   read from memory, so a register operand is spilled to a fresh stack
   slot first; VAR_DECL/PARM_DECL arguments are forced into the stack
   via put_var_into_stack.  */
5007 case C4X_BUILTIN_FRIEEE:
5010 arg0 = TREE_VALUE (arglist);
5011 if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
5012 put_var_into_stack (arg0);
5013 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5014 r0 = protect_from_queue (r0, 0);
5015 if (register_operand (r0, QFmode))
5017 r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
5018 emit_move_insn (r1, r0);
5021 if (! target || ! register_operand (target, QFmode))
5022 target = gen_reg_rtx (QFmode);
5023 emit_insn (gen_frieee (target, r0));
/* fast_invf: fast reciprocal via the rcpfqf pattern.  */
5026 case C4X_BUILTIN_RCPF:
5029 arg0 = TREE_VALUE (arglist);
5030 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5031 r0 = protect_from_queue (r0, 0);
5032 if (! target || ! register_operand (target, QFmode))
5033 target = gen_reg_rtx (QFmode);
5034 emit_insn (gen_rcpfqf_clobber (target, r0));
/* Output the assembler directive to switch to the named section NAME
   (TI-style ".sect" syntax).  FLAGS is accepted for the named-section
   hook interface but unused.  NOTE(review): the return type, the
   declaration of NAME, and the surrounding braces fall outside this
   excerpt.  */
5041 c4x_asm_named_section (name, flags)
5043 unsigned int flags ATTRIBUTE_UNUSED;
5045 fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);