1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "insn-codes.h"
37 #include "conditions.h"
38 #include "insn-flags.h"
51 #include "c4x-protos.h"
/* SYMBOL_REF rtxes for the library routines that convert between
   HImode (two-word) integers and the QF/HF floating point modes.
   They are registered as GC roots in c4x_add_gc_roots below.  */
55 rtx fix_truncqfhi2_libfunc;
56 rtx fixuns_truncqfhi2_libfunc;
57 rtx fix_trunchfhi2_libfunc;
58 rtx fixuns_trunchfhi2_libfunc;
59 rtx floathiqf2_libfunc;
60 rtx floatunshiqf2_libfunc;
61 rtx floathihf2_libfunc;
62 rtx floatunshihf2_libfunc;
/* Non-zero when the current interrupt function is a leaf; set from
   c4x_leaf_function_p () in c4x_expand_prologue and consulted by
   c4x_isr_reg_used_p to limit the registers saved/restored.  */
64 static int c4x_leaf_function;
/* Register names to use when a register is printed as floating point.  */
66 static const char *float_reg_names[] = FLOAT_REGISTER_NAMES;
68 /* Array of the smallest class containing reg number REGNO, indexed by
69 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
70 registers are available and set the class to NO_REGS for registers
71 that the target switches say are unavailable. */
/* Smallest register class for each hard register.  The table comments
   list the modes each register can hold and the mode (if any) in which
   it is callee-saved; "No" means caller-saved.  */
73 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
75 /* Reg Modes Saved. */
76 R0R1_REGS, /* R0 QI, QF, HF No. */
77 R0R1_REGS, /* R1 QI, QF, HF No. */
78 R2R3_REGS, /* R2 QI, QF, HF No. */
79 R2R3_REGS, /* R3 QI, QF, HF No. */
80 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
81 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
82 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
83 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
84 ADDR_REGS, /* AR0 QI No. */
85 ADDR_REGS, /* AR1 QI No. */
86 ADDR_REGS, /* AR2 QI No. */
87 ADDR_REGS, /* AR3 QI QI. */
88 ADDR_REGS, /* AR4 QI QI. */
89 ADDR_REGS, /* AR5 QI QI. */
90 ADDR_REGS, /* AR6 QI QI. */
91 ADDR_REGS, /* AR7 QI QI. */
92 DP_REG, /* DP QI No. */
93 INDEX_REGS, /* IR0 QI No. */
94 INDEX_REGS, /* IR1 QI No. */
95 BK_REG, /* BK QI QI. */
96 SP_REG, /* SP QI No. */
97 ST_REG, /* ST CC No. */
98 NO_REGS, /* DIE/IE No. */
99 NO_REGS, /* IIE/IF No. */
100 NO_REGS, /* IIF/IOF No. */
101 INT_REGS, /* RS QI No. */
102 INT_REGS, /* RE QI No. */
103 RC_REG, /* RC QI No. */
104 EXT_REGS, /* R8 QI, QF, HF QI. */
105 EXT_REGS, /* R9 QI, QF, HF No. */
106 EXT_REGS, /* R10 QI, QF, HF No. */
107 EXT_REGS, /* R11 QI, QF, HF No. */
/* Mode in which the caller-save machinery should save each hard
   register; VOIDmode entries are never saved this way.  The table
   columns mirror c4x_regclass_map above.  */
110 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
112 /* Reg Modes Saved. */
113 HFmode, /* R0 QI, QF, HF No. */
114 HFmode, /* R1 QI, QF, HF No. */
115 HFmode, /* R2 QI, QF, HF No. */
116 HFmode, /* R3 QI, QF, HF No. */
117 QFmode, /* R4 QI, QF, HF QI. */
118 QFmode, /* R5 QI, QF, HF QI. */
119 QImode, /* R6 QI, QF, HF QF. */
120 QImode, /* R7 QI, QF, HF QF. */
121 QImode, /* AR0 QI No. */
122 QImode, /* AR1 QI No. */
123 QImode, /* AR2 QI No. */
124 QImode, /* AR3 QI QI. */
125 QImode, /* AR4 QI QI. */
126 QImode, /* AR5 QI QI. */
127 QImode, /* AR6 QI QI. */
128 QImode, /* AR7 QI QI. */
129 VOIDmode, /* DP QI No. */
130 QImode, /* IR0 QI No. */
131 QImode, /* IR1 QI No. */
132 QImode, /* BK QI QI. */
133 VOIDmode, /* SP QI No. */
134 VOIDmode, /* ST CC No. */
135 VOIDmode, /* DIE/IE No. */
136 VOIDmode, /* IIE/IF No. */
137 VOIDmode, /* IIF/IOF No. */
138 QImode, /* RS QI No. */
139 QImode, /* RE QI No. */
140 VOIDmode, /* RC QI No. */
141 QFmode, /* R8 QI, QF, HF QI. */
142 HFmode, /* R9 QI, QF, HF No. */
143 HFmode, /* R10 QI, QF, HF No. */
144 HFmode, /* R11 QI, QF, HF No. */
148 /* Test and compare insns in c4x.md store the information needed to
149 generate branch and scc insns here. */
/* Operands of the most recent compare, saved for the following
   branch/scc expansion (see the comment above).  GC roots.  */
151 struct rtx_def *c4x_compare_op0 = NULL_RTX;
152 struct rtx_def *c4x_compare_op1 = NULL_RTX;
/* Raw -mrpts-cycles option string; parsed with atoi () in
   c4x_override_options.  */
154 const char *c4x_rpts_cycles_string;
155 int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
/* Raw -mcpu option string; parsed in c4x_override_options.  */
156 const char *c4x_cpu_version_string;
157 int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
159 /* Pragma definitions. */
/* Trees built from the CODE_SECTION/DATA_SECTION/FUNC_* pragmas;
   all registered as GC roots in c4x_add_gc_roots.  */
161 static tree code_tree = NULL_TREE;
162 static tree data_tree = NULL_TREE;
163 static tree pure_tree = NULL_TREE;
164 static tree noreturn_tree = NULL_TREE;
165 static tree interrupt_tree = NULL_TREE;
167 /* Forward declarations */
/* PARAMS wraps the argument lists so the prototypes degrade gracefully
   when compiled by a traditional (non-prototype) C compiler.  */
168 static void c4x_add_gc_roots PARAMS ((void));
169 static int c4x_isr_reg_used_p PARAMS ((unsigned int));
170 static int c4x_leaf_function_p PARAMS ((void));
171 static int c4x_assembler_function_p PARAMS ((void));
172 static int c4x_immed_float_p PARAMS ((rtx));
173 static int c4x_a_register PARAMS ((rtx));
174 static int c4x_x_register PARAMS ((rtx));
175 static int c4x_immed_int_constant PARAMS ((rtx));
176 static int c4x_immed_float_constant PARAMS ((rtx));
177 static int c4x_K_constant PARAMS ((rtx));
178 static int c4x_N_constant PARAMS ((rtx));
179 static int c4x_O_constant PARAMS ((rtx));
180 static int c4x_R_indirect PARAMS ((rtx));
181 static int c4x_S_indirect PARAMS ((rtx));
182 static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
183 static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
184 enum machine_mode, int));
185 static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
186 static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
187 static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
188 static int c4x_parse_pragma PARAMS ((const char *, tree *, tree *));
189 static int c4x_r11_set_p PARAMS ((rtx));
190 static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
191 static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
193 /* Called to register all of our global variables with the garbage
/* Register every GC-visible global (compare operands, pragma trees,
   and the libcall SYMBOL_REFs) so the collector keeps them alive;
   the second argument is presumably the element count at each root
   address -- confirm against the ggc interface.  */
199 ggc_add_rtx_root (&c4x_compare_op0, 1);
200 ggc_add_rtx_root (&c4x_compare_op1, 1);
201 ggc_add_tree_root (&code_tree, 1);
202 ggc_add_tree_root (&data_tree, 1);
203 ggc_add_tree_root (&pure_tree, 1);
204 ggc_add_tree_root (&noreturn_tree, 1);
205 ggc_add_tree_root (&interrupt_tree, 1);
206 ggc_add_rtx_root (&smulhi3_libfunc, 1);
207 ggc_add_rtx_root (&umulhi3_libfunc, 1);
208 ggc_add_rtx_root (&fix_truncqfhi2_libfunc, 1);
209 ggc_add_rtx_root (&fixuns_truncqfhi2_libfunc, 1);
210 ggc_add_rtx_root (&fix_trunchfhi2_libfunc, 1);
211 ggc_add_rtx_root (&fixuns_trunchfhi2_libfunc, 1);
212 ggc_add_rtx_root (&floathiqf2_libfunc, 1);
213 ggc_add_rtx_root (&floatunshiqf2_libfunc, 1);
214 ggc_add_rtx_root (&floathihf2_libfunc, 1);
215 ggc_add_rtx_root (&floatunshihf2_libfunc, 1);
219 /* Override command line options.
220 Called once after all options have been parsed.
221 Mostly we process the processor
222 type and sometimes adjust other TARGET_ options. */
225 c4x_override_options ()
227 if (c4x_rpts_cycles_string)
228 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
/* Pick the CPU version implied by the -m30/-m31/... target flags;
   -mcpu (handled below) overrides this choice.  */
233 c4x_cpu_version = 30;
235 c4x_cpu_version = 31;
237 c4x_cpu_version = 32;
239 c4x_cpu_version = 33;
241 c4x_cpu_version = 40;
243 c4x_cpu_version = 44;
/* Default to a C40 when nothing else is specified.  */
245 c4x_cpu_version = 40;
247 /* -mcpu=xx overrides -m40 etc. */
248 if (c4x_cpu_version_string)
250 const char *p = c4x_cpu_version_string;
252 /* Also allow -mcpu=c30 etc. */
253 if (*p == 'c' || *p == 'C')
255 c4x_cpu_version = atoi (p);
/* Rebuild the CPU target flags to match the selected version.  */
258 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
259 C40_FLAG | C44_FLAG);
261 switch (c4x_cpu_version)
263 case 30: target_flags |= C30_FLAG; break;
264 case 31: target_flags |= C31_FLAG; break;
265 case 32: target_flags |= C32_FLAG; break;
266 case 33: target_flags |= C33_FLAG; break;
267 case 40: target_flags |= C40_FLAG; break;
268 case 44: target_flags |= C44_FLAG; break;
270 warning ("Unknown CPU version %d, using 40.\n", c4x_cpu_version);
271 c4x_cpu_version = 40;
272 target_flags |= C40_FLAG;
/* C30/31/32/33 are all members of the C3x family.  */
275 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
276 target_flags |= C3X_FLAG;
278 target_flags &= ~C3X_FLAG;
280 /* Convert foo / 8.0 into foo * 0.125, etc. */
283 /* We should phase out the following at some stage.
284 This provides compatibility with the old -mno-aliases option. */
285 if (! TARGET_ALIASES && ! flag_argument_noalias)
286 flag_argument_noalias = 1;
288 /* Register global variables with the garbage collector. */
293 /* This is called before c4x_override_options. */
/* Adjust optimization defaults for this target; LEVEL/SIZE give the
   requested -O level but are not consulted here.  */
296 c4x_optimization_options (level, size)
297 int level ATTRIBUTE_UNUSED;
298 int size ATTRIBUTE_UNUSED;
300 /* Scheduling before register allocation can screw up global
301 register allocation, especially for functions that use MPY||ADD
302 instructions. The benefit we gain we get by scheduling before
303 register allocation is probably marginal anyhow. */
304 flag_schedule_insns = 0;
308 /* Write an ASCII string. */
/* Longest run of printable characters emitted inside one quoted
   string before it is flushed.  */
310 #define C4X_ASCII_LIMIT 40
/* Emit LEN bytes at PTR to STREAM as .byte directives, using quoted
   strings for printable runs and decimal values otherwise.  */
313 c4x_output_ascii (stream, ptr, len)
318 char sbuf[C4X_ASCII_LIMIT + 1];
319 int s, l, special, first = 1, onlys;
322 fprintf (stream, "\t.byte\t");
324 for (s = l = 0; len > 0; --len, ++ptr)
328 /* Escape " and \ with a \". */
329 special = *ptr == '\"' || *ptr == '\\';
331 /* If printable - add to buff. */
332 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
337 if (s < C4X_ASCII_LIMIT - 1)
352 fprintf (stream, "\"%s\"", sbuf);
/* The TI assembler limits line length; start a new .byte line
   once about 80 columns have been emitted.  */
354 if (TARGET_TI && l >= 80 && len > 1)
356 fprintf (stream, "\n\t.byte\t");
/* Non-printable byte: emit its decimal value instead.  */
374 fprintf (stream, "%d", *ptr);
376 if (TARGET_TI && l >= 80 && len > 1)
378 fprintf (stream, "\n\t.byte\t");
/* Flush any remaining buffered printable characters.  */
389 fprintf (stream, "\"%s\"", sbuf);
392 fputc ('\n', stream);
/* Return non-zero if hard register REGNO may hold a value of mode MODE
   (backs the HARD_REGNO_MODE_OK macro).  */
397 c4x_hard_regno_mode_ok (regno, mode)
399 enum machine_mode mode;
404 case Pmode: /* Pointer (24/32 bits). */
406 case QImode: /* Integer (32 bits). */
407 return IS_INT_REGNO (regno);
409 case QFmode: /* Float, Double (32 bits). */
410 case HFmode: /* Long Double (40 bits). */
411 return IS_EXT_REGNO (regno);
413 case CCmode: /* Condition Codes. */
414 case CC_NOOVmode: /* Condition Codes. */
415 return IS_ST_REGNO (regno);
417 case HImode: /* Long Long (64 bits). */
418 /* We need two registers to store long longs. Note that
419 it is much easier to constrain the first register
420 to start on an even boundary. */
421 return IS_INT_REGNO (regno)
422 && IS_INT_REGNO (regno + 1)
426 return 0; /* We don't support these modes. */
432 /* Return non-zero if REGNO1 can be renamed to REGNO2. */
434 c4x_hard_regno_rename_ok (regno1, regno2)
438 /* We can not copy call saved registers from mode QI into QF or from
/* R6/R7 are saved in QF while R4/R5/R8 are saved in QI (see the
   c4x_regclass_map table), so renaming between the two groups would
   break the save/restore mode.  */
440 if ((regno1 == R6_REGNO || regno1 == R7_REGNO)
441 && (regno2 == R4_REGNO || regno2 == R5_REGNO || regno2 == R8_REGNO))
443 if ((regno1 == R4_REGNO || regno1 == R5_REGNO || regno1 == R8_REGNO)
444 && (regno2 == R6_REGNO || regno2 == R7_REGNO))
446 /* We cannot copy from an extended (40 bit) register to a standard
447 (32 bit) register because we only set the condition codes for
448 extended registers. */
449 if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
451 if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
456 /* The TI C3x C compiler register argument runtime model uses 6 registers,
457 AR2, R2, R3, RC, RS, RE.
459 The first two floating point arguments (float, double, long double)
460 that are found scanning from left to right are assigned to R2 and R3.
462 The remaining integer (char, short, int, long) or pointer arguments
463 are assigned to the remaining registers in the order AR2, R2, R3,
464 RC, RS, RE when scanning left to right, except for the last named
465 argument prior to an ellipsis denoting variable number of
466 arguments. We don't have to worry about the latter condition since
467 function.c treats the last named argument as anonymous (unnamed).
469 All arguments that cannot be passed in registers are pushed onto
470 the stack in reverse order (right to left). GCC handles that for us.
472 c4x_init_cumulative_args() is called at the start, so we can parse
473 the args to see how many floating point arguments and how many
474 integer (or pointer) arguments there are. c4x_function_arg() is
475 then called (sometimes repeatedly) for each argument (parsed left
476 to right) to obtain the register to pass the argument in, or zero
477 if the argument is to be passed on the stack. Once the compiler is
478 happy, c4x_function_arg_advance() is called.
480 Don't use R0 to pass arguments in, we use 0 to indicate a stack
/* Integer argument registers, one row per number of floating point
   arguments already assigned (0, 1 or 2) since floats consume R2/R3.
   A 0 entry means the argument goes on the stack (R0 is never used
   for arguments -- see the comment above).  */
483 static int c4x_int_reglist[3][6] =
485 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
486 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
487 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
/* The first two float/double/long double arguments go in R2 and R3.  */
490 static int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
493 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
494 function whose data type is FNTYPE.
495 For a library call, FNTYPE is 0. */
498 c4x_init_cumulative_args (cum, fntype, libname)
499 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
500 tree fntype; /* Tree ptr for function decl. */
501 rtx libname; /* SYMBOL_REF of library name or 0. */
503 tree param, next_param;
505 cum->floats = cum->ints = 0;
/* Debug trace of the argument scan (presumably under TARGET_DEBUG --
   the guard is not visible here).  */
512 fprintf (stderr, "\nc4x_init_cumulative_args (");
515 tree ret_type = TREE_TYPE (fntype);
517 fprintf (stderr, "fntype code = %s, ret code = %s",
518 tree_code_name[(int) TREE_CODE (fntype)],
519 tree_code_name[(int) TREE_CODE (ret_type)]);
522 fprintf (stderr, "no fntype");
525 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
528 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
/* Pre-scan the parameter list, counting float (QF/HF) and integer or
   pointer (QI/Pmode) register candidates.  */
530 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
531 param; param = next_param)
535 next_param = TREE_CHAIN (param);
537 type = TREE_VALUE (param);
538 if (type && type != void_type_node)
540 enum machine_mode mode;
542 /* If the last arg doesn't have void type then we have
543 variable arguments. */
547 if ((mode = TYPE_MODE (type)))
549 if (! MUST_PASS_IN_STACK (mode, type))
551 /* Look for float, double, or long double argument. */
552 if (mode == QFmode || mode == HFmode)
554 /* Look for integer, enumeral, boolean, char, or pointer
556 else if (mode == QImode || mode == Pmode)
565 fprintf (stderr, "%s%s, args = %d)\n",
566 cum->prototype ? ", prototype" : "",
567 cum->var ? ", variable args" : "",
572 /* Update the data in CUM to advance over an argument
573 of mode MODE and data type TYPE.
574 (TYPE is null for libcalls where that information may not be available.) */
577 c4x_function_arg_advance (cum, mode, type, named)
578 CUMULATIVE_ARGS *cum; /* Current arg information. */
579 enum machine_mode mode; /* Current arg mode. */
580 tree type; /* Type of the arg or 0 if lib support. */
581 int named; /* Whether or not the argument was named. */
584 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
585 GET_MODE_NAME (mode), named);
589 && ! MUST_PASS_IN_STACK (mode, type))
591 /* Look for float, double, or long double argument. */
592 if (mode == QFmode || mode == HFmode)
594 /* Look for integer, enumeral, boolean, char, or pointer argument. */
595 else if (mode == QImode || mode == Pmode)
/* No type information: this is a libcall argument.  */
598 else if (! TARGET_MEMPARM && ! type)
600 /* Handle libcall arguments. */
601 if (mode == QFmode || mode == HFmode)
603 else if (mode == QImode || mode == Pmode)
610 /* Define where to put the arguments to a function. Value is zero to
611 push the argument on the stack, or a hard register in which to
614 MODE is the argument's machine mode.
615 TYPE is the data type of the argument (as a tree).
616 This is null for libcalls where that information may
618 CUM is a variable of type CUMULATIVE_ARGS which gives info about
619 the preceding args and about the function being called.
620 NAMED is nonzero if this argument is a named parameter
621 (otherwise it is an extra parameter matching an ellipsis). */
624 c4x_function_arg (cum, mode, type, named)
625 CUMULATIVE_ARGS *cum; /* Current arg information. */
626 enum machine_mode mode; /* Current arg mode. */
627 tree type; /* Type of the arg or 0 if lib support. */
628 int named; /* != 0 for normal args, == 0 for ... args. */
630 int reg = 0; /* Default to passing argument on stack. */
634 /* We can handle at most 2 floats in R2, R3. */
635 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
637 /* We can handle at most 6 integers minus number of floats passed
639 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
640 6 - cum->maxfloats : cum->ints;
642 /* If there is no prototype, assume all the arguments are integers. */
643 if (! cum->prototype)
646 cum->ints = cum->floats = 0;
650 /* This marks the last argument. We don't need to pass this through
652 if (type == void_type_node)
658 && ! MUST_PASS_IN_STACK (mode, type))
660 /* Look for float, double, or long double argument. */
661 if (mode == QFmode || mode == HFmode)
663 if (cum->floats < cum->maxfloats)
664 reg = c4x_fp_reglist[cum->floats]
666 /* Look for integer, enumeral, boolean, char, or pointer argument. */
667 else if (mode == QImode || mode == Pmode)
669 if (cum->ints < cum->maxints)
670 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
673 else if (! TARGET_MEMPARM && ! type)
675 /* We could use a different argument calling model for libcalls,
676 since we're only calling functions in libgcc. Thus we could
677 pass arguments for long longs in registers rather than on the
678 stack. In the meantime, use the odd TI format. We make the
679 assumption that we won't have more than two floating point
680 args, six integer args, and that all the arguments are of the
682 if (mode == QFmode || mode == HFmode)
683 reg = c4x_fp_reglist[cum->floats];
684 else if (mode == QImode || mode == Pmode)
685 reg = c4x_int_reglist[0][cum->ints];
690 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
691 GET_MODE_NAME (mode), named);
693 fprintf (stderr, ", reg=%s", reg_names[reg]);
695 fprintf (stderr, ", stack");
696 fprintf (stderr, ")\n");
/* reg == 0 means pass on the stack (callers treat 0 as "no register").  */
699 return gen_rtx_REG (mode, reg);
/* Implement va_start.  For non-stdarg (old varargs) functions skip two
   words back -- presumably past the saved return address and frame
   pointer; confirm against the stack layout.  */
706 c4x_va_start (stdarg_p, valist, nextarg)
711 nextarg = plus_constant (nextarg, stdarg_p ? 0 : UNITS_PER_WORD * 2);
713 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
717 /* C[34]x arguments grow in weird ways (downwards) that the standard
718 varargs stuff can't handle.. */
720 c4x_va_arg (valist, type)
/* Pre-decrement the arg pointer by the argument size, then use the
   decremented address -- arguments grow downwards, see above.  */
725 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
726 build_int_2 (int_size_in_bytes (type), 0));
727 TREE_SIDE_EFFECTS (t) = 1;
729 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
/* Return non-zero if an interrupt handler must save/restore hard
   register REGNO.  */
734 c4x_isr_reg_used_p (regno)
737 /* Don't save/restore FP or ST, we handle them separately. */
738 if (regno == FRAME_POINTER_REGNUM
739 || IS_ST_REGNO (regno))
742 /* We could be a little smarter abut saving/restoring DP.
743 We'll only save if for the big memory model or if
744 we're paranoid. ;-) */
745 if (IS_DP_REGNO (regno))
746 return ! TARGET_SMALL || TARGET_PARANOID;
748 /* Only save/restore regs in leaf function that are used. */
749 if (c4x_leaf_function)
750 return regs_ever_live[regno] && fixed_regs[regno] == 0;
752 /* Only save/restore regs that are used by the ISR and regs
753 that are likely to be used by functions the ISR calls
754 if they are not fixed. */
755 return IS_EXT_REGNO (regno)
756 || ((regs_ever_live[regno] || call_used_regs[regno])
757 && fixed_regs[regno] == 0);
/* Return non-zero if the current function makes no calls (or pretends
   not to via the leaf_pretend attribute).  */
762 c4x_leaf_function_p ()
764 /* A leaf function makes no calls, so we only need
765 to save/restore the registers we actually use.
766 For the global variable leaf_function to be set, we need
767 to define LEAF_REGISTERS and all that it entails.
768 Let's check ourselves... */
770 if (lookup_attribute ("leaf_pretend",
771 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
774 /* Use the leaf_pretend attribute at your own risk. This is a hack
775 to speed up ISRs that call a function infrequently where the
776 overhead of saving and restoring the additional registers is not
777 warranted. You must save and restore the additional registers
778 required by the called function. Caveat emptor. Here's enough
781 if (leaf_function_p ())
/* Return non-zero if the current function carries the __assembler__
   or naked attribute (no prologue/epilogue should be generated).  */
789 c4x_assembler_function_p ()
793 type = TREE_TYPE (current_function_decl);
794 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
795 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
/* Return non-zero if the current function is an interrupt handler:
   either marked with the interrupt attribute or named in the TI
   c_intNN convention.  */
800 c4x_interrupt_function_p ()
802 if (lookup_attribute ("interrupt",
803 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
806 /* Look for TI style c_intnn. */
807 return current_function_name[0] == 'c'
808 && current_function_name[1] == '_'
809 && current_function_name[2] == 'i'
810 && current_function_name[3] == 'n'
811 && current_function_name[4] == 't'
812 && ISDIGIT (current_function_name[5])
813 && ISDIGIT (current_function_name[6]);
/* Emit the RTL prologue for the current function.  Handles three
   cases: __assembler__ functions (no prologue), __interrupt__
   functions (save every live register) and ordinary functions.
   Every emitted insn is flagged RTX_FRAME_RELATED_P for unwind/dwarf
   bookkeeping.  */
817 c4x_expand_prologue ()
820 int size = get_frame_size ();
823 /* In functions where ar3 is not used but frame pointers are still
824 specified, frame pointers are not adjusted (if >= -O2) and this
825 is used so it won't needlessly push the frame pointer. */
828 /* For __assembler__ function don't build a prologue. */
829 if (c4x_assembler_function_p ())
834 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
835 if (profile_block_flag == 2)
837 FUNCTION_BLOCK_PROFILER_EXIT
841 /* For __interrupt__ function build specific prologue. */
842 if (c4x_interrupt_function_p ())
844 c4x_leaf_function = c4x_leaf_function_p ();
/* Save the status register first, then set up AR3 as frame pointer.  */
846 insn = emit_insn (gen_push_st ());
847 RTX_FRAME_RELATED_P (insn) = 1;
850 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
851 RTX_FRAME_RELATED_P (insn) = 1;
852 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
853 gen_rtx_REG (QImode, SP_REGNO)));
854 RTX_FRAME_RELATED_P (insn) = 1;
855 /* We require that an ISR uses fewer than 32768 words of
856 local variables, otherwise we have to go to lots of
857 effort to save a register, load it with the desired size,
858 adjust the stack pointer, and then restore the modified
859 register. Frankly, I think it is a poor ISR that
860 requires more than 32767 words of local temporary
863 fatal ("ISR %s requires %d words of local vars, max is 32767.",
864 current_function_name, size);
865 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
866 gen_rtx_REG (QImode, SP_REGNO),
868 RTX_FRAME_RELATED_P (insn) = 1;
/* Save every register the ISR might clobber; extended registers are
   pushed twice -- once as QI and once as QF -- to preserve all 40 bits.  */
870 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
872 if (c4x_isr_reg_used_p (regno))
874 if (regno == DP_REGNO)
876 insn = emit_insn (gen_push_dp ());
877 RTX_FRAME_RELATED_P (insn) = 1;
881 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
882 RTX_FRAME_RELATED_P (insn) = 1;
883 if (IS_EXT_REGNO (regno))
885 insn = emit_insn (gen_pushqf
886 (gen_rtx_REG (QFmode, regno)));
887 RTX_FRAME_RELATED_P (insn) = 1;
892 /* We need to clear the repeat mode flag if the ISR is
893 going to use a RPTB instruction or uses the RC, RS, or RE
895 if (regs_ever_live[RC_REGNO]
896 || regs_ever_live[RS_REGNO]
897 || regs_ever_live[RE_REGNO])
899 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
900 RTX_FRAME_RELATED_P (insn) = 1;
903 /* Reload DP reg if we are paranoid about some turkey
904 violating small memory model rules. */
905 if (TARGET_SMALL && TARGET_PARANOID)
907 insn = emit_insn (gen_set_ldp_prologue
908 (gen_rtx_REG (QImode, DP_REGNO),
909 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
910 RTX_FRAME_RELATED_P (insn) = 1;
/* Ordinary (non-ISR) function prologue starts here.  */
915 if (frame_pointer_needed)
918 || (current_function_args_size != 0)
921 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
922 RTX_FRAME_RELATED_P (insn) = 1;
923 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
924 gen_rtx_REG (QImode, SP_REGNO)));
925 RTX_FRAME_RELATED_P (insn) = 1;
930 /* Since ar3 is not used, we don't need to push it. */
936 /* If we use ar3, we need to push it. */
938 if ((size != 0) || (current_function_args_size != 0))
940 /* If we are omitting the frame pointer, we still have
941 to make space for it so the offsets are correct
942 unless we don't use anything on the stack at all. */
949 /* Local vars are too big, it will take multiple operations
/* Frame >= 32768 words: build the size constant in R1 piecewise
   (high part, shift, OR in low part) before adding it to SP.  */
953 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
954 GEN_INT(size >> 16)));
955 RTX_FRAME_RELATED_P (insn) = 1;
956 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
957 gen_rtx_REG (QImode, R1_REGNO),
959 RTX_FRAME_RELATED_P (insn) = 1;
963 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
964 GEN_INT(size & ~0xffff)));
965 RTX_FRAME_RELATED_P (insn) = 1;
967 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
968 gen_rtx_REG (QImode, R1_REGNO),
969 GEN_INT(size & 0xffff)));
970 RTX_FRAME_RELATED_P (insn) = 1;
971 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
972 gen_rtx_REG (QImode, SP_REGNO),
973 gen_rtx_REG (QImode, R1_REGNO)));
974 RTX_FRAME_RELATED_P (insn) = 1;
978 /* Local vars take up less than 32767 words, so we can directly
980 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
981 gen_rtx_REG (QImode, SP_REGNO),
983 RTX_FRAME_RELATED_P (insn) = 1;
/* Push the call-saved registers this function actually uses.  */
986 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
988 if (regs_ever_live[regno] && ! call_used_regs[regno])
990 if ((regno == R6_REGNO) || (regno == R7_REGNO))
992 /* R6 and R7 are saved as floating point. */
993 if (TARGET_PRESERVE_FLOAT)
995 insn = emit_insn (gen_pushqi
996 (gen_rtx_REG (QImode, regno)));
997 RTX_FRAME_RELATED_P (insn) = 1;
999 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
1000 RTX_FRAME_RELATED_P (insn) = 1;
1002 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
1004 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
1005 RTX_FRAME_RELATED_P (insn) = 1;
/* Emit the RTL epilogue for the current function -- the mirror image
   of c4x_expand_prologue: restore registers in reverse order, tear
   down the frame and emit the appropriate return insn.  */
1014 c4x_expand_epilogue()
1020 int size = get_frame_size ();
1022 /* For __assembler__ function build no epilogue. */
1023 if (c4x_assembler_function_p ())
1025 insn = emit_jump_insn (gen_return_from_epilogue ());
1026 RTX_FRAME_RELATED_P (insn) = 1;
1030 /* For __interrupt__ function build specific epilogue. */
1031 if (c4x_interrupt_function_p ())
1033 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1035 if (! c4x_isr_reg_used_p (regno))
1037 if (regno == DP_REGNO)
1039 insn = emit_insn (gen_pop_dp ());
1040 RTX_FRAME_RELATED_P (insn) = 1;
1044 /* We have to use unspec because the compiler will delete insns
1045 that are not call-saved. */
1046 if (IS_EXT_REGNO (regno))
1048 insn = emit_insn (gen_popqf_unspec
1049 (gen_rtx_REG (QFmode, regno)));
1050 RTX_FRAME_RELATED_P (insn) = 1;
1052 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1053 RTX_FRAME_RELATED_P (insn) = 1;
/* Deallocate locals, restore AR3 and ST, and return from interrupt.  */
1058 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1059 gen_rtx_REG (QImode, SP_REGNO),
1061 RTX_FRAME_RELATED_P (insn) = 1;
1062 insn = emit_insn (gen_popqi
1063 (gen_rtx_REG (QImode, AR3_REGNO)));
1064 RTX_FRAME_RELATED_P (insn) = 1;
1066 insn = emit_insn (gen_pop_st ());
1067 RTX_FRAME_RELATED_P (insn) = 1;
1068 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1069 RTX_FRAME_RELATED_P (insn) = 1;
1073 if (frame_pointer_needed)
1076 || (current_function_args_size != 0)
/* Load the return address into R2 through the frame pointer.  */
1080 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1081 gen_rtx_MEM (QImode,
1083 (QImode, gen_rtx_REG (QImode,
1086 RTX_FRAME_RELATED_P (insn) = 1;
1088 /* We already have the return value and the fp,
1089 so we need to add those to the stack. */
1096 /* Since ar3 is not used for anything, we don't need to
1103 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1104 if (size || current_function_args_size)
1106 /* If we are omitting the frame pointer, we still have
1107 to make space for it so the offsets are correct
1108 unless we don't use anything on the stack at all. */
1113 /* Now restore the saved registers, putting in the delayed branch
1115 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1117 if (regs_ever_live[regno] && ! call_used_regs[regno])
1119 if (regno == AR3_REGNO && dont_pop_ar3)
1122 /* R6 and R7 are saved as floating point. */
1123 if ((regno == R6_REGNO) || (regno == R7_REGNO))
1125 insn = emit_insn (gen_popqf_unspec
1126 (gen_rtx_REG (QFmode, regno)));
1127 RTX_FRAME_RELATED_P (insn) = 1;
1128 if (TARGET_PRESERVE_FLOAT)
1130 insn = emit_insn (gen_popqi_unspec
1131 (gen_rtx_REG (QImode, regno)));
1132 RTX_FRAME_RELATED_P (insn) = 1;
1137 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1138 RTX_FRAME_RELATED_P (insn) = 1;
1143 if (frame_pointer_needed)
1146 || (current_function_args_size != 0)
1149 /* Restore the old FP. */
1152 (gen_rtx_REG (QImode, AR3_REGNO),
1153 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1155 RTX_FRAME_RELATED_P (insn) = 1;
1161 /* Local vars are too big, it will take multiple operations
/* Frame >= 32768 words: build the size constant in R3 piecewise,
   mirroring the R1 sequence in the prologue, then subtract from SP.  */
1165 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1166 GEN_INT(size >> 16)));
1167 RTX_FRAME_RELATED_P (insn) = 1;
1168 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1169 gen_rtx_REG (QImode, R3_REGNO),
1171 RTX_FRAME_RELATED_P (insn) = 1;
1175 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1176 GEN_INT(size & ~0xffff)));
1177 RTX_FRAME_RELATED_P (insn) = 1;
1179 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1180 gen_rtx_REG (QImode, R3_REGNO),
1181 GEN_INT(size & 0xffff)));
1182 RTX_FRAME_RELATED_P (insn) = 1;
1183 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1184 gen_rtx_REG (QImode, SP_REGNO),
1185 gen_rtx_REG (QImode, R3_REGNO)));
1186 RTX_FRAME_RELATED_P (insn) = 1;
1190 /* Local vars take up less than 32768 words, so we can directly
1191 subtract the number. */
1192 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1193 gen_rtx_REG (QImode, SP_REGNO),
1195 RTX_FRAME_RELATED_P (insn) = 1;
/* Return through R2 when it holds the return address, otherwise use
   the normal return pattern.  */
1200 insn = emit_jump_insn (gen_return_indirect_internal
1201 (gen_rtx_REG (QImode, R2_REGNO)));
1202 RTX_FRAME_RELATED_P (insn) = 1;
1206 insn = emit_jump_insn (gen_return_from_epilogue ());
1207 RTX_FRAME_RELATED_P (insn) = 1;
/* Return non-zero if the current function needs no epilogue code:
   after reload, an ordinary function with no frame, no stack args,
   no alloca, and no call-saved registers live (AR3 excepted).  */
1214 c4x_null_epilogue_p ()
1218 if (reload_completed
1219 && ! c4x_assembler_function_p ()
1220 && ! c4x_interrupt_function_p ()
1221 && ! current_function_calls_alloca
1222 && ! current_function_args_size
1223 && ! (profile_block_flag == 2)
1225 && ! get_frame_size ())
1227 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1228 if (regs_ever_live[regno] && ! call_used_regs[regno]
1229 && (regno != AR3_REGNO))
/* Expand a move of mode MODE between OPERANDS[0] (dest) and
   OPERANDS[1] (src), massaging operands that the hardware cannot
   handle directly (symbolic addresses, large constants, mixed-mode
   subregs) before emitting the normal move pattern.  */
1238 c4x_emit_move_sequence (operands, mode)
1240 enum machine_mode mode;
1242 rtx op0 = operands[0];
1243 rtx op1 = operands[1];
1245 if (! reload_in_progress
1248 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1249 op1 = force_reg (mode, op1);
1251 if (GET_CODE (op1) == LO_SUM
1252 && GET_MODE (op1) == Pmode
1253 && dp_reg_operand (XEXP (op1, 0), mode))
1255 /* expand_increment will sometimes create a LO_SUM immediate
1257 op1 = XEXP (op1, 1);
1259 else if (symbolic_address_operand (op1, mode))
1261 if (TARGET_LOAD_ADDRESS)
1263 /* Alias analysis seems to do a better job if we force
1264 constant addresses to memory after reload. */
1265 emit_insn (gen_load_immed_address (op0, op1));
1270 /* Stick symbol or label address into the constant pool. */
1271 op1 = force_const_mem (Pmode, op1);
1274 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1276 /* We could be a lot smarter about loading some of these
1278 op1 = force_const_mem (mode, op1);
1281 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1282 and emit associated (HIGH (SYMREF)) if large memory model.
1283 c4x_legitimize_address could be used to do this,
1284 perhaps by calling validize_address. */
1285 if (TARGET_EXPOSE_LDP
1286 && ! (reload_in_progress || reload_completed)
1287 && GET_CODE (op1) == MEM
1288 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1290 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1292 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1293 op1 = change_address (op1, mode,
1294 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
/* Same transformation for a symbolic destination address.  */
1297 if (TARGET_EXPOSE_LDP
1298 && ! (reload_in_progress || reload_completed)
1299 && GET_CODE (op0) == MEM
1300 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1302 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1304 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1305 op0 = change_address (op0, mode,
1306 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
1309 if (GET_CODE (op0) == SUBREG
1310 && mixed_subreg_operand (op0, mode))
1312 /* We should only generate these mixed mode patterns
1313 during RTL generation. If we need do it later on
1314 then we'll have to emit patterns that won't clobber CC. */
1315 if (reload_in_progress || reload_completed)
1317 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1318 op0 = SUBREG_REG (op0);
1319 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1321 op0 = copy_rtx (op0);
1322 PUT_MODE (op0, QImode);
1328 emit_insn (gen_storeqf_int_clobber (op0, op1));
1334 if (GET_CODE (op1) == SUBREG
1335 && mixed_subreg_operand (op1, mode))
1337 /* We should only generate these mixed mode patterns
1338 during RTL generation. If we need do it later on
1339 then we'll have to emit patterns that won't clobber CC. */
1340 if (reload_in_progress || reload_completed)
1342 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1343 op1 = SUBREG_REG (op1)
1344 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1346 op1 = copy_rtx (op1);
1347 PUT_MODE (op1, QImode);
1353 emit_insn (gen_loadqf_int_clobber (op0, op1));
/* Constants too wide for a single insn are loaded with dedicated
   big-constant patterns.  */
1360 && reg_operand (op0, mode)
1361 && const_int_operand (op1, mode)
1362 && ! IS_INT16_CONST (INTVAL (op1))
1363 && ! IS_HIGH_CONST (INTVAL (op1))
1365 emit_insn (gen_loadqi_big_constant (op0, op1));
1370 && reg_operand (op0, mode)
1371 && const_int_operand (op1, mode))
1373 emit_insn (gen_loadhi_big_constant (op0, op1));
1377 /* Adjust operands in case we have modified them. */
1381 /* Emit normal pattern. */
/* Emit a library call LIBCALL taking NOPERANDS operands, attaching an
   equivalent rtx of code CODE so the optimizers can treat the call as
   a pure operation (via emit_libcall_block).  One- and two-operand
   forms are handled; any other count is a fatal error.
   NOTE(review): listing is incomplete -- parameter declarations for
   libcall/code/noperands/operands and the switch scaffolding are on
   source lines not shown.  */
1387 c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1390 enum machine_mode dmode;
1391 enum machine_mode smode;
1403 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1404 operands[1], smode);
1405 equiv = gen_rtx (code, dmode, operands[1]);
1409 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1410 operands[1], smode, operands[2], smode);
1411 equiv = gen_rtx (code, dmode, operands[1], operands[2]);
1415 fatal ("c4x_emit_libcall: Bad number of operands");
1418 insns = get_insns ();
1420 emit_libcall_block (insns, operands[0], ret, equiv);
/* Convenience wrapper: three-operand libcall where source and
   destination share the same mode.  */
1425 c4x_emit_libcall3 (libcall, code, mode, operands)
1428 enum machine_mode mode;
1431 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
/* Emit a widening-multiply-high libcall: the attached equivalent rtx
   is TRUNCATE (LSHIFTRT (MULT (ext op1, ext op2))), i.e. the upper
   word of the HImode product, where CODE supplies the sign/zero
   extension.  NOTE(review): listing is incomplete -- the shift count
   operand of the LSHIFTRT falls on a source line not shown.  */
1436 c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1439 enum machine_mode mode;
1447 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1448 operands[1], mode, operands[2], mode);
1449 equiv = gen_rtx_TRUNCATE (mode,
1450 gen_rtx_LSHIFTRT (HImode,
1451 gen_rtx_MULT (HImode,
1452 gen_rtx (code, HImode, operands[1]),
1453 gen_rtx (code, HImode, operands[2])),
1455 insns = get_insns ();
1457 emit_libcall_block (insns, operands[0], ret, equiv);
1461 /* Set the SYMBOL_REF_FLAG for a function decl. However, wo do not
1462 yet use this info. */
/* Marks the SYMBOL_REF of DECL when DECL is (or has the type of) a
   function; both the FUNCTION_TYPE and FUNCTION_DECL checks set the
   same flag.  */
1464 c4x_encode_section_info (decl)
1468 if (TREE_CODE (TREE_TYPE (decl)) == FUNCTION_TYPE)
1469 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
1471 if (TREE_CODE (decl) == FUNCTION_DECL)
1472 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
/* Worker for GO_IF_LEGITIMATE_ADDRESS: decompose ADDR into base,
   index and displacement, then validate each piece for MODE.  STRICT
   nonzero means hard-register checks (REGNO_OK_FOR_*); otherwise
   pseudos are also accepted.  NOTE(review): listing is incomplete --
   the switch cases, several returns and the closing scaffolding are
   on source lines not shown.  */
1478 c4x_check_legit_addr (mode, addr, strict)
1479 enum machine_mode mode;
1483 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1484 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1485 rtx disp = NULL_RTX; /* Displacement. */
1488 code = GET_CODE (addr);
1491 /* Register indirect with auto increment/decrement. We don't
1492 allow SP here---push_operand should recognise an operand
1493 being pushed on the stack. */
/* Auto-inc/dec addressing is only valid for single-word modes.  */
1498 if (mode != QImode && mode != QFmode)
1502 base = XEXP (addr, 0);
1510 rtx op0 = XEXP (addr, 0);
1511 rtx op1 = XEXP (addr, 1);
1513 if (mode != QImode && mode != QFmode)
1517 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1519 base = XEXP (op1, 0);
1522 if (REG_P (XEXP (op1, 1)))
1523 indx = XEXP (op1, 1);
1525 disp = XEXP (op1, 1);
1529 /* Register indirect. */
1534 /* Register indirect with displacement or index. */
1537 rtx op0 = XEXP (addr, 0);
1538 rtx op1 = XEXP (addr, 1);
1539 enum rtx_code code0 = GET_CODE (op0);
1546 base = op0; /* Base + index. */
1548 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1556 base = op0; /* Base + displacement. */
1567 /* Direct addressing with DP register. */
1570 rtx op0 = XEXP (addr, 0);
1571 rtx op1 = XEXP (addr, 1);
1573 /* HImode and HFmode direct memory references aren't truly
1574 offsettable (consider case at end of data page). We
1575 probably get better code by loading a pointer and using an
1576 indirect memory reference. */
1577 if (mode == HImode || mode == HFmode)
1580 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1583 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1586 if (GET_CODE (op1) == CONST)
1592 /* Direct addressing with some work for the assembler... */
1594 /* Direct addressing. */
1597 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1599 /* These need to be converted to a LO_SUM (...).
1600 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1603 /* Do not allow direct memory access to absolute addresses.
1604 This is more pain than it's worth, especially for the
1605 small memory model where we can't guarantee that
1606 this address is within the data page---we don't want
1607 to modify the DP register in the small memory model,
1608 even temporarily, since an interrupt can sneak in.... */
1612 /* Indirect indirect addressing. */
1617 fatal_insn ("Using CONST_DOUBLE for address", addr);
1623 /* Validate the base register. */
1626 /* Check that the address is offsettable for HImode and HFmode. */
1627 if (indx && (mode == HImode || mode == HFmode))
1630 /* Handle DP based stuff. */
1631 if (REGNO (base) == DP_REGNO)
1633 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1635 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1639 /* Now validate the index register. */
1642 if (GET_CODE (indx) != REG)
1644 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1646 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1650 /* Validate displacement. */
1653 if (GET_CODE (disp) != CONST_INT)
1655 if (mode == HImode || mode == HFmode)
1657 /* The offset displacement must be legitimate. */
1658 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1663 if (! IS_DISP8_CONST (INTVAL (disp)))
1666 /* Can't add an index with a disp. */
/* LEGITIMIZE_ADDRESS worker.  For symbolic addresses: HImode/HFmode
   references are forced into a fresh pointer register (so they become
   offsettable); otherwise the DP register is loaded and a LO_SUM
   address is returned.  NOTE(review): listing is incomplete -- the
   fall-through return for non-symbolic addresses is on source lines
   not shown.  */
1675 c4x_legitimize_address (orig, mode)
1676 rtx orig ATTRIBUTE_UNUSED;
1677 enum machine_mode mode ATTRIBUTE_UNUSED;
1679 if (GET_CODE (orig) == SYMBOL_REF
1680 || GET_CODE (orig) == LABEL_REF)
1682 if (mode == HImode || mode == HFmode)
1684 /* We need to force the address into
1685 a register so that it is offsettable. */
1686 rtx addr_reg = gen_reg_rtx (Pmode);
1687 emit_move_insn (addr_reg, orig);
1692 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1695 emit_insn (gen_set_ldp (dp_reg, orig));
1697 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1705 /* Provide the costs of an addressing mode that contains ADDR.
1706 If ADDR is not a valid address, its cost is irrelevant.
1707 This is used in cse and loop optimisation to determine
1708 if it is worthwhile storing a common address into a register.
1709 Unfortunately, the C4x address cost depends on other operands. */
/* NOTE(review): listing is incomplete -- the case labels and most
   return statements carrying the actual cost values are on source
   lines not shown.  */
1712 c4x_address_cost (addr)
1715 switch (GET_CODE (addr))
1726 /* These shouldn't be directly generated. */
1734 rtx op1 = XEXP (addr, 1);
1736 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1737 return TARGET_SMALL ? 3 : 4;
1739 if (GET_CODE (op1) == CONST)
1741 rtx offset = const0_rtx;
1743 op1 = eliminate_constant_term (op1, &offset);
1745 /* ??? These costs need rethinking... */
1746 if (GET_CODE (op1) == LABEL_REF)
1749 if (GET_CODE (op1) != SYMBOL_REF)
1752 if (INTVAL (offset) == 0)
1757 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1763 register rtx op0 = XEXP (addr, 0);
1764 register rtx op1 = XEXP (addr, 1);
1766 if (GET_CODE (op0) != REG)
1769 switch (GET_CODE (op1))
1775 /* This cost for REG+REG must be greater than the cost
1776 for REG if we want autoincrement addressing modes. */
1780 /* The following tries to improve GIV combination
1781 in strength reduce but appears not to help. */
1782 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1785 if (IS_DISP1_CONST (INTVAL (op1)))
1788 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
/* Emit a compare of X and Y into the status register (ST) and return
   the CC register rtx; CC_NOOVmode combined with a signed ordering
   code is apparently rejected (the guarded action is on a source line
   not shown).  */
1803 c4x_gen_compare_reg (code, x, y)
1807 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1810 if (mode == CC_NOOVmode
1811 && (code == LE || code == GE || code == LT || code == GT))
1814 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1815 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1816 gen_rtx_COMPARE (mode, x, y)));
/* Build the assembler template for a conditional branch into the
   static buffer STR, deriving delayed/annulled variants from the
   final_sequence delay-slot insn.  NOTE(review): only a fragment of
   this function is visible; the template construction follows on
   source lines not shown.  Returned pointer references static
   storage, so the result must be consumed before the next call.  */
1821 c4x_output_cbranch (form, seq)
1830 static char str[100];
1834 delay = XVECEXP (final_sequence, 0, 1);
1835 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1836 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1837 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1840 cp = &str [strlen (str)];
/* PRINT_OPERAND worker: emit operand OP to FILE according to the
   modifier LETTER (%A direct-address '@', %I reversed condition,
   %L log2 of constant, %N ones-complement, %K/%M emit an ldp/ldpk
   when a direct address is involved, %O offset address, %C/%U call
   forms, plus the condition-code mnemonics).  NOTE(review): listing
   is incomplete -- case labels, break statements and the closing
   scaffolding are on source lines not shown.  */
1865 c4x_print_operand (file, op, letter)
1866 FILE *file; /* File to write to. */
1867 rtx op; /* Operand to print. */
1868 int letter; /* %<letter> or 0. */
1875 case '#': /* Delayed. */
1877 asm_fprintf (file, "d");
1881 code = GET_CODE (op);
1884 case 'A': /* Direct address. */
1885 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1886 asm_fprintf (file, "@");
1889 case 'H': /* Sethi. */
1890 output_addr_const (file, op);
1893 case 'I': /* Reversed condition. */
1894 code = reverse_condition (code);
1897 case 'L': /* Log 2 of constant. */
1898 if (code != CONST_INT)
1899 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1900 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1903 case 'N': /* Ones complement of small constant. */
1904 if (code != CONST_INT)
1905 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1906 fprintf (file, "%d", ~INTVAL (op));
1909 case 'K': /* Generate ldp(k) if direct address. */
1912 && GET_CODE (XEXP (op, 0)) == LO_SUM
1913 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1914 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1916 op1 = XEXP (XEXP (op, 0), 1);
1917 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1919 asm_fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1920 output_address (XEXP (adj_offsettable_operand (op, 1), 0));
1921 asm_fprintf (file, "\n");
1926 case 'M': /* Generate ldp(k) if direct address. */
1927 if (! TARGET_SMALL /* Only used in asm statements. */
1929 && (GET_CODE (XEXP (op, 0)) == CONST
1930 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1932 asm_fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1933 output_address (XEXP (op, 0));
1934 asm_fprintf (file, "\n\t");
1938 case 'O': /* Offset address. */
1939 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1941 else if (code == MEM)
1942 output_address (XEXP (adj_offsettable_operand (op, 1), 0));
1943 else if (code == REG)
1944 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1946 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1949 case 'C': /* Call. */
1952 case 'U': /* Call/callu. */
1953 if (code != SYMBOL_REF)
1954 asm_fprintf (file, "u");
/* Default operand printing: float registers print under their float
   names, ordinary registers by reg_names.  */
1964 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1966 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1968 fprintf (file, "%s", reg_names[REGNO (op)]);
1972 output_address (XEXP (op, 0));
1980 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
1981 REAL_VALUE_TO_DECIMAL (r, "%20f", str);
1982 fprintf (file, "%s", str);
1987 fprintf (file, "%d", INTVAL (op));
/* Condition-code mnemonics for the comparison rtx codes.  */
1991 asm_fprintf (file, "ne");
1995 asm_fprintf (file, "eq");
1999 asm_fprintf (file, "ge");
2003 asm_fprintf (file, "gt");
2007 asm_fprintf (file, "le");
2011 asm_fprintf (file, "lt");
2015 asm_fprintf (file, "hs");
2019 asm_fprintf (file, "hi");
2023 asm_fprintf (file, "ls");
2027 asm_fprintf (file, "lo");
2031 output_addr_const (file, op);
2035 output_addr_const (file, XEXP (op, 0));
2042 fatal_insn ("c4x_print_operand: Bad operand case", op);
/* PRINT_OPERAND_ADDRESS worker: emit the assembler syntax for ADDR
   (*ARn, *++ARn, *ARn++(d), *+ARn(IRn), @symbol etc.).
   NOTE(review): listing is incomplete -- case labels, breaks and some
   argument lines of the fprintf calls are on source lines not
   shown.  */
2049 c4x_print_operand_address (file, addr)
2053 switch (GET_CODE (addr))
2056 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2060 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2064 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
/* POST_MODIFY: *ARn++(d) / *ARn--(d) with register or constant
   modifier.  */
2069 rtx op0 = XEXP (XEXP (addr, 1), 0);
2070 rtx op1 = XEXP (XEXP (addr, 1), 1);
2072 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2073 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2074 reg_names[REGNO (op1)]);
2075 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2076 fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
2078 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2079 fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
2081 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2082 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2083 reg_names[REGNO (op1)]);
2085 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
/* PRE_MODIFY: *++ARn(d) / *--ARn(d).  */
2091 rtx op0 = XEXP (XEXP (addr, 1), 0);
2092 rtx op1 = XEXP (XEXP (addr, 1), 1);
2094 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2095 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2096 reg_names[REGNO (op1)]);
2097 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2098 fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
2100 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2101 fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
2103 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2104 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2105 reg_names[REGNO (op1)]);
2107 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2112 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2116 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2119 case PLUS: /* Indirect with displacement. */
2121 rtx op0 = XEXP (addr, 0);
2122 rtx op1 = XEXP (addr, 1);
2128 if (IS_INDEX_REG (op0))
2130 fprintf (file, "*+%s(%s)",
2131 reg_names[REGNO (op1)],
2132 reg_names[REGNO (op0)]); /* Index + base. */
2136 fprintf (file, "*+%s(%s)",
2137 reg_names[REGNO (op0)],
2138 reg_names[REGNO (op1)]); /* Base + index. */
2141 else if (INTVAL (op1) < 0)
2143 fprintf (file, "*-%s(%d)",
2144 reg_names[REGNO (op0)],
2145 -INTVAL (op1)); /* Base - displacement. */
2149 fprintf (file, "*+%s(%d)",
2150 reg_names[REGNO (op0)],
2151 INTVAL (op1)); /* Base + displacement. */
2155 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
/* LO_SUM with the DP register: print the symbolic part directly.  */
2161 rtx op0 = XEXP (addr, 0);
2162 rtx op1 = XEXP (addr, 1);
2164 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2165 c4x_print_operand_address (file, op1);
2167 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2174 fprintf (file, "@");
2175 output_addr_const (file, addr);
2178 /* We shouldn't access CONST_INT addresses. */
2182 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2188 /* Return nonzero if the floating point operand will fit
2189 in the immediate field. */
/* Converts OP to the target representation (double for HFmode,
   single otherwise), sign-extends the 8-bit exponent, and checks that
   both the precision and the exponent range [-7, 7] fit the C4x
   short-immediate float format.  exponent == -128 encodes zero (the
   guarded action is on a source line not shown).  */
2192 c4x_immed_float_p (op)
2199 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2200 if (GET_MODE (op) == HFmode)
2201 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2204 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2208 /* Sign extend exponent. */
2209 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2210 if (exponent == -128)
2212 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2213 return 0; /* Precision doesn't fit. */
2214 return (exponent <= 7) /* Positive exp. */
2215 && (exponent >= -7); /* Negative exp. */
2219 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2220 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2222 None of the last four instructions from the bottom of the block can
2223 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2224 BcondAT or RETIcondD.
2226 This routine scans the four previous insns for a jump insn, and if
2227 one is found, returns 1 so that we bung in a nop instruction.
2228 This simple minded strategy will add a nop, when it may not
2229 be required. Say when there is a JUMP_INSN near the end of the
2230 block that doesn't get converted into a delayed branch.
2232 Note that we cannot have a call insn, since we don't generate
2233 repeat loops with calls in them (although I suppose we could, but
2234 there's no benefit.)
2236 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
/* NOTE(review): listing is incomplete -- the return statements and
   loop bodies fall on source lines not shown.  */
2239 c4x_rptb_nop_p (insn)
2245 /* Extract the start label from the jump pattern (rptb_end). */
2246 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2248 /* If there is a label at the end of the loop we must insert
2251 insn = previous_insn (insn);
2252 } while (GET_CODE (insn) == NOTE
2253 || GET_CODE (insn) == USE
2254 || GET_CODE (insn) == CLOBBER);
2255 if (GET_CODE (insn) == CODE_LABEL)
2258 for (i = 0; i < 4; i++)
2260 /* Search back for prev non-note and non-label insn. */
2261 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2262 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2264 if (insn == start_label)
2267 insn = previous_insn (insn);
2270 /* If we have a jump instruction we should insert a NOP. If we
2271 hit repeat block top we should only insert a NOP if the loop
2273 if (GET_CODE (insn) == JUMP_INSN)
2275 insn = previous_insn (insn);
2281 /* The C4x looping instruction needs to be emitted at the top of the
2282 loop. Emitting the true RTL for a looping instruction at the top of
2283 the loop can cause problems with flow analysis. So instead, a dummy
2284 doloop insn is emitted at the end of the loop. This routine checks
2285 for the presence of this doloop insn and then searches back to the
2286 top of the loop, where it inserts the true looping insn (provided
2287 there are no instructions in the loop which would cause problems).
2288 Any additional labels can be emitted at this point. In addition, if
2289 the desired loop count register was not allocated, this routine does
2292 Before we can create a repeat block looping instruction we have to
2293 verify that there are no jumps outside the loop and no jumps outside
2294 the loop go into this loop. This can happen in the basic blocks reorder
2295 pass. The C4x cpu can not handle this. */
/* Recursively walk rtx X and return nonzero if it contains a
   LABEL_REF to CODE_LABEL (compared by INSN_UID).  */
2298 c4x_label_ref_used_p (x, code_label)
2308 code = GET_CODE (x);
2309 if (code == LABEL_REF)
2310 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2312 fmt = GET_RTX_FORMAT (code);
2313 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2317 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2320 else if (fmt[i] == 'E')
2321 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2322 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
/* Return nonzero if the region between START_LABEL and the rptb_end
   INSN may safely become a RPTB/RPTS block: no jump from outside into
   the block and no jump from inside out of it.  NOTE(review): listing
   is incomplete -- the return statements and some loop delimiters are
   on source lines not shown.  */
2330 c4x_rptb_valid_p (insn, start_label)
2331 rtx insn, start_label;
2337 /* Find the start label. */
2338 for (; insn; insn = PREV_INSN (insn))
2339 if (insn == start_label)
2342 /* Note found then we can not use a rptb or rpts. The label was
2343 probably moved by the basic block reorder pass. */
2348 /* If any jump jumps inside this block then we must fail. */
2349 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2351 if (GET_CODE (insn) == CODE_LABEL)
2353 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2354 if (GET_CODE (tmp) == JUMP_INSN
2355 && c4x_label_ref_used_p (tmp, insn))
2359 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2361 if (GET_CODE (insn) == CODE_LABEL)
2363 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2364 if (GET_CODE (tmp) == JUMP_INSN
2365 && c4x_label_ref_used_p (tmp, insn))
2369 /* If any jump jumps outside this block then we must fail. */
2370 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2372 if (GET_CODE (insn) == CODE_LABEL)
2374 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2375 if (GET_CODE (tmp) == JUMP_INSN
2376 && c4x_label_ref_used_p (tmp, insn))
2378 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2379 if (GET_CODE (tmp) == JUMP_INSN
2380 && c4x_label_ref_used_p (tmp, insn))
2385 /* All checks OK. */
/* Given an rptb_end INSN, insert the real repeat-block insn
   (rptb_top or rpts_top) at the top of the loop with fresh start/end
   labels.  Falls back to an explicit decrement/compare/branch when
   the count register was not allocated to RC, or when
   c4x_rptb_valid_p rejects the region.  NOTE(review): listing is
   incomplete -- the early returns and closing braces are on source
   lines not shown.  */
2391 c4x_rptb_insert (insn)
2396 rtx new_start_label;
2399 /* If the count register has not been allocated to RC, say if
2400 there is a movstr pattern in the loop, then do not insert a
2401 RPTB instruction. Instead we emit a decrement and branch
2402 at the end of the loop. */
2403 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2404 if (REGNO (count_reg) != RC_REGNO)
2407 /* Extract the start label from the jump pattern (rptb_end). */
2408 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2410 if (! c4x_rptb_valid_p (insn, start_label))
2412 /* We can not use the rptb insn. Replace it so reorg can use
2413 the delay slots of the jump insn. */
2414 emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
2415 emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
2416 emit_insn_before (gen_bge (start_label), insn);
2417 LABEL_NUSES (start_label)++;
2422 end_label = gen_label_rtx ();
2423 LABEL_NUSES (end_label)++;
2424 emit_label_after (end_label, insn);
2426 new_start_label = gen_label_rtx ();
2427 LABEL_NUSES (new_start_label)++;
/* Walk back to the original start label, redirecting any jump to it
   onto the fresh label.  */
2429 for (; insn; insn = PREV_INSN (insn))
2431 if (insn == start_label)
2433 if (GET_CODE (insn) == JUMP_INSN &&
2434 JUMP_LABEL (insn) == start_label)
2435 redirect_jump (insn, new_start_label, 0);
2438 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2440 emit_label_after (new_start_label, insn);
2442 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2443 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2445 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2446 if (LABEL_NUSES (start_label) == 0)
2447 delete_insn (start_label);
2451 /* This function is a C4x special called immediately before delayed
2452 branch scheduling. We fix up RTPB style loops that didn't get RC
2453 allocated as the loop counter. */
/* Walks all insns from FIRST: inserts the real RPTB insn for each
   rptb_end, then splits each insn (needed so force_const_mem works
   for load_immed_address), converting fully-deleted insns into
   NOTE_INSN_DELETED notes.  NOTE(review): listing is incomplete --
   some guards and braces are on source lines not shown.  */
2456 c4x_process_after_reload (first)
2461 for (insn = first; insn; insn = NEXT_INSN (insn))
2463 /* Look for insn. */
2466 int insn_code_number;
2469 insn_code_number = recog_memoized (insn);
2471 if (insn_code_number < 0)
2474 /* Insert the RTX for RPTB at the top of the loop
2475 and a label at the end of the loop. */
2476 if (insn_code_number == CODE_FOR_rptb_end)
2477 c4x_rptb_insert(insn);
2479 /* We need to split the insn here. Otherwise the calls to
2480 force_const_mem will not work for load_immed_address. */
2483 /* Don't split the insn if it has been deleted. */
2484 if (! INSN_DELETED_P (old))
2485 insn = try_split (PATTERN(old), old, 1);
2487 /* When not optimizing, the old insn will be still left around
2488 with only the 'deleted' bit set. Transform it into a note
2489 to avoid confusion of subsequent processing. */
2490 if (INSN_DELETED_P (old))
2492 PUT_CODE (old, NOTE);
2493 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2494 NOTE_SOURCE_FILE (old) = 0;
/* Truncated bodies of two register predicates (address register or
   pseudo; index register or pseudo) -- their headers are on source
   lines not shown.  */
2505 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2513 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
/* Nonzero if OP is a CONST_INT with an integer-class (or VOID)
   mode.  NOTE(review): GET_MODE_CLASS is applied to OP itself on the
   lines numbered 2525-2526 rather than to GET_MODE (op) -- looks
   suspicious; confirm against the full source.  */
2518 c4x_immed_int_constant (op)
2521 if (GET_CODE (op) != CONST_INT)
2524 return GET_MODE (op) == VOIDmode
2525 || GET_MODE_CLASS (op) == MODE_INT
2526 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
/* Nonzero if OP is a CONST_DOUBLE of QFmode or HFmode.  */
2531 c4x_immed_float_constant (op)
2534 if (GET_CODE (op) != CONST_DOUBLE)
2537 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2538 present this only means that a MEM rtx has been generated. It does
2539 not mean the rtx is really in memory. */
2541 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
/* Find a shift count (0..15) such that OP shifted right fits a 16-bit
   immediate; the mask keeps the bits a later left shift would
   restore.  NOTE(review): `val & (1 << 31)` left-shifts into the sign
   bit -- long-standing GCC idiom, but implementation-defined in
   modern C.  Listing is incomplete; the returns are on lines not
   shown.  */
2546 c4x_shiftable_constant (op)
2551 int val = INTVAL (op);
2553 for (i = 0; i < 16; i++)
2558 mask = ((0xffff >> i) << 16) | 0xffff;
2559 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2560 : (val >> i) & mask))
/* Truncated bodies of the immediate-constant predicates (H, K, N, I,
   J, L, O constants) -- their headers fall on lines not shown.  */
2570 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2578 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2588 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2596 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2598 return IS_INT5_CONST (INTVAL (op));
2606 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2614 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2622 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2626 /* The constraints do not have to check the register class,
2627 except when needed to discriminate between the constraints.
2628 The operand has been checked by the predicates to be valid. */
2630 /* ARx + 9-bit signed const or IRn
2631 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2632 We don't include the pre/post inc/dec forms here since
2633 they are handled by the <> constraints. */
/* NOTE(review): listing is incomplete -- the REG case, returns and
   braces are on lines not shown.  */
2636 c4x_Q_constraint (op)
2639 enum machine_mode mode = GET_MODE (op);
2641 if (GET_CODE (op) != MEM)
2644 switch (GET_CODE (op))
2651 rtx op0 = XEXP (op, 0);
2652 rtx op1 = XEXP (op, 1);
2660 if (GET_CODE (op1) != CONST_INT)
2663 /* HImode and HFmode must be offsettable. */
2664 if (mode == HImode || mode == HFmode)
2665 return IS_DISP8_OFF_CONST (INTVAL (op1));
2667 return IS_DISP8_CONST (INTVAL (op1));
2678 /* ARx + 5-bit unsigned const
2679 *ARx, *+ARx(n) for n < 32. */
/* R constraint checker.  NOTE(review): listing is incomplete.  */
2682 c4x_R_constraint (op)
2685 enum machine_mode mode = GET_MODE (op);
2689 if (GET_CODE (op) != MEM)
2692 switch (GET_CODE (op))
2699 rtx op0 = XEXP (op, 0);
2700 rtx op1 = XEXP (op, 1);
2705 if (GET_CODE (op1) != CONST_INT)
2708 /* HImode and HFmode must be offsettable. */
2709 if (mode == HImode || mode == HFmode)
2710 return IS_UINT5_CONST (INTVAL (op1) + 1);
2712 return IS_UINT5_CONST (INTVAL (op1));
/* Companion indirect checker (C4x only): same shape but accepts
   address-or-pseudo registers; its header is on lines not shown.  */
2727 enum machine_mode mode = GET_MODE (op);
2729 if (TARGET_C3X || GET_CODE (op) != MEM)
2733 switch (GET_CODE (op))
2736 return IS_ADDR_OR_PSEUDO_REG (op);
2740 rtx op0 = XEXP (op, 0);
2741 rtx op1 = XEXP (op, 1);
2743 /* HImode and HFmode must be offsettable. */
2744 if (mode == HImode || mode == HFmode)
2745 return IS_ADDR_OR_PSEUDO_REG (op0)
2746 && GET_CODE (op1) == CONST_INT
2747 && IS_UINT5_CONST (INTVAL (op1) + 1);
2750 && IS_ADDR_OR_PSEUDO_REG (op0)
2751 && GET_CODE (op1) == CONST_INT
2752 && IS_UINT5_CONST (INTVAL (op1));
2763 /* ARx + 1-bit unsigned const or IRn
2764 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2765 We don't include the pre/post inc/dec forms here since
2766 they are handled by the <> constraints. */
/* S constraint checker.  NOTE(review): listing is incomplete.  */
2769 c4x_S_constraint (op)
2772 enum machine_mode mode = GET_MODE (op);
2773 if (GET_CODE (op) != MEM)
2776 switch (GET_CODE (op))
/* PRE/POST_MODIFY: require base-register-relative reg+reg form.  */
2784 rtx op0 = XEXP (op, 0);
2785 rtx op1 = XEXP (op, 1);
2787 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2788 || (op0 != XEXP (op1, 0)))
2791 op0 = XEXP (op1, 0);
2792 op1 = XEXP (op1, 1);
2793 return REG_P (op0) && REG_P (op1);
2794 /* Pre or post_modify with a displacement of 0 or 1
2795 should not be generated. */
2801 rtx op0 = XEXP (op, 0);
2802 rtx op1 = XEXP (op, 1);
2810 if (GET_CODE (op1) != CONST_INT)
2813 /* HImode and HFmode must be offsettable. */
2814 if (mode == HImode || mode == HFmode)
2815 return IS_DISP1_OFF_CONST (INTVAL (op1));
2817 return IS_DISP1_CONST (INTVAL (op1));
/* Companion indirect checker (register-class aware); its header is
   on lines not shown.  */
2832 enum machine_mode mode = GET_MODE (op);
2833 if (GET_CODE (op) != MEM)
2837 switch (GET_CODE (op))
2841 if (mode != QImode && mode != QFmode)
2848 return IS_ADDR_OR_PSEUDO_REG (op);
2853 rtx op0 = XEXP (op, 0);
2854 rtx op1 = XEXP (op, 1);
2856 if (mode != QImode && mode != QFmode)
2859 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2860 || (op0 != XEXP (op1, 0)))
2863 op0 = XEXP (op1, 0);
2864 op1 = XEXP (op1, 1);
2865 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2866 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2867 /* Pre or post_modify with a displacement of 0 or 1
2868 should not be generated. */
2873 rtx op0 = XEXP (op, 0);
2874 rtx op1 = XEXP (op, 1);
2878 /* HImode and HFmode must be offsettable. */
2879 if (mode == HImode || mode == HFmode)
2880 return IS_ADDR_OR_PSEUDO_REG (op0)
2881 && GET_CODE (op1) == CONST_INT
2882 && IS_DISP1_OFF_CONST (INTVAL (op1));
2885 return (IS_INDEX_OR_PSEUDO_REG (op1)
2886 && IS_ADDR_OR_PSEUDO_REG (op0))
2887 || (IS_ADDR_OR_PSEUDO_REG (op1)
2888 && IS_INDEX_OR_PSEUDO_REG (op0));
2890 return IS_ADDR_OR_PSEUDO_REG (op0)
2891 && GET_CODE (op1) == CONST_INT
2892 && IS_DISP1_CONST (INTVAL (op1));
2904 /* Direct memory operand. */
/* T constraint: a direct (DP-relative LO_SUM) memory operand, or a
   flagged SYMBOL_REF call operand.
   NOTE(review, suspected bug): on the line numbered 2923,
   `GET_CODE (op) == HFmode` compares an rtx code against a machine
   mode -- almost certainly a typo for `GET_MODE (op) == HFmode`,
   which would make HFmode operands wrongly accepted here.  Cannot
   fix safely from this truncated listing; verify against the full
   source.  */
2907 c4x_T_constraint (op)
2910 if (GET_CODE (op) != MEM)
2914 if (GET_CODE (op) != LO_SUM)
2916 /* Allow call operands. */
2917 return GET_CODE (op) == SYMBOL_REF
2918 && GET_MODE (op) == Pmode
2919 && SYMBOL_REF_FLAG (op);
2922 /* HImode and HFmode are not offsettable. */
2923 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2926 if ((GET_CODE (XEXP (op, 0)) == REG)
2927 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2928 return c4x_U_constraint (XEXP (op, 1));
2934 /* Symbolic operand. */
/* U constraint: CONST, SYMBOL_REF, or LABEL_REF (no arbitrary
   constants for direct addressing).  */
2937 c4x_U_constraint (op)
2940 /* Don't allow direct addressing to an arbitrary constant. */
2941 return GET_CODE (op) == CONST
2942 || GET_CODE (op) == SYMBOL_REF
2943 || GET_CODE (op) == LABEL_REF;
/* Nonzero when OP is a MEM whose address uses an auto-inc/dec or
   pre/post-modify form.  NOTE(review): listing is incomplete -- the
   remaining code tests and returns are on lines not shown.  */
2948 c4x_autoinc_operand (op, mode)
2950 enum machine_mode mode ATTRIBUTE_UNUSED;
2952 if (GET_CODE (op) == MEM)
2954 enum rtx_code code = GET_CODE (XEXP (op, 0));
2960 || code == PRE_MODIFY
2961 || code == POST_MODIFY
2972 any_operand (op, mode)
2973 register rtx op ATTRIBUTE_UNUSED;
2974 enum machine_mode mode ATTRIBUTE_UNUSED;
2980 /* Nonzero if OP is a floating point value with value 0.0. */
2983 fp_zero_operand (op, mode)
2985 enum machine_mode mode ATTRIBUTE_UNUSED;
2989 if (GET_CODE (op) != CONST_DOUBLE)
2991 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2992 return REAL_VALUES_EQUAL (r, dconst0);
/* Predicate for immediate constants valid for MODE: float modes
   accept a CONST_DOUBLE that fits the immediate field; integer modes
   accept CONSTANT_P_RTX or a CONST_INT that is a 16-bit or HIGH
   constant.  NOTE(review): listing is incomplete -- the mode switch
   scaffolding is on lines not shown.  */
2997 const_operand (op, mode)
2999 register enum machine_mode mode;
3005 if (GET_CODE (op) != CONST_DOUBLE
3006 || GET_MODE (op) != mode
3007 || GET_MODE_CLASS (mode) != MODE_FLOAT)
3010 return c4x_immed_float_p (op);
3016 if (GET_CODE (op) == CONSTANT_P_RTX)
3019 if (GET_CODE (op) != CONST_INT
3020 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
3021 || GET_MODE_CLASS (mode) != MODE_INT)
3024 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
/* Thin predicate wrappers over the K/N constant checkers, and
   reg_operand, which rejects QFmode subregs (mixed-mode subregs are
   handled separately) before deferring to register_operand.  */
3036 stik_const_operand (op, mode)
3038 enum machine_mode mode ATTRIBUTE_UNUSED;
3040 return c4x_K_constant (op);
3045 not_const_operand (op, mode)
3047 enum machine_mode mode ATTRIBUTE_UNUSED;
3049 return c4x_N_constant (op);
3054 reg_operand (op, mode)
3056 enum machine_mode mode;
3058 if (GET_CODE (op) == SUBREG
3059 && GET_MODE (op) == QFmode)
3061 return register_operand (op, mode);
/* mixed_subreg_operand: a (subreg:QF (reg:QI/HI)) generated for a
   union of int and long double.  reg_imm_operand / not_modify_reg /
   not_rc_reg follow; their remaining returns are on lines not
   shown.  */
3066 mixed_subreg_operand (op, mode)
3068 enum machine_mode mode ATTRIBUTE_UNUSED;
3070 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3071 int and a long double. */
3072 if (GET_CODE (op) == SUBREG
3073 && (GET_MODE (op) == QFmode)
3074 && (GET_MODE (SUBREG_REG (op)) == QImode
3075 || GET_MODE (SUBREG_REG (op)) == HImode)
3082 reg_imm_operand (op, mode)
3084 enum machine_mode mode ATTRIBUTE_UNUSED;
3086 if (REG_P (op) || CONSTANT_P (op))
/* Accept operands whose address does not auto-modify a register.  */
3093 not_modify_reg (op, mode)
3095 enum machine_mode mode ATTRIBUTE_UNUSED;
3097 if (REG_P (op) || CONSTANT_P (op))
3099 if (GET_CODE (op) != MEM)
3102 switch (GET_CODE (op))
3109 rtx op0 = XEXP (op, 0);
3110 rtx op1 = XEXP (op, 1);
3115 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3121 rtx op0 = XEXP (op, 0);
3123 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
/* Reject the RC register (used by repeat blocks).  */
3141 not_rc_reg (op, mode)
3143 enum machine_mode mode ATTRIBUTE_UNUSED;
3145 if (REG_P (op) && REGNO (op) == RC_REGNO)
3151 /* Extended precision register R0-R1. */
/* The following predicates all share one shape: defer to reg_operand,
   strip a SUBREG, then test the register class (pseudos allowed).  */
3154 r0r1_reg_operand (op, mode)
3156 enum machine_mode mode;
3158 if (! reg_operand (op, mode))
3160 if (GET_CODE (op) == SUBREG)
3161 op = SUBREG_REG (op);
3162 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3166 /* Extended precision register R2-R3. */
3169 r2r3_reg_operand (op, mode)
3171 enum machine_mode mode;
3173 if (! reg_operand (op, mode))
3175 if (GET_CODE (op) == SUBREG)
3176 op = SUBREG_REG (op);
3177 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3181 /* Low extended precision register R0-R7. */
3184 ext_low_reg_operand (op, mode)
3186 enum machine_mode mode;
3188 if (! reg_operand (op, mode))
3190 if (GET_CODE (op) == SUBREG)
3191 op = SUBREG_REG (op);
3192 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3196 /* Extended precision register. */
3199 ext_reg_operand (op, mode)
3201 enum machine_mode mode;
3203 if (! reg_operand (op, mode))
3205 if (GET_CODE (op) == SUBREG)
3206 op = SUBREG_REG (op);
3209 return IS_EXT_OR_PSEUDO_REG (op);
3213 /* Standard precision register. */
3216 std_reg_operand (op, mode)
3218 enum machine_mode mode;
3220 if (! reg_operand (op, mode))
3222 if (GET_CODE (op) == SUBREG)
3223 op = SUBREG_REG (op);
3224 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3227 /* Standard precision or normal register. */
3230 std_or_reg_operand (op, mode)
3232 enum machine_mode mode;
3234 if (reload_in_progress)
3235 return std_reg_operand (op, mode);
3236 return reg_operand (op, mode);
3239 /* Address register. */
3242 addr_reg_operand (op, mode)
3244 enum machine_mode mode;
3246 if (! reg_operand (op, mode))
3248 return c4x_a_register (op);
3252 /* Index register. */
3255 index_reg_operand (op, mode)
3257 enum machine_mode mode;
3259 if (! reg_operand (op, mode))
3261 if (GET_CODE (op) == SUBREG)
3262 op = SUBREG_REG (op);
3263 return c4x_x_register (op);
3270 dp_reg_operand (op, mode)
3272 enum machine_mode mode ATTRIBUTE_UNUSED;
3274 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3281 sp_reg_operand (op, mode)
3283 enum machine_mode mode ATTRIBUTE_UNUSED;
3285 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3292 st_reg_operand (op, mode)
3294 enum machine_mode mode ATTRIBUTE_UNUSED;
3296 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3303 rc_reg_operand (op, mode)
3305 enum machine_mode mode ATTRIBUTE_UNUSED;
3307 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3312 call_address_operand (op, mode)
3314 enum machine_mode mode ATTRIBUTE_UNUSED;
3316 return (REG_P (op) || symbolic_address_operand (op, mode));
3320 /* Symbolic address operand. */
3323 symbolic_address_operand (op, mode)
3325 enum machine_mode mode ATTRIBUTE_UNUSED;
3327 switch (GET_CODE (op))
3339 /* Check dst operand of a move instruction. */
3342 dst_operand (op, mode)
3344 enum machine_mode mode;
3346 if (GET_CODE (op) == SUBREG
3347 && mixed_subreg_operand (op, mode))
3351 return reg_operand (op, mode);
3353 return nonimmediate_operand (op, mode);
3357 /* Check src operand of two operand arithmetic instructions. */
3360 src_operand (op, mode)
3362 enum machine_mode mode;
3364 if (GET_CODE (op) == SUBREG
3365 && mixed_subreg_operand (op, mode))
3369 return reg_operand (op, mode);
3371 if (mode == VOIDmode)
3372 mode = GET_MODE (op);
3374 if (GET_CODE (op) == CONST_INT)
3375 return (mode == QImode || mode == Pmode || mode == HImode)
3376 && c4x_I_constant (op);
3378 /* We don't like CONST_DOUBLE integers. */
3379 if (GET_CODE (op) == CONST_DOUBLE)
3380 return c4x_H_constant (op);
3382 /* Disallow symbolic addresses. Only the predicate
3383 symbolic_address_operand will match these. */
3384 if (GET_CODE (op) == SYMBOL_REF
3385 || GET_CODE (op) == LABEL_REF
3386 || GET_CODE (op) == CONST)
3389 /* If TARGET_LOAD_DIRECT_MEMS is non-zero, disallow direct memory
3390 access to symbolic addresses. These operands will get forced
3391 into a register and the movqi expander will generate a
3392 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is non-zero. */
3393 if (GET_CODE (op) == MEM
3394 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3395 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3396 || GET_CODE (XEXP (op, 0)) == CONST)))
3397 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3399 return general_operand (op, mode);
3404 src_hi_operand (op, mode)
3406 enum machine_mode mode;
3408 if (c4x_O_constant (op))
3410 return src_operand (op, mode);
3414 /* Check src operand of two operand logical instructions. */
3417 lsrc_operand (op, mode)
3419 enum machine_mode mode;
3421 if (mode == VOIDmode)
3422 mode = GET_MODE (op);
3424 if (mode != QImode && mode != Pmode)
3425 fatal_insn ("Mode not QImode", op);
3427 if (GET_CODE (op) == CONST_INT)
3428 return c4x_L_constant (op) || c4x_J_constant (op);
3430 return src_operand (op, mode);
3434 /* Check src operand of two operand tricky instructions. */
3437 tsrc_operand (op, mode)
3439 enum machine_mode mode;
3441 if (mode == VOIDmode)
3442 mode = GET_MODE (op);
3444 if (mode != QImode && mode != Pmode)
3445 fatal_insn ("Mode not QImode", op);
3447 if (GET_CODE (op) == CONST_INT)
3448 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3450 return src_operand (op, mode);
3455 reg_or_const_operand (op, mode)
3457 enum machine_mode mode;
3459 return reg_operand (op, mode) || const_operand (op, mode);
3463 /* Check for indirect operands allowable in parallel instruction. */
3466 par_ind_operand (op, mode)
3468 enum machine_mode mode;
3470 if (mode != VOIDmode && mode != GET_MODE (op))
3473 return c4x_S_indirect (op);
3477 /* Check for operands allowable in parallel instruction. */
3480 parallel_operand (op, mode)
3482 enum machine_mode mode;
3484 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3489 c4x_S_address_parse (op, base, incdec, index, disp)
3501 if (GET_CODE (op) != MEM)
3502 fatal_insn ("Invalid indirect memory address", op);
3505 switch (GET_CODE (op))
3508 *base = REGNO (XEXP (op, 0));
3514 *base = REGNO (XEXP (op, 0));
3520 *base = REGNO (XEXP (op, 0));
3526 *base = REGNO (XEXP (op, 0));
3532 *base = REGNO (XEXP (op, 0));
3533 if (REG_P (XEXP (XEXP (op, 1), 1)))
3535 *index = REGNO (XEXP (XEXP (op, 1), 1));
3536 *disp = 0; /* ??? */
3539 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3544 *base = REGNO (XEXP (op, 0));
3545 if (REG_P (XEXP (XEXP (op, 1), 1)))
3547 *index = REGNO (XEXP (XEXP (op, 1), 1));
3548 *disp = 1; /* ??? */
3551 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3562 rtx op0 = XEXP (op, 0);
3563 rtx op1 = XEXP (op, 1);
3565 if (c4x_a_register (op0))
3567 if (c4x_x_register (op1))
3569 *base = REGNO (op0);
3570 *index = REGNO (op1);
3573 else if ((GET_CODE (op1) == CONST_INT
3574 && IS_DISP1_CONST (INTVAL (op1))))
3576 *base = REGNO (op0);
3577 *disp = INTVAL (op1);
3581 else if (c4x_x_register (op0) && c4x_a_register (op1))
3583 *base = REGNO (op1);
3584 *index = REGNO (op0);
3591 fatal_insn ("Invalid indirect (S) memory address", op);
3597 c4x_address_conflict (op0, op1, store0, store1)
3612 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3615 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3616 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3618 if (store0 && store1)
3620 /* If we have two stores in parallel to the same address, then
3621 the C4x only executes one of the stores. This is unlikely to
3622 cause problems except when writing to a hardware device such
3623 as a FIFO since the second write will be lost. The user
3624 should flag the hardware location as being volatile so that
3625 we don't do this optimisation. While it is unlikely that we
3626 have an aliased address if both locations are not marked
3627 volatile, it is probably safer to flag a potential conflict
3628 if either location is volatile. */
3629 if (! flag_argument_noalias)
3631 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3636 /* If have a parallel load and a store to the same address, the load
3637 is performed first, so there is no conflict. Similarly, there is
3638 no conflict if have parallel loads from the same address. */
3640 /* Cannot use auto increment or auto decrement twice for same
3642 if (base0 == base1 && incdec0 && incdec0)
3645 /* It might be too confusing for GCC if we have use a base register
3646 with a side effect and a memory reference using the same register
3648 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3651 /* We can not optimize the case where op1 and op2 refer to the same
3653 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3661 /* Check for while loop inside a decrement and branch loop. */
3664 c4x_label_conflict (insn, jump, db)
3671 if (GET_CODE (insn) == CODE_LABEL)
3673 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3675 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3678 insn = PREV_INSN (insn);
3684 /* Validate combination of operands for parallel load/store instructions. */
3687 valid_parallel_load_store (operands, mode)
3689 enum machine_mode mode ATTRIBUTE_UNUSED;
3691 rtx op0 = operands[0];
3692 rtx op1 = operands[1];
3693 rtx op2 = operands[2];
3694 rtx op3 = operands[3];
3696 if (GET_CODE (op0) == SUBREG)
3697 op0 = SUBREG_REG (op0);
3698 if (GET_CODE (op1) == SUBREG)
3699 op1 = SUBREG_REG (op1);
3700 if (GET_CODE (op2) == SUBREG)
3701 op2 = SUBREG_REG (op2);
3702 if (GET_CODE (op3) == SUBREG)
3703 op3 = SUBREG_REG (op3);
3705 /* The patterns should only allow ext_low_reg_operand() or
3706 par_ind_operand() operands. Thus of the 4 operands, only 2
3707 should be REGs and the other 2 should be MEMs. */
3709 /* This test prevents the multipack pass from using this pattern if
3710 op0 is used as an index or base register in op2 or op3, since
3711 this combination will require reloading. */
3712 if (GET_CODE (op0) == REG
3713 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3714 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3718 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3719 return (REGNO (op0) != REGNO (op2))
3720 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3721 && ! c4x_address_conflict (op1, op3, 0, 0);
3724 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3725 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3726 && ! c4x_address_conflict (op0, op2, 1, 1);
3729 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3730 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3731 && ! c4x_address_conflict (op1, op2, 0, 1);
3734 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3735 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3736 && ! c4x_address_conflict (op0, op3, 1, 0);
3743 valid_parallel_operands_4 (operands, mode)
3745 enum machine_mode mode ATTRIBUTE_UNUSED;
3747 rtx op0 = operands[0];
3748 rtx op2 = operands[2];
3750 if (GET_CODE (op0) == SUBREG)
3751 op0 = SUBREG_REG (op0);
3752 if (GET_CODE (op2) == SUBREG)
3753 op2 = SUBREG_REG (op2);
3755 /* This test prevents the multipack pass from using this pattern if
3756 op0 is used as an index or base register in op2, since this combination
3757 will require reloading. */
3758 if (GET_CODE (op0) == REG
3759 && GET_CODE (op2) == MEM
3760 && reg_mentioned_p (op0, XEXP (op2, 0)))
3768 valid_parallel_operands_5 (operands, mode)
3770 enum machine_mode mode ATTRIBUTE_UNUSED;
3773 rtx op0 = operands[0];
3774 rtx op1 = operands[1];
3775 rtx op2 = operands[2];
3776 rtx op3 = operands[3];
3778 if (GET_CODE (op0) == SUBREG)
3779 op0 = SUBREG_REG (op0);
3780 if (GET_CODE (op1) == SUBREG)
3781 op1 = SUBREG_REG (op1);
3782 if (GET_CODE (op2) == SUBREG)
3783 op2 = SUBREG_REG (op2);
3785 /* The patterns should only allow ext_low_reg_operand() or
3786 par_ind_operand() operands. Operands 1 and 2 may be commutative
3787 but only one of them can be a register. */
3788 if (GET_CODE (op1) == REG)
3790 if (GET_CODE (op2) == REG)
3796 /* This test prevents the multipack pass from using this pattern if
3797 op0 is used as an index or base register in op3, since this combination
3798 will require reloading. */
3799 if (GET_CODE (op0) == REG
3800 && GET_CODE (op3) == MEM
3801 && reg_mentioned_p (op0, XEXP (op3, 0)))
3809 valid_parallel_operands_6 (operands, mode)
3811 enum machine_mode mode ATTRIBUTE_UNUSED;
3814 rtx op0 = operands[0];
3815 rtx op1 = operands[1];
3816 rtx op2 = operands[2];
3817 rtx op4 = operands[4];
3818 rtx op5 = operands[5];
3820 if (GET_CODE (op1) == SUBREG)
3821 op1 = SUBREG_REG (op1);
3822 if (GET_CODE (op2) == SUBREG)
3823 op2 = SUBREG_REG (op2);
3824 if (GET_CODE (op4) == SUBREG)
3825 op4 = SUBREG_REG (op4);
3826 if (GET_CODE (op5) == SUBREG)
3827 op5 = SUBREG_REG (op5);
3829 /* The patterns should only allow ext_low_reg_operand() or
3830 par_ind_operand() operands. Thus of the 4 input operands, only 2
3831 should be REGs and the other 2 should be MEMs. */
3833 if (GET_CODE (op1) == REG)
3835 if (GET_CODE (op2) == REG)
3837 if (GET_CODE (op4) == REG)
3839 if (GET_CODE (op5) == REG)
3842 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3843 Perhaps we should count the MEMs as well? */
3847 /* This test prevents the multipack pass from using this pattern if
3848 op0 is used as an index or base register in op4 or op5, since
3849 this combination will require reloading. */
3850 if (GET_CODE (op0) == REG
3851 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3852 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3859 /* Validate combination of src operands. Note that the operands have
3860 been screened by the src_operand predicate. We just have to check
3861 that the combination of operands is valid. If FORCE is set, ensure
3862 that the destination regno is valid if we have a 2 operand insn. */
3865 c4x_valid_operands (code, operands, mode, force)
3868 enum machine_mode mode ATTRIBUTE_UNUSED;
3873 enum rtx_code code1;
3874 enum rtx_code code2;
3876 if (code == COMPARE)
3887 if (GET_CODE (op1) == SUBREG)
3888 op1 = SUBREG_REG (op1);
3889 if (GET_CODE (op2) == SUBREG)
3890 op2 = SUBREG_REG (op2);
3892 code1 = GET_CODE (op1);
3893 code2 = GET_CODE (op2);
3895 if (code1 == REG && code2 == REG)
3898 if (code1 == MEM && code2 == MEM)
3900 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3902 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3913 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3918 if (! c4x_H_constant (op2))
3922 /* Any valid memory operand screened by src_operand is OK. */
3925 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3926 into a stack slot memory address comprising a PLUS and a
3932 fatal_insn ("c4x_valid_operands: Internal error", op2);
3936 /* Check that we have a valid destination register for a two operand
3938 return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
3941 /* We assume MINUS is commutative since the subtract patterns
3942 also support the reverse subtract instructions. Since op1
3943 is not a register, and op2 is a register, op1 can only
3944 be a restricted memory operand for a shift instruction. */
3945 if (code == ASHIFTRT || code == LSHIFTRT
3946 || code == ASHIFT || code == COMPARE)
3948 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3953 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3958 if (! c4x_H_constant (op1))
3962 /* Any valid memory operand screened by src_operand is OK. */
3970 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3971 into a stack slot memory address comprising a PLUS and a
3977 fatal ("c4x_valid_operands: Internal error");
3981 /* Check that we have a valid destination register for a two operand
3983 return ! force || REGNO (op1) == REGNO (operands[0]);
3987 int valid_operands (code, operands, mode)
3990 enum machine_mode mode;
3993 /* If we are not optimizing then we have to let anything go and let
3994 reload fix things up. instantiate_decl in function.c can produce
3995 invalid insns by changing the offset of a memory operand from a
3996 valid one into an invalid one, when the second operand is also a
3997 memory operand. The alternative is not to allow two memory
3998 operands for an insn when not optimizing. The problem only rarely
3999 occurs, for example with the C-torture program DFcmp.c. */
4001 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
4006 legitimize_operands (code, operands, mode)
4009 enum machine_mode mode;
4011 /* Compare only has 2 operands. */
4012 if (code == COMPARE)
4014 /* During RTL generation, force constants into pseudos so that
4015 they can get hoisted out of loops. This will tie up an extra
4016 register but can save an extra cycle. Only do this if loop
4017 optimisation enabled. (We cannot pull this trick for add and
4018 sub instructions since the flow pass won't find
4019 autoincrements etc.) This allows us to generate compare
4020 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4021 of LDI *AR0++, R0; CMPI 42, R0.
4023 Note that expand_binops will try to load an expensive constant
4024 into a register if it is used within a loop. Unfortunately,
4025 the cost mechanism doesn't allow us to look at the other
4026 operand to decide whether the constant is expensive. */
4028 if (! reload_in_progress
4031 && GET_CODE (operands[1]) == CONST_INT
4032 && preserve_subexpressions_p ()
4033 && rtx_cost (operands[1], code) > 1)
4034 operands[1] = force_reg (mode, operands[1]);
4036 if (! reload_in_progress
4037 && ! c4x_valid_operands (code, operands, mode, 0))
4038 operands[0] = force_reg (mode, operands[0]);
4042 /* We cannot do this for ADDI/SUBI insns since we will
4043 defeat the flow pass from finding autoincrement addressing
4045 if (! reload_in_progress
4046 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
4049 && GET_CODE (operands[2]) == CONST_INT
4050 && preserve_subexpressions_p ()
4051 && rtx_cost (operands[2], code) > 1)
4052 operands[2] = force_reg (mode, operands[2]);
4054 /* We can get better code on a C30 if we force constant shift counts
4055 into a register. This way they can get hoisted out of loops,
4056 tying up a register, but saving an instruction. The downside is
4057 that they may get allocated to an address or index register, and
4058 thus we will get a pipeline conflict if there is a nearby
4059 indirect address using an address register.
4061 Note that expand_binops will not try to load an expensive constant
4062 into a register if it is used within a loop for a shift insn. */
4064 if (! reload_in_progress
4065 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
4067 /* If the operand combination is invalid, we force operand1 into a
4068 register, preventing reload from having doing to do this at a
4070 operands[1] = force_reg (mode, operands[1]);
4073 emit_move_insn (operands[0], operands[1]);
4074 operands[1] = copy_rtx (operands[0]);
4078 /* Just in case... */
4079 if (! c4x_valid_operands (code, operands, mode, 0))
4080 operands[2] = force_reg (mode, operands[2]);
4084 /* Right shifts require a negative shift count, but GCC expects
4085 a positive count, so we emit a NEG. */
4086 if ((code == ASHIFTRT || code == LSHIFTRT)
4087 && (GET_CODE (operands[2]) != CONST_INT))
4088 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
4094 /* The following predicates are used for instruction scheduling. */
4097 group1_reg_operand (op, mode)
4099 enum machine_mode mode;
4101 if (mode != VOIDmode && mode != GET_MODE (op))
4103 if (GET_CODE (op) == SUBREG)
4104 op = SUBREG_REG (op);
4105 return REG_P (op) && IS_GROUP1_REG (op);
4110 group1_mem_operand (op, mode)
4112 enum machine_mode mode;
4114 if (mode != VOIDmode && mode != GET_MODE (op))
4117 if (GET_CODE (op) == MEM)
4120 if (GET_CODE (op) == PLUS)
4122 rtx op0 = XEXP (op, 0);
4123 rtx op1 = XEXP (op, 1);
4125 if (((GET_CODE (op0) == REG) && IS_GROUP1_REG (op0))
4126 || ((GET_CODE (op1) == REG) && IS_GROUP1_REG (op1)))
4129 else if ((REG_P (op)) && IS_GROUP1_REG (op))
4137 /* Return true if any one of the address registers. */
4140 arx_reg_operand (op, mode)
4142 enum machine_mode mode;
4144 if (mode != VOIDmode && mode != GET_MODE (op))
4146 if (GET_CODE (op) == SUBREG)
4147 op = SUBREG_REG (op);
4148 return REG_P (op) && IS_ADDR_REG (op);
4153 c4x_arn_reg_operand (op, mode, regno)
4155 enum machine_mode mode;
4158 if (mode != VOIDmode && mode != GET_MODE (op))
4160 if (GET_CODE (op) == SUBREG)
4161 op = SUBREG_REG (op);
4162 return REG_P (op) && (REGNO (op) == regno);
4167 c4x_arn_mem_operand (op, mode, regno)
4169 enum machine_mode mode;
4172 if (mode != VOIDmode && mode != GET_MODE (op))
4175 if (GET_CODE (op) == MEM)
4178 switch (GET_CODE (op))
4187 if (REG_P (op) && (REGNO (op) == regno))
4193 if (REG_P (XEXP (op, 0)) && (REGNO (XEXP (op, 0)) == regno))
4195 if (REG_P (XEXP (XEXP (op, 1), 1))
4196 && (REGNO (XEXP (XEXP (op, 1), 1)) == regno))
4202 rtx op0 = XEXP (op, 0);
4203 rtx op1 = XEXP (op, 1);
4205 if (((GET_CODE (op0) == REG) && (REGNO (op0) == regno))
4206 || ((GET_CODE (op1) == REG) && (REGNO (op1) == regno)))
4220 ar0_reg_operand (op, mode)
4222 enum machine_mode mode;
4224 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4229 ar0_mem_operand (op, mode)
4231 enum machine_mode mode;
4233 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4238 ar1_reg_operand (op, mode)
4240 enum machine_mode mode;
4242 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4247 ar1_mem_operand (op, mode)
4249 enum machine_mode mode;
4251 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4256 ar2_reg_operand (op, mode)
4258 enum machine_mode mode;
4260 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4265 ar2_mem_operand (op, mode)
4267 enum machine_mode mode;
4269 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4274 ar3_reg_operand (op, mode)
4276 enum machine_mode mode;
4278 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4283 ar3_mem_operand (op, mode)
4285 enum machine_mode mode;
4287 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4292 ar4_reg_operand (op, mode)
4294 enum machine_mode mode;
4296 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4301 ar4_mem_operand (op, mode)
4303 enum machine_mode mode;
4305 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4310 ar5_reg_operand (op, mode)
4312 enum machine_mode mode;
4314 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4319 ar5_mem_operand (op, mode)
4321 enum machine_mode mode;
4323 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4328 ar6_reg_operand (op, mode)
4330 enum machine_mode mode;
4332 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4337 ar6_mem_operand (op, mode)
4339 enum machine_mode mode;
4341 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4346 ar7_reg_operand (op, mode)
4348 enum machine_mode mode;
4350 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4355 ar7_mem_operand (op, mode)
4357 enum machine_mode mode;
4359 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4364 ir0_reg_operand (op, mode)
4366 enum machine_mode mode;
4368 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4373 ir0_mem_operand (op, mode)
4375 enum machine_mode mode;
4377 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4382 ir1_reg_operand (op, mode)
4384 enum machine_mode mode;
4386 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4391 ir1_mem_operand (op, mode)
4393 enum machine_mode mode;
4395 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4399 /* This is similar to operand_subword but allows autoincrement
4403 c4x_operand_subword (op, i, validate_address, mode)
4406 int validate_address;
4407 enum machine_mode mode;
4409 if (mode != HImode && mode != HFmode)
4410 fatal_insn ("c4x_operand_subword: invalid mode", op);
4412 if (mode == HFmode && REG_P (op))
4413 fatal_insn ("c4x_operand_subword: invalid operand", op);
4415 if (GET_CODE (op) == MEM)
4417 enum rtx_code code = GET_CODE (XEXP (op, 0));
4418 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4419 enum machine_mode submode;
4424 else if (mode == HFmode)
4431 return gen_rtx_MEM (submode, XEXP (op, 0));
4437 /* We could handle these with some difficulty.
4438 e.g., *p-- => *(p-=2); *(p+1). */
4439 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4445 fatal_insn ("c4x_operand_subword: invalid address", op);
4447 /* Even though offsettable_address_p considers (MEM
4448 (LO_SUM)) to be offsettable, it is not safe if the
4449 address is at the end of the data page since we also have
4450 to fix up the associated high PART. In this case where
4451 we are trying to split a HImode or HFmode memory
4452 reference, we would have to emit another insn to reload a
4453 new HIGH value. It's easier to disable LO_SUM memory references
4454 in HImode or HFmode and we probably get better code. */
4456 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4463 return operand_subword (op, i, validate_address, mode);
4466 /* Handle machine specific pragmas for compatibility with existing
4467 compilers for the C3x/C4x.
4470 ----------------------------------------------------------
4471 CODE_SECTION(symbol,"section") section("section")
4472 DATA_SECTION(symbol,"section") section("section")
4473 FUNC_CANNOT_INLINE(function)
4474 FUNC_EXT_CALLED(function)
4475 FUNC_IS_PURE(function) const
4476 FUNC_IS_SYSTEM(function)
4477 FUNC_NEVER_RETURNS(function) noreturn
4478 FUNC_NO_GLOBAL_ASG(function)
4479 FUNC_NO_IND_ASG(function)
4480 INTERRUPT(function) interrupt
4484 /* Parse a C4x pragma, of the form ( function [, "section"] ) \n.
4485 FUNC is loaded with the IDENTIFIER_NODE of the function, SECT with
4486 the STRING_CST node of the string. If SECT is null, then this
4487 pragma doesn't take a section string. Returns 0 for a good pragma,
4488 -1 for a malformed pragma. */
4489 #define BAD(msgid, arg) do { warning (msgid, arg); return -1; } while (0)
4491 static int (*c_lex_func) (tree *);
4494 c4x_init_pragma (get_token)
4495 int (*get_token) PARAMS ((tree *));
4497 c_lex_func = get_token;
4502 c4x_parse_pragma (name, func, sect)
4509 if (c_lex_func (&x) != CPP_OPEN_PAREN)
4510 BAD ("missing '(' after '#pragma %s' - ignored", name);
4512 if (c_lex_func (&f) != CPP_NAME)
4513 BAD ("missing function name in '#pragma %s' - ignored", name);
4517 if (c_lex_func (&x) != CPP_COMMA)
4518 BAD ("malformed '#pragma %s' - ignored", name);
4519 if (c_lex_func (&s) != CPP_STRING)
4520 BAD ("missing section name in '#pragma %s' - ignored", name);
4524 if (c_lex_func (&x) != CPP_CLOSE_PAREN)
4525 BAD ("missing ')' for '#pragma %s' - ignored", name);
4527 if (c_lex_func (&x) != CPP_EOF)
4528 warning ("junk at end of '#pragma %s'", name);
4535 c4x_pr_CODE_SECTION (pfile)
4536 cpp_reader *pfile ATTRIBUTE_UNUSED;
4540 if (c4x_parse_pragma ("CODE_SECTION", &func, §))
4542 code_tree = chainon (code_tree,
4543 build_tree_list (func,
4544 build_tree_list (NULL_TREE, sect)));
4548 c4x_pr_DATA_SECTION (pfile)
4549 cpp_reader *pfile ATTRIBUTE_UNUSED;
4553 if (c4x_parse_pragma ("DATA_SECTION", &func, §))
4555 data_tree = chainon (data_tree,
4556 build_tree_list (func,
4557 build_tree_list (NULL_TREE, sect)));
4561 c4x_pr_FUNC_IS_PURE (pfile)
4562 cpp_reader *pfile ATTRIBUTE_UNUSED;
4566 if (c4x_parse_pragma ("FUNC_IS_PURE", &func, 0))
4568 pure_tree = chainon (pure_tree, build_tree_list (func, NULL_TREE));
4572 c4x_pr_FUNC_NEVER_RETURNS (pfile)
4573 cpp_reader *pfile ATTRIBUTE_UNUSED;
4577 if (c4x_parse_pragma ("FUNC_NEVER_RETURNS", &func, 0))
4579 noreturn_tree = chainon (noreturn_tree, build_tree_list (func, NULL_TREE));
4583 c4x_pr_INTERRUPT (pfile)
4584 cpp_reader *pfile ATTRIBUTE_UNUSED;
4588 if (c4x_parse_pragma ("INTERRUPT", &func, 0))
4590 interrupt_tree = chainon (interrupt_tree, build_tree_list (func, NULL_TREE));
4593 /* Used for FUNC_CANNOT_INLINE, FUNC_EXT_CALLED, FUNC_IS_SYSTEM,
4594 FUNC_NO_GLOBAL_ASG, and FUNC_NO_IND_ASG. */
4596 c4x_pr_ignored (pfile)
4597 cpp_reader *pfile ATTRIBUTE_UNUSED;
4603 struct name_list *next;
4607 static struct name_list *global_head;
4608 static struct name_list *extern_head;
4611 /* Add NAME to list of global symbols and remove from external list if
4612 present on external list. */
4615 c4x_global_label (name)
4618 struct name_list *p, *last;
4620 /* Do not insert duplicate names, so linearly search through list of
4625 if (strcmp (p->name, name) == 0)
4629 p = (struct name_list *) permalloc (sizeof *p);
4630 p->next = global_head;
4634 /* Remove this name from ref list if present. */
4639 if (strcmp (p->name, name) == 0)
4642 last->next = p->next;
4644 extern_head = p->next;
4653 /* Add NAME to list of external symbols. */
4656 c4x_external_ref (name)
4659 struct name_list *p;
4661 /* Do not insert duplicate names. */
4665 if (strcmp (p->name, name) == 0)
4670 /* Do not insert ref if global found. */
4674 if (strcmp (p->name, name) == 0)
4678 p = (struct name_list *) permalloc (sizeof *p);
4679 p->next = extern_head;
4689 struct name_list *p;
4691 /* Output all external names that are not global. */
4695 fprintf (fp, "\t.ref\t");
4696 assemble_name (fp, p->name);
4700 fprintf (fp, "\t.end\n");
4705 c4x_check_attribute (attrib, list, decl, attributes)
4707 tree list, decl, *attributes;
4709 while (list != NULL_TREE
4710 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4711 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4712 list = TREE_CHAIN (list);
4714 *attributes = chainon (*attributes,
4715 build_tree_list (get_identifier (attrib),
4716 TREE_VALUE (list)));
4721 c4x_set_default_attributes(decl, attributes)
4722 tree decl, *attributes;
4724 switch (TREE_CODE (decl))
4727 c4x_check_attribute ("section", code_tree, decl, attributes);
4728 c4x_check_attribute ("const", pure_tree, decl, attributes);
4729 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4730 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4734 c4x_check_attribute ("section", data_tree, decl, attributes);
4743 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine
4744 specific attribute for TYPE. The attributes in ATTRIBUTES have
4745 previously been assigned to TYPE. */
4748 c4x_valid_type_attribute_p (type, attributes, identifier, args)
4750 tree attributes ATTRIBUTE_UNUSED;
4752 tree args ATTRIBUTE_UNUSED;
4754 if (TREE_CODE (type) != FUNCTION_TYPE)
4757 if (is_attribute_p ("interrupt", identifier))
4760 if (is_attribute_p ("assembler", identifier))
4763 if (is_attribute_p ("leaf_pretend", identifier))
4770 /* !!! FIXME to emit RPTS correctly. */
4773 c4x_rptb_rpts_p (insn, op)
4776 /* The next insn should be our label marking where the
4777 repeat block starts. */
4778 insn = NEXT_INSN (insn);
4779 if (GET_CODE (insn) != CODE_LABEL)
4781 /* Some insns may have been shifted between the RPTB insn
4782 and the top label... They were probably destined to
4783 be moved out of the loop. For now, let's leave them
4784 where they are and print a warning. We should
4785 probably move these insns before the repeat block insn. */
4787 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4792 /* Skip any notes. */
4793 insn = next_nonnote_insn (insn);
4795 /* This should be our first insn in the loop. */
4796 if (! INSN_P (insn))
4799 /* Skip any notes. */
4800 insn = next_nonnote_insn (insn);
4802 if (! INSN_P (insn))
4805 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4811 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4815 /* Check if register r11 is used as the destination of an insn. */
4828 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4829 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4831 if (INSN_P (x) && (set = single_set (x)))
4834 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4837 fmt = GET_RTX_FORMAT (GET_CODE (x));
4838 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4842 if (c4x_r11_set_p (XEXP (x, i)))
4845 else if (fmt[i] == 'E')
4846 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4847 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4854 /* The c4x sometimes has a problem when the insn before the laj insn
4855 sets the r11 register. Check for this situation. */
/* NOTE(review): fragment — the return type, parameter declaration,
   braces and the return statements of this function are not visible in
   this view.  Code preserved byte-for-byte; only comments added.
   From what is visible: the function inspects the insn preceding INSN
   and its callers presumably use the result to decide whether a nop
   must be emitted before a laj — TODO confirm against c4x.md.  */
4858 c4x_check_laj_p (insn)
/* Step back over notes to the previous real insn.  */
4861 insn = prev_nonnote_insn (insn);
4863 /* If this is the start of the function no nop is needed. */
4867 /* If the previous insn is a code label we have to insert a nop. This
4868 could be a jump or table jump. We can find the normal jumps by
4869 scanning the function but this will not find table jumps. */
4870 if (GET_CODE (insn) == CODE_LABEL)
4873 /* If the previous insn sets register r11 we have to insert a nop. */
4874 if (c4x_r11_set_p (insn))
4877 /* No nop needed. */
4882 /* Adjust the cost of a scheduling dependency. Return the new cost of
4883 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4884 A set of an address register followed by a use occurs a 2 cycle
4885 stall (reduced to a single cycle on the c40 using LDA), while
4886 a read of an address register followed by a use occurs a single cycle. */
/* Cycle penalties folded into the dependency cost below:
   SET_USE_COST    — write of an address reg followed by a use,
   SETLDA_USE_COST — same, but the write was done with LDA (c40),
   READ_USE_COST   — read of an address reg followed by a use.  */
4888 #define SET_USE_COST 3
4889 #define SETLDA_USE_COST 2
4890 #define READ_USE_COST 2
/* NOTE(review): fragment — the return type, parameter declarations,
   braces, `max` computation and the return statements are not visible
   in this view.  Code preserved byte-for-byte; only comments added.  */
4894 c4x_adjust_cost (insn, link, dep_insn, cost)
4900 /* Don't worry about this until we know what registers have been
/* Before reload, register assignment is unknown, so no adjustment.  */
4902 if (! reload_completed)
4905 /* How do we handle dependencies where a read followed by another
4906 read causes a pipeline stall? For example, a read of ar0 followed
4907 by the use of ar0 for a memory reference. It looks like we
4908 need to extend the scheduler to handle this case. */
4910 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4911 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4912 so only deal with insns we know about. */
4913 if (recog_memoized (dep_insn) < 0)
/* REG_NOTE_KIND of 0 means a true (read-after-write) data dependency.  */
4916 if (REG_NOTE_KIND (link) == 0)
4920 /* Data dependency; DEP_INSN writes a register that INSN reads some
/* Coarse test first: any group-1 register set/read by DEP_INSN paired
   with a group-1 use by INSN (per the md attributes).  */
4924 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4925 max = SET_USE_COST > max ? SET_USE_COST : max;
4926 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4927 max = READ_USE_COST > max ? READ_USE_COST : max;
4931 /* This could be significantly optimized. We should look
4932 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4933 insn uses ar0-ar7. We then test if the same register
4934 is used. The tricky bit is that some operands will
4935 use several registers... */
/* Per-register matrix: for each address register ar0..ar7 take the
   worst-case stall among plain set, LDA set, and read by DEP_INSN
   against a use by INSN.  The eight stanzas below are identical in
   shape, differing only in the register suffix.  */
4936 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4937 max = SET_USE_COST > max ? SET_USE_COST : max;
4938 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4939 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4940 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4941 max = READ_USE_COST > max ? READ_USE_COST : max;
4943 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4944 max = SET_USE_COST > max ? SET_USE_COST : max;
4945 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4946 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4947 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4948 max = READ_USE_COST > max ? READ_USE_COST : max;
4950 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4951 max = SET_USE_COST > max ? SET_USE_COST : max;
4952 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4953 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4954 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4955 max = READ_USE_COST > max ? READ_USE_COST : max;
4957 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4958 max = SET_USE_COST > max ? SET_USE_COST : max;
4959 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4960 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4961 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4962 max = READ_USE_COST > max ? READ_USE_COST : max;
4964 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4965 max = SET_USE_COST > max ? SET_USE_COST : max;
4966 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4967 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4968 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4969 max = READ_USE_COST > max ? READ_USE_COST : max;
4971 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4972 max = SET_USE_COST > max ? SET_USE_COST : max;
4973 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4974 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4975 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4976 max = READ_USE_COST > max ? READ_USE_COST : max;
4978 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4979 max = SET_USE_COST > max ? SET_USE_COST : max;
4980 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4981 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4982 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4983 max = READ_USE_COST > max ? READ_USE_COST : max;
4985 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4986 max = SET_USE_COST > max ? SET_USE_COST : max;
4987 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4988 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4989 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4990 max = READ_USE_COST > max ? READ_USE_COST : max;
/* Index registers ir0/ir1: same idea, but no read-use stanza here
   (no get_attr_readir* tests appear in this view).  */
4992 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4993 max = SET_USE_COST > max ? SET_USE_COST : max;
4994 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4995 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4997 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4998 max = SET_USE_COST > max ? SET_USE_COST : max;
4999 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
5000 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5006 /* For other data dependencies, the default cost specified in the
5010 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
5012 /* Anti dependency; DEP_INSN reads a register that INSN writes some
5015 /* For c4x anti dependencies, the cost is 0. */
5018 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
5020 /* Output dependency; DEP_INSN writes a register that INSN writes some
5023 /* For c4x output dependencies, the cost is 0. */
/* Register the c4x machine-specific builtin functions with the front
   end.  Each builtin_function call supplies the C-level name, a
   function type built from tree_cons chains terminated by ENDLINK, the
   C4X_BUILTIN_* code dispatched on in c4x_expand_builtin, and
   BUILT_IN_MD to mark it as machine-dependent.
   NOTE(review): fragment — the return type line, braces, and the lines
   giving each builtin's return type are not visible in this view.
   Code preserved byte-for-byte; only comments added.  */
5031 c4x_init_builtins (endlink)
/* fast_ftoi: float->int conversion expanded via C4X_BUILTIN_FIX.  */
5034 builtin_function ("fast_ftoi",
5037 tree_cons (NULL_TREE, double_type_node, endlink)),
5038 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL_PTR);
/* ansi_ftoi: ANSI-rounding variant, C4X_BUILTIN_FIX_ANSI.  */
5039 builtin_function ("ansi_ftoi",
5042 tree_cons (NULL_TREE, double_type_node, endlink)),
5043 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL_PTR);
/* fast_imult: two integer arguments, C4X_BUILTIN_MPYI.  */
5045 builtin_function ("fast_imult",
5048 tree_cons (NULL_TREE, integer_type_node,
5049 tree_cons (NULL_TREE,
5050 integer_type_node, endlink))),
5051 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL_PTR);
/* toieee / frieee: conversions to and from IEEE floating format.  */
5054 builtin_function ("toieee",
5057 tree_cons (NULL_TREE, double_type_node, endlink)),
5058 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL_PTR);
5059 builtin_function ("frieee",
5062 tree_cons (NULL_TREE, double_type_node, endlink)),
5063 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL_PTR);
/* fast_invf: fast reciprocal, C4X_BUILTIN_RCPF.  */
5064 builtin_function ("fast_invf",
5067 tree_cons (NULL_TREE, double_type_node, endlink)),
5068 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL_PTR);
5074 c4x_expand_builtin (exp, target, subtarget, mode, ignore)
5077 rtx subtarget ATTRIBUTE_UNUSED;
5078 enum machine_mode mode ATTRIBUTE_UNUSED;
5079 int ignore ATTRIBUTE_UNUSED;
5081 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5082 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5083 tree arglist = TREE_OPERAND (exp, 1);
5089 case C4X_BUILTIN_FIX:
5090 arg0 = TREE_VALUE (arglist);
5091 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5092 r0 = protect_from_queue (r0, 0);
5093 if (! target || ! register_operand (target, QImode))
5094 target = gen_reg_rtx (QImode);
5095 emit_insn (gen_fixqfqi_clobber (target, r0));
5098 case C4X_BUILTIN_FIX_ANSI:
5099 arg0 = TREE_VALUE (arglist);
5100 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5101 r0 = protect_from_queue (r0, 0);
5102 if (! target || ! register_operand (target, QImode))
5103 target = gen_reg_rtx (QImode);
5104 emit_insn (gen_fix_truncqfqi2 (target, r0));
5107 case C4X_BUILTIN_MPYI:
5110 arg0 = TREE_VALUE (arglist);
5111 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5112 r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
5113 r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
5114 r0 = protect_from_queue (r0, 0);
5115 r1 = protect_from_queue (r1, 0);
5116 if (! target || ! register_operand (target, QImode))
5117 target = gen_reg_rtx (QImode);
5118 emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
5121 case C4X_BUILTIN_TOIEEE:
5124 arg0 = TREE_VALUE (arglist);
5125 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5126 r0 = protect_from_queue (r0, 0);
5127 if (! target || ! register_operand (target, QFmode))
5128 target = gen_reg_rtx (QFmode);
5129 emit_insn (gen_toieee (target, r0));
5132 case C4X_BUILTIN_FRIEEE:
5135 arg0 = TREE_VALUE (arglist);
5136 if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
5137 put_var_into_stack (arg0);
5138 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5139 r0 = protect_from_queue (r0, 0);
5140 if (register_operand (r0, QFmode))
5142 r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
5143 emit_move_insn (r1, r0);
5146 if (! target || ! register_operand (target, QFmode))
5147 target = gen_reg_rtx (QFmode);
5148 emit_insn (gen_frieee (target, r0));
5151 case C4X_BUILTIN_RCPF:
5154 arg0 = TREE_VALUE (arglist);
5155 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5156 r0 = protect_from_queue (r0, 0);
5157 if (! target || ! register_operand (target, QFmode))
5158 target = gen_reg_rtx (QFmode);
5159 emit_insn (gen_rcpfqf_clobber (target, r0));