1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
49 #include "c4x-protos.h"
51 #include "target-def.h"
/* SYMBOL_REFs for the float <-> HImode conversion library routines.
   These are registered as GC roots in c4x_add_gc_roots below.  */
55 rtx fix_truncqfhi2_libfunc;
56 rtx fixuns_truncqfhi2_libfunc;
57 rtx fix_trunchfhi2_libfunc;
58 rtx fixuns_trunchfhi2_libfunc;
59 rtx floathiqf2_libfunc;
60 rtx floatunshiqf2_libfunc;
61 rtx floathihf2_libfunc;
62 rtx floatunshihf2_libfunc;
/* Non-zero if the current function is to be treated as a leaf function.
   Set in c4x_expand_prologue (for interrupt functions) from
   c4x_leaf_function_p and consulted by c4x_isr_reg_used_p.  */
64 static int c4x_leaf_function;
/* Register names used when printing float-mode register operands.  */
66 static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
68 /* Array of the smallest class containing reg number REGNO, indexed by
69 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
70 registers are available and set the class to NO_REGS for registers
71 that the target switches say are unavailable. */
/* NOTE(review): entries with two names (DIE/IE, IIE/IF, IIF/IOF) appear
   to be the C4x/C3x names for the same register slot -- confirm against
   the register definitions in c4x.h.  */
73 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
75 /* Reg Modes Saved. */
76 R0R1_REGS, /* R0 QI, QF, HF No. */
77 R0R1_REGS, /* R1 QI, QF, HF No. */
78 R2R3_REGS, /* R2 QI, QF, HF No. */
79 R2R3_REGS, /* R3 QI, QF, HF No. */
80 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
81 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
82 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
83 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
84 ADDR_REGS, /* AR0 QI No. */
85 ADDR_REGS, /* AR1 QI No. */
86 ADDR_REGS, /* AR2 QI No. */
87 ADDR_REGS, /* AR3 QI QI. */
88 ADDR_REGS, /* AR4 QI QI. */
89 ADDR_REGS, /* AR5 QI QI. */
90 ADDR_REGS, /* AR6 QI QI. */
91 ADDR_REGS, /* AR7 QI QI. */
92 DP_REG, /* DP QI No. */
93 INDEX_REGS, /* IR0 QI No. */
94 INDEX_REGS, /* IR1 QI No. */
95 BK_REG, /* BK QI QI. */
96 SP_REG, /* SP QI No. */
97 ST_REG, /* ST CC No. */
98 NO_REGS, /* DIE/IE No. */
99 NO_REGS, /* IIE/IF No. */
100 NO_REGS, /* IIF/IOF No. */
101 INT_REGS, /* RS QI No. */
102 INT_REGS, /* RE QI No. */
103 RC_REG, /* RC QI No. */
104 EXT_REGS, /* R8 QI, QF, HF QI. */
105 EXT_REGS, /* R9 QI, QF, HF No. */
106 EXT_REGS, /* R10 QI, QF, HF No. */
107 EXT_REGS, /* R11 QI, QF, HF No. */
/* Mode to use when caller-saving each hard register, indexed by REGNO.
   Presumably the widest mode the register can hold (VOIDmode = register
   is never caller-saved) -- confirm against the caller-save macros in
   c4x.h.  */
110 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
112 /* Reg Modes Saved. */
113 HFmode, /* R0 QI, QF, HF No. */
114 HFmode, /* R1 QI, QF, HF No. */
115 HFmode, /* R2 QI, QF, HF No. */
116 HFmode, /* R3 QI, QF, HF No. */
117 QFmode, /* R4 QI, QF, HF QI. */
118 QFmode, /* R5 QI, QF, HF QI. */
119 QImode, /* R6 QI, QF, HF QF. */
120 QImode, /* R7 QI, QF, HF QF. */
121 QImode, /* AR0 QI No. */
122 QImode, /* AR1 QI No. */
123 QImode, /* AR2 QI No. */
124 QImode, /* AR3 QI QI. */
125 QImode, /* AR4 QI QI. */
126 QImode, /* AR5 QI QI. */
127 QImode, /* AR6 QI QI. */
128 QImode, /* AR7 QI QI. */
129 VOIDmode, /* DP QI No. */
130 QImode, /* IR0 QI No. */
131 QImode, /* IR1 QI No. */
132 QImode, /* BK QI QI. */
133 VOIDmode, /* SP QI No. */
134 VOIDmode, /* ST CC No. */
135 VOIDmode, /* DIE/IE No. */
136 VOIDmode, /* IIE/IF No. */
137 VOIDmode, /* IIF/IOF No. */
138 QImode, /* RS QI No. */
139 QImode, /* RE QI No. */
140 VOIDmode, /* RC QI No. */
141 QFmode, /* R8 QI, QF, HF QI. */
142 HFmode, /* R9 QI, QF, HF No. */
143 HFmode, /* R10 QI, QF, HF No. */
144 HFmode, /* R11 QI, QF, HF No. */
148 /* Test and compare insns in c4x.md store the information needed to
149 generate branch and scc insns here. */
151 struct rtx_def *c4x_compare_op0 = NULL_RTX;
152 struct rtx_def *c4x_compare_op1 = NULL_RTX;
/* Raw string values of the -m option arguments; parsed into the
   accompanying ints in c4x_override_options.  */
154 const char *c4x_rpts_cycles_string;
155 int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
156 const char *c4x_cpu_version_string;
157 int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
159 /* Pragma definitions. */
/* Trees recording decls named by the target pragmas; registered as GC
   roots in c4x_add_gc_roots.  */
161 tree code_tree = NULL_TREE;
162 tree data_tree = NULL_TREE;
163 tree pure_tree = NULL_TREE;
164 tree noreturn_tree = NULL_TREE;
165 tree interrupt_tree = NULL_TREE;
167 /* Forward declarations of the static helper functions defined later
   in this file. */
168 static void c4x_add_gc_roots PARAMS ((void));
169 static int c4x_isr_reg_used_p PARAMS ((unsigned int));
170 static int c4x_leaf_function_p PARAMS ((void));
171 static int c4x_assembler_function_p PARAMS ((void));
172 static int c4x_immed_float_p PARAMS ((rtx));
173 static int c4x_a_register PARAMS ((rtx));
174 static int c4x_x_register PARAMS ((rtx));
175 static int c4x_immed_int_constant PARAMS ((rtx));
176 static int c4x_immed_float_constant PARAMS ((rtx));
177 static int c4x_K_constant PARAMS ((rtx));
178 static int c4x_N_constant PARAMS ((rtx));
179 static int c4x_O_constant PARAMS ((rtx));
180 static int c4x_R_indirect PARAMS ((rtx));
181 static int c4x_S_indirect PARAMS ((rtx));
182 static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
183 static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
184 enum machine_mode, int));
185 static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
186 static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
187 static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
188 static int c4x_r11_set_p PARAMS ((rtx));
189 static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
190 static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
191 static tree c4x_handle_fntype_attribute PARAMS ((tree *, tree, tree, int, bool *));
/* Tentative declaration; the table itself is defined later in this file
   and plugged into TARGET_ATTRIBUTE_TABLE below.  */
192 const struct attribute_spec c4x_attribute_table[];
193 static void c4x_insert_attributes PARAMS ((tree, tree *));
194 static void c4x_asm_named_section PARAMS ((const char *, unsigned int));
195 static int c4x_adjust_cost PARAMS ((rtx, rtx, rtx, int));
197 /* Initialize the GCC target structure. */
198 #undef TARGET_ASM_BYTE_OP
199 #define TARGET_ASM_BYTE_OP "\t.word\t"
/* The aligned HI/SI output ops are disabled (NULL); all integer data is
   emitted with the .word byte op above.  */
200 #undef TARGET_ASM_ALIGNED_HI_OP
201 #define TARGET_ASM_ALIGNED_HI_OP NULL
202 #undef TARGET_ASM_ALIGNED_SI_OP
203 #define TARGET_ASM_ALIGNED_SI_OP NULL
205 #undef TARGET_ATTRIBUTE_TABLE
206 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
208 #undef TARGET_INSERT_ATTRIBUTES
209 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
211 #undef TARGET_INIT_BUILTINS
212 #define TARGET_INIT_BUILTINS c4x_init_builtins
214 #undef TARGET_EXPAND_BUILTIN
215 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
217 #undef TARGET_SCHED_ADJUST_COST
218 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
220 struct gcc_target targetm = TARGET_INITIALIZER;
222 /* Called to register all of our global variables with the garbage
   collector so that live roots (compare operands, pragma trees and the
   conversion libfunc SYMBOL_REFs) are not collected.  */
228 ggc_add_rtx_root (&c4x_compare_op0, 1);
229 ggc_add_rtx_root (&c4x_compare_op1, 1);
230 ggc_add_tree_root (&code_tree, 1);
231 ggc_add_tree_root (&data_tree, 1);
232 ggc_add_tree_root (&pure_tree, 1);
233 ggc_add_tree_root (&noreturn_tree, 1);
234 ggc_add_tree_root (&interrupt_tree, 1);
235 ggc_add_rtx_root (&smulhi3_libfunc, 1);
236 ggc_add_rtx_root (&umulhi3_libfunc, 1);
237 ggc_add_rtx_root (&fix_truncqfhi2_libfunc, 1);
238 ggc_add_rtx_root (&fixuns_truncqfhi2_libfunc, 1);
239 ggc_add_rtx_root (&fix_trunchfhi2_libfunc, 1);
240 ggc_add_rtx_root (&fixuns_trunchfhi2_libfunc, 1);
241 ggc_add_rtx_root (&floathiqf2_libfunc, 1);
242 ggc_add_rtx_root (&floatunshiqf2_libfunc, 1);
243 ggc_add_rtx_root (&floathihf2_libfunc, 1);
244 ggc_add_rtx_root (&floatunshihf2_libfunc, 1);
248 /* Override command line options.
249 Called once after all options have been parsed.
250 Mostly we process the processor
251 type and sometimes adjust other TARGET_ options. */
254 c4x_override_options ()
256 if (c4x_rpts_cycles_string)
257 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
/* Pick the CPU version implied by the -m30/-m31/... target switches;
   40 is the fallback default.  */
262 c4x_cpu_version = 30;
264 c4x_cpu_version = 31;
266 c4x_cpu_version = 32;
268 c4x_cpu_version = 33;
270 c4x_cpu_version = 40;
272 c4x_cpu_version = 44;
274 c4x_cpu_version = 40;
276 /* -mcpu=xx overrides -m40 etc. */
277 if (c4x_cpu_version_string)
279 const char *p = c4x_cpu_version_string;
281 /* Also allow -mcpu=c30 etc. */
282 if (*p == 'c' || *p == 'C')
284 c4x_cpu_version = atoi (p);
/* Re-derive the per-CPU target flags from the final version number.  */
287 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
288 C40_FLAG | C44_FLAG);
290 switch (c4x_cpu_version)
292 case 30: target_flags |= C30_FLAG; break;
293 case 31: target_flags |= C31_FLAG; break;
294 case 32: target_flags |= C32_FLAG; break;
295 case 33: target_flags |= C33_FLAG; break;
296 case 40: target_flags |= C40_FLAG; break;
297 case 44: target_flags |= C44_FLAG; break;
/* Unrecognized -mcpu value: warn and fall back to a C40.  */
299 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
300 c4x_cpu_version = 40;
301 target_flags |= C40_FLAG;
/* All C3x family members share the C3X_FLAG.  */
304 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
305 target_flags |= C3X_FLAG;
307 target_flags &= ~C3X_FLAG;
309 /* Convert foo / 8.0 into foo * 0.125, etc. */
310 set_fast_math_flags();
312 /* We should phase out the following at some stage.
313 This provides compatibility with the old -mno-aliases option. */
314 if (! TARGET_ALIASES && ! flag_argument_noalias)
315 flag_argument_noalias = 1;
317 /* Register global variables with the garbage collector. */
322 /* This is called before c4x_override_options.
   LEVEL and SIZE are the -O level and -Os flag; both are ignored here
   because the adjustment below applies unconditionally.  */
325 c4x_optimization_options (level, size)
326 int level ATTRIBUTE_UNUSED;
327 int size ATTRIBUTE_UNUSED;
329 /* Scheduling before register allocation can screw up global
330 register allocation, especially for functions that use MPY||ADD
331 instructions. The benefit we gain we get by scheduling before
332 register allocation is probably marginal anyhow. */
333 flag_schedule_insns = 0;
337 /* Write an ASCII string of LEN bytes at PTR to STREAM as a sequence of
   .byte directives.  Printable runs are emitted as quoted strings
   (at most C4X_ASCII_LIMIT characters per string); other bytes are
   emitted as decimal values.  For the TI assembler (TARGET_TI), output
   lines are broken when they exceed 80 columns.  */
339 #define C4X_ASCII_LIMIT 40
342 c4x_output_ascii (stream, ptr, len)
347 char sbuf[C4X_ASCII_LIMIT + 1];
348 int s, l, special, first = 1, onlys;
351 fprintf (stream, "\t.byte\t");
353 for (s = l = 0; len > 0; --len, ++ptr)
357 /* Escape " and \ with a \". */
358 special = *ptr == '\"' || *ptr == '\\';
360 /* If printable - add to buff. */
361 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
366 if (s < C4X_ASCII_LIMIT - 1)
381 fprintf (stream, "\"%s\"", sbuf);
383 if (TARGET_TI && l >= 80 && len > 1)
385 fprintf (stream, "\n\t.byte\t")
403 fprintf (stream, "%d", *ptr);
405 if (TARGET_TI && l >= 80 && len > 1)
407 fprintf (stream, "\n\t.byte\t");
/* Flush any remaining buffered printable characters.  */
418 fprintf (stream, "\"%s\"", sbuf);
421 fputc ('\n', stream);
/* Return non-zero if hard register REGNO can hold a value of mode MODE.  */
426 c4x_hard_regno_mode_ok (regno, mode)
428 enum machine_mode mode;
433 case Pmode: /* Pointer (24/32 bits). */
435 case QImode: /* Integer (32 bits). */
436 return IS_INT_REGNO (regno);
438 case QFmode: /* Float, Double (32 bits). */
439 case HFmode: /* Long Double (40 bits). */
440 return IS_EXT_REGNO (regno);
442 case CCmode: /* Condition Codes. */
443 case CC_NOOVmode: /* Condition Codes. */
444 return IS_ST_REGNO (regno);
446 case HImode: /* Long Long (64 bits). */
447 /* We need two registers to store long longs. Note that
448 it is much easier to constrain the first register
449 to start on an even boundary. */
450 return IS_INT_REGNO (regno)
451 && IS_INT_REGNO (regno + 1)
455 return 0; /* We don't support these modes. */
461 /* Return non-zero if REGNO1 can be renamed to REGNO2. */
463 c4x_hard_regno_rename_ok (regno1, regno2)
467 /* We cannot copy call saved registers from mode QI into QF or from
   QF into QI (continuation of comment elided -- see the two checks
   below). */
469 if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
471 if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
473 /* We cannot copy from an extended (40 bit) register to a standard
474 (32 bit) register because we only set the condition codes for
475 extended registers. */
476 if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
478 if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
483 /* The TI C3x C compiler register argument runtime model uses 6 registers,
484 AR2, R2, R3, RC, RS, RE.
486 The first two floating point arguments (float, double, long double)
487 that are found scanning from left to right are assigned to R2 and R3.
489 The remaining integer (char, short, int, long) or pointer arguments
490 are assigned to the remaining registers in the order AR2, R2, R3,
491 RC, RS, RE when scanning left to right, except for the last named
492 argument prior to an ellipsis denoting variable number of
493 arguments. We don't have to worry about the latter condition since
494 function.c treats the last named argument as anonymous (unnamed).
496 All arguments that cannot be passed in registers are pushed onto
497 the stack in reverse order (right to left). GCC handles that for us.
499 c4x_init_cumulative_args() is called at the start, so we can parse
500 the args to see how many floating point arguments and how many
501 integer (or pointer) arguments there are. c4x_function_arg() is
502 then called (sometimes repeatedly) for each argument (parsed left
503 to right) to obtain the register to pass the argument in, or zero
504 if the argument is to be passed on the stack. Once the compiler is
505 happy, c4x_function_arg_advance() is called.
507 Don't use R0 to pass arguments in, we use 0 to indicate a stack
   argument. */
/* Integer argument registers.  The row index is the number of float
   arguments already assigned to R2/R3 (0, 1 or 2), since those registers
   then become unavailable for integer arguments; zero entries mean "pass
   on the stack".  Indexed in c4x_function_arg as
   c4x_int_reglist[maxfloats][ints].  */
510 static const int c4x_int_reglist[3][6] =
512 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
513 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
514 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
517 static int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
520 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
521 function whose data type is FNTYPE.
522 For a library call, FNTYPE is 0.
   Pre-scans the prototype's argument list to count how many float
   (QFmode/HFmode) and integer/pointer (QImode/Pmode) arguments will be
   passed, which c4x_function_arg later uses to choose registers.  */
525 c4x_init_cumulative_args (cum, fntype, libname)
526 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
527 tree fntype; /* Tree ptr for function decl. */
528 rtx libname; /* SYMBOL_REF of library name or 0. */
530 tree param, next_param;
532 cum->floats = cum->ints = 0;
/* The fprintf calls below are debug tracing (presumably guarded by a
   TARGET_DEBUG-style condition on elided lines -- confirm).  */
539 fprintf (stderr, "\nc4x_init_cumulative_args (");
542 tree ret_type = TREE_TYPE (fntype);
544 fprintf (stderr, "fntype code = %s, ret code = %s",
545 tree_code_name[(int) TREE_CODE (fntype)],
546 tree_code_name[(int) TREE_CODE (ret_type)]);
549 fprintf (stderr, "no fntype");
552 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
555 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
/* Walk the declared parameter types, counting floats and ints.  */
557 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
558 param; param = next_param)
562 next_param = TREE_CHAIN (param);
564 type = TREE_VALUE (param);
565 if (type && type != void_type_node)
567 enum machine_mode mode;
569 /* If the last arg doesn't have void type then we have
570 variable arguments. */
574 if ((mode = TYPE_MODE (type)))
576 if (! MUST_PASS_IN_STACK (mode, type))
578 /* Look for float, double, or long double argument. */
579 if (mode == QFmode || mode == HFmode)
581 /* Look for integer, enumeral, boolean, char, or pointer
583 else if (mode == QImode || mode == Pmode)
592 fprintf (stderr, "%s%s, args = %d)\n",
593 cum->prototype ? ", prototype" : "",
594 cum->var ? ", variable args" : "",
599 /* Update the data in CUM to advance over an argument
600 of mode MODE and data type TYPE.
601 (TYPE is null for libcalls where that information may not be available.) */
604 c4x_function_arg_advance (cum, mode, type, named)
605 CUMULATIVE_ARGS *cum; /* Current arg information. */
606 enum machine_mode mode; /* Current arg mode. */
607 tree type; /* Type of the arg or 0 if lib support. */
608 int named; /* Whether or not the argument was named. */
/* Debug trace (presumably guarded by an elided TARGET_DEBUG-style
   condition -- confirm).  */
611 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
612 GET_MODE_NAME (mode), named);
616 && ! MUST_PASS_IN_STACK (mode, type))
618 /* Look for float, double, or long double argument. */
619 if (mode == QFmode || mode == HFmode)
621 /* Look for integer, enumeral, boolean, char, or pointer argument. */
622 else if (mode == QImode || mode == Pmode)
625 else if (! TARGET_MEMPARM && ! type)
627 /* Handle libcall arguments. */
628 if (mode == QFmode || mode == HFmode)
630 else if (mode == QImode || mode == Pmode)
637 /* Define where to put the arguments to a function. Value is zero to
638 push the argument on the stack, or a hard register in which to
   pass the argument.
641 MODE is the argument's machine mode.
642 TYPE is the data type of the argument (as a tree).
643 This is null for libcalls where that information may
   not be available.
645 CUM is a variable of type CUMULATIVE_ARGS which gives info about
646 the preceding args and about the function being called.
647 NAMED is nonzero if this argument is a named parameter
648 (otherwise it is an extra parameter matching an ellipsis). */
651 c4x_function_arg (cum, mode, type, named)
652 CUMULATIVE_ARGS *cum; /* Current arg information. */
653 enum machine_mode mode; /* Current arg mode. */
654 tree type; /* Type of the arg or 0 if lib support. */
655 int named; /* != 0 for normal args, == 0 for ... args. */
657 int reg = 0; /* Default to passing argument on stack. */
661 /* We can handle at most 2 floats in R2, R3. */
662 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
664 /* We can handle at most 6 integers minus number of floats passed
   in registers. */
666 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
667 6 - cum->maxfloats : cum->ints;
669 /* If there is no prototype, assume all the arguments are integers. */
670 if (! cum->prototype)
673 cum->ints = cum->floats = 0;
677 /* This marks the last argument. We don't need to pass this through
   to the call. */
679 if (type == void_type_node)
685 && ! MUST_PASS_IN_STACK (mode, type))
687 /* Look for float, double, or long double argument. */
688 if (mode == QFmode || mode == HFmode)
690 if (cum->floats < cum->maxfloats)
691 reg = c4x_fp_reglist[cum->floats]
693 /* Look for integer, enumeral, boolean, char, or pointer argument. */
694 else if (mode == QImode || mode == Pmode)
696 if (cum->ints < cum->maxints)
697 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
700 else if (! TARGET_MEMPARM && ! type)
702 /* We could use a different argument calling model for libcalls,
703 since we're only calling functions in libgcc. Thus we could
704 pass arguments for long longs in registers rather than on the
705 stack. In the meantime, use the odd TI format. We make the
706 assumption that we won't have more than two floating point
707 args, six integer args, and that all the arguments are of the
   same mode. */
709 if (mode == QFmode || mode == HFmode)
710 reg = c4x_fp_reglist[cum->floats];
711 else if (mode == QImode || mode == Pmode)
712 reg = c4x_int_reglist[0][cum->ints];
/* Debug trace (presumably guarded by an elided TARGET_DEBUG-style
   condition -- confirm).  */
717 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
718 GET_MODE_NAME (mode), named);
720 fprintf (stderr, ", reg=%s", reg_names[reg]);
722 fprintf (stderr, ", stack");
723 fprintf (stderr, ")\n");
726 return gen_rtx_REG (mode, reg);
/* Implement va_start: skip the two words of return-value/frame linkage
   for non-stdarg (varargs-style) functions, then defer to the standard
   expander.  */
733 c4x_va_start (stdarg_p, valist, nextarg)
738 nextarg = plus_constant (nextarg, stdarg_p ? 0 : UNITS_PER_WORD * 2);
740 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
744 /* C[34]x arguments grow in weird ways (downwards) that the standard
745 varargs stuff can't handle.  Fetch the next argument by
   pre-decrementing the va_list pointer by the argument's size.  */
747 c4x_va_arg (valist, type)
752 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
753 build_int_2 (int_size_in_bytes (type), 0));
754 TREE_SIDE_EFFECTS (t) = 1;
756 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
/* Return non-zero if hard register REGNO needs to be saved/restored by
   an interrupt service routine's prologue/epilogue.  */
761 c4x_isr_reg_used_p (regno)
764 /* Don't save/restore FP or ST, we handle them separately. */
765 if (regno == FRAME_POINTER_REGNUM
766 || IS_ST_REGNO (regno))
769 /* We could be a little smarter about saving/restoring DP.
770 We'll only save if for the big memory model or if
771 we're paranoid. ;-) */
772 if (IS_DP_REGNO (regno))
773 return ! TARGET_SMALL || TARGET_PARANOID;
775 /* Only save/restore regs in leaf function that are used. */
776 if (c4x_leaf_function)
777 return regs_ever_live[regno] && fixed_regs[regno] == 0;
779 /* Only save/restore regs that are used by the ISR and regs
780 that are likely to be used by functions the ISR calls
781 if they are not fixed. */
782 return IS_EXT_REGNO (regno)
783 || ((regs_ever_live[regno] || call_used_regs[regno])
784 && fixed_regs[regno] == 0);
/* Return non-zero if the current function can be treated as a leaf
   function, either because it genuinely makes no calls or because the
   user asserted so via the leaf_pretend attribute.  */
789 c4x_leaf_function_p ()
791 /* A leaf function makes no calls, so we only need
792 to save/restore the registers we actually use.
793 For the global variable leaf_function to be set, we need
794 to define LEAF_REGISTERS and all that it entails.
795 Let's check ourselves... */
797 if (lookup_attribute ("leaf_pretend",
798 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
801 /* Use the leaf_pretend attribute at your own risk. This is a hack
802 to speed up ISRs that call a function infrequently where the
803 overhead of saving and restoring the additional registers is not
804 warranted. You must save and restore the additional registers
805 required by the called function. Caveat emptor. Here's enough
   rope to hang yourself. */
808 if (leaf_function_p ())
/* Return non-zero if the current function carries the "assembler" or
   "naked" attribute, i.e. no prologue/epilogue should be generated.  */
816 c4x_assembler_function_p ()
820 type = TREE_TYPE (current_function_decl);
821 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
822 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
/* Return non-zero if the current function is an interrupt service
   routine, either via the "interrupt" attribute or via the TI naming
   convention c_intNN.  */
827 c4x_interrupt_function_p ()
829 if (lookup_attribute ("interrupt",
830 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
833 /* Look for TI style c_intnn. */
834 return current_function_name[0] == 'c'
835 && current_function_name[1] == '_'
836 && current_function_name[2] == 'i'
837 && current_function_name[3] == 'n'
838 && current_function_name[4] == 't'
839 && ISDIGIT (current_function_name[5])
840 && ISDIGIT (current_function_name[6]);
/* Emit the RTL for the function prologue: nothing for assembler/naked
   functions, a register-saving prologue for interrupt functions, and
   the standard frame-pointer/stack setup plus call-saved register
   pushes otherwise.  All frame insns are marked RTX_FRAME_RELATED_P
   for unwind/debug info.  */
844 c4x_expand_prologue ()
847 int size = get_frame_size ();
850 /* In functions where ar3 is not used but frame pointers are still
851 specified, frame pointers are not adjusted (if >= -O2) and this
852 is used so it won't needlessly push the frame pointer. */
855 /* For __assembler__ function don't build a prologue. */
856 if (c4x_assembler_function_p ())
861 /* For __interrupt__ function build specific prologue. */
862 if (c4x_interrupt_function_p ())
864 c4x_leaf_function = c4x_leaf_function_p ();
866 insn = emit_insn (gen_push_st ());
867 RTX_FRAME_RELATED_P (insn) = 1;
870 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
871 RTX_FRAME_RELATED_P (insn) = 1;
872 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
873 gen_rtx_REG (QImode, SP_REGNO)));
874 RTX_FRAME_RELATED_P (insn) = 1;
875 /* We require that an ISR uses fewer than 32768 words of
876 local variables, otherwise we have to go to lots of
877 effort to save a register, load it with the desired size,
878 adjust the stack pointer, and then restore the modified
879 register. Frankly, I think it is a poor ISR that
880 requires more than 32767 words of local temporary
   storage! */
883 error ("ISR %s requires %d words of local vars, max is 32767",
884 current_function_name, size);
886 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
887 gen_rtx_REG (QImode, SP_REGNO),
889 RTX_FRAME_RELATED_P (insn) = 1;
/* Save every register an ISR might clobber (see c4x_isr_reg_used_p);
   extended registers get both their integer and float parts pushed.  */
891 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
893 if (c4x_isr_reg_used_p (regno))
895 if (regno == DP_REGNO)
897 insn = emit_insn (gen_push_dp ());
898 RTX_FRAME_RELATED_P (insn) = 1;
902 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
903 RTX_FRAME_RELATED_P (insn) = 1;
904 if (IS_EXT_REGNO (regno))
906 insn = emit_insn (gen_pushqf
907 (gen_rtx_REG (QFmode, regno)));
908 RTX_FRAME_RELATED_P (insn) = 1;
913 /* We need to clear the repeat mode flag if the ISR is
914 going to use a RPTB instruction or uses the RC, RS, or RE
   registers. */
916 if (regs_ever_live[RC_REGNO]
917 || regs_ever_live[RS_REGNO]
918 || regs_ever_live[RE_REGNO])
920 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
921 RTX_FRAME_RELATED_P (insn) = 1;
924 /* Reload DP reg if we are paranoid about some turkey
925 violating small memory model rules. */
926 if (TARGET_SMALL && TARGET_PARANOID)
928 insn = emit_insn (gen_set_ldp_prologue
929 (gen_rtx_REG (QImode, DP_REGNO),
930 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
931 RTX_FRAME_RELATED_P (insn) = 1;
/* Normal (non-ISR) function: establish the AR3 frame pointer if one
   is actually needed.  */
936 if (frame_pointer_needed)
939 || (current_function_args_size != 0)
942 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
943 RTX_FRAME_RELATED_P (insn) = 1;
944 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
945 gen_rtx_REG (QImode, SP_REGNO)));
946 RTX_FRAME_RELATED_P (insn) = 1;
951 /* Since ar3 is not used, we don't need to push it. */
957 /* If we use ar3, we need to push it. */
959 if ((size != 0) || (current_function_args_size != 0))
961 /* If we are omitting the frame pointer, we still have
962 to make space for it so the offsets are correct
963 unless we don't use anything on the stack at all. */
970 /* Local vars are too big, it will take multiple operations
   to increment SP (built in R1 first since an immediate add is
   limited to 16 bits). */
974 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
975 GEN_INT(size >> 16)));
976 RTX_FRAME_RELATED_P (insn) = 1;
977 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
978 gen_rtx_REG (QImode, R1_REGNO),
980 RTX_FRAME_RELATED_P (insn) = 1;
984 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
985 GEN_INT(size & ~0xffff)));
986 RTX_FRAME_RELATED_P (insn) = 1;
988 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
989 gen_rtx_REG (QImode, R1_REGNO),
990 GEN_INT(size & 0xffff)));
991 RTX_FRAME_RELATED_P (insn) = 1;
992 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
993 gen_rtx_REG (QImode, SP_REGNO),
994 gen_rtx_REG (QImode, R1_REGNO)));
995 RTX_FRAME_RELATED_P (insn) = 1;
999 /* Local vars take up less than 32767 words, so we can directly
   add the size to SP. */
1001 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
1002 gen_rtx_REG (QImode, SP_REGNO),
1004 RTX_FRAME_RELATED_P (insn) = 1;
/* Push the call-saved registers this function actually uses.  */
1007 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1009 if (regs_ever_live[regno] && ! call_used_regs[regno])
1011 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1013 if (TARGET_PRESERVE_FLOAT)
1015 insn = emit_insn (gen_pushqi
1016 (gen_rtx_REG (QImode, regno)));
1017 RTX_FRAME_RELATED_P (insn) = 1;
1019 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
1020 RTX_FRAME_RELATED_P (insn) = 1;
1022 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
1024 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
1025 RTX_FRAME_RELATED_P (insn) = 1;
/* Emit the RTL for the function epilogue, mirroring c4x_expand_prologue:
   bare return for assembler/naked functions, register-restoring epilogue
   for interrupt functions, and call-saved register pops plus stack/frame
   teardown otherwise.  */
1034 c4x_expand_epilogue()
1040 int size = get_frame_size ();
1042 /* For __assembler__ function build no epilogue. */
1043 if (c4x_assembler_function_p ())
1045 insn = emit_jump_insn (gen_return_from_epilogue ());
1046 RTX_FRAME_RELATED_P (insn) = 1;
1050 /* For __interrupt__ function build specific epilogue. */
1051 if (c4x_interrupt_function_p ())
/* Pop the ISR-saved registers in reverse order of the prologue.  */
1053 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1055 if (! c4x_isr_reg_used_p (regno))
1057 if (regno == DP_REGNO)
1059 insn = emit_insn (gen_pop_dp ());
1060 RTX_FRAME_RELATED_P (insn) = 1;
1064 /* We have to use unspec because the compiler will delete insns
1065 that are not call-saved. */
1066 if (IS_EXT_REGNO (regno))
1068 insn = emit_insn (gen_popqf_unspec
1069 (gen_rtx_REG (QFmode, regno)));
1070 RTX_FRAME_RELATED_P (insn) = 1;
1072 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1073 RTX_FRAME_RELATED_P (insn) = 1;
1078 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1079 gen_rtx_REG (QImode, SP_REGNO),
1081 RTX_FRAME_RELATED_P (insn) = 1;
1082 insn = emit_insn (gen_popqi
1083 (gen_rtx_REG (QImode, AR3_REGNO)));
1084 RTX_FRAME_RELATED_P (insn) = 1;
1086 insn = emit_insn (gen_pop_st ());
1087 RTX_FRAME_RELATED_P (insn) = 1;
1088 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1089 RTX_FRAME_RELATED_P (insn) = 1;
1093 if (frame_pointer_needed)
1096 || (current_function_args_size != 0)
/* Fetch the saved return address into R2 so we can return through it
   after tearing down the frame.  */
1100 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1101 gen_rtx_MEM (QImode,
1103 (QImode, gen_rtx_REG (QImode,
1106 RTX_FRAME_RELATED_P (insn) = 1;
1108 /* We already have the return value and the fp,
1109 so we need to add those to the stack. */
1116 /* Since ar3 is not used for anything, we don't need to
   pop it. */
1123 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1124 if (size || current_function_args_size)
1126 /* If we are omitting the frame pointer, we still have
1127 to make space for it so the offsets are correct
1128 unless we don't use anything on the stack at all. */
1133 /* Now restore the saved registers, putting in the delayed branch
   where required. */
1135 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1137 if (regs_ever_live[regno] && ! call_used_regs[regno])
1139 if (regno == AR3_REGNO && dont_pop_ar3)
1142 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1144 insn = emit_insn (gen_popqf_unspec
1145 (gen_rtx_REG (QFmode, regno)));
1146 RTX_FRAME_RELATED_P (insn) = 1;
1147 if (TARGET_PRESERVE_FLOAT)
1149 insn = emit_insn (gen_popqi_unspec
1150 (gen_rtx_REG (QImode, regno)));
1151 RTX_FRAME_RELATED_P (insn) = 1;
1156 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1157 RTX_FRAME_RELATED_P (insn) = 1;
1162 if (frame_pointer_needed)
1165 || (current_function_args_size != 0)
1168 /* Restore the old FP. */
1171 (gen_rtx_REG (QImode, AR3_REGNO),
1172 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1174 RTX_FRAME_RELATED_P (insn) = 1;
1180 /* Local vars are too big, it will take multiple operations
   to decrement SP (built in R3 first since an immediate subtract is
   limited to 16 bits). */
1184 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1185 GEN_INT(size >> 16)));
1186 RTX_FRAME_RELATED_P (insn) = 1;
1187 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1188 gen_rtx_REG (QImode, R3_REGNO),
1190 RTX_FRAME_RELATED_P (insn) = 1;
1194 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1195 GEN_INT(size & ~0xffff)));
1196 RTX_FRAME_RELATED_P (insn) = 1;
1198 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1199 gen_rtx_REG (QImode, R3_REGNO),
1200 GEN_INT(size & 0xffff)));
1201 RTX_FRAME_RELATED_P (insn) = 1;
1202 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1203 gen_rtx_REG (QImode, SP_REGNO),
1204 gen_rtx_REG (QImode, R3_REGNO)));
1205 RTX_FRAME_RELATED_P (insn) = 1;
1209 /* Local vars take up less than 32768 words, so we can directly
1210 subtract the number. */
1211 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1212 gen_rtx_REG (QImode, SP_REGNO),
1214 RTX_FRAME_RELATED_P (insn) = 1;
/* Return through the address previously loaded into R2, or fall back
   to the normal return pattern.  */
1219 insn = emit_jump_insn (gen_return_indirect_internal
1220 (gen_rtx_REG (QImode, R2_REGNO)));
1221 RTX_FRAME_RELATED_P (insn) = 1;
1225 insn = emit_jump_insn (gen_return_from_epilogue ());
1226 RTX_FRAME_RELATED_P (insn) = 1;
/* Return non-zero if the current function needs no epilogue at all:
   after reload, for an ordinary function with no frame, no stacked
   arguments, no alloca, and no call-saved registers to restore
   (other than AR3).  */
1233 c4x_null_epilogue_p ()
1237 if (reload_completed
1238 && ! c4x_assembler_function_p ()
1239 && ! c4x_interrupt_function_p ()
1240 && ! current_function_calls_alloca
1241 && ! current_function_args_size
1243 && ! get_frame_size ())
1245 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1246 if (regs_ever_live[regno] && ! call_used_regs[regno]
1247 && (regno != AR3_REGNO))
/* Massage a move's OPERANDS (dest = operands[0], source = operands[1])
   in mode MODE into forms the machine description can match: force
   awkward constants into registers or the constant pool, expose LDP
   (data-page) loads when TARGET_EXPOSE_LDP, and rewrite mixed-mode
   SUBREGs via the int-clobber patterns.  */
1256 c4x_emit_move_sequence (operands, mode)
1258 enum machine_mode mode;
1260 rtx op0 = operands[0];
1261 rtx op1 = operands[1];
1263 if (! reload_in_progress
1266 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1267 op1 = force_reg (mode, op1);
/* A (LO_SUM DP symref) is already a resolved direct address; keep just
   the symbolic part.  */
1269 if (GET_CODE (op1) == LO_SUM
1270 && GET_MODE (op1) == Pmode
1271 && dp_reg_operand (XEXP (op1, 0), mode))
1273 /* expand_increment will sometimes create a LO_SUM immediate
1275 op1 = XEXP (op1, 1);
1277 else if (symbolic_address_operand (op1, mode))
1279 if (TARGET_LOAD_ADDRESS)
1281 /* Alias analysis seems to do a better job if we force
1282 constant addresses to memory after reload. */
1283 emit_insn (gen_load_immed_address (op0, op1));
1288 /* Stick symbol or label address into the constant pool. */
1289 op1 = force_const_mem (Pmode, op1);
1292 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1294 /* We could be a lot smarter about loading some of these
1296 op1 = force_const_mem (mode, op1);
1299 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1300 and emit associated (HIGH (SYMREF)) if large memory model.
1301 c4x_legitimize_address could be used to do this,
1302 perhaps by calling validize_address. */
1303 if (TARGET_EXPOSE_LDP
1304 && ! (reload_in_progress || reload_completed)
1305 && GET_CODE (op1) == MEM
1306 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1308 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1310 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1311 op1 = change_address (op1, mode,
1312 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
/* Same LDP exposure for a symbolic destination address.  */
1315 if (TARGET_EXPOSE_LDP
1316 && ! (reload_in_progress || reload_completed)
1317 && GET_CODE (op0) == MEM
1318 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1320 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1322 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1323 op0 = change_address (op0, mode,
1324 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
1327 if (GET_CODE (op0) == SUBREG
1328 && mixed_subreg_operand (op0, mode))
1330 /* We should only generate these mixed mode patterns
1331 during RTL generation. If we need do it later on
1332 then we'll have to emit patterns that won't clobber CC. */
1333 if (reload_in_progress || reload_completed)
1335 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1336 op0 = SUBREG_REG (op0);
1337 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1339 op0 = copy_rtx (op0);
1340 PUT_MODE (op0, QImode);
1346 emit_insn (gen_storeqf_int_clobber (op0, op1));
1352 if (GET_CODE (op1) == SUBREG
1353 && mixed_subreg_operand (op1, mode))
1355 /* We should only generate these mixed mode patterns
1356 during RTL generation. If we need do it later on
1357 then we'll have to emit patterns that won't clobber CC. */
1358 if (reload_in_progress || reload_completed)
1360 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1361 op1 = SUBREG_REG (op1);
1362 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1364 op1 = copy_rtx (op1);
1365 PUT_MODE (op1, QImode);
1371 emit_insn (gen_loadqf_int_clobber (op0, op1));
/* Large integer constants that fit neither the 16-bit immediate nor
   the high-half form need multi-insn load patterns.  */
1378 && reg_operand (op0, mode)
1379 && const_int_operand (op1, mode)
1380 && ! IS_INT16_CONST (INTVAL (op1))
1381 && ! IS_HIGH_CONST (INTVAL (op1)))
1383 emit_insn (gen_loadqi_big_constant (op0, op1))
1388 && reg_operand (op0, mode)
1389 && const_int_operand (op1, mode))
1391 emit_insn (gen_loadhi_big_constant (op0, op1));
1395 /* Adjust operands in case we have modified them. */
1399 /* Emit normal pattern. */
/* Emit a library call LIBCALL taking NOPERANDS operands (1 or 2 sources
   in mode SMODE), storing the DMODE result in operands[0], and attach a
   REG_EQUAL note built from rtx code CODE so CSE can value-number it.  */
1405 c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1408 enum machine_mode dmode;
1409 enum machine_mode smode;
1421 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1422 operands[1], smode);
1423 equiv = gen_rtx (code, dmode, operands[1]);
1427 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1428 operands[1], smode, operands[2], smode);
1429 equiv = gen_rtx (code, dmode, operands[1], operands[2]);
1436 insns = get_insns ();
1438 emit_libcall_block (insns, operands[0], ret, equiv);
/* Convenience wrapper: two-source libcall where source and destination
   share the same mode MODE.  */
1443 c4x_emit_libcall3 (libcall, code, mode, operands)
1446 enum machine_mode mode;
1449 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
/* Emit a widening-multiply-high libcall: the REG_EQUAL note records
   (truncate (lshiftrt (mult (CODE op1) (CODE op2)))) in HImode, i.e.
   the upper half of the double-width product.  */
1454 c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1457 enum machine_mode mode;
1465 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1466 operands[1], mode, operands[2], mode);
1467 equiv = gen_rtx_TRUNCATE (mode,
1468 gen_rtx_LSHIFTRT (HImode,
1469 gen_rtx_MULT (HImode,
1470 gen_rtx (code, HImode, operands[1]),
1471 gen_rtx (code, HImode, operands[2])),
1473 insns = get_insns ();
1475 emit_libcall_block (insns, operands[0], ret, equiv);
1479 /* Set the SYMBOL_REF_FLAG for a function decl. However, we do not
1480 yet use this info. */
/* Mark the SYMBOL_REF of DECL when it denotes a function (either a
   FUNCTION_TYPE-typed decl or a FUNCTION_DECL) by setting
   SYMBOL_REF_FLAG on its rtl.  */
1482 c4x_encode_section_info (decl)
1486 if (TREE_CODE (TREE_TYPE (decl)) == FUNCTION_TYPE)
1487 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
1489 if (TREE_CODE (decl) == FUNCTION_DECL)
1490 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
/* Decide whether ADDR is a legitimate memory address for mode MODE
   (backend for GO_IF_LEGITIMATE_ADDRESS).  STRICT nonzero means hard
   registers must satisfy the base/index register-class checks; when
   zero, pseudos are also accepted.  The address is decomposed into
   BASE (AR0-AR7), INDX (IR0/IR1) and DISP (constant displacement),
   which are then validated piecewise.  */
1496 c4x_check_legit_addr (mode, addr, strict)
1497 enum machine_mode mode;
1501 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1502 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1503 rtx disp = NULL_RTX; /* Displacement. */
1506 code = GET_CODE (addr);
1509 /* Register indirect with auto increment/decrement. We don't
1510 allow SP here---push_operand should recognise an operand
1511 being pushed on the stack. */
1516 if (mode != QImode && mode != QFmode)
1520 base = XEXP (addr, 0);
1528 rtx op0 = XEXP (addr, 0);
1529 rtx op1 = XEXP (addr, 1);
1531 if (mode != QImode && mode != QFmode)
1535 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1537 base = XEXP (op1, 0);
1540 if (REG_P (XEXP (op1, 1)))
1541 indx = XEXP (op1, 1);
1543 disp = XEXP (op1, 1);
1547 /* Register indirect. */
1552 /* Register indirect with displacement or index. */
1555 rtx op0 = XEXP (addr, 0);
1556 rtx op1 = XEXP (addr, 1);
1557 enum rtx_code code0 = GET_CODE (op0);
1564 base = op0; /* Base + index. */
1566 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1574 base = op0; /* Base + displacement. */
1585 /* Direct addressing with DP register. */
1588 rtx op0 = XEXP (addr, 0);
1589 rtx op1 = XEXP (addr, 1);
1591 /* HImode and HFmode direct memory references aren't truly
1592 offsettable (consider case at end of data page). We
1593 probably get better code by loading a pointer and using an
1594 indirect memory reference. */
1595 if (mode == HImode || mode == HFmode)
1598 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1601 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1604 if (GET_CODE (op1) == CONST)
1610 /* Direct addressing with some work for the assembler... */
1612 /* Direct addressing. */
1615 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1617 /* These need to be converted to a LO_SUM (...).
1618 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1621 /* Do not allow direct memory access to absolute addresses.
1622 This is more pain than it's worth, especially for the
1623 small memory model where we can't guarantee that
1624 this address is within the data page---we don't want
1625 to modify the DP register in the small memory model,
1626 even temporarily, since an interrupt can sneak in.... */
1630 /* Indirect indirect addressing. */
1635 fatal_insn ("using CONST_DOUBLE for address", addr);
1641 /* Validate the base register. */
1644 /* Check that the address is offsettable for HImode and HFmode. */
1645 if (indx && (mode == HImode || mode == HFmode))
1648 /* Handle DP based stuff. */
1649 if (REGNO (base) == DP_REGNO)
1651 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1653 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1657 /* Now validate the index register. */
1660 if (GET_CODE (indx) != REG)
1662 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1664 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1668 /* Validate displacement. */
1671 if (GET_CODE (disp) != CONST_INT)
1673 if (mode == HImode || mode == HFmode)
1675 /* The offset displacement must be legitimate. */
1676 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1681 if (! IS_DISP8_CONST (INTVAL (disp)))
1684 /* Can't add an index with a disp. */
/* Rewrite address ORIG into a legitimate form for mode MODE: symbolic
   addresses become either a register copy (HImode/HFmode, which need an
   offsettable pointer) or a DP-relative LO_SUM with an explicit set_ldp
   insn.  */
1693 c4x_legitimize_address (orig, mode)
1694 rtx orig ATTRIBUTE_UNUSED;
1695 enum machine_mode mode ATTRIBUTE_UNUSED;
1697 if (GET_CODE (orig) == SYMBOL_REF
1698 || GET_CODE (orig) == LABEL_REF)
1700 if (mode == HImode || mode == HFmode)
1702 /* We need to force the address into
1703 a register so that it is offsettable. */
1704 rtx addr_reg = gen_reg_rtx (Pmode);
1705 emit_move_insn (addr_reg, orig);
1710 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1713 emit_insn (gen_set_ldp (dp_reg, orig));
1715 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1723 /* Provide the costs of an addressing mode that contains ADDR.
1724 If ADDR is not a valid address, its cost is irrelevant.
1725 This is used in cse and loop optimisation to determine
1726 if it is worthwhile storing a common address into a register.
1727 Unfortunately, the C4x address cost depends on other operands. */
/* Return a relative cost estimate for address ADDR; used by cse/loop
   to decide whether to hoist an address into a register.  Costs are
   heuristic and depend on memory model and constant sizes.  */
1730 c4x_address_cost (addr)
1733 switch (GET_CODE (addr))
1744 /* These shouldn't be directly generated. */
1752 rtx op1 = XEXP (addr, 1);
1754 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1755 return TARGET_SMALL ? 3 : 4;
1757 if (GET_CODE (op1) == CONST)
1759 rtx offset = const0_rtx;
1761 op1 = eliminate_constant_term (op1, &offset);
1763 /* ??? These costs need rethinking... */
1764 if (GET_CODE (op1) == LABEL_REF)
1767 if (GET_CODE (op1) != SYMBOL_REF)
1770 if (INTVAL (offset) == 0)
1775 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1781 register rtx op0 = XEXP (addr, 0);
1782 register rtx op1 = XEXP (addr, 1);
1784 if (GET_CODE (op0) != REG)
1787 switch (GET_CODE (op1))
1793 /* This cost for REG+REG must be greater than the cost
1794 for REG if we want autoincrement addressing modes. */
1798 /* The following tries to improve GIV combination
1799 in strength reduce but appears not to help. */
1800 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1803 if (IS_DISP1_CONST (INTVAL (op1)))
1806 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
/* Emit a compare of X and Y for condition CODE and return the status
   (ST) register in the chosen CC mode.  Signed inequality tests under
   CC_NOOVmode are rejected (handled by the early return).  */
1821 c4x_gen_compare_reg (code, x, y)
1825 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1828 if (mode == CC_NOOVmode
1829 && (code == LE || code == GE || code == LT || code == GT))
1832 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1833 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1834 gen_rtx_COMPARE (mode, x, y)));
/* Build the assembler template for conditional branch insn SEQ using
   base template FORM, accounting for delay slots and annulled
   branches.  Uses a static buffer, so the result must be consumed
   before the next call (not reentrant).  */
1839 c4x_output_cbranch (form, seq)
1848 static char str[100];
1852 delay = XVECEXP (final_sequence, 0, 1);
1853 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1854 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1855 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1858 cp = &str [strlen (str)];
/* Print operand OP to FILE for the assembler, interpreting the
   template modifier LETTER (%A, %H, %I, %L, %N, %K, %M, %O, %C, %U,
   or 0 for the plain operand).  Falls through to printing registers,
   memory addresses, float and integer constants, and condition-code
   mnemonics.  */
1883 c4x_print_operand (file, op, letter)
1884 FILE *file; /* File to write to. */
1885 rtx op; /* Operand to print. */
1886 int letter; /* %<letter> or 0. */
1893 case '#': /* Delayed. */
1895 asm_fprintf (file, "d");
1899 code = GET_CODE (op);
1902 case 'A': /* Direct address. */
1903 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1904 asm_fprintf (file, "@");
1907 case 'H': /* Sethi. */
1908 output_addr_const (file, op);
1911 case 'I': /* Reversed condition. */
1912 code = reverse_condition (code);
1915 case 'L': /* Log 2 of constant. */
1916 if (code != CONST_INT)
1917 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1918 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1921 case 'N': /* Ones complement of small constant. */
1922 if (code != CONST_INT)
1923 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1924 fprintf (file, "%d", ~INTVAL (op));
1927 case 'K': /* Generate ldp(k) if direct address. */
1930 && GET_CODE (XEXP (op, 0)) == LO_SUM
1931 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1932 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1934 op1 = XEXP (XEXP (op, 0), 1);
1935 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1937 asm_fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1938 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1939 asm_fprintf (file, "\n");
1944 case 'M': /* Generate ldp(k) if direct address. */
1945 if (! TARGET_SMALL /* Only used in asm statements. */
1947 && (GET_CODE (XEXP (op, 0)) == CONST
1948 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1950 asm_fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1951 output_address (XEXP (op, 0));
1952 asm_fprintf (file, "\n\t");
1956 case 'O': /* Offset address. */
1957 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1959 else if (code == MEM)
1960 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1961 else if (code == REG)
1962 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1964 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1967 case 'C': /* Call. */
1970 case 'U': /* Call/callu. */
1971 if (code != SYMBOL_REF)
1972 asm_fprintf (file, "u");
1982 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1984 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1986 fprintf (file, "%s", reg_names[REGNO (op)]);
1990 output_address (XEXP (op, 0));
1998 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
1999 REAL_VALUE_TO_DECIMAL (r, "%20f", str);
2000 fprintf (file, "%s", str);
2005 fprintf (file, "%d", INTVAL (op));
2009 asm_fprintf (file, "ne");
2013 asm_fprintf (file, "eq");
2017 asm_fprintf (file, "ge");
2021 asm_fprintf (file, "gt");
2025 asm_fprintf (file, "le");
2029 asm_fprintf (file, "lt");
2033 asm_fprintf (file, "hs");
2037 asm_fprintf (file, "hi");
2041 asm_fprintf (file, "ls");
2045 asm_fprintf (file, "lo");
2049 output_addr_const (file, op);
2053 output_addr_const (file, XEXP (op, 0));
2060 fatal_insn ("c4x_print_operand: Bad operand case", op);
/* Print memory address ADDR to FILE using C4x assembler syntax:
   *ARn, *++ARn / *--ARn / *ARn++ / *ARn-- for auto-modify,
   *+ARn(d) / *-ARn(d) / *+ARn(IRm) for displacement/index forms, and
   @symbol for direct (DP-relative) addresses.  */
2067 c4x_print_operand_address (file, addr)
2071 switch (GET_CODE (addr))
2074 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2078 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2082 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2087 rtx op0 = XEXP (XEXP (addr, 1), 0);
2088 rtx op1 = XEXP (XEXP (addr, 1), 1);
2090 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2091 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2092 reg_names[REGNO (op1)]);
2093 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2094 fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
2096 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2097 fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
2099 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2100 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2101 reg_names[REGNO (op1)]);
2103 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2109 rtx op0 = XEXP (XEXP (addr, 1), 0);
2110 rtx op1 = XEXP (XEXP (addr, 1), 1);
2112 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2113 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2114 reg_names[REGNO (op1)]);
2115 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2116 fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
2118 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2119 fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
2121 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2122 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2123 reg_names[REGNO (op1)]);
2125 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2130 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2134 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2137 case PLUS: /* Indirect with displacement. */
2139 rtx op0 = XEXP (addr, 0);
2140 rtx op1 = XEXP (addr, 1);
2146 if (IS_INDEX_REG (op0))
2148 fprintf (file, "*+%s(%s)",
2149 reg_names[REGNO (op1)],
2150 reg_names[REGNO (op0)]); /* Index + base. */
2154 fprintf (file, "*+%s(%s)",
2155 reg_names[REGNO (op0)],
2156 reg_names[REGNO (op1)]); /* Base + index. */
2159 else if (INTVAL (op1) < 0)
2161 fprintf (file, "*-%s(%d)",
2162 reg_names[REGNO (op0)],
2163 -INTVAL (op1)); /* Base - displacement. */
2167 fprintf (file, "*+%s(%d)",
2168 reg_names[REGNO (op0)],
2169 INTVAL (op1)); /* Base + displacement. */
2173 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2179 rtx op0 = XEXP (addr, 0);
2180 rtx op1 = XEXP (addr, 1);
2182 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2183 c4x_print_operand_address (file, op1);
2185 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2192 fprintf (file, "@");
2193 output_addr_const (file, addr);
2196 /* We shouldn't access CONST_INT addresses. */
2200 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2206 /* Return nonzero if the floating point operand will fit
2207 in the immediate field. */
/* Return nonzero if CONST_DOUBLE OP fits the C4x short immediate float
   format: 4-bit exponent (-7..7, with -128 meaning zero) and 12-bit
   mantissa; extra mantissa bits mean the precision doesn't fit.  */
2210 c4x_immed_float_p (op)
2217 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2218 if (GET_MODE (op) == HFmode)
2219 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2222 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2226 /* Sign extend exponent. */
2227 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2228 if (exponent == -128)
2230 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2231 return 0; /* Precision doesn't fit. */
2232 return (exponent <= 7) /* Positive exp. */
2233 && (exponent >= -7); /* Negative exp. */
2237 /* The last instruction in a repeat block cannot be a Bcond, DBcond,
2238 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2240 None of the last four instructions from the bottom of the block can
2241 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2242 BcondAT or RETIcondD.
2244 This routine scans the four previous insns for a jump insn, and if
2245 one is found, returns 1 so that we bung in a nop instruction.
2246 This simple minded strategy will add a nop, when it may not
2247 be required. Say when there is a JUMP_INSN near the end of the
2248 block that doesn't get converted into a delayed branch.
2250 Note that we cannot have a call insn, since we don't generate
2251 repeat loops with calls in them (although I suppose we could, but
2252 there's no benefit.)
2254 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
/* Return nonzero if a NOP must be emitted before the rptb_end insn
   INSN: scan the previous four real insns for a jump (forbidden near
   the bottom of a repeat block), and also require one if a label sits
   at the end of the loop.  */
2257 c4x_rptb_nop_p (insn)
2263 /* Extract the start label from the jump pattern (rptb_end). */
2264 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2266 /* If there is a label at the end of the loop we must insert
2269 insn = previous_insn (insn);
2270 } while (GET_CODE (insn) == NOTE
2271 || GET_CODE (insn) == USE
2272 || GET_CODE (insn) == CLOBBER);
2273 if (GET_CODE (insn) == CODE_LABEL)
2276 for (i = 0; i < 4; i++)
2278 /* Search back for prev non-note and non-label insn. */
2279 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2280 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2282 if (insn == start_label)
2285 insn = previous_insn (insn);
2288 /* If we have a jump instruction we should insert a NOP. If we
2289 hit repeat block top we should only insert a NOP if the loop
2291 if (GET_CODE (insn) == JUMP_INSN)
2293 insn = previous_insn (insn);
2299 /* The C4x looping instruction needs to be emitted at the top of the
2300 loop. Emitting the true RTL for a looping instruction at the top of
2301 the loop can cause problems with flow analysis. So instead, a dummy
2302 doloop insn is emitted at the end of the loop. This routine checks
2303 for the presence of this doloop insn and then searches back to the
2304 top of the loop, where it inserts the true looping insn (provided
2305 there are no instructions in the loop which would cause problems).
2306 Any additional labels can be emitted at this point. In addition, if
2307 the desired loop count register was not allocated, this routine does
2310 Before we can create a repeat block looping instruction we have to
2311 verify that there are no jumps outside the loop and no jumps outside
2312 the loop go into this loop. This can happen in the basic blocks reorder
2313 pass. The C4x cpu can not handle this. */
/* Recursively walk rtx X and return nonzero if it contains a
   LABEL_REF to CODE_LABEL (compared by insn UID).  */
2316 c4x_label_ref_used_p (x, code_label)
2326 code = GET_CODE (x);
2327 if (code == LABEL_REF)
2328 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2330 fmt = GET_RTX_FORMAT (code);
2331 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2335 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2338 else if (fmt[i] == 'E')
2339 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2340 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
/* Return nonzero if the loop ending at rptb_end insn INSN and starting
   at START_LABEL may become a RPTB/RPTS block: the start label must be
   reachable by scanning backwards (basic-block reordering may have
   moved it), and no jump may enter or leave the block.  */
2348 c4x_rptb_valid_p (insn, start_label)
2349 rtx insn, start_label;
2355 /* Find the start label. */
2356 for (; insn; insn = PREV_INSN (insn))
2357 if (insn == start_label)
2360 /* Note found then we can not use a rptb or rpts. The label was
2361 probably moved by the basic block reorder pass. */
2366 /* If any jump jumps inside this block then we must fail. */
2367 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2369 if (GET_CODE (insn) == CODE_LABEL)
2371 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2372 if (GET_CODE (tmp) == JUMP_INSN
2373 && c4x_label_ref_used_p (tmp, insn))
2377 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2379 if (GET_CODE (insn) == CODE_LABEL)
2381 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2382 if (GET_CODE (tmp) == JUMP_INSN
2383 && c4x_label_ref_used_p (tmp, insn))
2387 /* If any jump jumps outside this block then we must fail. */
2388 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2390 if (GET_CODE (insn) == CODE_LABEL)
2392 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2393 if (GET_CODE (tmp) == JUMP_INSN
2394 && c4x_label_ref_used_p (tmp, insn))
2396 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2397 if (GET_CODE (tmp) == JUMP_INSN
2398 && c4x_label_ref_used_p (tmp, insn))
2403 /* All checks OK. */
/* Convert a dummy rptb_end insn INSN into a real repeat block: insert
   rptb_top (or rpts_top) and a fresh start label at the loop head and
   an end label after INSN.  If the count register is not RC, or the
   block fails c4x_rptb_valid_p, fall back to an explicit
   decrement/compare/branch sequence instead.  */
2409 c4x_rptb_insert (insn)
2414 rtx new_start_label;
2417 /* If the count register has not been allocated to RC, say if
2418 there is a movstr pattern in the loop, then do not insert a
2419 RPTB instruction. Instead we emit a decrement and branch
2420 at the end of the loop. */
2421 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2422 if (REGNO (count_reg) != RC_REGNO)
2425 /* Extract the start label from the jump pattern (rptb_end). */
2426 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2428 if (! c4x_rptb_valid_p (insn, start_label))
2430 /* We can not use the rptb insn. Replace it so reorg can use
2431 the delay slots of the jump insn. */
2432 emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
2433 emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
2434 emit_insn_before (gen_bge (start_label), insn);
2435 LABEL_NUSES (start_label)++;
2440 end_label = gen_label_rtx ();
2441 LABEL_NUSES (end_label)++;
2442 emit_label_after (end_label, insn);
2444 new_start_label = gen_label_rtx ();
2445 LABEL_NUSES (new_start_label)++;
/* Redirect all jumps to the old start label onto the new one, then
   drop the old label if it became unused.  */
2447 for (; insn; insn = PREV_INSN (insn))
2449 if (insn == start_label)
2451 if (GET_CODE (insn) == JUMP_INSN &&
2452 JUMP_LABEL (insn) == start_label)
2453 redirect_jump (insn, new_start_label, 0);
2456 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2458 emit_label_after (new_start_label, insn);
2460 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2461 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2463 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2464 if (LABEL_NUSES (start_label) == 0)
2465 delete_insn (start_label);
2469 /* This function is a C4x special called immediately before delayed
2470 branch scheduling. We fix up RPTB style loops that didn't get RC
2471 allocated as the loop counter. */
/* Walk the insn chain starting at FIRST after reload: expand rptb_end
   markers into real repeat blocks via c4x_rptb_insert, and split each
   remaining insn so force_const_mem works for load_immed_address.  */
2474 c4x_process_after_reload (first)
2479 for (insn = first; insn; insn = NEXT_INSN (insn))
2481 /* Look for insn. */
2484 int insn_code_number;
2487 insn_code_number = recog_memoized (insn);
2489 if (insn_code_number < 0)
2492 /* Insert the RTX for RPTB at the top of the loop
2493 and a label at the end of the loop. */
2494 if (insn_code_number == CODE_FOR_rptb_end)
2495 c4x_rptb_insert(insn);
2497 /* We need to split the insn here. Otherwise the calls to
2498 force_const_mem will not work for load_immed_address. */
2501 /* Don't split the insn if it has been deleted. */
2502 if (! INSN_DELETED_P (old))
2503 insn = try_split (PATTERN(old), old, 1);
2505 /* When not optimizing, the old insn will be still left around
2506 with only the 'deleted' bit set. Transform it into a note
2507 to avoid confusion of subsequent processing. */
2508 if (INSN_DELETED_P (old))
2510 PUT_CODE (old, NOTE);
2511 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2512 NOTE_SOURCE_FILE (old) = 0;
2523 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2531 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
/* Return nonzero if OP is a CONST_INT of integer (or VOID) mode.
   NOTE(review): GET_MODE_CLASS is applied to OP itself rather than to
   GET_MODE (OP) — looks suspicious; confirm against rtl.h before
   relying on the mode-class arms.  */
2536 c4x_immed_int_constant (op)
2539 if (GET_CODE (op) != CONST_INT)
2542 return GET_MODE (op) == VOIDmode
2543 || GET_MODE_CLASS (op) == MODE_INT
2544 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
/* Return nonzero if OP is a CONST_DOUBLE in QFmode or HFmode.  */
2549 c4x_immed_float_constant (op)
2552 if (GET_CODE (op) != CONST_DOUBLE)
2555 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2556 present this only means that a MEM rtx has been generated. It does
2557 not mean the rtx is really in memory. */
2559 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
/* Search for a shift count (0..15) such that CONST_INT OP, shifted
   right by it, fits a 16-bit signed immediate; the mask keeps the sign
   handling right for negative values.  */
2564 c4x_shiftable_constant (op)
2569 int val = INTVAL (op);
2571 for (i = 0; i < 16; i++)
2576 mask = ((0xffff >> i) << 16) | 0xffff;
2577 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2578 : (val >> i) & mask))
2588 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2596 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2606 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2614 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2616 return IS_INT5_CONST (INTVAL (op));
2624 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2632 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2640 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2644 /* The constraints do not have to check the register class,
2645 except when needed to discriminate between the constraints.
2646 The operand has been checked by the predicates to be valid. */
2648 /* ARx + 9-bit signed const or IRn
2649 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2650 We don't include the pre/post inc/dec forms here since
2651 they are handled by the <> constraints. */
/* 'Q' constraint: memory operand of the form *ARx, *+/-ARx(n) with
   -256 < n < 256, or *+/-ARx(IRn).  HImode/HFmode additionally require
   the displacement to stay offsettable.  */
2654 c4x_Q_constraint (op)
2657 enum machine_mode mode = GET_MODE (op);
2659 if (GET_CODE (op) != MEM)
2662 switch (GET_CODE (op))
2669 rtx op0 = XEXP (op, 0);
2670 rtx op1 = XEXP (op, 1);
2678 if (GET_CODE (op1) != CONST_INT)
2681 /* HImode and HFmode must be offsettable. */
2682 if (mode == HImode || mode == HFmode)
2683 return IS_DISP8_OFF_CONST (INTVAL (op1));
2685 return IS_DISP8_CONST (INTVAL (op1));
2696 /* ARx + 5-bit unsigned const
2697 *ARx, *+ARx(n) for n < 32. */
/* 'R' constraint: memory operand *ARx or *+ARx(n) with unsigned 5-bit
   displacement (n < 32); HImode/HFmode need n+1 to fit as well so the
   second word stays addressable.  */
2700 c4x_R_constraint (op)
2703 enum machine_mode mode = GET_MODE (op);
2707 if (GET_CODE (op) != MEM)
2710 switch (GET_CODE (op))
2717 rtx op0 = XEXP (op, 0);
2718 rtx op1 = XEXP (op, 1);
2723 if (GET_CODE (op1) != CONST_INT)
2726 /* HImode and HFmode must be offsettable. */
2727 if (mode == HImode || mode == HFmode)
2728 return IS_UINT5_CONST (INTVAL (op1) + 1);
2730 return IS_UINT5_CONST (INTVAL (op1));
2745 enum machine_mode mode = GET_MODE (op);
2747 if (TARGET_C3X || GET_CODE (op) != MEM)
2751 switch (GET_CODE (op))
2754 return IS_ADDR_OR_PSEUDO_REG (op);
2758 rtx op0 = XEXP (op, 0);
2759 rtx op1 = XEXP (op, 1);
2761 /* HImode and HFmode must be offsettable. */
2762 if (mode == HImode || mode == HFmode)
2763 return IS_ADDR_OR_PSEUDO_REG (op0)
2764 && GET_CODE (op1) == CONST_INT
2765 && IS_UINT5_CONST (INTVAL (op1) + 1);
2768 && IS_ADDR_OR_PSEUDO_REG (op0)
2769 && GET_CODE (op1) == CONST_INT
2770 && IS_UINT5_CONST (INTVAL (op1));
2781 /* ARx + 1-bit unsigned const or IRn
2782 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2783 We don't include the pre/post inc/dec forms here since
2784 they are handled by the <> constraints. */
/* 'S' constraint: memory operand *ARx, *+/-ARx(1), or *+/-ARx(IRn);
   also accepts the reg+reg pre/post-modify forms.  HImode/HFmode need
   the displacement to be offsettable.  */
2787 c4x_S_constraint (op)
2790 enum machine_mode mode = GET_MODE (op);
2791 if (GET_CODE (op) != MEM)
2794 switch (GET_CODE (op))
2802 rtx op0 = XEXP (op, 0);
2803 rtx op1 = XEXP (op, 1);
2805 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2806 || (op0 != XEXP (op1, 0)))
2809 op0 = XEXP (op1, 0);
2810 op1 = XEXP (op1, 1);
2811 return REG_P (op0) && REG_P (op1);
2812 /* Pre or post_modify with a displacement of 0 or 1
2813 should not be generated. */
2819 rtx op0 = XEXP (op, 0);
2820 rtx op1 = XEXP (op, 1);
2828 if (GET_CODE (op1) != CONST_INT)
2831 /* HImode and HFmode must be offsettable. */
2832 if (mode == HImode || mode == HFmode)
2833 return IS_DISP1_OFF_CONST (INTVAL (op1));
2835 return IS_DISP1_CONST (INTVAL (op1));
2850 enum machine_mode mode = GET_MODE (op);
2851 if (GET_CODE (op) != MEM)
2855 switch (GET_CODE (op))
2859 if (mode != QImode && mode != QFmode)
2866 return IS_ADDR_OR_PSEUDO_REG (op);
2871 rtx op0 = XEXP (op, 0);
2872 rtx op1 = XEXP (op, 1);
2874 if (mode != QImode && mode != QFmode)
2877 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2878 || (op0 != XEXP (op1, 0)))
2881 op0 = XEXP (op1, 0);
2882 op1 = XEXP (op1, 1);
2883 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2884 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2885 /* Pre or post_modify with a displacement of 0 or 1
2886 should not be generated. */
2891 rtx op0 = XEXP (op, 0);
2892 rtx op1 = XEXP (op, 1);
2896 /* HImode and HFmode must be offsettable. */
2897 if (mode == HImode || mode == HFmode)
2898 return IS_ADDR_OR_PSEUDO_REG (op0)
2899 && GET_CODE (op1) == CONST_INT
2900 && IS_DISP1_OFF_CONST (INTVAL (op1));
2903 return (IS_INDEX_OR_PSEUDO_REG (op1)
2904 && IS_ADDR_OR_PSEUDO_REG (op0))
2905 || (IS_ADDR_OR_PSEUDO_REG (op1)
2906 && IS_INDEX_OR_PSEUDO_REG (op0));
2908 return IS_ADDR_OR_PSEUDO_REG (op0)
2909 && GET_CODE (op1) == CONST_INT
2910 && IS_DISP1_CONST (INTVAL (op1));
2922 /* Direct memory operand. */
/* 'T' constraint: direct memory operand.  Accepts either a call-style
   (MEM (SYMBOL_REF)) whose symbol is flagged as a function, or a
   DP-relative (MEM (LO_SUM DP sym)) whose symbolic part satisfies the
   'U' constraint.  HImode and HFmode are rejected: direct addresses
   are not offsettable across a data-page boundary.  */
2925 c4x_T_constraint (op)
2928 if (GET_CODE (op) != MEM)
2932 if (GET_CODE (op) != LO_SUM)
2934 /* Allow call operands. */
2935 return GET_CODE (op) == SYMBOL_REF
2936 && GET_MODE (op) == Pmode
2937 && SYMBOL_REF_FLAG (op);
2940 /* HImode and HFmode are not offsettable. */
/* Fixed: the second test used GET_CODE (op) == HFmode, comparing an
   rtx code against a machine mode, so the HFmode rejection never
   fired; it must test GET_MODE like the HImode arm.  */
2941 if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
2944 if ((GET_CODE (XEXP (op, 0)) == REG)
2945 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2946 return c4x_U_constraint (XEXP (op, 1));
2952 /* Symbolic operand. */
/* 'U' constraint: symbolic operand — CONST, SYMBOL_REF, or LABEL_REF
   (arbitrary constants are not allowed as direct addresses).  */
2955 c4x_U_constraint (op)
2958 /* Don't allow direct addressing to an arbitrary constant. */
2959 return GET_CODE (op) == CONST
2960 || GET_CODE (op) == SYMBOL_REF
2961 || GET_CODE (op) == LABEL_REF;
/* Return nonzero if OP is a MEM whose address is an auto-modify form
   (pre/post increment/decrement/modify).  MODE is ignored.  */
2966 c4x_autoinc_operand (op, mode)
2968 enum machine_mode mode ATTRIBUTE_UNUSED;
2970 if (GET_CODE (op) == MEM)
2972 enum rtx_code code = GET_CODE (XEXP (op, 0));
2978 || code == PRE_MODIFY
2979 || code == POST_MODIFY
2987 /* Match any operand. */
/* Predicate that matches any operand; OP and MODE are ignored.  */
2990 any_operand (op, mode)
2991 register rtx op ATTRIBUTE_UNUSED;
2992 enum machine_mode mode ATTRIBUTE_UNUSED;
2998 /* Nonzero if OP is a floating point value with value 0.0. */
/* Return nonzero if OP is a CONST_DOUBLE equal to 0.0.  */
3001 fp_zero_operand (op, mode)
3003 enum machine_mode mode ATTRIBUTE_UNUSED;
3007 if (GET_CODE (op) != CONST_DOUBLE)
3009 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
3010 return REAL_VALUES_EQUAL (r, dconst0);
/* Return nonzero if OP is a constant the C4x can load as an immediate
   in MODE: for float modes, a CONST_DOUBLE that fits the short float
   immediate format; for integer modes, a CONST_INT that is either a
   16-bit signed constant or a high-half constant.  */
3015 const_operand (op, mode)
3017 register enum machine_mode mode;
3023 if (GET_CODE (op) != CONST_DOUBLE
3024 || GET_MODE (op) != mode
3025 || GET_MODE_CLASS (mode) != MODE_FLOAT)
3028 return c4x_immed_float_p (op);
3034 if (GET_CODE (op) == CONSTANT_P_RTX)
3037 if (GET_CODE (op) != CONST_INT
3038 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
3039 || GET_MODE_CLASS (mode) != MODE_INT)
3042 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
/* Return nonzero if OP satisfies the 'K' constant constraint (suitable
   for the STIK instruction).  MODE is ignored.  */
3054 stik_const_operand (op, mode)
3056 enum machine_mode mode ATTRIBUTE_UNUSED;
3058 return c4x_K_constant (op);
/* Return nonzero if OP satisfies the 'N' constant constraint.  MODE is
   ignored.  */
3063 not_const_operand (op, mode)
3065 enum machine_mode mode ATTRIBUTE_UNUSED;
3067 return c4x_N_constant (op);
/* Like register_operand, with special handling of QFmode SUBREGs
   (the mixed int/float case).  */
3072 reg_operand (op, mode)
3074 enum machine_mode mode;
3076 if (GET_CODE (op) == SUBREG
3077 && GET_MODE (op) == QFmode)
3079 return register_operand (op, mode);
/* Return nonzero for a mixed-mode SUBREG — (subreg:QF (reg:QI)) or
   (subreg:QF (reg:HI)) — as generated for a union of an int and a
   long double.  MODE is ignored.  */
3084 mixed_subreg_operand (op, mode)
3086 enum machine_mode mode ATTRIBUTE_UNUSED;
3088 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3089 int and a long double. */
3090 if (GET_CODE (op) == SUBREG
3091 && (GET_MODE (op) == QFmode)
3092 && (GET_MODE (SUBREG_REG (op)) == QImode
3093 || GET_MODE (SUBREG_REG (op)) == HImode))
/* Return nonzero if OP is a register or any constant.  MODE is
   ignored.  */
3100 reg_imm_operand (op, mode)
3102 enum machine_mode mode ATTRIBUTE_UNUSED;
3104 if (REG_P (op) || CONSTANT_P (op))
/* Return nonzero if OP does not use an auto-modify addressing mode:
   registers, constants, plain register-indirect, reg+reg/const
   displacement, LO_SUM off DP, and symbolic addresses qualify.  */
3111 not_modify_reg (op, mode)
3113 enum machine_mode mode ATTRIBUTE_UNUSED;
3115 if (REG_P (op) || CONSTANT_P (op))
3117 if (GET_CODE (op) != MEM)
3120 switch (GET_CODE (op))
3127 rtx op0 = XEXP (op, 0);
3128 rtx op1 = XEXP (op, 1);
3133 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3139 rtx op0 = XEXP (op, 0);
3141 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3159 not_rc_reg (op, mode)
3161 enum machine_mode mode ATTRIBUTE_UNUSED;
3163 if (REG_P (op) && REGNO (op) == RC_REGNO)
3169 /* Extended precision register R0-R1. */
3172 r0r1_reg_operand (op, mode)
3174 enum machine_mode mode;
3176 if (! reg_operand (op, mode))
3178 if (GET_CODE (op) == SUBREG)
3179 op = SUBREG_REG (op);
3180 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3184 /* Extended precision register R2-R3. */
3187 r2r3_reg_operand (op, mode)
3189 enum machine_mode mode;
3191 if (! reg_operand (op, mode))
3193 if (GET_CODE (op) == SUBREG)
3194 op = SUBREG_REG (op);
3195 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3199 /* Low extended precision register R0-R7. */
3202 ext_low_reg_operand (op, mode)
3204 enum machine_mode mode;
3206 if (! reg_operand (op, mode))
3208 if (GET_CODE (op) == SUBREG)
3209 op = SUBREG_REG (op);
3210 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3214 /* Extended precision register. */
3217 ext_reg_operand (op, mode)
3219 enum machine_mode mode;
3221 if (! reg_operand (op, mode))
3223 if (GET_CODE (op) == SUBREG)
3224 op = SUBREG_REG (op);
3227 return IS_EXT_OR_PSEUDO_REG (op);
3231 /* Standard precision register. */
3234 std_reg_operand (op, mode)
3236 enum machine_mode mode;
3238 if (! reg_operand (op, mode))
3240 if (GET_CODE (op) == SUBREG)
3241 op = SUBREG_REG (op);
3242 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3245 /* Standard precision or normal register. */
3248 std_or_reg_operand (op, mode)
3250 enum machine_mode mode;
3252 if (reload_in_progress)
3253 return std_reg_operand (op, mode);
3254 return reg_operand (op, mode);
3257 /* Address register. */
3260 addr_reg_operand (op, mode)
3262 enum machine_mode mode;
3264 if (! reg_operand (op, mode))
3266 return c4x_a_register (op);
3270 /* Index register. */
3273 index_reg_operand (op, mode)
3275 enum machine_mode mode;
3277 if (! reg_operand (op, mode))
3279 if (GET_CODE (op) == SUBREG)
3280 op = SUBREG_REG (op);
3281 return c4x_x_register (op);
3288 dp_reg_operand (op, mode)
3290 enum machine_mode mode ATTRIBUTE_UNUSED;
3292 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3299 sp_reg_operand (op, mode)
3301 enum machine_mode mode ATTRIBUTE_UNUSED;
3303 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3310 st_reg_operand (op, mode)
3312 enum machine_mode mode ATTRIBUTE_UNUSED;
3314 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3321 rc_reg_operand (op, mode)
3323 enum machine_mode mode ATTRIBUTE_UNUSED;
3325 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3330 call_address_operand (op, mode)
3332 enum machine_mode mode ATTRIBUTE_UNUSED;
3334 return (REG_P (op) || symbolic_address_operand (op, mode));
3338 /* Symbolic address operand. */
3341 symbolic_address_operand (op, mode)
3343 enum machine_mode mode ATTRIBUTE_UNUSED;
3345 switch (GET_CODE (op))
3357 /* Check dst operand of a move instruction. */
3360 dst_operand (op, mode)
3362 enum machine_mode mode;
3364 if (GET_CODE (op) == SUBREG
3365 && mixed_subreg_operand (op, mode))
3369 return reg_operand (op, mode);
3371 return nonimmediate_operand (op, mode);
3375 /* Check src operand of two operand arithmetic instructions. */
3378 src_operand (op, mode)
3380 enum machine_mode mode;
/* Mixed-mode subregs (float outer, int inner) are only valid when the
   whole thing still qualifies as a plain register operand.  */
3382 if (GET_CODE (op) == SUBREG
3383 && mixed_subreg_operand (op, mode))
3387 return reg_operand (op, mode);
/* With no mode constraint, adopt the operand's own mode for the
   checks below.  */
3389 if (mode == VOIDmode)
3390 mode = GET_MODE (op);
/* Integer constants are only allowed in integer/pointer modes and
   must fit the machine's immediate field (I constraint).  */
3392 if (GET_CODE (op) == CONST_INT)
3393 return (mode == QImode || mode == Pmode || mode == HImode)
3394 && c4x_I_constant (op);
3396 /* We don't like CONST_DOUBLE integers. */
3397 if (GET_CODE (op) == CONST_DOUBLE)
3398 return c4x_H_constant (op);
3400 /* Disallow symbolic addresses. Only the predicate
3401 symbolic_address_operand will match these. */
3402 if (GET_CODE (op) == SYMBOL_REF
3403 || GET_CODE (op) == LABEL_REF
3404 || GET_CODE (op) == CONST)
3407 /* If TARGET_LOAD_DIRECT_MEMS is non-zero, disallow direct memory
3408 access to symbolic addresses. These operands will get forced
3409 into a register and the movqi expander will generate a
3410 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is non-zero. */
3411 if (GET_CODE (op) == MEM
3412 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3413 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3414 || GET_CODE (XEXP (op, 0)) == CONST)))
3415 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
/* Everything else falls through to the generic test.  */
3417 return general_operand (op, mode);
3422 src_hi_operand (op, mode)
3424 enum machine_mode mode;
3426 if (c4x_O_constant (op))
3428 return src_operand (op, mode);
3432 /* Check src operand of two operand logical instructions. */
3435 lsrc_operand (op, mode)
3437 enum machine_mode mode;
3439 if (mode == VOIDmode)
3440 mode = GET_MODE (op);
3442 if (mode != QImode && mode != Pmode)
3443 fatal_insn ("mode not QImode", op);
3445 if (GET_CODE (op) == CONST_INT)
3446 return c4x_L_constant (op) || c4x_J_constant (op);
3448 return src_operand (op, mode);
3452 /* Check src operand of two operand tricky instructions. */
3455 tsrc_operand (op, mode)
3457 enum machine_mode mode;
3459 if (mode == VOIDmode)
3460 mode = GET_MODE (op);
3462 if (mode != QImode && mode != Pmode)
3463 fatal_insn ("mode not QImode", op);
3465 if (GET_CODE (op) == CONST_INT)
3466 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3468 return src_operand (op, mode);
3473 reg_or_const_operand (op, mode)
3475 enum machine_mode mode;
3477 return reg_operand (op, mode) || const_operand (op, mode);
3481 /* Check for indirect operands allowable in parallel instruction. */
3484 par_ind_operand (op, mode)
3486 enum machine_mode mode;
3488 if (mode != VOIDmode && mode != GET_MODE (op))
3491 return c4x_S_indirect (op);
3495 /* Check for operands allowable in parallel instruction. */
3498 parallel_operand (op, mode)
3500 enum machine_mode mode;
3502 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
/* Decompose the address of MEM rtx OP into its components: *BASE and
   *INDEX receive register numbers, *DISP a constant displacement, and
   *INCDEC flags a side-effect (auto inc/dec/modify) addressing mode.
   Aborts via fatal_insn on address forms the S constraint cannot
   represent.  (Several switch arms are elided in this view.)  */
3507 c4x_S_address_parse (op, base, incdec, index, disp)
3519 if (GET_CODE (op) != MEM)
3520 fatal_insn ("invalid indirect memory address", op);
3523 switch (GET_CODE (op))
3526 *base = REGNO (XEXP (op, 0));
3532 *base = REGNO (XEXP (op, 0));
3538 *base = REGNO (XEXP (op, 0));
3544 *base = REGNO (XEXP (op, 0));
3550 *base = REGNO (XEXP (op, 0));
/* Modify-style address: second operand of the inner PLUS is either an
   index register or a constant displacement.  */
3551 if (REG_P (XEXP (XEXP (op, 1), 1)))
3553 *index = REGNO (XEXP (XEXP (op, 1), 1));
3554 *disp = 0; /* ??? */
3557 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3562 *base = REGNO (XEXP (op, 0))
3563 if (REG_P (XEXP (XEXP (op, 1), 1)))
3565 *index = REGNO (XEXP (XEXP (op, 1), 1));
3566 *disp = 1; /* ??? */
3569 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
/* Plain PLUS address: sort out which operand is the address (base)
   register and which is the index register or displacement.  */
3580 rtx op0 = XEXP (op, 0);
3581 rtx op1 = XEXP (op, 1);
3583 if (c4x_a_register (op0))
3585 if (c4x_x_register (op1))
3587 *base = REGNO (op0);
3588 *index = REGNO (op1);
3591 else if ((GET_CODE (op1) == CONST_INT
3592 && IS_DISP1_CONST (INTVAL (op1))))
3594 *base = REGNO (op0);
3595 *disp = INTVAL (op1);
/* Commuted form: index register first, address register second.  */
3599 else if (c4x_x_register (op0) && c4x_a_register (op1))
3601 *base = REGNO (op1);
3602 *index = REGNO (op0);
3609 fatal_insn ("invalid indirect (S) memory address", op);
3615 c4x_address_conflict (op0, op1, store0, store1)
3630 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3633 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3634 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3636 if (store0 && store1)
3638 /* If we have two stores in parallel to the same address, then
3639 the C4x only executes one of the stores. This is unlikely to
3640 cause problems except when writing to a hardware device such
3641 as a FIFO since the second write will be lost. The user
3642 should flag the hardware location as being volatile so that
3643 we don't do this optimisation. While it is unlikely that we
3644 have an aliased address if both locations are not marked
3645 volatile, it is probably safer to flag a potential conflict
3646 if either location is volatile. */
3647 if (! flag_argument_noalias)
3649 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3654 /* If have a parallel load and a store to the same address, the load
3655 is performed first, so there is no conflict. Similarly, there is
3656 no conflict if have parallel loads from the same address. */
3658 /* Cannot use auto increment or auto decrement twice for same
3660 if (base0 == base1 && incdec0 && incdec0)
3663 /* It might be too confusing for GCC if we have use a base register
3664 with a side effect and a memory reference using the same register
3666 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3669 /* We can not optimize the case where op1 and op2 refer to the same
3671 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3679 /* Check for while loop inside a decrement and branch loop. */
/* Scan backwards from INSN: nonzero if the label JUMP targets is seen
   before the decrement-and-branch loop's top label DB.  */
3682 c4x_label_conflict (insn, jump, db)
3689 if (GET_CODE (insn) == CODE_LABEL)
/* Found JUMP's target first: the inner jump escapes the DB loop.  */
3691 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
/* Found the DB loop's own label first: no conflict.  */
3693 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3696 insn = PREV_INSN (insn);
3702 /* Validate combination of operands for parallel load/store instructions. */
3705 valid_parallel_load_store (operands, mode)
3707 enum machine_mode mode ATTRIBUTE_UNUSED;
3709 rtx op0 = operands[0];
3710 rtx op1 = operands[1];
3711 rtx op2 = operands[2];
3712 rtx op3 = operands[3];
/* Strip SUBREGs so the REG/MEM classification below sees the
   underlying objects.  */
3714 if (GET_CODE (op0) == SUBREG)
3715 op0 = SUBREG_REG (op0);
3716 if (GET_CODE (op1) == SUBREG)
3717 op1 = SUBREG_REG (op1);
3718 if (GET_CODE (op2) == SUBREG)
3719 op2 = SUBREG_REG (op2);
3720 if (GET_CODE (op3) == SUBREG)
3721 op3 = SUBREG_REG (op3);
3723 /* The patterns should only allow ext_low_reg_operand() or
3724 par_ind_operand() operands. Thus of the 4 operands, only 2
3725 should be REGs and the other 2 should be MEMs. */
3727 /* This test prevents the multipack pass from using this pattern if
3728 op0 is used as an index or base register in op2 or op3, since
3729 this combination will require reloading. */
3730 if (GET_CODE (op0) == REG
3731 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3732 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
/* Parallel load: both destinations are registers; they must differ
   and the two source MEMs must not conflict (both reads).  */
3736 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3737 return (REGNO (op0) != REGNO (op2))
3738 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3739 && ! c4x_address_conflict (op1, op3, 0, 0);
/* Parallel store: both sources are registers; destination MEMs must
   not conflict (both writes).  */
3742 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3743 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3744 && ! c4x_address_conflict (op0, op2, 1, 1);
/* Mixed load/store combinations: the store flags passed below mark
   which MEM is written.  */
3747 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3748 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3749 && ! c4x_address_conflict (op1, op2, 0, 1);
3752 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3753 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3754 && ! c4x_address_conflict (op0, op3, 1, 0);
/* Validate a 4-operand parallel combination (unary operation packed
   with a load/store).  */
3761 valid_parallel_operands_4 (operands, mode)
3763 enum machine_mode mode ATTRIBUTE_UNUSED;
3765 rtx op0 = operands[0];
3766 rtx op2 = operands[2];
/* Strip SUBREGs before classifying.  */
3768 if (GET_CODE (op0) == SUBREG)
3769 op0 = SUBREG_REG (op0);
3770 if (GET_CODE (op2) == SUBREG)
3771 op2 = SUBREG_REG (op2);
3773 /* This test prevents the multipack pass from using this pattern if
3774 op0 is used as an index or base register in op2, since this combination
3775 will require reloading. */
3776 if (GET_CODE (op0) == REG
3777 && GET_CODE (op2) == MEM
3778 && reg_mentioned_p (op0, XEXP (op2, 0)))
/* Validate a 5-operand parallel combination (binary operation packed
   with a load/store).  */
3786 valid_parallel_operands_5 (operands, mode)
3788 enum machine_mode mode ATTRIBUTE_UNUSED;
3791 rtx op0 = operands[0];
3792 rtx op1 = operands[1];
3793 rtx op2 = operands[2];
3794 rtx op3 = operands[3];
/* Strip SUBREGs before classifying.  */
3796 if (GET_CODE (op0) == SUBREG)
3797 op0 = SUBREG_REG (op0);
3798 if (GET_CODE (op1) == SUBREG)
3799 op1 = SUBREG_REG (op1);
3800 if (GET_CODE (op2) == SUBREG)
3801 op2 = SUBREG_REG (op2);
3803 /* The patterns should only allow ext_low_reg_operand() or
3804 par_ind_operand() operands. Operands 1 and 2 may be commutative
3805 but only one of them can be a register. */
3806 if (GET_CODE (op1) == REG)
3808 if (GET_CODE (op2) == REG)
3814 /* This test prevents the multipack pass from using this pattern if
3815 op0 is used as an index or base register in op3, since this combination
3816 will require reloading. */
3817 if (GET_CODE (op0) == REG
3818 && GET_CODE (op3) == MEM
3819 && reg_mentioned_p (op0, XEXP (op3, 0)))
/* Validate a 6-operand parallel combination (two operations packed
   together; operands 1,2,4,5 are the inputs).  */
3827 valid_parallel_operands_6 (operands, mode)
3829 enum machine_mode mode ATTRIBUTE_UNUSED;
3832 rtx op0 = operands[0];
3833 rtx op1 = operands[1];
3834 rtx op2 = operands[2];
3835 rtx op4 = operands[4];
3836 rtx op5 = operands[5];
/* Strip SUBREGs before classifying.  */
3838 if (GET_CODE (op1) == SUBREG)
3839 op1 = SUBREG_REG (op1);
3840 if (GET_CODE (op2) == SUBREG)
3841 op2 = SUBREG_REG (op2);
3842 if (GET_CODE (op4) == SUBREG)
3843 op4 = SUBREG_REG (op4);
3844 if (GET_CODE (op5) == SUBREG)
3845 op5 = SUBREG_REG (op5);
3847 /* The patterns should only allow ext_low_reg_operand() or
3848 par_ind_operand() operands. Thus of the 4 input operands, only 2
3849 should be REGs and the other 2 should be MEMs. */
/* Count how many of the four inputs are registers (counting logic
   partially elided in this view).  */
3851 if (GET_CODE (op1) == REG)
3853 if (GET_CODE (op2) == REG)
3855 if (GET_CODE (op4) == REG)
3857 if (GET_CODE (op5) == REG)
3860 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3861 Perhaps we should count the MEMs as well? */
3865 /* This test prevents the multipack pass from using this pattern if
3866 op0 is used as an index or base register in op4 or op5, since
3867 this combination will require reloading. */
3868 if (GET_CODE (op0) == REG
3869 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3870 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3877 /* Validate combination of src operands. Note that the operands have
3878 been screened by the src_operand predicate. We just have to check
3879 that the combination of operands is valid. If FORCE is set, ensure
3880 that the destination regno is valid if we have a 2 operand insn. */
3883 c4x_valid_operands (code, operands, mode, force)
3886 enum machine_mode mode ATTRIBUTE_UNUSED;
3891 enum rtx_code code1;
3892 enum rtx_code code2;
/* COMPARE has no separate destination; operand handling differs
   (elided here).  */
3894 if (code == COMPARE)
/* Strip SUBREGs from both source operands before classifying.  */
3905 if (GET_CODE (op1) == SUBREG)
3906 op1 = SUBREG_REG (op1);
3907 if (GET_CODE (op2) == SUBREG)
3908 op2 = SUBREG_REG (op2);
3910 code1 = GET_CODE (op1);
3911 code2 = GET_CODE (op2);
3913 if (code1 == REG && code2 == REG)
/* Two MEM sources: both must use restricted (S or R) indirect
   addressing modes.  */
3916 if (code1 == MEM && code2 == MEM)
3918 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3920 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
/* op2 constant, op1 memory.  */
3931 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3936 if (! c4x_H_constant (op2))
3940 /* Any valid memory operand screened by src_operand is OK. */
3943 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3944 into a stack slot memory address comprising a PLUS and a
3950 fatal_insn ("c4x_valid_operands: Internal error", op2);
3954 /* Check that we have a valid destination register for a two operand
/* FORCE requires dst == op1 for two-operand insns (COMPARE exempt
   since it has no destination).  */
3956 return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
3959 /* We assume MINUS is commutative since the subtract patterns
3960 also support the reverse subtract instructions. Since op1
3961 is not a register, and op2 is a register, op1 can only
3962 be a restricted memory operand for a shift instruction. */
3963 if (code == ASHIFTRT || code == LSHIFTRT
3964 || code == ASHIFT || code == COMPARE)
3966 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
/* Mirror of the op2-constant case above, with operands swapped.  */
3971 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3976 if (! c4x_H_constant (op1))
3980 /* Any valid memory operand screened by src_operand is OK. */
3988 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3989 into a stack slot memory address comprising a PLUS and a
3999 /* Check that we have a valid destination register for a two operand
4001 return ! force || REGNO (op1) == REGNO (operands[0]);
/* Public wrapper: defer to c4x_valid_operands (without FORCE) unless
   optimization is off, in which case anything is accepted and reload
   is left to fix things up.  */
4005 int valid_operands (code, operands, mode)
4008 enum machine_mode mode;
4011 /* If we are not optimizing then we have to let anything go and let
4012 reload fix things up. instantiate_decl in function.c can produce
4013 invalid insns by changing the offset of a memory operand from a
4014 valid one into an invalid one, when the second operand is also a
4015 memory operand. The alternative is not to allow two memory
4016 operands for an insn when not optimizing. The problem only rarely
4017 occurs, for example with the C-torture program DFcmp.c. */
4019 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
/* Massage OPERANDS for rtx CODE in MODE into a combination the
   machine description accepts, forcing values into registers as
   needed.  Mutates the operands array in place.  */
4024 legitimize_operands (code, operands, mode)
4027 enum machine_mode mode;
4029 /* Compare only has 2 operands. */
4030 if (code == COMPARE)
4032 /* During RTL generation, force constants into pseudos so that
4033 they can get hoisted out of loops. This will tie up an extra
4034 register but can save an extra cycle. Only do this if loop
4035 optimisation enabled. (We cannot pull this trick for add and
4036 sub instructions since the flow pass won't find
4037 autoincrements etc.) This allows us to generate compare
4038 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4039 of LDI *AR0++, R0; CMPI 42, R0.
4041 Note that expand_binops will try to load an expensive constant
4042 into a register if it is used within a loop. Unfortunately,
4043 the cost mechanism doesn't allow us to look at the other
4044 operand to decide whether the constant is expensive. */
4046 if (! reload_in_progress
4049 && GET_CODE (operands[1]) == CONST_INT
4050 && preserve_subexpressions_p ()
4051 && rtx_cost (operands[1], code) > 1)
4052 operands[1] = force_reg (mode, operands[1]);
4054 if (! reload_in_progress
4055 && ! c4x_valid_operands (code, operands, mode, 0))
4056 operands[0] = force_reg (mode, operands[0]);
4060 /* We cannot do this for ADDI/SUBI insns since we will
4061 defeat the flow pass from finding autoincrement addressing
/* Force an expensive constant second operand into a register (same
   hoisting rationale as for COMPARE above), except for PLUS/MINUS
   in Pmode which would hide autoincrement opportunities.  */
4063 if (! reload_in_progress
4064 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
4067 && GET_CODE (operands[2]) == CONST_INT
4068 && preserve_subexpressions_p ()
4069 && rtx_cost (operands[2], code) > 1)
4070 operands[2] = force_reg (mode, operands[2]);
4072 /* We can get better code on a C30 if we force constant shift counts
4073 into a register. This way they can get hoisted out of loops,
4074 tying up a register, but saving an instruction. The downside is
4075 that they may get allocated to an address or index register, and
4076 thus we will get a pipeline conflict if there is a nearby
4077 indirect address using an address register.
4079 Note that expand_binops will not try to load an expensive constant
4080 into a register if it is used within a loop for a shift insn. */
4082 if (! reload_in_progress
4083 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
4085 /* If the operand combination is invalid, we force operand1 into a
4086 register, preventing reload from having doing to do this at a
4088 operands[1] = force_reg (mode, operands[1]);
/* Two-operand form: copy op1 into the destination and use the
   destination as the first source.  */
4091 emit_move_insn (operands[0], operands[1]);
4092 operands[1] = copy_rtx (operands[0]);
4096 /* Just in case... */
4097 if (! c4x_valid_operands (code, operands, mode, 0))
4098 operands[2] = force_reg (mode, operands[2]);
4102 /* Right shifts require a negative shift count, but GCC expects
4103 a positive count, so we emit a NEG. */
4104 if ((code == ASHIFTRT || code == LSHIFTRT)
4105 && (GET_CODE (operands[2]) != CONST_INT))
4106 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
4112 /* The following predicates are used for instruction scheduling. */
4115 group1_reg_operand (op, mode)
4117 enum machine_mode mode;
4119 if (mode != VOIDmode && mode != GET_MODE (op))
4121 if (GET_CODE (op) == SUBREG)
4122 op = SUBREG_REG (op);
4123 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4128 group1_mem_operand (op, mode)
4130 enum machine_mode mode;
4132 if (mode != VOIDmode && mode != GET_MODE (op))
4135 if (GET_CODE (op) == MEM)
4138 if (GET_CODE (op) == PLUS)
4140 rtx op0 = XEXP (op, 0);
4141 rtx op1 = XEXP (op, 1);
4143 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4144 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4147 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4155 /* Return true if any one of the address registers. */
4158 arx_reg_operand (op, mode)
4160 enum machine_mode mode;
4162 if (mode != VOIDmode && mode != GET_MODE (op))
4164 if (GET_CODE (op) == SUBREG)
4165 op = SUBREG_REG (op);
4166 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4171 c4x_arn_reg_operand (op, mode, regno)
4173 enum machine_mode mode;
4176 if (mode != VOIDmode && mode != GET_MODE (op))
4178 if (GET_CODE (op) == SUBREG)
4179 op = SUBREG_REG (op);
4180 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4185 c4x_arn_mem_operand (op, mode, regno)
4187 enum machine_mode mode;
4190 if (mode != VOIDmode && mode != GET_MODE (op))
4193 if (GET_CODE (op) == MEM)
4196 switch (GET_CODE (op))
4205 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4209 if (REG_P (XEXP (op, 0)) && (! reload_completed
4210 || (REGNO (XEXP (op, 0)) == regno)))
4212 if (REG_P (XEXP (XEXP (op, 1), 1))
4213 && (! reload_completed
4214 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
4220 rtx op0 = XEXP (op, 0);
4221 rtx op1 = XEXP (op, 1);
4223 if ((REG_P (op0) && (! reload_completed
4224 || (REGNO (op0) == regno)))
4225 || (REG_P (op1) && (! reload_completed
4226 || (REGNO (op1) == regno))))
4240 ar0_reg_operand (op, mode)
4242 enum machine_mode mode;
4244 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4249 ar0_mem_operand (op, mode)
4251 enum machine_mode mode;
4253 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4258 ar1_reg_operand (op, mode)
4260 enum machine_mode mode;
4262 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4267 ar1_mem_operand (op, mode)
4269 enum machine_mode mode;
4271 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4276 ar2_reg_operand (op, mode)
4278 enum machine_mode mode;
4280 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4285 ar2_mem_operand (op, mode)
4287 enum machine_mode mode;
4289 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4294 ar3_reg_operand (op, mode)
4296 enum machine_mode mode;
4298 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4303 ar3_mem_operand (op, mode)
4305 enum machine_mode mode;
4307 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4312 ar4_reg_operand (op, mode)
4314 enum machine_mode mode;
4316 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4321 ar4_mem_operand (op, mode)
4323 enum machine_mode mode;
4325 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4330 ar5_reg_operand (op, mode)
4332 enum machine_mode mode;
4334 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4339 ar5_mem_operand (op, mode)
4341 enum machine_mode mode;
4343 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4348 ar6_reg_operand (op, mode)
4350 enum machine_mode mode;
4352 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4357 ar6_mem_operand (op, mode)
4359 enum machine_mode mode;
4361 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4366 ar7_reg_operand (op, mode)
4368 enum machine_mode mode;
4370 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4375 ar7_mem_operand (op, mode)
4377 enum machine_mode mode;
4379 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4384 ir0_reg_operand (op, mode)
4386 enum machine_mode mode;
4388 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4393 ir0_mem_operand (op, mode)
4395 enum machine_mode mode;
4397 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4402 ir1_reg_operand (op, mode)
4404 enum machine_mode mode;
4406 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4411 ir1_mem_operand (op, mode)
4413 enum machine_mode mode;
4415 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4419 /* This is similar to operand_subword but allows autoincrement
/* Return subword I of OP for a two-word mode (HImode or HFmode),
   handling MEMs with side-effect addresses that operand_subword
   rejects; aborts via fatal_insn on unsupported forms.  */
4423 c4x_operand_subword (op, i, validate_address, mode)
4426 int validate_address;
4427 enum machine_mode mode;
4429 if (mode != HImode && mode != HFmode)
4430 fatal_insn ("c4x_operand_subword: invalid mode", op);
/* An HFmode value in a register has no addressable subwords.  */
4432 if (mode == HFmode && REG_P (op))
4433 fatal_insn ("c4x_operand_subword: invalid operand", op);
4435 if (GET_CODE (op) == MEM)
4437 enum rtx_code code = GET_CODE (XEXP (op, 0));
/* NOTE(review): this inner `mode' shadows the parameter of the same
   name for the rest of this scope — confirm intended before touching
   either.  */
4438 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4439 enum machine_mode submode;
4444 else if (mode == HFmode)
/* For side-effect addresses, reuse the same address rtx with the
   single-word submode.  */
4451 return gen_rtx_MEM (submode, XEXP (op, 0));
4457 /* We could handle these with some difficulty.
4458 e.g., *p-- => *(p-=2); *(p+1). */
4459 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4465 fatal_insn ("c4x_operand_subword: invalid address", op);
4467 /* Even though offsettable_address_p considers (MEM
4468 (LO_SUM)) to be offsettable, it is not safe if the
4469 address is at the end of the data page since we also have
4470 to fix up the associated high PART. In this case where
4471 we are trying to split a HImode or HFmode memory
4472 reference, we would have to emit another insn to reload a
4473 new HIGH value. It's easier to disable LO_SUM memory references
4474 in HImode or HFmode and we probably get better code. */
4476 fatal_insn ("c4x_operand_subword: address not offsettable", op);
/* Everything else is handled by the generic routine.  */
4483 return operand_subword (op, i, validate_address, mode);
4488 struct name_list *next;
4492 static struct name_list *global_head;
4493 static struct name_list *extern_head;
4496 /* Add NAME to list of global symbols and remove from external list if
4497 present on external list. */
4500 c4x_global_label (name)
4503 struct name_list *p, *last;
4505 /* Do not insert duplicate names, so linearly search through list of
4510 if (strcmp (p->name, name) == 0)
4514 p = (struct name_list *) permalloc (sizeof *p);
4515 p->next = global_head;
4519 /* Remove this name from ref list if present. */
4524 if (strcmp (p->name, name) == 0)
4527 last->next = p->next;
4529 extern_head = p->next;
4538 /* Add NAME to list of external symbols. */
4541 c4x_external_ref (name)
4544 struct name_list *p;
4546 /* Do not insert duplicate names. */
4550 if (strcmp (p->name, name) == 0)
4555 /* Do not insert ref if global found. */
4559 if (strcmp (p->name, name) == 0)
4563 p = (struct name_list *) permalloc (sizeof *p);
4564 p->next = extern_head;
4574 struct name_list *p;
4576 /* Output all external names that are not global. */
4580 fprintf (fp, "\t.ref\t");
4581 assemble_name (fp, p->name);
4585 fprintf (fp, "\t.end\n");
/* If DECL's name appears in the pragma-built LIST, prepend attribute
   ATTRIB (with the list entry's value) onto *ATTRIBUTES.  */
4590 c4x_check_attribute (attrib, list, decl, attributes)
4592 tree list, decl, *attributes;
/* Identifier nodes are interned, so comparing IDENTIFIER_POINTERs
   with != is a valid name comparison here.  */
4594 while (list != NULL_TREE
4595 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4596 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4597 list = TREE_CHAIN (list);
/* NOTE(review): an early-return for the list-exhausted case appears
   to be elided between here and the tree_cons — confirm LIST is
   non-NULL before TREE_VALUE is taken.  */
4599 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4605 c4x_insert_attributes (decl, attributes)
4606 tree decl, *attributes;
4608 switch (TREE_CODE (decl))
4611 c4x_check_attribute ("section", code_tree, decl, attributes);
4612 c4x_check_attribute ("const", pure_tree, decl, attributes);
4613 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4614 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4618 c4x_check_attribute ("section", data_tree, decl, attributes);
4626 /* Table of valid machine attributes. */
4627 const struct attribute_spec c4x_attribute_table[] =
4629 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4630 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4631 /* FIXME: code elsewhere in this file treats "naked" as a synonym of
4632 "interrupt"; should it be accepted here? */
4633 { "assembler", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4634 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4635 { NULL, 0, 0, false, false, false, NULL }
4638 /* Handle an attribute requiring a FUNCTION_TYPE;
4639 arguments as in struct attribute_spec.handler. */
4641 c4x_handle_fntype_attribute (node, name, args, flags, no_add_attrs)
4644 tree args ATTRIBUTE_UNUSED;
4645 int flags ATTRIBUTE_UNUSED;
4648 if (TREE_CODE (*node) != FUNCTION_TYPE)
4650 warning ("`%s' attribute only applies to functions",
4651 IDENTIFIER_POINTER (name));
4652 *no_add_attrs = true;
4659 /* !!! FIXME to emit RPTS correctly. */
/* Decide whether the repeat block started by INSN (an RPTB) can be
   emitted as the restricted single-instruction RPTS form; OP is the
   iteration count rtx.  */
4662 c4x_rptb_rpts_p (insn, op)
4665 /* The next insn should be our label marking where the
4666 repeat block starts. */
4667 insn = NEXT_INSN (insn);
4668 if (GET_CODE (insn) != CODE_LABEL)
4670 /* Some insns may have been shifted between the RPTB insn
4671 and the top label... They were probably destined to
4672 be moved out of the loop. For now, let's leave them
4673 where they are and print a warning. We should
4674 probably move these insns before the repeat block insn. */
4676 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4681 /* Skip any notes. */
4682 insn = next_nonnote_insn (insn);
4684 /* This should be our first insn in the loop. */
4685 if (! INSN_P (insn))
4688 /* Skip any notes. */
4689 insn = next_nonnote_insn (insn);
/* RPTS repeats exactly one instruction, so the insn after the loop
   body's single insn must already be the rptb_end.  */
4691 if (! INSN_P (insn))
4694 if (recog_memoized (insn) != CODE_FOR_rptb_end)
/* Only worthwhile for a constant count within the RPTS cycle
   threshold selected by -mrpts=.  */
4700 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4704 /* Check if register r11 is used as the destination of an insn. */
/* For a SEQUENCE (delay slot group), examine only its final insn.  */
4717 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4718 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
/* If X is an insn with a single SET, narrow the search to its
   destination (elided step follows).  */
4720 if (INSN_P (x) && (set = single_set (x)))
4723 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
/* Otherwise recurse over every sub-rtx looking for an R11 ref.  */
4726 fmt = GET_RTX_FORMAT (GET_CODE (x));
4727 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4731 if (c4x_r11_set_p (XEXP (x, i)))
4734 else if (fmt[i] == 'E')
4735 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4736 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4743 /* The c4x sometimes has a problem when the insn before the laj insn
4744 sets the r11 register. Check for this situation. */
/* Returns nonzero when a NOP must be inserted before the LAJ at INSN.  */
4747 c4x_check_laj_p (insn)
4750 insn = prev_nonnote_insn (insn);
4752 /* If this is the start of the function no nop is needed. */
4756 /* If the previous insn is a code label we have to insert a nop. This
4757 could be a jump or table jump. We can find the normal jumps by
4758 scanning the function but this will not find table jumps. */
4759 if (GET_CODE (insn) == CODE_LABEL)
4762 /* If the previous insn sets register r11 we have to insert a nop. */
4763 if (c4x_r11_set_p (insn))
4766 /* No nop needed. */
4771 /* Adjust the cost of a scheduling dependency. Return the new cost of
4772 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4773 A set of an address register followed by a use occurs a 2 cycle
4774 stall (reduced to a single cycle on the c40 using LDA), while
4775 a read of an address register followed by a use occurs a single cycle. */
4777 #define SET_USE_COST 3
4778 #define SETLDA_USE_COST 2
4779 #define READ_USE_COST 2
/* Scheduler cost hook (TARGET_SCHED_ADJUST_COST style): return an
   adjusted cost for the dependency LINK of INSN on DEP_INSN, given the
   scheduler's current COST.  Models the C[34]x address-register
   pipeline stalls described above the SET_USE_COST / SETLDA_USE_COST /
   READ_USE_COST macros: the overall result is the maximum stall found
   over all matching set/use attribute pairs.
   NOTE(review): this extraction is elided -- the K&R parameter
   declarations, braces, the declaration/initialization of `max', and
   the return statements between the numbered lines are not visible
   here; annotations below describe only what is shown.  */
4782 c4x_adjust_cost (insn, link, dep_insn, cost)
4788   /* Don't worry about this until we know what registers have been
   /* Bail out early when first-pass scheduling is disabled and reload
      has not yet assigned hard registers (the hard-register attribute
      tests below would be meaningless).  */
4790   if (flag_schedule_insns == 0 && ! reload_completed)
4793   /* How do we handle dependencies where a read followed by another
4794      read causes a pipeline stall?  For example, a read of ar0 followed
4795      by the use of ar0 for a memory reference.  It looks like we
4796      need to extend the scheduler to handle this case.  */
4798   /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4799      (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4800      so only deal with insns we know about.  */
4801   if (recog_memoized (dep_insn) < 0)
   /* REG_NOTE_KIND of 0 marks a true (read-after-write) data
      dependency.  */
4804   if (REG_NOTE_KIND (link) == 0)
4808       /* Data dependency; DEP_INSN writes a register that INSN reads some
   /* Coarse group test first: any group-1 setter followed by any
      group-1 user pays the full set/use stall.  */
4812       if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4813 	max = SET_USE_COST > max ? SET_USE_COST : max;
4814       if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4815 	max = READ_USE_COST > max ? READ_USE_COST : max;
4819       /* This could be significantly optimized. We should look
4820 	 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4821 	 insn uses ar0-ar7.  We then test if the same register
4822 	 is used.  The tricky bit is that some operands will
4823 	 use several registers... */
   /* Per-register cascade for the auxiliary registers ar0-ar7.  Each
      register gets three tests: plain set (SET_USE_COST), set via LDA
      (SETLDA_USE_COST, cheaper on the c40), and read (READ_USE_COST).  */
4824       if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4825 	max = SET_USE_COST > max ? SET_USE_COST : max;
4826       if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4827 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4828       if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4829 	max = READ_USE_COST > max ? READ_USE_COST : max;
4831       if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4832 	max = SET_USE_COST > max ? SET_USE_COST : max;
4833       if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4834 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4835       if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4836 	max = READ_USE_COST > max ? READ_USE_COST : max;
4838       if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4839 	max = SET_USE_COST > max ? SET_USE_COST : max;
4840       if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4841 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4842       if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4843 	max = READ_USE_COST > max ? READ_USE_COST : max;
4845       if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4846 	max = SET_USE_COST > max ? SET_USE_COST : max;
4847       if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4848 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4849       if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4850 	max = READ_USE_COST > max ? READ_USE_COST : max;
4852       if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4853 	max = SET_USE_COST > max ? SET_USE_COST : max;
4854       if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4855 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4856       if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4857 	max = READ_USE_COST > max ? READ_USE_COST : max;
4859       if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4860 	max = SET_USE_COST > max ? SET_USE_COST : max;
4861       if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4862 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4863       if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4864 	max = READ_USE_COST > max ? READ_USE_COST : max;
4866       if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4867 	max = SET_USE_COST > max ? SET_USE_COST : max;
4868       if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4869 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4870       if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4871 	max = READ_USE_COST > max ? READ_USE_COST : max;
4873       if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4874 	max = SET_USE_COST > max ? SET_USE_COST : max;
4875       if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4876 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4877       if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4878 	max = READ_USE_COST > max ? READ_USE_COST : max;
   /* Index registers ir0/ir1 have only set and set-via-LDA tests --
      no READ_USE entry.  NOTE(review): presumably there is no
      readir0/readir1 insn attribute in c4x.md; confirm before treating
      the asymmetry as a bug.  */
4880       if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4881 	max = SET_USE_COST > max ? SET_USE_COST : max;
4882       if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4883 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4885       if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4886 	max = SET_USE_COST > max ? SET_USE_COST : max;
4887       if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
4888 	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4894       /* For other data dependencies, the default cost specified in the
4898   else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
4900       /* Anti dependency; DEP_INSN reads a register that INSN writes some
4903       /* For c4x anti dependencies, the cost is 0.  */
4906   else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
4908       /* Output dependency; DEP_INSN writes a register that INSN writes some
4911       /* For c4x output dependencies, the cost is 0.  */
/* Register the C4x machine-specific builtin functions (fast_ftoi,
   ansi_ftoi, fast_imult, toieee, frieee, fast_invf) with the front
   end, each tagged with its C4X_BUILTIN_* code for later expansion in
   c4x_expand_builtin.
   NOTE(review): elided extraction -- the return type of each
   builtin_function call, the opening build_function_type calls, and
   any TARGET_C3X/TARGET_C4X conditionals between the numbered lines
   are not visible here.  */
4919 c4x_init_builtins ()
4921   tree endlink = void_list_node;
   /* fast_ftoi: takes a double argument (see the argument list below);
      expanded via C4X_BUILTIN_FIX.  */
4923   builtin_function ("fast_ftoi",
4926 				       tree_cons (NULL_TREE, double_type_node, endlink)),
4927 		    C4X_BUILTIN_FIX, BUILT_IN_MD, NULL);
   /* ansi_ftoi: ANSI-conforming variant, expanded via
      C4X_BUILTIN_FIX_ANSI.  */
4928   builtin_function ("ansi_ftoi",
4931 				       tree_cons (NULL_TREE, double_type_node, endlink)),
4932 		    C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL);
   /* fast_imult: two integer arguments, expanded via C4X_BUILTIN_MPYI
      (the 24-bit multiply pattern -- see c4x_expand_builtin).  */
4934   builtin_function ("fast_imult",
4937 				       tree_cons (NULL_TREE, integer_type_node,
4938 						  tree_cons (NULL_TREE,
4939 							     integer_type_node, endlink))),
4940 		    C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL);
   /* toieee/frieee: conversions between the C4x native float format
      and IEEE; fast_invf: reciprocal (RCPF).  NOTE(review): line 4941
      onward is elided; these three are presumably guarded by a
      TARGET_C4X-style condition -- confirm in the full source.  */
4943       builtin_function ("toieee",
4946 					   tree_cons (NULL_TREE, double_type_node, endlink)),
4947 			C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL);
4948       builtin_function ("frieee",
4951 					   tree_cons (NULL_TREE, double_type_node, endlink)),
4952 			C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL);
4953       builtin_function ("fast_invf",
4956 					   tree_cons (NULL_TREE, double_type_node, endlink)),
4957 			C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL);
/* Expand a call EXP to one of the machine-specific builtins registered
   in c4x_init_builtins, emitting the corresponding insn pattern and
   returning TARGET (allocated as a fresh register when the caller's
   TARGET is absent or not a register of the required mode).
   NOTE(review): elided extraction -- the switch header on fcode, the
   local declarations of arg0/arg1/r0/r1, and the return statements
   after each emit_insn are not visible between the numbered lines.  */
4963 c4x_expand_builtin (exp, target, subtarget, mode, ignore)
4966      rtx subtarget ATTRIBUTE_UNUSED;
4967      enum machine_mode mode ATTRIBUTE_UNUSED;
4968      int ignore ATTRIBUTE_UNUSED;
   /* Recover the FUNCTION_DECL and its C4X_BUILTIN_* code from the
      CALL_EXPR, plus the argument TREE_LIST.  */
4970   tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4971   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4972   tree arglist = TREE_OPERAND (exp, 1);
   /* fast_ftoi: float->int truncation via the fixqfqi_clobber
      pattern.  */
4978     case C4X_BUILTIN_FIX:
4979       arg0 = TREE_VALUE (arglist);
4980       r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4981       r0 = protect_from_queue (r0, 0);
4982       if (! target || ! register_operand (target, QImode))
4983 	target = gen_reg_rtx (QImode);
4984       emit_insn (gen_fixqfqi_clobber (target, r0));
   /* ansi_ftoi: ANSI-conforming truncation via fix_truncqfqi2.  */
4987     case C4X_BUILTIN_FIX_ANSI:
4988       arg0 = TREE_VALUE (arglist);
4989       r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4990       r0 = protect_from_queue (r0, 0);
4991       if (! target || ! register_operand (target, QImode))
4992 	target = gen_reg_rtx (QImode);
4993       emit_insn (gen_fix_truncqfqi2 (target, r0));
   /* fast_imult: integer multiply via mulqi3_24_clobber (presumably
      the 24-bit-operand MPYI form -- see c4x.md).  */
4996     case C4X_BUILTIN_MPYI:
4999       arg0 = TREE_VALUE (arglist);
5000       arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5001       r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
5002       r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
5003       r0 = protect_from_queue (r0, 0);
5004       r1 = protect_from_queue (r1, 0);
5005       if (! target || ! register_operand (target, QImode))
5006 	target = gen_reg_rtx (QImode);
5007       emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
   /* toieee: convert native C4x float to IEEE format.  */
5010     case C4X_BUILTIN_TOIEEE:
5013       arg0 = TREE_VALUE (arglist);
5014       r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5015       r0 = protect_from_queue (r0, 0);
5016       if (! target || ! register_operand (target, QFmode))
5017 	target = gen_reg_rtx (QFmode);
5018       emit_insn (gen_toieee (target, r0));
   /* frieee: convert IEEE float to native format.  The operand is
      forced into memory (put_var_into_stack for decls; an explicit
      stack-slot copy for register values) -- NOTE(review): presumably
      the frieee pattern requires a memory operand, and the elided
      lines after the emit_move_insn reassign r0 to the slot;
      confirm against the full source.  */
5021     case C4X_BUILTIN_FRIEEE:
5024       arg0 = TREE_VALUE (arglist);
5025       if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
5026 	put_var_into_stack (arg0);
5027       r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5028       r0 = protect_from_queue (r0, 0);
5029       if (register_operand (r0, QFmode))
5031 	  r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
5032 	  emit_move_insn (r1, r0);
5035       if (! target || ! register_operand (target, QFmode))
5036 	target = gen_reg_rtx (QFmode);
5037       emit_insn (gen_frieee (target, r0));
   /* fast_invf: float reciprocal via the rcpfqf_clobber pattern.  */
5040     case C4X_BUILTIN_RCPF:
5043       arg0 = TREE_VALUE (arglist);
5044       r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5045       r0 = protect_from_queue (r0, 0);
5046       if (! target || ! register_operand (target, QFmode))
5047 	target = gen_reg_rtx (QFmode);
5048       emit_insn (gen_rcpfqf_clobber (target, r0));
/* Target hook: switch the assembler output to the named section NAME
   by emitting a TI-style ".sect" directive.  FLAGS (section attribute
   bits) is ignored on this target.
   NOTE(review): elided extraction -- the `const char *name' parameter
   declaration and the function braces are not visible here.  */
5055 c4x_asm_named_section (name, flags)
5057      unsigned int flags ATTRIBUTE_UNUSED;
5059   fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);