/* Subroutines for assembler code output on the TMS320C[34]x
   Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003,
   Free Software Foundation, Inc.

   Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
   and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to
   the Free Software Foundation, 51 Franklin Street, Fifth Floor,
   Boston, MA 02110-1301, USA.  */
/* Some output-actions in c4x.md need these.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "target-def.h"
#include "langhooks.h"

rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

static int c4x_leaf_function;

static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                        /* Reg  Modes           Saved.  */
  R0R1_REGS,            /* R0   QI, QF, HF      No.  */
  R0R1_REGS,            /* R1   QI, QF, HF      No.  */
  R2R3_REGS,            /* R2   QI, QF, HF      No.  */
  R2R3_REGS,            /* R3   QI, QF, HF      No.  */
  EXT_LOW_REGS,         /* R4   QI, QF, HF      QI.  */
  EXT_LOW_REGS,         /* R5   QI, QF, HF      QI.  */
  EXT_LOW_REGS,         /* R6   QI, QF, HF      QF.  */
  EXT_LOW_REGS,         /* R7   QI, QF, HF      QF.  */
  ADDR_REGS,            /* AR0  QI              No.  */
  ADDR_REGS,            /* AR1  QI              No.  */
  ADDR_REGS,            /* AR2  QI              No.  */
  ADDR_REGS,            /* AR3  QI              QI.  */
  ADDR_REGS,            /* AR4  QI              QI.  */
  ADDR_REGS,            /* AR5  QI              QI.  */
  ADDR_REGS,            /* AR6  QI              QI.  */
  ADDR_REGS,            /* AR7  QI              QI.  */
  DP_REG,               /* DP   QI              No.  */
  INDEX_REGS,           /* IR0  QI              No.  */
  INDEX_REGS,           /* IR1  QI              No.  */
  BK_REG,               /* BK   QI              QI.  */
  SP_REG,               /* SP   QI              No.  */
  ST_REG,               /* ST   CC              No.  */
  NO_REGS,              /* DIE/IE               No.  */
  NO_REGS,              /* IIE/IF               No.  */
  NO_REGS,              /* IIF/IOF              No.  */
  INT_REGS,             /* RS   QI              No.  */
  INT_REGS,             /* RE   QI              No.  */
  RC_REG,               /* RC   QI              No.  */
  EXT_REGS,             /* R8   QI, QF, HF      QI.  */
  EXT_REGS,             /* R9   QI, QF, HF      No.  */
  EXT_REGS,             /* R10  QI, QF, HF      No.  */
  EXT_REGS,             /* R11  QI, QF, HF      No.  */
};
enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                        /* Reg  Modes           Saved.  */
  HFmode,               /* R0   QI, QF, HF      No.  */
  HFmode,               /* R1   QI, QF, HF      No.  */
  HFmode,               /* R2   QI, QF, HF      No.  */
  HFmode,               /* R3   QI, QF, HF      No.  */
  QFmode,               /* R4   QI, QF, HF      QI.  */
  QFmode,               /* R5   QI, QF, HF      QI.  */
  QImode,               /* R6   QI, QF, HF      QF.  */
  QImode,               /* R7   QI, QF, HF      QF.  */
  QImode,               /* AR0  QI              No.  */
  QImode,               /* AR1  QI              No.  */
  QImode,               /* AR2  QI              No.  */
  QImode,               /* AR3  QI              QI.  */
  QImode,               /* AR4  QI              QI.  */
  QImode,               /* AR5  QI              QI.  */
  QImode,               /* AR6  QI              QI.  */
  QImode,               /* AR7  QI              QI.  */
  VOIDmode,             /* DP   QI              No.  */
  QImode,               /* IR0  QI              No.  */
  QImode,               /* IR1  QI              No.  */
  QImode,               /* BK   QI              QI.  */
  VOIDmode,             /* SP   QI              No.  */
  VOIDmode,             /* ST   CC              No.  */
  VOIDmode,             /* DIE/IE               No.  */
  VOIDmode,             /* IIE/IF               No.  */
  VOIDmode,             /* IIF/IOF              No.  */
  QImode,               /* RS   QI              No.  */
  QImode,               /* RE   QI              No.  */
  VOIDmode,             /* RC   QI              No.  */
  QFmode,               /* R8   QI, QF, HF      QI.  */
  HFmode,               /* R9   QI, QF, HF      No.  */
  HFmode,               /* R10  QI, QF, HF      No.  */
  HFmode,               /* R11  QI, QF, HF      No.  */
};
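/* Illustrative sketch (hypothetical helper, not part of the original
   source): REGNO_REG_CLASS in c4x.h is assumed to index straight into
   c4x_regclass_map, so a query like the one below reduces to an array
   lookup.  Kept under #if 0 since it is illustration only.  */
#if 0
static int
c4x_regno_holds_floats (unsigned int regno)
{
  /* The extended-precision registers (R0-R11) are the only ones whose
     smallest class can hold QFmode/HFmode values in the table above.  */
  enum reg_class rclass = c4x_regclass_map[regno];
  return rclass == R0R1_REGS || rclass == R2R3_REGS
         || rclass == EXT_LOW_REGS || rclass == EXT_REGS;
}
#endif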
/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

int c4x_cpu_version = 40;       /* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;
tree naked_tree = NULL_TREE;

/* Forward declarations.  */
static bool c4x_handle_option (size_t, const char *, int);
static int c4x_isr_reg_used_p (unsigned int);
static int c4x_leaf_function_p (void);
static int c4x_naked_function_p (void);
static int c4x_immed_int_constant (rtx);
static int c4x_immed_float_constant (rtx);
static int c4x_R_indirect (rtx);
static void c4x_S_address_parse (rtx, int *, int *, int *, int *);
static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
static void c4x_file_start (void);
static void c4x_file_end (void);
static void c4x_check_attribute (const char *, tree, tree, tree *);
static int c4x_r11_set_p (rtx);
static int c4x_rptb_valid_p (rtx, rtx);
static void c4x_reorg (void);
static int c4x_label_ref_used_p (rtx, rtx);
static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes (tree, tree *);
static void c4x_asm_named_section (const char *, unsigned int, tree);
static int c4x_adjust_cost (rtx, rtx, rtx, int);
static void c4x_globalize_label (FILE *, const char *);
static bool c4x_rtx_costs (rtx, int, int, int *);
static int c4x_address_cost (rtx);
static void c4x_init_libfuncs (void);
static void c4x_external_libcall (rtx);
static rtx c4x_struct_value_rtx (tree, int);
static tree c4x_gimplify_va_arg_expr (tree, tree, tree *, tree *);
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START c4x_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END c4x_file_end

#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall

/* Play safe, not the fastest code.  */
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_ALIASES | MASK_PARALLEL \
                                     | MASK_PARALLEL_MPY | MASK_RPTB)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION c4x_handle_option

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS c4x_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST c4x_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr

struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_HANDLE_OPTION.  */

c4x_handle_option (size_t code, const char *arg, int value)

    case OPT_m30: c4x_cpu_version = 30; return true;
    case OPT_m31: c4x_cpu_version = 31; return true;
    case OPT_m32: c4x_cpu_version = 32; return true;
    case OPT_m33: c4x_cpu_version = 33; return true;
    case OPT_m40: c4x_cpu_version = 40; return true;
    case OPT_m44: c4x_cpu_version = 44; return true;

      if (arg[0] == 'c' || arg[0] == 'C')

          case 30: case 31: case 32: case 33: case 40: case 44:
            c4x_cpu_version = value;

/* Override command line options.
   Called once after all options have been parsed.
   Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

c4x_override_options (void)

  /* Convert foo / 8.0 into foo * 0.125, etc.  */
  set_fast_math_flags (1);

  /* We should phase out the following at some stage.
     This provides compatibility with the old -mno-aliases option.  */
  if (! TARGET_ALIASES && ! flag_argument_noalias)
    flag_argument_noalias = 1;

    target_flags |= MASK_MPYI | MASK_DB;

    target_flags &= ~(MASK_RPTB | MASK_PARALLEL);

  if (! TARGET_PARALLEL)
    target_flags &= ~MASK_PARALLEL_MPY;
/* This is called before c4x_override_options.  */

c4x_optimization_options (int level ATTRIBUTE_UNUSED,
                          int size ATTRIBUTE_UNUSED)

  /* Scheduling before register allocation can screw up global
     register allocation, especially for functions that use MPY||ADD
     instructions.  The benefit we gain by scheduling before
     register allocation is probably marginal anyhow.  */
  flag_schedule_insns = 0;
/* Write an ASCII string.  */

#define C4X_ASCII_LIMIT 40

c4x_output_ascii (FILE *stream, const char *ptr, int len)

  char sbuf[C4X_ASCII_LIMIT + 1];
  int s, l, special, first = 1, onlys;

    fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)

      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable, add it to the buffer.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)

          if (s < C4X_ASCII_LIMIT - 1)

          fprintf (stream, "\"%s\"", sbuf);

          if (TARGET_TI && l >= 80 && len > 1)

              fprintf (stream, "\n\t.byte\t");

          fprintf (stream, "%d", *ptr);

          if (TARGET_TI && l >= 80 && len > 1)

              fprintf (stream, "\n\t.byte\t");

      fprintf (stream, "\"%s\"", sbuf);

  fputc ('\n', stream);
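/* Usage illustration (an assumption based on the code above, not
   captured compiler output): a short string with a quote would be
   emitted roughly as

	.byte	"ab\"c"

   with unprintable characters written as decimal values and, in TI
   mode, long runs split across multiple .byte directives once a line
   passes 80 characters.  */
#if 0
c4x_output_ascii (stdout, "ab\"c", 4);  /* stdout stands in for asm_out_file.  */
#endif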
c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)

    case Pmode:                 /* Pointer (24/32 bits).  */

    case QImode:                /* Integer (32 bits).  */
      return IS_INT_REGNO (regno);

    case QFmode:                /* Float, Double (32 bits).  */
    case HFmode:                /* Long Double (40 bits).  */
      return IS_EXT_REGNO (regno);

    case CCmode:                /* Condition Codes.  */
    case CC_NOOVmode:           /* Condition Codes.  */
      return IS_ST_REGNO (regno);

    case HImode:                /* Long Long (64 bits).  */
      /* We need two registers to store long longs.  Note that
         it is much easier to constrain the first register
         to start on an even boundary.  */
      return IS_INT_REGNO (regno)
        && IS_INT_REGNO (regno + 1)
        && ((regno & 1) == 0);

  return 0;                     /* We don't support these modes.  */
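/* Illustrative sketch (hypothetical, mirroring the HImode case above):
   a 64-bit HImode value needs an even/odd register pair, so an odd
   starting regno must be rejected.  */
#if 0
static int
c4x_himode_pair_ok (unsigned int regno)
{
  return IS_INT_REGNO (regno)
         && IS_INT_REGNO (regno + 1)
         && (regno & 1) == 0;           /* Pair starts on an even boundary.  */
}
#endif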
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */

c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)

  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))

  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))

  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))

  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
/* The TI C3x C compiler register argument runtime model uses 6 registers,
   AR2, R2, R3, RC, RS, RE.

   The first two floating point arguments (float, double, long double)
   that are found scanning from left to right are assigned to R2 and R3.

   The remaining integer (char, short, int, long) or pointer arguments
   are assigned to the remaining registers in the order AR2, R2, R3,
   RC, RS, RE when scanning left to right, except for the last named
   argument prior to an ellipsis denoting a variable number of
   arguments.  We don't have to worry about the latter condition since
   function.c treats the last named argument as anonymous (unnamed).

   All arguments that cannot be passed in registers are pushed onto
   the stack in reverse order (right to left).  GCC handles that for us.

   c4x_init_cumulative_args() is called at the start, so we can parse
   the args to see how many floating point arguments and how many
   integer (or pointer) arguments there are.  c4x_function_arg() is
   then called (sometimes repeatedly) for each argument (parsed left
   to right) to obtain the register to pass the argument in, or zero
   if the argument is to be passed on the stack.  Once the compiler is
   happy, c4x_function_arg_advance() is called.

   Don't use R0 to pass arguments in, since we use 0 to indicate a stack
   argument.  A worked example follows the register tables below.  */

static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
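/* Worked example (an illustration of the model above, not code from
   the original backend): for a prototyped call

       void f (int a, float b, int c, float d, int e);

   the floats b and d are assigned to R2 and R3, so maxfloats is 2 and
   the integer arguments use row 2 of c4x_int_reglist:
   a -> AR2, c -> RC, e -> RS.  */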
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)

  tree param, next_param;

  cum->floats = cum->ints = 0;

      fprintf (stderr, "\nc4x_init_cumulative_args (");

          tree ret_type = TREE_TYPE (fntype);

          fprintf (stderr, "fntype code = %s, ret code = %s",
                   tree_code_name[(int) TREE_CODE (fntype)],
                   tree_code_name[(int) TREE_CODE (ret_type)]);

        fprintf (stderr, "no fntype");

        fprintf (stderr, ", libname = %s", XSTR (libname, 0));

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)

          enum machine_mode mode;

          /* If the last arg doesn't have void type then we have
             variable arguments.  */

          if ((mode = TYPE_MODE (type)))

              if (! targetm.calls.must_pass_in_stack (mode, type))

                  /* Look for float, double, or long double argument.  */
                  if (mode == QFmode || mode == HFmode)

                  /* Look for integer, enumeral, boolean, char, or pointer
                     argument.  */
                  else if (mode == QImode || mode == Pmode)

    fprintf (stderr, "%s%s, args = %d)\n",
             cum->prototype ? ", prototype" : "",
             cum->var ? ", variable args" : "",

/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                          tree type, int named)

    fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
             GET_MODE_NAME (mode), named);

      && ! targetm.calls.must_pass_in_stack (mode, type))

      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)

      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)

  else if (! TARGET_MEMPARM && ! type)

      /* Handle libcall arguments.  */
      if (mode == QFmode || mode == HFmode)

      else if (mode == QImode || mode == Pmode)
/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                  tree type, int named)

  int reg = 0;                  /* Default to passing argument on stack.  */

      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus the number of floats passed
         in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
        6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)

      cum->ints = cum->floats = 0;

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)

      && ! targetm.calls.must_pass_in_stack (mode, type))

      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)

          if (cum->floats < cum->maxfloats)
            reg = c4x_fp_reglist[cum->floats];

      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)

          if (cum->ints < cum->maxints)
            reg = c4x_int_reglist[cum->maxfloats][cum->ints];

  else if (! TARGET_MEMPARM && ! type)

      /* We could use a different argument calling model for libcalls,
         since we're only calling functions in libgcc.  Thus we could
         pass arguments for long longs in registers rather than on the
         stack.  In the meantime, use the odd TI format.  We make the
         assumption that we won't have more than two floating point
         args, six integer args, and that all the arguments are of the
         same mode.  */
      if (mode == QFmode || mode == HFmode)
        reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
        reg = c4x_int_reglist[0][cum->ints];

      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
               GET_MODE_NAME (mode), named);

        fprintf (stderr, ", reg=%s", reg_names[reg]);

        fprintf (stderr, ", stack");

      fprintf (stderr, ")\n");

    return gen_rtx_REG (mode, reg);
/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs stuff can't handle.  */

c4x_gimplify_va_arg_expr (tree valist, tree type,
                          tree *pre_p ATTRIBUTE_UNUSED,
                          tree *post_p ATTRIBUTE_UNUSED)

  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);

    type = build_pointer_type (type);

  t = build2 (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
              build_int_cst (NULL_TREE, int_size_in_bytes (type)));
  t = fold_convert (build_pointer_type (type), t);
  t = build_va_arg_indirect_ref (t);

    t = build_va_arg_indirect_ref (t);
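/* Illustrative sketch (hypothetical, not from the original source): in
   plain C the tree built above amounts to the following pointer
   arithmetic, with the argument pointer moving downwards.  */
#if 0
static void *
c4x_va_arg_sketch (char **ap, int size)
{
  *ap -= size;                  /* The PREDECREMENT_EXPR on valist.  */
  return (void *) *ap;          /* Then dereference for the value.  */
}
#endif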
c4x_isr_reg_used_p (unsigned int regno)

  /* Don't save/restore FP or ST, we handle them separately.  */
  if (regno == FRAME_POINTER_REGNUM
      || IS_ST_REGNO (regno))

  /* We could be a little smarter about saving/restoring DP.
     We'll only save it for the big memory model or if
     we're paranoid. ;-)  */
  if (IS_DP_REGNO (regno))
    return ! TARGET_SMALL || TARGET_PARANOID;

  /* In a leaf function, only save/restore the regs that are used.  */
  if (c4x_leaf_function)
    return regs_ever_live[regno] && fixed_regs[regno] == 0;

  /* Only save/restore regs that are used by the ISR and regs
     that are likely to be used by functions the ISR calls
     if they are not fixed.  */
  return IS_EXT_REGNO (regno)
    || ((regs_ever_live[regno] || call_used_regs[regno])
        && fixed_regs[regno] == 0);
c4x_leaf_function_p (void)

  /* A leaf function makes no calls, so we only need
     to save/restore the registers we actually use.
     For the global variable leaf_function to be set, we need
     to define LEAF_REGISTERS and all that it entails.
     Let's check ourselves....  */

  if (lookup_attribute ("leaf_pretend",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
      /* Use the leaf_pretend attribute at your own risk.  This is a hack
         to speed up ISRs that call a function infrequently where the
         overhead of saving and restoring the additional registers is not
         warranted.  You must save and restore the additional registers
         required by the called function.  Caveat emptor.  Here's enough
         rope to hang yourself...  */
  if (leaf_function_p ())

c4x_naked_function_p (void)

  type = TREE_TYPE (current_function_decl);
  return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
c4x_interrupt_function_p (void)

  const char *cfun_name;

  if (lookup_attribute ("interrupt",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))

  /* Look for TI style c_intnn.  */
  cfun_name = current_function_name ();
  return cfun_name[0] == 'c'
    && cfun_name[1] == '_'
    && cfun_name[2] == 'i'
    && cfun_name[3] == 'n'
    && cfun_name[4] == 't'
    && ISDIGIT (cfun_name[5])
    && ISDIGIT (cfun_name[6]);
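/* Illustration (an assumption drawn from the check above): a function
   following the TI c_intNN naming convention is treated as an
   interrupt handler even without the interrupt attribute.  */
#if 0
void
c_int01 (void)                  /* Matches c_intNN, so it gets an ISR
                                   prologue and epilogue.  */
{
}
#endif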
c4x_expand_prologue (void)

  int size = get_frame_size ();

  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (if >= -O2) and this
     is used so it won't needlessly push the frame pointer.  */

  /* For a __naked__ function don't build a prologue.  */
  if (c4x_naked_function_p ())

  /* For an __interrupt__ function build a specific prologue.  */
  if (c4x_interrupt_function_p ())

      c4x_leaf_function = c4x_leaf_function_p ();

      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;

      insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                   gen_rtx_REG (QImode, SP_REGNO)));
      RTX_FRAME_RELATED_P (insn) = 1;

      /* We require that an ISR uses fewer than 32768 words of
         local variables, otherwise we have to go to lots of
         effort to save a register, load it with the desired size,
         adjust the stack pointer, and then restore the modified
         register.  Frankly, I think it is a poor ISR that
         requires more than 32767 words of local temporary
         storage!  */
        error ("ISR %s requires %d words of local vars, max is 32767",
               current_function_name (), size);

      insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                    gen_rtx_REG (QImode, SP_REGNO),
      RTX_FRAME_RELATED_P (insn) = 1;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)

          if (c4x_isr_reg_used_p (regno))

              if (regno == DP_REGNO)

                  insn = emit_insn (gen_push_dp ());
                  RTX_FRAME_RELATED_P (insn) = 1;

                  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (IS_EXT_REGNO (regno))

                      insn = emit_insn (gen_pushqf
                                        (gen_rtx_REG (QFmode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;

      /* We need to clear the repeat mode flag if the ISR is
         going to use a RPTB instruction or uses the RC, RS, or RE
         registers.  */
      if (regs_ever_live[RC_REGNO]
          || regs_ever_live[RS_REGNO]
          || regs_ever_live[RE_REGNO])

          insn = emit_insn (gen_andn_st (GEN_INT (~0x100)));
          RTX_FRAME_RELATED_P (insn) = 1;

      /* Reload the DP reg if we are paranoid about some turkey
         violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)

          insn = emit_insn (gen_set_ldp_prologue
                            (gen_rtx_REG (QImode, DP_REGNO),
                             gen_rtx_SYMBOL_REF (QImode, "data_sec")));
          RTX_FRAME_RELATED_P (insn) = 1;

  if (frame_pointer_needed)

          || (current_function_args_size != 0)

          insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                       gen_rtx_REG (QImode, SP_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;

          /* Since ar3 is not used, we don't need to push it.  */

      /* If we use ar3, we need to push it.  */

      if ((size != 0) || (current_function_args_size != 0))

          /* If we are omitting the frame pointer, we still have
             to make space for it so the offsets are correct
             unless we don't use anything on the stack at all.  */

      /* Local vars are too big, it will take multiple operations
         to increment the SP.  */

          insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                       GEN_INT (size >> 16)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                         gen_rtx_REG (QImode, R1_REGNO),
          RTX_FRAME_RELATED_P (insn) = 1;

          insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                       GEN_INT (size & ~0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;

      insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                    gen_rtx_REG (QImode, R1_REGNO),
                                    GEN_INT (size & 0xffff)));
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                    gen_rtx_REG (QImode, SP_REGNO),
                                    gen_rtx_REG (QImode, R1_REGNO)));
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Local vars take up less than 32767 words, so we can directly
         add the number.  */
      insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                    gen_rtx_REG (QImode, SP_REGNO),
      RTX_FRAME_RELATED_P (insn) = 1;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)

      if (regs_ever_live[regno] && ! call_used_regs[regno])

          if (IS_FLOAT_CALL_SAVED_REGNO (regno))

              if (TARGET_PRESERVE_FLOAT)

                  insn = emit_insn (gen_pushqi
                                    (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;

              insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
              RTX_FRAME_RELATED_P (insn) = 1;

          else if ((! dont_push_ar3) || (regno != AR3_REGNO))

              insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
              RTX_FRAME_RELATED_P (insn) = 1;
c4x_expand_epilogue (void)

  int size = get_frame_size ();

  /* For a __naked__ function build no epilogue.  */
  if (c4x_naked_function_p ())

      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;

  /* For an __interrupt__ function build a specific epilogue.  */
  if (c4x_interrupt_function_p ())

      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)

          if (! c4x_isr_reg_used_p (regno))

          if (regno == DP_REGNO)

              insn = emit_insn (gen_pop_dp ());
              RTX_FRAME_RELATED_P (insn) = 1;

              /* We have to use unspec because the compiler will delete insns
                 that are not call-saved.  */
              if (IS_EXT_REGNO (regno))

                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;

              insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
              RTX_FRAME_RELATED_P (insn) = 1;

      insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                    gen_rtx_REG (QImode, SP_REGNO),
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_popqi
                        (gen_rtx_REG (QImode, AR3_REGNO)));
      RTX_FRAME_RELATED_P (insn) = 1;

      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;

  if (frame_pointer_needed)

          || (current_function_args_size != 0)

             (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
                         gen_rtx_MEM (QImode,
                                       (QImode, gen_rtx_REG (QImode,
          RTX_FRAME_RELATED_P (insn) = 1;

          /* We already have the return value and the fp,
             so we need to add those to the stack.  */

          /* Since ar3 is not used for anything, we don't need to
             pop it.  */

      dont_pop_ar3 = 0;         /* If we use ar3, we need to pop it.  */
      if (size || current_function_args_size)

          /* If we are omitting the frame pointer, we still have
             to make space for it so the offsets are correct
             unless we don't use anything on the stack at all.  */

  /* Now restore the saved registers, putting in the delayed branch
     where required.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)

      if (regs_ever_live[regno] && ! call_used_regs[regno])

          if (regno == AR3_REGNO && dont_pop_ar3)

          if (IS_FLOAT_CALL_SAVED_REGNO (regno))

              insn = emit_insn (gen_popqf_unspec
                                (gen_rtx_REG (QFmode, regno)));
              RTX_FRAME_RELATED_P (insn) = 1;
              if (TARGET_PRESERVE_FLOAT)

                  insn = emit_insn (gen_popqi_unspec
                                    (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;

              insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
              RTX_FRAME_RELATED_P (insn) = 1;

  if (frame_pointer_needed)

          || (current_function_args_size != 0)

          /* Restore the old FP.  */

                     (gen_rtx_REG (QImode, AR3_REGNO),
                      gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

          RTX_FRAME_RELATED_P (insn) = 1;

      /* Local vars are too big, it will take multiple operations
         to restore the SP.  */

          insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                       GEN_INT (size >> 16)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                         gen_rtx_REG (QImode, R3_REGNO),
          RTX_FRAME_RELATED_P (insn) = 1;

          insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                       GEN_INT (size & ~0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;

      insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                    gen_rtx_REG (QImode, R3_REGNO),
                                    GEN_INT (size & 0xffff)));
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                    gen_rtx_REG (QImode, SP_REGNO),
                                    gen_rtx_REG (QImode, R3_REGNO)));
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Local vars take up less than 32768 words, so we can directly
         subtract the number.  */
      insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                    gen_rtx_REG (QImode, SP_REGNO),
      RTX_FRAME_RELATED_P (insn) = 1;

      insn = emit_jump_insn (gen_return_indirect_internal
                             (gen_rtx_REG (QImode, R2_REGNO)));
      RTX_FRAME_RELATED_P (insn) = 1;

      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
c4x_null_epilogue_p (void)

  if (reload_completed
      && ! c4x_naked_function_p ()
      && ! c4x_interrupt_function_p ()
      && ! current_function_calls_alloca
      && ! current_function_args_size
      && ! get_frame_size ())

      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        if (regs_ever_live[regno] && ! call_used_regs[regno]
            && (regno != AR3_REGNO))
c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)

  rtx op0 = operands[0];
  rtx op1 = operands[1];

  if (! reload_in_progress
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))

      /* expand_increment will sometimes create a LO_SUM immediate
         address.  Undo this silliness.  */
      op1 = XEXP (op1, 1);

  if (symbolic_address_operand (op1, mode))

      if (TARGET_LOAD_ADDRESS)

          /* Alias analysis seems to do a better job if we force
             constant addresses to memory after reload.  */
          emit_insn (gen_load_immed_address (op0, op1));

          /* Stick the symbol or label address into the constant pool.  */
          op1 = force_const_mem (Pmode, op1);

  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))

      /* We could be a lot smarter about loading some of these
         constants...  */
      op1 = force_const_mem (mode, op1);

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit the associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))

      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

      emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));

  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))

      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

      emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));

  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))

      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)

      if (GET_MODE (SUBREG_REG (op0)) == QImode)
        op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)

          op0 = copy_rtx (op0);
          PUT_MODE (op0, QImode);

      emit_insn (gen_storeqf_int_clobber (op0, op1));

  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))

      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)

      if (GET_MODE (SUBREG_REG (op1)) == QImode)
        op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)

          op1 = copy_rtx (op1);
          PUT_MODE (op1, QImode);

      emit_insn (gen_loadqf_int_clobber (op0, op1));

      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))

      emit_insn (gen_loadqi_big_constant (op0, op1));

      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))

      emit_insn (gen_loadhi_big_constant (op0, op1));

  /* Adjust the operands in case we have modified them.  */

  /* Emit the normal pattern.  */
c4x_emit_libcall (rtx libcall, enum rtx_code code,
                  enum machine_mode dmode, enum machine_mode smode,
                  int noperands, rtx *operands)

      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
                                     operands[1], smode);
      equiv = gen_rtx_fmt_e (code, dmode, operands[1]);

      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
                                     operands[1], smode, operands[2], smode);
      equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);

  insns = get_insns ();

  emit_libcall_block (insns, operands[0], ret, equiv);

c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
                   enum machine_mode mode, rtx *operands)

  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);

c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
                        enum machine_mode mode, rtx *operands)

  ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
                                 operands[1], mode, operands[2], mode);
  equiv = gen_rtx_TRUNCATE (mode,
                            gen_rtx_LSHIFTRT (HImode,
                                              gen_rtx_MULT (HImode,
                                                            gen_rtx_fmt_e (code, HImode, operands[1]),
                                                            gen_rtx_fmt_e (code, HImode, operands[2])),
  insns = get_insns ();

  emit_libcall_block (insns, operands[0], ret, equiv);
c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)

  rtx base = NULL_RTX;          /* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;          /* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;          /* Displacement.  */

  code = GET_CODE (addr);

      /* Register indirect with auto increment/decrement.  We don't
         allow SP here---push_operand should recognize an operand
         being pushed on the stack.  */

      if (mode != QImode && mode != QFmode)

      base = XEXP (addr, 0);

        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (mode != QImode && mode != QFmode)

            || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))

        base = XEXP (op1, 0);

        if (REGNO (base) != REGNO (op0))

        if (REG_P (XEXP (op1, 1)))
          indx = XEXP (op1, 1);

          disp = XEXP (op1, 1);

      /* Register indirect.  */

      /* Register indirect with displacement or index.  */

        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);
        enum rtx_code code0 = GET_CODE (op0);

                base = op0;     /* Base + index.  */

                if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))

                base = op0;     /* Base + displacement.  */

      /* Direct addressing with the DP register.  */

        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        /* HImode and HFmode direct memory references aren't truly
           offsettable (consider the case at the end of a data page).  We
           probably get better code by loading a pointer and using an
           indirect memory reference.  */
        if (mode == HImode || mode == HFmode)

        if (!REG_P (op0) || REGNO (op0) != DP_REGNO)

        if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))

        if (GET_CODE (op1) == CONST)

      /* Direct addressing with some work for the assembler...  */

      /* Direct addressing.  */

      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)

      /* These need to be converted to a LO_SUM (...).
         LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */

      /* Do not allow direct memory access to absolute addresses.
         This is more pain than it's worth, especially for the
         small memory model where we can't guarantee that
         this address is within the data page---we don't want
         to modify the DP register in the small memory model,
         even temporarily, since an interrupt can sneak in....  */

      /* Indirect indirect addressing.  */

      fatal_insn ("using CONST_DOUBLE for address", addr);

  /* Validate the base register.  */

  /* Check that the address is offsettable for HImode and HFmode.  */
  if (indx && (mode == HImode || mode == HFmode))

      /* Handle DP based stuff.  */
      if (REGNO (base) == DP_REGNO)

      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))

      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))

  /* Now validate the index register.  */

      if (GET_CODE (indx) != REG)

      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))

      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))

  /* Validate the displacement.  */

      if (GET_CODE (disp) != CONST_INT)

      if (mode == HImode || mode == HFmode)

          /* The offset displacement must be legitimate.  */
          if (! IS_DISP8_OFF_CONST (INTVAL (disp)))

          if (! IS_DISP8_CONST (INTVAL (disp)))

      /* Can't add an index with a disp.  */
c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED)

  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)

      if (mode == HImode || mode == HFmode)

          /* We need to force the address into
             a register so that it is offsettable.  */
          rtx addr_reg = gen_reg_rtx (Pmode);
          emit_move_insn (addr_reg, orig);

          rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

          emit_insn (gen_set_ldp (dp_reg, orig));

          return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
/* Provide the costs of an addressing mode that contains ADDR.
   If ADDR is not a valid address, its cost is irrelevant.
   This is used in cse and loop optimization to determine
   if it is worthwhile storing a common address into a register.
   Unfortunately, the C4x address cost depends on other operands.  */

c4x_address_cost (rtx addr)

  switch (GET_CODE (addr))

      /* These shouldn't be directly generated.  */

        rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
          return TARGET_SMALL ? 3 : 4;

        if (GET_CODE (op1) == CONST)

            rtx offset = const0_rtx;

            op1 = eliminate_constant_term (op1, &offset);

            /* ??? These costs need rethinking...  */
            if (GET_CODE (op1) == LABEL_REF)

            if (GET_CODE (op1) != SYMBOL_REF)

            if (INTVAL (offset) == 0)

        fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);

        register rtx op0 = XEXP (addr, 0);
        register rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op0) != REG)

        switch (GET_CODE (op1))

            /* This cost for REG+REG must be greater than the cost
               for REG if we want autoincrement addressing modes.  */

            /* The following tries to improve GIV combination
               in strength reduce but appears not to help.  */
            if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))

            if (IS_DISP1_CONST (INTVAL (op1)))

            if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)

  enum machine_mode mode = SELECT_CC_MODE (code, x, y);

  if (mode == CC_NOOVmode
      && (code == LE || code == GE || code == LT || code == GT))

  cc_reg = gen_rtx_REG (mode, ST_REGNO);
  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
                          gen_rtx_COMPARE (mode, x, y)));

c4x_output_cbranch (const char *form, rtx seq)

  static char str[100];

  delay = XVECEXP (final_sequence, 0, 1);
  delayed = ! INSN_ANNULLED_BRANCH_P (seq);
  annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
  annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);

  cp = &str[strlen (str)];
c4x_print_operand (FILE *file, rtx op, int letter)

    case '#':                   /* Delayed.  */
        fprintf (file, "d");

  code = GET_CODE (op);

    case 'A':                   /* Direct address.  */
      if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
        fprintf (file, "@");

    case 'H':                   /* Sethi.  */
      output_addr_const (file, op);

    case 'I':                   /* Reversed condition.  */
      code = reverse_condition (code);

    case 'L':                   /* Log 2 of constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%L inconsistency", op);
      fprintf (file, "%d", exact_log2 (INTVAL (op)));

    case 'N':                   /* Ones complement of small constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%N inconsistency", op);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));

    case 'K':                   /* Generate ldp(k) if direct address.  */
          && GET_CODE (XEXP (op, 0)) == LO_SUM
          && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
          && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)

          op1 = XEXP (XEXP (op, 0), 1);
          if (GET_CODE (op1) == CONST_INT || GET_CODE (op1) == SYMBOL_REF)

              fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
              output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
              fprintf (file, "\n");

    case 'M':                   /* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL        /* Only used in asm statements.  */
          && (GET_CODE (XEXP (op, 0)) == CONST
              || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))

          fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
          output_address (XEXP (op, 0));
          fprintf (file, "\n\t");

    case 'O':                   /* Offset address.  */
      if (code == MEM && c4x_autoinc_operand (op, Pmode))

      else if (code == MEM)
        output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));

      else if (code == REG)
        fprintf (file, "%s", reg_names[REGNO (op) + 1]);

        fatal_insn ("c4x_print_operand: %%O inconsistency", op);

    case 'C':                   /* Call.  */

    case 'U':                   /* Call/callu.  */
      if (code != SYMBOL_REF)
        fprintf (file, "u");

      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
        fprintf (file, "%s", float_reg_names[REGNO (op)]);

        fprintf (file, "%s", reg_names[REGNO (op)]);

      output_address (XEXP (op, 0));

      real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
                       sizeof (str), 0, 1);
      fprintf (file, "%s", str);

      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));

      fprintf (file, "ne");

      fprintf (file, "eq");

      fprintf (file, "ge");

      fprintf (file, "gt");

      fprintf (file, "le");

      fprintf (file, "lt");

      fprintf (file, "hs");

      fprintf (file, "hi");

      fprintf (file, "ls");

      fprintf (file, "lo");

      output_addr_const (file, op);

      output_addr_const (file, XEXP (op, 0));

      fatal_insn ("c4x_print_operand: Bad operand case", op);
c4x_print_operand_address (FILE *file, rtx addr)

  switch (GET_CODE (addr))

      fprintf (file, "*%s", reg_names[REGNO (addr)]);

      fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);

      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);

        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);

          fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);

        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);

          fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);

      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);

      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);

    case PLUS:                  /* Indirect with displacement.  */

        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

            if (IS_INDEX_REG (op0))

                fprintf (file, "*+%s(%s)",
                         reg_names[REGNO (op1)],
                         reg_names[REGNO (op0)]);       /* Index + base.  */

                fprintf (file, "*+%s(%s)",
                         reg_names[REGNO (op0)],
                         reg_names[REGNO (op1)]);       /* Base + index.  */

            else if (INTVAL (op1) < 0)

                fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
                         reg_names[REGNO (op0)],
                         -INTVAL (op1));        /* Base - displacement.  */

                fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
                         reg_names[REGNO (op0)],
                         INTVAL (op1));         /* Base + displacement.  */

            fatal_insn ("c4x_print_operand_address: Bad operand case", addr);

        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (REG_P (op0) && REGNO (op0) == DP_REGNO)
          c4x_print_operand_address (file, op1);

          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);

      fprintf (file, "@");
      output_addr_const (file, addr);

    /* We shouldn't access CONST_INT addresses.  */

      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
/* Return nonzero if the floating point operand will fit
   in the immediate field.  */

c4x_immed_float_p (rtx op)

  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  if (GET_MODE (op) == HFmode)
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);

    REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);

  /* Sign extend the exponent.  */
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
  if (exponent == -128)

  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
    return 0;                   /* Precision doesn't fit.  */

  return (exponent <= 7)        /* Positive exp.  */
    && (exponent >= -7);        /* Negative exp.  */
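/* Illustrative sketch (hypothetical helper, not in the original
   source): the sign-extension trick used above, shown on a plain
   integer.  XORing an 8-bit value with 0x80 and subtracting 0x80
   sign-extends it into a host int.  */
#if 0
static int
sign_extend_8 (unsigned int byte)
{
  return ((byte & 0xff) ^ 0x80) - 0x80;  /* 0xff -> -1, 0x7f -> 127.  */
}
#endif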
/* The last instruction in a repeat block cannot be a Bcond, DBcond,
   CALL, CALLcond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.

   None of the last four instructions from the bottom of the block can
   be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
   BcondAT or RETIcondD.

   This routine scans the four previous insns for a jump insn, and if
   one is found, returns 1 so that we bung in a nop instruction.
   This simple-minded strategy will add a nop when it may not
   be required, say when there is a JUMP_INSN near the end of the
   block that doesn't get converted into a delayed branch.

   Note that we cannot have a call insn, since we don't generate
   repeat loops with calls in them (although I suppose we could, but
   there's no benefit).

   !!! FIXME.  The rptb_top insn may be sucked into a SEQUENCE.  */

c4x_rptb_nop_p (rtx insn)

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  /* If there is a label at the end of the loop we must insert
     a NOP.  */
      insn = previous_insn (insn);
    } while (GET_CODE (insn) == NOTE
             || GET_CODE (insn) == USE
             || GET_CODE (insn) == CLOBBER);
  if (GET_CODE (insn) == CODE_LABEL)

  for (i = 0; i < 4; i++)

      /* Search back for the previous non-note and non-label insn.  */
      while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
             || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)

          if (insn == start_label)

          insn = previous_insn (insn);

      /* If we have a jump instruction we should insert a NOP.  If we
         hit the repeat block top we should only insert a NOP if the loop
         is empty.  */
      if (GET_CODE (insn) == JUMP_INSN)

      insn = previous_insn (insn);
/* The C4x looping instruction needs to be emitted at the top of the
   loop.  Emitting the true RTL for a looping instruction at the top of
   the loop can cause problems with flow analysis.  So instead, a dummy
   doloop insn is emitted at the end of the loop.  This routine checks
   for the presence of this doloop insn and then searches back to the
   top of the loop, where it inserts the true looping insn (provided
   there are no instructions in the loop which would cause problems).
   Any additional labels can be emitted at this point.  In addition, if
   the desired loop count register was not allocated, this routine does
   nothing.

   Before we can create a repeat block looping instruction we have to
   verify that there are no jumps out of the loop and that no jumps from
   outside the loop go into it.  This can happen in the basic block
   reorder pass.  The C4x cpu cannot handle this.  */
c4x_label_ref_used_p (rtx x, rtx code_label)

  code = GET_CODE (x);
  if (code == LABEL_REF)
    return INSN_UID (XEXP (x, 0)) == INSN_UID (code_label);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)

        if (c4x_label_ref_used_p (XEXP (x, i), code_label))

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))

c4x_rptb_valid_p (rtx insn, rtx start_label)

  /* Find the start label.  */
  for (; insn; insn = PREV_INSN (insn))
    if (insn == start_label)

  /* If not found, then we cannot use a rptb or rpts.  The label was
     probably moved by the basic block reorder pass.  */

  /* If any jump jumps inside this block then we must fail.  */
  for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))

      if (GET_CODE (insn) == CODE_LABEL)

          for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN (tmp))
            if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))

  for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))

      if (GET_CODE (insn) == CODE_LABEL)

          for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN (tmp))
            if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))

  /* If any jump jumps outside this block then we must fail.  */
  for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))

      if (GET_CODE (insn) == CODE_LABEL)

          for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN (tmp))
            if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))

          for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN (tmp))
            if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))

  /* All checks OK.  */
c4x_rptb_insert (rtx insn)

  rtx new_start_label;

  /* If the count register has not been allocated to RC, say if
     there is a movmem pattern in the loop, then do not insert a
     RPTB instruction.  Instead we emit a decrement and branch
     at the end of the loop.  */
  count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
  if (REGNO (count_reg) != RC_REGNO)

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  if (! c4x_rptb_valid_p (insn, start_label))

      /* We cannot use the rptb insn.  Replace it so reorg can use
         the delay slots of the jump insn.  */
      emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
      emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
      emit_insn_before (gen_bge (start_label), insn);
      LABEL_NUSES (start_label)++;

  end_label = gen_label_rtx ();
  LABEL_NUSES (end_label)++;
  emit_label_after (end_label, insn);

  new_start_label = gen_label_rtx ();
  LABEL_NUSES (new_start_label)++;

  for (; insn; insn = PREV_INSN (insn))

      if (insn == start_label)

      if (GET_CODE (insn) == JUMP_INSN
          && JUMP_LABEL (insn) == start_label)
        redirect_jump (insn, new_start_label, 0);

    fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);

  emit_label_after (new_start_label, insn);

  if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
    emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);

    emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
  if (LABEL_NUSES (start_label) == 0)
    delete_insn (start_label);
/* We need to use direct addressing for large constants and addresses
   that cannot fit within an instruction.  We must check for these
   after the final jump optimization pass, since this may
   introduce a local_move insn for a SYMBOL_REF.  This pass
   must come before delayed branch slot filling since it can generate
   additional instructions.

   This function also fixes up RPTB style loops that didn't get RC
   allocated as the loop counter.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))

      /* Look for insn.  */

          int insn_code_number;

          insn_code_number = recog_memoized (insn);

          if (insn_code_number < 0)

          /* Insert the RTX for RPTB at the top of the loop
             and a label at the end of the loop.  */
          if (insn_code_number == CODE_FOR_rptb_end)
            c4x_rptb_insert (insn);

          /* We need to split the insn here.  Otherwise the calls to
             force_const_mem will not work for load_immed_address.  */

          /* Don't split the insn if it has been deleted.  */
          if (! INSN_DELETED_P (old))
            insn = try_split (PATTERN (old), old, 1);

          /* When not optimizing, the old insn will still be left around
             with only the 'deleted' bit set.  Transform it into a note
             to avoid confusion of subsequent processing.  */
          if (INSN_DELETED_P (old))

              PUT_CODE (old, NOTE);
              NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (old) = 0;
c4x_a_register (rtx op)

  return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);

c4x_x_register (rtx op)

  return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);

c4x_immed_int_constant (rtx op)

  if (GET_CODE (op) != CONST_INT)

  return GET_MODE (op) == VOIDmode
    || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
    || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;

c4x_immed_float_constant (rtx op)

  if (GET_CODE (op) != CONST_DOUBLE)

  /* Do not check if the CONST_DOUBLE is in memory.  If there is a MEM
     present this only means that a MEM rtx has been generated.  It does
     not mean the rtx is really in memory.  */

  return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;

c4x_shiftable_constant (rtx op)

  int val = INTVAL (op);

  for (i = 0; i < 16; i++)

      mask = ((0xffff >> i) << 16) | 0xffff;
      if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
                          : (val >> i) & mask))
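/* Illustrative sketch (hypothetical, not part of the original source):
   the idea behind c4x_shiftable_constant is that a value such as
   0x12340000 can be synthesized as a 16-bit load of 0x1234 followed by
   a left shift of 16, avoiding a constant-pool reference.  */
#if 0
static int
fits_load_then_shift (int val, int shift)
{
  int hi = val >> shift;                        /* Candidate 16-bit part.  */
  return (hi << shift) == val                   /* No low bits lost...  */
         && hi >= -32768 && hi <= 32767;        /* ...and it fits in 16 bits.  */
}
#endif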
2517 c4x_H_constant (rtx op)
2519 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2524 c4x_I_constant (rtx op)
2526 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2531 c4x_J_constant (rtx op)
2535 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2540 c4x_K_constant (rtx op)
2542 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2544 return IS_INT5_CONST (INTVAL (op));
2549 c4x_L_constant (rtx op)
2551 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2556 c4x_N_constant (rtx op)
2558 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2563 c4x_O_constant (rtx op)
2565 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2569 /* The constraint functions do not have to check the register class,
2570 except where needed to discriminate between constraints;
2571 the predicates have already checked that the operand is valid. */
2573 /* ARx + 9-bit signed displacement or IRn:
2574 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256.
2575 We don't include the pre/post inc/dec forms here since
2576 they are handled by the <> constraints. */
2579 c4x_Q_constraint (rtx op)
2581 enum machine_mode mode = GET_MODE (op);
2583 if (GET_CODE (op) != MEM)
2586 switch (GET_CODE (op))
2593 rtx op0 = XEXP (op, 0);
2594 rtx op1 = XEXP (op, 1);
2602 if (GET_CODE (op1) != CONST_INT)
2605 /* HImode and HFmode must be offsettable. */
2606 if (mode == HImode || mode == HFmode)
2607 return IS_DISP8_OFF_CONST (INTVAL (op1));
2609 return IS_DISP8_CONST (INTVAL (op1));
2620 /* ARx + 5-bit unsigned const
2621 *ARx, *+ARx(n) for n < 32. */
2624 c4x_R_constraint (rtx op)
2626 enum machine_mode mode = GET_MODE (op);
2630 if (GET_CODE (op) != MEM)
2633 switch (GET_CODE (op))
2640 rtx op0 = XEXP (op, 0);
2641 rtx op1 = XEXP (op, 1);
2646 if (GET_CODE (op1) != CONST_INT)
2649 /* HImode and HFmode must be offsettable. */
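/* (The second word of a two-word value lives at displacement
INTVAL (op1) + 1, which must also be within the unsigned 5-bit
range.) */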
2650 if (mode == HImode || mode == HFmode)
2651 return IS_UINT5_CONST (INTVAL (op1) + 1);
2653 return IS_UINT5_CONST (INTVAL (op1));
2665 c4x_R_indirect (rtx op)
2667 enum machine_mode mode = GET_MODE (op);
2669 if (TARGET_C3X || GET_CODE (op) != MEM)
2673 switch (GET_CODE (op))
2676 return IS_ADDR_OR_PSEUDO_REG (op);
2680 rtx op0 = XEXP (op, 0);
2681 rtx op1 = XEXP (op, 1);
2683 /* HImode and HFmode must be offsettable. */
2684 if (mode == HImode || mode == HFmode)
2685 return IS_ADDR_OR_PSEUDO_REG (op0)
2686 && GET_CODE (op1) == CONST_INT
2687 && IS_UINT5_CONST (INTVAL (op1) + 1);
2690 && IS_ADDR_OR_PSEUDO_REG (op0)
2691 && GET_CODE (op1) == CONST_INT
2692 && IS_UINT5_CONST (INTVAL (op1));
2703 /* ARx + 1-bit unsigned const or IRn
2704 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2705 We don't include the pre/post inc/dec forms here since
2706 they are handled by the <> constraints. */
2709 c4x_S_constraint (rtx op)
2711 enum machine_mode mode = GET_MODE (op);
2712 if (GET_CODE (op) != MEM)
2715 switch (GET_CODE (op))
2723 rtx op0 = XEXP (op, 0);
2724 rtx op1 = XEXP (op, 1);
2726 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2727 || (op0 != XEXP (op1, 0)))
2730 op0 = XEXP (op1, 0);
2731 op1 = XEXP (op1, 1);
2732 return REG_P (op0) && REG_P (op1);
2733 /* Pre or post_modify with a displacement of 0 or 1
2734 should not be generated. */
2740 rtx op0 = XEXP (op, 0);
2741 rtx op1 = XEXP (op, 1);
2749 if (GET_CODE (op1) != CONST_INT)
2752 /* HImode and HFmode must be offsettable. */
2753 if (mode == HImode || mode == HFmode)
2754 return IS_DISP1_OFF_CONST (INTVAL (op1));
2756 return IS_DISP1_CONST (INTVAL (op1));
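/* Return nonzero if OP is a MEM whose address is a valid S-class
indirect address for its mode: a plain address register, a pre/post
modify form, or a base register plus an index register or a 1-bit
displacement. */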
2768 c4x_S_indirect (rtx op)
2770 enum machine_mode mode = GET_MODE (op);
2771 if (GET_CODE (op) != MEM)
2775 switch (GET_CODE (op))
2779 if (mode != QImode && mode != QFmode)
2786 return IS_ADDR_OR_PSEUDO_REG (op);
2791 rtx op0 = XEXP (op, 0);
2792 rtx op1 = XEXP (op, 1);
2794 if (mode != QImode && mode != QFmode)
2797 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2798 || (op0 != XEXP (op1, 0)))
2801 op0 = XEXP (op1, 0);
2802 op1 = XEXP (op1, 1);
2803 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2804 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2805 /* Pre or post_modify with a displacement of 0 or 1
2806 should not be generated. */
2811 rtx op0 = XEXP (op, 0);
2812 rtx op1 = XEXP (op, 1);
2816 /* HImode and HFmode must be offsettable. */
2817 if (mode == HImode || mode == HFmode)
2818 return IS_ADDR_OR_PSEUDO_REG (op0)
2819 && GET_CODE (op1) == CONST_INT
2820 && IS_DISP1_OFF_CONST (INTVAL (op1));
2823 return (IS_INDEX_OR_PSEUDO_REG (op1)
2824 && IS_ADDR_OR_PSEUDO_REG (op0))
2825 || (IS_ADDR_OR_PSEUDO_REG (op1)
2826 && IS_INDEX_OR_PSEUDO_REG (op0));
2828 return IS_ADDR_OR_PSEUDO_REG (op0)
2829 && GET_CODE (op1) == CONST_INT
2830 && IS_DISP1_CONST (INTVAL (op1));
2842 /* Direct memory operand. */
2845 c4x_T_constraint (rtx op)
2847 if (GET_CODE (op) != MEM)
2851 if (GET_CODE (op) != LO_SUM)
2853 /* Allow call operands. */
2854 return GET_CODE (op) == SYMBOL_REF
2855 && GET_MODE (op) == Pmode
2856 && SYMBOL_REF_FUNCTION_P (op);
2859 /* HImode and HFmode are not offsettable. */
2860 if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
2863 if ((GET_CODE (XEXP (op, 0)) == REG)
2864 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2865 return c4x_U_constraint (XEXP (op, 1));
2871 /* Symbolic operand. */
2874 c4x_U_constraint (rtx op)
2876 /* Don't allow direct addressing to an arbitrary constant. */
2877 return GET_CODE (op) == CONST
2878 || GET_CODE (op) == SYMBOL_REF
2879 || GET_CODE (op) == LABEL_REF;
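/* Return nonzero if OP is a MEM using one of the auto-modification
addressing modes (pre/post increment, decrement, or modify). */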
2884 c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2886 if (GET_CODE (op) == MEM)
2888 enum rtx_code code = GET_CODE (XEXP (op, 0));
2894 || code == PRE_MODIFY
2895 || code == POST_MODIFY
2904 mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2906 /* Allow (subreg:HF (reg:HI)) that may be generated for a union of an
2907 int and a long double. */
2908 if (GET_CODE (op) == SUBREG
2909 && (GET_MODE (op) == QFmode)
2910 && (GET_MODE (SUBREG_REG (op)) == QImode
2911 || GET_MODE (SUBREG_REG (op)) == HImode))
2918 reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2920 if (REG_P (op) || CONSTANT_P (op))
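/* Return nonzero if OP is a register, a constant, or a MEM whose
address calculation has no side effect (no pre/post modify forms). */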
2927 not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2929 if (REG_P (op) || CONSTANT_P (op))
2931 if (GET_CODE (op) != MEM)
2934 switch (GET_CODE (op))
2941 rtx op0 = XEXP (op, 0);
2942 rtx op1 = XEXP (op, 1);
2947 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
2953 rtx op0 = XEXP (op, 0);
2955 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2973 not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2975 if (REG_P (op) && REGNO (op) == RC_REGNO)
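/* Decompose the S-class indirect address OP into its base register
number, auto increment/decrement flag, index register number, and
constant displacement, storing them through BASE, INCDEC, INDEX,
and DISP respectively. */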
2982 c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
2989 if (GET_CODE (op) != MEM)
2990 fatal_insn ("invalid indirect memory address", op);
2993 switch (GET_CODE (op))
2996 *base = REGNO (XEXP (op, 0));
3002 *base = REGNO (XEXP (op, 0));
3008 *base = REGNO (XEXP (op, 0));
3014 *base = REGNO (XEXP (op, 0));
3020 *base = REGNO (XEXP (op, 0));
3021 if (REG_P (XEXP (XEXP (op, 1), 1)))
3023 *index = REGNO (XEXP (XEXP (op, 1), 1));
3024 *disp = 0; /* ??? */
3027 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3032 *base = REGNO (XEXP (op, 0));
3033 if (REG_P (XEXP (XEXP (op, 1), 1)))
3035 *index = REGNO (XEXP (XEXP (op, 1), 1));
3036 *disp = 1; /* ??? */
3039 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3050 rtx op0 = XEXP (op, 0);
3051 rtx op1 = XEXP (op, 1);
3053 if (c4x_a_register (op0))
3055 if (c4x_x_register (op1))
3057 *base = REGNO (op0);
3058 *index = REGNO (op1);
3061 else if ((GET_CODE (op1) == CONST_INT
3062 && IS_DISP1_CONST (INTVAL (op1))))
3064 *base = REGNO (op0);
3065 *disp = INTVAL (op1);
3069 else if (c4x_x_register (op0) && c4x_a_register (op1))
3071 *base = REGNO (op1);
3072 *index = REGNO (op0);
3079 fatal_insn ("invalid indirect (S) memory address", op);
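/* Return nonzero if the addresses OP0 and OP1 may conflict when the
two accesses are issued in parallel. STORE0 and STORE1 are nonzero
when the corresponding access is a store. */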
3085 c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
3096 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3099 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3100 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3102 if (store0 && store1)
3104 /* If we have two stores in parallel to the same address, then
3105 the C4x only executes one of the stores. This is unlikely to
3106 cause problems except when writing to a hardware device such
3107 as a FIFO since the second write will be lost. The user
3108 should flag the hardware location as being volatile so that
3109 we don't do this optimization. While it is unlikely that we
3110 have an aliased address if both locations are not marked
3111 volatile, it is probably safer to flag a potential conflict
3112 if either location is volatile. */
3113 if (! flag_argument_noalias)
3115 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3120 /* If we have a parallel load and a store to the same address, the load
3121 is performed first, so there is no conflict. Similarly, there is
3122 no conflict if we have parallel loads from the same address. */
3124 /* Cannot use auto increment or auto decrement twice for the same
3126 if (base0 == base1 && incdec0 && incdec1)
3129 /* It might be too confusing for GCC if we use a base register
3130 with a side effect and a memory reference using the same register
3132 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3135 /* We cannot optimize the case where op0 and op1 refer to the same
3137 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3145 /* Check for while loop inside a decrement and branch loop. */
3148 c4x_label_conflict (rtx insn, rtx jump, rtx db)
3152 if (GET_CODE (insn) == CODE_LABEL)
3154 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3156 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3159 insn = PREV_INSN (insn);
3165 /* Validate combination of operands for parallel load/store instructions. */
3168 valid_parallel_load_store (rtx *operands,
3169 enum machine_mode mode ATTRIBUTE_UNUSED)
3171 rtx op0 = operands[0];
3172 rtx op1 = operands[1];
3173 rtx op2 = operands[2];
3174 rtx op3 = operands[3];
3176 if (GET_CODE (op0) == SUBREG)
3177 op0 = SUBREG_REG (op0);
3178 if (GET_CODE (op1) == SUBREG)
3179 op1 = SUBREG_REG (op1);
3180 if (GET_CODE (op2) == SUBREG)
3181 op2 = SUBREG_REG (op2);
3182 if (GET_CODE (op3) == SUBREG)
3183 op3 = SUBREG_REG (op3);
3185 /* The patterns should only allow ext_low_reg_operand() or
3186 par_ind_operand() operands. Thus of the 4 operands, only 2
3187 should be REGs and the other 2 should be MEMs. */
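/* For instance, a valid register/MEM combination here might
correspond to a parallel pair such as "ldi *ar0,r0 || sti r2,*ar1"
in C4x assembly (example ours, not taken from the patterns). */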
3189 /* This test prevents the multipack pass from using this pattern if
3190 op0 is used as an index or base register in op2 or op3, since
3191 this combination will require reloading. */
3192 if (GET_CODE (op0) == REG
3193 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3194 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3198 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3199 return (REGNO (op0) != REGNO (op2))
3200 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3201 && ! c4x_address_conflict (op1, op3, 0, 0);
3204 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3205 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3206 && ! c4x_address_conflict (op0, op2, 1, 1);
3209 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3210 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3211 && ! c4x_address_conflict (op1, op2, 0, 1);
3214 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3215 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3216 && ! c4x_address_conflict (op0, op3, 1, 0);
3223 valid_parallel_operands_4 (rtx *operands,
3224 enum machine_mode mode ATTRIBUTE_UNUSED)
3226 rtx op0 = operands[0];
3227 rtx op2 = operands[2];
3229 if (GET_CODE (op0) == SUBREG)
3230 op0 = SUBREG_REG (op0);
3231 if (GET_CODE (op2) == SUBREG)
3232 op2 = SUBREG_REG (op2);
3234 /* This test prevents the multipack pass from using this pattern if
3235 op0 is used as an index or base register in op2, since this combination
3236 will require reloading. */
3237 if (GET_CODE (op0) == REG
3238 && GET_CODE (op2) == MEM
3239 && reg_mentioned_p (op0, XEXP (op2, 0)))
3247 valid_parallel_operands_5 (rtx *operands,
3248 enum machine_mode mode ATTRIBUTE_UNUSED)
3251 rtx op0 = operands[0];
3252 rtx op1 = operands[1];
3253 rtx op2 = operands[2];
3254 rtx op3 = operands[3];
3256 if (GET_CODE (op0) == SUBREG)
3257 op0 = SUBREG_REG (op0);
3258 if (GET_CODE (op1) == SUBREG)
3259 op1 = SUBREG_REG (op1);
3260 if (GET_CODE (op2) == SUBREG)
3261 op2 = SUBREG_REG (op2);
3263 /* The patterns should only allow ext_low_reg_operand() or
3264 par_ind_operand() operands. Operands 1 and 2 may be commutative
3265 but only one of them can be a register. */
3266 if (GET_CODE (op1) == REG)
3268 if (GET_CODE (op2) == REG)
3274 /* This test prevents the multipack pass from using this pattern if
3275 op0 is used as an index or base register in op3, since this combination
3276 will require reloading. */
3277 if (GET_CODE (op0) == REG
3278 && GET_CODE (op3) == MEM
3279 && reg_mentioned_p (op0, XEXP (op3, 0)))
3287 valid_parallel_operands_6 (rtx *operands,
3288 enum machine_mode mode ATTRIBUTE_UNUSED)
3291 rtx op0 = operands[0];
3292 rtx op1 = operands[1];
3293 rtx op2 = operands[2];
3294 rtx op4 = operands[4];
3295 rtx op5 = operands[5];
3297 if (GET_CODE (op1) == SUBREG)
3298 op1 = SUBREG_REG (op1);
3299 if (GET_CODE (op2) == SUBREG)
3300 op2 = SUBREG_REG (op2);
3301 if (GET_CODE (op4) == SUBREG)
3302 op4 = SUBREG_REG (op4);
3303 if (GET_CODE (op5) == SUBREG)
3304 op5 = SUBREG_REG (op5);
3306 /* The patterns should only allow ext_low_reg_operand() or
3307 par_ind_operand() operands. Thus of the 4 input operands, only 2
3308 should be REGs and the other 2 should be MEMs. */
3310 if (GET_CODE (op1) == REG)
3312 if (GET_CODE (op2) == REG)
3314 if (GET_CODE (op4) == REG)
3316 if (GET_CODE (op5) == REG)
3319 /* The newer C30/C40 silicon revisions allow three of the four input
3320 operands to be registers. Perhaps we should count the MEMs as well? */
3324 /* This test prevents the multipack pass from using this pattern if
3325 op0 is used as an index or base register in op4 or op5, since
3326 this combination will require reloading. */
3327 if (GET_CODE (op0) == REG
3328 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3329 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3336 /* Validate combination of src operands. Note that the operands have
3337 been screened by the src_operand predicate. We just have to check
3338 that the combination of operands is valid. If FORCE is set, ensure
3339 that the destination regno is valid if we have a 2 operand insn. */
3342 c4x_valid_operands (enum rtx_code code, rtx *operands,
3343 enum machine_mode mode ATTRIBUTE_UNUSED,
3349 enum rtx_code code1;
3350 enum rtx_code code2;
3353 /* FIXME, why can't we tighten the operands for IF_THEN_ELSE? */
3354 if (code == IF_THEN_ELSE)
3355 return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);
3357 if (code == COMPARE)
3370 if (GET_CODE (op0) == SUBREG)
3371 op0 = SUBREG_REG (op0);
3372 if (GET_CODE (op1) == SUBREG)
3373 op1 = SUBREG_REG (op1);
3374 if (GET_CODE (op2) == SUBREG)
3375 op2 = SUBREG_REG (op2);
3377 code1 = GET_CODE (op1);
3378 code2 = GET_CODE (op2);
3381 if (code1 == REG && code2 == REG)
3384 if (code1 == MEM && code2 == MEM)
3386 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3388 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3391 /* We cannot handle two MEMs or two CONSTS, etc. */
3400 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3405 if (! c4x_H_constant (op2))
3409 /* Any valid memory operand screened by src_operand is OK. */
3414 fatal_insn ("c4x_valid_operands: Internal error", op2);
3418 if (GET_CODE (op0) == SCRATCH)
3424 /* Check that we have a valid destination register for a two operand
3426 return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
3430 /* Check non-commutative operators. */
3431 if (code == ASHIFTRT || code == LSHIFTRT
3432 || code == ASHIFT || code == COMPARE)
3434 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3437 /* Assume MINUS is commutative since the subtract patterns
3438 also support the reverse subtract instructions. Since op1
3439 is not a register, and op2 is a register, op1 can only
3440 be a restricted memory operand for a shift instruction. */
3449 if (! c4x_H_constant (op1))
3453 /* Any valid memory operand screened by src_operand is OK. */
3462 if (GET_CODE (op0) == SCRATCH)
3468 /* Check that we have a valid destination register for a two operand
3470 return ! force || REGNO (op1) == REGNO (op0);
3473 if (c4x_J_constant (op1) && c4x_R_indirect (op2))