1 /* Subroutines used for code generation on the Argonaut ARC cpu.
2 Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* ??? This is an old port, and is undoubtedly suffering from bit rot. */
26 #include "coretypes.h"
31 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
36 #include "insn-attr.h"
44 #include "target-def.h"
46 /* Which cpu we're compiling for (NULL(=base), ???). */
47 const char *arc_cpu_string;
50 /* Name of mangle string to add to symbols to separate code compiled for each cpu (or NULL).  */
52 const char *arc_mangle_cpu;
54 /* Save the operands last given to a compare for use when we
55 generate a scc or bcc insn. */
56 rtx arc_compare_op0, arc_compare_op1;
58 /* Name of text, data, and rodata sections, as specified on command line.
59 Selected by -m{text,data,rodata} flags. */
60 const char *arc_text_string = ARC_DEFAULT_TEXT_SECTION;
61 const char *arc_data_string = ARC_DEFAULT_DATA_SECTION;
62 const char *arc_rodata_string = ARC_DEFAULT_RODATA_SECTION;
64 /* Name of text, data, and rodata sections used in varasm.c. */
65 const char *arc_text_section;
66 const char *arc_data_section;
67 const char *arc_rodata_section;
69 /* Array of valid operand punctuation characters. */
70 char arc_punct_chars[256];
72 /* Variables used by arc_final_prescan_insn to implement conditional execution state.  */
/* Current state of the conditional-execution finite state machine.  */
74 static int arc_ccfsm_state;
/* Condition code selected while the ccfsm is active.  */
75 static int arc_ccfsm_current_cc;
/* Insn and label the ccfsm is branching around.  */
76 static rtx arc_ccfsm_target_insn;
77 static int arc_ccfsm_target_label;
79 /* The maximum number of insns skipped which will be conditionalised if possible.  */
81 #define MAX_INSNS_SKIPPED 3
83 /* A nop is needed between a 4 byte insn that sets the condition codes and
84 a branch that uses them (the same isn't true for an 8 byte insn that sets
85 the condition codes). Set by arc_final_prescan_insn. Used by
   arc_print_operand.  */
87 static int last_insn_set_cc_p;
88 static int current_insn_set_cc_p;
/* Forward declarations, using the pre-ISO PARAMS macro for K&R hosts.  */
89 static void record_cc_ref PARAMS ((rtx));
90 static void arc_init_reg_tables PARAMS ((void));
91 static int get_arc_condition_code PARAMS ((rtx));
92 const struct attribute_spec arc_attribute_table[];
93 static tree arc_handle_interrupt_attribute PARAMS ((tree *, tree, tree, int, bool *));
94 static bool arc_assemble_integer PARAMS ((rtx, unsigned int, int));
95 static void arc_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
96 static void arc_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
97 static void arc_file_start PARAMS ((void));
98 static void arc_internal_label PARAMS ((FILE *, const char *, unsigned long));
99 static bool arc_rtx_costs PARAMS ((rtx, int, int, int *));
100 static int arc_address_cost PARAMS ((rtx));
102 /* Initialize the GCC target structure. */
/* Assembler pseudo-ops for emitting aligned 2- and 4-byte integers.  */
103 #undef TARGET_ASM_ALIGNED_HI_OP
104 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
105 #undef TARGET_ASM_ALIGNED_SI_OP
106 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
107 #undef TARGET_ASM_INTEGER
108 #define TARGET_ASM_INTEGER arc_assemble_integer
110 #undef TARGET_ASM_FUNCTION_PROLOGUE
111 #define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
112 #undef TARGET_ASM_FUNCTION_EPILOGUE
113 #define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
114 #undef TARGET_ASM_FILE_START
115 #define TARGET_ASM_FILE_START arc_file_start
116 #undef TARGET_ATTRIBUTE_TABLE
117 #define TARGET_ATTRIBUTE_TABLE arc_attribute_table
118 #undef TARGET_ASM_INTERNAL_LABEL
119 #define TARGET_ASM_INTERNAL_LABEL arc_internal_label
121 #undef TARGET_RTX_COSTS
122 #define TARGET_RTX_COSTS arc_rtx_costs
123 #undef TARGET_ADDRESS_COST
124 #define TARGET_ADDRESS_COST arc_address_cost
/* The single target vector consulted by the language-independent parts
   of the compiler.  */
126 struct gcc_target targetm = TARGET_INITIALIZER;
128 /* Called by OVERRIDE_OPTIONS to initialize various things. */
/* Validate -mcpu: NULL or "base" selects the base cpu (no mangling);
   an extension cpu is accepted as-is; anything else is an error.  */
135 if (arc_cpu_string == 0
136 || !strcmp (arc_cpu_string, "base"))
138 /* Ensure we have a printable value for the .cpu pseudo-op. */
139 arc_cpu_string = "base";
141 arc_mangle_cpu = NULL;
143 else if (ARC_EXTENSION_CPU (arc_cpu_string))
144 ; /* nothing to do */
147 error ("bad value (%s) for -mcpu switch", arc_cpu_string);
148 arc_cpu_string = "base";
150 arc_mangle_cpu = NULL;
153 /* Set the pseudo-ops for the various standard sections. */
/* Each section directive is built once, here, into heap-allocated
   strings that live for the whole compilation.  */
154 arc_text_section = tmp = xmalloc (strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
155 sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
156 arc_data_section = tmp = xmalloc (strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
157 sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
158 arc_rodata_section = tmp = xmalloc (strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
159 sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);
161 arc_init_reg_tables ();
163 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
164 memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
165 arc_punct_chars['#'] = 1;
166 arc_punct_chars['*'] = 1;
167 arc_punct_chars['?'] = 1;
168 arc_punct_chars['!'] = 1;
169 arc_punct_chars['~'] = 1;
172 /* The condition codes of the ARC, and the inverse function. */
/* Entries come in complementary pairs (eq/ne, c/nc, ...), so XOR-ing an
   index with 1 (see ARC_INVERSE_CONDITION_CODE) yields its inverse.  */
173 static const char *const arc_condition_codes[] =
175 "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
176 "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
179 #define ARC_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
181 /* Returns the index of the ARC condition code string in
182 `arc_condition_codes'. COMPARISON should be an rtx like
183 `(eq (...) (...))'. */
186 get_arc_condition_code (comparison)
189 switch (GET_CODE (comparison))
197 case GTU : return 14;
198 case LEU : return 15;
207 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
208 return the mode to be used for the comparison. */
/* Dispatches on the rtx code of X; Y is unused.  */
211 arc_select_cc_mode (op, x, y)
213 rtx x, y ATTRIBUTE_UNUSED;
221 switch (GET_CODE (x))
240 /* Vectors to keep interesting information about registers where it can easily
241 be got. We use to use the actual mode value as the bit number, but there
242 is (or may be) more than 32 modes now. Instead we use two tables: one
243 indexed by hard register number, and one indexed by mode. */
245 /* The purpose of arc_mode_class is to shrink the range of modes so that
246 they all fit (as bit numbers) in a 32 bit word (again). Each real mode is
247 mapped into one arc_mode_class mode. */
/* NOTE(review): C_MODE is used by the C_MODES macro below but is not
   visible in this enumerator list — confirm the enum begins with C_MODE.  */
249 enum arc_mode_class {
251 S_MODE, D_MODE, T_MODE, O_MODE,
252 SF_MODE, DF_MODE, TF_MODE, OF_MODE
255 /* Modes for condition codes. */
256 #define C_MODES (1 << (int) C_MODE)
258 /* Modes for single-word and smaller quantities. */
259 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
261 /* Modes for double-word and smaller quantities. */
/* Same value as before; the (int) cast on DF_MODE is added purely for
   consistency with the C_MODES/S_MODES/T_MODES siblings (the enum value
   converts to int implicitly either way).  */
262 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << (int) DF_MODE))
264 /* Modes for quad-word and smaller quantities. */
265 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
267 /* Value is 1 if register/mode pair is acceptable on arc. */
/* Indexed by hard register number; each entry is a bitmask of the
   arc_mode_class bits acceptable in that register (8 regs per row).  */
269 const unsigned int arc_hard_regno_mode_ok[] = {
270 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
271 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
272 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
273 D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
275 /* ??? Leave these as S_MODES for now. */
276 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
277 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
278 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
279 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
/* Per-mode class bit, filled in by arc_init_reg_tables.  */
282 unsigned int arc_mode_class [NUM_MACHINE_MODES];
/* Register class of each hard register, filled in by arc_init_reg_tables.  */
284 enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
/* Fill in arc_mode_class (class bit per machine mode, keyed on mode
   class and size) and arc_regno_reg_class (register class per hard reg).  */
287 arc_init_reg_tables ()
291 for (i = 0; i < NUM_MACHINE_MODES; i++)
293 switch (GET_MODE_CLASS (i))
296 case MODE_PARTIAL_INT:
297 case MODE_COMPLEX_INT:
/* Integer-like modes, classified by byte size.  */
298 if (GET_MODE_SIZE (i) <= 4)
299 arc_mode_class[i] = 1 << (int) S_MODE;
300 else if (GET_MODE_SIZE (i) == 8)
301 arc_mode_class[i] = 1 << (int) D_MODE;
302 else if (GET_MODE_SIZE (i) == 16)
303 arc_mode_class[i] = 1 << (int) T_MODE;
304 else if (GET_MODE_SIZE (i) == 32)
305 arc_mode_class[i] = 1 << (int) O_MODE;
307 arc_mode_class[i] = 0;
310 case MODE_COMPLEX_FLOAT:
/* Float-like modes, same size buckets as the integer case.  */
311 if (GET_MODE_SIZE (i) <= 4)
312 arc_mode_class[i] = 1 << (int) SF_MODE;
313 else if (GET_MODE_SIZE (i) == 8)
314 arc_mode_class[i] = 1 << (int) DF_MODE;
315 else if (GET_MODE_SIZE (i) == 16)
316 arc_mode_class[i] = 1 << (int) TF_MODE;
317 else if (GET_MODE_SIZE (i) == 32)
318 arc_mode_class[i] = 1 << (int) OF_MODE;
320 arc_mode_class[i] = 0;
324 /* mode_class hasn't been initialized yet for EXTRA_CC_MODES, so
325 we must explicitly check for them here. */
326 if (i == (int) CCmode || i == (int) CCZNmode || i == (int) CCZNCmode)
327 arc_mode_class[i] = 1 << (int) C_MODE;
329 arc_mode_class[i] = 0;
/* Assign a register class to each hard register.  */
334 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
337 arc_regno_reg_class[i] = GENERAL_REGS;
339 arc_regno_reg_class[i] = LPCOUNT_REG;
341 arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
343 arc_regno_reg_class[i] = NO_REGS;
347 /* ARC specific attribute support.
349 The ARC has these attributes:
350 interrupt - for interrupt functions.  */
353 const struct attribute_spec arc_attribute_table[] =
355 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
356 { "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute },
357 { NULL, 0, 0, false, false, false, NULL }
360 /* Handle an "interrupt" attribute; arguments as in
361 struct attribute_spec.handler. */
/* The single argument must be the string "ilink1" or "ilink2"; anything
   else draws a warning and the attribute is dropped via *no_add_attrs.  */
363 arc_handle_interrupt_attribute (node, name, args, flags, no_add_attrs)
364 tree *node ATTRIBUTE_UNUSED;
367 int flags ATTRIBUTE_UNUSED;
370 tree value = TREE_VALUE (args);
372 if (TREE_CODE (value) != STRING_CST)
374 warning ("argument of `%s' attribute is not a string constant",
375 IDENTIFIER_POINTER (name));
376 *no_add_attrs = true;
378 else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
379 && strcmp (TREE_STRING_POINTER (value), "ilink2"))
381 warning ("argument of `%s' attribute is not \"ilink1\" or \"ilink2\"",
382 IDENTIFIER_POINTER (name));
383 *no_add_attrs = true;
390 /* Acceptable arguments to the call insn. */
/* True if OP is a symbol, a legitimate constant integer, or a register.  */
393 call_address_operand (op, mode)
395 enum machine_mode mode;
397 return (symbolic_operand (op, mode)
398 || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
399 || (GET_CODE (op) == REG));
/* True if OP is a MEM whose address is a valid call address.  */
403 call_operand (op, mode)
405 enum machine_mode mode;
407 if (GET_CODE (op) != MEM)
410 return call_address_operand (op, mode);
413 /* Returns 1 if OP is a symbol reference. */
416 symbolic_operand (op, mode)
418 enum machine_mode mode ATTRIBUTE_UNUSED;
420 switch (GET_CODE (op))
431 /* Return truth value of statement that OP is a symbolic memory
432 operand of mode MODE. */
435 symbolic_memory_operand (op, mode)
437 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Look through a paradoxical/ordinary SUBREG first.  */
439 if (GET_CODE (op) == SUBREG)
440 op = SUBREG_REG (op);
441 if (GET_CODE (op) != MEM)
444 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
445 || GET_CODE (op) == LABEL_REF);
448 /* Return true if OP is a short immediate (shimm) value. */
451 short_immediate_operand (op, mode)
453 enum machine_mode mode ATTRIBUTE_UNUSED;
455 if (GET_CODE (op) != CONST_INT)
457 return SMALL_INT (INTVAL (op));
460 /* Return true if OP will require a long immediate (limm) value.
461 This is currently only used when calculating length attributes. */
464 long_immediate_operand (op, mode)
466 enum machine_mode mode ATTRIBUTE_UNUSED;
468 switch (GET_CODE (op))
/* A CONST_INT needs a limm exactly when it is not a shimm.  */
475 return !SMALL_INT (INTVAL (op));
477 /* These can happen because large unsigned 32 bit constants are
478 represented this way (the multiplication patterns can cause these
479 to be generated). They also occur for SFmode values. */
487 /* Return true if OP is a MEM that when used as a load or store address will
488 require an 8 byte insn.
489 Load and store instructions don't allow the same possibilities but they're
490 similar enough that this one function will do.
491 This is currently only used when calculating length attributes. */
494 long_immediate_loadstore_operand (op, mode)
496 enum machine_mode mode ATTRIBUTE_UNUSED;
498 if (GET_CODE (op) != MEM)
/* Classify the address inside the MEM.  */
502 switch (GET_CODE (op))
509 /* This must be handled as "st c,[limm]". Ditto for load.
510 Technically, the assembler could translate some possibilities to
511 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
512 assume that it does. */
515 /* These can happen because large unsigned 32 bit constants are
516 represented this way (the multiplication patterns can cause these
517 to be generated). They also occur for SFmode values. */
/* reg + large offset also needs the long form.  */
522 if (GET_CODE (XEXP (op, 1)) == CONST_INT
523 && !SMALL_INT (INTVAL (XEXP (op, 1))))
532 /* Return true if OP is an acceptable argument for a single word move source.  */
536 move_src_operand (op, mode)
538 enum machine_mode mode;
540 switch (GET_CODE (op))
547 return (LARGE_INT (INTVAL (op)));
549 /* We can handle DImode integer constants in SImode if the value
550 (signed or unsigned) will fit in 32 bits. This is needed because
551 large unsigned 32 bit constants are represented as CONST_DOUBLEs. */
553 return arc_double_limm_p (op);
554 /* We can handle 32 bit floating point constants. */
556 return GET_MODE (op) == SFmode;
559 return register_operand (op, mode);
561 /* (subreg (mem ...) ...) can occur here if the inner part was once a
562 pseudo-reg and is now a stack slot. */
563 if (GET_CODE (SUBREG_REG (op)) == MEM)
564 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
566 return register_operand (op, mode);
568 return address_operand (XEXP (op, 0), mode);
574 /* Return true if OP is an acceptable argument for a double word move source.  */
578 move_double_src_operand (op, mode)
580 enum machine_mode mode;
582 switch (GET_CODE (op))
585 return register_operand (op, mode);
587 /* (subreg (mem ...) ...) can occur here if the inner part was once a
588 pseudo-reg and is now a stack slot. */
589 if (GET_CODE (SUBREG_REG (op)) == MEM)
590 return move_double_src_operand (SUBREG_REG (op), mode);
592 return register_operand (op, mode);
594 /* Disallow auto inc/dec for now. */
595 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
596 || GET_CODE (XEXP (op, 0)) == PRE_INC)
598 return address_operand (XEXP (op, 0), mode);
607 /* Return true if OP is an acceptable argument for a move destination. */
610 move_dest_operand (op, mode)
612 enum machine_mode mode;
614 switch (GET_CODE (op))
617 return register_operand (op, mode);
619 /* (subreg (mem ...) ...) can occur here if the inner part was once a
620 pseudo-reg and is now a stack slot. */
621 if (GET_CODE (SUBREG_REG (op)) == MEM)
622 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
624 return register_operand (op, mode);
626 return address_operand (XEXP (op, 0), mode);
632 /* Return true if OP is valid load with update operand. */
/* OP must be a MEM of the right mode whose address is reg + nonmemory
   operand.  NOTE(review): after the MEM checks, OP is tested against
   PLUS — an `op = XEXP (op, 0);` step is implied between them; confirm
   against the full source.  */
635 load_update_operand (op, mode)
637 enum machine_mode mode;
639 if (GET_CODE (op) != MEM
640 || GET_MODE (op) != mode)
643 if (GET_CODE (op) != PLUS
644 || GET_MODE (op) != Pmode
645 || !register_operand (XEXP (op, 0), Pmode)
646 || !nonmemory_operand (XEXP (op, 1), Pmode))
651 /* Return true if OP is valid store with update operand. */
/* Like load_update_operand, but the offset must be a shimm CONST_INT.  */
654 store_update_operand (op, mode)
656 enum machine_mode mode;
658 if (GET_CODE (op) != MEM
659 || GET_MODE (op) != mode)
662 if (GET_CODE (op) != PLUS
663 || GET_MODE (op) != Pmode
664 || !register_operand (XEXP (op, 0), Pmode)
665 || !(GET_CODE (XEXP (op, 1)) == CONST_INT
666 && SMALL_INT (INTVAL (XEXP (op, 1)))))
671 /* Return true if OP is a non-volatile non-immediate operand.
672 Volatile memory refs require a special "cache-bypass" instruction
673 and only the standard movXX patterns are set up to handle them. */
676 nonvol_nonimm_operand (op, mode)
678 enum machine_mode mode;
680 if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
682 return nonimmediate_operand (op, mode);
685 /* Accept integer operands in the range -0x80000000..0x7fffffff. We have
686 to check the range carefully since this predicate is used in DImode contexts.  */
690 const_sint32_operand (op, mode)
692 enum machine_mode mode ATTRIBUTE_UNUSED;
694 /* All allowed constants will fit a CONST_INT. */
695 return (GET_CODE (op) == CONST_INT
696 && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
699 /* Accept integer operands in the range 0..0xffffffff. We have to check the
700 range carefully since this predicate is used in DImode contexts. Also, we
701 need some extra crud to make it work when hosted on 64-bit machines. */
704 const_uint32_operand (op, mode)
706 enum machine_mode mode ATTRIBUTE_UNUSED;
708 #if HOST_BITS_PER_WIDE_INT > 32
709 /* All allowed constants will fit a CONST_INT. */
710 return (GET_CODE (op) == CONST_INT
711 && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
/* On 32-bit hosts values above 0x7fffffff appear as CONST_DOUBLE.  */
713 return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
714 || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
718 /* Return 1 if OP is a comparison operator valid for the mode of CC.
719 This allows the use of MATCH_OPERATOR to recognize all the branch insns.
721 Some insns only set a few bits in the condition code. So only allow those
722 comparisons that use the bits that are valid. */
725 proper_comparison_operator (op, mode)
727 enum machine_mode mode ATTRIBUTE_UNUSED;
729 enum rtx_code code = GET_CODE (op);
731 if (GET_RTX_CLASS (code) != '<')
/* CCZNmode only provides Z and N; CCZNCmode adds the carry bit.  */
734 if (GET_MODE (XEXP (op, 0)) == CCZNmode)
735 return (code == EQ || code == NE);
736 if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
737 return (code == EQ || code == NE
738 || code == LTU || code == GEU || code == GTU || code == LEU);
742 /* Misc. utilities. */
744 /* X and Y are two things to compare using CODE. Emit the compare insn and
745 return the rtx for the cc reg in the proper mode. */
748 gen_compare_reg (code, x, y)
752 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
/* Hard register 61 — presumably the condition-code register (CC_REG);
   confirm against the register layout in arc.h.  */
755 cc_reg = gen_rtx_REG (mode, 61);
757 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
758 gen_rtx_COMPARE (mode, x, y)));
763 /* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
764 We assume the value can be either signed or unsigned. */
767 arc_double_limm_p (value)
770 HOST_WIDE_INT low, high;
772 if (GET_CODE (value) != CONST_DOUBLE)
775 low = CONST_DOUBLE_LOW (value);
776 high = CONST_DOUBLE_HIGH (value);
/* Negative 32-bit values must sign-extend correctly through HIGH.  */
778 if (low & 0x80000000)
780 return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
781 || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
782 == - (unsigned HOST_WIDE_INT) 0x80000000)
/* Non-negative LOW: fits iff it is a 31-bit value with HIGH zero.  */
787 return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
791 /* Do any needed setup for a variadic function. For the ARC, we must
792 create a register parameter block, and then copy any anonymous arguments
793 in registers to memory.
795 CUM has not been updated for the last named argument which has type TYPE
796 and mode MODE, and we rely on this fact.
798 We do things a little weird here. We're supposed to only allocate space
799 for the anonymous arguments. However we need to keep the stack eight byte
800 aligned. So we round the space up if necessary, and leave it to va_start
   to compensate.  */
804 arc_setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
805 CUMULATIVE_ARGS *cum;
806 enum machine_mode mode;
807 tree type ATTRIBUTE_UNUSED;
813 /* All BLKmode values are passed by reference. */
/* First anonymous arg register: CUM plus the words of the last named arg.  */
817 first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
820 if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
822 /* Note that first_reg_offset < MAX_ARC_PARM_REGS. */
823 int first_reg_offset = first_anon_arg;
824 /* Size in words to "pretend" allocate. */
825 int size = MAX_ARC_PARM_REGS - first_reg_offset;
826 /* Extra slop to keep stack eight byte aligned. */
827 int align_slop = size & 1;
/* Build a BLKmode MEM over the register parameter save area and spill
   the remaining parameter registers into it.  */
830 regblock = gen_rtx_MEM (BLKmode,
831 plus_constant (arg_pointer_rtx,
832 FIRST_PARM_OFFSET (0)
833 + align_slop * UNITS_PER_WORD));
834 set_mem_alias_set (regblock, get_varargs_alias_set ());
835 set_mem_align (regblock, BITS_PER_WORD);
836 move_block_from_reg (first_reg_offset, regblock,
837 MAX_ARC_PARM_REGS - first_reg_offset);
839 *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
844 /* Cost functions. */
846 /* Compute a (partial) cost for rtx X. Return true if the complete
847 cost has been computed, and false if subexpressions should be
848 scanned. In either case, *TOTAL contains the cost result. */
851 arc_rtx_costs (x, code, outer_code, total)
854 int outer_code ATTRIBUTE_UNUSED;
859 /* Small integers are as cheap as registers. 4 byte values can
860 be fetched as immediate constants - let's give that the cost
   of an extra insn.  */
863 if (SMALL_INT (INTVAL (x)))
873 *total = COSTS_N_INSNS (1);
/* CONST_DOUBLE: one insn per half that is not a shimm.  */
879 split_double (x, &high, &low);
880 *total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
881 + !SMALL_INT (INTVAL (low)));
885 /* Encourage synth_mult to find a synthetic multiply when reasonable.
886 If we need more than 12 insns to do a multiply, then go out-of-line,
887 since the call overhead will be < 10% of the cost of the multiply. */
892 *total = COSTS_N_INSNS (1);
893 else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
894 *total = COSTS_N_INSNS (16);
896 *total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
905 /* Provide the costs of an addressing mode that contains ADDR.
906 If ADDR is not a valid address, its cost is irrelevant. */
909 arc_address_cost (addr)
912 switch (GET_CODE (addr))
924 register rtx plus0 = XEXP (addr, 0);
925 register rtx plus1 = XEXP (addr, 1);
927 if (GET_CODE (plus0) != REG)
930 switch (GET_CODE (plus1))
/* NOTE(review): SMALL_INT is applied to the rtx PLUS1 itself here,
   while every other call site in this file passes INTVAL (...);
   this looks like it should be SMALL_INT (INTVAL (plus1)) — confirm.  */
933 return SMALL_INT (plus1) ? 1 : 2;
950 /* Function prologue/epilogue handlers. */
952 /* ARC stack frames look like:
954 Before call After call
955 +-----------------------+ +-----------------------+
957 high | local variables, | | local variables, |
958 mem | reg save area, etc. | | reg save area, etc. |
960 +-----------------------+ +-----------------------+
962 | arguments on stack. | | arguments on stack. |
964 SP+16->+-----------------------+FP+48->+-----------------------+
965 | 4 word save area for | | reg parm save area, |
966 | return addr, prev %fp | | only created for |
967 SP+0->+-----------------------+ | variable argument |
969 FP+16->+-----------------------+
970 | 4 word save area for |
971 | return addr, prev %fp |
972 FP+0->+-----------------------+
976 +-----------------------+
978 | register save area |
980 +-----------------------+
982 | alloca allocations |
984 +-----------------------+
986 | arguments on stack |
988 SP+16->+-----------------------+
989 low | 4 word save area for |
990 memory | return addr, prev %fp |
991 SP+0->+-----------------------+
994 1) The "reg parm save area" does not exist for non variable argument fns.
995 The "reg parm save area" can be eliminated completely if we created our
996 own va-arc.h, but that has tradeoffs as well (so it's not done). */
998 /* Structure to be filled in by arc_compute_frame_size with register
999 save masks, and offsets for the current function. */
1000 struct arc_frame_info
1002 unsigned int total_size; /* # bytes that the entire frame takes up. */
1003 unsigned int extra_size; /* # bytes of extra stuff. */
1004 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1005 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1006 unsigned int reg_size; /* # bytes needed to store regs. */
1007 unsigned int var_size; /* # bytes that variables take up. */
1008 unsigned int reg_offset; /* Offset from new sp to store regs. */
1009 unsigned int gmask; /* Mask of saved gp registers. */
1010 int initialized; /* Nonzero if frame size already calculated. */
1013 /* Current frame information calculated by arc_compute_frame_size. */
1014 static struct arc_frame_info current_frame_info;
1016 /* Zero structure to initialize current_frame_info. */
1017 static struct arc_frame_info zero_frame_info;
1019 /* Type of function DECL.
1021 The result is cached. To reset the cache at the end of a function,
1022 call with DECL = NULL_TREE. */
1024 enum arc_function_type
1025 arc_compute_function_type (decl)
1030 static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
1031 /* Last function we were called for. */
1032 static tree last_fn = NULL_TREE;
1034 /* Resetting the cached value? */
1035 if (decl == NULL_TREE)
1037 fn_type = ARC_FUNCTION_UNKNOWN;
1038 last_fn = NULL_TREE;
/* Cache hit: same function as last time.  */
1042 if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
1045 /* Assume we have a normal function (not an interrupt handler). */
1046 fn_type = ARC_FUNCTION_NORMAL;
1048 /* Now see if this is an interrupt handler. */
/* NOTE(review): walks the attributes of current_function_decl rather
   than the DECL parameter — confirm the two are always the same here.  */
1049 for (a = DECL_ATTRIBUTES (current_function_decl);
1053 tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
1055 if (name == get_identifier ("__interrupt__")
1056 && list_length (args) == 1
1057 && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
1059 tree value = TREE_VALUE (args);
/* The attribute argument selects which interrupt link register.  */
1061 if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
1062 fn_type = ARC_FUNCTION_ILINK1;
1063 else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
1064 fn_type = ARC_FUNCTION_ILINK2;
/* Hard register numbers of the interrupt link registers and the
   return-address (blink) register.  */
1075 #define ILINK1_REGNUM 29
1076 #define ILINK2_REGNUM 30
1077 #define RETURN_ADDR_REGNUM 31
/* Bit masks for the frame pointer and return address in gmask.  */
1078 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1079 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1081 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1082 The return address and frame pointer are treated separately.
1083 Don't consider them here. */
1084 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1085 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1086 && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))
/* The return address must be saved whenever it is live in this function.  */
1088 #define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM])
1090 /* Return the bytes needed to compute the frame pointer from the current
1093 SIZE is the size needed for local variables. */
1096 arc_compute_frame_size (size)
1097 int size; /* # of var. bytes allocated. */
1100 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1101 unsigned int reg_size, reg_offset;
1103 enum arc_function_type fn_type;
/* Gather the components of the frame.  */
1107 args_size = current_function_outgoing_args_size;
1108 pretend_size = current_function_pretend_args_size;
1109 extra_size = FIRST_PARM_OFFSET (0);
1110 total_size = extra_size + pretend_size + args_size + var_size;
1111 reg_offset = FIRST_PARM_OFFSET(0) + current_function_outgoing_args_size;
1115 /* See if this is an interrupt handler. Call used registers must be saved
   for them too.  */
1117 fn_type = arc_compute_function_type (current_function_decl);
1118 interrupt_p = ARC_INTERRUPT_P (fn_type);
1120 /* Calculate space needed for registers.
1121 ??? We ignore the extension registers for now. */
1123 for (regno = 0; regno <= 31; regno++)
1125 if (MUST_SAVE_REGISTER (regno, interrupt_p))
1127 reg_size += UNITS_PER_WORD;
1128 gmask |= 1 << regno;
1132 total_size += reg_size;
1134 /* If the only space to allocate is the fp/blink save area this is an
1135 empty frame. However, if we'll be making a function call we need to
1136 allocate a stack frame for our callee's fp/blink save area. */
1137 if (total_size == extra_size
1138 && !MUST_SAVE_RETURN_ADDR)
1139 total_size = extra_size = 0;
/* Keep the frame ARC-stack-aligned.  */
1141 total_size = ARC_STACK_ALIGN (total_size);
1143 /* Save computed information. */
1144 current_frame_info.total_size = total_size;
1145 current_frame_info.extra_size = extra_size;
1146 current_frame_info.pretend_size = pretend_size;
1147 current_frame_info.var_size = var_size;
1148 current_frame_info.args_size = args_size;
1149 current_frame_info.reg_size = reg_size;
1150 current_frame_info.reg_offset = reg_offset;
1151 current_frame_info.gmask = gmask;
1152 current_frame_info.initialized = reload_completed;
1154 /* Ok, we're done. */
1158 /* Common code to save/restore registers. */
/* Emit one OP ("st" or "ld") per register set in GMASK, at successive
   word offsets from BASE_REG starting at OFFSET, into FILE.  */
1161 arc_save_restore (file, base_reg, offset, gmask, op)
1163 const char *base_reg;
1164 unsigned int offset;
1173 for (regno = 0; regno <= 31; regno++)
1175 if ((gmask & (1L << regno)) != 0)
1177 fprintf (file, "\t%s %s,[%s,%d]\n",
1178 op, reg_names[regno], base_reg, offset);
1179 offset += UNITS_PER_WORD;
1184 /* Target hook to assemble an integer object. The ARC version needs to
1185 emit a special directive for references to labels and function
   symbols.  */
1189 arc_assemble_integer (x, size, aligned_p)
/* Function symbols and labels get the %st() operator so the assembler
   emits the proper relocation; everything else uses the default hook.  */
1194 if (size == UNITS_PER_WORD && aligned_p
1195 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1196 || GET_CODE (x) == LABEL_REF))
1198 fputs ("\t.word\t%st(", asm_out_file);
1199 output_addr_const (asm_out_file, x);
1200 fputs (")\n", asm_out_file);
1203 return default_assemble_integer (x, size, aligned_p);
1206 /* Set up the stack and frame pointer (if desired) for the function. */
1209 arc_output_function_prologue (file, size)
1213 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1214 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1215 unsigned int gmask = current_frame_info.gmask;
1216 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1218 /* If this is an interrupt handler, set up our stack frame.
1219 ??? Optimize later. */
1220 if (ARC_INTERRUPT_P (fn_type))
1222 fprintf (file, "\t%s interrupt handler\n",
1224 fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
1227 /* This is only for the human reader. */
1228 fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
1229 ASM_COMMENT_START, ASM_COMMENT_START,
1230 current_frame_info.var_size,
1231 current_frame_info.reg_size / 4,
1232 current_frame_info.args_size,
1233 current_frame_info.extra_size);
/* Use the cached frame size if it has been computed already.  */
1235 size = ARC_STACK_ALIGN (size);
1236 size = (! current_frame_info.initialized
1237 ? arc_compute_frame_size (size)
1238 : current_frame_info.total_size);
1240 /* These cases shouldn't happen. Catch them now. */
1241 if (size == 0 && gmask)
1244 /* Allocate space for register arguments if this is a variadic function. */
1245 if (current_frame_info.pretend_size != 0)
1246 fprintf (file, "\tsub %s,%s,%d\n",
1247 sp_str, sp_str, current_frame_info.pretend_size);
1249 /* The home-grown ABI says link register is saved first. */
1250 if (MUST_SAVE_RETURN_ADDR)
1251 fprintf (file, "\tst %s,[%s,%d]\n",
1252 reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);
1254 /* Set up the previous frame pointer next (if we need to). */
1255 if (frame_pointer_needed)
1257 fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
1258 fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
1261 /* ??? We don't handle the case where the saved regs are more than 252
1262 bytes away from sp. This can be handled by decrementing sp once, saving
1263 the regs, and then decrementing it again. The epilogue doesn't have this
1264 problem as the `ld' insn takes reg+limm values (though it would be more
1265 efficient to avoid reg+limm). */
1267 /* Allocate the stack frame. */
1268 if (size - current_frame_info.pretend_size > 0)
1269 fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1270 sp_str, sp_str, size - current_frame_info.pretend_size);
1272 /* Save any needed call-saved regs (and call-used if this is an
1273 interrupt handler). */
1274 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1275 /* The zeroing of these two bits is unnecessary,
1276 but leave this in for clarity. */
1277 gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1280 fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
/* arc_output_function_epilogue -- emit the assembler epilogue for the
   current function into FILE: restore call-saved registers (via
   arc_save_restore), reload the return address if it was saved, deallocate
   the SIZE-byte frame, and emit the `j.d' return through the register
   appropriate to the function type (blink for normal functions, ilink1/2
   for interrupt handlers).  Also tries to fill the return's delay slot
   with the frame-pointer restore or a leftover sp adjustment, falling back
   to a nop.  Finally resets current_frame_info for the next function.
   NOTE(review): this dump carries stale line-number prefixes and is
   missing interior lines (braces, some statements); code kept verbatim.  */
1283 /* Do any necessary cleanup after a function to restore stack, frame,
1287 arc_output_function_epilogue (file, size)
1291 rtx epilogue_delay = current_function_epilogue_delay_list;
1292 int noepilogue = FALSE;
1293 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1295 /* This is only for the human reader. */
1296 fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
/* Recompute the rounded frame size unless the prologue already cached it.  */
1298 size = ARC_STACK_ALIGN (size);
1299 size = (!current_frame_info.initialized
1300 ? arc_compute_frame_size (size)
1301 : current_frame_info.total_size);
1303 if (size == 0 && epilogue_delay == 0)
1305 rtx insn = get_last_insn ();
1307 /* If the last insn was a BARRIER, we don't have to write any code
1308 because a jump (aka return) was put there. */
1309 if (GET_CODE (insn) == NOTE)
1310 insn = prev_nonnote_insn (insn);
1311 if (insn && GET_CODE (insn) == BARRIER)
1317 unsigned int pretend_size = current_frame_info.pretend_size;
1318 unsigned int frame_size = size - pretend_size;
1319 int restored, fp_restored_p;
/* alloca moves sp at runtime, so sp-relative restores are unsafe then.  */
1320 int can_trust_sp_p = !current_function_calls_alloca;
1321 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1322 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1324 /* ??? There are lots of optimizations that can be done here.
1325 EG: Use fp to restore regs if it's closer.
1326 Maybe in time we'll do them all. For now, always restore regs from
1327 sp, but don't restore sp if we don't have to. */
1329 if (!can_trust_sp_p)
1331 if (!frame_pointer_needed)
1333 fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
1334 sp_str, fp_str, frame_size, ASM_COMMENT_START);
1337 /* Restore any saved registers. */
1338 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1339 /* The zeroing of these two bits is unnecessary,
1340 but leave this in for clarity. */
1341 current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1344 if (MUST_SAVE_RETURN_ADDR)
1345 fprintf (file, "\tld %s,[%s,%d]\n",
1346 reg_names[RETURN_ADDR_REGNUM],
1347 frame_pointer_needed ? fp_str : sp_str,
1348 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));
1350 /* Keep track of how much of the stack pointer we've restored.
1351 It makes the following a lot more readable. */
1355 /* We try to emit the epilogue delay slot insn right after the load
1356 of the return address register so that it can execute with the
1357 stack intact. Secondly, loads are delayed. */
1358 /* ??? If stack intactness is important, always emit now. */
1359 if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
1361 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1);
1362 epilogue_delay = NULL_RTX;
1365 if (frame_pointer_needed)
1367 /* Try to restore the frame pointer in the delay slot. We can't,
1368 however, if any of these is true. */
1369 if (epilogue_delay != NULL_RTX
1370 || !SMALL_INT (frame_size)
1372 || ARC_INTERRUPT_P (fn_type))
1374 /* Note that we restore fp and sp here! */
/* `ld.a' loads fp and writes the updated address back into sp.  */
1375 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1376 restored += frame_size;
1380 else if (!SMALL_INT (size /* frame_size + pretend_size */)
1381 || ARC_INTERRUPT_P (fn_type))
1383 fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
1384 restored += frame_size;
1387 /* These must be done before the return insn because the delay slot
1388 does the final stack restore. */
1389 if (ARC_INTERRUPT_P (fn_type))
1393 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1);
1397 /* Emit the return instruction. */
/* Indexed by enum arc_function_type: normal -> blink, interrupts -> ilink.  */
1399 static const int regs[4] = {
1400 0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
1402 fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
1405 /* If the only register saved is the return address, we need a
1406 nop, unless we have an instruction to put into it. Otherwise
1407 we don't since reloading multiple registers doesn't reference
1408 the register being loaded. */
/* Fill the `j.d' delay slot: interrupt pop, user insn, fp/sp restore,
   remaining sp adjustment, or a plain nop as last resort.  */
1410 if (ARC_INTERRUPT_P (fn_type))
1411 fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
1412 else if (epilogue_delay != NULL_RTX)
1414 if (frame_pointer_needed && !fp_restored_p)
1416 if (restored < size)
1418 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1);
1420 else if (frame_pointer_needed && !fp_restored_p)
1422 if (!SMALL_INT (frame_size))
1424 /* Note that we restore fp and sp here! */
1425 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1427 else if (restored < size)
1429 if (!SMALL_INT (size - restored))
1431 fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1432 sp_str, sp_str, size - restored);
1435 fprintf (file, "\tnop\n");
1438 /* Reset state info for each function. */
1439 current_frame_info = zero_frame_info;
1440 arc_compute_function_type (NULL_TREE);
1443 /* Define the number of delay slots needed for the function epilogue.
1445 Interrupt handlers can't have any epilogue delay slots (it's always needed
1446 for something else, I think). For normal functions, we have to worry about
1447 using call-saved regs as they'll be restored before the delay slot insn.
1448 Functions with non-empty frames already have enough choices for the epilogue
1449 delay slot so for now we only consider functions with empty frames. */
/* Returns the delay-slot count for the epilogue: per the tests below,
   non-normal functions and non-empty frames get none.  NOTE(review): the
   actual return statements are absent from this dump — presumably 0 / 1;
   confirm against the original arc.c.  */
1452 arc_delay_slots_for_epilogue ()
1454 if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
/* Make sure total_size below is valid before consulting it.  */
1456 if (!current_frame_info.initialized)
1457 (void) arc_compute_frame_size (get_frame_size ());
1458 if (current_frame_info.total_size == 0)
1463 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1464 Any single length instruction which doesn't reference the stack or frame
1465 pointer or any call-saved register is OK. SLOT will always be 0. */
1468 arc_eligible_for_epilogue_delay (trial, slot)
/* A one-word insn that mentions neither sp nor fp is safe to run in the
   slot; gmask == 0 means no call-saved registers were stored at all.  */
1475 if (get_attr_length (trial) == 1
1476 /* If registers were saved, presumably there's more than enough
1477 possibilities for the delay slot. The alternative is something
1478 more complicated (of course, if we expanded the epilogue as rtl
1479 this problem would go away). */
1480 /* ??? Note that this will always be true since only functions with
1481 empty frames have epilogue delay slots. See
1482 arc_delay_slots_for_epilogue. */
1483 && current_frame_info.gmask == 0
1484 && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1485 && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1492 /* Emit special PIC prologues and epilogues. */
1500 /* Return true if OP is a shift operator. */
/* Predicate used by the .md patterns.  NOTE(review): the switch's case
   labels (presumably ASHIFT/ASHIFTRT/LSHIFTRT) and returns are missing
   from this dump.  */
1503 shift_operator (op, mode)
1505 enum machine_mode mode ATTRIBUTE_UNUSED;
1507 switch (GET_CODE (op))
1518 /* Output the assembler code for doing a shift.
1519 We go to a bit of trouble to generate efficient code as the ARC only has
1520 single bit shifts. This is taken from the h8300 port. We only have one
1521 mode of shifting and can't access individual bytes like the h8300 can, so
1522 this is greatly simplified (at the expense of not generating hyper-
1525 This function is not used if the variable shift insns are present. */
1527 /* ??? We assume the output operand is the same as operand 1.
1528 This can be optimized (deleted) in the case of 1 bit shifts. */
1529 /* ??? We use the loop register here. We don't use it elsewhere (yet) and
1530 using it here will give us a chance to play with it. */
/* Emits a multi-bit shift as either a short inline sequence of 1-bit
   shifts, a rotate/and trick for a (BITS_PER_WORD-1)-bit shift, or a
   hardware zero-overhead loop (lp_count/lp_start/lp_end) around the
   single-bit shift insn.  operands[3] is the shift rtx whose code and
   mode select the insn; operands[2] is the count.  */
1533 output_shift (operands)
1536 rtx shift = operands[3];
1537 enum machine_mode mode = GET_MODE (shift);
1538 enum rtx_code code = GET_CODE (shift);
1539 const char *shift_one;
/* Pick the single-bit shift insn matching the rtx code.  */
1546 case ASHIFT: shift_one = "asl %0,%0"; break;
1547 case ASHIFTRT: shift_one = "asr %0,%0"; break;
1548 case LSHIFTRT: shift_one = "lsr %0,%0"; break;
/* Variable count: loop `count' times using lp_count (or %4 scratch).  */
1552 if (GET_CODE (operands[2]) != CONST_INT)
1555 output_asm_insn ("mov lp_count,%2", operands);
1557 output_asm_insn ("mov %4,%2", operands);
/* Constant count: clamp, then choose inline vs. loop expansion.  */
1562 int n = INTVAL (operands[2]);
1564 /* If the count is negative, make it 0. */
1567 /* If the count is too big, truncate it.
1568 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
1569 do the intuitive thing. */
1570 else if (n > GET_MODE_BITSIZE (mode))
1571 n = GET_MODE_BITSIZE (mode);
1573 /* First see if we can do them inline. */
1577 output_asm_insn (shift_one, operands);
1579 /* See if we can use a rotate/and. */
1580 else if (n == BITS_PER_WORD - 1)
1585 output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
1588 /* The ARC doesn't have a rol insn. Use something else. */
1589 output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
1592 /* The ARC doesn't have a rol insn. Use something else. */
1593 output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
/* General constant case: set up the zero-overhead loop registers.  */
1605 output_asm_insn ("mov lp_count,%c2", operands);
1607 output_asm_insn ("mov %4,%c2", operands);
/* lp_start/lp_end must be primed with the loop body's address.  */
1612 sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
1615 sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
1617 output_asm_insn (buf, operands);
1618 output_asm_insn ("sr %4,[lp_start]", operands);
1619 output_asm_insn ("add %4,%4,1", operands);
1620 output_asm_insn ("sr %4,[lp_end]", operands);
1621 output_asm_insn ("nop\n\tnop", operands);
1623 fprintf (asm_out_file, "\t%s single insn loop\n",
1626 fprintf (asm_out_file, "1:\t%s single insn loop\n",
1628 output_asm_insn (shift_one, operands);
/* Fallback software loop: decrement %4, branch out when it hits zero.  */
1632 fprintf (asm_out_file, "1:\t%s begin shift loop\n",
1634 output_asm_insn ("sub.f %4,%4,1", operands);
1635 output_asm_insn ("nop", operands);
1636 output_asm_insn ("bn.nd 2f", operands);
1637 output_asm_insn (shift_one, operands);
1638 output_asm_insn ("b.nd 1b", operands);
1639 fprintf (asm_out_file, "2:\t%s end shift loop\n",
1648 /* Nested function support. */
1650 /* Emit RTL insns to initialize the variable parts of a trampoline.
1651 FNADDR is an RTX for the address of the function's pure code.
1652 CXT is an RTX for the static chain value for the function. */
/* Stub: all parameters are ATTRIBUTE_UNUSED, so trampolines are
   evidently not implemented for this port.  */
1655 arc_initialize_trampoline (tramp, fnaddr, cxt)
1656 rtx tramp ATTRIBUTE_UNUSED, fnaddr ATTRIBUTE_UNUSED, cxt ATTRIBUTE_UNUSED;
1660 /* Set the cpu type and print out other fancy things,
1661 at the top of the file. */
/* NOTE(review): the function's definition line is missing from this dump;
   presumably the TARGET_ASM_FILE_START hook (arc_file_start) — confirm.
   Runs the default file prologue, then emits a `.cpu' directive.  */
1666 default_file_start ();
1667 fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
1670 /* Print operand X (an rtx) in assembler syntax to file FILE.
1671 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1672 For `%' followed by punctuation, CODE is the punctuation and X is null. */
/* NOTE(review): the switch statement and its `case' labels for the
   individual operand codes are missing from this dump; the fragments
   below correspond, in order, to the delay-slot-suffix, '?'/'!'
   (conditionalization), cc-nop, condition-code, inverse-condition, 'R',
   'S' (%st), 'H'/'L', float, 'U', 'V', and default handlers, followed by
   the no-code rtx printer.  Code kept verbatim.  */
1675 arc_print_operand (file, x, code)
1683 /* Conditional branches. For now these are equivalent. */
1685 /* Unconditional branches. Output the appropriate delay slot suffix. */
1686 if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
1688 /* There's nothing in the delay slot. */
1689 fputs (".nd", file);
1693 rtx jump = XVECEXP (final_sequence, 0, 0);
1694 rtx delay = XVECEXP (final_sequence, 0, 1);
1695 if (INSN_ANNULLED_BRANCH_P (jump))
1696 fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
1701 case '?' : /* with leading "." */
1702 case '!' : /* without leading "." */
1703 /* This insn can be conditionally executed. See if the ccfsm machinery
1704 says it should be conditionalized. */
1705 if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
1707 /* Is this insn in a delay slot? */
1708 if (final_sequence && XVECLEN (final_sequence, 0) == 2)
1710 rtx insn = XVECEXP (final_sequence, 0, 1);
1712 /* If the insn is annulled and is from the target path, we need
1713 to inverse the condition test. */
1714 if (INSN_ANNULLED_BRANCH_P (insn))
1716 if (INSN_FROM_TARGET_P (insn))
1717 fprintf (file, "%s%s",
1718 code == '?' ? "." : "",
1719 arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
1721 fprintf (file, "%s%s",
1722 code == '?' ? "." : "",
1723 arc_condition_codes[arc_ccfsm_current_cc]);
1727 /* This insn is executed for either path, so don't
1728 conditionalize it at all. */
1729 ; /* nothing to do */
1734 /* This insn isn't in a delay slot. */
1735 fprintf (file, "%s%s",
1736 code == '?' ? "." : "",
1737 arc_condition_codes[arc_ccfsm_current_cc]);
1742 /* Output a nop if we're between a set of the condition codes,
1743 and a conditional branch. */
1744 if (last_insn_set_cc_p)
1745 fputs ("nop\n\t", file);
1748 fputs (arc_condition_codes[get_arc_condition_code (x)], file);
1751 fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
1752 (get_arc_condition_code (x))],
1756 /* Write second word of DImode or DFmode reference,
1757 register or memory. */
1758 if (GET_CODE (x) == REG)
1759 fputs (reg_names[REGNO (x)+1], file);
1760 else if (GET_CODE (x) == MEM)
1763 /* Handle possible auto-increment. Since it is pre-increment and
1764 we have already done it, we can just use an offset of four. */
1765 /* ??? This is taken from rs6000.c I think. I don't think it is
1766 currently necessary, but keep it around. */
1767 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1768 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1769 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
1771 output_address (plus_constant (XEXP (x, 0), 4));
1775 output_operand_lossage ("invalid operand to %%R code");
/* Function symbols and labels are wrapped in %st(...) for the linker.  */
1778 if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1779 || GET_CODE (x) == LABEL_REF)
1781 fprintf (file, "%%st(");
1782 output_addr_const (file, x);
1783 fprintf (file, ")");
1789 if (GET_CODE (x) == REG)
1791 /* L = least significant word, H = most significant word */
1792 if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
1793 fputs (reg_names[REGNO (x)], file);
1795 fputs (reg_names[REGNO (x)+1], file);
1797 else if (GET_CODE (x) == CONST_INT
1798 || GET_CODE (x) == CONST_DOUBLE)
1802 split_double (x, &first, &second);
1803 fprintf (file, "0x%08lx",
1804 (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
1807 output_operand_lossage ("invalid operand to %%H/%%L code");
1813 if (GET_CODE (x) != CONST_DOUBLE
1814 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
1817 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
1818 fprintf (file, "%s", str);
1822 /* Output a load/store with update indicator if appropriate. */
1823 if (GET_CODE (x) == MEM)
1825 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1826 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1830 output_operand_lossage ("invalid operand to %%U code");
1833 /* Output cache bypass indicator for a load/store insn. Volatile memory
1834 refs are defined to use the cache bypass mechanism. */
1835 if (GET_CODE (x) == MEM)
1837 if (MEM_VOLATILE_P (x))
1838 fputs (".di", file);
1841 output_operand_lossage ("invalid operand to %%V code");
1844 /* Do nothing special. */
1848 output_operand_lossage ("invalid operand output code");
/* No operand code: print the rtx itself (reg, mem address, or const).  */
1851 switch (GET_CODE (x))
1854 fputs (reg_names[REGNO (x)], file);
1858 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
1859 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1860 GET_MODE_SIZE (GET_MODE (x))));
1861 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
1862 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1863 - GET_MODE_SIZE (GET_MODE (x))));
1865 output_address (XEXP (x, 0));
1869 /* We handle SFmode constants here as output_addr_const doesn't. */
1870 if (GET_MODE (x) == SFmode)
1875 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1876 REAL_VALUE_TO_TARGET_SINGLE (d, l);
1877 fprintf (file, "0x%08lx", l);
1880 /* Fall through. Let output_addr_const deal with it. */
1882 output_addr_const (file, x);
1887 /* Print a memory address as an operand to reference that memory location. */
/* Handles REG, SYMBOL_REF, base+offset / base+index PLUS forms, and
   (via the final output_addr_const) other constants.  Pre-inc/dec forms
   are rejected here because the access size is unknown.  */
1890 arc_print_operand_address (file, addr)
1894 register rtx base, index = 0;
1897 switch (GET_CODE (addr))
1900 fputs (reg_names[REGNO (addr)], file);
/* The %st() wrapping for function symbols is currently disabled ("0 &&").  */
1903 if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
1905 fprintf (file, "%%st(");
1906 output_addr_const (file, addr);
1907 fprintf (file, ")");
1910 output_addr_const (file, addr);
/* PLUS: split into base register plus either a constant offset, a second
   index register, or a symbol.  */
1913 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
1914 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
1915 else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
1916 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
1918 base = XEXP (addr, 0), index = XEXP (addr, 1);
1919 if (GET_CODE (base) != REG)
1921 fputs (reg_names[REGNO (base)], file);
1925 fprintf (file, ",%d", offset);
1927 else if (GET_CODE (index) == REG)
1928 fprintf (file, ",%s", reg_names[REGNO (index)]);
1929 else if (GET_CODE (index) == SYMBOL_REF)
1930 fputc (',', file), output_addr_const (file, index);
1936 /* We shouldn't get here as we've lost the mode of the memory object
1937 (which says how much to inc/dec by). */
1941 output_addr_const (file, addr);
1946 /* Update compare/branch separation marker. */
/* Shifts current_insn_set_cc_p into last_insn_set_cc_p, then records
   whether INSN sets the condition codes (based on its `cond' attribute
   and length).  Used to know when a nop must separate a compare from a
   branch.  NOTE(review): the switch's case labels are missing from this
   dump.  */
1949 record_cc_ref (insn)
1952 last_insn_set_cc_p = current_insn_set_cc_p;
1954 switch (get_attr_cond (insn))
1959 if (get_attr_length (insn) == 1)
1960 current_insn_set_cc_p = 1;
1962 current_insn_set_cc_p = 0;
1965 current_insn_set_cc_p = 0;
1970 /* Conditional execution support.
1972 This is based on the ARM port but for now is much simpler.
1974 A finite state machine takes care of noticing whether or not instructions
1975 can be conditionally executed, and thus decrease execution time and code
1976 size by deleting branch instructions. The fsm is controlled by
1977 final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
1978 in the .md file for the branch insns also have a hand in this. */
1980 /* The state of the fsm controlling condition codes are:
1981 0: normal, do nothing special
1982 1: don't output this insn
1983 2: don't output this insn
1984 3: make insns conditional
1985 4: make insns conditional
1987 State transitions (state->state by whom, under what condition):
1988 0 -> 1 final_prescan_insn, if insn is conditional branch
1989 0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1990 1 -> 3 branch patterns, after having not output the conditional branch
1991 2 -> 4 branch patterns, after having not output the conditional branch
1992 3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
1993 (the target label has CODE_LABEL_NUMBER equal to
1994 arc_ccfsm_target_label).
1995 4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1997 If the jump clobbers the conditions then we use states 2 and 4.
1999 A similar thing can be done with conditional return insns.
2001 We also handle separating branches from sets of the condition code.
2002 This is done here because knowledge of the ccfsm state is required,
2003 we may not be outputting the branch. */
/* Drives the ccfsm described above for INSN, just before final output.
   NOTE(review): several interior lines (braces, some statements) are
   missing from this dump; code kept verbatim.  */
2006 arc_final_prescan_insn (insn, opvec, noperands)
2008 rtx *opvec ATTRIBUTE_UNUSED;
2009 int noperands ATTRIBUTE_UNUSED;
2011 /* BODY will hold the body of INSN. */
2012 register rtx body = PATTERN (insn);
2014 /* This will be 1 if trying to repeat the trick (ie: do the `else' part of
2015 an if/then/else), and things need to be reversed. */
2018 /* If we start with a return insn, we only succeed if we find another one. */
2019 int seeking_return = 0;
2021 /* START_INSN will hold the insn from where we start looking. This is the
2022 first insn after the following code_label if REVERSE is true. */
2023 rtx start_insn = insn;
2025 /* Update compare/branch separation marker. */
2026 record_cc_ref (insn);
2028 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
2029 We can't do this in macro FINAL_PRESCAN_INSN because its called from
2030 final_scan_insn which has `optimize' as a local. */
2031 if (optimize < 2 || TARGET_NO_COND_EXEC)
2034 /* If in state 4, check if the target branch is reached, in order to
2035 change back to state 0. */
2036 if (arc_ccfsm_state == 4)
2038 if (insn == arc_ccfsm_target_insn)
2040 arc_ccfsm_target_insn = NULL;
2041 arc_ccfsm_state = 0;
2046 /* If in state 3, it is possible to repeat the trick, if this insn is an
2047 unconditional branch to a label, and immediately following this branch
2048 is the previous target label which is only used once, and the label this
2049 branch jumps to is not too far off. Or in other words "we've done the
2050 `then' part, see if we can do the `else' part." */
2051 if (arc_ccfsm_state == 3)
2053 if (simplejump_p (insn))
2055 start_insn = next_nonnote_insn (start_insn);
2056 if (GET_CODE (start_insn) == BARRIER)
2058 /* ??? Isn't this always a barrier? */
2059 start_insn = next_nonnote_insn (start_insn);
2061 if (GET_CODE (start_insn) == CODE_LABEL
2062 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2063 && LABEL_NUSES (start_insn) == 1)
2068 else if (GET_CODE (body) == RETURN)
2070 start_insn = next_nonnote_insn (start_insn);
2071 if (GET_CODE (start_insn) == BARRIER)
2072 start_insn = next_nonnote_insn (start_insn);
2073 if (GET_CODE (start_insn) == CODE_LABEL
2074 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2075 && LABEL_NUSES (start_insn) == 1)
2087 if (GET_CODE (insn) != JUMP_INSN)
2090 /* This jump might be paralleled with a clobber of the condition codes,
2091 the jump should always come first. */
2092 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2093 body = XVECEXP (body, 0, 0);
2096 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2097 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2099 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2100 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2101 int then_not_else = TRUE;
2102 /* Nonzero if next insn must be the target label. */
2103 int next_must_be_target_label_p;
2104 rtx this_insn = start_insn, label = 0;
2106 /* Register the insn jumped to. */
2109 if (!seeking_return)
2110 label = XEXP (SET_SRC (body), 0);
2112 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2113 label = XEXP (XEXP (SET_SRC (body), 1), 0);
2114 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2116 label = XEXP (XEXP (SET_SRC (body), 2), 0);
2117 then_not_else = FALSE;
2119 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2121 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2124 then_not_else = FALSE;
2129 /* See how many insns this branch skips, and what kind of insns. If all
2130 insns are okay, and the label or unconditional branch to the same
2131 label is not too far away, succeed. */
2132 for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
2133 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2138 this_insn = next_nonnote_insn (this_insn);
2142 if (next_must_be_target_label_p)
2144 if (GET_CODE (this_insn) == BARRIER)
2146 if (GET_CODE (this_insn) == CODE_LABEL
2147 && this_insn == label)
2149 arc_ccfsm_state = 1;
2157 scanbody = PATTERN (this_insn);
2159 switch (GET_CODE (this_insn))
2162 /* Succeed if it is the target label, otherwise fail since
2163 control falls in from somewhere else. */
2164 if (this_insn == label)
2166 arc_ccfsm_state = 1;
2174 /* Succeed if the following insn is the target label.
2176 If return insns are used then the last insn in a function
2177 will be a barrier. */
2178 next_must_be_target_label_p = TRUE;
2182 /* Can handle a call insn if there are no insns after it.
2183 IE: The next "insn" is the target label. We don't have to
2184 worry about delay slots as such insns are SEQUENCE's inside
2185 INSN's. ??? It is possible to handle such insns though. */
2186 if (get_attr_cond (this_insn) == COND_CANUSE)
2187 next_must_be_target_label_p = TRUE;
2193 /* If this is an unconditional branch to the same label, succeed.
2194 If it is to another label, do nothing. If it is conditional,
2196 /* ??? Probably, the test for the SET and the PC are unnecessary. */
2198 if (GET_CODE (scanbody) == SET
2199 && GET_CODE (SET_DEST (scanbody)) == PC)
2201 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2202 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2204 arc_ccfsm_state = 2;
2207 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2210 else if (GET_CODE (scanbody) == RETURN
2213 arc_ccfsm_state = 2;
2216 else if (GET_CODE (scanbody) == PARALLEL)
2218 if (get_attr_cond (this_insn) != COND_CANUSE)
2224 /* We can only do this with insns that can use the condition
2225 codes (and don't set them). */
2226 if (GET_CODE (scanbody) == SET
2227 || GET_CODE (scanbody) == PARALLEL)
2229 if (get_attr_cond (this_insn) != COND_CANUSE)
2232 /* We can't handle other insns like sequences. */
/* On success, record where the fsm should switch back to state 0.  */
2244 if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
2245 arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
2246 else if (seeking_return || arc_ccfsm_state == 2)
2248 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2250 this_insn = next_nonnote_insn (this_insn);
2251 if (this_insn && (GET_CODE (this_insn) == BARRIER
2252 || GET_CODE (this_insn) == CODE_LABEL))
2257 /* Oh dear! we ran off the end, give up. */
2258 extract_insn_cached (insn);
2259 arc_ccfsm_state = 0;
2260 arc_ccfsm_target_insn = NULL;
2263 arc_ccfsm_target_insn = this_insn;
2268 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
2271 arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
2274 if (reverse || then_not_else)
2275 arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
2278 /* Restore recog_data. Getting the attributes of other insns can
2279 destroy this array, but final.c assumes that it remains intact
2280 across this call. */
2281 extract_insn_cached (insn);
2285 /* Record that we are currently outputting label NUM with prefix PREFIX.
2286 If it's the label we're looking for, reset the ccfsm machinery.
2288 Called from (*targetm.asm_out.internal_label). */
/* Implements the 3 -> 0 transition of the ccfsm: leaving state 3 once the
   conditionalized region's target label is emitted.  Only plain "L"
   labels can be ccfsm targets, hence the prefix check.  */
2291 arc_ccfsm_at_label (prefix, num)
2295 if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
2296 && !strcmp (prefix, "L"))
2298 arc_ccfsm_state = 0;
2299 arc_ccfsm_target_insn = NULL_RTX;
2303 /* See if the current insn, which is a conditional branch, is to be
/* True when the ccfsm is in state 1 or 2, i.e. the branch is being
   replaced by conditional execution and must not be emitted.
   NOTE(review): return statements are absent from this dump.  */
2307 arc_ccfsm_branch_deleted_p ()
2309 if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
2314 /* Record a branch isn't output because subsequent insns can be
/* Advances the ccfsm: 1 -> 3 or 2 -> 4 (start conditionalizing insns),
   and undoes record_cc_ref's bookkeeping for the suppressed branch.  */
2318 arc_ccfsm_record_branch_deleted ()
2320 /* Indicate we're conditionalizing insns now. */
2321 arc_ccfsm_state += 2;
2323 /* If the next insn is a subroutine call, we still need a nop between the
2324 cc setter and user. We need to undo the effect of calling record_cc_ref
2325 for the just deleted branch. */
2326 current_insn_set_cc_p = last_insn_set_cc_p;
/* Target hook for va_start: adjust NEXTARG by one word when an odd number
   of argument registers has been used, then defer to the generic
   expander.  */
2330 arc_va_start (valist, nextarg)
2334 /* See arc_setup_incoming_varargs for reasons for this oddity. */
2335 if (current_function_args_info < 8
2336 && (current_function_args_info & 1))
2337 nextarg = plus_constant (nextarg, UNITS_PER_WORD);
2339 std_expand_builtin_va_start (valist, nextarg);
/* Target hook for va_arg: return an rtx for the address of the next
   argument of TYPE from the va_list VALIST, and advance VALIST.
   Aggregates and scalars larger than 8 bytes are fetched through a
   pointer (pass-by-reference); other types are read in place with
   8-byte alignment applied for over-aligned types and a big-endian
   adjustment for small values.  */
2343 arc_va_arg (valist, type)
2348 tree type_ptr = build_pointer_type (type);
2350 /* All aggregates are passed by reference. All scalar types larger
2351 than 8 bytes are passed by reference. */
2353 if (AGGREGATE_TYPE_P (type) || int_size_in_bytes (type) > 8)
2355 tree type_ptr_ptr = build_pointer_type (type_ptr);
/* The slot holds a pointer to the value: dereference the va_list slot
   and step past one word.  */
2357 addr = build (INDIRECT_REF, type_ptr,
2358 build (NOP_EXPR, type_ptr_ptr, valist));
2360 incr = build (PLUS_EXPR, TREE_TYPE (valist),
2361 valist, build_int_2 (UNITS_PER_WORD, 0));
2365 HOST_WIDE_INT align, rounded_size;
2367 /* Compute the rounded size of the type. */
2368 align = PARM_BOUNDARY / BITS_PER_UNIT;
2369 rounded_size = (((TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT
2370 + align - 1) / align) * align);
2372 /* Align 8 byte operands. */
2374 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2376 /* AP = (TYPE *)(((int)AP + 7) & -8) */
2378 addr = build (NOP_EXPR, integer_type_node, valist);
2379 addr = fold (build (PLUS_EXPR, integer_type_node, addr,
2380 build_int_2 (7, 0)));
2381 addr = fold (build (BIT_AND_EXPR, integer_type_node, addr,
2382 build_int_2 (-8, 0)));
2383 addr = fold (build (NOP_EXPR, TREE_TYPE (valist), addr));
2386 /* The increment is always rounded_size past the aligned pointer. */
2387 incr = fold (build (PLUS_EXPR, TREE_TYPE (addr), addr,
2388 build_int_2 (rounded_size, 0)));
2390 /* Adjust the pointer in big-endian mode. */
2391 if (BYTES_BIG_ENDIAN)
2394 adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
2395 if (rounded_size > align)
2398 addr = fold (build (PLUS_EXPR, TREE_TYPE (addr), addr,
2399 build_int_2 (rounded_size - adj, 0)));
2403 /* Evaluate the data address. */
2404 addr_rtx = expand_expr (addr, NULL_RTX, Pmode, EXPAND_NORMAL);
2405 addr_rtx = copy_to_reg (addr_rtx);
2407 /* Compute new value for AP. */
2408 incr = build (MODIFY_EXPR, TREE_TYPE (valist), valist, incr);
2409 TREE_SIDE_EFFECTS (incr) = 1;
2410 expand_expr (incr, const0_rtx, VOIDmode, EXPAND_NORMAL);
2415 /* This is how to output a definition of an internal numbered label where
2416 PREFIX is the class of label and NUM is the number within the class. */
2419 arc_internal_label (stream, prefix, labelno)
2422 unsigned long labelno;
2424 arc_ccfsm_at_label (prefix, labelno);
2425 default_internal_label (stream, prefix, labelno);