/* Subroutines for insn-output.c for ATMEL AVR micro controllers
- Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
- Free Software Foundation, Inc.
- Contributed by Denis Chertykov (denisc@overta.ru)
+ Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
+ 2009, 2010, 2011 Free Software Foundation, Inc.
+ Contributed by Denis Chertykov (chertykov@gmail.com)
This file is part of GCC.
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
-#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
+#include "insn-codes.h"
#include "flags.h"
#include "reload.h"
#include "tree.h"
#include "output.h"
#include "expr.h"
-#include "toplev.h"
+#include "diagnostic-core.h"
#include "obstack.h"
#include "function.h"
#include "recog.h"
+#include "optabs.h"
#include "ggc.h"
+#include "langhooks.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
+#include "params.h"
#include "df.h"
/* Maximal allowed offset for an address in the LD command */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
+static void avr_option_override (void);
static int avr_naked_function_p (tree);
static int interrupt_function_p (tree);
static int signal_function_p (tree);
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
-static int avr_num_arg_regs (enum machine_mode, tree);
+static int avr_num_arg_regs (enum machine_mode, const_tree);
static RTX_CODE compare_condition (rtx insn);
+static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
static int compare_sign_p (rtx insn);
static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
-const struct attribute_spec avr_attribute_table[];
static bool avr_assemble_integer (rtx, unsigned int, int);
static void avr_file_start (void);
static void avr_file_end (void);
+static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
static void avr_asm_function_end_prologue (FILE *);
static void avr_asm_function_begin_epilogue (FILE *);
+static bool avr_cannot_modify_jumps_p (void);
+static rtx avr_function_value (const_tree, const_tree, bool);
+static rtx avr_libcall_value (enum machine_mode, const_rtx);
+static bool avr_function_value_regno_p (const unsigned int);
static void avr_insert_attributes (tree, tree *);
static void avr_asm_init_sections (void);
static unsigned int avr_section_type_flags (tree, const char *, int);
static void avr_reorg (void);
static void avr_asm_out_ctor (rtx, int);
static void avr_asm_out_dtor (rtx, int);
-static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
-static bool avr_rtx_costs (rtx, int, int, int *);
-static int avr_address_cost (rtx);
+static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
+static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
+static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
+static bool avr_rtx_costs (rtx, int, int, int *, bool);
+static int avr_address_cost (rtx, bool);
static bool avr_return_in_memory (const_tree, const_tree);
static struct machine_function * avr_init_machine_status (void);
+static void avr_init_builtins (void);
+static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+static rtx avr_builtin_setjmp_frame_value (void);
+static bool avr_hard_regno_scratch_ok (unsigned int);
+static unsigned int avr_case_values_threshold (void);
+static bool avr_frame_pointer_required_p (void);
+static bool avr_can_eliminate (const int, const int);
+static bool avr_class_likely_spilled_p (reg_class_t c);
+static rtx avr_function_arg (cumulative_args_t, enum machine_mode,
+ const_tree, bool);
+static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
+ const_tree, bool);
+static bool avr_function_ok_for_sibcall (tree, tree);
+static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
+static void avr_encode_section_info (tree, rtx, int);
+
/* Allocate registers from r25 to r8 for parameters for function calls. */
#define FIRST_CUM_REG 26
/* AVR register names {"r0", "r1", ..., "r31"} */
static const char *const avr_regnames[] = REGISTER_NAMES;
-/* This holds the last insn address. */
-static int last_insn_address = 0;
-
/* Preprocessor macros to define depending on MCU type. */
const char *avr_extra_arch_macro;
/* Current architecture. */
const struct base_arch_s *avr_current_arch;
-section *progmem_section;
-
-static const struct base_arch_s avr_arch_types[] = {
- { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
- { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
- { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
- { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
- { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
- { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
- { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
- { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
- { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
- { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
- { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
-};
+/* Current device. */
+const struct mcu_type_s *avr_current_device;
-/* These names are used as the index into the avr_arch_types[] table
- above. */
-
-enum avr_arch
-{
- ARCH_UNKNOWN,
- ARCH_AVR1,
- ARCH_AVR2,
- ARCH_AVR25,
- ARCH_AVR3,
- ARCH_AVR31,
- ARCH_AVR35,
- ARCH_AVR4,
- ARCH_AVR5,
- ARCH_AVR51,
- ARCH_AVR6
-};
+section *progmem_section;
-struct mcu_type_s {
- const char *const name;
- int arch; /* index in avr_arch_types[] */
- /* Must lie outside user's namespace. NULL == no macro. */
- const char *const macro;
-};
+/* To track if code will use .bss and/or .data. */
+bool avr_need_clear_bss_p = false;
+bool avr_need_copy_data_p = false;
-/* List of all known AVR MCU types - if updated, it has to be kept
- in sync in several places (FIXME: is there a better way?):
- - here
- - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
- - t-avr (MULTILIB_MATCHES)
- - gas/config/tc-avr.c
- - avr-libc */
-
-static const struct mcu_type_s avr_mcu_types[] = {
- /* Classic, <= 8K. */
- { "avr2", ARCH_AVR2, NULL },
- { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
- { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
- { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
- { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
- { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
- { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
- { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
- { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
- { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
- { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
- { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
- { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
- /* Classic + MOVW, <= 8K. */
- { "avr25", ARCH_AVR25, NULL },
- { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
- { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
- { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
- { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
- { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
- { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
- { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
- { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
- { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
- { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
- { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
- { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
- { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
- { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
- { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
- /* Classic, > 8K, <= 64K. */
- { "avr3", ARCH_AVR3, NULL },
- { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
- { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
- { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
- /* Classic, == 128K. */
- { "avr31", ARCH_AVR31, NULL },
- { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
- /* Classic + MOVW + JMP/CALL. */
- { "avr35", ARCH_AVR35, NULL },
- { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
- { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
- /* Enhanced, <= 8K. */
- { "avr4", ARCH_AVR4, NULL },
- { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
- { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
- { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
- { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
- { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
- { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
- { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
- { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
- { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
- { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
- { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
- { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
- { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
- /* Enhanced, > 8K, <= 64K. */
- { "avr5", ARCH_AVR5, NULL },
- { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
- { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
- { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
- { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
- { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
- { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
- { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
- { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
- { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
- { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
- { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
- { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
- { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
- { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
- { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
- { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
- { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
- { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
- { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
- { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
- { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
- { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
- { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
- { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
- { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
- { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
- { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
- { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
- { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
- { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
- { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
- { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
- { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
- { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
- { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
- { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
- { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
- { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
- { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
- { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
- { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
- /* Enhanced, == 128K. */
- { "avr51", ARCH_AVR51, NULL },
- { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
- { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
- { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
- { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
- { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
- { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
- { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
- /* 3-Byte PC. */
- { "avr6", ARCH_AVR6, NULL },
- { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
- { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
- /* Assembler only. */
- { "avr1", ARCH_AVR1, NULL },
- { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
- { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
- { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
- { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
- { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
- { NULL, ARCH_UNKNOWN, NULL }
+/* AVR attributes. */
+static const struct attribute_spec avr_attribute_table[] =
+{
+ /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
+ affects_type_identity } */
+ { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
+ false },
+ { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
+ false },
+ { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
+ false },
+ { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
+ false },
+ { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
+ false },
+ { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
+ false },
+ { NULL, 0, 0, false, false, false, NULL, false }
};
-
-int avr_case_values_threshold = 30000;
\f
/* Initialize the GCC target structure. */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
-#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
-#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
+
+#undef TARGET_FUNCTION_VALUE
+#define TARGET_FUNCTION_VALUE avr_function_value
+#undef TARGET_LIBCALL_VALUE
+#define TARGET_LIBCALL_VALUE avr_libcall_value
+#undef TARGET_FUNCTION_VALUE_REGNO_P
+#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
+
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
+
+/* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
+
+#undef TARGET_ASM_INIT_SECTIONS
+#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
+#undef TARGET_ENCODE_SECTION_INFO
+#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
+
+#undef TARGET_REGISTER_MOVE_COST
+#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
+#undef TARGET_MEMORY_MOVE_COST
+#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
+#undef TARGET_FUNCTION_ARG
+#define TARGET_FUNCTION_ARG avr_function_arg
+#undef TARGET_FUNCTION_ARG_ADVANCE
+#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
+
+#undef TARGET_LEGITIMIZE_ADDRESS
+#define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
-struct gcc_target targetm = TARGET_INITIALIZER;
-\f
-void
-avr_override_options (void)
-{
- const struct mcu_type_s *t;
+#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
+#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
- flag_delete_null_pointer_checks = 0;
+#undef TARGET_HARD_REGNO_SCRATCH_OK
+#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
+#undef TARGET_CASE_VALUES_THRESHOLD
+#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
- for (t = avr_mcu_types; t->name; t++)
- if (strcmp (t->name, avr_mcu_name) == 0)
- break;
+#undef TARGET_LEGITIMATE_ADDRESS_P
+#define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
+
+#undef TARGET_FRAME_POINTER_REQUIRED
+#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
+#undef TARGET_CAN_ELIMINATE
+#define TARGET_CAN_ELIMINATE avr_can_eliminate
+
+#undef TARGET_CLASS_LIKELY_SPILLED_P
+#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
+
+#undef TARGET_OPTION_OVERRIDE
+#define TARGET_OPTION_OVERRIDE avr_option_override
+
+#undef TARGET_CANNOT_MODIFY_JUMPS_P
+#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
+
+#undef TARGET_FUNCTION_OK_FOR_SIBCALL
+#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
+
+#undef TARGET_INIT_BUILTINS
+#define TARGET_INIT_BUILTINS avr_init_builtins
+
+#undef TARGET_EXPAND_BUILTIN
+#define TARGET_EXPAND_BUILTIN avr_expand_builtin
- if (!t->name)
- {
- fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
- avr_mcu_name);
- for (t = avr_mcu_types; t->name; t++)
- fprintf (stderr," %s\n", t->name);
- }
- avr_current_arch = &avr_arch_types[t->arch];
- avr_extra_arch_macro = t->macro;
+struct gcc_target targetm = TARGET_INITIALIZER;
+\f
+static void
+avr_option_override (void)
+{
+ flag_delete_null_pointer_checks = 0;
- if (optimize && !TARGET_NO_TABLEJUMP)
- avr_case_values_threshold =
- (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
+ avr_current_device = &avr_mcu_types[avr_mcu_index];
+ avr_current_arch = &avr_arch_types[avr_current_device->arch];
+ avr_extra_arch_macro = avr_current_device->macro;
tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
/* return register class from register number. */
-static const int reg_class_tab[]={
+static const enum reg_class reg_class_tab[]={
GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
static struct machine_function *
avr_init_machine_status (void)
{
- return ((struct machine_function *)
- ggc_alloc_cleared (sizeof (struct machine_function)));
+ return ggc_alloc_cleared_machine_function ();
}
/* Return register class for register R. */
return ALL_REGS;
}
+/* A helper for the subsequent function attribute used to dig for
+ attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
+
+static inline int
+avr_lookup_function_attribute1 (const_tree func, const char *name)
+{
+ if (FUNCTION_DECL == TREE_CODE (func))
+ {
+ if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
+ {
+ return true;
+ }
+
+ func = TREE_TYPE (func);
+ }
+
+ gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
+ || TREE_CODE (func) == METHOD_TYPE);
+
+ return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
+}
+
/* Return nonzero if FUNC is a naked function. */
static int
avr_naked_function_p (tree func)
{
- tree a;
-
- gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
-
- a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
- return a != NULL_TREE;
+ return avr_lookup_function_attribute1 (func, "naked");
}
/* Return nonzero if FUNC is an interrupt function as specified
static int
interrupt_function_p (tree func)
{
- tree a;
-
- if (TREE_CODE (func) != FUNCTION_DECL)
- return 0;
-
- a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
- return a != NULL_TREE;
+ return avr_lookup_function_attribute1 (func, "interrupt");
}
/* Return nonzero if FUNC is a signal function as specified
static int
signal_function_p (tree func)
{
- tree a;
-
- if (TREE_CODE (func) != FUNCTION_DECL)
- return 0;
-
- a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
- return a != NULL_TREE;
+ return avr_lookup_function_attribute1 (func, "signal");
}
/* Return nonzero if FUNC is a OS_task function. */
static int
avr_OS_task_function_p (tree func)
{
- tree a;
-
- gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
-
- a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
- return a != NULL_TREE;
+ return avr_lookup_function_attribute1 (func, "OS_task");
}
/* Return nonzero if FUNC is a OS_main function. */
static int
avr_OS_main_function_p (tree func)
{
- tree a;
-
- gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
-
- a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
- return a != NULL_TREE;
+ return avr_lookup_function_attribute1 (func, "OS_main");
}
/* Return the number of hard registers to push/pop in the prologue/epilogue
int int_or_sig_p = (interrupt_function_p (current_function_decl)
|| signal_function_p (current_function_decl));
- if (!reload_completed)
- cfun->machine->is_leaf = leaf_function_p ();
-
if (set)
CLEAR_HARD_REG_SET (*set);
count = 0;
if (fixed_regs[reg])
continue;
- if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
+ if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
|| (df_regs_ever_live_p (reg)
&& (int_or_sig_p || !call_used_regs[reg])
&& !(frame_pointer_needed
return count;
}
+/* Return true if register FROM can be eliminated via register TO. */
+
+bool
+avr_can_eliminate (const int from, const int to)
+{
+ return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
+ || ((from == FRAME_POINTER_REGNUM
+ || from == FRAME_POINTER_REGNUM + 1)
+ && !frame_pointer_needed));
+}
+
/* Compute offset between arg_pointer and frame_pointer. */
int
-initial_elimination_offset (int from, int to)
+avr_initial_elimination_offset (int from, int to)
{
if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
return 0;
}
}
+/* Actual start of frame is virtual_stack_vars_rtx; this is offset from
+ the frame pointer by +STARTING_FRAME_OFFSET.
+ Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
+ avoids creating add/sub of offset in nonlocal goto and setjmp. */
+
+rtx avr_builtin_setjmp_frame_value (void)
+{
+ return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
+ gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
+}
+
+/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
+ This is return address of function. */
+rtx
+avr_return_addr_rtx (int count, rtx tem)
+{
+ rtx r;
+
+ /* Can only return this function's return address. Others not supported. */
+ if (count)
+ return NULL;
+
+ if (AVR_3_BYTE_PC)
+ {
+ r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
+ warning (0, "'builtin_return_address' contains only 2 bytes of address");
+ }
+ else
+ r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
+
+ r = gen_rtx_PLUS (Pmode, tem, r);
+ r = gen_frame_mem (Pmode, memory_address (Pmode, r));
+ r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
+ return r;
+}
+
/* Return 1 if the function epilogue is just a single "ret". */
int
return length;
}
+/* Implement INCOMING_RETURN_ADDR_RTX. */
+
+rtx
+avr_incoming_return_addr_rtx (void)
+{
+ /* The return address is at the top of the stack. Note that the push
+ was via post-decrement, which means the actual address is off by one. */
+ return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
+}
+
+/* Helper for expand_prologue. Emit a push of a byte register. */
+
+static void
+emit_push_byte (unsigned regno, bool frame_related_p)
+{
+ rtx mem, reg, insn;
+
+ mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
+ mem = gen_frame_mem (QImode, mem);
+ reg = gen_rtx_REG (QImode, regno);
+
+ insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
+ if (frame_related_p)
+ RTX_FRAME_RELATED_P (insn) = 1;
+
+ cfun->machine->stack_usage++;
+}
+
+
/* Output function prologue. */
void
HARD_REG_SET set;
int minimize;
HOST_WIDE_INT size = get_frame_size();
- /* Define templates for push instructions. */
- rtx pushbyte = gen_rtx_MEM (QImode,
- gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
- rtx pushword = gen_rtx_MEM (HImode,
- gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
rtx insn;
-
- last_insn_address = 0;
/* Init cfun->machine. */
cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
cfun->machine->is_signal = signal_function_p (current_function_decl);
cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
+ cfun->machine->stack_usage = 0;
/* Prologue: naked. */
if (cfun->machine->is_naked)
if (cfun->machine->is_interrupt || cfun->machine->is_signal)
{
+ /* Enable interrupts. */
if (cfun->machine->is_interrupt)
- {
- /* Enable interrupts. */
- insn = emit_insn (gen_enable_interrupt ());
- RTX_FRAME_RELATED_P (insn) = 1;
- }
+ emit_insn (gen_enable_interrupt ());
/* Push zero reg. */
- insn = emit_move_insn (pushbyte, zero_reg_rtx);
- RTX_FRAME_RELATED_P (insn) = 1;
+ emit_push_byte (ZERO_REGNO, true);
/* Push tmp reg. */
- insn = emit_move_insn (pushbyte, tmp_reg_rtx);
- RTX_FRAME_RELATED_P (insn) = 1;
+ emit_push_byte (TMP_REGNO, true);
/* Push SREG. */
- insn = emit_move_insn (tmp_reg_rtx,
- gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
- RTX_FRAME_RELATED_P (insn) = 1;
- insn = emit_move_insn (pushbyte, tmp_reg_rtx);
- RTX_FRAME_RELATED_P (insn) = 1;
+ /* ??? There's no dwarf2 column reserved for SREG. */
+ emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
+ emit_push_byte (TMP_REGNO, false);
/* Push RAMPZ. */
- if(AVR_HAVE_RAMPZ
- && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
+ /* ??? There's no dwarf2 column reserved for RAMPZ. */
+ if (AVR_HAVE_RAMPZ
+ && TEST_HARD_REG_BIT (set, REG_Z)
+ && TEST_HARD_REG_BIT (set, REG_Z + 1))
{
- insn = emit_move_insn (tmp_reg_rtx,
- gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
- RTX_FRAME_RELATED_P (insn) = 1;
- insn = emit_move_insn (pushbyte, tmp_reg_rtx);
- RTX_FRAME_RELATED_P (insn) = 1;
+ emit_move_insn (tmp_reg_rtx,
+ gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
+ emit_push_byte (TMP_REGNO, false);
}
/* Clear zero reg. */
- insn = emit_move_insn (zero_reg_rtx, const0_rtx);
- RTX_FRAME_RELATED_P (insn) = 1;
+ emit_move_insn (zero_reg_rtx, const0_rtx);
/* Prevent any attempt to delete the setting of ZERO_REG! */
emit_use (zero_reg_rtx);
|| (AVR_2_BYTE_PC && live_seq > 6)
|| live_seq > 7))
{
- insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
- gen_int_mode (size, HImode));
- RTX_FRAME_RELATED_P (insn) = 1;
+ int first_reg, reg, offset;
- insn =
- emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
- gen_int_mode (size + live_seq, HImode)));
+ emit_move_insn (gen_rtx_REG (HImode, REG_X),
+ gen_int_mode (size, HImode));
+
+ insn = emit_insn (gen_call_prologue_saves
+ (gen_int_mode (live_seq, HImode),
+ gen_int_mode (size + live_seq, HImode)));
RTX_FRAME_RELATED_P (insn) = 1;
+
+ /* Describe the effect of the unspec_volatile call to prologue_saves.
+ Note that this formulation assumes that add_reg_note pushes the
+ notes to the front. Thus we build them in the reverse order of
+ how we want dwarf2out to process them. */
+
+ /* The function does always set frame_pointer_rtx, but whether that
+ is going to be permanent in the function is frame_pointer_needed. */
+ add_reg_note (insn, REG_CFA_ADJUST_CFA,
+ gen_rtx_SET (VOIDmode,
+ (frame_pointer_needed
+ ? frame_pointer_rtx : stack_pointer_rtx),
+ plus_constant (stack_pointer_rtx,
+ -(size + live_seq))));
+
+ /* Note that live_seq always contains r28+r29, but the other
+ registers to be saved are all below 18. */
+ first_reg = 18 - (live_seq - 2);
+
+ for (reg = 29, offset = -live_seq + 1;
+ reg >= first_reg;
+ reg = (reg == 28 ? 17 : reg - 1), ++offset)
+ {
+ rtx m, r;
+
+ m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
+ r = gen_rtx_REG (QImode, reg);
+ add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
+ }
+
+ cfun->machine->stack_usage += size + live_seq;
}
else
{
int reg;
for (reg = 0; reg < 32; ++reg)
- {
- if (TEST_HARD_REG_BIT (set, reg))
- {
- /* Emit push of register to save. */
- insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
- RTX_FRAME_RELATED_P (insn) = 1;
- }
- }
+ if (TEST_HARD_REG_BIT (set, reg))
+ emit_push_byte (reg, true);
+
if (frame_pointer_needed)
{
if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
{
- /* Push frame pointer. */
- insn = emit_move_insn (pushword, frame_pointer_rtx);
- RTX_FRAME_RELATED_P (insn) = 1;
+ /* Push frame pointer. Always be consistent about the
+ ordering of pushes -- epilogue_restores expects the
+ register pair to be pushed low byte first. */
+ emit_push_byte (REG_Y, true);
+ emit_push_byte (REG_Y + 1, true);
}
if (!size)
is selected. */
rtx myfp;
rtx fp_plus_insns;
- rtx sp_plus_insns = NULL_RTX;
- if (TARGET_TINY_STACK)
+ if (AVR_HAVE_8BIT_SP)
{
- /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
- over 'sbiw' (2 cycles, same size). */
- myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
+ /* The high byte (r29) doesn't change. Prefer 'subi'
+ (1 cycle) over 'sbiw' (2 cycles, same size). */
+ myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
}
else
{
/* Method 1-Adjust frame pointer. */
start_sequence ();
- insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
- RTX_FRAME_RELATED_P (insn) = 1;
+ /* Normally the dwarf2out frame-related-expr interpreter does
+ not expect to have the CFA change once the frame pointer is
+ set up. Thus we avoid marking the move insn below and
+ instead indicate that the entire operation is complete after
+ the frame pointer subtraction is done. */
- insn =
- emit_move_insn (myfp,
- gen_rtx_PLUS (GET_MODE(myfp), myfp,
- gen_int_mode (-size,
- GET_MODE(myfp))));
- RTX_FRAME_RELATED_P (insn) = 1;
+ emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
- /* Copy to stack pointer. */
- if (TARGET_TINY_STACK)
+ insn = emit_move_insn (myfp, plus_constant (myfp, -size));
+ RTX_FRAME_RELATED_P (insn) = 1;
+ add_reg_note (insn, REG_CFA_ADJUST_CFA,
+ gen_rtx_SET (VOIDmode, frame_pointer_rtx,
+ plus_constant (stack_pointer_rtx,
+ -size)));
+
+ /* Copy to stack pointer. Note that since we've already
+ changed the CFA to the frame pointer this operation
+ need not be annotated at all. */
+ if (AVR_HAVE_8BIT_SP)
{
- insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
- RTX_FRAME_RELATED_P (insn) = 1;
+ emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
}
else if (TARGET_NO_INTERRUPTS
|| cfun->machine->is_signal
|| cfun->machine->is_OS_main)
{
- insn =
- emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
- frame_pointer_rtx));
- RTX_FRAME_RELATED_P (insn) = 1;
+ emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
+ frame_pointer_rtx));
}
else if (cfun->machine->is_interrupt)
{
- insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
- frame_pointer_rtx));
- RTX_FRAME_RELATED_P (insn) = 1;
+ emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
+ frame_pointer_rtx));
}
else
{
- insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
- RTX_FRAME_RELATED_P (insn) = 1;
+ emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
}
fp_plus_insns = get_insns ();
/* Method 2-Adjust Stack pointer. */
if (size <= 6)
{
+ rtx sp_plus_insns;
+
start_sequence ();
- insn =
- emit_move_insn (stack_pointer_rtx,
- gen_rtx_PLUS (HImode,
- stack_pointer_rtx,
- gen_int_mode (-size,
- HImode)));
+ insn = plus_constant (stack_pointer_rtx, -size);
+ insn = emit_move_insn (stack_pointer_rtx, insn);
RTX_FRAME_RELATED_P (insn) = 1;
- insn =
- emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
+ insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
RTX_FRAME_RELATED_P (insn) = 1;
sp_plus_insns = get_insns ();
end_sequence ();
- }
- /* Use shortest method. */
- if (size <= 6 && (get_sequence_length (sp_plus_insns)
- < get_sequence_length (fp_plus_insns)))
- emit_insn (sp_plus_insns);
- else
+ /* Use shortest method. */
+ if (get_sequence_length (sp_plus_insns)
+ < get_sequence_length (fp_plus_insns))
+ emit_insn (sp_plus_insns);
+ else
+ emit_insn (fp_plus_insns);
+ }
+ else
emit_insn (fp_plus_insns);
+
+ cfun->machine->stack_usage += size;
}
}
}
+
+ if (flag_stack_usage_info)
+ current_function_static_stack_size = cfun->machine->stack_usage;
}
/* Output summary at end of function prologue. */
}
fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
get_frame_size());
+ fprintf (file, "/* stack size = %d */\n",
+ cfun->machine->stack_usage);
+ /* Create the .L__stack_usage symbol here so all functions have it. Add 1 to
+ the stack usage as an offset so that SP + .L__stack_usage = return address. */
+ fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
}
return 0;
}
+/* Helper for expand_epilogue. Emit a pop of a byte register. */
+
+static void
+emit_pop_byte (unsigned regno)
+{
+ rtx mem, reg;
+
+ mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
+ mem = gen_frame_mem (QImode, mem);
+ reg = gen_rtx_REG (QImode, regno);
+
+ emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
+}
+
/* Output RTL epilogue. */
void
-expand_epilogue (void)
+expand_epilogue (bool sibcall_p)
{
int reg;
int live_seq;
/* epilogue: naked */
if (cfun->machine->is_naked)
{
+ gcc_assert (!sibcall_p);
+
emit_jump_insn (gen_return ());
return;
}
/* Try two methods to adjust stack and select shortest. */
rtx myfp;
rtx fp_plus_insns;
- rtx sp_plus_insns = NULL_RTX;
-
- if (TARGET_TINY_STACK)
+
+ if (AVR_HAVE_8BIT_SP)
{
/* The high byte (r29) doesn't change - prefer 'subi'
(1 cycle) over 'sbiw' (2 cycles, same size). */
- myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
+ myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
}
else
{
/* Method 1-Adjust frame pointer. */
start_sequence ();
- emit_move_insn (myfp,
- gen_rtx_PLUS (HImode, myfp,
- gen_int_mode (size,
- GET_MODE(myfp))));
+ emit_move_insn (myfp, plus_constant (myfp, size));
/* Copy to stack pointer. */
- if (TARGET_TINY_STACK)
+ if (AVR_HAVE_8BIT_SP)
{
emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
}
/* Method 2-Adjust Stack pointer. */
if (size <= 5)
{
+ rtx sp_plus_insns;
+
start_sequence ();
emit_move_insn (stack_pointer_rtx,
- gen_rtx_PLUS (HImode, stack_pointer_rtx,
- gen_int_mode (size,
- HImode)));
+ plus_constant (stack_pointer_rtx, size));
sp_plus_insns = get_insns ();
end_sequence ();
- }
- /* Use shortest method. */
- if (size <= 5 && (get_sequence_length (sp_plus_insns)
- < get_sequence_length (fp_plus_insns)))
- emit_insn (sp_plus_insns);
- else
+ /* Use shortest method. */
+ if (get_sequence_length (sp_plus_insns)
+ < get_sequence_length (fp_plus_insns))
+ emit_insn (sp_plus_insns);
+ else
+ emit_insn (fp_plus_insns);
+ }
+ else
emit_insn (fp_plus_insns);
}
if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
{
- /* Restore previous frame_pointer. */
- emit_insn (gen_pophi (frame_pointer_rtx));
+ /* Restore previous frame_pointer. See expand_prologue for
+ rationale for not using pophi. */
+ emit_pop_byte (REG_Y + 1);
+ emit_pop_byte (REG_Y);
}
}
+
/* Restore used registers. */
for (reg = 31; reg >= 0; --reg)
- {
- if (TEST_HARD_REG_BIT (set, reg))
- emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
- }
+ if (TEST_HARD_REG_BIT (set, reg))
+ emit_pop_byte (reg);
+
if (cfun->machine->is_interrupt || cfun->machine->is_signal)
{
/* Restore RAMPZ using tmp reg as scratch. */
- if(AVR_HAVE_RAMPZ
- && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
+ if (AVR_HAVE_RAMPZ
+ && TEST_HARD_REG_BIT (set, REG_Z)
+ && TEST_HARD_REG_BIT (set, REG_Z + 1))
{
- emit_insn (gen_popqi (tmp_reg_rtx));
- emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
+ emit_pop_byte (TMP_REGNO);
+ emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
tmp_reg_rtx);
}
/* Restore SREG using tmp reg as scratch. */
- emit_insn (gen_popqi (tmp_reg_rtx));
+ emit_pop_byte (TMP_REGNO);
- emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
+ emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
tmp_reg_rtx);
/* Restore tmp REG. */
- emit_insn (gen_popqi (tmp_reg_rtx));
+ emit_pop_byte (TMP_REGNO);
/* Restore zero REG. */
- emit_insn (gen_popqi (zero_reg_rtx));
+ emit_pop_byte (ZERO_REGNO);
}
- emit_jump_insn (gen_return ());
+ if (!sibcall_p)
+ emit_jump_insn (gen_return ());
}
}
fprintf (file, "/* epilogue start */\n");
}
+
+/* Implement TARGET_CANNOT_MODIFY_JUMPS_P.  */
+
+static bool
+avr_cannot_modify_jumps_p (void)
+{
+
+ /* Naked functions must not have any instructions after
+    their epilogue, see PR42240.  */
+
+ if (reload_completed
+ && cfun->machine
+ && cfun->machine->is_naked)
+ {
+ return true;
+ }
+
+ return false;
+}
+
+
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
machine for a memory operand of mode MODE. */
-int
-legitimate_address_p (enum machine_mode mode, rtx x, int strict)
+bool
+avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
enum reg_class r = NO_REGS;
true_regnum (XEXP (x, 0)));
debug_rtx (x);
}
+
if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
: REG_OK_FOR_BASE_NOSTRICT_P (x)))
r = POINTER_REGS;
if (fit)
{
if (! strict
+ || REGNO (XEXP (x,0)) == REG_X
|| REGNO (XEXP (x,0)) == REG_Y
|| REGNO (XEXP (x,0)) == REG_Z)
r = BASE_POINTER_REGS;
memory address for an operand of mode MODE */
rtx
-legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
+avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
{
x = oldx;
if (TARGET_ALL_DEBUG)
}
+/* Helper function to print assembler or, alternatively, to track
+   instruction sequence lengths.
+
+   If PLEN == NULL:
+   Output assembler code from template TPL with operands supplied
+   by OPERANDS.  This is just forwarding to output_asm_insn.
+
+   If PLEN != NULL:
+   Add N_WORDS to *PLEN.
+   Don't output anything.
+*/
+
+static void
+avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
+{
+ if (NULL == plen)
+ {
+ output_asm_insn (tpl, operands);
+ }
+ else
+ {
+ /* Length-computation pass: only count, emit nothing.  */
+ *plen += n_words;
+ }
+}
+
+
/* Return a pointer register name as a string. */
static const char *
default:
if (CONSTANT_ADDRESS_P (addr)
- && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
- || GET_CODE (addr) == LABEL_REF))
+ && text_segment_operand (addr, VOIDmode))
{
- fprintf (file, "gs(");
- output_addr_const (file,addr);
- fprintf (file ,")");
+ rtx x = addr;
+ if (GET_CODE (x) == CONST)
+ x = XEXP (x, 0);
+ if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
+ {
+ /* Assembler gs() will implant word address. Make offset
+ a byte offset inside gs() for assembler. This is
+ needed because the more logical (constant+gs(sym)) is not
+ accepted by gas. For 128K and lower devices this is ok. For
+ large devices it will create a Trampoline to offset from symbol
+ which may not be what the user really wanted. */
+ fprintf (file, "gs(");
+ output_addr_const (file, XEXP (x,0));
+ fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
+ if (AVR_3_BYTE_PC)
+ if (warning (0, "pointer offset from symbol maybe incorrect"))
+ {
+ output_addr_const (stderr, addr);
+ fprintf(stderr,"\n");
+ }
+ }
+ else
+ {
+ fprintf (file, "gs(");
+ output_addr_const (file, addr);
+ fprintf (file, ")");
+ }
}
else
output_addr_const (file, addr);
else if (GET_CODE (x) == MEM)
{
rtx addr = XEXP (x,0);
-
- if (CONSTANT_P (addr) && abcd)
+ if (code == 'm')
{
- fputc ('(', file);
- output_address (addr);
- fprintf (file, ")+%d", abcd);
+ if (!CONSTANT_P (addr))
+ fatal_insn ("bad address, not a constant):", addr);
+ /* Assembler template with m-code is data - not progmem section */
+ if (text_segment_operand (addr, VOIDmode))
+ if (warning ( 0, "accessing data memory with program memory address"))
+ {
+ output_addr_const (stderr, addr);
+ fprintf(stderr,"\n");
+ }
+ output_addr_const (file, addr);
}
else if (code == 'o')
{
else
print_operand_address (file, addr);
}
+ else if (code == 'x')
+ {
+ /* Constant progmem address - like used in jmp or call */
+ if (0 == text_segment_operand (x, VOIDmode))
+ if (warning ( 0, "accessing program memory with data memory address"))
+ {
+ output_addr_const (stderr, x);
+ fprintf(stderr,"\n");
+ }
+ /* Use normal symbol for direct address no linker trampoline needed */
+ output_addr_const (file, x);
+ }
else if (GET_CODE (x) == CONST_DOUBLE)
{
long val;
class CLASS needed to hold a value of mode MODE. */
int
-class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
+class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
{
return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
}
int
avr_jump_mode (rtx x, rtx insn)
{
- int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
+ int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
? XEXP (x, 0) : x));
int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
int jump_distance = cur_addr - dest_addr;
&& INTVAL (op) <= 0xff && INTVAL (op) >= 0);
}
-/* Output all insn addresses and their sizes into the assembly language
- output file. This is helpful for debugging whether the length attributes
- in the md file are correct.
- Output insn cost for next insn. */
+/* Output insn cost for next insn. */
void
final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
int num_operands ATTRIBUTE_UNUSED)
{
- int uid = INSN_UID (insn);
-
- if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
+ if (TARGET_ALL_DEBUG)
{
- fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
- INSN_ADDRESSES (uid),
- INSN_ADDRESSES (uid) - last_insn_address,
- rtx_cost (PATTERN (insn), INSN));
+ fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
+ rtx_cost (PATTERN (insn), INSN, !optimize_size));
}
- last_insn_address = INSN_ADDRESSES (uid);
}
/* Return 0 if undefined, 1 if always true or always false. */
int
-avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
+avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
{
unsigned int max = (mode == QImode ? 0xff :
mode == HImode ? 0xffff :
mode == SImode ? 0xffffffff : 0);
- if (max && operator && GET_CODE (x) == CONST_INT)
+ if (max && op && GET_CODE (x) == CONST_INT)
{
- if (unsigned_condition (operator) != operator)
+ if (unsigned_condition (op) != op)
max >>= 1;
if (max != (INTVAL (x) & max)
{
cum->nregs = 18;
cum->regno = FIRST_CUM_REG;
- if (!libname && fntype)
- {
- int stdarg = (TYPE_ARG_TYPES (fntype) != 0
- && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
- != void_type_node));
- if (stdarg)
- cum->nregs = 0;
- }
+ if (!libname && stdarg_p (fntype))
+ cum->nregs = 0;
+
+ /* Assume the callee may be tail-called.  */
+
+ cfun->machine->sibcall_fails = 0;
}
/* Returns the number of registers to allocate for a function argument. */
static int
-avr_num_arg_regs (enum machine_mode mode, tree type)
+avr_num_arg_regs (enum machine_mode mode, const_tree type)
{
int size;
/* Controls whether a function argument is passed
in a register, and which register. */
-rtx
-function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
- int named ATTRIBUTE_UNUSED)
+static rtx
+avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+ const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int bytes = avr_num_arg_regs (mode, type);
if (cum->nregs && bytes <= cum->nregs)
/* Update the summarizer variable CUM to advance past an argument
in the argument list. */
-void
-function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
- int named ATTRIBUTE_UNUSED)
+static void
+avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+ const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int bytes = avr_num_arg_regs (mode, type);
cum->nregs -= bytes;
cum->regno -= bytes;
+ /* A parameter is being passed in a call-saved register. As the original
+ contents of these regs has to be restored before leaving the function,
+ a function must not pass arguments in call-saved regs in order to get
+ tail-called. */
+
+ if (cum->regno >= 8
+ && cum->nregs >= 0
+ && !call_used_regs[cum->regno])
+ {
+ /* FIXME: We ship info on failing tail-call in struct machine_function.
+ This uses internals of calls.c:expand_call() and the way args_so_far
+ is used. targetm.function_ok_for_sibcall() needs to be extended to
+ pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
+ dependent so that such an extension is not wanted. */
+
+ cfun->machine->sibcall_fails = 1;
+ }
+
+ /* Test if all registers needed by the ABI are actually available. If the
+ user has fixed a GPR needed to pass an argument, an (implicit) function
+ call would clobber that fixed register. See PR45099 for an example. */
+
+ if (cum->regno >= 8
+ && cum->nregs >= 0)
+ {
+ int regno;
+
+ for (regno = cum->regno; regno < cum->regno + bytes; regno++)
+ if (fixed_regs[regno])
+ error ("Register %s is needed to pass a parameter but is fixed",
+ reg_names[regno]);
+ }
+
if (cum->nregs <= 0)
{
cum->nregs = 0;
}
}
+/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
+/* Decide whether we can make a sibling call to a function. DECL is the
+ declaration of the function being targeted by the call and EXP is the
+ CALL_EXPR representing the call. */
+
+static bool
+avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
+{
+ tree fntype_callee;
+
+ /* Tail-calling must fail if callee-saved regs are used to pass
+ function args. We must not tail-call when `epilogue_restores'
+ is used. Unfortunately, we cannot tell at this point if that
+ actually will happen or not, and we cannot step back from
+ tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
+
+ if (cfun->machine->sibcall_fails
+ || TARGET_CALL_PROLOGUES)
+ {
+ return false;
+ }
+
+ fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
+
+ if (decl_callee)
+ {
+ decl_callee = TREE_TYPE (decl_callee);
+ }
+ else
+ {
+ decl_callee = fntype_callee;
+
+ /* Indirect call: strip levels of indirection until the
+    underlying FUNCTION_TYPE or METHOD_TYPE is reached.  */
+ while (FUNCTION_TYPE != TREE_CODE (decl_callee)
+ && METHOD_TYPE != TREE_CODE (decl_callee))
+ {
+ decl_callee = TREE_TYPE (decl_callee);
+ }
+ }
+
+ /* Ensure that caller and callee have compatible epilogues.  */
+
+ if (interrupt_function_p (current_function_decl)
+ || signal_function_p (current_function_decl)
+ || avr_naked_function_p (decl_callee)
+ || avr_naked_function_p (current_function_decl)
+ /* FIXME: For OS_task and OS_main, we are over-conservative.
+ This is due to missing documentation of these attributes
+ and what they actually should do and should not do. */
+ || (avr_OS_task_function_p (decl_callee)
+ != avr_OS_task_function_p (current_function_decl))
+ || (avr_OS_main_function_p (decl_callee)
+ != avr_OS_main_function_p (current_function_decl)))
+ {
+ return false;
+ }
+
+ return true;
+}
+
/***********************************************************************
Functions for outputting various mov's for a various modes
************************************************************************/
}
else if (GET_CODE (dest) == MEM)
{
- const char *template;
+ const char *templ;
if (src == const0_rtx)
operands[1] = zero_reg_rtx;
- template = out_movqi_mr_r (insn, operands, real_l);
+ templ = out_movqi_mr_r (insn, operands, real_l);
if (!real_l)
- output_asm_insn (template, operands);
+ output_asm_insn (templ, operands);
operands[1] = src;
}
{
if (test_hard_reg_class (STACK_REG, dest))
{
- if (TARGET_TINY_STACK)
+ if (AVR_HAVE_8BIT_SP)
return *l = 1, AS2 (out,__SP_L__,%A1);
/* Use simple load of stack pointer if no interrupts are
used. */
}
else if (GET_CODE (dest) == MEM)
{
- const char *template;
+ const char *templ;
if (src == const0_rtx)
operands[1] = zero_reg_rtx;
- template = out_movhi_mr_r (insn, operands, real_l);
+ templ = out_movhi_mr_r (insn, operands, real_l);
if (!real_l)
- output_asm_insn (template, operands);
+ output_asm_insn (templ, operands);
operands[1] = src;
return "";
if (optimize > 0 && io_address_operand (x, QImode))
{
*l = 1;
- return AS2 (in,%0,%1-0x20);
+ return AS2 (in,%0,%m1-0x20);
}
*l = 2;
- return AS2 (lds,%0,%1);
+ return AS2 (lds,%0,%m1);
}
/* memory access by reg+disp */
else if (GET_CODE (x) == PLUS
if (optimize > 0 && io_address_operand (base, HImode))
{
*l = 2;
- return (AS2 (in,%A0,%A1-0x20) CR_TAB
- AS2 (in,%B0,%B1-0x20));
+ return (AS2 (in,%A0,%m1-0x20) CR_TAB
+ AS2 (in,%B0,%m1+1-0x20));
}
*l = 4;
- return (AS2 (lds,%A0,%A1) CR_TAB
- AS2 (lds,%B0,%B1));
+ return (AS2 (lds,%A0,%m1) CR_TAB
+ AS2 (lds,%B0,%m1+1));
}
fatal_insn ("unknown move insn:",insn);
AS2 (ld,%C0,%1) CR_TAB
AS2 (ld,%D0,%1));
else if (CONSTANT_ADDRESS_P (base))
- return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
- AS2 (lds,%B0,%B1) CR_TAB
- AS2 (lds,%C0,%C1) CR_TAB
- AS2 (lds,%D0,%D1));
+ return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
+ AS2 (lds,%B0,%m1+1) CR_TAB
+ AS2 (lds,%C0,%m1+2) CR_TAB
+ AS2 (lds,%D0,%m1+3));
fatal_insn ("unknown move insn:",insn);
return "";
l = &tmp;
if (CONSTANT_ADDRESS_P (base))
- return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
- AS2 (sts,%B0,%B1) CR_TAB
- AS2 (sts,%C0,%C1) CR_TAB
- AS2 (sts,%D0,%D1));
+ return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
+ AS2 (sts,%m0+1,%B1) CR_TAB
+ AS2 (sts,%m0+2,%C1) CR_TAB
+ AS2 (sts,%m0+3,%D1));
if (reg_base > 0) /* (r) */
{
if (reg_base == REG_X) /* (R26) */
}
const char *
-output_movsisf(rtx insn, rtx operands[], int *l)
+output_movsisf (rtx insn, rtx operands[], rtx clobber_reg, int *l)
{
int dummy;
rtx dest = operands[0];
AS2 (mov,%D0,%D1));
}
}
+ else if (CONST_INT_P (src)
+ || CONST_DOUBLE_P (src))
+ {
+ return output_reload_insisf (insn, operands, clobber_reg, real_l);
+ }
else if (CONSTANT_P (src))
{
if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
AS2 (ldi,%C0,hlo8(%1)) CR_TAB
AS2 (ldi,%D0,hhi8(%1)));
}
-
- if (GET_CODE (src) == CONST_INT)
- {
- const char *const clr_op0 =
- AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
- AS1 (clr,%B0) CR_TAB
- AS2 (movw,%C0,%A0))
- : (AS1 (clr,%A0) CR_TAB
- AS1 (clr,%B0) CR_TAB
- AS1 (clr,%C0) CR_TAB
- AS1 (clr,%D0));
-
- if (src == const0_rtx) /* mov r,L */
- {
- *l = AVR_HAVE_MOVW ? 3 : 4;
- return clr_op0;
- }
- else if (src == const1_rtx)
- {
- if (!real_l)
- output_asm_insn (clr_op0, operands);
- *l = AVR_HAVE_MOVW ? 4 : 5;
- return AS1 (inc,%A0);
- }
- else if (src == constm1_rtx)
- {
- /* Immediate constants -1 to any register */
- if (AVR_HAVE_MOVW)
- {
- *l = 4;
- return (AS1 (clr,%A0) CR_TAB
- AS1 (dec,%A0) CR_TAB
- AS2 (mov,%B0,%A0) CR_TAB
- AS2 (movw,%C0,%A0));
- }
- *l = 5;
- return (AS1 (clr,%A0) CR_TAB
- AS1 (dec,%A0) CR_TAB
- AS2 (mov,%B0,%A0) CR_TAB
- AS2 (mov,%C0,%A0) CR_TAB
- AS2 (mov,%D0,%A0));
- }
- else
- {
- int bit_nr = exact_log2 (INTVAL (src));
-
- if (bit_nr >= 0)
- {
- *l = AVR_HAVE_MOVW ? 5 : 6;
- if (!real_l)
- {
- output_asm_insn (clr_op0, operands);
- output_asm_insn ("set", operands);
- }
- if (!real_l)
- avr_output_bld (operands, bit_nr);
-
- return "";
- }
- }
- }
-
/* Last resort, better than loading from memory. */
*l = 10;
return (AS2 (mov,__tmp_reg__,r31) CR_TAB
}
else if (GET_CODE (dest) == MEM)
{
- const char *template;
+ const char *templ;
- if (src == const0_rtx)
+ if (src == CONST0_RTX (GET_MODE (dest)))
operands[1] = zero_reg_rtx;
- template = out_movsi_mr_r (insn, operands, real_l);
+ templ = out_movsi_mr_r (insn, operands, real_l);
if (!real_l)
- output_asm_insn (template, operands);
+ output_asm_insn (templ, operands);
operands[1] = src;
return "";
if (optimize > 0 && io_address_operand (x, QImode))
{
*l = 1;
- return AS2 (out,%0-0x20,%1);
+ return AS2 (out,%m0-0x20,%1);
}
*l = 2;
- return AS2 (sts,%0,%1);
+ return AS2 (sts,%m0,%1);
}
/* memory access by reg+disp */
else if (GET_CODE (x) == PLUS
if (optimize > 0 && io_address_operand (base, HImode))
{
*l = 2;
- return (AS2 (out,%B0-0x20,%B1) CR_TAB
- AS2 (out,%A0-0x20,%A1));
+ return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
+ AS2 (out,%m0-0x20,%A1));
}
- return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
- AS2 (sts,%A0,%A1));
+ return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
+ AS2 (sts,%m0,%A1));
}
if (reg_base > 0)
{
/* Return 1 if frame pointer for current function required. */
-int
-frame_pointer_required_p (void)
+bool
+avr_frame_pointer_required_p (void)
{
return (cfun->calls_alloca
|| crtl->args.info.nregs == 0
/* Output test instruction for HImode. */
const char *
-out_tsthi (rtx insn, int *l)
+out_tsthi (rtx insn, rtx op, int *l)
{
if (compare_sign_p (insn))
{
if (l) *l = 1;
return AS1 (tst,%B0);
}
- if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
+ if (reg_unused_after (insn, op)
&& compare_eq_p (insn))
{
/* Faster than sbiw if we can clobber the operand. */
if (l) *l = 1;
- return AS2 (or,%A0,%B0);
+ return "or %A0,%B0";
}
- if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
+ if (test_hard_reg_class (ADDW_REGS, op))
{
if (l) *l = 1;
return AS2 (sbiw,%0,0);
/* Output test instruction for SImode. */
const char *
-out_tstsi (rtx insn, int *l)
+out_tstsi (rtx insn, rtx op, int *l)
{
if (compare_sign_p (insn))
{
if (l) *l = 1;
return AS1 (tst,%D0);
}
- if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
+ if (test_hard_reg_class (ADDW_REGS, op))
{
if (l) *l = 3;
return (AS2 (sbiw,%A0,0) CR_TAB
carefully hand-optimized in ?sh??i3_out. */
void
-out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
+out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
int *len, int t_len)
{
rtx op[10];
else
{
while (count-- > 0)
- output_asm_insn (template, op);
+ output_asm_insn (templ, op);
}
return;
else
{
strcat (str, "\n1:\t");
- strcat (str, template);
+ strcat (str, templ);
strcat (str, second_label ? "\n2:\t" : "\n\t");
strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
strcat (str, CR_TAB);
return (AS1 (swap,%A0) CR_TAB
AS1 (swap,%B0) CR_TAB
AS2 (ldi,%3,0xf0) CR_TAB
- AS2 (and,%B0,%3) CR_TAB
+ "and %B0,%3" CR_TAB
AS2 (eor,%B0,%A0) CR_TAB
- AS2 (and,%A0,%3) CR_TAB
+ "and %A0,%3" CR_TAB
AS2 (eor,%B0,%A0));
}
break; /* optimize_size ? 6 : 8 */
AS1 (swap,%A0) CR_TAB
AS1 (swap,%B0) CR_TAB
AS2 (ldi,%3,0xf0) CR_TAB
- AS2 (and,%B0,%3) CR_TAB
+ "and %B0,%3" CR_TAB
AS2 (eor,%B0,%A0) CR_TAB
- AS2 (and,%A0,%3) CR_TAB
+ "and %A0,%3" CR_TAB
AS2 (eor,%B0,%A0));
}
break; /* 10 */
AS1 (clr,%A0) CR_TAB
AS1 (swap,%B0) CR_TAB
AS2 (ldi,%3,0xf0) CR_TAB
- AS2 (and,%B0,%3));
+ "and %B0,%3");
}
*len = 6;
return (AS2 (mov,%B0,%A0) CR_TAB
AS1 (swap,%B0) CR_TAB
AS1 (lsl,%B0) CR_TAB
AS2 (ldi,%3,0xe0) CR_TAB
- AS2 (and,%B0,%3));
+ "and %B0,%3");
}
if (AVR_HAVE_MUL)
{
return (AS1 (swap,%B0) CR_TAB
AS1 (swap,%A0) CR_TAB
AS2 (ldi,%3,0x0f) CR_TAB
- AS2 (and,%A0,%3) CR_TAB
+ "and %A0,%3" CR_TAB
AS2 (eor,%A0,%B0) CR_TAB
- AS2 (and,%B0,%3) CR_TAB
+ "and %B0,%3" CR_TAB
AS2 (eor,%A0,%B0));
}
break; /* optimize_size ? 6 : 8 */
AS1 (swap,%B0) CR_TAB
AS1 (swap,%A0) CR_TAB
AS2 (ldi,%3,0x0f) CR_TAB
- AS2 (and,%A0,%3) CR_TAB
+ "and %A0,%3" CR_TAB
AS2 (eor,%A0,%B0) CR_TAB
- AS2 (and,%B0,%3) CR_TAB
+ "and %B0,%3" CR_TAB
AS2 (eor,%A0,%B0));
}
break; /* 10 */
AS1 (clr,%B0) CR_TAB
AS1 (swap,%A0) CR_TAB
AS2 (ldi,%3,0x0f) CR_TAB
- AS2 (and,%A0,%3));
+ "and %A0,%3");
}
*len = 6;
return (AS2 (mov,%A0,%B0) CR_TAB
AS1 (swap,%A0) CR_TAB
AS1 (lsr,%A0) CR_TAB
AS2 (ldi,%3,0x07) CR_TAB
- AS2 (and,%A0,%3));
+ "and %A0,%3");
}
if (AVR_HAVE_MUL)
{
return "";
}
+/* Create RTL split patterns for byte sized rotate expressions. This
+ produces a series of move instructions and considers overlap situations.
+ Overlapping non-HImode operands need a scratch register. */
+
+bool
+avr_rotate_bytes (rtx operands[])
+{
+ int i, j;
+ enum machine_mode mode = GET_MODE (operands[0]);
+ bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
+ bool same_reg = rtx_equal_p (operands[0], operands[1]);
+ int num = INTVAL (operands[2]);
+ rtx scratch = operands[3];
+ /* Work out if byte or word move is needed. Odd byte rotates need QImode.
+ Word move if no scratch is needed, otherwise use size of scratch. */
+ enum machine_mode move_mode = QImode;
+ int move_size, offset, size;
+
+ if (num & 0xf)
+ move_mode = QImode;
+ else if ((mode == SImode && !same_reg) || !overlapped)
+ move_mode = HImode;
+ else
+ move_mode = GET_MODE (scratch);
+
+ /* Force DI rotate to use QI moves since other DI moves are currently split
+ into QI moves so forward propagation works better. */
+ if (mode == DImode)
+ move_mode = QImode;
+ /* Make scratch smaller if needed. */
+ if (GET_MODE (scratch) == HImode && move_mode == QImode)
+ scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
+
+ move_size = GET_MODE_SIZE (move_mode);
+ /* Number of bytes/words to rotate. */
+ offset = (num >> 3) / move_size;
+ /* Number of moves needed. */
+ size = GET_MODE_SIZE (mode) / move_size;
+ /* HImode byte swap is a special case to avoid a scratch register. */
+ if (mode == HImode && same_reg)
+ {
+ /* HImode byte swap, using xor. This is as quick as using scratch. */
+ rtx src, dst;
+ src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
+ dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
+ if (!rtx_equal_p (dst, src))
+ {
+ /* Classic three-XOR swap of the two bytes.  */
+ emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
+ emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
+ emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
+ }
+ }
+ else
+ {
+#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
+ /* Create linked list of moves to determine move order. */
+ struct {
+ rtx src, dst;
+ int links;
+ } move[MAX_SIZE + 8];
+ int blocked, moves;
+
+ gcc_assert (size <= MAX_SIZE);
+ /* Generate list of subreg moves. */
+ for (i = 0; i < size; i++)
+ {
+ int from = i;
+ int to = (from + offset) % size;
+ move[i].src = simplify_gen_subreg (move_mode, operands[1],
+ mode, from * move_size);
+ move[i].dst = simplify_gen_subreg (move_mode, operands[0],
+ mode, to * move_size);
+ move[i].links = -1;
+ }
+ /* Mark dependence where a dst of one move is the src of another move.
+ The first move is a conflict as it must wait until second is
+ performed. We ignore moves to self - we catch this later. */
+ if (overlapped)
+ for (i = 0; i < size; i++)
+ if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
+ for (j = 0; j < size; j++)
+ if (j != i && rtx_equal_p (move[j].src, move[i].dst))
+ {
+ /* The dst of move i is the src of move j. */
+ move[i].links = j;
+ break;
+ }
+
+ blocked = -1;
+ moves = 0;
+ /* Go through move list and perform non-conflicting moves. As each
+ non-overlapping move is made, it may remove other conflicts
+ so the process is repeated until no conflicts remain. */
+ do
+ {
+ blocked = -1;
+ moves = 0;
+ /* Emit move where dst is not also a src or we have used that
+ src already. */
+ for (i = 0; i < size; i++)
+ if (move[i].src != NULL_RTX)
+ {
+ if (move[i].links == -1
+ || move[move[i].links].src == NULL_RTX)
+ {
+ moves++;
+ /* Ignore NOP moves to self. */
+ if (!rtx_equal_p (move[i].dst, move[i].src))
+ emit_move_insn (move[i].dst, move[i].src);
+
+ /* Remove conflict from list. */
+ move[i].src = NULL_RTX;
+ }
+ else
+ blocked = i;
+ }
+
+ /* Check for deadlock. This is when no moves occurred and we have
+ at least one blocked move. */
+ if (moves == 0 && blocked != -1)
+ {
+ /* Need to use scratch register to break deadlock.
+ Add move to put dst of blocked move into scratch.
+ When this move occurs, it will break chain deadlock.
+ The scratch register is substituted for real move. */
+
+ move[size].src = move[blocked].dst;
+ move[size].dst = scratch;
+ /* Scratch move is never blocked. */
+ move[size].links = -1;
+ /* Make sure we have valid link. */
+ gcc_assert (move[blocked].links != -1);
+ /* Replace src of blocking move with scratch reg. */
+ move[move[blocked].links].src = scratch;
+ /* Make dependent on scratch move occurring. */
+ move[blocked].links = size;
+ size=size+1;
+ }
+ }
+ while (blocked != -1);
+ }
+ return true;
+}
+
/* Modifies the length assigned to instruction INSN
LEN is the initially computed length of the insn. */
break;
case SImode:
case SFmode:
- output_movsisf (insn, op, &len);
+ output_movsisf (insn, op, NULL_RTX, &len);
break;
default:
break;
{
switch (GET_MODE (op[1]))
{
- case HImode: out_tsthi (insn,&len); break;
- case SImode: out_tstsi (insn,&len); break;
+ case HImode: out_tsthi (insn, op[1], &len); break;
+ case SImode: out_tstsi (insn, op[1], &len); break;
default: break;
}
}
break;
case SImode:
case SFmode:
- output_reload_insisf (insn, op, &len);
+ output_reload_insisf (insn, op, XEXP (op[2], 0), &len);
break;
default:
break;
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
- && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
- || GET_CODE (x) == LABEL_REF))
+ && text_segment_operand (x, VOIDmode) )
{
fputs ("\t.word\tgs(", asm_out_file);
output_addr_const (asm_out_file, x);
return default_assemble_integer (x, size, aligned_p);
}
-/* The routine used to output NUL terminated strings. We use a special
- version of this for most svr4 targets because doing so makes the
- generated assembly code more compact (and thus faster to assemble)
- as well as more readable, especially for targets like the i386
- (where the only alternative is to output character sequences as
- comma separated lists of numbers). */
+/* Worker function for ASM_DECLARE_FUNCTION_NAME. */
void
-gas_output_limited_string(FILE *file, const char *str)
+avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
{
- const unsigned char *_limited_str = (const unsigned char *) str;
- unsigned ch;
- fprintf (file, "%s\"", STRING_ASM_OP);
- for (; (ch = *_limited_str); _limited_str++)
- {
- int escape;
- switch (escape = ESCAPES[ch])
- {
- case 0:
- putc (ch, file);
- break;
- case 1:
- fprintf (file, "\\%03o", ch);
- break;
- default:
- putc ('\\', file);
- putc (escape, file);
- break;
- }
- }
- fprintf (file, "\"\n");
-}
-/* The routine used to output sequences of byte values. We use a special
- version of this for most svr4 targets because doing so makes the
- generated assembly code more compact (and thus faster to assemble)
- as well as more readable. Note that if we find subparts of the
- character sequence which end with NUL (and which are shorter than
- STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
+ /* If the function has the 'signal' or 'interrupt' attribute, test to
+ make sure that the name of the function is "__vector_NN" so as to
+ catch when the user misspells the interrupt vector name. */
-void
-gas_output_ascii(FILE *file, const char *str, size_t length)
-{
- const unsigned char *_ascii_bytes = (const unsigned char *) str;
- const unsigned char *limit = _ascii_bytes + length;
- unsigned bytes_in_chunk = 0;
- for (; _ascii_bytes < limit; _ascii_bytes++)
+ if (cfun->machine->is_interrupt)
{
- const unsigned char *p;
- if (bytes_in_chunk >= 60)
- {
- fprintf (file, "\"\n");
- bytes_in_chunk = 0;
- }
- for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
- continue;
- if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
- {
- if (bytes_in_chunk > 0)
- {
- fprintf (file, "\"\n");
- bytes_in_chunk = 0;
- }
- gas_output_limited_string (file, (const char*)_ascii_bytes);
- _ascii_bytes = p;
- }
- else
- {
- int escape;
- unsigned ch;
- if (bytes_in_chunk == 0)
- fprintf (file, "\t.ascii\t\"");
- switch (escape = ESCAPES[ch = *_ascii_bytes])
- {
- case 0:
- putc (ch, file);
- bytes_in_chunk++;
- break;
- case 1:
- fprintf (file, "\\%03o", ch);
- bytes_in_chunk += 4;
- break;
- default:
- putc ('\\', file);
- putc (escape, file);
- bytes_in_chunk += 2;
- break;
- }
- }
+ if (strncmp (name, "__vector", strlen ("__vector")) != 0)
+ {
+ warning_at (DECL_SOURCE_LOCATION (decl), 0,
+ "%qs appears to be a misspelled interrupt handler",
+ name);
+ }
}
- if (bytes_in_chunk > 0)
- fprintf (file, "\"\n");
+ else if (cfun->machine->is_signal)
+ {
+ if (strncmp (name, "__vector", strlen ("__vector")) != 0)
+ {
+ warning_at (DECL_SOURCE_LOCATION (decl), 0,
+ "%qs appears to be a misspelled signal handler",
+ name);
+ }
+ }
+
+ ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
+ ASM_OUTPUT_LABEL (file, name);
}
+
/* Return value is nonzero if pseudos that have been
assigned to registers of class CLASS would likely be spilled
because registers of CLASS are needed for spill registers. */
-enum reg_class
-class_likely_spilled_p (int c)
+static bool
+avr_class_likely_spilled_p (reg_class_t c)
{
return (c != ALL_REGS && c != ADDW_REGS);
}
Only `progmem' attribute valid for type. */
-const struct attribute_spec avr_attribute_table[] =
-{
- /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
- { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
- { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
- { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
- { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
- { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
- { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
- { NULL, 0, 0, false, false, false, NULL }
-};
-
/* Handle a "progmem" attribute; arguments as in
struct attribute_spec.handler. */
static tree
}
else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
{
- if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
- {
- warning (0, "only initialized variables can be placed into "
- "program memory area");
- *no_add_attrs = true;
- }
+ *no_add_attrs = false;
}
else
{
- warning (OPT_Wattributes, "%qs attribute ignored",
- IDENTIFIER_POINTER (name));
+ warning (OPT_Wattributes, "%qE attribute ignored",
+ name);
*no_add_attrs = true;
}
}
{
if (TREE_CODE (*node) != FUNCTION_DECL)
{
- warning (OPT_Wattributes, "%qs attribute only applies to functions",
- IDENTIFIER_POINTER (name));
+ warning (OPT_Wattributes, "%qE attribute only applies to functions",
+ name);
*no_add_attrs = true;
}
- else
- {
- const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
- const char *attr = IDENTIFIER_POINTER (name);
-
- /* If the function has the 'signal' or 'interrupt' attribute, test to
- make sure that the name of the function is "__vector_NN" so as to
- catch when the user misspells the interrupt vector name. */
-
- if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
- {
- if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
- {
- warning (0, "%qs appears to be a misspelled interrupt handler",
- func_name);
- }
- }
- else if (strncmp (attr, "signal", strlen ("signal")) == 0)
- {
- if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
- {
- warning (0, "%qs appears to be a misspelled signal handler",
- func_name);
- }
- }
- }
return NULL_TREE;
}
{
if (TREE_CODE (*node) != FUNCTION_TYPE)
{
- warning (OPT_Wattributes, "%qs attribute only applies to functions",
- IDENTIFIER_POINTER (name));
+ warning (OPT_Wattributes, "%qE attribute only applies to functions",
+ name);
*no_add_attrs = true;
}
&& (TREE_STATIC (node) || DECL_EXTERNAL (node))
&& avr_progmem_p (node, *attributes))
{
- static const char dsec[] = ".progmem.data";
- *attributes = tree_cons (get_identifier ("section"),
- build_tree_list (NULL, build_string (strlen (dsec), dsec)),
- *attributes);
+ tree node0 = node;
+
+ /* For C++, we have to peel arrays in order to get correct
+ determination of readonlyness. */
+
+ do
+ node0 = TREE_TYPE (node0);
+ while (TREE_CODE (node0) == ARRAY_TYPE);
+
+ if (error_mark_node == node0)
+ return;
+
+ if (TYPE_READONLY (node0))
+ {
+ static const char dsec[] = ".progmem.data";
- /* ??? This seems sketchy. Why can't the user declare the
- thing const in the first place? */
- TREE_READONLY (node) = 1;
+ *attributes = tree_cons (get_identifier ("section"),
+ build_tree_list (NULL, build_string (strlen (dsec), dsec)),
+ *attributes);
+ }
+ else
+ {
+ error ("variable %q+D must be const in order to be put into"
+ " read-only section by means of %<__attribute__((progmem))%>",
+ node);
+ }
}
}
fprintf (asm_out_file, "\t.p2align 1\n");
}
-/* Implement TARGET_ASM_INIT_SECTIONS. */
+
+/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
+/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
+/* Track need of __do_clear_bss. */
+
+void
+avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
+ const char *name, unsigned HOST_WIDE_INT size,
+ unsigned int align, bool local_p)
+{
+ /* Common/local symbols are uninitialized storage, so startup code
+ must zero them: record that __do_clear_bss is needed.  */
+ avr_need_clear_bss_p = true;
+
+ /* Delegate the actual directive to the generic macros; LOCAL_P
+ selects .lcomm-style vs .comm-style output.  */
+ if (local_p)
+ ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
+ else
+ ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
+}
+
+
+/* Unnamed section callback for data_section
+ to track need of __do_copy_data. */
+
+static void
+avr_output_data_section_asm_op (const void *data)
+{
+ /* data_section is being entered, i.e. .data is in use; request the
+ __do_copy_data startup code from libgcc (checked in avr_file_end).  */
+ avr_need_copy_data_p = true;
+
+ /* Dispatch to default. */
+ output_section_asm_op (data);
+}
+
+
+/* Unnamed section callback for bss_section
+ to track need of __do_clear_bss. */
+
+static void
+avr_output_bss_section_asm_op (const void *data)
+{
+ /* bss_section is being entered, i.e. .bss is in use; request the
+ __do_clear_bss startup code from libgcc (checked in avr_file_end).  */
+ avr_need_clear_bss_p = true;
+
+ /* Dispatch to default. */
+ output_section_asm_op (data);
+}
+
+
+/* Implement `TARGET_ASM_INIT_SECTIONS'. */
static void
avr_asm_init_sections (void)
avr_output_progmem_section_asm_op,
NULL);
readonly_data_section = data_section;
+
+ data_section->unnamed.callback = avr_output_data_section_asm_op;
+ bss_section->unnamed.callback = avr_output_bss_section_asm_op;
+}
+
+
+/* Implement `TARGET_ASM_NAMED_SECTION'. */
+/* Track need of __do_clear_bss, __do_copy_data for named sections. */
+
+void
+avr_asm_named_section (const char *name, unsigned int flags, tree decl)
+{
+ /* Named sections whose name starts with .data, .rodata or
+ .gnu.linkonce.d carry initialized data that must be copied
+ from flash at startup (.rodata lives in RAM on AVR, see
+ avr_asm_init_sections).  */
+ if (!avr_need_copy_data_p)
+ avr_need_copy_data_p = (0 == strncmp (name, ".data", 5)
+ || 0 == strncmp (name, ".rodata", 7)
+ || 0 == strncmp (name, ".gnu.linkonce.d", 15));
+
+ /* Named .bss* sections must be cleared at startup.  */
+ if (!avr_need_clear_bss_p)
+ avr_need_clear_bss_p = (0 == strncmp (name, ".bss", 4));
+
+ default_elf_asm_named_section (name, flags, decl);
 }
static unsigned int
".noinit section");
}
+ if (0 == strncmp (name, ".progmem.data", strlen (".progmem.data")))
+ flags &= ~SECTION_WRITE;
+
return flags;
}
+
+/* Implement `TARGET_ENCODE_SECTION_INFO'. */
+
+static void
+avr_encode_section_info (tree decl, rtx rtl,
+ int new_decl_p)
+{
+ /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
+ readily available, see PR34734. So we postpone the warning
+ about uninitialized data in program memory section until here. */
+
+ /* NOTE(review): NEW_DECL_P presumably marks the first RTL creation
+ for DECL, so the warning fires at most once per declaration --
+ confirm against the ENCODE_SECTION_INFO contract.  */
+ if (new_decl_p
+ && decl && DECL_P (decl)
+ && NULL_TREE == DECL_INITIAL (decl)
+ && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
+ {
+ warning (OPT_Wuninitialized,
+ "uninitialized variable %q+D put into "
+ "program memory area", decl);
+ }
+
+ default_encode_section_info (decl, rtl, new_decl_p);
+}
+
+
+/* Implement `TARGET_ASM_FILE_START'. */
/* Outputs some appropriate text to go at the start of an assembler
file. */
avr_file_start (void)
{
if (avr_current_arch->asm_only)
- error ("MCU %qs supported for assembler only", avr_mcu_name);
+ error ("MCU %qs supported for assembler only", avr_current_device->name);
default_file_start ();
-/* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
+/* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
fputs ("__SREG__ = 0x3f\n"
"__SP_H__ = 0x3e\n"
"__SP_L__ = 0x3d\n", asm_out_file);
fputs ("__tmp_reg__ = 0\n"
"__zero_reg__ = 1\n", asm_out_file);
-
- /* FIXME: output these only if there is anything in the .data / .bss
- sections - some code size could be saved by not linking in the
- initialization code from libgcc if one or both sections are empty. */
- fputs ("\t.global __do_copy_data\n", asm_out_file);
- fputs ("\t.global __do_clear_bss\n", asm_out_file);
}
+
+/* Implement `TARGET_ASM_FILE_END'. */
/* Outputs to the stdio stream FILE some
appropriate text to go at the end of an assembler file. */
static void
avr_file_end (void)
{
+ /* The avr_need_*_p flags are set while output is generated, by
+ avr_output_data_section_asm_op / avr_output_bss_section_asm_op,
+ avr_asm_named_section and avr_asm_output_aligned_decl_common.  */
+
+ /* Output these only if there is anything in the
+ .data* / .rodata* / .gnu.linkonce.* resp. .bss*
+ input section(s) - some code size can be saved by not
+ linking in the initialization code from libgcc if resp.
+ sections are empty. */
+
+ if (avr_need_copy_data_p)
+ fputs (".global __do_copy_data\n", asm_out_file);
+
+ if (avr_need_clear_bss_p)
+ fputs (".global __do_clear_bss\n", asm_out_file);
 }
/* Choose the order in which to allocate hard registers for
}
+/* Implement `TARGET_REGISTER_MOVE_COST' */
+
+static int
+avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+ reg_class_t from, reg_class_t to)
+{
+ /* Moving out of the stack-pointer class costs 6, into it 12,
+ any other register-register move 2; MODE is ignored.  */
+ return (from == STACK_REG ? 6
+ : to == STACK_REG ? 12
+ : 2);
+}
+
+
+/* Implement `TARGET_MEMORY_MOVE_COST' */
+
+static int
+avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
+ bool in ATTRIBUTE_UNUSED)
+{
+ /* Cost scales with mode size only: QI=2, HI=4, SI/SF=8,
+ anything wider=16.  Class and direction are ignored.  */
+ return (mode == QImode ? 2
+ : mode == HImode ? 4
+ : mode == SImode ? 8
+ : mode == SFmode ? 8
+ : 16);
+}
+
+
/* Mutually recursive subroutine of avr_rtx_cost for calculating the
cost of an RTX operand given its context. X is the rtx of the
operand, MODE is its mode, and OUTER is the rtx_code of this
operand's parent operator. */
static int
-avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
+avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
+ bool speed)
{
enum rtx_code code = GET_CODE (x);
int total;
}
total = 0;
- avr_rtx_costs (x, code, outer, &total);
+ avr_rtx_costs (x, code, outer, &total, speed);
return total;
}
case, *TOTAL contains the cost result. */
static bool
-avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
+avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
+ bool speed)
{
+ enum rtx_code code = (enum rtx_code) codearg;
enum machine_mode mode = GET_MODE (x);
HOST_WIDE_INT val;
default:
return false;
}
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
return true;
case ABS:
default:
return false;
}
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
return true;
case NOT:
*total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
return true;
case ZERO_EXTEND:
*total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
- GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
return true;
case SIGN_EXTEND:
*total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
- GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
return true;
case PLUS:
case QImode:
*total = COSTS_N_INSNS (1);
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
break;
case HImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
*total = COSTS_N_INSNS (2);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
*total = COSTS_N_INSNS (1);
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
*total = COSTS_N_INSNS (4);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
*total = COSTS_N_INSNS (1);
default:
return false;
}
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
return true;
case MINUS:
case AND:
case IOR:
*total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
return true;
case XOR:
*total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
return true;
case MULT:
{
case QImode:
if (AVR_HAVE_MUL)
- *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
- else if (optimize_size)
+ *total = COSTS_N_INSNS (!speed ? 3 : 4);
+ else if (!speed)
*total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
else
return false;
case HImode:
if (AVR_HAVE_MUL)
- *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
- else if (optimize_size)
+ *total = COSTS_N_INSNS (!speed ? 7 : 10);
+ else if (!speed)
*total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
else
return false;
default:
return false;
}
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
return true;
case DIV:
case MOD:
case UDIV:
case UMOD:
- if (optimize_size)
+ if (!speed)
*total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
else
return false;
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
return true;
+ case ROTATE:
+ switch (mode)
+ {
+ case QImode:
+ if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
+ *total = COSTS_N_INSNS (1);
+
+ break;
+
+ case HImode:
+ if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
+ *total = COSTS_N_INSNS (3);
+
+ break;
+
+ case SImode:
+ if (CONST_INT_P (XEXP (x, 1)))
+ switch (INTVAL (XEXP (x, 1)))
+ {
+ case 8:
+ case 24:
+ *total = COSTS_N_INSNS (5);
+ break;
+ case 16:
+ *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
+ break;
+ }
+ break;
+
+ default:
+ return false;
+ }
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
+ return true;
+
case ASHIFT:
switch (mode)
{
case QImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
- *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 4 : 17);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
else
{
case HImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
- *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 5 : 41);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
else
switch (INTVAL (XEXP (x, 1)))
*total = COSTS_N_INSNS (5);
break;
case 4:
- *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
+ *total = COSTS_N_INSNS (!speed ? 5 : 8);
break;
case 6:
- *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
+ *total = COSTS_N_INSNS (!speed ? 5 : 9);
break;
case 5:
- *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
+ *total = COSTS_N_INSNS (!speed ? 5 : 10);
break;
default:
- *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 5 : 41);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
break;
case SImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
- *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 7 : 113);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
else
switch (INTVAL (XEXP (x, 1)))
*total = COSTS_N_INSNS (6);
break;
case 2:
- *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
+ *total = COSTS_N_INSNS (!speed ? 7 : 8);
break;
default:
- *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 7 : 113);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
break;
default:
return false;
}
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
return true;
case ASHIFTRT:
case QImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
- *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 4 : 17);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
else
{
case HImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
- *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 5 : 41);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
else
switch (INTVAL (XEXP (x, 1)))
*total = COSTS_N_INSNS (5);
break;
case 11:
- *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
+ *total = COSTS_N_INSNS (!speed ? 5 : 6);
break;
case 12:
- *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
+ *total = COSTS_N_INSNS (!speed ? 5 : 7);
break;
case 6:
case 13:
- *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
+ *total = COSTS_N_INSNS (!speed ? 5 : 8);
break;
default:
- *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 5 : 41);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
break;
case SImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
- *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 7 : 113);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
else
switch (INTVAL (XEXP (x, 1)))
*total = COSTS_N_INSNS (6);
break;
case 2:
- *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
+ *total = COSTS_N_INSNS (!speed ? 7 : 8);
break;
case 31:
*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
break;
default:
- *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 7 : 113);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
break;
default:
return false;
}
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
return true;
case LSHIFTRT:
case QImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
- *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 4 : 17);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
else
{
case HImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
- *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 5 : 41);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
else
switch (INTVAL (XEXP (x, 1)))
case 12:
case 13:
case 14:
- *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
+ *total = COSTS_N_INSNS (!speed ? 5 : 6);
break;
case 4:
- *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
+ *total = COSTS_N_INSNS (!speed ? 5 : 7);
break;
case 5:
case 6:
- *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
+ *total = COSTS_N_INSNS (!speed ? 5 : 9);
break;
default:
- *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 5 : 41);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
break;
case SImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
- *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 7 : 113);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
else
switch (INTVAL (XEXP (x, 1)))
*total = COSTS_N_INSNS (4);
break;
case 2:
- *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
+ *total = COSTS_N_INSNS (!speed ? 7 : 8);
break;
case 8:
case 16:
*total = COSTS_N_INSNS (6);
break;
default:
- *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total = COSTS_N_INSNS (!speed ? 7 : 113);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
}
break;
default:
return false;
}
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
return true;
case COMPARE:
case QImode:
*total = COSTS_N_INSNS (1);
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
break;
case HImode:
*total = COSTS_N_INSNS (2);
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
else if (INTVAL (XEXP (x, 1)) != 0)
*total += COSTS_N_INSNS (1);
break;
case SImode:
*total = COSTS_N_INSNS (4);
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
- *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
else if (INTVAL (XEXP (x, 1)) != 0)
*total += COSTS_N_INSNS (3);
break;
default:
return false;
}
- *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
return true;
default:
/* Calculate the cost of a memory address. */
static int
-avr_address_cost (rtx x)
+avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
{
if (GET_CODE (x) == PLUS
&& GET_CODE (XEXP (x,1)) == CONST_INT
XEXP (pattern,1) = x;
INSN_CODE (next) = -1;
}
+ else if (true_regnum (XEXP (pattern, 0)) >= 0
+ && XEXP (pattern, 1) == const0_rtx)
+ {
+ /* This is a tst insn, we can reverse it. */
+ rtx next = next_real_insn (insn);
+ rtx pat = PATTERN (next);
+ rtx src = SET_SRC (pat);
+ rtx t = XEXP (src,0);
+
+ PUT_CODE (t, swap_condition (GET_CODE (t)));
+ XEXP (pattern, 1) = XEXP (pattern, 0);
+ XEXP (pattern, 0) = const0_rtx;
+ INSN_CODE (next) = -1;
+ INSN_CODE (insn) = -1;
+ }
else if (true_regnum (XEXP (pattern,0)) >= 0
&& GET_CODE (XEXP (pattern,1)) == CONST_INT)
{
}
}
}
- else if (true_regnum (SET_SRC (pattern)) >= 0)
- {
- /* This is a tst insn */
- rtx next = next_real_insn (insn);
- rtx pat = PATTERN (next);
- rtx src = SET_SRC (pat);
- rtx t = XEXP (src,0);
-
- PUT_CODE (t, swap_condition (GET_CODE (t)));
- SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
- SET_SRC (pattern));
- INSN_CODE (next) = -1;
- INSN_CODE (insn) = -1;
- }
}
}
}
/* Returns register number for function return value.*/
-int
+static inline unsigned int
 avr_ret_register (void)
 {
+ /* Hard register 24 is the base of the return-value slot; used by
+ avr_libcall_value and avr_function_value_regno_p.  */
 return 24;
 }
+/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
+
+static bool
+avr_function_value_regno_p (const unsigned int regno)
+{
+ /* Only the single register given by avr_ret_register () (24) can
+ hold a function return value.  */
+ return (regno == avr_ret_register ());
+}
+
/* Create an RTX representing the place where a
library function returns a value of mode MODE. */
-rtx
-avr_libcall_value (enum machine_mode mode)
+static rtx
+avr_libcall_value (enum machine_mode mode,
+ const_rtx func ATTRIBUTE_UNUSED)
 {
 int offs = GET_MODE_SIZE (mode);
+ /* Values narrower than 2 bytes still occupy a full 2-byte slot.  */
 if (offs < 2)
 offs = 2;
- return gen_rtx_REG (mode, RET_REGISTER + 2 - offs)
+ /* The value ends at avr_ret_register () + 1 and grows downward
+ with increasing size, so the base register is 24 + 2 - offs.  */
+ return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
 }
/* Create an RTX representing the place where a
function returns a value of data type VALTYPE. */
-rtx
-avr_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
+static rtx
+avr_function_value (const_tree type,
+ const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
+ bool outgoing ATTRIBUTE_UNUSED)
{
unsigned int offs;
-
+
if (TYPE_MODE (type) != BLKmode)
- return avr_libcall_value (TYPE_MODE (type));
+ return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
offs = int_size_in_bytes (type);
if (offs < 2)
else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
offs = GET_MODE_SIZE (DImode);
- return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
-}
-
-/* Places additional restrictions on the register class to
- use when it is necessary to copy value X into a register
- in class CLASS. */
-
-enum reg_class
-preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
-{
- return class;
+ return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
}
int
-test_hard_reg_class (enum reg_class class, rtx x)
+test_hard_reg_class (enum reg_class rclass, rtx x)
{
int regno = true_regnum (x);
if (regno < 0)
return 0;
- if (TEST_HARD_REG_CLASS (class, regno))
+ if (TEST_HARD_REG_CLASS (rclass, regno))
return 1;
return 0;
int
avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
- /* Disallow QImode in stack pointer regs. */
- if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
- return 0;
-
- /* The only thing that can go into registers r28:r29 is a Pmode. */
- if (regno == REG_Y && mode == Pmode)
- return 1;
-
- /* Otherwise disallow all regno/mode combinations that span r28:r29. */
- if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
- return 0;
-
- if (mode == QImode)
+ /* NOTE: 8-bit values must not be disallowed for R28 or R29.
+ Disallowing QI et al. in these regs might lead to code like
+ (set (subreg:QI (reg:HI 28) n) ...)
+ which will result in wrong code because reload does not
+ handle SUBREGs of hard registers like this.
+ This could be fixed in reload. However, it appears
+ that fixing reload is not wanted by reload people. */
+
+ /* Any GENERAL_REGS register can hold 8-bit values. */
+
+ if (GET_MODE_SIZE (mode) == 1)
 return 1;
- /* Modes larger than QImode occupy consecutive registers. */
- if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
+ /* FIXME: Ideally, the following test is not needed.
+ However, it turned out that it can reduce the number
+ of spill fails. AVR and its poor endowment with
+ address registers is an extreme stress test for reload. */
+
+ if (GET_MODE_SIZE (mode) >= 4
+ && regno >= REG_X)
 return 0;
- /* All modes larger than QImode should start in an even register. */
+ /* All modes larger than 8 bits should start in an even register. */
+
 return !(regno & 1);
 }
}
+/* Reload a SI or SF compile time constant (OP[1]) into a GPR (OP[0]).
+ CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
+ into a NO_LD_REGS. If CLOBBER_REG is NULL_RTX we either don't need a
+ clobber reg or have to cook one up.
+
+ LEN == NULL: Output instructions.
+
+ LEN != NULL: Output nothing. Increment *LEN by number of words occupied
+ by the insns printed.
+
+ Return "". */
+
 const char *
-output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
+output_reload_insisf (rtx insn ATTRIBUTE_UNUSED,
+ rtx *op, rtx clobber_reg, int *len)
 {
- rtx src = operands[1];
- int cnst = (GET_CODE (src) == CONST_INT);
+ rtx src = op[1];
+ rtx dest = op[0];
+ rtx xval, xdest[4];
+ int ival[4];
+ /* Sentinel outside any byte value, so the first comparison against
+ clobber_val below cannot match accidentally.  */
+ int clobber_val = 1234;
+ bool cooked_clobber_p = false;
+ bool set_p = false;
+ unsigned int n;
+ enum machine_mode mode = GET_MODE (dest);
+
+ gcc_assert (REG_P (dest));
 if (len)
+ *len = 0;
+
+ /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
+ but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
+
+ if (14 == REGNO (dest))
 {
- if (cnst)
- *len = 4 + ((INTVAL (src) & 0xff) != 0)
- + ((INTVAL (src) & 0xff00) != 0)
- + ((INTVAL (src) & 0xff0000) != 0)
- + ((INTVAL (src) & 0xff000000) != 0);
- else
- *len = 8;
-
- return "";
+ clobber_reg = gen_rtx_REG (QImode, 17);
 }
- if (cnst && ((INTVAL (src) & 0xff) == 0))
- output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
- else
- {
- output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
- output_asm_insn (AS2 (mov, %A0, %2), operands);
- }
- if (cnst && ((INTVAL (src) & 0xff00) == 0))
- output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
- else
+ /* We might need a clobber reg but don't have one. Look at the value
+ to be loaded more closely. A clobber is only needed if it contains
+ a byte that is neither 0, -1 or a power of 2. */
+
+ if (NULL_RTX == clobber_reg
+ && !test_hard_reg_class (LD_REGS, dest))
 {
- output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
- output_asm_insn (AS2 (mov, %B0, %2), operands);
+ for (n = 0; n < GET_MODE_SIZE (mode); n++)
+ {
+ xval = simplify_gen_subreg (QImode, src, mode, n);
+
+ if (!(const0_rtx == xval
+ || constm1_rtx == xval
+ || single_one_operand (xval, QImode)))
+ {
+ /* We have no clobber reg but need one. Cook one up.
+ That's cheaper than loading from constant pool. */
+
+ cooked_clobber_p = true;
+ clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
+ avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
+ break;
+ }
+ }
 }
- if (cnst && ((INTVAL (src) & 0xff0000) == 0))
- output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
- else
+
+ /* Now start filling DEST from LSB to MSB. */
+
+ for (n = 0; n < GET_MODE_SIZE (mode); n++)
 {
- output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
- output_asm_insn (AS2 (mov, %C0, %2), operands);
+ bool done_byte = false;
+ unsigned int j;
+ rtx xop[3];
+
+ /* Crop the n-th sub-byte.  SRC is a compile-time constant (see
+ the function comment), so each sub-byte is a CONST_INT.  */
+
+ xval = simplify_gen_subreg (QImode, src, mode, n);
+ xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
+ ival[n] = INTVAL (xval);
+
+ /* Look if we can reuse the low word by means of MOVW. */
+
+ if (n == 2
+ && AVR_HAVE_MOVW)
+ {
+ rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
+ rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
+
+ if (INTVAL (lo16) == INTVAL (hi16))
+ {
+ avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
+ break;
+ }
+ }
+
+ /* Use CLR to zero a value so that cc0 is set as expected
+ for zero. */
+
+ if (ival[n] == 0)
+ {
+ avr_asm_len ("clr %0", &xdest[n], len, 1);
+ continue;
+ }
+
+ /* The clobber reg already holds exactly this byte value and is
+ this destination byte: nothing to emit.  */
+ if (clobber_val == ival[n]
+ && REGNO (clobber_reg) == REGNO (xdest[n]))
+ {
+ continue;
+ }
+
+ /* LD_REGS can use LDI to move a constant value */
+
+ if (test_hard_reg_class (LD_REGS, xdest[n]))
+ {
+ xop[0] = xdest[n];
+ xop[1] = xval;
+ avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
+ continue;
+ }
+
+ /* Try to reuse value already loaded in some lower byte. */
+
+ for (j = 0; j < n; j++)
+ if (ival[j] == ival[n])
+ {
+ xop[0] = xdest[n];
+ xop[1] = xdest[j];
+
+ avr_asm_len ("mov %0,%1", xop, len, 1);
+ done_byte = true;
+ break;
+ }
+
+ if (done_byte)
+ continue;
+
+ /* Need no clobber reg for -1: Use CLR/DEC */
+
+ if (-1 == ival[n])
+ {
+ avr_asm_len ("clr %0" CR_TAB
+ "dec %0", &xdest[n], len, 2);
+ continue;
+ }
+
+ /* Use T flag or INC to manage powers of 2 if we have
+ no clobber reg. */
+
+ if (NULL_RTX == clobber_reg
+ && single_one_operand (xval, QImode))
+ {
+ if (1 == ival[n])
+ {
+ avr_asm_len ("clr %0" CR_TAB
+ "inc %0", &xdest[n], len, 2);
+ continue;
+ }
+
+ xop[0] = xdest[n];
+ xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
+
+ gcc_assert (constm1_rtx != xop[1]);
+
+ /* SET is emitted at most once; the T flag keeps its value for
+ all subsequent BLDs.  */
+ if (!set_p)
+ {
+ set_p = true;
+ avr_asm_len ("set", xop, len, 1);
+ }
+
+ avr_asm_len ("clr %0" CR_TAB
+ "bld %0,%1", xop, len, 2);
+ continue;
+ }
+
+ /* We actually need the LD_REGS clobber reg. */
+
+ gcc_assert (NULL_RTX != clobber_reg);
+
+ xop[0] = xdest[n];
+ xop[1] = xval;
+ xop[2] = clobber_reg;
+ clobber_val = ival[n];
+
+ avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
+ "mov %0,%2", xop, len, 2);
 }
- if (cnst && ((INTVAL (src) & 0xff000000) == 0))
- output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
- else
+
+ /* If we cooked up a clobber reg above, restore it. */
+
+ if (cooked_clobber_p)
 {
- output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
- output_asm_insn (AS2 (mov, %D0, %2), operands);
+ avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
 }
+
 return "";
 }
fprintf (stream, "\trjmp .L%d\n", value);
}
-/* Returns 1 if SCRATCH are safe to be allocated as a scratch
+/* Returns true if SCRATCH are safe to be allocated as a scratch
registers (for a define_peephole2) in the current function. */
-int
-avr_peep2_scratch_safe (rtx scratch)
+bool
+avr_hard_regno_scratch_ok (unsigned int regno)
 {
+ /* REGNO is the hard register a define_peephole2 wants as scratch.  */
+
+ /* Interrupt functions can only use registers that have already been saved
+ by the prologue, even if they would normally be call-clobbered. */
+
+ if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
+ && !df_regs_ever_live_p (regno))
+ return false;
+
+ /* Don't allow hard registers that might be part of the frame pointer.
+ Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
+ and don't care for a frame pointer that spans more than one register. */
+
+ if ((!reload_completed || frame_pointer_needed)
+ && (regno == REG_Y || regno == REG_Y + 1))
 {
- int first_reg = true_regnum (scratch);
- int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
- int reg;
+ return false;
+ }
- for (reg = first_reg; reg <= last_reg; reg++)
- {
- if (!df_regs_ever_live_p (reg))
- return 0;
- }
+ return true;
+}
+
+/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
+
+int
+avr_hard_regno_rename_ok (unsigned int old_reg,
+ unsigned int new_reg)
+{
+ /* Same restrictions as avr_hard_regno_scratch_ok, applied to both
+ ends of the rename.  */
+
+ /* Interrupt functions can only use registers that have already been
+ saved by the prologue, even if they would normally be
+ call-clobbered. */
+
+ if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
+ && !df_regs_ever_live_p (new_reg))
+ return 0;
+
+ /* Don't allow hard registers that might be part of the frame pointer.
+ Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
+ and don't care for a frame pointer that spans more than one register. */
+
+ if ((!reload_completed || frame_pointer_needed)
+ && (old_reg == REG_Y || old_reg == REG_Y + 1
+ || new_reg == REG_Y || new_reg == REG_Y + 1))
+ {
+ return 0;
 }
+
 return 1;
 }
-/* Output a branch that tests a single bit of a register (QI, HI or SImode)
+/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
or memory location in the I/O space (QImode only).
Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
Operand 1: register operand to test, or CONST_INT memory address.
- Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
+ Operand 2: bit number.
Operand 3: label to jump to if the test is true. */
const char *
if (INTVAL (operands[1]) < 0x40)
{
if (comp == EQ)
- output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
+ output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
else
- output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
+ output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
}
else
{
- output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
+ output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
if (comp == EQ)
output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
else
else /* HImode or SImode */
{
static char buf[] = "sbrc %A1,0";
- int bit_nr = exact_log2 (INTVAL (operands[2])
- & GET_MODE_MASK (GET_MODE (operands[1])));
-
+ int bit_nr = INTVAL (operands[2]);
buf[3] = (comp == EQ) ? 's' : 'c';
buf[6] = 'A' + (bit_nr >> 3);
buf[9] = '0' + (bit_nr & 7);
if (long_jump)
return (AS1 (rjmp,.+4) CR_TAB
- AS1 (jmp,%3));
+ AS1 (jmp,%x3));
if (!reverse)
- return AS1 (rjmp,%3);
+ return AS1 (rjmp,%x3);
return "";
}
return false;
}
+/* Worker function for CASE_VALUES_THRESHOLD. */
+
+unsigned int
+avr_case_values_threshold (void)
+{
+ return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
+}
+
+/* Helper for __builtin_avr_delay_cycles.  */
+
+static void
+avr_expand_delay_cycles (rtx operands0)
+{
+ unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
+ unsigned HOST_WIDE_INT cycles_used;
+ unsigned HOST_WIDE_INT loop_count;
+
+ if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
+ {
+ loop_count = ((cycles - 9) / 6) + 1;
+ cycles_used = ((loop_count - 1) * 6) + 9;
+ emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
+ cycles -= cycles_used;
+ }
+
+ if (IN_RANGE (cycles, 262145, 83886081))
+ {
+ loop_count = ((cycles - 7) / 5) + 1;
+ if (loop_count > 0xFFFFFF)
+ loop_count = 0xFFFFFF;
+ cycles_used = ((loop_count - 1) * 5) + 7;
+ emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
+ cycles -= cycles_used;
+ }
+
+ if (IN_RANGE (cycles, 768, 262144))
+ {
+ loop_count = ((cycles - 5) / 4) + 1;
+ if (loop_count > 0xFFFF)
+ loop_count = 0xFFFF;
+ cycles_used = ((loop_count - 1) * 4) + 5;
+ emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
+ cycles -= cycles_used;
+ }
+
+ if (IN_RANGE (cycles, 6, 767))
+ {
+ loop_count = cycles / 3;
+ if (loop_count > 255)
+ loop_count = 255;
+ cycles_used = loop_count * 3;
+ emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
+ cycles -= cycles_used;
+ }
+
+ while (cycles >= 2)
+ {
+ emit_insn (gen_nopv (GEN_INT (2)));
+ cycles -= 2;
+ }
+
+ if (cycles == 1)
+ {
+ emit_insn (gen_nopv (GEN_INT (1)));
+ cycles--;
+ }
+}
+
+/* IDs for all the AVR builtins. */
+
+enum avr_builtin_id
+ {
+ AVR_BUILTIN_NOP,
+ AVR_BUILTIN_SEI,
+ AVR_BUILTIN_CLI,
+ AVR_BUILTIN_WDR,
+ AVR_BUILTIN_SLEEP,
+ AVR_BUILTIN_SWAP,
+ AVR_BUILTIN_FMUL,
+ AVR_BUILTIN_FMULS,
+ AVR_BUILTIN_FMULSU,
+ AVR_BUILTIN_DELAY_CYCLES
+ };
+
+#define DEF_BUILTIN(NAME, TYPE, CODE) \
+ do \
+ { \
+ add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
+ NULL, NULL_TREE); \
+ } while (0)
+
+
+/* Implement `TARGET_INIT_BUILTINS' */
+/* Set up all builtin functions for this target. */
+
+static void
+avr_init_builtins (void)
+{
+ tree void_ftype_void
+ = build_function_type_list (void_type_node, NULL_TREE);
+ tree uchar_ftype_uchar
+ = build_function_type_list (unsigned_char_type_node,
+ unsigned_char_type_node,
+ NULL_TREE);
+ tree uint_ftype_uchar_uchar
+ = build_function_type_list (unsigned_type_node,
+ unsigned_char_type_node,
+ unsigned_char_type_node,
+ NULL_TREE);
+ tree int_ftype_char_char
+ = build_function_type_list (integer_type_node,
+ char_type_node,
+ char_type_node,
+ NULL_TREE);
+ tree int_ftype_char_uchar
+ = build_function_type_list (integer_type_node,
+ char_type_node,
+ unsigned_char_type_node,
+ NULL_TREE);
+ tree void_ftype_ulong
+ = build_function_type_list (void_type_node,
+ long_unsigned_type_node,
+ NULL_TREE);
+
+ DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
+ DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
+ DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
+ DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
+ DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
+ DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
+ DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
+ AVR_BUILTIN_DELAY_CYCLES);
+
+ DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
+ AVR_BUILTIN_FMUL);
+ DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
+ AVR_BUILTIN_FMULS);
+ DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
+ AVR_BUILTIN_FMULSU);
+}
+
+#undef DEF_BUILTIN
+
+struct avr_builtin_description
+{
+ const enum insn_code icode;
+ const char *const name;
+ const enum avr_builtin_id id;
+};
+
+static const struct avr_builtin_description
+bdesc_1arg[] =
+ {
+ { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
+ };
+
+static const struct avr_builtin_description
+bdesc_2arg[] =
+ {
+ { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
+ { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
+ { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
+ };
+
+/* Subroutine of avr_expand_builtin to take care of unop insns. */
+
+static rtx
+avr_expand_unop_builtin (enum insn_code icode, tree exp,
+ rtx target)
+{
+ rtx pat;
+ tree arg0 = CALL_EXPR_ARG (exp, 0);
+ rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ enum machine_mode op0mode = GET_MODE (op0);
+ enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ enum machine_mode mode0 = insn_data[icode].operand[1].mode;
+
+ if (! target
+ || GET_MODE (target) != tmode
+ || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
+ {
+ target = gen_reg_rtx (tmode);
+ }
+
+ if (op0mode == SImode && mode0 == HImode)
+ {
+ op0mode = HImode;
+ op0 = gen_lowpart (HImode, op0);
+ }
+
+ gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
+
+ if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
+ op0 = copy_to_mode_reg (mode0, op0);
+
+ pat = GEN_FCN (icode) (target, op0);
+ if (! pat)
+ return 0;
+
+ emit_insn (pat);
+
+ return target;
+}
+
+
+/* Subroutine of avr_expand_builtin to take care of binop insns. */
+
+static rtx
+avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
+{
+ rtx pat;
+ tree arg0 = CALL_EXPR_ARG (exp, 0);
+ tree arg1 = CALL_EXPR_ARG (exp, 1);
+ rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ enum machine_mode op0mode = GET_MODE (op0);
+ enum machine_mode op1mode = GET_MODE (op1);
+ enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ enum machine_mode mode0 = insn_data[icode].operand[1].mode;
+ enum machine_mode mode1 = insn_data[icode].operand[2].mode;
+
+ if (! target
+ || GET_MODE (target) != tmode
+ || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
+ {
+ target = gen_reg_rtx (tmode);
+ }
+
+ if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
+ {
+ op0mode = HImode;
+ op0 = gen_lowpart (HImode, op0);
+ }
+
+ if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
+ {
+ op1mode = HImode;
+ op1 = gen_lowpart (HImode, op1);
+ }
+
+ /* In case the insn wants input operands in modes different from
+ the result, abort. */
+
+ gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
+ && (op1mode == mode1 || op1mode == VOIDmode));
+
+ if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
+ op0 = copy_to_mode_reg (mode0, op0);
+
+ if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
+ op1 = copy_to_mode_reg (mode1, op1);
+
+ pat = GEN_FCN (icode) (target, op0, op1);
+
+ if (! pat)
+ return 0;
+
+ emit_insn (pat);
+ return target;
+}
+
+
+/* Expand an expression EXP that calls a built-in function,
+ with result going to TARGET if that's convenient
+ (and in mode MODE if that's convenient).
+ SUBTARGET may be used as the target for computing one of EXP's operands.
+ IGNORE is nonzero if the value is to be ignored. */
+
+static rtx
+avr_expand_builtin (tree exp, rtx target,
+ rtx subtarget ATTRIBUTE_UNUSED,
+ enum machine_mode mode ATTRIBUTE_UNUSED,
+ int ignore ATTRIBUTE_UNUSED)
+{
+ size_t i;
+ const struct avr_builtin_description *d;
+ tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
+ unsigned int id = DECL_FUNCTION_CODE (fndecl);
+ tree arg0;
+ rtx op0;
+
+ switch (id)
+ {
+ case AVR_BUILTIN_NOP:
+ emit_insn (gen_nopv (GEN_INT (1)));
+ return 0;
+
+ case AVR_BUILTIN_SEI:
+ emit_insn (gen_enable_interrupt ());
+ return 0;
+
+ case AVR_BUILTIN_CLI:
+ emit_insn (gen_disable_interrupt ());
+ return 0;
+
+ case AVR_BUILTIN_WDR:
+ emit_insn (gen_wdr ());
+ return 0;
+
+ case AVR_BUILTIN_SLEEP:
+ emit_insn (gen_sleep ());
+ return 0;
+
+ case AVR_BUILTIN_DELAY_CYCLES:
+ {
+ arg0 = CALL_EXPR_ARG (exp, 0);
+ op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+
+ if (! CONST_INT_P (op0))
+ error ("__builtin_avr_delay_cycles expects a compile time integer constant");
+ else
+ avr_expand_delay_cycles (op0);
+ return 0;
+ }
+ }
+
+ for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
+ if (d->id == id)
+ return avr_expand_unop_builtin (d->icode, exp, target);
+
+ for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
+ if (d->id == id)
+ return avr_expand_binop_builtin (d->icode, exp, target);
+
+ gcc_unreachable ();
+}
+
+
#include "gt-avr.h"