/* Default target hook functions.
- Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009
+ Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
/* The migration of target macros to target hooks works as follows:
#include "recog.h"
+bool
+default_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+ rtx addr ATTRIBUTE_UNUSED,
+ bool strict ATTRIBUTE_UNUSED)
+{
+#ifdef GO_IF_LEGITIMATE_ADDRESS
+ /* Defer to the old implementation using a goto. */
+ if (strict)
+ return strict_memory_address_p (mode, addr);
+ else
+ return memory_address_p (mode, addr);
+#else
+ gcc_unreachable ();
+#endif
+}
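
Targets that have migrated off GO_IF_LEGITIMATE_ADDRESS override this hook
directly instead.  A minimal sketch of such an override, for a hypothetical
machine that accepts only REG and REG+offset addresses (the function name,
the signed 16-bit offset range, and the reliance on the port's own
REGNO_OK_FOR_BASE_P macro are illustrative assumptions):

static bool
example_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
			      rtx addr, bool strict)
{
  /* A bare base register; in strict mode it must pass the port's
     hard-register base check.  */
  if (REG_P (addr))
    return !strict || REGNO_OK_FOR_BASE_P (REGNO (addr));

  /* Base register plus a signed 16-bit constant offset.  */
  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0))
      && CONST_INT_P (XEXP (addr, 1))
      && IN_RANGE (INTVAL (XEXP (addr, 1)), -32768, 32767))
    return !strict || REGNO_OK_FOR_BASE_P (REGNO (XEXP (addr, 0)));

  return false;
}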
+
void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (fun);
#endif
}
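+/* Default implementation of TARGET_UNSPEC_MAY_TRAP_P: an UNSPEC is
+   assumed to trap if it is volatile, if it does floating-point
+   arithmetic under -ftrapping-math, or if any of its operands may
+   itself trap.  */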
+int
+default_unspec_may_trap_p (const_rtx x, unsigned flags)
+{
+ int i;
+
+ if (GET_CODE (x) == UNSPEC_VOLATILE
+ /* Any floating arithmetic may trap. */
+ || (SCALAR_FLOAT_MODE_P (GET_MODE (x))
+ && flag_trapping_math))
+ return 1;
+
+ for (i = 0; i < XVECLEN (x, 0); ++i)
+ {
+ if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
+ return 1;
+ }
+
+ return 0;
+}
+
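+/* Default implementation of TARGET_PROMOTE_FUNCTION_MODE: keep the
+   declared mode, except when FOR_RETURN is 2 (as for libcall return
+   values), where the old PROMOTE_MODE behavior is applied via
+   promote_mode.  */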
+enum machine_mode
+default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
+ enum machine_mode mode,
+ int *punsignedp ATTRIBUTE_UNUSED,
+ const_tree funtype ATTRIBUTE_UNUSED,
+ int for_return ATTRIBUTE_UNUSED)
+{
+ if (for_return == 2)
+ return promote_mode (type, mode, punsignedp);
+ return mode;
+}
+
+enum machine_mode
+default_promote_function_mode_always_promote (const_tree type,
+ enum machine_mode mode,
+ int *punsignedp,
+ const_tree funtype ATTRIBUTE_UNUSED,
+ int for_return ATTRIBUTE_UNUSED)
+{
+ return promote_mode (type, mode, punsignedp);
+}
+
+
enum machine_mode
default_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
{
  if (m1 == m2)
    return m1;
  return VOIDmode;
}
bool
-default_return_in_memory (tree type,
- tree fntype ATTRIBUTE_UNUSED)
+default_return_in_memory (const_tree type,
+ const_tree fntype ATTRIBUTE_UNUSED)
{
-#ifndef RETURN_IN_MEMORY
return (TYPE_MODE (type) == BLKmode);
-#else
- return RETURN_IN_MEMORY (type);
-#endif
+}
+
+rtx
+default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
+ enum machine_mode mode ATTRIBUTE_UNUSED)
+{
+ return x;
}
rtx
enum machine_mode
default_eh_return_filter_mode (void)
{
+ return targetm.unwind_word_mode ();
+}
+
+enum machine_mode
+default_libgcc_cmp_return_mode (void)
+{
+ return word_mode;
+}
+
+enum machine_mode
+default_libgcc_shift_count_mode (void)
+{
+ return word_mode;
+}
+
+enum machine_mode
+default_unwind_word_mode (void)
+{
return word_mode;
}
return true;
}
+/* Return the machine mode for a non-standard constant suffix, or
+   VOIDmode if non-standard suffixes are unsupported.  */
+enum machine_mode
+default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
+{
+ return VOIDmode;
+}
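
As an illustration, a target with an 80-bit extended float type could claim
a suffix here; in this sketch the 'w' suffix and the XFmode pairing are
illustrative assumptions (loosely modeled on i386), not mandated by the hook:

static enum machine_mode
example_c_mode_for_suffix (char suffix)
{
  /* Make constants like 1.5w come out in XFmode; all other
     non-standard suffixes remain unsupported.  */
  if (suffix == 'w')
    return XFmode;
  return VOIDmode;
}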
/* The generic C++ ABI specifies this is a 64-bit value.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}
bool
hook_pass_by_reference_must_pass_in_stack (CUMULATIVE_ARGS *c ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED, tree type ATTRIBUTE_UNUSED,
+ enum machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
bool named_arg ATTRIBUTE_UNUSED)
{
  return targetm.calls.must_pass_in_stack (mode, type);
}
bool
hook_callee_copies_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
- tree type ATTRIBUTE_UNUSED, bool named)
+ const_tree type ATTRIBUTE_UNUSED, bool named)
{
return named;
}
return false;
case MODE_DECIMAL_FLOAT:
+ case MODE_FRACT:
+ case MODE_UFRACT:
+ case MODE_ACCUM:
+ case MODE_UACCUM:
return false;
default:
return ENABLE_DECIMAL_FLOAT;
}
+/* True if the target supports fixed-point arithmetic. */
+
+bool
+default_fixed_point_supported_p (void)
+{
+ return ENABLE_FIXED_POINT;
+}
+
/* NULL if INSN is valid within a low-overhead loop, otherwise returns
   an error message.  Function calls and computed branches may clobber
   registers that the target dedicates to low-overhead looping, so the
   doloop pattern is rejected in these cases.  */
const char *
-default_invalid_within_doloop (rtx insn)
+default_invalid_within_doloop (const_rtx insn)
{
if (CALL_P (insn))
return "Function call in loop.";
- if (JUMP_P (insn)
- && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_VEC))
+ if (JUMP_TABLE_DATA_P (insn))
return "Computed branch in the loop.";
  return NULL;
}
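
A target-specific version usually layers its own checks on top of these.
A sketch for a hypothetical target whose loop counter lives in the hard
register LOOP_COUNT_REGNUM (an invented macro for this example):

static const char *
example_invalid_within_doloop (const_rtx insn)
{
  /* Keep the default rejections...  */
  if (CALL_P (insn))
    return "Function call in loop.";
  if (JUMP_TABLE_DATA_P (insn))
    return "Computed branch in the loop.";

  /* ...and also reject anything that mentions the loop-count
     register.  */
  if (reg_mentioned_p (gen_rtx_REG (SImode, LOOP_COUNT_REGNUM),
		       PATTERN (insn)))
    return "Loop counter used in the loop.";

  return NULL;
}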
/* Mapping of builtin functions to vectorized variants. */
tree
-default_builtin_vectorized_function (enum built_in_function fn ATTRIBUTE_UNUSED,
+default_builtin_vectorized_function (unsigned int fn ATTRIBUTE_UNUSED,
tree type_out ATTRIBUTE_UNUSED,
tree type_in ATTRIBUTE_UNUSED)
{
/* Vectorized conversion. */
tree
-default_builtin_vectorized_conversion (enum tree_code code ATTRIBUTE_UNUSED,
+default_builtin_vectorized_conversion (unsigned int code ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED)
{
return NULL_TREE;
}
+/* Reciprocal: by default, no target-specific reciprocal builtin or
+   expansion is available.  */
+
+tree
+default_builtin_reciprocal (unsigned int fn ATTRIBUTE_UNUSED,
+ bool md_fn ATTRIBUTE_UNUSED,
+ bool sqrt ATTRIBUTE_UNUSED)
+{
+ return NULL_TREE;
+}
+
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
- tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
+ const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
return false;
}
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
- tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
+ const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
return true;
}
const char *
hook_invalid_arg_for_unprototyped_fn (
- tree typelist ATTRIBUTE_UNUSED,
- tree funcdecl ATTRIBUTE_UNUSED,
- tree val ATTRIBUTE_UNUSED)
+ const_tree typelist ATTRIBUTE_UNUSED,
+ const_tree funcdecl ATTRIBUTE_UNUSED,
+ const_tree val ATTRIBUTE_UNUSED)
{
return NULL;
}
if (t == NULL)
{
- t = build_decl (VAR_DECL, get_identifier ("__stack_chk_guard"),
+ t = build_decl (UNKNOWN_LOCATION,
+ VAR_DECL, get_identifier ("__stack_chk_guard"),
ptr_type_node);
TREE_STATIC (t) = 1;
TREE_PUBLIC (t) = 1;
if (t == NULL_TREE)
{
t = build_function_type_list (void_type_node, NULL_TREE);
- t = build_decl (FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
+ t = build_decl (UNKNOWN_LOCATION,
+ FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
TREE_STATIC (t) = 1;
TREE_PUBLIC (t) = 1;
DECL_EXTERNAL (t) = 1;
if (t == NULL_TREE)
{
t = build_function_type_list (void_type_node, NULL_TREE);
- t = build_decl (FUNCTION_DECL,
+ t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
get_identifier ("__stack_chk_fail_local"), t);
TREE_STATIC (t) = 1;
TREE_PUBLIC (t) = 1;
}
bool
-hook_bool_rtx_commutative_p (rtx x, int outer_code ATTRIBUTE_UNUSED)
+hook_bool_const_rtx_commutative_p (const_rtx x,
+ int outer_code ATTRIBUTE_UNUSED)
{
return COMMUTATIVE_P (x);
}
rtx
-default_function_value (tree ret_type ATTRIBUTE_UNUSED,
- tree fn_decl_or_type,
+default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
+ const_tree fn_decl_or_type,
bool outgoing ATTRIBUTE_UNUSED)
{
/* The old interface doesn't handle receiving the function type. */
#ifdef FUNCTION_VALUE
return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
- return NULL_RTX;
+ gcc_unreachable ();
+#endif
+}
+
+rtx
+default_libcall_value (enum machine_mode mode ATTRIBUTE_UNUSED,
+ const_rtx fun ATTRIBUTE_UNUSED)
+{
+#ifdef LIBCALL_VALUE
+ return LIBCALL_VALUE (mode);
+#else
+ gcc_unreachable ();
#endif
}
return virtual_incoming_args_rtx;
}
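+/* Default implementation of TARGET_STATIC_CHAIN: use the classic
+   STATIC_CHAIN_REGNUM / STATIC_CHAIN_INCOMING_REGNUM macros, and
+   issue a "sorry" for nested functions when neither is defined.  */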
+rtx
+default_static_chain (const_tree fndecl, bool incoming_p)
+{
+ if (!DECL_STATIC_CHAIN (fndecl))
+ return NULL;
+
+ if (incoming_p)
+ {
+#ifdef STATIC_CHAIN_INCOMING_REGNUM
+ return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
+#endif
+ }
+
+#ifdef STATIC_CHAIN_REGNUM
+ return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
+#endif
+
+ {
+ static bool issued_error;
+ if (!issued_error)
+ {
+ issued_error = true;
+ sorry ("nested functions not supported on this target");
+ }
+
+    /* It really doesn't matter what we return here, so long as it
+       doesn't cause the rest of the compiler to crash.  */
+ return gen_rtx_MEM (Pmode, stack_pointer_rtx);
+ }
+}
+
+void
+default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
+ rtx ARG_UNUSED (r_chain))
+{
+ sorry ("nested function trampolines not supported on this target");
+}
+
+enum reg_class
+default_branch_target_register_class (void)
+{
+ return NO_REGS;
+}
+
+#ifdef IRA_COVER_CLASSES
+const enum reg_class *
+default_ira_cover_classes (void)
+{
+ static enum reg_class classes[] = IRA_COVER_CLASSES;
+ return classes;
+}
+#endif
+
enum reg_class
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
enum reg_class reload_class ATTRIBUTE_UNUSED,
enum machine_mode reload_mode ATTRIBUTE_UNUSED,
secondary_reload_info *sri)
{
- enum reg_class class = NO_REGS;
+ enum reg_class rclass = NO_REGS;
if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
{
}
#ifdef SECONDARY_INPUT_RELOAD_CLASS
if (in_p)
- class = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
+ rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
if (! in_p)
- class = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
+ rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
- if (class != NO_REGS)
+ if (rclass != NO_REGS)
{
enum insn_code icode = (in_p ? reload_in_optab[(int) reload_mode]
: reload_out_optab[(int) reload_mode]);
if (reg_class_subset_p (reload_class, insn_class))
{
- gcc_assert (scratch_class == class);
- class = NO_REGS;
+ gcc_assert (scratch_class == rclass);
+ rclass = NO_REGS;
}
else
- class = insn_class;
+ rclass = insn_class;
}
- if (class == NO_REGS)
+ if (rclass == NO_REGS)
sri->icode = icode;
else
sri->t_icode = icode;
}
- return class;
+ return rclass;
+}
+
+bool
+default_handle_c_option (size_t code ATTRIBUTE_UNUSED,
+ const char *arg ATTRIBUTE_UNUSED,
+ int value ATTRIBUTE_UNUSED)
+{
+ return false;
+}
+
+/* By default, if flag_pic is true, then neither local nor global relocs
+ should be placed in readonly memory. */
+
+int
+default_reloc_rw_mask (void)
+{
+ return flag_pic ? 3 : 0;
+}
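
Bit 0 of the mask covers local relocations and bit 1 covers global ones;
a set bit keeps the corresponding relocations out of read-only sections.
A hypothetical override for a system whose loader can process relocations
against local symbols in read-only segments:

static int
example_reloc_rw_mask (void)
{
  /* Under PIC, only global relocations need writable memory.  */
  return flag_pic ? 2 : 0;
}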
+
+/* By default, do no modification.  */
+
+tree
+default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
+				    tree id)
+{
+ return id;
}
+bool
+default_builtin_vector_alignment_reachable (const_tree type, bool is_packed)
+{
+ if (is_packed)
+ return false;
+
+  /* Assume that types whose size exceeds the pointer size are not
+     guaranteed to be naturally aligned.  */
+  if (tree_int_cst_compare (TYPE_SIZE (type), bitsize_int (POINTER_SIZE)) > 0)
+    return false;
+
+  /* Assume that types whose size is at most the pointer size are
+     naturally aligned.  */
+ return true;
+}
-/* If STRICT_ALIGNMENT is true we use the container type for accessing
- volatile bitfields. This is generally the preferred behavior for memory
- mapped peripherals on RISC architectures.
- If STRICT_ALIGNMENT is false we use the narrowest type possible. This
- is typically used to avoid spurious page faults and extra memory accesses
- due to unaligned accesses on CISC architectures. */
+/* By default, assume that a target supports misaligned memory accesses
+   of any factor if it provides a movmisalign pattern.
+   IS_PACKED is true if the memory access is defined in a packed struct.  */
+bool
+default_builtin_support_vector_misalignment (enum machine_mode mode,
+ const_tree type
+ ATTRIBUTE_UNUSED,
+ int misalignment
+ ATTRIBUTE_UNUSED,
+ bool is_packed
+ ATTRIBUTE_UNUSED)
+{
+ if (optab_handler (movmisalign_optab, mode)->insn_code != CODE_FOR_nothing)
+ return true;
+ return false;
+}
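
A target can refine this by accepting only some misalignment factors.  A
sketch for a hypothetical target that handles misaligned accesses only at
element-size granularity, falling back to the default movmisalign check
otherwise (a misalignment of -1 means it is unknown at compile time):

static bool
example_support_vector_misalignment (enum machine_mode mode, const_tree type,
				     int misalignment, bool is_packed)
{
  if (misalignment == -1)
    return false;
  if (misalignment % GET_MODE_UNIT_SIZE (mode) != 0)
    return false;
  return default_builtin_support_vector_misalignment (mode, type,
						      misalignment,
						      is_packed);
}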
+/* Determine whether or not a pointer mode is valid.  By default only
+   ptr_mode and Pmode are valid; targets can override this hook.  */
bool
-default_narrow_bitfield (void)
+default_valid_pointer_mode (enum machine_mode mode)
+{
+ return (mode == ptr_mode || mode == Pmode);
+}
+
+/* Return the mode for a pointer to a given ADDRSPACE, defaulting to ptr_mode
+ for the generic address space only. */
+
+enum machine_mode
+default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
+{
+ gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
+ return ptr_mode;
+}
+
+/* Return the mode for an address in a given ADDRSPACE, defaulting to Pmode
+ for the generic address space only. */
+
+enum machine_mode
+default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
- return !STRICT_ALIGNMENT;
+ gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
+ return Pmode;
+}
+
+/* Named address space version of valid_pointer_mode. */
+
+bool
+default_addr_space_valid_pointer_mode (enum machine_mode mode, addr_space_t as)
+{
+ if (!ADDR_SPACE_GENERIC_P (as))
+ return (mode == targetm.addr_space.pointer_mode (as)
+ || mode == targetm.addr_space.address_mode (as));
+
+ return targetm.valid_pointer_mode (mode);
+}
+
+/* Some places still assume that all pointer or address modes are the
+ standard Pmode and ptr_mode. These optimizations become invalid if
+ the target actually supports multiple different modes. For now,
+ we disable such optimizations on such targets, using this function. */
+
+bool
+target_default_pointer_address_modes_p (void)
+{
+ if (targetm.addr_space.address_mode != default_addr_space_address_mode)
+ return false;
+ if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
+ return false;
+
+ return true;
+}
+
+/* Named address space version of legitimate_address_p. */
+
+bool
+default_addr_space_legitimate_address_p (enum machine_mode mode, rtx mem,
+ bool strict, addr_space_t as)
+{
+ if (!ADDR_SPACE_GENERIC_P (as))
+ gcc_unreachable ();
+
+ return targetm.legitimate_address_p (mode, mem, strict);
+}
+
+/* Named address space version of LEGITIMIZE_ADDRESS. */
+
+rtx
+default_addr_space_legitimize_address (rtx x, rtx oldx,
+ enum machine_mode mode, addr_space_t as)
+{
+ if (!ADDR_SPACE_GENERIC_P (as))
+ return x;
+
+ return targetm.legitimize_address (x, oldx, mode);
+}
+
+/* The default hook for determining whether one named address space is
+   a subset of another, and for choosing which address space to use as
+   their common address space.  */
+
+bool
+default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
+{
+ return (subset == superset);
+}
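
A target with named address spaces would widen this.  For example, if a
hypothetical __far space (the name and space number 1 are invented here)
can address everything the generic space can:

#define EXAMPLE_AS_FAR 1	/* Invented address space number.  */

static bool
example_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  /* Every space is a subset of itself, and the generic space is a
     subset of __far.  */
  return (subset == superset
	  || (superset == EXAMPLE_AS_FAR && subset == ADDR_SPACE_GENERIC));
}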
+
+/* The default hook for TARGET_ADDR_SPACE_CONVERT. This hook should never be
+ called for targets with only a generic address space. */
+
+rtx
+default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
+ tree from_type ATTRIBUTE_UNUSED,
+ tree to_type ATTRIBUTE_UNUSED)
+{
+ gcc_unreachable ();
+}
+
+bool
+default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
+{
+ return true;
+}
+
+bool
+default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
+ tree ARG_UNUSED (name),
+ tree ARG_UNUSED (args),
+ int ARG_UNUSED (flags))
+{
+ warning (OPT_Wattributes,
+ "target attribute is not supported on this machine");
+
+ return false;
+}
+
+bool
+default_target_option_pragma_parse (tree ARG_UNUSED (args),
+ tree ARG_UNUSED (pop_target))
+{
+ warning (OPT_Wpragmas,
+ "#pragma GCC target is not supported for this machine");
+
+ return false;
+}
+
+bool
+default_target_can_inline_p (tree caller, tree callee)
+{
+ bool ret = false;
+ tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
+ tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
+
+  /* If the callee has no option attributes, then it is ok to inline.  */
+ if (!callee_opts)
+ ret = true;
+
+  /* If the caller has no option attributes but the callee does, then it
+     is not ok to inline.  */
+ else if (!caller_opts)
+ ret = false;
+
+  /* If both caller and callee have attributes, assume that if the
+     pointers are different, the two functions have different target
+     options, since build_target_option_node uses a hash table for the
+     options.  */
+ else
+ ret = (callee_opts == caller_opts);
+
+ return ret;
+}
+
+#ifndef HAVE_casesi
+# define HAVE_casesi 0
+#endif
+
+/* If the machine does not have a case insn that compares the bounds,
+ this means extra overhead for dispatch tables, which raises the
+ threshold for using them. */
+
+unsigned int
+default_case_values_threshold (void)
+{
+ return (HAVE_casesi ? 4 : 5);
+}
+
+bool
+default_have_conditional_execution (void)
+{
+#ifdef HAVE_conditional_execution
+ return HAVE_conditional_execution;
+#else
+ return false;
+#endif
}
#include "gt-targhooks.h"