diff --git a/gcc/targhooks.c b/gcc/targhooks.c
index a2745c4cbcd..baad65b0cac 100644
--- a/gcc/targhooks.c
+++ b/gcc/targhooks.c
@@ -1,5 +1,5 @@
 /* Default target hook functions.
-   Copyright (C) 2003, 2004 Free Software Foundation, Inc.
+   Copyright (C) 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -15,8 +15,8 @@ for more details.
 
 You should have received a copy of the GNU General Public License
 along with GCC; see the file COPYING.  If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA.  */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA.  */
 
 /* The migration of target macros to target hooks works as follows:
 
@@ -61,6 +61,12 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 #include "target.h"
 #include "tm_p.h"
 #include "target-def.h"
+#include "ggc.h"
+#include "hard-reg-set.h"
+#include "reload.h"
+#include "optabs.h"
+#include "recog.h"
+
 
 void
 default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
@@ -128,6 +134,37 @@ default_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
 	  != default_setup_incoming_varargs);
 }
 
+enum machine_mode
+default_eh_return_filter_mode (void)
+{
+  return word_mode;
+}
+
+/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */
+
+unsigned HOST_WIDE_INT
+default_shift_truncation_mask (enum machine_mode mode)
+{
+  return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
+}
+
+/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.  */
+
+unsigned int
+default_min_divisions_for_recip_mul (enum machine_mode mode ATTRIBUTE_UNUSED)
+{
+  return have_insn_for (DIV, mode) ? 3 : 2;
+}
+
+/* The default implementation of TARGET_MODE_REP_EXTENDED.  */
+
+int
+default_mode_rep_extended (enum machine_mode mode ATTRIBUTE_UNUSED,
+			   enum machine_mode mode_rep ATTRIBUTE_UNUSED)
+{
+  return UNKNOWN;
+}
+
 /* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */
 
 bool
@@ -158,7 +195,7 @@ default_cxx_get_cookie_size (tree type)
      (true_type)) bytes.  */
   tree sizetype_size;
   tree type_align;
-  
+
   sizetype_size = size_in_bytes (sizetype);
   type_align = size_int (TYPE_ALIGN_UNIT (type));
   if (INT_CST_LT_UNSIGNED (type_align, sizetype_size))
@@ -168,3 +205,430 @@ default_cxx_get_cookie_size (tree type)
 
   return cookie_size;
 }
+
+/* Return true if a parameter must be passed by reference.  This version
+   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */
+
+bool
+hook_pass_by_reference_must_pass_in_stack (CUMULATIVE_ARGS *c ATTRIBUTE_UNUSED,
+	enum machine_mode mode ATTRIBUTE_UNUSED, tree type ATTRIBUTE_UNUSED,
+	bool named_arg ATTRIBUTE_UNUSED)
+{
+  return targetm.calls.must_pass_in_stack (mode, type);
+}
+
+/* Return true if a parameter follows callee copies conventions.  This
+   version of the hook is true for all named arguments.  */
+
+bool
+hook_callee_copies_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+			  enum machine_mode mode ATTRIBUTE_UNUSED,
+			  tree type ATTRIBUTE_UNUSED, bool named)
+{
+  return named;
+}
+
+/* Emit any directives required to unwind this instruction.  */
+
+void
+default_unwind_emit (FILE * stream ATTRIBUTE_UNUSED,
+		     rtx insn ATTRIBUTE_UNUSED)
+{
+  /* Should never happen.  */
+  gcc_unreachable ();
+}
+
+/* True if MODE is valid for the target.  By "valid", we mean able to
+   be manipulated in non-trivial ways.  In particular, this means all
+   the arithmetic is supported.
+
+   By default we guess this means that any C type is supported.  If
+   we can't map the mode back to a type that would be available in C,
+   then reject it.  A special case here is the double-word arithmetic
+   supported by optabs.c.  */
+
+bool
+default_scalar_mode_supported_p (enum machine_mode mode)
+{
+  int precision = GET_MODE_PRECISION (mode);
+
+  switch (GET_MODE_CLASS (mode))
+    {
+    case MODE_PARTIAL_INT:
+    case MODE_INT:
+      if (precision == CHAR_TYPE_SIZE)
+	return true;
+      if (precision == SHORT_TYPE_SIZE)
+	return true;
+      if (precision == INT_TYPE_SIZE)
+	return true;
+      if (precision == LONG_TYPE_SIZE)
+	return true;
+      if (precision == LONG_LONG_TYPE_SIZE)
+	return true;
+      if (precision == 2 * BITS_PER_WORD)
+	return true;
+      return false;
+
+    case MODE_FLOAT:
+      if (precision == FLOAT_TYPE_SIZE)
+	return true;
+      if (precision == DOUBLE_TYPE_SIZE)
+	return true;
+      if (precision == LONG_DOUBLE_TYPE_SIZE)
+	return true;
+      return false;
+
+    case MODE_DECIMAL_FLOAT:
+      return false;
+
+    default:
+      gcc_unreachable ();
+    }
+}
+
+/* True if the target supports decimal floating point.  */
+
+bool
+default_decimal_float_supported_p (void)
+{
+  return ENABLE_DECIMAL_FLOAT;
+}
+
+/* NULL if INSN is valid within a low-overhead loop, otherwise returns
+   an error message.
+
+   This function checks whether a given INSN is valid within a low-overhead
+   loop.  If INSN is invalid it returns the reason for that, otherwise it
+   returns NULL.  A called function may clobber any special registers
+   required for low-overhead looping.  Additionally, some targets (e.g.,
+   PPC) use the count register for branch on table instructions.  We reject
+   the doloop pattern in these cases.  */
+
+const char *
+default_invalid_within_doloop (rtx insn)
+{
+  if (CALL_P (insn))
+    return "Function call in loop.";
+
+  if (JUMP_P (insn)
+      && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
+	  || GET_CODE (PATTERN (insn)) == ADDR_VEC))
+    return "Computed branch in the loop.";
+
+  return NULL;
+}
+
+/* Mapping of builtin functions to vectorized variants.  */
+
+tree
+default_builtin_vectorized_function (enum built_in_function fn ATTRIBUTE_UNUSED,
+				     tree type_out ATTRIBUTE_UNUSED,
+				     tree type_in ATTRIBUTE_UNUSED)
+{
+  return NULL_TREE;
+}
+
+/* Vectorized conversion.  */
+
+tree
+default_builtin_vectorized_conversion (enum tree_code code ATTRIBUTE_UNUSED,
+				       tree type ATTRIBUTE_UNUSED)
+{
+  return NULL_TREE;
+}
+
+bool
+hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
+	CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+	enum machine_mode mode ATTRIBUTE_UNUSED,
+	tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
+{
+  return false;
+}
+
+bool
+hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
+	CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+	enum machine_mode mode ATTRIBUTE_UNUSED,
+	tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
+{
+  return true;
+}
+
+int
+hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
+	CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+	enum machine_mode mode ATTRIBUTE_UNUSED,
+	tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
+{
+  return 0;
+}
+
+void
+hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
+{
+}
+
+const char *
+hook_invalid_arg_for_unprototyped_fn (
+	tree typelist ATTRIBUTE_UNUSED,
+	tree funcdecl ATTRIBUTE_UNUSED,
+	tree val ATTRIBUTE_UNUSED)
+{
+  return NULL;
+}
+
+/* Initialize the stack protection decls.  */
+
+/* Stack protection related decls living in libgcc.  */
+static GTY(()) tree stack_chk_guard_decl;
+
+tree
+default_stack_protect_guard (void)
+{
+  tree t = stack_chk_guard_decl;
+
+  if (t == NULL)
+    {
+      t = build_decl (VAR_DECL, get_identifier ("__stack_chk_guard"),
+		      ptr_type_node);
+      TREE_STATIC (t) = 1;
+      TREE_PUBLIC (t) = 1;
+      DECL_EXTERNAL (t) = 1;
+      TREE_USED (t) = 1;
+      TREE_THIS_VOLATILE (t) = 1;
+      DECL_ARTIFICIAL (t) = 1;
+      DECL_IGNORED_P (t) = 1;
+
+      stack_chk_guard_decl = t;
+    }
+
+  return t;
+}
+
+static GTY(()) tree stack_chk_fail_decl;
+
+tree
+default_external_stack_protect_fail (void)
+{
+  tree t = stack_chk_fail_decl;
+
+  if (t == NULL_TREE)
+    {
+      t = build_function_type_list (void_type_node, NULL_TREE);
+      t = build_decl (FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
+      TREE_STATIC (t) = 1;
+      TREE_PUBLIC (t) = 1;
+      DECL_EXTERNAL (t) = 1;
+      TREE_USED (t) = 1;
+      TREE_THIS_VOLATILE (t) = 1;
+      TREE_NOTHROW (t) = 1;
+      DECL_ARTIFICIAL (t) = 1;
+      DECL_IGNORED_P (t) = 1;
+      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
+      DECL_VISIBILITY_SPECIFIED (t) = 1;
+
+      stack_chk_fail_decl = t;
+    }
+
+  return build_call_expr (t, 0);
+}
+
+tree
+default_hidden_stack_protect_fail (void)
+{
+#ifndef HAVE_GAS_HIDDEN
+  return default_external_stack_protect_fail ();
+#else
+  tree t = stack_chk_fail_decl;
+
+  if (!flag_pic)
+    return default_external_stack_protect_fail ();
+
+  if (t == NULL_TREE)
+    {
+      t = build_function_type_list (void_type_node, NULL_TREE);
+      t = build_decl (FUNCTION_DECL,
+		      get_identifier ("__stack_chk_fail_local"), t);
+      TREE_STATIC (t) = 1;
+      TREE_PUBLIC (t) = 1;
+      DECL_EXTERNAL (t) = 1;
+      TREE_USED (t) = 1;
+      TREE_THIS_VOLATILE (t) = 1;
+      TREE_NOTHROW (t) = 1;
+      DECL_ARTIFICIAL (t) = 1;
+      DECL_IGNORED_P (t) = 1;
+      DECL_VISIBILITY_SPECIFIED (t) = 1;
+      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;
+
+      stack_chk_fail_decl = t;
+    }
+
+  return build_call_expr (t, 0);
+#endif
+}
+
+bool
+hook_bool_rtx_commutative_p (rtx x, int outer_code ATTRIBUTE_UNUSED)
+{
+  return COMMUTATIVE_P (x);
+}
+
+rtx
+default_function_value (tree ret_type ATTRIBUTE_UNUSED,
+			tree fn_decl_or_type,
+			bool outgoing ATTRIBUTE_UNUSED)
+{
+  /* The old interface doesn't handle receiving the function type.  */
+  if (fn_decl_or_type
+      && !DECL_P (fn_decl_or_type))
+    fn_decl_or_type = NULL;
+
+#ifdef FUNCTION_OUTGOING_VALUE
+  if (outgoing)
+    return FUNCTION_OUTGOING_VALUE (ret_type, fn_decl_or_type);
+#endif
+
+#ifdef FUNCTION_VALUE
+  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
+#else
+  return NULL_RTX;
+#endif
+}
+
+rtx
+default_internal_arg_pointer (void)
+{
+  /* If the reg that the virtual arg pointer will be translated into is
+     not a fixed reg or is the stack pointer, make a copy of the virtual
+     arg pointer, and address parms via the copy.  The frame pointer is
+     considered fixed even though it is not marked as such.  */
+  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
+       || ! (fixed_regs[ARG_POINTER_REGNUM]
+	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
+    return copy_to_reg (virtual_incoming_args_rtx);
+  else
+    return virtual_incoming_args_rtx;
+}
+
+enum reg_class
+default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
+			  enum reg_class reload_class ATTRIBUTE_UNUSED,
+			  enum machine_mode reload_mode ATTRIBUTE_UNUSED,
+			  secondary_reload_info *sri)
+{
+  enum reg_class class = NO_REGS;
+
+  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
+    {
+      sri->icode = sri->prev_sri->t_icode;
+      return NO_REGS;
+    }
+#ifdef SECONDARY_INPUT_RELOAD_CLASS
+  if (in_p)
+    class = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
+#endif
+#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
+  if (! in_p)
+    class = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
+#endif
+  if (class != NO_REGS)
+    {
+      enum insn_code icode = (in_p ? reload_in_optab[(int) reload_mode]
+			      : reload_out_optab[(int) reload_mode]);
+
+      if (icode != CODE_FOR_nothing
+	  && insn_data[(int) icode].operand[in_p].predicate
+	  && ! insn_data[(int) icode].operand[in_p].predicate (x, reload_mode))
+	icode = CODE_FOR_nothing;
+      else if (icode != CODE_FOR_nothing)
+	{
+	  const char *insn_constraint, *scratch_constraint;
+	  char insn_letter, scratch_letter;
+	  enum reg_class insn_class, scratch_class;
+
+	  gcc_assert (insn_data[(int) icode].n_operands == 3);
+	  insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
+	  if (!*insn_constraint)
+	    insn_class = ALL_REGS;
+	  else
+	    {
+	      if (in_p)
+		{
+		  gcc_assert (*insn_constraint == '=');
+		  insn_constraint++;
+		}
+	      insn_letter = *insn_constraint;
+	      insn_class
+		= (insn_letter == 'r' ? GENERAL_REGS
+		   : REG_CLASS_FROM_CONSTRAINT ((unsigned char) insn_letter,
+						insn_constraint));
+	      gcc_assert (insn_class != NO_REGS);
+	    }
+
+	  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
+	  /* The scratch register's constraint must start with "=&",
+	     except for an input reload, where only "=" is necessary,
+	     and where it might be beneficial to re-use registers from
+	     the input.  */
+	  gcc_assert (scratch_constraint[0] == '='
+		      && (in_p || scratch_constraint[1] == '&'));
+	  scratch_constraint++;
+	  if (*scratch_constraint == '&')
+	    scratch_constraint++;
+	  scratch_letter = *scratch_constraint;
+	  scratch_class
+	    = (scratch_letter == 'r' ? GENERAL_REGS
+	       : REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
+					    scratch_constraint));
+
+	  if (reg_class_subset_p (reload_class, insn_class))
+	    {
+	      gcc_assert (scratch_class == class);
+	      class = NO_REGS;
+	    }
+	  else
+	    class = insn_class;
+
+	}
+      if (class == NO_REGS)
+	sri->icode = icode;
+      else
+	sri->t_icode = icode;
+    }
+  return class;
+}
+
+
+/* If STRICT_ALIGNMENT is true, we use the container type for accessing
+   volatile bitfields.  This is generally the preferred behavior for
+   memory-mapped peripherals on RISC architectures.
+   If STRICT_ALIGNMENT is false, we use the narrowest type possible.  This
+   is typically used to avoid spurious page faults and extra memory accesses
+   due to unaligned accesses on CISC architectures.  */
+
+bool
+default_narrow_bitfield (void)
+{
+  return !STRICT_ALIGNMENT;
+}
+
+bool
+default_handle_c_option (size_t code ATTRIBUTE_UNUSED,
+			 const char *arg ATTRIBUTE_UNUSED,
+			 int value ATTRIBUTE_UNUSED)
+{
+  return false;
+}
+
+/* By default, if flag_pic is true, then neither local nor global relocs
+   should be placed in readonly memory.  */
+
+int
+default_reloc_rw_mask (void)
+{
+  return flag_pic ? 3 : 0;
+}
+
+#include "gt-targhooks.h"
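
A usage sketch, separate from the patch above: a back end adopts a
non-default version of one of these hooks by redefining the corresponding
TARGET_* macro in its own target .c file before expanding TARGET_INITIALIZER.
The function example_shift_truncation_mask and its truncation policy are
invented here for illustration; only the override pattern itself is the
standard one used by GCC ports.

/* Hypothetical port: integer shift instructions truncate their count
   to the mode width, so report a mask of GET_MODE_BITSIZE (mode) - 1
   for integer modes and 0 (no truncation promised) for everything else.  */

static unsigned HOST_WIDE_INT
example_shift_truncation_mask (enum machine_mode mode)
{
  return GET_MODE_CLASS (mode) == MODE_INT
	 ? GET_MODE_BITSIZE (mode) - 1 : 0;
}

#undef TARGET_SHIFT_TRUNCATION_MASK
#define TARGET_SHIFT_TRUNCATION_MASK example_shift_truncation_mask

struct gcc_target targetm = TARGET_INITIALIZER;

With such a definition in place, RTL passes that consult
targetm.shift_truncation_mask can treat an explicit masking of a shift
count, such as "count & 31" feeding an SImode shift on a 32-bit target,
as redundant, since the hook promises the hardware applies the same
truncation.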