/* Default target hook functions.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* The migration of target macros to target hooks works as follows:

   1. Create a target hook that uses the existing target macros to
      implement the same functionality.

   2. Convert all the MI files to use the hook instead of the macro.

   3. Repeat for a majority of the remaining target macros.  This will
      take some time.

   4. Tell target maintainers to start migrating.

   5. Eventually convert the backends to override the hook instead of
      defining the macros.  This will take some time too.

   6. TBD when, poison the macros.  Unmigrated targets will break at
      this point.

   Note that we expect steps 1-3 to be done by the people that
   understand what the MI does with each macro, and step 5 to be done
   by the target maintainers for their respective targets.

   Note that steps 1 and 2 don't have to be done together, but no
   target can override the new hook until step 2 is complete for it.

   Once the macros are poisoned, we will revert to the old migration
   rules - migrate the macro, callers, and targets all at once.  This
   comment can thus be removed at that point.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "expr.h"
#include "output.h"
#include "toplev.h"
#include "function.h"
#include "target.h"
#include "tm_p.h"
#include "target-def.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "reload.h"
#include "optabs.h"
72 default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
74 #ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
75 ASM_OUTPUT_EXTERNAL_LIBCALL(asm_out_file, fun);
80 default_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
88 default_return_in_memory (tree type,
89 tree fntype ATTRIBUTE_UNUSED)
91 #ifndef RETURN_IN_MEMORY
92 return (TYPE_MODE (type) == BLKmode);
94 return RETURN_IN_MEMORY (type);
99 default_expand_builtin_saveregs (void)
101 error ("__builtin_saveregs not supported by this target");
106 default_setup_incoming_varargs (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
107 enum machine_mode mode ATTRIBUTE_UNUSED,
108 tree type ATTRIBUTE_UNUSED,
109 int *pretend_arg_size ATTRIBUTE_UNUSED,
110 int second_time ATTRIBUTE_UNUSED)
114 /* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE. */
117 default_builtin_setjmp_frame_value (void)
119 return virtual_stack_vars_rtx;
122 /* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false. */
125 hook_bool_CUMULATIVE_ARGS_false (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
131 default_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
133 return (targetm.calls.setup_incoming_varargs
134 != default_setup_incoming_varargs);
138 default_eh_return_filter_mode (void)
143 /* The default implementation of TARGET_SHIFT_TRUNCATION_MASK. */
145 unsigned HOST_WIDE_INT
146 default_shift_truncation_mask (enum machine_mode mode)
148 return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
151 /* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL. */
154 default_min_divisions_for_recip_mul (enum machine_mode mode ATTRIBUTE_UNUSED)
156 return have_insn_for (DIV, mode) ? 3 : 2;
159 /* The default implementation of TARGET_MODE_REP_EXTENDED. */
162 default_mode_rep_extended (enum machine_mode mode ATTRIBUTE_UNUSED,
163 enum machine_mode mode_rep ATTRIBUTE_UNUSED)
168 /* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true. */
171 hook_bool_CUMULATIVE_ARGS_true (CUMULATIVE_ARGS * a ATTRIBUTE_UNUSED)
177 /* The generic C++ ABI specifies this is a 64-bit value. */
179 default_cxx_guard_type (void)
181 return long_long_integer_type_node;
185 /* Returns the size of the cookie to use when allocating an array
186 whose elements have the indicated TYPE. Assumes that it is already
187 known that a cookie is needed. */
190 default_cxx_get_cookie_size (tree type)
194 /* We need to allocate an additional max (sizeof (size_t), alignof
195 (true_type)) bytes. */
199 sizetype_size = size_in_bytes (sizetype);
200 type_align = size_int (TYPE_ALIGN_UNIT (type));
201 if (INT_CST_LT_UNSIGNED (type_align, sizetype_size))
202 cookie_size = sizetype_size;
204 cookie_size = type_align;
209 /* Return true if a parameter must be passed by reference. This version
210 of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK. */
213 hook_pass_by_reference_must_pass_in_stack (CUMULATIVE_ARGS *c ATTRIBUTE_UNUSED,
214 enum machine_mode mode ATTRIBUTE_UNUSED, tree type ATTRIBUTE_UNUSED,
215 bool named_arg ATTRIBUTE_UNUSED)
217 return targetm.calls.must_pass_in_stack (mode, type);
220 /* Return true if a parameter follows callee copies conventions. This
221 version of the hook is true for all named arguments. */
224 hook_callee_copies_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
225 enum machine_mode mode ATTRIBUTE_UNUSED,
226 tree type ATTRIBUTE_UNUSED, bool named)
231 /* Emit any directives required to unwind this instruction. */
234 default_unwind_emit (FILE * stream ATTRIBUTE_UNUSED,
235 rtx insn ATTRIBUTE_UNUSED)
237 /* Should never happen. */
241 /* True if MODE is valid for the target. By "valid", we mean able to
242 be manipulated in non-trivial ways. In particular, this means all
243 the arithmetic is supported.
245 By default we guess this means that any C type is supported. If
246 we can't map the mode back to a type that would be available in C,
247 then reject it. Special case, here, is the double-word arithmetic
248 supported by optabs.c. */
251 default_scalar_mode_supported_p (enum machine_mode mode)
253 int precision = GET_MODE_PRECISION (mode);
255 switch (GET_MODE_CLASS (mode))
257 case MODE_PARTIAL_INT:
259 if (precision == CHAR_TYPE_SIZE)
261 if (precision == SHORT_TYPE_SIZE)
263 if (precision == INT_TYPE_SIZE)
265 if (precision == LONG_TYPE_SIZE)
267 if (precision == LONG_LONG_TYPE_SIZE)
269 if (precision == 2 * BITS_PER_WORD)
274 if (precision == FLOAT_TYPE_SIZE)
276 if (precision == DOUBLE_TYPE_SIZE)
278 if (precision == LONG_DOUBLE_TYPE_SIZE)
282 case MODE_DECIMAL_FLOAT:
290 /* True if the target supports decimal floating point. */
293 default_decimal_float_supported_p (void)
295 return ENABLE_DECIMAL_FLOAT;
298 /* NULL if INSN insn is valid within a low-overhead loop, otherwise returns
301 This function checks whether a given INSN is valid within a low-overhead
302 loop. If INSN is invalid it returns the reason for that, otherwise it
303 returns NULL. A called function may clobber any special registers required
304 for low-overhead looping. Additionally, some targets (eg, PPC) use the count
305 register for branch on table instructions. We reject the doloop pattern in
309 default_invalid_within_doloop (rtx insn)
312 return "Function call in loop.";
315 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
316 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
317 return "Computed branch in the loop.";
322 /* Mapping of builtin functions to vectorized variants. */
325 default_builtin_vectorized_function (enum built_in_function fn ATTRIBUTE_UNUSED,
326 tree type_out ATTRIBUTE_UNUSED,
327 tree type_in ATTRIBUTE_UNUSED)
333 hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
334 CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
335 enum machine_mode mode ATTRIBUTE_UNUSED,
336 tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
342 hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
343 CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
344 enum machine_mode mode ATTRIBUTE_UNUSED,
345 tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
351 hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
352 CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
353 enum machine_mode mode ATTRIBUTE_UNUSED,
354 tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
360 hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
365 hook_invalid_arg_for_unprototyped_fn (
366 tree typelist ATTRIBUTE_UNUSED,
367 tree funcdecl ATTRIBUTE_UNUSED,
368 tree val ATTRIBUTE_UNUSED)
373 /* Initialize the stack protection decls. */
375 /* Stack protection related decls living in libgcc. */
376 static GTY(()) tree stack_chk_guard_decl;
379 default_stack_protect_guard (void)
381 tree t = stack_chk_guard_decl;
385 t = build_decl (VAR_DECL, get_identifier ("__stack_chk_guard"),
389 DECL_EXTERNAL (t) = 1;
391 TREE_THIS_VOLATILE (t) = 1;
392 DECL_ARTIFICIAL (t) = 1;
393 DECL_IGNORED_P (t) = 1;
395 stack_chk_guard_decl = t;
401 static GTY(()) tree stack_chk_fail_decl;
404 default_external_stack_protect_fail (void)
406 tree t = stack_chk_fail_decl;
410 t = build_function_type_list (void_type_node, NULL_TREE);
411 t = build_decl (FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
414 DECL_EXTERNAL (t) = 1;
416 TREE_THIS_VOLATILE (t) = 1;
417 TREE_NOTHROW (t) = 1;
418 DECL_ARTIFICIAL (t) = 1;
419 DECL_IGNORED_P (t) = 1;
420 DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
421 DECL_VISIBILITY_SPECIFIED (t) = 1;
423 stack_chk_fail_decl = t;
426 return build_function_call_expr (t, NULL_TREE);
430 default_hidden_stack_protect_fail (void)
432 #ifndef HAVE_GAS_HIDDEN
433 return default_external_stack_protect_fail ();
435 tree t = stack_chk_fail_decl;
438 return default_external_stack_protect_fail ();
442 t = build_function_type_list (void_type_node, NULL_TREE);
443 t = build_decl (FUNCTION_DECL,
444 get_identifier ("__stack_chk_fail_local"), t);
447 DECL_EXTERNAL (t) = 1;
449 TREE_THIS_VOLATILE (t) = 1;
450 TREE_NOTHROW (t) = 1;
451 DECL_ARTIFICIAL (t) = 1;
452 DECL_IGNORED_P (t) = 1;
453 DECL_VISIBILITY_SPECIFIED (t) = 1;
454 DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;
456 stack_chk_fail_decl = t;
459 return build_function_call_expr (t, NULL_TREE);
464 hook_bool_rtx_commutative_p (rtx x, int outer_code ATTRIBUTE_UNUSED)
466 return COMMUTATIVE_P (x);
470 default_function_value (tree ret_type ATTRIBUTE_UNUSED,
471 tree fn_decl_or_type,
472 bool outgoing ATTRIBUTE_UNUSED)
474 /* The old interface doesn't handle receiving the function type. */
476 && !DECL_P (fn_decl_or_type))
477 fn_decl_or_type = NULL;
479 #ifdef FUNCTION_OUTGOING_VALUE
481 return FUNCTION_OUTGOING_VALUE (ret_type, fn_decl_or_type);
484 #ifdef FUNCTION_VALUE
485 return FUNCTION_VALUE (ret_type, fn_decl_or_type);
492 default_internal_arg_pointer (void)
494 /* If the reg that the virtual arg pointer will be translated into is
495 not a fixed reg or is the stack pointer, make a copy of the virtual
496 arg pointer, and address parms via the copy. The frame pointer is
497 considered fixed even though it is not marked as such. */
498 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
499 || ! (fixed_regs[ARG_POINTER_REGNUM]
500 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
501 return copy_to_reg (virtual_incoming_args_rtx);
503 return virtual_incoming_args_rtx;
507 default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
508 enum reg_class reload_class ATTRIBUTE_UNUSED,
509 enum machine_mode reload_mode ATTRIBUTE_UNUSED,
510 secondary_reload_info *sri)
512 enum reg_class class = NO_REGS;
514 if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
516 sri->icode = sri->prev_sri->t_icode;
519 #ifdef SECONDARY_INPUT_RELOAD_CLASS
521 class = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
523 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
525 class = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
527 if (class != NO_REGS)
529 enum insn_code icode = (in_p ? reload_in_optab[(int) reload_mode]
530 : reload_out_optab[(int) reload_mode]);
532 if (icode != CODE_FOR_nothing
533 && insn_data[(int) icode].operand[in_p].predicate
534 && ! insn_data[(int) icode].operand[in_p].predicate (x, reload_mode))
535 icode = CODE_FOR_nothing;
536 else if (icode != CODE_FOR_nothing)
538 const char *insn_constraint, *scratch_constraint;
539 char insn_letter, scratch_letter;
540 enum reg_class insn_class, scratch_class;
542 gcc_assert (insn_data[(int) icode].n_operands == 3);
543 insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
544 if (!*insn_constraint)
545 insn_class = ALL_REGS;
550 gcc_assert (*insn_constraint == '=');
553 insn_letter = *insn_constraint;
555 = (insn_letter == 'r' ? GENERAL_REGS
556 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) insn_letter,
558 gcc_assert (insn_class != NO_REGS);
561 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
562 /* The scratch register's constraint must start with "=&",
563 except for an input reload, where only "=" is necessary,
564 and where it might be beneficial to re-use registers from
566 gcc_assert (scratch_constraint[0] == '='
567 && (in_p || scratch_constraint[1] == '&'));
568 scratch_constraint++;
569 if (*scratch_constraint == '&')
570 scratch_constraint++;
571 scratch_letter = *scratch_constraint;
573 = (scratch_letter == 'r' ? GENERAL_REGS
574 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
575 scratch_constraint));
577 if (reg_class_subset_p (reload_class, insn_class))
579 gcc_assert (scratch_class == class);
586 if (class == NO_REGS)
589 sri->t_icode = icode;
595 /* If STRICT_ALIGNMENT is true we use the container type for accessing
596 volatile bitfields. This is generally the preferred behavior for memory
597 mapped peripherals on RISC architectures.
598 If STRICT_ALIGNMENT is false we use the narrowest type possible. This
599 is typically used to avoid spurious page faults and extra memory accesses
600 due to unaligned accesses on CISC architectures. */
603 default_narrow_bitfield (void)
605 return !STRICT_ALIGNMENT;
608 #include "gt-targhooks.h"