/* Procedure integration for GCC.
- Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001 Free Software Foundation, Inc.
+ Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
+ 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
Contributed by Michael Tiemann (tiemann@cygnus.com)
This file is part of GCC.
#include "config.h"
#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
-
-#include "obstack.h"
-#define obstack_chunk_alloc xmalloc
-#define obstack_chunk_free free
-
-extern struct obstack *function_maybepermanent_obstack;
+#include "langhooks.h"
/* Similar, but round to the next highest integer that meets the
alignment. */
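+/* The usual idiom for this rounding, assuming the alignment Y is a
+   power of two, is ((X + Y - 1) & ~(Y - 1)); the general form is
+   CEIL (X, Y) * Y.  (Illustrative note only.)  */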
\f
/* Private type used by {get/has}_func_hard_reg_initial_val. */
-typedef struct initial_value_pair {
+typedef struct initial_value_pair GTY(()) {
rtx hard_reg;
rtx pseudo;
} initial_value_pair;
-typedef struct initial_value_struct {
+typedef struct initial_value_struct GTY(()) {
int num_entries;
int max_entries;
- initial_value_pair *entries;
+ initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
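+
+/* The length option above tells gengtype that ENTRIES points to an
+   array of NUM_ENTRIES elements ("%h" names the containing structure),
+   so the generated GC marker walks entries[0 .. num_entries - 1]
+   instead of marking a single initial_value_pair.  */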
static void setup_initial_hard_reg_value_integration PARAMS ((struct function *, struct inline_remap *));
function_attribute_inlinable_p (fndecl)
tree fndecl;
{
- bool has_machine_attr = false;
- tree a;
-
- for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
+ if (targetm.attribute_table)
{
- tree name = TREE_PURPOSE (a);
- int i;
+ tree a;
- for (i = 0; targetm.attribute_table[i].name != NULL; i++)
+ for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
{
- if (is_attribute_p (targetm.attribute_table[i].name, name))
- {
- has_machine_attr = true;
- break;
- }
+ tree name = TREE_PURPOSE (a);
+ int i;
+
+ for (i = 0; targetm.attribute_table[i].name != NULL; i++)
+ if (is_attribute_p (targetm.attribute_table[i].name, name))
+ return (*targetm.function_attribute_inlinable_p) (fndecl);
}
- if (has_machine_attr)
- break;
}
- if (has_machine_attr)
- return (*targetm.function_attribute_inlinable_p) (fndecl);
- else
- return true;
+ return true;
}
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
/* For functions marked as inline increase the maximum size to
- MAX_INLINE_INSNS (-finline-limit-<n>). For regular functions
- use the limit given by INTEGRATE_THRESHOLD. */
+ MAX_INLINE_INSNS_RTL (--param max-inline-insns-rtl=<n>). For
+ regular functions use the limit given by INTEGRATE_THRESHOLD.
+ Note that the RTL inliner is not used by the languages that use
+ the tree inliner (C, C++). */
int max_insns = (DECL_INLINE (fndecl))
- ? (MAX_INLINE_INSNS
+ ? (MAX_INLINE_INSNS_RTL
+ 8 * list_length (DECL_ARGUMENTS (fndecl)))
: INTEGRATE_THRESHOLD (fndecl);
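+
+  /* Worked example: with --param max-inline-insns-rtl=600 (an
+     illustrative value, not necessarily the default), an inline
+     function taking three arguments may grow to 600 + 8 * 3 = 624
+     insns before being rejected.  */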
return N_("function cannot be inline");
/* No inlines with varargs. */
- if ((last && TREE_VALUE (last) != void_type_node)
- || current_function_varargs)
+ if (last && TREE_VALUE (last) != void_type_node)
return N_("varargs function cannot be inline");
if (current_function_calls_alloca)
/* We will not inline a function which uses computed goto. The addresses of
its local labels, which may be tucked into global storage, are of course
- not constant across instantiations, which causes unexpected behaviour. */
+ not constant across instantiations, which causes unexpected behavior. */
if (current_function_has_computed_jump)
return N_("function with computed jump cannot inline");
}
/* If the function has a target specific attribute attached to it,
- then we assume that we should not inline it. This can be overriden
+ then we assume that we should not inline it. This can be overridden
by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P. */
if (!function_attribute_inlinable_p (fndecl))
return N_("function with target specific attribute(s) cannot be inlined");
/* Copy the declaration. */
if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
{
+ tree type;
+ int invisiref = 0;
+
+ /* See if the frontend wants to pass this by invisible reference. */
+ if (TREE_CODE (decl) == PARM_DECL
+ && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
+ && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
+ && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
+ {
+ invisiref = 1;
+ type = DECL_ARG_TYPE (decl);
+ }
+ else
+ type = TREE_TYPE (decl);
+
/* For a parameter, we must make an equivalent VAR_DECL, not a
new PARM_DECL. */
- copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
- TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
- TREE_READONLY (copy) = TREE_READONLY (decl);
- TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
+ copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
+ if (!invisiref)
+ {
+ TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
+ TREE_READONLY (copy) = TREE_READONLY (decl);
+ TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
+ }
+ else
+ {
+ TREE_ADDRESSABLE (copy) = 0;
+ TREE_READONLY (copy) = 1;
+ TREE_THIS_VOLATILE (copy) = 0;
+ }
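+
+      /* Example of the invisible-reference case: a C++ parameter of
+	 class type T with a nontrivial copy constructor has
+	 DECL_ARG_TYPE "T *" but TREE_TYPE "T", so INVISIREF is set
+	 above and the copy becomes a read-only "T *" variable.  */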
}
else
{
copy = copy_node (decl);
- if (DECL_LANG_SPECIFIC (copy))
- copy_lang_decl (copy);
+ (*lang_hooks.dup_lang_specific_decl) (copy);
/* TREE_ADDRESSABLE isn't used to indicate that a label's
address has been taken; it's for internal bookkeeping in
DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
/* The new variable/label has no RTL, yet. */
- SET_DECL_RTL (copy, NULL_RTX);
+ if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
+ SET_DECL_RTL (copy, NULL_RTX);
/* These args would always appear unused, if not for this. */
TREE_USED (copy) = 1;
;
else if (DECL_CONTEXT (decl) != from_fn)
/* Things that weren't in the scope of the function we're inlining
- from aren't in the scope we're inlining too, either. */
+ from aren't in the scope we're inlining to, either. */
;
else if (TREE_STATIC (decl))
- /* Function-scoped static variables should say in the original
+ /* Function-scoped static variables should stay in the original
function. */
;
else
}
/* Compare two BLOCKs for qsort. The key we sort on is the
- BLOCK_ABSTRACT_ORIGIN of the blocks. */
+ BLOCK_ABSTRACT_ORIGIN of the blocks. We cannot just subtract the
+ two pointers, because the difference may not fit in an int. */
static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
tree b1 = *((const tree *) v1);
tree b2 = *((const tree *) v2);
+ char *p1 = (char *) BLOCK_ABSTRACT_ORIGIN (b1);
+ char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);
- return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
- - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
+ if (p1 == p2)
+ return 0;
+ return p1 < p2 ? -1 : 1;
}
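+
+/* Note that qsort only inspects the sign of the result; on LP64 hosts
+   two pointers can be more than INT_MAX bytes apart, so the explicit
+   -1/0/1 comparison above is the portable form.  */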
/* Compare two BLOCKs for bsearch. The first pointer corresponds to
{
const union tree_node *b1 = (const union tree_node *) v1;
tree b2 = *((const tree *) v2);
+ char *p1 = (char *) b1;
+ char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);
- return ((const char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
+ if (p1 == p2)
+ return 0;
+ return p1 < p2 ? -1 : 1;
}
/* Integrate the procedure defined by FNDECL. Note that this function
rtx stack_save = 0;
rtx temp;
struct inline_remap *map = 0;
- rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
+ rtvec arg_vector = inl_f->original_arg_vector;
rtx static_chain_value = 0;
int inl_max_uid;
int eh_region_offset;
enum machine_mode mode;
if (actual == 0)
- return (rtx) (HOST_WIDE_INT) -1;
+ return (rtx) (size_t) -1;
arg = TREE_VALUE (actual);
mode = TYPE_MODE (DECL_ARG_TYPE (formal));
|| (mode == BLKmode
&& (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
!= TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
- return (rtx) (HOST_WIDE_INT) -1;
+ return (rtx) (size_t) -1;
}
+ /* If there is a TARGET which is a readonly BLKmode MEM and DECL_RESULT
+ is also a mem, we are going to lose the readonly on the stores, so don't
+ inline. */
+ if (target != 0 && GET_CODE (target) == MEM && GET_MODE (target) == BLKmode
+ && RTX_UNCHANGING_P (target) && DECL_RTL_SET_P (DECL_RESULT (fndecl))
+ && GET_CODE (DECL_RTL (DECL_RESULT (fndecl))) == MEM)
+ return (rtx) (size_t) -1;
+
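+  /* For illustration, such a target can arise when a call initializes
+     a const aggregate, e.g. "const struct S s = f ();" (hypothetical
+     code): the target MEM is flagged unchanging, yet the inlined body
+     would store through DECL_RESULT's MEM.  */
+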
/* Extra arguments are valid, but will be ignored below, so we must
evaluate them here for side-effects. */
for (; actual; actual = TREE_CHAIN (actual))
else
arg_vals[i] = 0;
+ /* If the formal type was const but the actual was not, we might
+ end up here with an rtx wrongly tagged unchanging in the caller's
+ context. Fix that. */
+ if (arg_vals[i] != 0
+ && (GET_CODE (arg_vals[i]) == REG || GET_CODE (arg_vals[i]) == MEM)
+ && ! TREE_READONLY (TREE_VALUE (actual)))
+ RTX_UNCHANGING_P (arg_vals[i]) = 0;
+
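+  /* For example (hypothetical signature): "void f (const int x)"
+     called as "f (y)" on a plain int Y; Y's rtx must not remain
+     flagged unchanging inside the inlined body.  */
+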
if (arg_vals[i] != 0
&& (! TREE_READONLY (formal)
/* If the parameter is not read-only, copy our argument through
??? These numbers are quite arbitrary and were obtained by
experimentation. At some point, we should try to allocate the
- table after all the parameters are set up so we an more accurately
+ table after all the parameters are set up so we can more accurately
estimate the number of pseudos we will need. */
VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
if (inl_f->needs_context)
static_chain_value = lookup_static_chain (fndecl);
+ /* If the inlined function calls __builtin_constant_p, then we'll
+ need to call purge_builtin_constant_p on this function. */
+ if (inl_f->calls_constant_p)
+ current_function_calls_constant_p = 1;
+
if (GET_CODE (parm_insns) == NOTE
&& NOTE_LINE_NUMBER (parm_insns) > 0)
{
- rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
- NOTE_LINE_NUMBER (parm_insns));
+ rtx note = emit_line_note (NOTE_SOURCE_FILE (parm_insns),
+ NOTE_LINE_NUMBER (parm_insns));
if (note)
RTX_INTEGRATED_P (note) = 1;
}
&& ! (GET_CODE (XEXP (loc, 0)) == REG
&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
{
- rtx note = emit_note (DECL_SOURCE_FILE (formal),
- DECL_SOURCE_LINE (formal));
+ rtx note = emit_line_note (DECL_SOURCE_FILE (formal),
+ DECL_SOURCE_LINE (formal));
if (note)
RTX_INTEGRATED_P (note) = 1;
this block to the list of blocks at this binding level. We
can't do it the way it's done for function-at-a-time mode because the
superblocks have not been created yet. */
- insert_block (block);
+ (*lang_hooks.decls.insert_block) (block);
else
{
BLOCK_CHAIN (block)
This line number note is still needed for debugging though, so we can't
delete it. */
if (flag_test_coverage)
- emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);
+ emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
- emit_line_note (input_filename, lineno);
+ emit_line_note (input_filename, input_line);
/* If the function returns a BLKmode object in a register, copy it
out of the temp register into a BLKmode memory object. */
free (real_label_map);
VARRAY_FREE (map->const_equiv_varray);
free (map->reg_map);
- VARRAY_FREE (map->block_map);
free (map->insn_map);
free (map);
free (arg_vals);
Copying is done in two passes, first the insns and then their REG_NOTES.
- If static_chain_value is non-zero, it represents the context-pointer
+ If static_chain_value is nonzero, it represents the context-pointer
register for the function. */
static void
#ifdef HAVE_cc0
rtx cc0_insn = 0;
#endif
+ rtx static_chain_mem = 0;
/* Copy the insns one by one. Do this in two passes, first the insns and
then their REG_NOTES. */
&& REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
break;
- /* If this is setting the static chain rtx, omit it. */
+ /* Look for the address of the static chain slot. The
+ rtx_equal_p comparisons against the
+ static_chain_incoming_rtx below may fail if the static
+ chain is in memory and the address specified is not
+ "legitimate". This happens on Xtensa where the static
+ chain is at a negative offset from argp and where only
+ positive offsets are legitimate. When the RTL is
+ generated, the address is "legitimized" by copying it
+ into a register, causing the rtx_equal_p comparisons to
+ fail. This workaround looks for code that sets a
+ register to the address of the static chain. Subsequent
+ memory references via that register can then be
+ identified as static chain references. We assume that
+ the register is only assigned once, and that the static
+ chain address is only live in one register at a time. */
+
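+	 /* Illustrative shape of the matched code (register number and
+	    offset are hypothetical):
+
+	      (set (reg:SI 42)
+		   (plus:SI (reg:SI argp) (const_int -12)))
+	      ... (mem:SI (reg:SI 42)) ...  */
+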
else if (static_chain_value != 0
&& set != 0
+ && GET_CODE (static_chain_incoming_rtx) == MEM
&& GET_CODE (SET_DEST (set)) == REG
- && rtx_equal_p (SET_DEST (set),
- static_chain_incoming_rtx))
+ && rtx_equal_p (SET_SRC (set),
+ XEXP (static_chain_incoming_rtx, 0)))
+ {
+ static_chain_mem =
+ gen_rtx_MEM (GET_MODE (static_chain_incoming_rtx),
+ SET_DEST (set));
+
+ /* Emit the instruction in case it is used for something
+ other than setting the static chain; if it's not used,
+ it can always be removed as dead code. */
+ copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
+ }
+
+ /* If this is setting the static chain rtx, omit it. */
+ else if (static_chain_value != 0
+ && set != 0
+ && (rtx_equal_p (SET_DEST (set),
+ static_chain_incoming_rtx)
+ || (static_chain_mem
+ && rtx_equal_p (SET_DEST (set), static_chain_mem))))
break;
/* If this is setting the static chain pseudo, set it from
the value we want to give it instead. */
else if (static_chain_value != 0
&& set != 0
- && rtx_equal_p (SET_SRC (set),
- static_chain_incoming_rtx))
+ && (rtx_equal_p (SET_SRC (set),
+ static_chain_incoming_rtx)
+ || (static_chain_mem
+ && rtx_equal_p (SET_SRC (set), static_chain_mem))))
{
rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);
copy = emit_move_insn (newdest, static_chain_value);
- static_chain_value = 0;
+ if (GET_CODE (static_chain_incoming_rtx) != MEM)
+ static_chain_value = 0;
}
/* If this is setting the virtual stack vars register, this must
#else
try_constants (copy, map);
#endif
+ INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
break;
case JUMP_INSN:
cc0_insn = 0;
#endif
try_constants (copy, map);
+ INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
/* If this used to be a conditional jump insn whose branch
direction is now known, we must do something special. */
SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
+ INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
/* Because the USAGE information potentially contains objects other
than hard registers, we need to copy it. */
NOTE_INSN_DELETED notes aren't useful. */
- if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
+ if (NOTE_LINE_NUMBER (insn) > 0)
+ copy = emit_line_note (NOTE_SOURCE_FILE (insn),
+ NOTE_LINE_NUMBER (insn));
+ else if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
{
- copy = emit_note (NOTE_SOURCE_FILE (insn),
- NOTE_LINE_NUMBER (insn));
- if (copy
- && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
- || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
+ copy = emit_note (NULL, NOTE_LINE_NUMBER (insn));
+ NOTE_DATA (copy) = NOTE_DATA (insn);
+ if ((NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
+ || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
&& NOTE_BLOCK (insn))
{
tree *mapped_block_p;
else
NOTE_BLOCK (copy) = *mapped_block_p;
}
- else if (copy
- && NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
+ else if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
NOTE_EXPECTED_VALUE (copy)
= copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
map, 0);
next = XEXP (note, 1);
if (REG_NOTE_KIND (note) == REG_LABEL)
remove_note (new_insn, note);
- else if (REG_NOTE_KIND (note) == REG_EH_REGION)
+ else if (REG_NOTE_KIND (note) == REG_EH_REGION
+ && INTVAL (XEXP (note, 0)) > 0)
XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
+ eh_region_offset);
}
subst_constants (&r, NULL_RTX, map, 1);
SET_DECL_RTL (d, r);
- if (GET_CODE (r) == REG)
- REGNO_DECL (REGNO (r)) = d;
- else if (GET_CODE (r) == CONCAT)
- {
- REGNO_DECL (REGNO (XEXP (r, 0))) = d;
- REGNO_DECL (REGNO (XEXP (r, 1))) = d;
- }
-
apply_change_group ();
}
SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
emit_insn_after (seq, map->insns_at_start);
return temp;
SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
emit_insn_after (seq, map->insns_at_start);
return temp;
RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
/* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
- if (REG_POINTER (map->x_regno_reg_rtx[regno]))
+ /* Objects may initially be represented as registers, but
+ may later be turned into a MEM if their address is taken by
+ put_var_into_stack. Therefore, the register table may have
+ entries which are MEMs.
+
+ We briefly tried to clear such entries, but that ended up
+ cascading into many changes due to the optimizers not being
+ prepared for empty entries in the register table. So we've
+ decided to allow the MEMs in the register table for now. */
+ if (REG_P (map->x_regno_reg_rtx[regno])
+ && REG_POINTER (map->x_regno_reg_rtx[regno]))
mark_reg_pointer (map->reg_map[regno],
map->regno_pointer_align[regno]);
regno = REGNO (map->reg_map[regno]);
case PC:
case CC0:
case CONST_INT:
+ case CONST_VECTOR:
return orig;
case SYMBOL_REF:
copy_rtx_and_substitute (constant, map, for_lhs)),
0);
}
+ else if (TREE_CONSTANT_POOL_ADDRESS_P (orig) && inlining)
+ notice_rtl_inlining_of_deferred_constant ();
return orig;
if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
{
copy = rtx_alloc (ASM_OPERANDS);
- copy->volatil = orig->volatil;
+ RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
PUT_MODE (copy, GET_MODE (orig));
ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
map, 0));
- MEM_COPY_ATTRIBUTES (copy, orig);
+ MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
return
gen_rtx_CALL (GET_MODE (orig), copy,
return validize_mem (force_const_mem (const_mode, constant));
}
- copy = rtx_alloc (MEM);
- PUT_MODE (copy, mode);
- XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
+ copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
+ map, 0));
MEM_COPY_ATTRIBUTES (copy, orig);
+
+ /* If inlining and this is not for the LHS, turn off RTX_UNCHANGING_P
+ since this may be an indirect reference to a parameter and the
+ actual may not be readonly. */
+ if (inlining && !for_lhs)
+ RTX_UNCHANGING_P (copy) = 0;
+
+ /* If inlining, squish aliasing data that references the subroutine's
+ parameter list, since that's no longer applicable. */
+ if (inlining && MEM_EXPR (copy)
+ && TREE_CODE (MEM_EXPR (copy)) == INDIRECT_REF
+ && TREE_CODE (TREE_OPERAND (MEM_EXPR (copy), 0)) == PARM_DECL)
+ set_mem_expr (copy, NULL_TREE);
+
return copy;
default:
copy = rtx_alloc (code);
PUT_MODE (copy, mode);
- copy->in_struct = orig->in_struct;
- copy->volatil = orig->volatil;
- copy->unchanging = orig->unchanging;
+ RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
+ RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
+ RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
apply_change_group ();
subst_constants (&PATTERN (insn), insn, map, 0);
apply_change_group ();
+
+ /* Enforce consistency between the addresses in the regular insn flow
+ and the ones in CALL_INSN_FUNCTION_USAGE lists, if any. */
+ if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
+ {
+ subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
+ apply_change_group ();
+ }
/* Show we don't know the value of anything stored or clobbered. */
note_stores (PATTERN (insn), mark_stores, NULL);
case PC:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_VECTOR:
case SYMBOL_REF:
case CONST:
case LABEL_REF:
{
src = SET_SRC (x);
if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
-#ifdef HAVE_cc0
- || dest == cc0_rtx
-#endif
- )
+ || CC0_P (dest))
{
compare_mode = GET_MODE (XEXP (src, 0));
if (compare_mode == VOIDmode)
|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
&& CONSTANT_P (XEXP (src, 1)))
|| GET_CODE (src) == COMPARE
-#ifdef HAVE_cc0
- || dest == cc0_rtx
-#endif
+ || CC0_P (dest)
|| (dest == pc_rtx
&& (src == pc_rtx || GET_CODE (src) == RETURN
|| GET_CODE (src) == LABEL_REF))))
if (compare_mode != VOIDmode
&& GET_CODE (src) == COMPARE
&& (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
-#ifdef HAVE_cc0
- || dest == cc0_rtx
-#endif
- )
+ || CC0_P (dest))
&& GET_MODE (XEXP (src, 0)) == VOIDmode
&& GET_MODE (XEXP (src, 1)) == VOIDmode)
{
case 'w':
case 'n':
case 't':
+ case 'B':
break;
case 'E':
new = CONST0_RTX (mode);
else
{
- REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
+ REAL_VALUE_TYPE val;
+
+ /* Avoid automatic aggregate initialization: older host compilers
+ reject it when the initializer, here FLOAT_STORE_FLAG_VALUE,
+ is not a constant expression. */
+ val = FLOAT_STORE_FLAG_VALUE (mode);
new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
}
}
from its DECL_SAVED_INSNS. Used for inline functions that are output
at end of compilation instead of where they came in the source. */
+static GTY(()) struct function *old_cfun;
+
void
output_inline_function (fndecl)
tree fndecl;
{
- struct function *old_cfun = cfun;
enum debug_info_type old_write_symbols = write_symbols;
- struct gcc_debug_hooks *old_debug_hooks = debug_hooks;
+ const struct gcc_debug_hooks *const old_debug_hooks = debug_hooks;
struct function *f = DECL_SAVED_INSNS (fndecl);
+ old_cfun = cfun;
cfun = f;
current_function_decl = fndecl;
- clear_emit_caches ();
set_new_last_label_num (f->inl_max_label_num);
debug_hooks = &do_nothing_debug_hooks;
}
- /* Do any preparation, such as emitting abstract debug info for the inline
- before it gets mangled by optimization. */
- (*debug_hooks->outlining_inline_function) (fndecl);
+ /* Make sure warnings emitted by the optimizers (e.g. control reaches
+ end of non-void function) are not wildly incorrect. */
+ input_location = DECL_SOURCE_LOCATION (fndecl);
/* Compile this function all the way down to assembly code. As a
side effect this destroys the saved RTL representation, but
the function. */
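+/* Given a pseudo REG previously returned by
+   get_func_hard_reg_initial_val for FUN, return the hard register it
+   was paired with, or NULL_RTX if REG is not such a pseudo.  */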
rtx
+get_hard_reg_initial_reg (fun, reg)
+ struct function *fun;
+ rtx reg;
+{
+ struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
+ int i;
+
+ if (ivs == 0)
+ return NULL_RTX;
+
+ for (i = 0; i < ivs->num_entries; i++)
+ if (rtx_equal_p (ivs->entries[i].pseudo, reg))
+ return ivs->entries[i].hard_reg;
+
+ return NULL_RTX;
+}
+
+rtx
has_func_hard_reg_initial_val (fun, reg)
struct function *fun;
rtx reg;
if (ivs == 0)
{
- fun->hard_reg_initial_vals = (void *) xmalloc (sizeof (initial_value_struct));
+ fun->hard_reg_initial_vals = (void *) ggc_alloc (sizeof (initial_value_struct));
ivs = fun->hard_reg_initial_vals;
ivs->num_entries = 0;
ivs->max_entries = 5;
- ivs->entries = (initial_value_pair *) xmalloc (5 * sizeof (initial_value_pair));
+ ivs->entries = (initial_value_pair *) ggc_alloc (5 * sizeof (initial_value_pair));
}
if (ivs->num_entries >= ivs->max_entries)
{
ivs->max_entries += 5;
ivs->entries =
- (initial_value_pair *) xrealloc (ivs->entries,
- ivs->max_entries
- * sizeof (initial_value_pair));
+ (initial_value_pair *) ggc_realloc (ivs->entries,
+ ivs->max_entries
+ * sizeof (initial_value_pair));
}
ivs->entries[ivs->num_entries].hard_reg = reg;
return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}
-void
-mark_hard_reg_initial_vals (fun)
- struct function *fun;
-{
- struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
- int i;
-
- if (ivs == 0)
- return;
-
- for (i = 0; i < ivs->num_entries; i ++)
- {
- ggc_mark_rtx (ivs->entries[i].hard_reg);
- ggc_mark_rtx (ivs->entries[i].pseudo);
- }
-}
-
static void
setup_initial_hard_reg_value_integration (inl_f, remap)
struct function *inl_f;
seq = get_insns ();
end_sequence ();
- emit_insns_after (seq, get_insns ());
+ emit_insn_after (seq, get_insns ());
}
/* If the backend knows where to allocate pseudos for hard
}
#endif
}
+
+#include "gt-integrate.h"