/* Subroutines for manipulating rtx's in semantically interesting ways.
Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
+#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
+#include "output.h"
static rtx break_out_memory_refs (rtx);
static void emit_stack_probe (rtx);
We may not immediately return from the recursive call here, lest
all_constant gets lost. */
- if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+ if (CONST_INT_P (XEXP (x, 1)))
{
c += INTVAL (XEXP (x, 1));
return x;
/* First handle constants appearing at this level explicitly. */
- if (GET_CODE (XEXP (x, 1)) == CONST_INT
+ if (CONST_INT_P (XEXP (x, 1))
&& 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
XEXP (x, 1)))
- && GET_CODE (tem) == CONST_INT)
+ && CONST_INT_P (tem))
{
*constptr = tem;
return eliminate_constant_term (XEXP (x, 0), constptr);
if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
&& 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
*constptr, tem))
- && GET_CODE (tem) == CONST_INT)
+ && CONST_INT_P (tem))
{
*constptr = tem;
return gen_rtx_PLUS (GET_MODE (x), x0, x1);
if (TREE_CODE (exp) == WITH_SIZE_EXPR)
size = TREE_OPERAND (exp, 1);
else
- size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lang_hooks.expr_size (exp), exp);
+ {
+ size = tree_expr_size (exp);
+ gcc_assert (size);
+ gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
+ }
- return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), 0);
+ return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}
/* Return a wide integer for the size in bytes of the value of EXP, or -1
if (TREE_CODE (exp) == WITH_SIZE_EXPR)
size = TREE_OPERAND (exp, 1);
else
- size = lang_hooks.expr_size (exp);
+ {
+ size = tree_expr_size (exp);
+ gcc_assert (size);
+ }
if (size == 0 || !host_integerp (size, 0))
return -1;
rtx op1 = break_out_memory_refs (XEXP (x, 1));
if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
- x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
+ x = simplify_gen_binary (GET_CODE (x), Pmode, op0, op1);
}
return x;
case MULT:
/* For addition we can safely permute the conversion and addition
operation if one operand is a constant and converting the constant
- does not change it. We can always safely permute them if we are
- making the address narrower. */
+ does not change it or if one operand is a constant and we are
+ using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
+ We can always safely permute them if we are making the address
+ narrower. */
if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
|| (GET_CODE (x) == PLUS
- && GET_CODE (XEXP (x, 1)) == CONST_INT
- && XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))))
+ && CONST_INT_P (XEXP (x, 1))
+ && (XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))
+ || POINTERS_EXTEND_UNSIGNED < 0)))
return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
convert_memory_address (to_mode, XEXP (x, 0)),
XEXP (x, 1));
/* At this point, any valid address is accepted. */
if (memory_address_p (mode, x))
- goto win;
+ goto done;
/* If it was valid before but breaking out memory refs invalidated it,
use it the old way. */
if (memory_address_p (mode, oldx))
- goto win2;
+ {
+ x = oldx;
+ goto done;
+ }
/* Perform machine-dependent transformations on X
in certain cases. This is not necessary since the code
below can handle all possible cases, but machine-dependent
transformations can make better code. */
- LEGITIMIZE_ADDRESS (x, oldx, mode, win);
+ {
+ rtx orig_x = x;
+ x = targetm.legitimize_address (x, oldx, mode);
+ if (orig_x != x && memory_address_p (mode, x))
+ goto done;
+ }
/* PLUS and MULT can appear in special ways
as the result of attempts to make an address usable for indexing.
the register is a valid address. */
else
x = force_reg (Pmode, x);
-
- goto done;
-
- win2:
- x = oldx;
- win:
- if (flag_force_addr && ! cse_not_expected && !REG_P (x))
- {
- x = force_operand (x, NULL_RTX);
- x = force_reg (Pmode, x);
- }
}
done:
+ gcc_assert (memory_address_p (mode, x));
/* If we didn't change the address, we are done. Otherwise, mark
a reg as a pointer if we have REG or REG + CONST_INT. */
if (oldx == x)
mark_reg_pointer (x, BITS_PER_UNIT);
else if (GET_CODE (x) == PLUS
&& REG_P (XEXP (x, 0))
- && GET_CODE (XEXP (x, 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (x, 1)))
mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
/* OLDX may have been the address on a temporary. Update the address
return x;
}
-/* Like `memory_address' but pretend `flag_force_addr' is 0. */
-
-rtx
-memory_address_noforce (enum machine_mode mode, rtx x)
-{
- int ambient_force_addr = flag_force_addr;
- rtx val;
-
- flag_force_addr = 0;
- val = memory_address (mode, x);
- flag_force_addr = ambient_force_addr;
- return val;
-}
-
/* Convert a mem ref into one with a valid memory address.
Pass through anything else unchanged. */
{
if (!MEM_P (ref))
return ref;
- if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
- && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
+ ref = use_anchored_address (ref);
+ if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
return ref;
/* Don't alter REF itself, since that is probably a stack slot. */
return replace_equiv_address (ref, XEXP (ref, 0));
}
+
+/* If X is a memory reference to a member of an object block, try rewriting
+   it to use an anchor instead.  Return the new memory reference on success
+   and the old one on failure.  */
+
+rtx
+use_anchored_address (rtx x)
+{
+  rtx base;
+  HOST_WIDE_INT offset;
+
+  /* Section anchors only exist when -fsection-anchors is in effect.  */
+  if (!flag_section_anchors)
+    return x;
+
+  if (!MEM_P (x))
+    return x;
+
+  /* Split the address into a base and offset.  */
+  base = XEXP (x, 0);
+  offset = 0;
+  if (GET_CODE (base) == CONST
+      && GET_CODE (XEXP (base, 0)) == PLUS
+      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
+    {
+      offset += INTVAL (XEXP (XEXP (base, 0), 1));
+      base = XEXP (XEXP (base, 0), 0);
+    }
+
+  /* Check whether BASE is suitable for anchors.  */
+  if (GET_CODE (base) != SYMBOL_REF
+      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
+      || SYMBOL_REF_ANCHOR_P (base)
+      || SYMBOL_REF_BLOCK (base) == NULL
+      || !targetm.use_anchors_for_symbol_p (base))
+    return x;
+
+  /* Decide where BASE is going to be.  */
+  place_block_symbol (base);
+
+  /* Get the anchor we need to use.  NOTE(review): the TLS model is passed
+     down, presumably so TLS and non-TLS symbols get distinct anchors --
+     confirm against get_section_anchor.  */
+  offset += SYMBOL_REF_BLOCK_OFFSET (base);
+  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
+			     SYMBOL_REF_TLS_MODEL (base));
+
+  /* Work out the offset from the anchor.  */
+  offset -= SYMBOL_REF_BLOCK_OFFSET (base);
+
+  /* If we're going to run a CSE pass, force the anchor into a register.
+     We will then be able to reuse registers for several accesses, if the
+     target costs say that that's worthwhile.  */
+  if (!cse_not_expected)
+    base = force_reg (GET_MODE (base), base);
+
+  return replace_equiv_address (x, plus_constant (base, offset));
+}
\f
/* Copy the value or contents of X to a new temp reg and return that reg. */
else if (GET_CODE (x) == CONST
&& GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
{
rtx s = XEXP (XEXP (x, 0), 0);
rtx c = XEXP (XEXP (x, 0), 1);
align = MIN (sa, ca);
}
- if (align)
+ if (align || (MEM_P (x) && MEM_POINTER (x)))
mark_reg_pointer (temp, align);
}
return temp;
}
\f
-/* Return the mode to use to store a scalar of TYPE and MODE.
+/* Return the mode to use to pass or return a scalar of TYPE and MODE.
PUNSIGNEDP points to the signedness of the type and may be adjusted
to show what signedness to use on extension operations.
- FOR_CALL is nonzero if this call is promoting args for a call. */
+ FOR_RETURN is nonzero if the caller is promoting the return value
+ of FNDECL, else it is for promoting args. */
-#if defined(PROMOTE_MODE) && !defined(PROMOTE_FUNCTION_MODE)
-#define PROMOTE_FUNCTION_MODE PROMOTE_MODE
-#endif
+enum machine_mode
+promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
+		       const_tree funtype, int for_return)
+{
+  switch (TREE_CODE (type))
+    {
+    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
+    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
+    case POINTER_TYPE:   case REFERENCE_TYPE:
+      /* Scalar types may be promoted for argument passing or value
+	 return; the decision belongs to the target.  */
+      return targetm.calls.promote_function_mode (type, mode, punsignedp,
+						  funtype, for_return);
+
+    default:
+      /* Any other tree code (e.g. aggregates) is left unpromoted.  */
+      return mode;
+    }
+}
+/* Return the mode to use to store a scalar of TYPE and MODE.
+ PUNSIGNEDP points to the signedness of the type and may be adjusted
+ to show what signedness to use on extension operations. */
enum machine_mode
-promote_mode (tree type, enum machine_mode mode, int *punsignedp,
- int for_call ATTRIBUTE_UNUSED)
+promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
+ int *punsignedp ATTRIBUTE_UNUSED)
{
- enum tree_code code = TREE_CODE (type);
+ /* FIXME: this is the same logic that was there until GCC 4.4, but we
+ probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
+ is not defined. The affected targets are M32C, S390, SPARC. */
+#ifdef PROMOTE_MODE
+ const enum tree_code code = TREE_CODE (type);
int unsignedp = *punsignedp;
-#ifndef PROMOTE_MODE
- if (! for_call)
- return mode;
-#endif
-
switch (code)
{
-#ifdef PROMOTE_FUNCTION_MODE
case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
- case REAL_TYPE: case OFFSET_TYPE:
-#ifdef PROMOTE_MODE
- if (for_call)
- {
-#endif
- PROMOTE_FUNCTION_MODE (mode, unsignedp, type);
-#ifdef PROMOTE_MODE
- }
- else
- {
- PROMOTE_MODE (mode, unsignedp, type);
- }
-#endif
+ case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
+ PROMOTE_MODE (mode, unsignedp, type);
+ *punsignedp = unsignedp;
+ return mode;
break;
-#endif
#ifdef POINTERS_EXTEND_UNSIGNED
case REFERENCE_TYPE:
case POINTER_TYPE:
- mode = Pmode;
- unsignedp = POINTERS_EXTEND_UNSIGNED;
+ *punsignedp = POINTERS_EXTEND_UNSIGNED;
+ return Pmode;
break;
#endif
default:
- break;
+ return mode;
}
-
- *punsignedp = unsignedp;
+#else
return mode;
+#endif
}
+
+
+/* Use one of promote_mode or promote_function_mode to find the promoted
+   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
+   of DECL after promotion.  */
+
+enum machine_mode
+promote_decl_mode (const_tree decl, int *punsignedp)
+{
+  tree type = TREE_TYPE (decl);
+  int unsignedp = TYPE_UNSIGNED (type);
+  enum machine_mode mode = DECL_MODE (decl);
+  enum machine_mode pmode;
+
+  /* Incoming arguments and the return value live in modes chosen by the
+     calling conventions; everything else is promoted with PROMOTE_MODE.
+     NOTE(review): FOR_RETURN == 2 looks like a special "decl context"
+     value distinct from 0/1 -- confirm with the target hook docs.  */
+  if (TREE_CODE (decl) == RESULT_DECL
+      || TREE_CODE (decl) == PARM_DECL)
+    pmode = promote_function_mode (type, mode, &unsignedp,
+				   TREE_TYPE (current_function_decl), 2);
+  else
+    pmode = promote_mode (type, mode, &unsignedp);
+
+  if (punsignedp)
+    *punsignedp = unsignedp;
+  return pmode;
+}
+
\f
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
This pops when ADJUST is positive. ADJUST need not be constant. */
/* We expect all variable sized adjustments to be multiple of
PREFERRED_STACK_BOUNDARY. */
- if (GET_CODE (adjust) == CONST_INT)
+ if (CONST_INT_P (adjust))
stack_pointer_delta -= INTVAL (adjust);
temp = expand_binop (Pmode,
/* We expect all variable sized adjustments to be multiple of
PREFERRED_STACK_BOUNDARY. */
- if (GET_CODE (adjust) == CONST_INT)
+ if (CONST_INT_P (adjust))
stack_pointer_delta += INTVAL (adjust);
temp = expand_binop (Pmode,
if (align == 1)
return size;
- if (GET_CODE (size) == CONST_INT)
+ if (CONST_INT_P (size))
{
- HOST_WIDE_INT new = (INTVAL (size) + align - 1) / align * align;
+ HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
- if (INTVAL (size) != new)
- size = GEN_INT (new);
+ if (INTVAL (size) != new_size)
+ size = GEN_INT (new_size);
}
else
{
/* These clobbers prevent the scheduler from moving
references to variable arrays below the code
that deletes (pops) the arrays. */
- emit_insn (gen_rtx_CLOBBER (VOIDmode,
- gen_rtx_MEM (BLKmode,
- gen_rtx_SCRATCH (VOIDmode))));
- emit_insn (gen_rtx_CLOBBER (VOIDmode,
- gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
+ emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
+ emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
}
discard_pending_stack_adjust ();
return virtual_stack_dynamic_rtx;
/* Otherwise, show we're calling alloca or equivalent. */
- current_function_calls_alloca = 1;
+ cfun->calls_alloca = 1;
/* Ensure the size is in the proper mode. */
if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
/* We can't attempt to minimize alignment necessary, because we don't
know the final value of preferred_stack_boundary yet while executing
this code. */
- cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
+ crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
/* We will need to ensure that the address we return is aligned to
BIGGEST_ALIGNMENT. If STACK_DYNAMIC_OFFSET is defined, we don't
would use reg notes to store the "optimized" size and fix things
up later. These days we know this information before we ever
start building RTL so the reg notes are unnecessary. */
- if (!current_function_calls_setjmp)
+ if (!cfun->calls_setjmp)
{
int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
alignment. This constraint may be too strong. */
gcc_assert (PREFERRED_STACK_BOUNDARY == BIGGEST_ALIGNMENT);
- if (GET_CODE (size) == CONST_INT)
+ if (CONST_INT_P (size))
{
- HOST_WIDE_INT new = INTVAL (size) / align * align;
+ HOST_WIDE_INT new_size = INTVAL (size) / align * align;
- if (INTVAL (size) != new)
- size = GEN_INT (new);
+ if (INTVAL (size) != new_size)
+ size = GEN_INT (new_size);
}
else
{
gcc_assert (!(stack_pointer_delta
% (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
- /* If needed, check that we have the required amount of stack. Take into
- account what has already been checked. */
- if (flag_stack_check && ! STACK_CHECK_BUILTIN)
- probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);
+ /* If needed, check that we have the required amount of stack.
+ Take into account what has already been checked. */
+ if (flag_stack_check == GENERIC_STACK_CHECK)
+ probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
+ size);
+ else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
+ probe_stack_range (STACK_CHECK_PROTECT, size);
/* Don't use a TARGET that isn't a pseudo or is the wrong mode. */
if (target == 0 || !REG_P (target)
#endif
/* Check stack bounds if necessary. */
- if (current_function_limit_stack)
+ if (crtl->limit_stack)
{
rtx available;
rtx space_available = gen_label_rtx ();
/* If we have to generate explicit probes, see if we have a constant
small number of them to generate. If so, that's the easy case. */
- else if (GET_CODE (size) == CONST_INT
+ else if (CONST_INT_P (size)
&& INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
{
HOST_WIDE_INT offset;
and 0 otherwise. */
rtx
-hard_function_value (tree valtype, tree func, tree fntype,
+hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
int outgoing ATTRIBUTE_UNUSED)
{
rtx val;
in which a scalar value of mode MODE was returned by a library call. */
rtx
-hard_libcall_value (enum machine_mode mode)
+hard_libcall_value (enum machine_mode mode, rtx fun)
{
- return LIBCALL_VALUE (mode);
+ return targetm.calls.libcall_value (mode, fun);
}
/* Look up the tree code for a given rtx code