#include "target.h"
#include "timevar.h"
#include "df.h"
+#include "diagnostic.h"
/* Decide whether a function's arguments should be processed
from first to last or from last to first.
#endif
/* This macro is used to determine whether store_by_pieces should be
- called to "memset" storage with byte values other than zero, or
- to "memcpy" storage when the source is a constant string. */
+ called to "memset" storage with byte values other than zero. */
+#ifndef SET_BY_PIECES_P
+/* Default heuristic: setting by pieces is profitable when the insn count
+ estimated by move_by_pieces_ninsns falls below SET_RATIO. The #ifndef
+ guard lets targets supply their own definition. */
+#define SET_BY_PIECES_P(SIZE, ALIGN) \
+ (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
+ < (unsigned int) SET_RATIO)
+#endif
+
+/* This macro is used to determine whether store_by_pieces should be
+ called to "memcpy" storage when the source is a constant string. */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
(move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
-/* This is run once per compilation to set up which modes can be used
- directly in memory and to initialize the block move optab. */
+/* This is run to set up which modes can be used
+ directly in memory and to initialize the block move optab. It is run
+ at the beginning of compilation and when the target is reinitialized. */
void
-init_expr_once (void)
+init_expr_target (void)
{
rtx insn, pat;
enum machine_mode mode;
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
- Both modes may be integer, or both may be floating.
+ Both modes may be integer, or both may be floating, or both may be
+ fixed-point.
UNSIGNEDP should be nonzero if FROM is an unsigned type.
This causes zero-extension instead of sign-extension. */
}
/* Otherwise use a libcall. */
- libcall = convert_optab_handler (tab, to_mode, from_mode)->libfunc;
+ libcall = convert_optab_libfunc (tab, to_mode, from_mode);
/* Is this conversion implemented yet? */
gcc_assert (libcall);
from = new_from;
}
+ /* Make sure both are fixed-point modes or both are not. */
+ gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
+ ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
+ if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
+ {
+ /* If we widen from_mode to to_mode and they are in the same class,
+ we won't saturate the result.
+ Otherwise, always saturate the result to play safe. */
+ if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
+ && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
+ expand_fixed_convert (to, from, 0, 0);
+ else
+ expand_fixed_convert (to, from, 0, 1);
+ return;
+ }
+
/* Now both modes are integers. */
/* Handle expanding beyond a word. */
else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
&& XVECLEN (dst, 0) > 1)
tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
- else if (CONSTANT_P (src)
- || (REG_P (src) && GET_MODE (src) == mode))
+ else if (CONSTANT_P (src))
+ {
+ HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
+
+ if (len == ssize)
+ tmps[i] = src;
+ else
+ {
+ rtx first, second;
+
+ gcc_assert (2 * len == ssize);
+ split_double (src, &first, &second);
+ if (i)
+ tmps[i] = second;
+ else
+ tmps[i] = first;
+ }
+ }
+ else if (REG_P (src) && GET_MODE (src) == mode)
tmps[i] = src;
else
tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
/* Determine whether the LEN bytes generated by CONSTFUN can be
stored to memory using several move instructions. CONSTFUNDATA is
a pointer which will be passed as argument in every CONSTFUN call.
- ALIGN is maximum alignment we can assume. Return nonzero if a
- call to store_by_pieces should succeed. */
+ ALIGN is maximum alignment we can assume. MEMSETP is true if this is
+ a memset operation and false if it's a copy of a constant string.
+ Return nonzero if a call to store_by_pieces should succeed. */
int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
- void *constfundata, unsigned int align)
+ void *constfundata, unsigned int align, bool memsetp)
{
unsigned HOST_WIDE_INT l;
unsigned int max_size;
if (len == 0)
return 1;
- if (! STORE_BY_PIECES_P (len, align))
+ if (! (memsetp
+ ? SET_BY_PIECES_P (len, align)
+ : STORE_BY_PIECES_P (len, align)))
return 0;
tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
/* Generate several move instructions to store LEN bytes generated by
CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
pointer which will be passed as argument in every CONSTFUN call.
- ALIGN is maximum alignment we can assume.
+ ALIGN is maximum alignment we can assume. MEMSETP is true if this is
+ a memset operation and false if it's a copy of a constant string.
If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
mempcpy, and if ENDP is 2 return memory the end minus one byte ala
stpcpy. */
rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
- void *constfundata, unsigned int align, int endp)
+ void *constfundata, unsigned int align, bool memsetp, int endp)
{
struct store_by_pieces data;
return to;
}
- gcc_assert (STORE_BY_PIECES_P (len, align));
+ gcc_assert (memsetp
+ ? SET_BY_PIECES_P (len, align)
+ : STORE_BY_PIECES_P (len, align));
data.constfun = constfun;
data.constfundata = constfundata;
data.len = len;
if (COMPLEX_MODE_P (mode))
return emit_move_complex (mode, x, y);
- if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
+ if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
+ || ALL_FIXED_POINT_MODE_P (mode))
{
rtx result = emit_move_via_integer (mode, x, y, true);
/* If X or Y are memory references, verify that their addresses are valid
for the machine. */
if (MEM_P (x)
- && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
- && ! push_operand (x, GET_MODE (x)))
- || (flag_force_addr
- && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
+ && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
+ && ! push_operand (x, GET_MODE (x))))
x = validize_mem (x);
if (MEM_P (y)
- && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
- || (flag_force_addr
- && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
+ && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
y = validize_mem (y);
gcc_assert (mode != BLKmode);
str_copy_len = MIN (str_copy_len, exp_len);
if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
(void *) TREE_STRING_POINTER (exp),
- MEM_ALIGN (target)))
+ MEM_ALIGN (target), false))
goto normal_expr;
dest_mem = target;
dest_mem = store_by_pieces (dest_mem,
str_copy_len, builtin_strncpy_read_str,
(void *) TREE_STRING_POINTER (exp),
- MEM_ALIGN (target),
+ MEM_ALIGN (target), false,
exp_len > str_copy_len ? 1 : 0);
if (exp_len > str_copy_len)
- clear_storage (dest_mem, GEN_INT (exp_len - str_copy_len),
+ clear_storage (adjust_address (dest_mem, BLKmode, 0),
+ GEN_INT (exp_len - str_copy_len),
BLOCK_OP_NORMAL);
return NULL_RTX;
}
case INTEGER_CST:
case REAL_CST:
+ case FIXED_CST:
if (!initializer_zerop (value))
nz_elts += mult;
elt_count += mult;
case INTEGER_TYPE:
case REAL_TYPE:
+ case FIXED_POINT_TYPE:
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
case POINTER_TYPE:
/* ??? This should be considered a front-end bug. We should not be
generating ADDR_EXPR of something that isn't an LVALUE. The only
exception here is STRING_CST. */
- if (TREE_CODE (exp) == CONSTRUCTOR
- || CONSTANT_CLASS_P (exp))
+ if (CONSTANT_CLASS_P (exp))
return XEXP (expand_expr_constant (exp, 0, modifier), 0);
/* Everything must be something allowed by is_gimple_addressable. */
default:
/* If the object is a DECL, then expand it for its rtl. Don't bypass
expand_expr, as that can have various side effects; LABEL_DECLs for
- example, may not have their DECL_RTL set yet. Assume language
- specific tree nodes can be expanded in some interesting way. */
+ example, may not have their DECL_RTL set yet. Expand the rtl of
+ CONSTRUCTORs too, which should yield a memory reference for the
+ constructor's contents. Assume language specific tree nodes can
+ be expanded in some interesting way. */
if (DECL_P (exp)
+ || TREE_CODE (exp) == CONSTRUCTOR
|| TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
{
result = expand_expr (exp, target, tmode,
return result;
}
+/* Generate code for computing CONSTRUCTOR EXP.
+ An rtx for the computed value is returned. If AVOID_TEMP_MEM
+ is TRUE, instead of creating a temporary variable in memory
+ NULL is returned and the caller needs to handle it differently.
+ (Callers use AVOID_TEMP_MEM when materializing the constructor in
+ memory would defeat the optimization they are attempting; both
+ NULL_RTX returns below happen before any insns are emitted.) */
+
+static rtx
+expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
+ bool avoid_temp_mem)
+{
+ tree type = TREE_TYPE (exp);
+ enum machine_mode mode = TYPE_MODE (type);
+
+ /* Try to avoid creating a temporary at all. This is possible
+ if all of the initializer is zero.
+ FIXME: try to handle all [0..255] initializers we can handle
+ with memset. */
+ if (TREE_STATIC (exp)
+ && !TREE_ADDRESSABLE (exp)
+ && target != 0 && mode == BLKmode
+ && all_zeros_p (exp))
+ {
+ /* All-zero static initializer: just clear the target block. */
+ clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
+ return target;
+ }
+
+ /* All elts simple constants => refer to a constant in memory. But
+ if this is a non-BLKmode mode, let it store a field at a time
+ since that should make a CONST_INT or CONST_DOUBLE when we
+ fold. Likewise, if we have a target we can use, it is best to
+ store directly into the target unless the type is large enough
+ that memcpy will be used. If we are making an initializer and
+ all operands are constant, put it in memory as well.
+
+ FIXME: Avoid trying to fill vector constructors piece-meal.
+ Output them with output_constant_def below unless we're sure
+ they're zeros. This should go away when vector initializers
+ are treated like VECTOR_CST instead of arrays. */
+ if ((TREE_STATIC (exp)
+ && ((mode == BLKmode
+ && ! (target != 0 && safe_from_p (target, exp, 1)))
+ || TREE_ADDRESSABLE (exp)
+ || (host_integerp (TYPE_SIZE_UNIT (type), 1)
+ && (! MOVE_BY_PIECES_P
+ (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
+ TYPE_ALIGN (type)))
+ && ! mostly_zeros_p (exp))))
+ || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
+ && TREE_CONSTANT (exp)))
+ {
+ rtx constructor;
+
+ /* This path produces a constant-pool MEM, i.e. a temporary in
+ memory; bail out if the caller asked us to avoid that. */
+ if (avoid_temp_mem)
+ return NULL_RTX;
+
+ constructor = expand_expr_constant (exp, 1, modifier);
+
+ if (modifier != EXPAND_CONST_ADDRESS
+ && modifier != EXPAND_INITIALIZER
+ && modifier != EXPAND_SUM)
+ constructor = validize_mem (constructor);
+
+ return constructor;
+ }
+
+ /* Handle calls that pass values in multiple non-contiguous
+ locations. The Irix 6 ABI has examples of this. */
+ if (target == 0 || ! safe_from_p (target, exp, 1)
+ || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
+ {
+ /* No usable target: we would have to build the value in a stack
+ temporary, which is exactly what AVOID_TEMP_MEM forbids. */
+ if (avoid_temp_mem)
+ return NULL_RTX;
+
+ target
+ = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
+ | (TREE_READONLY (exp)
+ * TYPE_QUAL_CONST))),
+ 0, TREE_ADDRESSABLE (exp), 1);
+ }
+
+ /* Fill TARGET field by field from the constructor's elements. */
+ store_constructor (exp, target, 0, int_expr_size (exp));
+ return target;
+}
+
/* expand_expr: generate code for computing expression EXP.
An rtx for the computed value is returned. The value is never null.
if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
temp = validize_mem (decl_rtl);
- /* If DECL_RTL is memory, we are in the normal case and either
- the address is not valid or it is not a register and -fforce-addr
- is specified, get the address into a register. */
+ /* If DECL_RTL is memory, we are in the normal case and the
+ address is not valid, get the address into a register. */
else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
{
decl_rtl = use_anchored_address (decl_rtl);
if (modifier != EXPAND_CONST_ADDRESS
&& modifier != EXPAND_SUM
- && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
- || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
+ && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
temp = replace_equiv_address (decl_rtl,
copy_rtx (XEXP (decl_rtl, 0)));
}
{
tree tmp = NULL_TREE;
if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
- || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
+ || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
+ || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
+ || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
+ || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
+ || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
return const_vector_from_tree (exp);
if (GET_MODE_CLASS (mode) == MODE_INT)
{
return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
TYPE_MODE (TREE_TYPE (exp)));
+ case FIXED_CST:
+ return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
+ TYPE_MODE (TREE_TYPE (exp)));
+
case COMPLEX_CST:
/* Handle evaluating a complex constant in a CONCAT target. */
if (original_target && GET_CODE (original_target) == CONCAT)
if (modifier != EXPAND_CONST_ADDRESS
&& modifier != EXPAND_INITIALIZER
&& modifier != EXPAND_SUM
- && (! memory_address_p (mode, XEXP (temp, 0))
- || flag_force_addr))
+ && ! memory_address_p (mode, XEXP (temp, 0)))
return replace_equiv_address (temp,
copy_rtx (XEXP (temp, 0)));
return temp;
return const0_rtx;
}
- /* Try to avoid creating a temporary at all. This is possible
- if all of the initializer is zero.
- FIXME: try to handle all [0..255] initializers we can handle
- with memset. */
- else if (TREE_STATIC (exp)
- && !TREE_ADDRESSABLE (exp)
- && target != 0 && mode == BLKmode
- && all_zeros_p (exp))
- {
- clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
- return target;
- }
-
- /* All elts simple constants => refer to a constant in memory. But
- if this is a non-BLKmode mode, let it store a field at a time
- since that should make a CONST_INT or CONST_DOUBLE when we
- fold. Likewise, if we have a target we can use, it is best to
- store directly into the target unless the type is large enough
- that memcpy will be used. If we are making an initializer and
- all operands are constant, put it in memory as well.
-
- FIXME: Avoid trying to fill vector constructors piece-meal.
- Output them with output_constant_def below unless we're sure
- they're zeros. This should go away when vector initializers
- are treated like VECTOR_CST instead of arrays.
- */
- else if ((TREE_STATIC (exp)
- && ((mode == BLKmode
- && ! (target != 0 && safe_from_p (target, exp, 1)))
- || TREE_ADDRESSABLE (exp)
- || (host_integerp (TYPE_SIZE_UNIT (type), 1)
- && (! MOVE_BY_PIECES_P
- (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
- TYPE_ALIGN (type)))
- && ! mostly_zeros_p (exp))))
- || ((modifier == EXPAND_INITIALIZER
- || modifier == EXPAND_CONST_ADDRESS)
- && TREE_CONSTANT (exp)))
- {
- rtx constructor = expand_expr_constant (exp, 1, modifier);
-
- if (modifier != EXPAND_CONST_ADDRESS
- && modifier != EXPAND_INITIALIZER
- && modifier != EXPAND_SUM)
- constructor = validize_mem (constructor);
-
- return constructor;
- }
- else
- {
- /* Handle calls that pass values in multiple non-contiguous
- locations. The Irix 6 ABI has examples of this. */
- if (target == 0 || ! safe_from_p (target, exp, 1)
- || GET_CODE (target) == PARALLEL
- || modifier == EXPAND_STACK_PARM)
- target
- = assign_temp (build_qualified_type (type,
- (TYPE_QUALS (type)
- | (TREE_READONLY (exp)
- * TYPE_QUAL_CONST))),
- 0, TREE_ADDRESSABLE (exp), 1);
-
- store_constructor (exp, target, 0, int_expr_size (exp));
- return target;
- }
+ return expand_constructor (exp, target, modifier, false);
case MISALIGNED_INDIRECT_REF:
case ALIGN_INDIRECT_REF:
field, value)
if (tree_int_cst_equal (field, index))
{
- if (!TREE_SIDE_EFFECTS (value))
- return expand_expr (fold (value), target, tmode,
- modifier);
- break;
+ if (TREE_SIDE_EFFECTS (value))
+ break;
+
+ if (TREE_CODE (value) == CONSTRUCTOR)
+ {
+ /* If VALUE is a CONSTRUCTOR, this
+ optimization is only useful if
+ this doesn't store the CONSTRUCTOR
+ into memory. If it does, it is more
+ efficient to just load the data from
+ the array directly. */
+ rtx ret = expand_constructor (value, target,
+ modifier, true);
+ if (ret == NULL_RTX)
+ break;
+ }
+
+ return expand_expr (fold (value), target, tmode,
+ modifier);
}
}
else if(TREE_CODE (init) == STRING_CST)
return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
case CALL_EXPR:
- /* Check for a built-in function. */
- if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
- && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
- == FUNCTION_DECL)
- && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
- {
- if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
- == BUILT_IN_FRONTEND)
- return lang_hooks.expand_expr (exp, original_target,
- tmode, modifier,
- alt_rtl);
- else
- return expand_builtin (exp, target, subtarget, tmode, ignore);
- }
-
+ /* All valid uses of __builtin_va_arg_pack () are removed during
+ inlining. */
+ if (CALL_EXPR_VA_ARG_PACK (exp))
+ error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
+ {
+ tree fndecl = get_callee_fndecl (exp), attr;
+
+ if (fndecl
+ && (attr = lookup_attribute ("error",
+ DECL_ATTRIBUTES (fndecl))) != NULL)
+ error ("%Kcall to %qs declared with attribute error: %s",
+ exp, lang_hooks.decl_printable_name (fndecl, 1),
+ TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
+ if (fndecl
+ && (attr = lookup_attribute ("warning",
+ DECL_ATTRIBUTES (fndecl))) != NULL)
+ warning (0, "%Kcall to %qs declared with attribute warning: %s",
+ exp, lang_hooks.decl_printable_name (fndecl, 1),
+ TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
+
+ /* Check for a built-in function. */
+ if (fndecl && DECL_BUILT_IN (fndecl))
+ {
+ if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND)
+ return lang_hooks.expand_expr (exp, original_target,
+ tmode, modifier, alt_rtl);
+ else
+ return expand_builtin (exp, target, subtarget, tmode, ignore);
+ }
+ }
return expand_call (exp, target, ignore);
case NON_LVALUE_EXPR:
case PLUS_EXPR:
/* Check if this is a case for multiplication and addition. */
- if (TREE_CODE (type) == INTEGER_TYPE
+ if ((TREE_CODE (type) == INTEGER_TYPE
+ || TREE_CODE (type) == FIXED_POINT_TYPE)
&& TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
{
tree subsubexp0, subsubexp1;
- enum tree_code code0, code1;
+ enum tree_code code0, code1, this_code;
subexp0 = TREE_OPERAND (exp, 0);
subsubexp0 = TREE_OPERAND (subexp0, 0);
subsubexp1 = TREE_OPERAND (subexp0, 1);
code0 = TREE_CODE (subsubexp0);
code1 = TREE_CODE (subsubexp1);
- if (code0 == NOP_EXPR && code1 == NOP_EXPR
+ this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
+ : FIXED_CONVERT_EXPR;
+ if (code0 == this_code && code1 == this_code
&& (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
< TYPE_PRECISION (TREE_TYPE (subsubexp0)))
&& (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
enum machine_mode innermode = TYPE_MODE (op0type);
bool zextend_p = TYPE_UNSIGNED (op0type);
- this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
+ bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
+ if (sat_p == 0)
+ this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
+ else
+ this_optab = zextend_p ? usmadd_widen_optab
+ : ssmadd_widen_optab;
if (mode == GET_MODE_2XWIDER_MODE (innermode)
&& (optab_handler (this_optab, mode)->insn_code
!= CODE_FOR_nothing))
case MINUS_EXPR:
/* Check if this is a case for multiplication and subtraction. */
- if (TREE_CODE (type) == INTEGER_TYPE
+ if ((TREE_CODE (type) == INTEGER_TYPE
+ || TREE_CODE (type) == FIXED_POINT_TYPE)
&& TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
{
tree subsubexp0, subsubexp1;
- enum tree_code code0, code1;
+ enum tree_code code0, code1, this_code;
subexp1 = TREE_OPERAND (exp, 1);
subsubexp0 = TREE_OPERAND (subexp1, 0);
subsubexp1 = TREE_OPERAND (subexp1, 1);
code0 = TREE_CODE (subsubexp0);
code1 = TREE_CODE (subsubexp1);
- if (code0 == NOP_EXPR && code1 == NOP_EXPR
+ this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
+ : FIXED_CONVERT_EXPR;
+ if (code0 == this_code && code1 == this_code
&& (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
< TYPE_PRECISION (TREE_TYPE (subsubexp0)))
&& (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
enum machine_mode innermode = TYPE_MODE (op0type);
bool zextend_p = TYPE_UNSIGNED (op0type);
- this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
+ bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
+ if (sat_p == 0)
+ this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
+ else
+ this_optab = zextend_p ? usmsub_widen_optab
+ : ssmsub_widen_optab;
if (mode == GET_MODE_2XWIDER_MODE (innermode)
&& (optab_handler (this_optab, mode)->insn_code
!= CODE_FOR_nothing))
goto binop2;
case MULT_EXPR:
+ /* If this is a fixed-point operation, then we cannot use the code
+ below because "expand_mult" doesn't support sat/no-sat fixed-point
+ multiplications. */
+ if (ALL_FIXED_POINT_MODE_P (mode))
+ goto binop;
+
/* If first operand is constant, swap them.
Thus the following special case checks need only
check the second operand. */
case CEIL_DIV_EXPR:
case ROUND_DIV_EXPR:
case EXACT_DIV_EXPR:
+ /* If this is a fixed-point operation, then we cannot use the code
+ below because "expand_divmod" doesn't support sat/no-sat fixed-point
+ divisions. */
+ if (ALL_FIXED_POINT_MODE_P (mode))
+ goto binop;
+
if (modifier == EXPAND_STACK_PARM)
target = 0;
/* Possible optimization: compute the dividend with EXPAND_SUM
subtarget, &op0, &op1, 0);
return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
+ case FIXED_CONVERT_EXPR:
+ op0 = expand_normal (TREE_OPERAND (exp, 0));
+ if (target == 0 || modifier == EXPAND_STACK_PARM)
+ target = gen_reg_rtx (mode);
+
+ if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
+ && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
+ expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
+ else
+ expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
+ return target;
+
case FIX_TRUNC_EXPR:
op0 = expand_normal (TREE_OPERAND (exp, 0));
if (target == 0 || modifier == EXPAND_STACK_PARM)
case RSHIFT_EXPR:
case LROTATE_EXPR:
case RROTATE_EXPR:
+ /* If this is a fixed-point operation, then we cannot use the code
+ below because "expand_shift" doesn't support sat/no-sat fixed-point
+ shifts. */
+ if (ALL_FIXED_POINT_MODE_P (mode))
+ goto binop;
+
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
subtarget = 0;
if (modifier == EXPAND_STACK_PARM)
goto binop;
}
+ case OMP_ATOMIC_LOAD:
+ case OMP_ATOMIC_STORE:
+ /* OMP expansion is not run when there were errors, so these codes
+ can get here. */
+ gcc_assert (errorcount != 0);
+ return NULL_RTX;
+
default:
return lang_hooks.expand_expr (exp, original_target, tmode,
modifier, alt_rtl);
}
/* Put a constant second. */
- if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
+ if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
+ || TREE_CODE (arg0) == FIXED_CST)
{
tem = arg0; arg0 = arg1; arg1 = tem;
code = swap_condition (code);
index = copy_to_mode_reg (Pmode, index);
#endif
- /* If flag_force_addr were to affect this address
- it could interfere with the tricky assumptions made
- about addresses that contain label-refs,
- which may be valid only very near the tablejump itself. */
/* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
GET_MODE_SIZE, because this indicates how large insns are. The other
uses should all be Pmode, because they are addresses. This code
index = PIC_CASE_VECTOR_ADDRESS (index);
else
#endif
- index = memory_address_noforce (CASE_VECTOR_MODE, index);
+ index = memory_address (CASE_VECTOR_MODE, index);
temp = gen_reg_rtx (CASE_VECTOR_MODE);
vector = gen_const_mem (CASE_VECTOR_MODE, index);
convert_move (temp, vector, 0);
/* Doh! What's going on? */
if (class != MODE_VECTOR_INT
- && class != MODE_VECTOR_FLOAT)
+ && class != MODE_VECTOR_FLOAT
+ && class != MODE_VECTOR_FRACT
+ && class != MODE_VECTOR_UFRACT
+ && class != MODE_VECTOR_ACCUM
+ && class != MODE_VECTOR_UACCUM)
return 0;
/* Hardware support. Woo hoo! */
if (TREE_CODE (elt) == REAL_CST)
RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
inner);
+ else if (TREE_CODE (elt) == FIXED_CST)
+ RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
+ inner);
else
RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
TREE_INT_CST_HIGH (elt),