#include "tree.h"
#include "langhooks.h"
#include "c-tree.h"
+#include "c-lang.h"
#include "tm_p.h"
#include "flags.h"
#include "output.h"
/* The level of nesting inside "typeof". */
int in_typeof;
-struct c_label_context_se *label_context_stack_se;
-struct c_label_context_vm *label_context_stack_vm;
-
/* Nonzero if we've already printed a "missing braces around initializer"
message within this initializer. */
static int missing_braces_mentioned;
static tree lookup_field (tree, tree);
static int convert_arguments (tree, VEC(tree,gc) *, VEC(tree,gc) *, tree,
tree);
-static tree pointer_diff (tree, tree);
+static tree pointer_diff (location_t, tree, tree);
static tree convert_for_assignment (location_t, tree, tree, tree,
enum impl_conv, bool, tree, tree, int);
static tree valid_compound_expr_initializer (tree, tree);
return type;
}
+/* Return true if there is a named address space that is a superset of
+   both AS1 and AS2, i.e. one that encompasses both address spaces.  If
+   such a superset exists, store it in *COMMON and return true; otherwise
+   return false and leave *COMMON untouched.  An address space is
+   considered its own superset.  */
+
+static bool
+addr_space_superset (addr_space_t as1, addr_space_t as2, addr_space_t *common)
+{
+  if (as1 == as2)
+    {
+      *common = as1;
+      return true;
+    }
+  else if (targetm.addr_space.subset_p (as1, as2))
+    {
+      /* AS1 is contained in AS2, so AS2 encompasses both.  */
+      *common = as2;
+      return true;
+    }
+  else if (targetm.addr_space.subset_p (as2, as1))
+    {
+      /* AS2 is contained in AS1, so AS1 encompasses both.  */
+      *common = as1;
+      return true;
+    }
+  else
+    return false;
+}
+
/* Return a variant of TYPE which has all the type qualifiers of LIKE
as well as those of TYPE. */
static tree
qualify_type (tree type, tree like)
{
+  addr_space_t as_type = TYPE_ADDR_SPACE (type);
+  addr_space_t as_like = TYPE_ADDR_SPACE (like);
+  addr_space_t as_common;
+
+  /* If the two named address spaces are different, determine the common
+     superset address space.  If there isn't one, raise an error.  */
+  if (!addr_space_superset (as_type, as_like, &as_common))
+    {
+      /* No common superset exists: diagnose, then recover by falling
+         back to TYPE's own address space so a usable type is still
+         returned to the caller.  */
+      as_common = as_type;
+      error ("%qT and %qT are in disjoint named address spaces",
+	     type, like);
+    }
+
  return c_build_qualified_type (type,
-				 TYPE_QUALS (type) | TYPE_QUALS (like));
+				 TYPE_QUALS_NO_ADDR_SPACE (type)
+				 | TYPE_QUALS_NO_ADDR_SPACE (like)
+				 | ENCODE_QUAL_ADDR_SPACE (as_common));
}
/* Return true iff the given tree T is a variable length array. */
bool t1_complete, t2_complete;
/* We should not have any type quals on arrays at all. */
- gcc_assert (!TYPE_QUALS (t1) && !TYPE_QUALS (t2));
+ gcc_assert (!TYPE_QUALS_NO_ADDR_SPACE (t1)
+ && !TYPE_QUALS_NO_ADDR_SPACE (t2));
t1_complete = COMPLETE_TYPE_P (t1);
t2_complete = COMPLETE_TYPE_P (t2);
{
TREE_VALUE (n) = composite_type (TREE_TYPE (memb),
TREE_VALUE (p2));
- pedwarn (input_location, OPT_pedantic,
+ pedwarn (input_location, OPT_pedantic,
"function types not truly compatible in ISO C");
goto parm_done;
}
{
TREE_VALUE (n) = composite_type (TREE_TYPE (memb),
TREE_VALUE (p1));
- pedwarn (input_location, OPT_pedantic,
+ pedwarn (input_location, OPT_pedantic,
"function types not truly compatible in ISO C");
goto parm_done;
}
tree pointed_to_2, mv2;
tree target;
unsigned target_quals;
+ addr_space_t as1, as2, as_common;
+ int quals1, quals2;
/* Save time if the two types are the same. */
/* For function types do not merge const qualifiers, but drop them
if used inconsistently. The middle-end uses these to mark const
and noreturn functions. */
+ quals1 = TYPE_QUALS_NO_ADDR_SPACE (pointed_to_1);
+ quals2 = TYPE_QUALS_NO_ADDR_SPACE (pointed_to_2);
+
if (TREE_CODE (pointed_to_1) == FUNCTION_TYPE)
- target_quals = TYPE_QUALS (pointed_to_1) & TYPE_QUALS (pointed_to_2);
+ target_quals = (quals1 & quals2);
else
- target_quals = TYPE_QUALS (pointed_to_1) | TYPE_QUALS (pointed_to_2);
+ target_quals = (quals1 | quals2);
+
+  /* If the two named address spaces are different, determine the common
+     superset address space.  This is guaranteed to exist due to the
+     assumption that comp_target_types returned non-zero.  */
+ as1 = TYPE_ADDR_SPACE (pointed_to_1);
+ as2 = TYPE_ADDR_SPACE (pointed_to_2);
+ if (!addr_space_superset (as1, as2, &as_common))
+ gcc_unreachable ();
+
+ target_quals |= ENCODE_QUAL_ADDR_SPACE (as_common);
+
t1 = build_pointer_type (c_build_qualified_type (target, target_quals));
return build_type_attribute_variant (t1, attributes);
}
return attrval == 2 && val == 1 ? 2 : val;
}
-/* Return 1 if TTL and TTR are pointers to types that are equivalent,
- ignoring their qualifiers. */
+/* Return 1 if TTL and TTR are pointers to types that are equivalent, ignoring
+ their qualifiers, except for named address spaces. If the pointers point to
+ different named addresses, then we must determine if one address space is a
+ subset of the other. */
static int
comp_target_types (location_t location, tree ttl, tree ttr)
{
int val;
- tree mvl, mvr;
+ tree mvl = TREE_TYPE (ttl);
+ tree mvr = TREE_TYPE (ttr);
+ addr_space_t asl = TYPE_ADDR_SPACE (mvl);
+ addr_space_t asr = TYPE_ADDR_SPACE (mvr);
+ addr_space_t as_common;
bool enum_and_int_p;
+ /* Fail if pointers point to incompatible address spaces. */
+ if (!addr_space_superset (asl, asr, &as_common))
+ return 0;
+
/* Do not lose qualifiers on element types of array types that are
pointer targets by taking their TYPE_MAIN_VARIANT. */
- mvl = TREE_TYPE (ttl);
- mvr = TREE_TYPE (ttr);
if (TREE_CODE (mvl) != ARRAY_TYPE)
mvl = TYPE_MAIN_VARIANT (mvl);
if (TREE_CODE (mvr) != ARRAY_TYPE)
and wait (union wait *) to be compatible. */
if (TREE_CODE (a1) == UNION_TYPE
&& (TYPE_NAME (a1) == 0
- || TYPE_TRANSPARENT_UNION (a1))
+ || TYPE_TRANSPARENT_AGGR (a1))
&& TREE_CODE (TYPE_SIZE (a1)) == INTEGER_CST
&& tree_int_cst_equal (TYPE_SIZE (a1),
TYPE_SIZE (a2)))
}
else if (TREE_CODE (a2) == UNION_TYPE
&& (TYPE_NAME (a2) == 0
- || TYPE_TRANSPARENT_UNION (a2))
+ || TYPE_TRANSPARENT_AGGR (a2))
&& TREE_CODE (TYPE_SIZE (a2)) == INTEGER_CST
&& tree_int_cst_equal (TYPE_SIZE (a2),
TYPE_SIZE (a1)))
}
/* Convert in case a char is more than one unit. */
- return size_binop (CEIL_DIV_EXPR, TYPE_SIZE_UNIT (type),
- size_int (TYPE_PRECISION (char_type_node)
- / BITS_PER_UNIT));
+ return size_binop_loc (input_location, CEIL_DIV_EXPR, TYPE_SIZE_UNIT (type),
+ size_int (TYPE_PRECISION (char_type_node)
+ / BITS_PER_UNIT));
}
\f
/* Return either DECL or its known constant value (if it has one). */
LOC is the location to use for the generated tree. */
tree
-build_indirect_ref (location_t loc, tree ptr, const char *errorstring)
+build_indirect_ref (location_t loc, tree ptr, ref_operator errstring)
{
tree pointer = default_conversion (ptr);
tree type = TREE_TYPE (pointer);
error_at (loc, "dereferencing pointer to incomplete type");
return error_mark_node;
}
- if (VOID_TYPE_P (t) && skip_evaluation == 0)
+ if (VOID_TYPE_P (t) && c_inhibit_evaluation_warnings == 0)
warning_at (loc, 0, "dereferencing %<void *%> pointer");
/* We *must* set TREE_READONLY when dereferencing a pointer to const,
}
}
else if (TREE_CODE (pointer) != ERROR_MARK)
- error_at (loc,
- "invalid type argument of %qs (have %qT)", errorstring, type);
+ switch (errstring)
+ {
+ case RO_ARRAY_INDEXING:
+ error_at (loc,
+ "invalid type argument of array indexing (have %qT)",
+ type);
+ break;
+ case RO_UNARY_STAR:
+ error_at (loc,
+ "invalid type argument of unary %<*%> (have %qT)",
+ type);
+ break;
+ case RO_ARROW:
+ error_at (loc,
+ "invalid type argument of %<->%> (have %qT)",
+ type);
+ break;
+ default:
+ gcc_unreachable ();
+ }
return error_mark_node;
}
while (TREE_CODE (foo) == COMPONENT_REF)
foo = TREE_OPERAND (foo, 0);
if (TREE_CODE (foo) == VAR_DECL && C_DECL_REGISTER (foo))
- pedwarn (loc, OPT_pedantic,
+ pedwarn (loc, OPT_pedantic,
"ISO C forbids subscripting %<register%> array");
else if (!flag_isoc99 && !lvalue_p (foo))
- pedwarn (loc, OPT_pedantic,
+ pedwarn (loc, OPT_pedantic,
"ISO C90 forbids subscripting non-lvalue array");
}
return build_indirect_ref
(loc, build_binary_op (loc, PLUS_EXPR, ar, index, 0),
- "array indexing");
+ RO_ARRAY_INDEXING);
}
}
\f
warn_deprecated_use (ref, NULL_TREE);
/* Recursive call does not count as usage. */
- if (ref != current_function_decl)
+ if (ref != current_function_decl)
{
TREE_USED (ref) = 1;
}
tree tem;
int nargs;
tree *argarray;
-
+
/* Strip NON_LVALUE_EXPRs, etc., since we aren't using as an lvalue. */
STRIP_TYPE_NOPS (function);
if (VOID_TYPE_P (return_type))
{
if (TYPE_QUALS (return_type) != TYPE_UNQUALIFIED)
- pedwarn (input_location, 0,
+ pedwarn (loc, 0,
"function with qualified void return type called");
return trap;
}
build_constructor (return_type, 0),
false);
else
- rhs = fold_convert (return_type, integer_zero_node);
+ rhs = fold_convert_loc (loc, return_type, integer_zero_node);
return require_complete_type (build2 (COMPOUND_EXPR, return_type,
trap, rhs));
&& !strncmp (IDENTIFIER_POINTER (name), "__builtin_", 10))
{
if (require_constant_value)
- result = fold_build_call_array_initializer (TREE_TYPE (fntype),
- function, nargs, argarray);
+ result =
+ fold_build_call_array_initializer_loc (loc, TREE_TYPE (fntype),
+ function, nargs, argarray);
else
- result = fold_build_call_array (TREE_TYPE (fntype),
- function, nargs, argarray);
+ result = fold_build_call_array_loc (loc, TREE_TYPE (fntype),
+ function, nargs, argarray);
if (TREE_CODE (result) == NOP_EXPR
&& TREE_CODE (TREE_OPERAND (result, 0)) == INTEGER_CST)
STRIP_TYPE_NOPS (result);
}
else
- result = build_call_array (TREE_TYPE (fntype),
- function, nargs, argarray);
+ result = build_call_array_loc (loc, TREE_TYPE (fntype),
+ function, nargs, argarray);
if (VOID_TYPE_P (TREE_TYPE (result)))
{
if (TYPE_QUALS (TREE_TYPE (result)) != TYPE_UNQUALIFIED)
- pedwarn (input_location, 0,
+ pedwarn (loc, 0,
"function with qualified void return type called");
return result;
}
{
tree typetail, val;
unsigned int parmnum;
+ bool error_args = false;
const bool type_generic = fundecl
&& lookup_attribute ("type generic", TYPE_ATTRIBUTES(TREE_TYPE (fundecl)));
bool type_generic_remove_excess_precision = false;
if (type == void_type_node)
{
- error ("too many arguments to function %qE", function);
+ error_at (input_location,
+ "too many arguments to function %qE", function);
+ if (fundecl && !DECL_BUILT_IN (fundecl))
+ inform (DECL_SOURCE_LOCATION (fundecl), "declared here");
return parmnum;
}
parmval = default_conversion (val);
VEC_replace (tree, values, parmnum, parmval);
+ if (parmval == error_mark_node)
+ error_args = true;
if (typetail)
typetail = TREE_CHAIN (typetail);
if (typetail != 0 && TREE_VALUE (typetail) != void_type_node)
{
- error ("too few arguments to function %qE", function);
+ error_at (input_location,
+ "too few arguments to function %qE", function);
+ if (fundecl && !DECL_BUILT_IN (fundecl))
+ inform (DECL_SOURCE_LOCATION (fundecl), "declared here");
return -1;
}
- return parmnum;
+ return error_args ? -1 : (int) parmnum;
}
\f
/* This is the entry point used by the parser to build unary operators
warning_at (location, OPT_Waddress,
"comparison with string literal results in unspecified behavior");
- if (TREE_OVERFLOW_P (result.value)
- && !TREE_OVERFLOW_P (arg1.value)
+ if (TREE_OVERFLOW_P (result.value)
+ && !TREE_OVERFLOW_P (arg1.value)
&& !TREE_OVERFLOW_P (arg2.value))
overflow_warning (location, result.value);
The resulting tree has type int. */
static tree
-pointer_diff (tree op0, tree op1)
+pointer_diff (location_t loc, tree op0, tree op1)
{
tree restype = ptrdiff_type_node;
+ tree result, inttype;
+ addr_space_t as0 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (op0)));
+ addr_space_t as1 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (op1)));
tree target_type = TREE_TYPE (TREE_TYPE (op0));
tree con0, con1, lit0, lit1;
tree orig_op1 = op1;
+ /* If the operands point into different address spaces, we need to
+ explicitly convert them to pointers into the common address space
+ before we can subtract the numerical address values. */
+ if (as0 != as1)
+ {
+ addr_space_t as_common;
+ tree common_type;
+
+ /* Determine the common superset address space. This is guaranteed
+ to exist because the caller verified that comp_target_types
+ returned non-zero. */
+ if (!addr_space_superset (as0, as1, &as_common))
+ gcc_unreachable ();
+
+ common_type = common_pointer_type (TREE_TYPE (op0), TREE_TYPE (op1));
+ op0 = convert (common_type, op0);
+ op1 = convert (common_type, op1);
+ }
+
+ /* Determine integer type to perform computations in. This will usually
+ be the same as the result type (ptrdiff_t), but may need to be a wider
+ type if pointers for the address space are wider than ptrdiff_t. */
+ if (TYPE_PRECISION (restype) < TYPE_PRECISION (TREE_TYPE (op0)))
+ inttype = lang_hooks.types.type_for_size
+ (TYPE_PRECISION (TREE_TYPE (op0)), 0);
+ else
+ inttype = restype;
+
+
if (TREE_CODE (target_type) == VOID_TYPE)
- pedwarn (input_location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"pointer of type %<void *%> used in subtraction");
if (TREE_CODE (target_type) == FUNCTION_TYPE)
- pedwarn (input_location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"pointer to a function used in subtraction");
/* If the conversion to ptrdiff_type does anything like widening or
Do not do default conversions on the minus operator
in case restype is a short type. */
- op0 = build_binary_op (input_location,
- MINUS_EXPR, convert (restype, op0),
- convert (restype, op1), 0);
+ op0 = build_binary_op (loc,
+ MINUS_EXPR, convert (inttype, op0),
+ convert (inttype, op1), 0);
/* This generates an error if op1 is pointer to incomplete type. */
if (!COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (TREE_TYPE (orig_op1))))
- error ("arithmetic on pointer to an incomplete type");
+ error_at (loc, "arithmetic on pointer to an incomplete type");
/* This generates an error if op0 is pointer to incomplete type. */
op1 = c_size_in_bytes (target_type);
/* Divide by the size, in easiest possible way. */
- return fold_build2 (EXACT_DIV_EXPR, restype, op0, convert (restype, op1));
+ result = fold_build2_loc (loc, EXACT_DIV_EXPR, inttype,
+ op0, convert (inttype, op1));
+
+ /* Convert to final result type if necessary. */
+ return convert (restype, result);
}
\f
/* Construct and perhaps optimize a tree representation
}
else if (!noconvert)
arg = default_conversion (arg);
- arg = non_lvalue (arg);
+ arg = non_lvalue_loc (location, arg);
break;
case NEGATE_EXPR:
else if (typecode == COMPLEX_TYPE)
{
code = CONJ_EXPR;
- pedwarn (location, OPT_pedantic,
+ pedwarn (location, OPT_pedantic,
"ISO C does not support %<~%> for complex conjugation");
if (!noconvert)
arg = default_conversion (arg);
return error_mark_node;
}
arg = c_objc_common_truthvalue_conversion (location, arg);
- ret = invert_truthvalue (arg);
+ ret = invert_truthvalue_loc (location, arg);
/* If the TRUTH_NOT_EXPR has been folded, reset the location. */
if (EXPR_P (ret) && EXPR_HAS_LOCATION (ret))
location = EXPR_LOCATION (ret);
if (TREE_CODE (arg) == COMPLEX_CST)
ret = TREE_REALPART (arg);
else if (TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
- ret = fold_build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (arg)), arg);
+ ret = fold_build1_loc (location,
+ REALPART_EXPR, TREE_TYPE (TREE_TYPE (arg)), arg);
else
ret = arg;
if (eptype && TREE_CODE (eptype) == COMPLEX_TYPE)
if (TREE_CODE (arg) == COMPLEX_CST)
ret = TREE_IMAGPART (arg);
else if (TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
- ret = fold_build1 (IMAGPART_EXPR, TREE_TYPE (TREE_TYPE (arg)), arg);
+ ret = fold_build1_loc (location,
+ IMAGPART_EXPR, TREE_TYPE (TREE_TYPE (arg)), arg);
else
- ret = omit_one_operand (TREE_TYPE (arg), integer_zero_node, arg);
+ ret = omit_one_operand_loc (location, TREE_TYPE (arg),
+ integer_zero_node, arg);
if (eptype && TREE_CODE (eptype) == COMPLEX_TYPE)
eptype = TREE_TYPE (eptype);
goto return_build_unary_op;
{
tree real, imag;
- pedwarn (location, OPT_pedantic,
+ pedwarn (location, OPT_pedantic,
"ISO C does not support %<++%> and %<--%> on complex types");
arg = stabilize_reference (arg);
|| TREE_CODE (TREE_TYPE (argtype)) == VOID_TYPE)
{
if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
- pedwarn (location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"wrong type argument to increment");
else
- pedwarn (location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"wrong type argument to decrement");
}
inc = c_size_in_bytes (TREE_TYPE (argtype));
- inc = fold_convert (sizetype, inc);
+ inc = fold_convert_loc (location, sizetype, inc);
}
else if (FRACT_MODE_P (TYPE_MODE (argtype)))
{
{
/* Don't let this be an lvalue. */
if (lvalue_p (TREE_OPERAND (arg, 0)))
- return non_lvalue (TREE_OPERAND (arg, 0));
+ return non_lvalue_loc (location, TREE_OPERAND (arg, 0));
ret = TREE_OPERAND (arg, 0);
goto return_build_unary_op;
}
if (val && TREE_CODE (val) == INDIRECT_REF
&& TREE_CONSTANT (TREE_OPERAND (val, 0)))
{
- tree op0 = fold_convert (sizetype, fold_offsetof (arg, val)), op1;
+ tree op0 = fold_convert_loc (location, sizetype,
+ fold_offsetof (arg, val)), op1;
- op1 = fold_convert (argtype, TREE_OPERAND (val, 0));
- ret = fold_build2 (POINTER_PLUS_EXPR, argtype, op1, op0);
+ op1 = fold_convert_loc (location, argtype, TREE_OPERAND (val, 0));
+ ret = fold_build2_loc (location, POINTER_PLUS_EXPR, argtype, op1, op0);
goto return_build_unary_op;
}
argtype = TREE_TYPE (arg);
if (TREE_CODE (arg) == INTEGER_CST)
ret = (require_constant_value
- ? fold_build1_initializer (code, argtype, arg)
- : fold_build1 (code, argtype, arg));
+ ? fold_build1_initializer_loc (location, code, argtype, arg)
+ : fold_build1_loc (location, code, argtype, arg));
else
ret = build1 (code, argtype, arg);
return_build_unary_op:
}
}
\f
+/* Convert EXPR to TYPE, warning about conversion problems with
+   constants.  SEMANTIC_TYPE is the type this conversion would use
+   without excess precision.  If SEMANTIC_TYPE is NULL, this function
+   is equivalent to convert_and_check.  This function is a wrapper that
+   handles conversions that may be different from
+   the usual ones because of excess precision.  */
+
+static tree
+ep_convert_and_check (tree type, tree expr, tree semantic_type)
+{
+  /* Nothing to do if EXPR already has the target type.  */
+  if (TREE_TYPE (expr) == type)
+    return expr;
+
+  if (!semantic_type)
+    return convert_and_check (type, expr);
+
+  if (TREE_CODE (TREE_TYPE (expr)) == INTEGER_TYPE
+      && TREE_TYPE (expr) != semantic_type)
+    {
+      /* For integers, we need to check the real conversion, not
+	 the conversion to the excess precision type.  */
+      expr = convert_and_check (semantic_type, expr);
+    }
+  /* Result type is the excess precision type, which should be
+     large enough, so do not check.  */
+  return convert (type, expr);
+}
+
/* Build and return a conditional expression IFEXP ? OP1 : OP2. If
IFEXP_BCP then the condition is a call to __builtin_constant_p, and
if folded to an integer constant then the unselected half may
tree
build_conditional_expr (location_t colon_loc, tree ifexp, bool ifexp_bcp,
- tree op1, tree op2)
+ tree op1, tree op1_original_type, tree op2,
+ tree op2_original_type)
{
tree type1;
tree type2;
enum tree_code code1;
enum tree_code code2;
tree result_type = NULL;
- tree ep_result_type = NULL;
+ tree semantic_result_type = NULL;
tree orig_op1 = op1, orig_op2 = op2;
bool int_const, op1_int_operands, op2_int_operands, int_operands;
bool ifexp_int_operands;
&& (code2 == INTEGER_TYPE || code2 == REAL_TYPE
|| code2 == COMPLEX_TYPE))
{
- ep_result_type = c_common_type (type1, type2);
+ semantic_result_type = c_common_type (type1, type2);
if (TREE_CODE (op1) == EXCESS_PRECISION_EXPR)
{
op1 = TREE_OPERAND (op1, 0);
}
}
+ if (warn_cxx_compat)
+ {
+ tree t1 = op1_original_type ? op1_original_type : TREE_TYPE (orig_op1);
+ tree t2 = op2_original_type ? op2_original_type : TREE_TYPE (orig_op2);
+
+ if (TREE_CODE (t1) == ENUMERAL_TYPE
+ && TREE_CODE (t2) == ENUMERAL_TYPE
+ && TYPE_MAIN_VARIANT (t1) != TYPE_MAIN_VARIANT (t2))
+ warning_at (colon_loc, OPT_Wc___compat,
+ ("different enum types in conditional is "
+ "invalid in C++: %qT vs %qT"),
+ t1, t2);
+ }
+
/* Quickly detect the usual case where op1 and op2 have the same type
after promotion. */
if (TYPE_MAIN_VARIANT (type1) == TYPE_MAIN_VARIANT (type2))
and later code won't know it used to be different.
Do this check on the original types, so that explicit casts
will be considered, but default promotions won't. */
- if (!skip_evaluation)
+ if (c_inhibit_evaluation_warnings == 0)
{
int unsigned_op1 = TYPE_UNSIGNED (TREE_TYPE (orig_op1));
int unsigned_op2 = TYPE_UNSIGNED (TREE_TYPE (orig_op2));
that folding in this case even without
warn_sign_compare to avoid warning options
possibly affecting code generation. */
+ c_inhibit_evaluation_warnings
+ += (ifexp == truthvalue_false_node);
op1 = c_fully_fold (op1, require_constant_value,
&op1_maybe_const);
+ c_inhibit_evaluation_warnings
+ -= (ifexp == truthvalue_false_node);
+
+ c_inhibit_evaluation_warnings
+ += (ifexp == truthvalue_true_node);
op2 = c_fully_fold (op2, require_constant_value,
&op2_maybe_const);
+ c_inhibit_evaluation_warnings
+ -= (ifexp == truthvalue_true_node);
if (warn_sign_compare)
{
"conditional expression"));
}
if (!op1_maybe_const || TREE_CODE (op1) != INTEGER_CST)
- {
- op1 = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (op1),
- NULL, op1);
- C_MAYBE_CONST_EXPR_NON_CONST (op1) = !op1_maybe_const;
- }
+ op1 = c_wrap_maybe_const (op1, !op1_maybe_const);
if (!op2_maybe_const || TREE_CODE (op2) != INTEGER_CST)
- {
- op2 = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (op2),
- NULL, op2);
- C_MAYBE_CONST_EXPR_NON_CONST (op2) = !op2_maybe_const;
- }
+ op2 = c_wrap_maybe_const (op2, !op2_maybe_const);
}
}
}
}
else if (code1 == POINTER_TYPE && code2 == POINTER_TYPE)
{
+ addr_space_t as1 = TYPE_ADDR_SPACE (TREE_TYPE (type1));
+ addr_space_t as2 = TYPE_ADDR_SPACE (TREE_TYPE (type2));
+ addr_space_t as_common;
+
if (comp_target_types (colon_loc, type1, type2))
result_type = common_pointer_type (type1, type2);
else if (null_pointer_constant_p (orig_op1))
- result_type = qualify_type (type2, type1);
+ result_type = type2;
else if (null_pointer_constant_p (orig_op2))
- result_type = qualify_type (type1, type2);
+ result_type = type1;
+ else if (!addr_space_superset (as1, as2, &as_common))
+ {
+ error_at (colon_loc, "pointers to disjoint address spaces "
+ "used in conditional expression");
+ return error_mark_node;
+ }
else if (VOID_TYPE_P (TREE_TYPE (type1)))
{
if (TREE_CODE (TREE_TYPE (type2)) == FUNCTION_TYPE)
}
else
{
+ int qual = ENCODE_QUAL_ADDR_SPACE (as_common);
+
if (!objc_ok)
pedwarn (colon_loc, 0,
"pointer type mismatch in conditional expression");
- result_type = build_pointer_type (void_type_node);
+ result_type = build_pointer_type
+ (build_qualified_type (void_type_node, qual));
}
}
else if (code1 == POINTER_TYPE && code2 == INTEGER_TYPE)
/* Merge const and volatile flags of the incoming types. */
result_type
= build_type_variant (result_type,
- TREE_READONLY (op1) || TREE_READONLY (op2),
- TREE_THIS_VOLATILE (op1) || TREE_THIS_VOLATILE (op2));
+ TYPE_READONLY (type1) || TYPE_READONLY (type2),
+ TYPE_VOLATILE (type1) || TYPE_VOLATILE (type2));
- if (result_type != TREE_TYPE (op1))
- op1 = convert_and_check (result_type, op1);
- if (result_type != TREE_TYPE (op2))
- op2 = convert_and_check (result_type, op2);
+ op1 = ep_convert_and_check (result_type, op1, semantic_result_type);
+ op2 = ep_convert_and_check (result_type, op2, semantic_result_type);
if (ifexp_bcp && ifexp == truthvalue_true_node)
{
&& !TREE_OVERFLOW (orig_op2)));
}
if (int_const || (ifexp_bcp && TREE_CODE (ifexp) == INTEGER_CST))
- ret = fold_build3 (COND_EXPR, result_type, ifexp, op1, op2);
+ ret = fold_build3_loc (colon_loc, COND_EXPR, result_type, ifexp, op1, op2);
else
{
ret = build3 (COND_EXPR, result_type, ifexp, op1, op2);
if (int_operands)
ret = note_integer_operands (ret);
}
- if (ep_result_type)
- ret = build1 (EXCESS_PRECISION_EXPR, ep_result_type, ret);
+ if (semantic_result_type)
+ ret = build1 (EXCESS_PRECISION_EXPR, semantic_result_type, ret);
protected_set_expr_location (ret, colon_loc);
return ret;
&& CONVERT_EXPR_P (TREE_OPERAND (expr1, 1)))
; /* (void) a, (void) b, c */
else
- warning_at (loc, OPT_Wunused_value,
+ warning_at (loc, OPT_Wunused_value,
"left-hand operand of comma expression has no effect");
}
}
/* Issue -Wcast-qual warnings when appropriate. TYPE is the type to
which we are casting. OTYPE is the type of the expression being
cast. Both TYPE and OTYPE are pointer types. -Wcast-qual appeared
- on the command line. */
+ on the command line. Named address space qualifiers are not handled
+ here, because they result in different warnings. */
static void
handle_warn_cast_qual (tree type, tree otype)
taken away. */
if (TREE_CODE (in_otype) == FUNCTION_TYPE
&& TREE_CODE (in_type) == FUNCTION_TYPE)
- added |= (TYPE_QUALS (in_type) & ~TYPE_QUALS (in_otype));
+ added |= (TYPE_QUALS_NO_ADDR_SPACE (in_type)
+ & ~TYPE_QUALS_NO_ADDR_SPACE (in_otype));
else
- discarded |= (TYPE_QUALS (in_otype) & ~TYPE_QUALS (in_type));
+ discarded |= (TYPE_QUALS_NO_ADDR_SPACE (in_otype)
+ & ~TYPE_QUALS_NO_ADDR_SPACE (in_type));
}
while (TREE_CODE (in_type) == POINTER_TYPE
&& TREE_CODE (in_otype) == POINTER_TYPE);
while (TREE_CODE (in_type) == POINTER_TYPE);
}
-/* Build an expression representing a cast to type TYPE of expression EXPR.
+/* Build an expression representing a cast to type TYPE of expression EXPR.
LOC is the location of the cast-- typically the open paren of the cast. */
tree
{
if (TREE_CODE (type) == RECORD_TYPE
|| TREE_CODE (type) == UNION_TYPE)
- pedwarn (loc, OPT_pedantic,
+ pedwarn (loc, OPT_pedantic,
"ISO C forbids casting nonscalar to the same type");
}
else if (TREE_CODE (type) == UNION_TYPE)
if (field)
{
tree t;
+ bool maybe_const = true;
pedwarn (loc, OPT_pedantic, "ISO C forbids casts to union type");
- t = digest_init (loc, type,
- build_constructor_single (type, field, value),
+ t = c_fully_fold (value, false, &maybe_const);
+ t = build_constructor_single (type, field, t);
+ if (!maybe_const)
+ t = c_wrap_maybe_const (t, true);
+ t = digest_init (loc, type, t,
NULL_TREE, false, true, 0);
TREE_CONSTANT (t) = TREE_CONSTANT (value);
return t;
&& TREE_CODE (otype) == POINTER_TYPE)
handle_warn_cast_qual (type, otype);
+ /* Warn about conversions between pointers to disjoint
+ address spaces. */
+ if (TREE_CODE (type) == POINTER_TYPE
+ && TREE_CODE (otype) == POINTER_TYPE
+ && !null_pointer_constant_p (value))
+ {
+ addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
+ addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (otype));
+ addr_space_t as_common;
+
+ if (!addr_space_superset (as_to, as_from, &as_common))
+ {
+ if (ADDR_SPACE_GENERIC_P (as_from))
+ warning_at (loc, 0, "cast to %s address space pointer "
+ "from disjoint generic address space pointer",
+ c_addr_space_name (as_to));
+
+ else if (ADDR_SPACE_GENERIC_P (as_to))
+ warning_at (loc, 0, "cast to generic address space pointer "
+ "from disjoint %s address space pointer",
+ c_addr_space_name (as_from));
+
+ else
+ warning_at (loc, 0, "cast to %s address space pointer "
+ "from disjoint %s address space pointer",
+ c_addr_space_name (as_to),
+ c_addr_space_name (as_from));
+ }
+ }
+
/* Warn about possible alignment problems. */
if (STRICT_ALIGNMENT
&& TREE_CODE (type) == POINTER_TYPE
/* Don't let a cast be an lvalue. */
if (value == expr)
- value = non_lvalue (value);
+ value = non_lvalue_loc (loc, value);
/* Don't allow the results of casting to floating-point or complex
types be confused with actual constants, or casts involving
tree
build_modify_expr (location_t location, tree lhs, tree lhs_origtype,
- enum tree_code modifycode,
+ enum tree_code modifycode,
location_t rhs_loc, tree rhs, tree rhs_origtype)
{
tree result;
&& comptypes (type, rhstype))
return convert_and_check (type, rhs);
- /* Conversion to a transparent union from its member types.
+ /* Conversion to a transparent union or record from its member types.
This applies only to function arguments. */
- if (codel == UNION_TYPE && TYPE_TRANSPARENT_UNION (type)
+ if (((codel == UNION_TYPE || codel == RECORD_TYPE)
+ && TYPE_TRANSPARENT_AGGR (type))
&& errtype == ic_argpass)
{
tree memb, marginal_memb = NULL_TREE;
certain things, it is okay to use a const or volatile
function where an ordinary one is wanted, but not
vice-versa. */
- if (TYPE_QUALS (ttl) & ~TYPE_QUALS (ttr))
+ if (TYPE_QUALS_NO_ADDR_SPACE (ttl)
+ & ~TYPE_QUALS_NO_ADDR_SPACE (ttr))
WARN_FOR_ASSIGNMENT (location, 0,
G_("passing argument %d of %qE "
"makes qualified function "
G_("return makes qualified function "
"pointer from unqualified"));
}
- else if (TYPE_QUALS (ttr) & ~TYPE_QUALS (ttl))
+ else if (TYPE_QUALS_NO_ADDR_SPACE (ttr)
+ & ~TYPE_QUALS_NO_ADDR_SPACE (ttl))
WARN_FOR_ASSIGNMENT (location, 0,
G_("passing argument %d of %qE discards "
"qualifiers from pointer target type"),
}
if (!fundecl || !DECL_IN_SYSTEM_HEADER (fundecl))
- pedwarn (location, OPT_pedantic,
+ pedwarn (location, OPT_pedantic,
"ISO C prohibits argument conversion to union type");
- rhs = fold_convert (TREE_TYPE (memb), rhs);
+ rhs = fold_convert_loc (location, TREE_TYPE (memb), rhs);
return build_constructor_single (type, memb, rhs);
}
}
tree mvr = ttr;
bool is_opaque_pointer;
int target_cmp = 0; /* Cache comp_target_types () result. */
+ addr_space_t asl;
+ addr_space_t asr;
if (TREE_CODE (mvl) != ARRAY_TYPE)
mvl = TYPE_MAIN_VARIANT (mvl);
"request for implicit conversion "
"from %qT to %qT not permitted in C++", rhstype, type);
+ /* See if the pointers point to incompatible address spaces. */
+ asl = TYPE_ADDR_SPACE (ttl);
+ asr = TYPE_ADDR_SPACE (ttr);
+ if (!null_pointer_constant_p (rhs)
+ && asr != asl && !targetm.addr_space.subset_p (asr, asl))
+ {
+ switch (errtype)
+ {
+ case ic_argpass:
+ error_at (location, "passing argument %d of %qE from pointer to "
+ "non-enclosed address space", parmnum, rname);
+ break;
+ case ic_assign:
+ error_at (location, "assignment from pointer to "
+ "non-enclosed address space");
+ break;
+ case ic_init:
+ error_at (location, "initialization from pointer to "
+ "non-enclosed address space");
+ break;
+ case ic_return:
+ error_at (location, "return from pointer to "
+ "non-enclosed address space");
+ break;
+ default:
+ gcc_unreachable ();
+ }
+ return error_mark_node;
+ }
+
/* Check if the right-hand side has a format attribute but the
left-hand side doesn't. */
if (warn_missing_format_attribute
else if (TREE_CODE (ttr) != FUNCTION_TYPE
&& TREE_CODE (ttl) != FUNCTION_TYPE)
{
- if (TYPE_QUALS (ttr) & ~TYPE_QUALS (ttl))
+ if (TYPE_QUALS_NO_ADDR_SPACE (ttr)
+ & ~TYPE_QUALS_NO_ADDR_SPACE (ttl))
{
/* Types differing only by the presence of the 'volatile'
qualifier are acceptable if the 'volatile' has been added
that say the function will not do certain things,
it is okay to use a const or volatile function
where an ordinary one is wanted, but not vice-versa. */
- if (TYPE_QUALS (ttl) & ~TYPE_QUALS (ttr))
+ if (TYPE_QUALS_NO_ADDR_SPACE (ttl)
+ & ~TYPE_QUALS_NO_ADDR_SPACE (ttr))
WARN_FOR_ASSIGNMENT (location, 0,
G_("passing argument %d of %qE makes "
"qualified function pointer "
/* ANSI wants warnings about out-of-range constant initializers. */
STRIP_TYPE_NOPS (value);
- if (TREE_STATIC (decl))
+ if (TREE_STATIC (decl))
constant_expression_warning (value);
/* Check if we need to set array size from compound literal size. */
pedwarn (location, opt, "(near initialization for %qs)", ofwhat);
}
-/* Issue a warning for a bad initializer component.
+/* Issue a warning for a bad initializer component.
OPT is the OPT_W* value corresponding to the warning option that
controls this warning. MSGID identifies the message. The
&& TREE_CODE (type) == ARRAY_TYPE
&& TREE_CODE (expr.value) == STRING_CST
&& expr.original_code != STRING_CST)
- pedwarn_init (input_location, OPT_pedantic,
+ pedwarn_init (input_location, OPT_pedantic,
"array initialized from parenthesized string constant");
}
TREE_TYPE (inside_init) = type;
if (TYPE_DOMAIN (type) != 0
&& TYPE_SIZE (type) != 0
- && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
+ && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
+ {
+ unsigned HOST_WIDE_INT len = TREE_STRING_LENGTH (inside_init);
+
/* Subtract the size of a single (possibly wide) character
because it's ok to ignore the terminating null char
that is counted in the length of the constant. */
- && 0 > compare_tree_int (TYPE_SIZE_UNIT (type),
- TREE_STRING_LENGTH (inside_init)
- - (TYPE_PRECISION (typ1)
- / BITS_PER_UNIT)))
- pedwarn_init (init_loc, 0,
- "initializer-string for array of chars is too long");
+ if (0 > compare_tree_int (TYPE_SIZE_UNIT (type),
+ (len
+ - (TYPE_PRECISION (typ1)
+ / BITS_PER_UNIT))))
+ pedwarn_init (init_loc, 0,
+ ("initializer-string for array of chars "
+ "is too long"));
+ else if (warn_cxx_compat
+ && 0 > compare_tree_int (TYPE_SIZE_UNIT (type), len))
+ warning_at (init_loc, OPT_Wc___compat,
+ ("initializer-string for array of chars "
+ "is too long for C++"));
+ }
return inside_init;
}
/* Advance the variable that indicates sequential elements output. */
if (TREE_CODE (constructor_type) == ARRAY_TYPE)
constructor_unfilled_index
- = size_binop (PLUS_EXPR, constructor_unfilled_index,
- bitsize_one_node);
+ = size_binop_loc (input_location, PLUS_EXPR, constructor_unfilled_index,
+ bitsize_one_node);
else if (TREE_CODE (constructor_type) == RECORD_TYPE)
{
constructor_unfilled_fields
/* For a record, keep track of end position of last field. */
if (DECL_SIZE (constructor_fields))
constructor_bit_index
- = size_binop (PLUS_EXPR,
- bit_position (constructor_fields),
- DECL_SIZE (constructor_fields));
+ = size_binop_loc (input_location, PLUS_EXPR,
+ bit_position (constructor_fields),
+ DECL_SIZE (constructor_fields));
/* If the current field was the first one not yet written out,
it isn't now, so update. */
}
constructor_index
- = size_binop (PLUS_EXPR, constructor_index, bitsize_one_node);
+ = size_binop_loc (input_location, PLUS_EXPR,
+ constructor_index, bitsize_one_node);
if (!value.value)
/* If we are doing the bookkeeping for an element that was
}
constructor_index
- = size_binop (PLUS_EXPR, constructor_index, bitsize_one_node);
+ = size_binop_loc (input_location,
+ PLUS_EXPR, constructor_index, bitsize_one_node);
if (!value.value)
/* If we are doing the bookkeeping for an element that was
process_init_element (pop_init_level (1), true);
}
- p->index = size_binop (PLUS_EXPR, p->index, bitsize_one_node);
+ p->index = size_binop_loc (input_location,
+ PLUS_EXPR, p->index, bitsize_one_node);
if (tree_int_cst_equal (p->index, p->range_end) && !p->prev)
finish = 1;
are subtly different. We use a ASM_EXPR node to represent this. */
tree
build_asm_expr (location_t loc, tree string, tree outputs, tree inputs,
- tree clobbers, bool simple)
+ tree clobbers, tree labels, bool simple)
{
tree tail;
tree args;
noutputs = list_length (outputs);
oconstraints = (const char **) alloca (noutputs * sizeof (const char *));
- string = resolve_asm_operand_names (string, outputs, inputs);
+ string = resolve_asm_operand_names (string, outputs, inputs, labels);
/* Remove output conversions that change the type but not the mode. */
for (i = 0, tail = outputs; tail; ++i, tail = TREE_CHAIN (tail))
TREE_VALUE (tail) = input;
}
- args = build_stmt (loc, ASM_EXPR, string, outputs, inputs, clobbers);
+ /* ASMs with labels cannot have outputs. This should have been
+ enforced by the parser. */
+ gcc_assert (outputs == NULL || labels == NULL);
+
+ args = build_stmt (loc, ASM_EXPR, string, outputs, inputs, clobbers, labels);
/* asm statements without outputs, including simple ones, are treated
as volatile. */
tree
c_finish_goto_label (location_t loc, tree label)
{
- tree decl = lookup_label (label);
+ tree decl = lookup_label_for_goto (loc, label);
if (!decl)
return NULL_TREE;
-
- if (C_DECL_UNJUMPABLE_STMT_EXPR (decl))
- {
- error_at (loc, "jump into statement expression");
- return NULL_TREE;
- }
-
- if (C_DECL_UNJUMPABLE_VM (decl))
- {
- error_at (loc,
- "jump into scope of identifier with variably modified type");
- return NULL_TREE;
- }
-
- if (!C_DECL_UNDEFINABLE_STMT_EXPR (decl))
- {
- /* No jump from outside this statement expression context, so
- record that there is a jump from within this context. */
- struct c_label_list *nlist;
- nlist = XOBNEW (&parser_obstack, struct c_label_list);
- nlist->next = label_context_stack_se->labels_used;
- nlist->label = decl;
- label_context_stack_se->labels_used = nlist;
- }
-
- if (!C_DECL_UNDEFINABLE_VM (decl))
- {
- /* No jump from outside this context context of identifiers with
- variably modified type, so record that there is a jump from
- within this context. */
- struct c_label_list *nlist;
- nlist = XOBNEW (&parser_obstack, struct c_label_list);
- nlist->next = label_context_stack_vm->labels_used;
- nlist->label = decl;
- label_context_stack_vm->labels_used = nlist;
- }
-
TREE_USED (decl) = 1;
{
tree t = build1 (GOTO_EXPR, void_type_node, decl);
if ((warn_return_type || flag_isoc99)
&& valtype != 0 && TREE_CODE (valtype) != VOID_TYPE)
{
- pedwarn_c99 (loc, flag_isoc99 ? 0 : OPT_Wreturn_type,
+ pedwarn_c99 (loc, flag_isoc99 ? 0 : OPT_Wreturn_type,
"%<return%> with no value, in "
"function returning non-void");
no_warning = true;
{
current_function_returns_null = 1;
if (TREE_CODE (TREE_TYPE (retval)) != VOID_TYPE)
- pedwarn (loc, 0,
+ pedwarn (loc, 0,
"%<return%> with a value, in function returning void");
- else
+ else
pedwarn (loc, OPT_pedantic, "ISO C forbids "
"%<return%> with expression, in function returning void");
}
of the GNU case range extension. */
splay_tree cases;
- /* Number of nested statement expressions within this switch
- statement; if nonzero, case and default labels may not
- appear. */
- unsigned int blocked_stmt_expr;
-
- /* Scope of outermost declarations of identifiers with variably
- modified type within this switch statement; if nonzero, case and
- default labels may not appear. */
- unsigned int blocked_vm;
+ /* The bindings at the point of the switch. This is used for
+ warnings crossing decls when branching to a case label. */
+ struct c_spot_bindings *bindings;
/* The next node on the stack. */
struct c_switch *next;
SET_EXPR_LOCATION (cs->switch_expr, switch_loc);
cs->orig_type = orig_type;
cs->cases = splay_tree_new (case_compare, NULL, NULL);
- cs->blocked_stmt_expr = 0;
- cs->blocked_vm = 0;
+ cs->bindings = c_get_switch_bindings ();
cs->next = c_switch_stack;
c_switch_stack = cs;
"case label is not an integer constant expression");
}
- if (c_switch_stack && !c_switch_stack->blocked_stmt_expr
- && !c_switch_stack->blocked_vm)
- {
- label = c_add_case_label (loc, c_switch_stack->cases,
- SWITCH_COND (c_switch_stack->switch_expr),
- c_switch_stack->orig_type,
- low_value, high_value);
- if (label == error_mark_node)
- label = NULL_TREE;
- }
- else if (c_switch_stack && c_switch_stack->blocked_stmt_expr)
- {
- if (low_value)
- error_at (loc, "case label in statement expression not containing "
- "enclosing switch statement");
- else
- error_at (loc, "%<default%> label in statement expression not containing "
- "enclosing switch statement");
- }
- else if (c_switch_stack && c_switch_stack->blocked_vm)
+ if (c_switch_stack == NULL)
{
if (low_value)
- error_at (loc, "case label in scope of identifier with variably "
- "modified type not containing enclosing switch statement");
+ error_at (loc, "case label not within a switch statement");
else
- error_at (loc, "%<default%> label in scope of identifier with "
- "variably modified type not containing enclosing switch "
- "statement");
+ error_at (loc, "%<default%> label not within a switch statement");
+ return NULL_TREE;
}
- else if (low_value)
- error_at (loc, "case label not within a switch statement");
- else
- error_at (loc, "%<default%> label not within a switch statement");
+ if (c_check_switch_jump_warnings (c_switch_stack->bindings,
+ EXPR_LOCATION (c_switch_stack->switch_expr),
+ loc))
+ return NULL_TREE;
+
+ label = c_add_case_label (loc, c_switch_stack->cases,
+ SWITCH_COND (c_switch_stack->switch_expr),
+ c_switch_stack->orig_type,
+ low_value, high_value);
+ if (label == error_mark_node)
+ label = NULL_TREE;
return label;
}
SWITCH_BODY (cs->switch_expr) = body;
- /* We must not be within a statement expression nested in the switch
- at this point; we might, however, be within the scope of an
- identifier with variably modified type nested in the switch. */
- gcc_assert (!cs->blocked_stmt_expr);
-
/* Emit warnings as needed. */
switch_location = EXPR_LOCATION (cs->switch_expr);
c_do_switch_warnings (cs->cases, switch_location,
/* Pop the stack. */
c_switch_stack = cs->next;
splay_tree_delete (cs->cases);
+ c_release_switch_bindings (cs->bindings);
XDELETE (cs);
}
\f
found:
if (COND_EXPR_ELSE (inner_if))
- warning (OPT_Wparentheses,
- "%Hsuggest explicit braces to avoid ambiguous %<else%>",
- &if_locus);
+ warning_at (if_locus, OPT_Wparentheses,
+ "suggest explicit braces to avoid ambiguous %<else%>");
}
stmt = build3 (COND_EXPR, void_type_node, cond, then_block, else_block);
}
t = build_and_jump (&blab);
- exit = fold_build3 (COND_EXPR, void_type_node, cond, exit, t);
if (cond_is_first)
- SET_EXPR_LOCATION (exit, start_locus);
+ exit = fold_build3_loc (start_locus,
+ COND_EXPR, void_type_node, cond, exit, t);
else
- SET_EXPR_LOCATION (exit, input_location);
+ exit = fold_build3_loc (input_location,
+ COND_EXPR, void_type_node, cond, exit, t);
}
add_stmt (top);
c_begin_stmt_expr (void)
{
tree ret;
- struct c_label_context_se *nstack;
- struct c_label_list *glist;
/* We must force a BLOCK for this level so that, if it is not expanded
later, there is a way to turn off the entire subtree of blocks that
are contained in it. */
keep_next_level ();
ret = c_begin_compound_stmt (true);
- if (c_switch_stack)
- {
- c_switch_stack->blocked_stmt_expr++;
- gcc_assert (c_switch_stack->blocked_stmt_expr != 0);
- }
- for (glist = label_context_stack_se->labels_used;
- glist != NULL;
- glist = glist->next)
- {
- C_DECL_UNDEFINABLE_STMT_EXPR (glist->label) = 1;
- }
- nstack = XOBNEW (&parser_obstack, struct c_label_context_se);
- nstack->labels_def = NULL;
- nstack->labels_used = NULL;
- nstack->next = label_context_stack_se;
- label_context_stack_se = nstack;
+
+ c_bindings_start_stmt_expr (c_switch_stack == NULL
+ ? NULL
+ : c_switch_stack->bindings);
/* Mark the current statement list as belonging to a statement list. */
STATEMENT_LIST_STMT_EXPR (ret) = 1;
{
tree last, type, tmp, val;
tree *last_p;
- struct c_label_list *dlist, *glist, *glist_prev = NULL;
body = c_end_compound_stmt (loc, body, true);
- if (c_switch_stack)
- {
- gcc_assert (c_switch_stack->blocked_stmt_expr != 0);
- c_switch_stack->blocked_stmt_expr--;
- }
- /* It is no longer possible to jump to labels defined within this
- statement expression. */
- for (dlist = label_context_stack_se->labels_def;
- dlist != NULL;
- dlist = dlist->next)
- {
- C_DECL_UNJUMPABLE_STMT_EXPR (dlist->label) = 1;
- }
- /* It is again possible to define labels with a goto just outside
- this statement expression. */
- for (glist = label_context_stack_se->next->labels_used;
- glist != NULL;
- glist = glist->next)
- {
- C_DECL_UNDEFINABLE_STMT_EXPR (glist->label) = 0;
- glist_prev = glist;
- }
- if (glist_prev != NULL)
- glist_prev->next = label_context_stack_se->labels_used;
- else
- label_context_stack_se->next->labels_used
- = label_context_stack_se->labels_used;
- label_context_stack_se = label_context_stack_se->next;
+
+ c_bindings_end_stmt_expr (c_switch_stack == NULL
+ ? NULL
+ : c_switch_stack->bindings);
/* Locate the last statement in BODY. See c_end_compound_stmt
about always returning a BIND_EXPR. */
goto continue_searching;
}
+ if (last == error_mark_node)
+ return last;
+
/* In the case that the BIND_EXPR is not necessary, return the
expression out from inside it. */
- if (last == error_mark_node
- || (last == BIND_EXPR_BODY (body)
- && BIND_EXPR_VARS (body) == NULL))
+ if (last == BIND_EXPR_BODY (body)
+ && BIND_EXPR_VARS (body) == NULL)
{
/* Even if this looks constant, do not allow it in a constant
expression. */
- last = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (last), NULL_TREE, last);
- C_MAYBE_CONST_EXPR_NON_CONST (last) = 1;
+ last = c_wrap_maybe_const (last, true);
/* Do not warn if the return value of a statement expression is
unused. */
TREE_NO_WARNING (last) = 1;
val = TREE_OPERAND (val, 0);
*last_p = build2 (MODIFY_EXPR, void_type_node, tmp, val);
- SET_EXPR_LOCUS (*last_p, EXPR_LOCUS (last));
+ SET_EXPR_LOCATION (*last_p, EXPR_LOCATION (last));
{
tree t = build4 (TARGET_EXPR, type, tmp, body, NULL_TREE, NULL_TREE);
return t;
}
}
-
-/* Begin the scope of an identifier of variably modified type, scope
- number SCOPE. Jumping from outside this scope to inside it is not
- permitted. */
-
-void
-c_begin_vm_scope (unsigned int scope)
-{
- struct c_label_context_vm *nstack;
- struct c_label_list *glist;
-
- gcc_assert (scope > 0);
-
- /* At file_scope, we don't have to do any processing. */
- if (label_context_stack_vm == NULL)
- return;
-
- if (c_switch_stack && !c_switch_stack->blocked_vm)
- c_switch_stack->blocked_vm = scope;
- for (glist = label_context_stack_vm->labels_used;
- glist != NULL;
- glist = glist->next)
- {
- C_DECL_UNDEFINABLE_VM (glist->label) = 1;
- }
- nstack = XOBNEW (&parser_obstack, struct c_label_context_vm);
- nstack->labels_def = NULL;
- nstack->labels_used = NULL;
- nstack->scope = scope;
- nstack->next = label_context_stack_vm;
- label_context_stack_vm = nstack;
-}
-
-/* End a scope which may contain identifiers of variably modified
- type, scope number SCOPE. */
-
-void
-c_end_vm_scope (unsigned int scope)
-{
- if (label_context_stack_vm == NULL)
- return;
- if (c_switch_stack && c_switch_stack->blocked_vm == scope)
- c_switch_stack->blocked_vm = 0;
- /* We may have a number of nested scopes of identifiers with
- variably modified type, all at this depth. Pop each in turn. */
- while (label_context_stack_vm->scope == scope)
- {
- struct c_label_list *dlist, *glist, *glist_prev = NULL;
-
- /* It is no longer possible to jump to labels defined within this
- scope. */
- for (dlist = label_context_stack_vm->labels_def;
- dlist != NULL;
- dlist = dlist->next)
- {
- C_DECL_UNJUMPABLE_VM (dlist->label) = 1;
- }
- /* It is again possible to define labels with a goto just outside
- this scope. */
- for (glist = label_context_stack_vm->next->labels_used;
- glist != NULL;
- glist = glist->next)
- {
- C_DECL_UNDEFINABLE_VM (glist->label) = 0;
- glist_prev = glist;
- }
- if (glist_prev != NULL)
- glist_prev->next = label_context_stack_vm->labels_used;
- else
- label_context_stack_vm->next->labels_used
- = label_context_stack_vm->labels_used;
- label_context_stack_vm = label_context_stack_vm->next;
- }
-}
\f
/* Begin and end compound statements. This is as simple as pushing
and popping new statement lists from the tree. */
/* When the computation is in excess precision, the type of the
final EXCESS_PRECISION_EXPR. */
- tree real_result_type = NULL;
+ tree semantic_result_type = NULL;
/* Nonzero means operands have already been type-converted
in whatever way is necessary.
/* Handle the pointer + int case. */
if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
{
- ret = pointer_int_sum (PLUS_EXPR, op0, op1);
+ ret = pointer_int_sum (location, PLUS_EXPR, op0, op1);
goto return_build_binary_op;
}
else if (code1 == POINTER_TYPE && code0 == INTEGER_TYPE)
{
- ret = pointer_int_sum (PLUS_EXPR, op1, op0);
+ ret = pointer_int_sum (location, PLUS_EXPR, op1, op0);
goto return_build_binary_op;
}
else
if (code0 == POINTER_TYPE && code1 == POINTER_TYPE
&& comp_target_types (location, type0, type1))
{
- ret = pointer_diff (op0, op1);
+ ret = pointer_diff (location, op0, op1);
goto return_build_binary_op;
}
/* Handle pointer minus int. Just like pointer plus int. */
else if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
{
- ret = pointer_int_sum (MINUS_EXPR, op0, op1);
+ ret = pointer_int_sum (location, MINUS_EXPR, op0, op1);
goto return_build_binary_op;
}
else
if (tree_int_cst_sgn (op1) < 0)
{
int_const = false;
- if (skip_evaluation == 0)
+ if (c_inhibit_evaluation_warnings == 0)
warning (0, "right shift count is negative");
}
else
if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0)
{
int_const = false;
- if (skip_evaluation == 0)
+ if (c_inhibit_evaluation_warnings == 0)
warning (0, "right shift count >= width of type");
}
}
if (tree_int_cst_sgn (op1) < 0)
{
int_const = false;
- if (skip_evaluation == 0)
+ if (c_inhibit_evaluation_warnings == 0)
warning (0, "left shift count is negative");
}
else if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0)
{
int_const = false;
- if (skip_evaluation == 0)
+ if (c_inhibit_evaluation_warnings == 0)
warning (0, "left shift count >= width of type");
}
}
{
tree tt0 = TREE_TYPE (type0);
tree tt1 = TREE_TYPE (type1);
+ addr_space_t as0 = TYPE_ADDR_SPACE (tt0);
+ addr_space_t as1 = TYPE_ADDR_SPACE (tt1);
+ addr_space_t as_common = ADDR_SPACE_GENERIC;
+
/* Anything compares with void *. void * compares with anything.
Otherwise, the targets must be compatible
and both must be object or both incomplete. */
if (comp_target_types (location, type0, type1))
result_type = common_pointer_type (type0, type1);
+ else if (null_pointer_constant_p (orig_op0))
+ result_type = type1;
+ else if (null_pointer_constant_p (orig_op1))
+ result_type = type0;
+ else if (!addr_space_superset (as0, as1, &as_common))
+ {
+ error_at (location, "comparison of pointers to "
+ "disjoint address spaces");
+ return error_mark_node;
+ }
else if (VOID_TYPE_P (tt0))
{
- /* op0 != orig_op0 detects the case of something
- whose value is 0 but which isn't a valid null ptr const. */
- if (pedantic && !null_pointer_constant_p (orig_op0)
- && TREE_CODE (tt1) == FUNCTION_TYPE)
+ if (pedantic && TREE_CODE (tt1) == FUNCTION_TYPE)
pedwarn (location, OPT_pedantic, "ISO C forbids "
"comparison of %<void *%> with function pointer");
}
else if (VOID_TYPE_P (tt1))
{
- if (pedantic && !null_pointer_constant_p (orig_op1)
- && TREE_CODE (tt0) == FUNCTION_TYPE)
+ if (pedantic && TREE_CODE (tt0) == FUNCTION_TYPE)
pedwarn (location, OPT_pedantic, "ISO C forbids "
"comparison of %<void *%> with function pointer");
}
"comparison of distinct pointer types lacks a cast");
if (result_type == NULL_TREE)
- result_type = ptr_type_node;
+ {
+ int qual = ENCODE_QUAL_ADDR_SPACE (as_common);
+ result_type = build_pointer_type
+ (build_qualified_type (void_type_node, qual));
+ }
}
else if (code0 == POINTER_TYPE && null_pointer_constant_p (orig_op1))
{
short_compare = 1;
else if (code0 == POINTER_TYPE && code1 == POINTER_TYPE)
{
+ addr_space_t as0 = TYPE_ADDR_SPACE (TREE_TYPE (type0));
+ addr_space_t as1 = TYPE_ADDR_SPACE (TREE_TYPE (type1));
+ addr_space_t as_common;
+
if (comp_target_types (location, type0, type1))
{
result_type = common_pointer_type (type0, type1);
pedwarn (location, OPT_pedantic, "ISO C forbids "
"ordered comparisons of pointers to functions");
}
+ else if (!addr_space_superset (as0, as1, &as_common))
+ {
+ error_at (location, "comparison of pointers to "
+ "disjoint address spaces");
+ return error_mark_node;
+ }
else
{
- result_type = ptr_type_node;
+ int qual = ENCODE_QUAL_ADDR_SPACE (as_common);
+ result_type = build_pointer_type
+ (build_qualified_type (void_type_node, qual));
pedwarn (location, 0,
"comparison of distinct pointer types lacks a cast");
}
{
result_type = type0;
if (pedantic)
- pedwarn (location, OPT_pedantic,
+ pedwarn (location, OPT_pedantic,
"ordered comparison of pointer with integer zero");
else if (extra_warnings)
warning_at (location, OPT_Wextra,
else if (code1 == POINTER_TYPE && null_pointer_constant_p (orig_op0))
{
result_type = type1;
- pedwarn (location, OPT_pedantic,
+ pedwarn (location, OPT_pedantic,
"ordered comparison of pointer with integer zero");
}
else if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
if (type0 != orig_type0 || type1 != orig_type1)
{
gcc_assert (may_need_excess_precision && common);
- real_result_type = c_common_type (orig_type0, orig_type1);
+ semantic_result_type = c_common_type (orig_type0, orig_type1);
}
if (first_complex)
{
if (shorten && none_complex)
{
final_type = result_type;
- result_type = shorten_binary_op (result_type, op0, op1,
+ result_type = shorten_binary_op (result_type, op0, op1,
shorten == -1);
}
unsigned_arg = TYPE_UNSIGNED (TREE_TYPE (op0));
if (TYPE_PRECISION (TREE_TYPE (arg0)) < TYPE_PRECISION (result_type)
+ && tree_int_cst_sgn (op1) > 0
/* We can shorten only if the shift count is less than the
number of bits in the smaller type size. */
&& compare_tree_int (op1, TYPE_PRECISION (TREE_TYPE (arg0))) < 0
converted = 1;
resultcode = xresultcode;
- if (!skip_evaluation)
+ if (c_inhibit_evaluation_warnings == 0)
{
bool op0_maybe_const = true;
bool op1_maybe_const = true;
build_conditional_expr. This requires the
"original" values to be folded, not just op0 and
op1. */
+ c_inhibit_evaluation_warnings++;
op0 = c_fully_fold (op0, require_constant_value,
&op0_maybe_const);
op1 = c_fully_fold (op1, require_constant_value,
&op1_maybe_const);
+ c_inhibit_evaluation_warnings--;
orig_op0_folded = c_fully_fold (orig_op0,
require_constant_value,
NULL);
if (!in_late_binary_op)
{
if (!op0_maybe_const || TREE_CODE (op0) != INTEGER_CST)
- {
- op0 = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (op0),
- NULL, op0);
- C_MAYBE_CONST_EXPR_NON_CONST (op0) = !op0_maybe_const;
- }
+ op0 = c_wrap_maybe_const (op0, !op0_maybe_const);
if (!op1_maybe_const || TREE_CODE (op1) != INTEGER_CST)
- {
- op1 = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (op1),
- NULL, op1);
- C_MAYBE_CONST_EXPR_NON_CONST (op1) = !op1_maybe_const;
- }
+ op1 = c_wrap_maybe_const (op1, !op1_maybe_const);
}
}
}
return error_mark_node;
}
- if (!converted)
- {
- if (TREE_TYPE (op0) != result_type)
- op0 = convert_and_check (result_type, op0);
- if (TREE_TYPE (op1) != result_type)
- op1 = convert_and_check (result_type, op1);
-
- /* This can happen if one operand has a vector type, and the other
- has a different type. */
- if (TREE_CODE (op0) == ERROR_MARK || TREE_CODE (op1) == ERROR_MARK)
- return error_mark_node;
- }
-
if (build_type == NULL_TREE)
{
build_type = result_type;
if (type0 != orig_type0 || type1 != orig_type1)
{
gcc_assert (may_need_excess_precision && common);
- real_result_type = c_common_type (orig_type0, orig_type1);
+ semantic_result_type = c_common_type (orig_type0, orig_type1);
}
}
+ if (!converted)
+ {
+ op0 = ep_convert_and_check (result_type, op0, semantic_result_type);
+ op1 = ep_convert_and_check (result_type, op1, semantic_result_type);
+
+ /* This can happen if one operand has a vector type, and the other
+ has a different type. */
+ if (TREE_CODE (op0) == ERROR_MARK || TREE_CODE (op1) == ERROR_MARK)
+ return error_mark_node;
+ }
+
/* Treat expressions in initializers specially as they can't trap. */
if (int_const_or_overflow)
ret = (require_constant_value
- ? fold_build2_initializer (resultcode, build_type, op0, op1)
- : fold_build2 (resultcode, build_type, op0, op1));
+ ? fold_build2_initializer_loc (location, resultcode, build_type,
+ op0, op1)
+ : fold_build2_loc (location, resultcode, build_type, op0, op1));
else
ret = build2 (resultcode, build_type, op0, op1);
if (final_type != 0)
else if (TREE_CODE (ret) != INTEGER_CST && int_operands
&& !in_late_binary_op)
ret = note_integer_operands (ret);
- if (real_result_type)
- ret = build1 (EXCESS_PRECISION_EXPR, real_result_type, ret);
+ if (semantic_result_type)
+ ret = build1 (EXCESS_PRECISION_EXPR, semantic_result_type, ret);
protected_set_expr_location (ret, location);
return ret;
}
else if (TYPE_CANONICAL (element_type) != element_type
|| (domain && TYPE_CANONICAL (domain) != domain))
{
- tree unqualified_canon
+ tree unqualified_canon
= build_array_type (TYPE_CANONICAL (element_type),
- domain? TYPE_CANONICAL (domain)
+ domain? TYPE_CANONICAL (domain)
: NULL_TREE);
- TYPE_CANONICAL (t)
+ TYPE_CANONICAL (t)
= c_build_qualified_type (unqualified_canon, type_quals);
}
else