static int lvalue_or_else (tree, enum lvalue_use);
static int lvalue_p (tree);
static void record_maybe_used_decl (tree);
-\f
+static int comptypes_internal (tree, tree);
+\f/* This is a cache to hold if two types are compatible or not. */
+
+struct tagged_tu_seen_cache {
+ const struct tagged_tu_seen_cache * next;
+ tree t1;
+ tree t2;
+ /* The return value of tagged_types_tu_compatible_p if we had seen
+ these two types already. */
+ int val;
+};
+
+static const struct tagged_tu_seen_cache * tagged_tu_seen_base;
+static void free_all_tagged_tu_seen_up_to (const struct tagged_tu_seen_cache *);
+
/* Do `exp = require_complete_type (exp);' to make sure exp
does not have an incomplete type. (That includes void types.) */
return c_common_type (t1, t2);
}
-\f
+
/* Return 1 if TYPE1 and TYPE2 are compatible types for assignment
or various other operations. Return 2 if they are compatible
but a warning may be needed if you use them together. */
int
comptypes (tree type1, tree type2)
{
+ const struct tagged_tu_seen_cache * tagged_tu_seen_base1 = tagged_tu_seen_base;
+ int val;
+
+ val = comptypes_internal (type1, type2);
+ /* Free every cache entry added during this comparison, restoring the
+ cache to the state saved above; entries are only valid within a
+ single top-level comptypes call. */
+ free_all_tagged_tu_seen_up_to (tagged_tu_seen_base1);
+
+ return val;
+}\f
+/* Return 1 if TYPE1 and TYPE2 are compatible types for assignment
+ or various other operations. Return 2 if they are compatible
+ but a warning may be needed if you use them together. This
+ differs from comptypes, in that we don't free the seen types. */
+
+static int
+comptypes_internal (tree type1, tree type2)
+{
tree t1 = type1;
tree t2 = type2;
int attrval, val;
|| TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
break;
val = (TREE_TYPE (t1) == TREE_TYPE (t2)
- ? 1 : comptypes (TREE_TYPE (t1), TREE_TYPE (t2)));
+ ? 1 : comptypes_internal (TREE_TYPE (t1), TREE_TYPE (t2)));
break;
case FUNCTION_TYPE:
/* Target types must match incl. qualifiers. */
if (TREE_TYPE (t1) != TREE_TYPE (t2)
- && 0 == (val = comptypes (TREE_TYPE (t1), TREE_TYPE (t2))))
+ && 0 == (val = comptypes_internal (TREE_TYPE (t1), TREE_TYPE (t2))))
return 0;
/* Sizes must match unless one is missing or variable. */
case RECORD_TYPE:
case UNION_TYPE:
if (val != 1 && !same_translation_unit_p (t1, t2))
- val = tagged_types_tu_compatible_p (t1, t2);
+ {
+ if (attrval != 2)
+ return tagged_types_tu_compatible_p (t1, t2);
+ val = tagged_types_tu_compatible_p (t1, t2);
+ }
break;
case VECTOR_TYPE:
val = TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
- && comptypes (TREE_TYPE (t1), TREE_TYPE (t2));
+ && comptypes_internal (TREE_TYPE (t1), TREE_TYPE (t2));
break;
default:
return t1 == t2;
}
-/* The C standard says that two structures in different translation
- units are compatible with each other only if the types of their
- fields are compatible (among other things). So, consider two copies
- of this structure: */
+/* Allocate a cache entry for the pair of seen types T1 and T2, assuming
+ that they are compatible. */
-struct tagged_tu_seen {
- const struct tagged_tu_seen * next;
- tree t1;
- tree t2;
-};
+static struct tagged_tu_seen_cache *
+alloc_tagged_tu_seen_cache (tree t1, tree t2)
+{
+ struct tagged_tu_seen_cache *tu = xmalloc (sizeof (struct tagged_tu_seen_cache));
+ tu->next = tagged_tu_seen_base;
+ tu->t1 = t1;
+ tu->t2 = t2;
+
+ tagged_tu_seen_base = tu;
+
+ /* The C standard says that two structures in different translation
+ units are compatible with each other only if the types of their
+ fields are compatible (among other things). We assume that they
+ are compatible until proven otherwise when building the cache.
+ An example where this can occur is:
+ struct a
+ {
+ struct a *next;
+ };
+ If we are comparing this against a similar struct in another TU,
+ and did not assume they were compatible, we end up with an infinite
+ loop. */
+ tu->val = 1;
+ return tu;
+}
-/* Can they be compatible with each other? We choose to break the
- recursion by allowing those types to be compatible. */
+/* Free the seen types until we get to TU_TIL. */
-static const struct tagged_tu_seen * tagged_tu_seen_base;
+static void
+free_all_tagged_tu_seen_up_to (const struct tagged_tu_seen_cache *tu_til)
+{
+ const struct tagged_tu_seen_cache *tu = tagged_tu_seen_base;
+ while (tu != tu_til)
+ {
+ /* Cast away const: these entries were heap-allocated by
+ alloc_tagged_tu_seen_cache and are owned here. */
+ struct tagged_tu_seen_cache *tu1 = (struct tagged_tu_seen_cache*)tu;
+ tu = tu1->next;
+ free (tu1);
+ }
+ tagged_tu_seen_base = tu_til;
+}
/* Return 1 if two 'struct', 'union', or 'enum' types T1 and T2 are
compatible. If the two types are not the same (which has been
return 1;
{
- const struct tagged_tu_seen * tts_i;
+ const struct tagged_tu_seen_cache * tts_i;
for (tts_i = tagged_tu_seen_base; tts_i != NULL; tts_i = tts_i->next)
if (tts_i->t1 == t1 && tts_i->t2 == t2)
- return 1;
+ return tts_i->val;
}
switch (TREE_CODE (t1))
{
case ENUMERAL_TYPE:
{
-
+ struct tagged_tu_seen_cache *tu = alloc_tagged_tu_seen_cache (t1, t2);
/* Speed up the case where the type values are in the same order. */
tree tv1 = TYPE_VALUES (t1);
tree tv2 = TYPE_VALUES (t2);
if (tv1 == tv2)
- return 1;
+ {
+ return 1;
+ }
for (;tv1 && tv2; tv1 = TREE_CHAIN (tv1), tv2 = TREE_CHAIN (tv2))
{
if (TREE_PURPOSE (tv1) != TREE_PURPOSE (tv2))
break;
if (simple_cst_equal (TREE_VALUE (tv1), TREE_VALUE (tv2)) != 1)
- return 0;
+ {
+ tu->val = 0;
+ return 0;
+ }
}
if (tv1 == NULL_TREE && tv2 == NULL_TREE)
- return 1;
+ {
+ return 1;
+ }
if (tv1 == NULL_TREE || tv2 == NULL_TREE)
- return 0;
+ {
+ tu->val = 0;
+ return 0;
+ }
if (list_length (TYPE_VALUES (t1)) != list_length (TYPE_VALUES (t2)))
- return 0;
+ {
+ tu->val = 0;
+ return 0;
+ }
for (s1 = TYPE_VALUES (t1); s1; s1 = TREE_CHAIN (s1))
{
s2 = purpose_member (TREE_PURPOSE (s1), TYPE_VALUES (t2));
if (s2 == NULL
|| simple_cst_equal (TREE_VALUE (s1), TREE_VALUE (s2)) != 1)
- return 0;
+ {
+ tu->val = 0;
+ return 0;
+ }
}
return 1;
}
case UNION_TYPE:
{
+ struct tagged_tu_seen_cache *tu = alloc_tagged_tu_seen_cache (t1, t2);
if (list_length (TYPE_FIELDS (t1)) != list_length (TYPE_FIELDS (t2)))
- return 0;
+ {
+ tu->val = 0;
+ return 0;
+ }
+
+ /* Speed up the common case where the fields are in the same order. */
+ for (s1 = TYPE_FIELDS (t1), s2 = TYPE_FIELDS (t2); s1 && s2;
+ s1 = TREE_CHAIN (s1), s2 = TREE_CHAIN (s2))
+ {
+ int result;
+
+
+ if (DECL_NAME (s1) == NULL
+ || DECL_NAME (s1) != DECL_NAME (s2))
+ break;
+ result = comptypes_internal (TREE_TYPE (s1), TREE_TYPE (s2));
+ if (result == 0)
+ {
+ tu->val = 0;
+ return 0;
+ }
+ if (result == 2)
+ needs_warning = true;
+
+ if (TREE_CODE (s1) == FIELD_DECL
+ && simple_cst_equal (DECL_FIELD_BIT_OFFSET (s1),
+ DECL_FIELD_BIT_OFFSET (s2)) != 1)
+ {
+ tu->val = 0;
+ return 0;
+ }
+ }
+ if (!s1 && !s2)
+ {
+ tu->val = needs_warning ? 2 : 1;
+ return tu->val;
+ }
for (s1 = TYPE_FIELDS (t1); s1; s1 = TREE_CHAIN (s1))
{
bool ok = false;
- struct tagged_tu_seen tts;
-
- tts.next = tagged_tu_seen_base;
- tts.t1 = t1;
- tts.t2 = t2;
- tagged_tu_seen_base = &tts;
if (DECL_NAME (s1) != NULL)
for (s2 = TYPE_FIELDS (t2); s2; s2 = TREE_CHAIN (s2))
if (DECL_NAME (s1) == DECL_NAME (s2))
{
int result;
- result = comptypes (TREE_TYPE (s1), TREE_TYPE (s2));
+ result = comptypes_internal (TREE_TYPE (s1), TREE_TYPE (s2));
if (result == 0)
- break;
+ {
+ tu->val = 0;
+ return 0;
+ }
if (result == 2)
needs_warning = true;
ok = true;
break;
}
- tagged_tu_seen_base = tts.next;
if (!ok)
- return 0;
+ {
+ tu->val = 0;
+ return 0;
+ }
}
- return needs_warning ? 2 : 1;
+ /* Valid comptypes results are 0, 1, or 2 only; callers test val == 1
+ (fully compatible) vs 2 (compatible with warning). Cache and return
+ 1, matching the fast path above, not an out-of-range value. */
+ tu->val = needs_warning ? 2 : 1;
+ return tu->val;
}
case RECORD_TYPE:
{
- struct tagged_tu_seen tts;
-
- tts.next = tagged_tu_seen_base;
- tts.t1 = t1;
- tts.t2 = t2;
- tagged_tu_seen_base = &tts;
+ struct tagged_tu_seen_cache *tu = alloc_tagged_tu_seen_cache (t1, t2);
for (s1 = TYPE_FIELDS (t1), s2 = TYPE_FIELDS (t2);
s1 && s2;
if (TREE_CODE (s1) != TREE_CODE (s2)
|| DECL_NAME (s1) != DECL_NAME (s2))
break;
- result = comptypes (TREE_TYPE (s1), TREE_TYPE (s2));
+ result = comptypes_internal (TREE_TYPE (s1), TREE_TYPE (s2));
if (result == 0)
break;
if (result == 2)
DECL_FIELD_BIT_OFFSET (s2)) != 1)
break;
}
- tagged_tu_seen_base = tts.next;
if (s1 && s2)
- return 0;
- return needs_warning ? 2 : 1;
+ tu->val = 0;
+ else
+ tu->val = needs_warning ? 2 : 1;
+ return tu->val;
}
default:
if (TYPE_VOLATILE (ret2))
ret2 = build_qualified_type (TYPE_MAIN_VARIANT (ret2),
TYPE_QUALS (ret2) & ~TYPE_QUAL_VOLATILE);
- val = comptypes (ret1, ret2);
+ val = comptypes_internal (ret1, ret2);
if (val == 0)
return 0;
else if (TREE_CODE (a1) == ERROR_MARK
|| TREE_CODE (a2) == ERROR_MARK)
;
- else if (!(newval = comptypes (mv1, mv2)))
+ else if (!(newval = comptypes_internal (mv1, mv2)))
{
/* Allow wait (union {union wait *u; int *i} *)
and wait (union wait *) to be compatible. */
if (mv3 && mv3 != error_mark_node
&& TREE_CODE (mv3) != ARRAY_TYPE)
mv3 = TYPE_MAIN_VARIANT (mv3);
- if (comptypes (mv3, mv2))
+ if (comptypes_internal (mv3, mv2))
break;
}
if (memb == 0)
if (mv3 && mv3 != error_mark_node
&& TREE_CODE (mv3) != ARRAY_TYPE)
mv3 = TYPE_MAIN_VARIANT (mv3);
- if (comptypes (mv3, mv1))
+ if (comptypes_internal (mv3, mv1))
break;
}
if (memb == 0)
check_function_arguments (TYPE_ATTRIBUTES (fntype), coerced_params,
TYPE_ARG_TYPES (fntype));
- result = build3 (CALL_EXPR, TREE_TYPE (fntype),
- function, coerced_params, NULL_TREE);
- TREE_SIDE_EFFECTS (result) = 1;
-
if (require_constant_value)
{
- result = fold_initializer (result);
+ result = fold_build3_initializer (CALL_EXPR, TREE_TYPE (fntype),
+ function, coerced_params, NULL_TREE);
if (TREE_CONSTANT (result)
&& (name == NULL_TREE
pedwarn_init ("initializer element is not constant");
}
else
- result = fold (result);
+ result = fold_build3 (CALL_EXPR, TREE_TYPE (fntype),
+ function, coerced_params, NULL_TREE);
if (VOID_TYPE_P (TREE_TYPE (result)))
return result;
if (argtype == 0)
argtype = TREE_TYPE (arg);
- val = build1 (code, argtype, arg);
- return require_constant_value ? fold_initializer (val) : fold (val);
+ return require_constant_value ? fold_build1_initializer (code, argtype, arg)
+ : fold_build1 (code, argtype, arg);
}
/* Return nonzero if REF is an lvalue valid for this language.
static void
readonly_error (tree arg, enum lvalue_use use)
{
- gcc_assert (use == lv_assign || use == lv_increment || use == lv_decrement);
+ gcc_assert (use == lv_assign || use == lv_increment || use == lv_decrement
+ || use == lv_asm);
/* Using this macro rather than (for example) arrays of messages
ensures that all the format strings are checked at compile
time. */
-#define READONLY_MSG(A, I, D) (use == lv_assign \
- ? (A) \
- : (use == lv_increment ? (I) : (D)))
+#define READONLY_MSG(A, I, D, AS) (use == lv_assign ? (A) \
+ : (use == lv_increment ? (I) \
+ : (use == lv_decrement ? (D) : (AS))))
if (TREE_CODE (arg) == COMPONENT_REF)
{
if (TYPE_READONLY (TREE_TYPE (TREE_OPERAND (arg, 0))))
else
error (READONLY_MSG (G_("assignment of read-only member %qD"),
G_("increment of read-only member %qD"),
- G_("decrement of read-only member %qD")),
+ G_("decrement of read-only member %qD"),
+ G_("read-only member %qD used as %<asm%> output")),
TREE_OPERAND (arg, 1));
}
else if (TREE_CODE (arg) == VAR_DECL)
error (READONLY_MSG (G_("assignment of read-only variable %qD"),
G_("increment of read-only variable %qD"),
- G_("decrement of read-only variable %qD")),
+ G_("decrement of read-only variable %qD"),
+ G_("read-only variable %qD used as %<asm%> output")),
arg);
else
error (READONLY_MSG (G_("assignment of read-only location"),
G_("increment of read-only location"),
- G_("decrement of read-only location")));
+ G_("decrement of read-only location"),
+ G_("read-only location used as %<asm%> output")));
}
static int designator_depth;
/* Nonzero if there were diagnosed errors in this designator list. */
-static int designator_errorneous;
+static int designator_erroneous;
\f
/* This stack has a level for each implicit or explicit level of
constructor_incremental = 1;
constructor_designated = 0;
designator_depth = 0;
- designator_errorneous = 0;
+ designator_erroneous = 0;
if (TREE_CODE (constructor_type) == RECORD_TYPE
|| TREE_CODE (constructor_type) == UNION_TYPE)
p->range_stack = constructor_range_stack;
constructor_range_stack = 0;
designator_depth = 0;
- designator_errorneous = 0;
+ designator_erroneous = 0;
}
/* Don't die if an entire brace-pair level is superfluous
/* If there were errors in this designator list already, bail out
silently. */
- if (designator_errorneous)
+ if (designator_erroneous)
return 1;
if (!designator_depth)
if (set_designator (1))
return;
- designator_errorneous = 1;
+ designator_erroneous = 1;
if (!INTEGRAL_TYPE_P (TREE_TYPE (first))
|| (last && !INTEGRAL_TYPE_P (TREE_TYPE (last))))
}
designator_depth++;
- designator_errorneous = 0;
+ designator_erroneous = 0;
if (constructor_range_stack || last)
push_range_stack (last);
}
if (set_designator (0))
return;
- designator_errorneous = 1;
+ designator_erroneous = 1;
if (TREE_CODE (constructor_type) != RECORD_TYPE
&& TREE_CODE (constructor_type) != UNION_TYPE)
{
constructor_fields = tail;
designator_depth++;
- designator_errorneous = 0;
+ designator_erroneous = 0;
if (constructor_range_stack)
push_range_stack (NULL_TREE);
}
bool strict_string = value.original_code == STRING_CST;
designator_depth = 0;
- designator_errorneous = 0;
+ designator_erroneous = 0;
/* Handle superfluous braces around string cst as in
char x[] = {"foo"}; */
if (!lvalue_or_else (output, lv_asm))
output = error_mark_node;
+ if (output != error_mark_node
+ && (TREE_READONLY (output)
+ || TYPE_READONLY (TREE_TYPE (output))
+ || ((TREE_CODE (TREE_TYPE (output)) == RECORD_TYPE
+ || TREE_CODE (TREE_TYPE (output)) == UNION_TYPE)
+ && C_TYPE_FIELDS_READONLY (TREE_TYPE (output)))))
+ readonly_error (output, lv_asm);
+
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tail)));
oconstraints[i] = constraint;
args = build_stmt (ASM_EXPR, string, outputs, inputs, clobbers);
- /* Simple asm statements are treated as volatile. */
- if (simple)
- {
- ASM_VOLATILE_P (args) = 1;
- ASM_INPUT_P (args) = 1;
- }
+ /* asm statements without outputs, including simple ones, are treated
+ as volatile. */
+ ASM_INPUT_P (args) = simple;
+ ASM_VOLATILE_P (args) = (noutputs == 0);
return args;
}
tree
c_finish_return (tree retval)
{
- tree valtype = TREE_TYPE (TREE_TYPE (current_function_decl));
+ tree valtype = TREE_TYPE (TREE_TYPE (current_function_decl)), ret_stmt;
+ bool no_warning = false;
if (TREE_THIS_VOLATILE (current_function_decl))
warning (0, "function declared %<noreturn%> has a %<return%> statement");
current_function_returns_null = 1;
if ((warn_return_type || flag_isoc99)
&& valtype != 0 && TREE_CODE (valtype) != VOID_TYPE)
- pedwarn_c99 ("%<return%> with no value, in "
- "function returning non-void");
+ {
+ pedwarn_c99 ("%<return%> with no value, in "
+ "function returning non-void");
+ no_warning = true;
+ }
}
else if (valtype == 0 || TREE_CODE (valtype) == VOID_TYPE)
{
retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, t);
}
- return add_stmt (build_stmt (RETURN_EXPR, retval));
+ ret_stmt = build_stmt (RETURN_EXPR, retval);
+ TREE_NO_WARNING (ret_stmt) |= no_warning;
+ return add_stmt (ret_stmt);
}
\f
struct c_switch {
/* Diagnose ";" via the special empty statement node that we create. */
if (extra_warnings)
{
- if (TREE_CODE (then_block) == NOP_EXPR && !TREE_TYPE (then_block))
+ tree *inner_then = &then_block, *inner_else = &else_block;
+
+ if (TREE_CODE (*inner_then) == STATEMENT_LIST
+ && STATEMENT_LIST_TAIL (*inner_then))
+ inner_then = &STATEMENT_LIST_TAIL (*inner_then)->stmt;
+ if (*inner_else && TREE_CODE (*inner_else) == STATEMENT_LIST
+ && STATEMENT_LIST_TAIL (*inner_else))
+ inner_else = &STATEMENT_LIST_TAIL (*inner_else)->stmt;
+
+ if (TREE_CODE (*inner_then) == NOP_EXPR && !TREE_TYPE (*inner_then))
{
- if (!else_block)
+ if (!*inner_else)
warning (0, "%Hempty body in an if-statement",
- EXPR_LOCUS (then_block));
- then_block = alloc_stmt_list ();
+ EXPR_LOCUS (*inner_then));
+
+ *inner_then = alloc_stmt_list ();
}
- if (else_block
- && TREE_CODE (else_block) == NOP_EXPR
- && !TREE_TYPE (else_block))
+ if (*inner_else
+ && TREE_CODE (*inner_else) == NOP_EXPR
+ && !TREE_TYPE (*inner_else))
{
warning (0, "%Hempty body in an else-statement",
- EXPR_LOCUS (else_block));
- else_block = alloc_stmt_list ();
+ EXPR_LOCUS (*inner_else));
+
+ *inner_else = alloc_stmt_list ();
}
}
}
t = build_and_jump (&blab);
- exit = build3 (COND_EXPR, void_type_node, cond, exit, t);
- exit = fold (exit);
+ exit = fold_build3 (COND_EXPR, void_type_node, cond, exit, t);
if (cond_is_first)
SET_EXPR_LOCATION (exit, start_locus);
else
build_type = result_type;
{
- tree result = build2 (resultcode, build_type, op0, op1);
-
/* Treat expressions in initializers specially as they can't trap. */
- result = require_constant_value ? fold_initializer (result)
- : fold (result);
+ tree result = require_constant_value ? fold_build2_initializer (resultcode,
+ build_type,
+ op0, op1)
+ : fold_build2 (resultcode, build_type,
+ op0, op1);
if (final_type != 0)
result = convert (final_type, result);