annotate_one_with_location (gs, location);
}
}
-\f
-/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
- nodes that are referenced more than once in GENERIC functions. This is
- necessary because gimplification (translation into GIMPLE) is performed
- by modifying tree nodes in-place, so gimplication of a shared node in a
- first context could generate an invalid GIMPLE form in a second context.
-
- This is achieved with a simple mark/copy/unmark algorithm that walks the
- GENERIC representation top-down, marks nodes with TREE_VISITED the first
- time it encounters them, duplicates them if they already have TREE_VISITED
- set, and finally removes the TREE_VISITED marks it has set.
-
- The algorithm works only at the function level, i.e. it generates a GENERIC
- representation of a function with no nodes shared within the function when
- passed a GENERIC function (except for nodes that are allowed to be shared).
-
- At the global level, it is also necessary to unshare tree nodes that are
- referenced in more than one function, for the same aforementioned reason.
- This requires some cooperation from the front-end. There are 2 strategies:
-
- 1. Manual unsharing. The front-end needs to call unshare_expr on every
- expression that might end up being shared across functions.
-
- 2. Deep unsharing. This is an extension of regular unsharing. Instead
- of calling unshare_expr on expressions that might be shared across
- functions, the front-end pre-marks them with TREE_VISITED. This will
- ensure that they are unshared on the first reference within functions
- when the regular unsharing algorithm runs. The counterpart is that
- this algorithm must look deeper than for manual unsharing, which is
- specified by LANG_HOOKS_DEEP_UNSHARING.
-
- If there are only few specific cases of node sharing across functions, it is
- probably easier for a front-end to unshare the expressions manually. On the
- contrary, if the expressions generated at the global level are as widespread
- as expressions generated within functions, deep unsharing is very likely the
- way to go. */
-
-/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
+
+
+/* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
These nodes model computations that should only be done once. If we
were to unshare something like SAVE_EXPR(i++), the gimplification
process would create wrong code. */
static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
- tree t = *tp;
- enum tree_code code = TREE_CODE (t);
-
- /* Do not copy SAVE_EXPR or TARGET_EXPR nodes themselves, but copy
- their subtrees if we can make sure to do it only once. */
- if (code == SAVE_EXPR || code == TARGET_EXPR)
- {
- if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
- ;
- else
- *walk_subtrees = 0;
- }
-
- /* Stop at types, decls, constants like copy_tree_r. */
- else if (TREE_CODE_CLASS (code) == tcc_type
- || TREE_CODE_CLASS (code) == tcc_declaration
- || TREE_CODE_CLASS (code) == tcc_constant
- /* We can't do anything sensible with a BLOCK used as an
- expression, but we also can't just die when we see it
- because of non-expression uses. So we avert our eyes
- and cross our fingers. Silly Java. */
- || code == BLOCK)
+ enum tree_code code = TREE_CODE (*tp);
+ /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
+ if (TREE_CODE_CLASS (code) == tcc_type
+ || TREE_CODE_CLASS (code) == tcc_declaration
+ || TREE_CODE_CLASS (code) == tcc_constant
+ || code == SAVE_EXPR || code == TARGET_EXPR
+ /* We can't do anything sensible with a BLOCK used as an expression,
+ but we also can't just die when we see it because of non-expression
+ uses. So just avert our eyes and cross our fingers. Silly Java. */
+ || code == BLOCK)
*walk_subtrees = 0;
-
- /* Cope with the statement expression extension. */
- else if (code == STATEMENT_LIST)
- ;
-
- /* Leave the bulk of the work to copy_tree_r itself. */
else
{
gcc_assert (code != BIND_EXPR);
- copy_tree_r (tp, walk_subtrees, NULL);
+ copy_tree_r (tp, walk_subtrees, data);
}
return NULL_TREE;
/* Callback for walk_tree to unshare most of the shared trees rooted at
*TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
- then *TP is deep copied by calling mostly_copy_tree_r. */
+   then *TP is deep copied by calling mostly_copy_tree_r.
+
+ This unshares the same trees as copy_tree_r with the exception of
+ SAVE_EXPR nodes. These nodes model computations that should only be
+ done once. If we were to unshare something like SAVE_EXPR(i++), the
+ gimplification process would create wrong code. */
static tree
-copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
+copy_if_shared_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
tree t = *tp;
enum tree_code code = TREE_CODE (t);
any deeper. */
else if (TREE_VISITED (t))
{
- walk_tree (tp, mostly_copy_tree_r, data, NULL);
+ walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
*walk_subtrees = 0;
}
- /* Otherwise, mark the node as visited and keep looking. */
+ /* Otherwise, mark the tree as visited and keep looking. */
else
TREE_VISITED (t) = 1;
return NULL_TREE;
}
-/* Unshare most of the shared trees rooted at *TP. */
-
-static inline void
-copy_if_shared (tree *tp)
+static tree
+unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
- /* If the language requires deep unsharing, we need a pointer set to make
- sure we don't repeatedly unshare subtrees of unshareable nodes. */
- struct pointer_set_t *visited
- = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
- walk_tree (tp, copy_if_shared_r, visited, NULL);
- if (visited)
- pointer_set_destroy (visited);
+ if (TREE_VISITED (*tp))
+ TREE_VISITED (*tp) = 0;
+ else
+ *walk_subtrees = 0;
+
+ return NULL_TREE;
}
/* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
{
struct cgraph_node *cgn = cgraph_node (fndecl);
- copy_if_shared (body_p);
-
+ walk_tree (body_p, copy_if_shared_r, NULL, NULL);
if (body_p == &DECL_SAVED_TREE (fndecl))
for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
}
-/* Callback for walk_tree to unmark the visited trees rooted at *TP.
- Subtrees are walked until the first unvisited node is encountered. */
-
-static tree
-unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
-{
- tree t = *tp;
-
- /* If this node has been visited, unmark it and keep looking. */
- if (TREE_VISITED (t))
- TREE_VISITED (t) = 0;
-
- /* Otherwise, don't look any deeper. */
- else
- *walk_subtrees = 0;
-
- return NULL_TREE;
-}
-
-/* Unmark the visited trees rooted at *TP. */
-
-static inline void
-unmark_visited (tree *tp)
-{
- walk_tree (tp, unmark_visited_r, NULL, NULL);
-}
-
/* Likewise, but mark all trees as not visited. */
static void
{
struct cgraph_node *cgn = cgraph_node (fndecl);
- unmark_visited (body_p);
-
+ walk_tree (body_p, unmark_visited_r, NULL, NULL);
if (body_p == &DECL_SAVED_TREE (fndecl))
for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
}
}
- default:
- break;
+ ret = GS_UNHANDLED;
+ break;
+
+ case WITH_SIZE_EXPR:
+ /* Likewise for calls that return an aggregate of non-constant size,
+ since we would not be able to generate a temporary at all. */
+ if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
+ {
+ *from_p = TREE_OPERAND (*from_p, 0);
+ ret = GS_OK;
+ }
+ else
+ ret = GS_UNHANDLED;
+ break;
+
+ /* If we're initializing from a container, push the initialization
+ inside it. */
+ case CLEANUP_POINT_EXPR:
+ case BIND_EXPR:
+ case STATEMENT_LIST:
+ {
+ tree wrap = *from_p;
+ tree t;
+
+ ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
+ fb_lvalue);
+ if (ret != GS_ERROR)
+ ret = GS_OK;
+
+ t = voidify_wrapper_expr (wrap, *expr_p);
+ gcc_assert (t == *expr_p);
+
+ if (want_value)
+ {
+ gimplify_and_add (wrap, pre_p);
+ *expr_p = unshare_expr (*to_p);
+ }
+ else
+ *expr_p = wrap;
+ return GS_OK;
+ }
+
+ case COMPOUND_LITERAL_EXPR:
+ {
+ tree complit = TREE_OPERAND (*expr_p, 1);
+ tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
+ tree decl = DECL_EXPR_DECL (decl_s);
+ tree init = DECL_INITIAL (decl);
+
+ /* struct T x = (struct T) { 0, 1, 2 } can be optimized
+ into struct T x = { 0, 1, 2 } if the address of the
+ compound literal has never been taken. */
+ if (!TREE_ADDRESSABLE (complit)
+ && !TREE_ADDRESSABLE (decl)
+ && init)
+ {
+ *expr_p = copy_node (*expr_p);
+ TREE_OPERAND (*expr_p, 1) = init;
+ return GS_OK;
+ }
}
}
while (changed);