/* For statements that may load from memory (have a VUSE) we
   have to mark all reaching (may-)definitions as necessary.
   We partition this task into two cases:
   1) explicit loads based on decls that are not aliased
   2) implicit loads (like calls) and explicit loads that do
      not fall under 1) (like indirect references or loads
      from globals)
   For 1) we mark all reaching may-defs as necessary, stopping
   at dominating kills.  For 2) we want to mark all dominating
   references necessary, except the non-aliased ones that are
   already handled by 1).  By keeping a global visited bitmap
   for the references we walk for 2) we avoid quadratic
   behavior for those.  */

+ if (is_gimple_call (stmt))
+ {
+ tree callee = gimple_call_fndecl (stmt);
+ unsigned i;
+
+ /* Calls to functions that are merely acting as barriers
+ or that only store to memory do not make any previous
+ stores necessary. */
+ if (callee != NULL_TREE
+ && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
+ && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
+ || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
+ || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE))
+ continue;
+
+ /* Calls implicitly load from memory, their arguments
+ in addition may explicitly perform memory loads. */
+ mark_all_reaching_defs_necessary (stmt);
+ for (i = 0; i < gimple_call_num_args (stmt); ++i)
+ {
+ tree arg = gimple_call_arg (stmt, i);
+ if (TREE_CODE (arg) == SSA_NAME
+ || is_gimple_min_invariant (arg))
+ continue;
+ if (!ref_may_be_aliased (arg))
+ mark_aliased_reaching_defs_necessary (stmt, arg);
+ }
+ }
+ else if (gimple_assign_single_p (stmt))
+ {
+ tree rhs;
+ bool rhs_aliased = false;
+ /* If this is a load mark things necessary. */
+ rhs = gimple_assign_rhs1 (stmt);
+ if (TREE_CODE (rhs) != SSA_NAME
+ && !is_gimple_min_invariant (rhs))
+ {
+ if (!ref_may_be_aliased (rhs))
+ mark_aliased_reaching_defs_necessary (stmt, rhs);
+ else
+ rhs_aliased = true;
+ }
+ if (rhs_aliased)
+ mark_all_reaching_defs_necessary (stmt);
+ }
+ else if (gimple_code (stmt) == GIMPLE_RETURN)
+ {
+ tree rhs = gimple_return_retval (stmt);
+ /* A return statement may perform a load. */
+ if (TREE_CODE (rhs) != SSA_NAME
+ && !is_gimple_min_invariant (rhs))
+ {
+ if (!ref_may_be_aliased (rhs))
+ mark_aliased_reaching_defs_necessary (stmt, rhs);
+ else
+ mark_all_reaching_defs_necessary (stmt);
+ }
+ }
+ else if (gimple_code (stmt) == GIMPLE_ASM)
+ {
+ unsigned i;
+ mark_all_reaching_defs_necessary (stmt);
+ /* Inputs may perform loads. */
+ for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+ {
+ tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
+ if (TREE_CODE (op) != SSA_NAME
+ && !is_gimple_min_invariant (op)
+ && !ref_may_be_aliased (op))
+ mark_aliased_reaching_defs_necessary (stmt, op);
+ }
+ }