gimple_build_call_1 (tree fn, unsigned nargs)
{
gimple s = gimple_build_with_ops (GIMPLE_CALL, 0, nargs + 3);
+ if (TREE_CODE (fn) == FUNCTION_DECL)
+ fn = build_fold_addr_expr (fn);
gimple_set_op (s, 1, fn);
return s;
}
{
gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
/* Ensure all the predictors fit into the lower bits of the subcode. */
- gcc_assert (END_PREDICTORS <= GF_PREDICT_TAKEN);
+ gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
gimple_predict_set_predictor (p, predictor);
gimple_predict_set_outcome (p, outcome);
return p;
return fn ? fn->gimple_body : NULL;
}
+/* Return true when FNDECL has Gimple body either in unlowered
+ or CFG form. */
+bool
+gimple_has_body_p (tree fndecl)
+{
+ struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
+ /* The body lives either as the unlowered gimple_body of FNDECL, or,
+    once lowered, as the control-flow graph hanging off its struct
+    function (fn may be NULL for declarations without a body).  */
+ return (gimple_body (fndecl) || (fn && fn->cfg));
+}
/* Detect flags from a GIMPLE_CALL. This is just like
call_expr_flags, but for gimple tuples. */
gimple_assign_unary_nop_p (gimple gs)
{
return (gimple_code (gs) == GIMPLE_ASSIGN
- && (gimple_assign_rhs_code (gs) == NOP_EXPR
- || gimple_assign_rhs_code (gs) == CONVERT_EXPR
+ && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
|| gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
&& gimple_assign_rhs1 (gs) != error_mark_node
&& (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
tree
gimple_get_lhs (const_gimple stmt)
{
- enum tree_code code = gimple_code (stmt);
+ enum gimple_code code = gimple_code (stmt);
if (code == GIMPLE_ASSIGN)
return gimple_assign_lhs (stmt);
void
gimple_set_lhs (gimple stmt, tree lhs)
{
- enum tree_code code = gimple_code (stmt);
+ enum gimple_code code = gimple_code (stmt);
if (code == GIMPLE_ASSIGN)
gimple_assign_set_lhs (stmt, lhs);
}
}
-/* Return true if T is a gimple invariant address. */
+/* Strip out all handled components that produce invariant
+ offsets. */
-bool
-is_gimple_invariant_address (const_tree t)
+static const_tree
+strip_invariant_refs (const_tree op)
{
- tree op;
-
- if (TREE_CODE (t) != ADDR_EXPR)
- return false;
-
- op = TREE_OPERAND (t, 0);
while (handled_component_p (op))
{
switch (TREE_CODE (op))
if (!is_gimple_constant (TREE_OPERAND (op, 1))
|| TREE_OPERAND (op, 2) != NULL_TREE
|| TREE_OPERAND (op, 3) != NULL_TREE)
- return false;
+ return NULL;
break;
case COMPONENT_REF:
if (TREE_OPERAND (op, 2) != NULL_TREE)
- return false;
+ return NULL;
break;
default:;
op = TREE_OPERAND (op, 0);
}
- return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
+ return op;
+}
+
+/* Return true if T is a gimple invariant address. */
+
+bool
+is_gimple_invariant_address (const_tree t)
+{
+ const_tree op;
+
+ /* Only an ADDR_EXPR can be an invariant address.  */
+ if (TREE_CODE (t) != ADDR_EXPR)
+ return false;
+
+ /* strip_invariant_refs returns NULL when any handled component
+    carries a non-invariant offset, in which case T is not invariant.  */
+ op = strip_invariant_refs (TREE_OPERAND (t, 0));
+
+ return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op));
+}
+
+/* Return true if T is a gimple invariant address at IPA level
+ (so addresses of variables on stack are not allowed). */
+
+bool
+is_gimple_ip_invariant_address (const_tree t)
+{
+ const_tree op;
+
+ /* Only an ADDR_EXPR can be an invariant address.  */
+ if (TREE_CODE (t) != ADDR_EXPR)
+ return false;
+
+ /* NULL here means some component offset is not invariant.  */
+ op = strip_invariant_refs (TREE_OPERAND (t, 0));
+
+ /* Unlike is_gimple_invariant_address, use the IPA-level predicate,
+    which rejects automatic (stack) variables.  */
+ return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
}
/* Return true if T is a GIMPLE minimal invariant. It's a restricted
return is_gimple_constant (t);
}
+/* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
+ form of gimple minimal invariant. */
+
+bool
+is_gimple_ip_invariant (const_tree t)
+{
+ /* Addresses need the stricter IPA-level check (no stack variables);
+    everything else is IP invariant iff it is a gimple constant.  */
+ if (TREE_CODE (t) == ADDR_EXPR)
+ return is_gimple_ip_invariant_address (t);
+
+ return is_gimple_constant (t);
+}
+
/* Return true if T looks like a valid GIMPLE statement. */
bool
return NULL_TREE;
}
+/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
+ the positions marked by the set ARGS_TO_SKIP. */
+
+gimple
+gimple_copy_call_skip_args (gimple stmt, bitmap args_to_skip)
+{
+ int i;
+ tree fn = gimple_call_fn (stmt);
+ int nargs = gimple_call_num_args (stmt);
+ VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
+ gimple new_stmt;
+
+ /* vargs was allocated with room for all NARGS arguments, so
+    quick_push (no reallocation) is safe even if nothing is skipped.  */
+ for (i = 0; i < nargs; i++)
+ if (!bitmap_bit_p (args_to_skip, i))
+ VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
+
+ new_stmt = gimple_build_call_vec (fn, vargs);
+ VEC_free (tree, heap, vargs);
+ /* Preserve the LHS of the original call, if any.  */
+ if (gimple_call_lhs (stmt))
+ gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
+
+ /* Copy debug/context information from the original statement.  */
+ gimple_set_block (new_stmt, gimple_block (stmt));
+ if (gimple_has_location (stmt))
+ gimple_set_location (new_stmt, gimple_location (stmt));
+
+ /* Carry all the flags to the new GIMPLE_CALL. */
+ gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
+ gimple_call_set_tail (new_stmt, gimple_call_tail_p (stmt));
+ gimple_call_set_cannot_inline (new_stmt, gimple_call_cannot_inline_p (stmt));
+ gimple_call_set_return_slot_opt (new_stmt, gimple_call_return_slot_opt_p (stmt));
+ gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt));
+ gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt));
+ return new_stmt;
+}
+
#include "gt-gimple.h"