diff --git a/gcc/tree-flow-inline.h b/gcc/tree-flow-inline.h
index 2430f0355f7..dcbe355e926 100644
--- a/gcc/tree-flow-inline.h
+++ b/gcc/tree-flow-inline.h
@@ -48,7 +48,7 @@ gimple_referenced_vars (const struct function *fun)
 static inline tree
 gimple_vop (const struct function *fun)
 {
-  gcc_assert (fun && fun->gimple_df);
+  gcc_checking_assert (fun && fun->gimple_df);
   return fun->gimple_df->vop;
 }

@@ -98,14 +98,24 @@ next_htab_element (htab_iterator *hti)
   return NULL;
 }

+/* Get the variable with uid UID from the list of referenced vars.  */
+
+static inline tree
+referenced_var (unsigned int uid)
+{
+  tree var = referenced_var_lookup (cfun, uid);
+  gcc_assert (var || uid == 0);
+  return var;
+}
+
 /* Initialize ITER to point to the first referenced variable in the
    referenced_vars hashtable, and return that variable.  */

 static inline tree
-first_referenced_var (referenced_var_iterator *iter)
+first_referenced_var (struct function *fn, referenced_var_iterator *iter)
 {
   return (tree) first_htab_element (&iter->hti,
-                                    gimple_referenced_vars (cfun));
+                                    gimple_referenced_vars (fn));
 }

 /* Return true if we have hit the end of the referenced variables ITER is
@@ -141,7 +151,7 @@ static inline var_ann_t
 get_var_ann (tree var)
 {
   var_ann_t *p = DECL_VAR_ANN_PTR (var);
-  gcc_assert (p);
+  gcc_checking_assert (p);
   return *p ? *p : create_var_ann (var);
 }

@@ -220,10 +230,8 @@ link_imm_use (ssa_use_operand_t *linknode, tree def)
   else
     {
       root = &(SSA_NAME_IMM_USE_NODE (def));
-#ifdef ENABLE_CHECKING
       if (linknode->use)
-        gcc_assert (*(linknode->use) == def);
-#endif
+        gcc_checking_assert (*(linknode->use) == def);
       link_imm_use_to_list (linknode, root);
     }
 }
@@ -254,7 +262,7 @@ static inline void
 relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
 {
   /* The node one had better be in the same list.  */
-  gcc_assert (*(old->use) == *(node->use));
+  gcc_checking_assert (*(old->use) == *(node->use));
   node->prev = old->prev;
   node->next = old->next;
   if (old->prev)
@@ -507,7 +515,7 @@ gimple_phi_arg_has_location (gimple gs, size_t i)
 static inline gimple_seq
 phi_nodes (const_basic_block bb)
 {
-  gcc_assert (!(bb->flags & BB_RTL));
+  gcc_checking_assert (!(bb->flags & BB_RTL));
   if (!bb->il.gimple)
     return NULL;
   return bb->il.gimple->phi_nodes;
@@ -520,7 +528,7 @@ set_phi_nodes (basic_block bb, gimple_seq seq)
 {
   gimple_stmt_iterator i;

-  gcc_assert (!(bb->flags & BB_RTL));
+  gcc_checking_assert (!(bb->flags & BB_RTL));
   bb->il.gimple->phi_nodes = seq;
   if (seq)
     for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
@@ -541,19 +549,16 @@ phi_arg_index_from_use (use_operand_p use)
      pointer arithmetic.  */

   phi = USE_STMT (use);
-  gcc_assert (gimple_code (phi) == GIMPLE_PHI);
   element = (struct phi_arg_d *)use;
   root = gimple_phi_arg (phi, 0);
   index = element - root;

-#ifdef ENABLE_CHECKING
   /* Make sure the calculation doesn't have any leftover bytes.  If it does,
      then imm_use is likely not the first element in phi_arg_d.  */
-  gcc_assert ((((char *)element - (char *)root)
-              % sizeof (struct phi_arg_d)) == 0
-              && index < gimple_phi_capacity (phi));
-#endif
+  gcc_checking_assert ((((char *)element - (char *)root)
+                        % sizeof (struct phi_arg_d)) == 0
+                        && index < gimple_phi_capacity (phi));

   return index;
 }

@@ -564,9 +569,26 @@ static inline void
 set_is_used (tree var)
 {
   var_ann_t ann = get_var_ann (var);
-  ann->used = 1;
+  ann->used = true;
+}
+
+/* Clear VAR's used flag.  */
+
+static inline void
+clear_is_used (tree var)
+{
+  var_ann_t ann = var_ann (var);
+  ann->used = false;
 }

+/* Return true if VAR is marked as used.  */
+
+static inline bool
+is_used_p (tree var)
+{
+  var_ann_t ann = var_ann (var);
+  return ann->used;
+}

 /* Return true if T (assumed to be a DECL) is a global variable.
    A variable is considered global if its storage is not automatic.  */
@@ -604,9 +626,7 @@ phi_ssa_name_p (const_tree t)
 {
   if (TREE_CODE (t) == SSA_NAME)
     return true;
-#ifdef ENABLE_CHECKING
-  gcc_assert (is_gimple_min_invariant (t));
-#endif
+  gcc_checking_assert (is_gimple_min_invariant (t));
   return false;
 }

@@ -624,15 +644,6 @@ loop_containing_stmt (gimple stmt)
 }


-/* Return true if VAR is clobbered by function calls.  */
-static inline bool
-is_call_clobbered (const_tree var)
-{
-  return (is_global_var (var)
-          || (may_be_aliased (var)
-              && pt_solution_includes (&cfun->gimple_df->escaped, var)));
-}
-
 /* ----------------------------------------------------------------------- */

 /* The following set of routines are used to iterator over various type of
@@ -650,9 +661,7 @@ static inline use_operand_p
 op_iter_next_use (ssa_op_iter *ptr)
 {
   use_operand_p use_p;
-#ifdef ENABLE_CHECKING
-  gcc_assert (ptr->iter_type == ssa_op_iter_use);
-#endif
+  gcc_checking_assert (ptr->iter_type == ssa_op_iter_use);
   if (ptr->uses)
     {
       use_p = USE_OP_PTR (ptr->uses);
@@ -672,9 +681,7 @@ static inline def_operand_p
 op_iter_next_def (ssa_op_iter *ptr)
 {
   def_operand_p def_p;
-#ifdef ENABLE_CHECKING
-  gcc_assert (ptr->iter_type == ssa_op_iter_def);
-#endif
+  gcc_checking_assert (ptr->iter_type == ssa_op_iter_def);
   if (ptr->defs)
     {
       def_p = DEF_OP_PTR (ptr->defs);
@@ -690,9 +697,7 @@ static inline tree
 op_iter_next_tree (ssa_op_iter *ptr)
 {
   tree val;
-#ifdef ENABLE_CHECKING
-  gcc_assert (ptr->iter_type == ssa_op_iter_tree);
-#endif
+  gcc_checking_assert (ptr->iter_type == ssa_op_iter_tree);
   if (ptr->uses)
     {
       val = USE_OP (ptr->uses);
@@ -734,8 +739,8 @@ op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
 {
   /* We do not support iterating over virtual defs or uses without
      iterating over defs or uses at the same time.  */
-  gcc_assert ((!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
-              && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
+  gcc_checking_assert ((!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
+                       && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
   ptr->defs = (flags & (SSA_OP_DEF|SSA_OP_VDEF)) ? gimple_def_ops (stmt) : NULL;
   if (!(flags & SSA_OP_VDEF)
       && ptr->defs
@@ -758,8 +763,8 @@ op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
 static inline use_operand_p
 op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
 {
-  gcc_assert ((flags & SSA_OP_ALL_DEFS) == 0
-              && (flags & SSA_OP_USE));
+  gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
+                       && (flags & SSA_OP_USE));
   op_iter_init (ptr, stmt, flags);
   ptr->iter_type = ssa_op_iter_use;
   return op_iter_next_use (ptr);
@@ -770,8 +775,8 @@ op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
 static inline def_operand_p
 op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
 {
-  gcc_assert ((flags & SSA_OP_ALL_USES) == 0
-              && (flags & SSA_OP_DEF));
+  gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
+                       && (flags & SSA_OP_DEF));
   op_iter_init (ptr, stmt, flags);
   ptr->iter_type = ssa_op_iter_def;
   return op_iter_next_def (ptr);
@@ -906,7 +911,7 @@ op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
   clear_and_done_ssa_iter (ptr);
   ptr->done = false;

-  gcc_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);
+  gcc_checking_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);

   comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

@@ -935,7 +940,7 @@ op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
   clear_and_done_ssa_iter (ptr);
   ptr->done = false;

-  gcc_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);
+  gcc_checking_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);

   comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);

@@ -981,9 +986,7 @@ static inline use_operand_p
 move_use_after_head (use_operand_p use_p, use_operand_p head,
                      use_operand_p last_p)
 {
-#ifdef ENABLE_CHECKING
-  gcc_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
-#endif
+  gcc_checking_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
   /* Skip head when we find it.  */
   if (use_p != head)
     {
@@ -1129,27 +1132,13 @@ unmodifiable_var_p (const_tree var)
   return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
 }

-/* Return true if REF, an ARRAY_REF, has an INDIRECT_REF somewhere in it.  */
-
-static inline bool
-array_ref_contains_indirect_ref (const_tree ref)
-{
-  gcc_assert (TREE_CODE (ref) == ARRAY_REF);
-
-  do {
-    ref = TREE_OPERAND (ref, 0);
-  } while (handled_component_p (ref));
-
-  return TREE_CODE (ref) == INDIRECT_REF;
-}
-
 /* Return true if REF, a handled component reference, has an ARRAY_REF
    somewhere in it.  */

 static inline bool
 ref_contains_array_ref (const_tree ref)
 {
-  gcc_assert (handled_component_p (ref));
+  gcc_checking_assert (handled_component_p (ref));

   do {
     if (TREE_CODE (ref) == ARRAY_REF)
@@ -1238,4 +1227,139 @@ make_ssa_name (tree var, gimple stmt)
   return make_ssa_name_fn (cfun, var, stmt);
 }

+/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
+   denotes the starting address of the memory access EXP.
+   Returns NULL_TREE if the offset is not constant or any component
+   is not BITS_PER_UNIT-aligned.
+   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
+   its argument or a constant if the argument is known to be constant.  */
+
+static inline tree
+get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
+                                 tree (*valueize) (tree))
+{
+  HOST_WIDE_INT byte_offset = 0;
+
+  /* Compute cumulative byte-offset for nested component-refs and array-refs,
+     and find the ultimate containing object.  */
+  while (1)
+    {
+      switch (TREE_CODE (exp))
+        {
+        case BIT_FIELD_REF:
+          return NULL_TREE;
+
+        case COMPONENT_REF:
+          {
+            tree field = TREE_OPERAND (exp, 1);
+            tree this_offset = component_ref_field_offset (exp);
+            HOST_WIDE_INT hthis_offset;
+
+            if (!this_offset
+                || TREE_CODE (this_offset) != INTEGER_CST
+                || (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
+                    % BITS_PER_UNIT))
+              return NULL_TREE;
+
+            hthis_offset = TREE_INT_CST_LOW (this_offset);
+            hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
+                             / BITS_PER_UNIT);
+            byte_offset += hthis_offset;
+          }
+          break;
+
+        case ARRAY_REF:
+        case ARRAY_RANGE_REF:
+          {
+            tree index = TREE_OPERAND (exp, 1);
+            tree low_bound, unit_size;
+
+            if (valueize
+                && TREE_CODE (index) == SSA_NAME)
+              index = (*valueize) (index);
+
+            /* If the resulting bit-offset is constant, track it.  */
+            if (TREE_CODE (index) == INTEGER_CST
+                && (low_bound = array_ref_low_bound (exp),
+                    TREE_CODE (low_bound) == INTEGER_CST)
+                && (unit_size = array_ref_element_size (exp),
+                    TREE_CODE (unit_size) == INTEGER_CST))
+              {
+                HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);
+
+                hindex -= TREE_INT_CST_LOW (low_bound);
+                hindex *= TREE_INT_CST_LOW (unit_size);
+                byte_offset += hindex;
+              }
+            else
+              return NULL_TREE;
+          }
+          break;
+
+        case REALPART_EXPR:
+          break;
+
+        case IMAGPART_EXPR:
+          byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
+          break;
+
+        case VIEW_CONVERT_EXPR:
+          break;
+
+        case MEM_REF:
+          {
+            tree base = TREE_OPERAND (exp, 0);
+            if (valueize
+                && TREE_CODE (base) == SSA_NAME)
+              base = (*valueize) (base);
+
+            /* Hand back the decl for MEM[&decl, off].  */
+            if (TREE_CODE (base) == ADDR_EXPR)
+              {
+                if (!integer_zerop (TREE_OPERAND (exp, 1)))
+                  {
+                    double_int off = mem_ref_offset (exp);
+                    gcc_assert (off.high == -1 || off.high == 0);
+                    byte_offset += double_int_to_shwi (off);
+                  }
+                exp = TREE_OPERAND (base, 0);
+              }
+            goto done;
+          }
+
+        case TARGET_MEM_REF:
+          {
+            tree base = TREE_OPERAND (exp, 0);
+            if (valueize
+                && TREE_CODE (base) == SSA_NAME)
+              base = (*valueize) (base);
+
+            /* Hand back the decl for MEM[&decl, off].  */
+            if (TREE_CODE (base) == ADDR_EXPR)
+              {
+                if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
+                  return NULL_TREE;
+                if (!integer_zerop (TMR_OFFSET (exp)))
+                  {
+                    double_int off = mem_ref_offset (exp);
+                    gcc_assert (off.high == -1 || off.high == 0);
+                    byte_offset += double_int_to_shwi (off);
+                  }
+                exp = TREE_OPERAND (base, 0);
+              }
+            goto done;
+          }
+
+        default:
+          goto done;
+        }
+
+      exp = TREE_OPERAND (exp, 0);
+    }
+done:
+
+  *poffset = byte_offset;
+  return exp;
+}
+
 #endif /* _TREE_FLOW_INLINE_H */
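
Usage sketches (illustrative, not part of the patch above):

With first_referenced_var now taking the function explicitly, a walk over
the referenced vars no longer hard-codes cfun.  A minimal sketch, assuming
the usual next_referenced_var / end_referenced_vars_p companions declared
elsewhere in this header, together with the is_used_p predicate added above:

  referenced_var_iterator rvi;
  tree var;

  /* Visit every referenced variable of FN, processing the used ones.  */
  for (var = first_referenced_var (fn, &rvi);
       !end_referenced_vars_p (&rvi);
       var = next_referenced_var (&rvi))
    if (is_used_p (var))
      {
        /* ... process VAR ... */
      }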
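
Similarly, a caller that wants the new constant byte-offset decomposition of
a reference REF (illustrative local names; passing NULL disables SSA
valueization) might do:

  HOST_WIDE_INT offset;
  tree base = get_addr_base_and_unit_offset_1 (ref, &offset, NULL);

  if (base != NULL_TREE)
    {
      /* BASE is the ultimate containing object, e.g. the DECL behind
         MEM[&decl, off], and OFFSET is the constant byte offset of REF
         within it.  A NULL_TREE result means some component of the
         address was not constant or not byte-aligned.  */
    }

Passing a non-NULL VALUEIZE callback instead lets the walk substitute
known-constant definitions for SSA names encountered in array indices and
memory-reference bases, as the function comment describes.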