1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
34 /* Return true when DECL is static object in other partition.
35 In that case we must prevent folding as we can't refer to
38 We can get into it in two ways:
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsewhere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body. */
52 static_object_in_other_unit_p (tree decl)
54 struct varpool_node *vnode;
55 struct cgraph_node *node;
57 if (!TREE_STATIC (decl)
58 || TREE_PUBLIC (decl) || DECL_COMDAT (decl))
60 /* External flag is set, so we deal with C++ reference
61 to static object from other file. */
62 if (DECL_EXTERNAL (decl))
64 /* Just be sure it is not big in frontend setting
65 flags incorrectly. Those variables should never
67 gcc_checking_assert (!(vnode = varpool_get_node (decl))
68 || !vnode->finalized);
71 /* We are not at ltrans stage; so don't worry about WHOPR. */
74 if (TREE_CODE (decl) == FUNCTION_DECL)
76 node = cgraph_get_node (decl);
77 if (!node || !node->analyzed)
80 else if (TREE_CODE (decl) == VAR_DECL)
82 vnode = varpool_get_node (decl);
83 if (!vnode || !vnode->finalized)
89 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
90 acceptable form for is_gimple_min_invariant. */
93 canonicalize_constructor_val (tree cval)
96 if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
98 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
99 TREE_OPERAND (cval, 0),
100 TREE_OPERAND (cval, 1),
105 if (TREE_CODE (cval) == ADDR_EXPR)
107 tree base = get_base_address (TREE_OPERAND (cval, 0));
109 && (TREE_CODE (base) == VAR_DECL
110 || TREE_CODE (base) == FUNCTION_DECL)
111 && static_object_in_other_unit_p (base))
113 if (base && TREE_CODE (base) == VAR_DECL)
114 add_referenced_var (base);
119 /* If SYM is a constant variable with known value, return the value.
120 NULL_TREE is returned otherwise. */
123 get_symbol_constant_value (tree sym)
125 if ((TREE_STATIC (sym) || DECL_EXTERNAL (sym))
126 && (TREE_CODE (sym) == CONST_DECL
127 || varpool_get_node (sym)->const_value_known))
129 tree val = DECL_INITIAL (sym);
132 val = canonicalize_constructor_val (val);
133 if (val && is_gimple_min_invariant (val))
138 /* Variables declared 'const' without an initializer
139 have zero as the initializer if they may not be
140 overridden at link or run time. */
142 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
143 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
144 return fold_convert (TREE_TYPE (sym), integer_zero_node);
151 /* Return true if we may propagate the address expression ADDR into the
152 dereference DEREF and cancel them. */
155 may_propagate_address_into_dereference (tree addr, tree deref)
157 gcc_assert (TREE_CODE (deref) == MEM_REF
158 && TREE_CODE (addr) == ADDR_EXPR);
160 /* Don't propagate if ADDR's operand has incomplete type. */
161 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
164 /* If the address is invariant then we do not need to preserve restrict
165 qualifications. But we do need to preserve volatile qualifiers until
166 we can annotate the folded dereference itself properly. */
167 if (is_gimple_min_invariant (addr)
168 && (!TREE_THIS_VOLATILE (deref)
169 || TYPE_VOLATILE (TREE_TYPE (addr))))
170 return useless_type_conversion_p (TREE_TYPE (deref),
171 TREE_TYPE (TREE_OPERAND (addr, 0)));
173 /* Else both the address substitution and the folding must result in
174 a valid useless type conversion sequence. */
175 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
177 && useless_type_conversion_p (TREE_TYPE (deref),
178 TREE_TYPE (TREE_OPERAND (addr, 0))));
182 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
183 BASE is an array type. OFFSET is a byte displacement.
185 LOC is the location of the original expression. */
188 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
190 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
191 tree array_type, elt_type, elt_size;
194 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
195 measured in units of the size of elements type) from that ARRAY_REF).
196 We can't do anything if either is variable.
198 The case we handle here is *(&A[N]+O). */
199 if (TREE_CODE (base) == ARRAY_REF)
201 tree low_bound = array_ref_low_bound (base);
203 elt_offset = TREE_OPERAND (base, 1);
204 if (TREE_CODE (low_bound) != INTEGER_CST
205 || TREE_CODE (elt_offset) != INTEGER_CST)
208 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
209 base = TREE_OPERAND (base, 0);
212 /* Ignore stupid user tricks of indexing non-array variables. */
213 array_type = TREE_TYPE (base);
214 if (TREE_CODE (array_type) != ARRAY_TYPE)
216 elt_type = TREE_TYPE (array_type);
218 /* Use signed size type for intermediate computation on the index. */
219 idx_type = ssizetype;
221 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
222 element type (so we can use the alignment if it's not constant).
223 Otherwise, compute the offset as an index by using a division. If the
224 division isn't exact, then don't do anything. */
225 elt_size = TYPE_SIZE_UNIT (elt_type);
228 if (integer_zerop (offset))
230 if (TREE_CODE (elt_size) != INTEGER_CST)
231 elt_size = size_int (TYPE_ALIGN (elt_type));
233 idx = build_int_cst (idx_type, 0);
237 unsigned HOST_WIDE_INT lquo, lrem;
238 HOST_WIDE_INT hquo, hrem;
241 /* The final array offset should be signed, so we need
242 to sign-extend the (possibly pointer) offset here
243 and use signed division. */
244 soffset = double_int_sext (tree_to_double_int (offset),
245 TYPE_PRECISION (TREE_TYPE (offset)));
246 if (TREE_CODE (elt_size) != INTEGER_CST
247 || div_and_round_double (TRUNC_DIV_EXPR, 0,
248 soffset.low, soffset.high,
249 TREE_INT_CST_LOW (elt_size),
250 TREE_INT_CST_HIGH (elt_size),
251 &lquo, &hquo, &lrem, &hrem)
255 idx = build_int_cst_wide (idx_type, lquo, hquo);
258 /* Assume the low bound is zero. If there is a domain type, get the
259 low bound, if any, convert the index into that type, and add the
261 min_idx = build_int_cst (idx_type, 0);
262 domain_type = TYPE_DOMAIN (array_type);
265 idx_type = domain_type;
266 if (TYPE_MIN_VALUE (idx_type))
267 min_idx = TYPE_MIN_VALUE (idx_type);
269 min_idx = fold_convert (idx_type, min_idx);
271 if (TREE_CODE (min_idx) != INTEGER_CST)
274 elt_offset = fold_convert (idx_type, elt_offset);
277 if (!integer_zerop (min_idx))
278 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
279 if (!integer_zerop (elt_offset))
280 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
282 /* Make sure to possibly truncate late after offsetting. */
283 idx = fold_convert (idx_type, idx);
285 /* We don't want to construct access past array bounds. For example
288 should not be simplified into (*c)[14] or tree-vrp will
290 This is only an issue for multi-dimensional arrays. */
291 if (TREE_CODE (elt_type) == ARRAY_TYPE
294 if (TYPE_MAX_VALUE (domain_type)
295 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
296 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
298 else if (TYPE_MIN_VALUE (domain_type)
299 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
300 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
302 else if (compare_tree_int (idx, 0) < 0)
307 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
308 SET_EXPR_LOCATION (t, loc);
314 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
315 LOC is the location of original expression.
317 Before attempting the conversion strip off existing ADDR_EXPRs. */
320 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
326 if (TREE_CODE (base) != ADDR_EXPR)
329 base = TREE_OPERAND (base, 0);
330 if (types_compatible_p (orig_type, TREE_TYPE (base))
331 && integer_zerop (offset))
334 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
335 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
340 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
341 LOC is the location of the original expression. */
344 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
350 if (TREE_CODE (addr) != ADDR_EXPR)
352 base = TREE_OPERAND (addr, 0);
353 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
356 ret = build_fold_addr_expr (ret);
357 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
359 SET_EXPR_LOCATION (ret, loc);
366 /* A quaint feature extant in our address arithmetic is that there
367 can be hidden type changes here. The type of the result need
368 not be the same as the type of the input pointer.
370 What we're after here is an expression of the form
371 (T *)(&array + const)
372 where array is OP0, const is OP1, RES_TYPE is T and
373 the cast doesn't actually exist, but is implicit in the
374 type of the POINTER_PLUS_EXPR. We'd like to turn this into
376 which may be able to propagate further. */
379 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
384 /* The first operand should be an ADDR_EXPR. */
385 if (TREE_CODE (op0) != ADDR_EXPR)
387 op0 = TREE_OPERAND (op0, 0);
389 /* It had better be a constant. */
390 if (TREE_CODE (op1) != INTEGER_CST)
392 /* Or op0 should now be A[0] and the non-constant offset defined
393 via a multiplication by the array element size. */
394 if (TREE_CODE (op0) == ARRAY_REF
395 /* As we will end up creating a variable index array access
396 in the outermost array dimension make sure there isn't
397 a more inner array that the index could overflow to. */
398 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
399 && integer_zerop (TREE_OPERAND (op0, 1))
400 && TREE_CODE (op1) == SSA_NAME)
402 gimple offset_def = SSA_NAME_DEF_STMT (op1);
403 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
404 if (!host_integerp (elsz, 1)
405 || !is_gimple_assign (offset_def))
408 /* Do not build array references of something that we can't
409 see the true number of array dimensions for. */
410 if (!DECL_P (TREE_OPERAND (op0, 0))
411 && !handled_component_p (TREE_OPERAND (op0, 0)))
414 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
415 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
416 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
417 return build_fold_addr_expr
418 (build4 (ARRAY_REF, TREE_TYPE (op0),
419 TREE_OPERAND (op0, 0),
420 gimple_assign_rhs1 (offset_def),
421 TREE_OPERAND (op0, 2),
422 TREE_OPERAND (op0, 3)));
423 else if (integer_onep (elsz)
424 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
425 return build_fold_addr_expr
426 (build4 (ARRAY_REF, TREE_TYPE (op0),
427 TREE_OPERAND (op0, 0),
429 TREE_OPERAND (op0, 2),
430 TREE_OPERAND (op0, 3)));
432 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
434 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
435 && TREE_CODE (op1) == SSA_NAME)
437 gimple offset_def = SSA_NAME_DEF_STMT (op1);
438 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
439 if (!host_integerp (elsz, 1)
440 || !is_gimple_assign (offset_def))
443 /* Do not build array references of something that we can't
444 see the true number of array dimensions for. */
446 && !handled_component_p (op0))
449 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
450 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
451 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
452 return build_fold_addr_expr
453 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
454 op0, gimple_assign_rhs1 (offset_def),
455 integer_zero_node, NULL_TREE));
456 else if (integer_onep (elsz)
457 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
458 return build_fold_addr_expr
459 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
461 integer_zero_node, NULL_TREE));
467 /* If the first operand is an ARRAY_REF, expand it so that we can fold
468 the offset into it. */
469 while (TREE_CODE (op0) == ARRAY_REF)
471 tree array_obj = TREE_OPERAND (op0, 0);
472 tree array_idx = TREE_OPERAND (op0, 1);
473 tree elt_type = TREE_TYPE (op0);
474 tree elt_size = TYPE_SIZE_UNIT (elt_type);
477 if (TREE_CODE (array_idx) != INTEGER_CST)
479 if (TREE_CODE (elt_size) != INTEGER_CST)
482 /* Un-bias the index by the min index of the array type. */
483 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
486 min_idx = TYPE_MIN_VALUE (min_idx);
489 if (TREE_CODE (min_idx) != INTEGER_CST)
492 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
493 if (!integer_zerop (min_idx))
494 array_idx = int_const_binop (MINUS_EXPR, array_idx,
499 /* Convert the index to a byte offset. */
500 array_idx = fold_convert (sizetype, array_idx);
501 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
503 /* Update the operands for the next round, or for folding. */
504 op1 = int_const_binop (PLUS_EXPR,
509 ptd_type = TREE_TYPE (res_type);
510 /* If we want a pointer to void, reconstruct the reference from the
511 array element type. A pointer to that can be trivially converted
512 to void *. This happens as we fold (void *)(ptr p+ off). */
513 if (VOID_TYPE_P (ptd_type)
514 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
515 ptd_type = TREE_TYPE (TREE_TYPE (op0));
517 /* At which point we can try some of the same things as for indirects. */
518 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
521 t = build_fold_addr_expr (t);
522 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
524 SET_EXPR_LOCATION (t, loc);
530 /* Subroutine of fold_stmt. We perform several simplifications of the
531 memory reference tree EXPR and make sure to re-gimplify them properly
532 after propagation of constant addresses. IS_LHS is true if the
533 reference is supposed to be an lvalue. */
536 maybe_fold_reference (tree expr, bool is_lhs)
542 && (result = fold_const_aggregate_ref (expr))
543 && is_gimple_min_invariant (result))
546 /* ??? We might want to open-code the relevant remaining cases
547 to avoid using the generic fold. */
548 if (handled_component_p (*t)
549 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
551 tree tem = fold (*t);
556 while (handled_component_p (*t))
557 t = &TREE_OPERAND (*t, 0);
559 /* Fold back MEM_REFs to reference trees. */
560 if (TREE_CODE (*t) == MEM_REF
561 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
562 && integer_zerop (TREE_OPERAND (*t, 1))
563 && (TREE_THIS_VOLATILE (*t)
564 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
565 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
566 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
567 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
568 /* We have to look out here to not drop a required conversion
569 from the rhs to the lhs if is_lhs, but we don't have the
570 rhs here to verify that. Thus require strict type
572 && types_compatible_p (TREE_TYPE (*t),
573 TREE_TYPE (TREE_OPERAND
574 (TREE_OPERAND (*t, 0), 0))))
577 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
578 tem = maybe_fold_reference (expr, is_lhs);
583 /* Canonicalize MEM_REFs invariant address operand. */
584 else if (TREE_CODE (*t) == MEM_REF
585 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
586 && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
587 && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
589 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
590 TREE_OPERAND (*t, 0),
591 TREE_OPERAND (*t, 1));
595 tem = maybe_fold_reference (expr, is_lhs);
601 else if (TREE_CODE (*t) == TARGET_MEM_REF)
603 tree tem = maybe_fold_tmr (*t);
607 tem = maybe_fold_reference (expr, is_lhs);
616 tree tem = get_symbol_constant_value (*t);
618 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
620 *t = unshare_expr (tem);
621 tem = maybe_fold_reference (expr, is_lhs);
632 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
633 replacement rhs for the statement or NULL_TREE if no simplification
634 could be made. It is assumed that the operands have been previously
638 fold_gimple_assign (gimple_stmt_iterator *si)
640 gimple stmt = gsi_stmt (*si);
641 enum tree_code subcode = gimple_assign_rhs_code (stmt);
642 location_t loc = gimple_location (stmt);
644 tree result = NULL_TREE;
646 switch (get_gimple_rhs_class (subcode))
648 case GIMPLE_SINGLE_RHS:
650 tree rhs = gimple_assign_rhs1 (stmt);
652 /* Try to fold a conditional expression. */
653 if (TREE_CODE (rhs) == COND_EXPR)
655 tree op0 = COND_EXPR_COND (rhs);
658 location_t cond_loc = EXPR_LOCATION (rhs);
660 if (COMPARISON_CLASS_P (op0))
662 fold_defer_overflow_warnings ();
663 tem = fold_binary_loc (cond_loc,
664 TREE_CODE (op0), TREE_TYPE (op0),
665 TREE_OPERAND (op0, 0),
666 TREE_OPERAND (op0, 1));
667 /* This is actually a conditional expression, not a GIMPLE
668 conditional statement, however, the valid_gimple_rhs_p
669 test still applies. */
670 set = (tem && is_gimple_condexpr (tem)
671 && valid_gimple_rhs_p (tem));
672 fold_undefer_overflow_warnings (set, stmt, 0);
674 else if (is_gimple_min_invariant (op0))
683 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
684 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
687 else if (REFERENCE_CLASS_P (rhs))
688 return maybe_fold_reference (rhs, false);
690 else if (TREE_CODE (rhs) == ADDR_EXPR)
692 tree ref = TREE_OPERAND (rhs, 0);
693 tree tem = maybe_fold_reference (ref, true);
695 && TREE_CODE (tem) == MEM_REF
696 && integer_zerop (TREE_OPERAND (tem, 1)))
697 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
699 result = fold_convert (TREE_TYPE (rhs),
700 build_fold_addr_expr_loc (loc, tem));
701 else if (TREE_CODE (ref) == MEM_REF
702 && integer_zerop (TREE_OPERAND (ref, 1)))
703 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
706 else if (TREE_CODE (rhs) == CONSTRUCTOR
707 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
708 && (CONSTRUCTOR_NELTS (rhs)
709 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
711 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
715 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
716 if (TREE_CODE (val) != INTEGER_CST
717 && TREE_CODE (val) != REAL_CST
718 && TREE_CODE (val) != FIXED_CST)
721 return build_vector_from_ctor (TREE_TYPE (rhs),
722 CONSTRUCTOR_ELTS (rhs));
725 else if (DECL_P (rhs))
726 return unshare_expr (get_symbol_constant_value (rhs));
728 /* If we couldn't fold the RHS, hand over to the generic
730 if (result == NULL_TREE)
733 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
734 that may have been added by fold, and "useless" type
735 conversions that might now be apparent due to propagation. */
736 STRIP_USELESS_TYPE_CONVERSION (result);
738 if (result != rhs && valid_gimple_rhs_p (result))
745 case GIMPLE_UNARY_RHS:
747 tree rhs = gimple_assign_rhs1 (stmt);
749 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
752 /* If the operation was a conversion do _not_ mark a
753 resulting constant with TREE_OVERFLOW if the original
754 constant was not. These conversions have implementation
755 defined behavior and retaining the TREE_OVERFLOW flag
756 here would confuse later passes such as VRP. */
757 if (CONVERT_EXPR_CODE_P (subcode)
758 && TREE_CODE (result) == INTEGER_CST
759 && TREE_CODE (rhs) == INTEGER_CST)
760 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
762 STRIP_USELESS_TYPE_CONVERSION (result);
763 if (valid_gimple_rhs_p (result))
766 else if (CONVERT_EXPR_CODE_P (subcode)
767 && POINTER_TYPE_P (gimple_expr_type (stmt))
768 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
770 tree type = gimple_expr_type (stmt);
771 tree t = maybe_fold_offset_to_address (loc,
772 gimple_assign_rhs1 (stmt),
773 integer_zero_node, type);
780 case GIMPLE_BINARY_RHS:
781 /* Try to fold pointer addition. */
782 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
784 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
785 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
787 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
788 if (!useless_type_conversion_p
789 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
790 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
792 result = maybe_fold_stmt_addition (gimple_location (stmt),
794 gimple_assign_rhs1 (stmt),
795 gimple_assign_rhs2 (stmt));
799 result = fold_binary_loc (loc, subcode,
800 TREE_TYPE (gimple_assign_lhs (stmt)),
801 gimple_assign_rhs1 (stmt),
802 gimple_assign_rhs2 (stmt));
806 STRIP_USELESS_TYPE_CONVERSION (result);
807 if (valid_gimple_rhs_p (result))
810 /* Fold might have produced non-GIMPLE, so if we trust it blindly
811 we lose canonicalization opportunities. Do not go again
812 through fold here though, or the same non-GIMPLE will be
814 if (commutative_tree_code (subcode)
815 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
816 gimple_assign_rhs2 (stmt), false))
817 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
818 gimple_assign_rhs2 (stmt),
819 gimple_assign_rhs1 (stmt));
823 case GIMPLE_TERNARY_RHS:
824 result = fold_ternary_loc (loc, subcode,
825 TREE_TYPE (gimple_assign_lhs (stmt)),
826 gimple_assign_rhs1 (stmt),
827 gimple_assign_rhs2 (stmt),
828 gimple_assign_rhs3 (stmt));
832 STRIP_USELESS_TYPE_CONVERSION (result);
833 if (valid_gimple_rhs_p (result))
836 /* Fold might have produced non-GIMPLE, so if we trust it blindly
837 we lose canonicalization opportunities. Do not go again
838 through fold here though, or the same non-GIMPLE will be
840 if (commutative_ternary_tree_code (subcode)
841 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
842 gimple_assign_rhs2 (stmt), false))
843 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
844 gimple_assign_rhs2 (stmt),
845 gimple_assign_rhs1 (stmt),
846 gimple_assign_rhs3 (stmt));
850 case GIMPLE_INVALID_RHS:
857 /* Attempt to fold a conditional statement. Return true if any changes were
858 made. We only attempt to fold the condition expression, and do not perform
859 any transformation that would require alteration of the cfg. It is
860 assumed that the operands have been previously folded. */
863 fold_gimple_cond (gimple stmt)
865 tree result = fold_binary_loc (gimple_location (stmt),
866 gimple_cond_code (stmt),
868 gimple_cond_lhs (stmt),
869 gimple_cond_rhs (stmt));
873 STRIP_USELESS_TYPE_CONVERSION (result);
874 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
876 gimple_cond_set_condition_from_tree (stmt, result);
884 /* Convert EXPR into a GIMPLE value suitable for substitution on the
885 RHS of an assignment. Insert the necessary statements before
886 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
887 is replaced. If the call is expected to produces a result, then it
888 is replaced by an assignment of the new RHS to the result variable.
889 If the result is to be ignored, then the call is replaced by a
890 GIMPLE_NOP. A proper VDEF chain is retained by making the first
891 VUSE and the last VDEF of the whole sequence be the same as the replaced
892 statement and using new SSA names for stores in between. */
895 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
898 tree tmp = NULL_TREE; /* Silence warning. */
899 gimple stmt, new_stmt;
900 gimple_stmt_iterator i;
901 gimple_seq stmts = gimple_seq_alloc();
902 struct gimplify_ctx gctx;
904 gimple laststore = NULL;
907 stmt = gsi_stmt (*si_p);
909 gcc_assert (is_gimple_call (stmt));
911 lhs = gimple_call_lhs (stmt);
912 reaching_vuse = gimple_vuse (stmt);
914 push_gimplify_context (&gctx);
916 if (lhs == NULL_TREE)
917 gimplify_and_add (expr, &stmts);
919 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
921 pop_gimplify_context (NULL);
923 if (gimple_has_location (stmt))
924 annotate_all_with_location (stmts, gimple_location (stmt));
926 /* The replacement can expose previously unreferenced variables. */
927 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
931 gsi_insert_before (si_p, last, GSI_NEW_STMT);
934 new_stmt = gsi_stmt (i);
935 if (gimple_in_ssa_p (cfun))
937 find_new_referenced_vars (new_stmt);
938 mark_symbols_for_renaming (new_stmt);
940 /* If the new statement has a VUSE, update it with exact SSA name we
941 know will reach this one. */
942 if (gimple_vuse (new_stmt))
944 /* If we've also seen a previous store create a new VDEF for
945 the latter one, and make that the new reaching VUSE. */
948 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
949 gimple_set_vdef (laststore, reaching_vuse);
950 update_stmt (laststore);
953 gimple_set_vuse (new_stmt, reaching_vuse);
954 gimple_set_modified (new_stmt, true);
956 if (gimple_assign_single_p (new_stmt)
957 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
959 laststore = new_stmt;
964 if (lhs == NULL_TREE)
966 /* If we replace a call without LHS that has a VDEF and our new
967 sequence ends with a store we must make that store have the same
968 vdef in order not to break the sequencing. This can happen
969 for instance when folding memcpy calls into assignments. */
970 if (gimple_vdef (stmt) && laststore)
972 gimple_set_vdef (laststore, gimple_vdef (stmt));
973 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
974 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
975 update_stmt (laststore);
977 else if (gimple_in_ssa_p (cfun))
979 unlink_stmt_vdef (stmt);
988 gsi_insert_before (si_p, last, GSI_NEW_STMT);
991 if (laststore && is_gimple_reg (lhs))
993 gimple_set_vdef (laststore, gimple_vdef (stmt));
994 update_stmt (laststore);
995 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
996 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1001 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
1002 gimple_set_vdef (laststore, reaching_vuse);
1003 update_stmt (laststore);
1006 new_stmt = gimple_build_assign (lhs, tmp);
1007 if (!is_gimple_reg (tmp))
1008 gimple_set_vuse (new_stmt, reaching_vuse);
1009 if (!is_gimple_reg (lhs))
1011 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1012 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1013 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
1017 gimple_set_location (new_stmt, gimple_location (stmt));
1018 gsi_replace (si_p, new_stmt, false);
1021 /* Return the string length, maximum string length or maximum value of
1023 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1024 is not NULL and, for TYPE == 0, its value is not equal to the length
1025 we determine or if we are unable to determine the length or value,
1026 return false. VISITED is a bitmap of visited variables.
1027 TYPE is 0 if string length should be returned, 1 for maximum string
1028 length and 2 for maximum value ARG can have. */
1031 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
1036 if (TREE_CODE (arg) != SSA_NAME)
1038 if (TREE_CODE (arg) == COND_EXPR)
1039 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
1040 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
1041 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1042 else if (TREE_CODE (arg) == ADDR_EXPR
1043 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1044 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1046 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1047 if (TREE_CODE (aop0) == INDIRECT_REF
1048 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1049 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1050 length, visited, type);
1056 if (TREE_CODE (val) != INTEGER_CST
1057 || tree_int_cst_sgn (val) < 0)
1061 val = c_strlen (arg, 1);
1069 if (TREE_CODE (*length) != INTEGER_CST
1070 || TREE_CODE (val) != INTEGER_CST)
1073 if (tree_int_cst_lt (*length, val))
1077 else if (simple_cst_equal (val, *length) != 1)
1085 /* If we were already here, break the infinite cycle. */
1086 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1090 def_stmt = SSA_NAME_DEF_STMT (var);
1092 switch (gimple_code (def_stmt))
1095 /* The RHS of the statement defining VAR must either have a
1096 constant length or come from another SSA_NAME with a constant
1098 if (gimple_assign_single_p (def_stmt)
1099 || gimple_assign_unary_nop_p (def_stmt))
1101 tree rhs = gimple_assign_rhs1 (def_stmt);
1102 return get_maxval_strlen (rhs, length, visited, type);
1108 /* All the arguments of the PHI node must have the same constant
1112 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1114 tree arg = gimple_phi_arg (def_stmt, i)->def;
1116 /* If this PHI has itself as an argument, we cannot
1117 determine the string length of this argument. However,
1118 if we can find a constant string length for the other
1119 PHI args then we can still be sure that this is a
1120 constant string length. So be optimistic and just
1121 continue with the next argument. */
1122 if (arg == gimple_phi_result (def_stmt))
1125 if (!get_maxval_strlen (arg, length, visited, type))
1137 /* Fold builtin call in statement STMT. Returns a simplified tree.
1138 We may return a non-constant expression, including another call
1139 to a different function and with different arguments, e.g.,
1140 substituting memcpy for strcpy when the string length is known.
1141 Note that some builtins expand into inline code that may not
1142 be valid in GIMPLE. Callers must take care. */
/* NOTE(review): original lines are elided between the numbered statements
   below (the leading integers are the file's own line numbers); only
   comments have been added in this pass.  */
1145 gimple_fold_builtin (gimple stmt)
1147 tree result, val[3];
1153 location_t loc = gimple_location (stmt);
1155 gcc_assert (is_gimple_call (stmt));
/* When the call has no LHS its value is unused, which allows more
   aggressive folding of the builtin.  */
1157 ignore = (gimple_call_lhs (stmt) == NULL);
1159 /* First try the generic builtin folder. If that succeeds, return the
1161 result = fold_call_stmt (stmt, ignore);
1165 STRIP_NOPS (result);
1169 /* Ignore MD builtins. */
1170 callee = gimple_call_fndecl (stmt);
1171 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1174 /* If the builtin could not be folded, and it has no argument list,
1176 nargs = gimple_call_num_args (stmt);
1180 /* Limit the work only for builtins we know how to simplify. */
1181 switch (DECL_FUNCTION_CODE (callee))
1183 case BUILT_IN_STRLEN:
1184 case BUILT_IN_FPUTS:
1185 case BUILT_IN_FPUTS_UNLOCKED:
1189 case BUILT_IN_STRCPY:
1190 case BUILT_IN_STRNCPY:
1194 case BUILT_IN_MEMCPY_CHK:
1195 case BUILT_IN_MEMPCPY_CHK:
1196 case BUILT_IN_MEMMOVE_CHK:
1197 case BUILT_IN_MEMSET_CHK:
1198 case BUILT_IN_STRNCPY_CHK:
1202 case BUILT_IN_STRCPY_CHK:
1203 case BUILT_IN_STPCPY_CHK:
1207 case BUILT_IN_SNPRINTF_CHK:
1208 case BUILT_IN_VSNPRINTF_CHK:
/* NOTE(review): arg_idx appears to select which argument carries the
   string whose maximal length is computed below; its assignment per
   builtin is in the elided lines -- TODO confirm against full source.  */
1216 if (arg_idx >= nargs)
1219 /* Try to use the dataflow information gathered by the CCP process. */
1220 visited = BITMAP_ALLOC (NULL);
1221 bitmap_clear (visited);
1223 memset (val, 0, sizeof (val));
1224 a = gimple_call_arg (stmt, arg_idx);
1225 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1226 val[arg_idx] = NULL_TREE;
1228 BITMAP_FREE (visited);
/* Second dispatch: fold each builtin using the string-length
   information collected in VAL above.  */
1231 switch (DECL_FUNCTION_CODE (callee))
1233 case BUILT_IN_STRLEN:
1234 if (val[0] && nargs == 1)
1237 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1239 /* If the result is not a valid gimple value, or not a cast
1240 of a valid gimple value, then we cannot use the result. */
1241 if (is_gimple_val (new_val)
1242 || (is_gimple_cast (new_val)
1243 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1248 case BUILT_IN_STRCPY:
1249 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1250 result = fold_builtin_strcpy (loc, callee,
1251 gimple_call_arg (stmt, 0),
1252 gimple_call_arg (stmt, 1),
1256 case BUILT_IN_STRNCPY:
1257 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1258 result = fold_builtin_strncpy (loc, callee,
1259 gimple_call_arg (stmt, 0),
1260 gimple_call_arg (stmt, 1),
1261 gimple_call_arg (stmt, 2),
1265 case BUILT_IN_FPUTS:
1267 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1268 gimple_call_arg (stmt, 1),
1269 ignore, false, val[0]);
1272 case BUILT_IN_FPUTS_UNLOCKED:
1274 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1275 gimple_call_arg (stmt, 1),
1276 ignore, true, val[0]);
1279 case BUILT_IN_MEMCPY_CHK:
1280 case BUILT_IN_MEMPCPY_CHK:
1281 case BUILT_IN_MEMMOVE_CHK:
1282 case BUILT_IN_MEMSET_CHK:
1283 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1284 result = fold_builtin_memory_chk (loc, callee,
1285 gimple_call_arg (stmt, 0),
1286 gimple_call_arg (stmt, 1),
1287 gimple_call_arg (stmt, 2),
1288 gimple_call_arg (stmt, 3),
1290 DECL_FUNCTION_CODE (callee));
1293 case BUILT_IN_STRCPY_CHK:
1294 case BUILT_IN_STPCPY_CHK:
1295 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1296 result = fold_builtin_stxcpy_chk (loc, callee,
1297 gimple_call_arg (stmt, 0),
1298 gimple_call_arg (stmt, 1),
1299 gimple_call_arg (stmt, 2),
1301 DECL_FUNCTION_CODE (callee));
1304 case BUILT_IN_STRNCPY_CHK:
1305 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1306 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1307 gimple_call_arg (stmt, 1),
1308 gimple_call_arg (stmt, 2),
1309 gimple_call_arg (stmt, 3),
1313 case BUILT_IN_SNPRINTF_CHK:
1314 case BUILT_IN_VSNPRINTF_CHK:
1315 if (val[1] && is_gimple_val (val[1]))
1316 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1317 DECL_FUNCTION_CODE (callee));
/* If the caller discards the value, strip side-effect-free parts of
   the folded expression.  */
1324 if (result && ignore)
1325 result = fold_ignored_result (result);
1329 /* Return the first of the base binfos of BINFO that has virtual functions. */
/* Walks BINFO's direct bases in order; returns on the first base whose
   BINFO_VIRTUALS is non-null (return statement is in elided lines).  */
1332 get_first_base_binfo_with_virtuals (tree binfo)
1337 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1338 if (BINFO_VIRTUALS (base_binfo))
1345 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1346 it is found or NULL_TREE if it is not. */
/* Linear scan over the direct bases; RES stays NULL_TREE on no match.  */
1349 get_base_binfo_for_type (tree binfo, tree type)
1353 tree res = NULL_TREE;
1355 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1356 if (TREE_TYPE (base_binfo) == type)
1365 /* Return a binfo describing the part of object referenced by expression REF.
1366 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1367 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1368 a simple declaration, indirect reference or an SSA_NAME. If the function
1369 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1370 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1371 Otherwise the first non-artificial field declaration or the base declaration
1372 will be examined to get the encapsulating type. */
/* NOTE(review): interior lines (loop header, several returns) are elided;
   the visible code peels COMPONENT_REFs from the outside in.  */
1375 gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
1379 if (TREE_CODE (ref) == COMPONENT_REF)
1382 tree binfo, base_binfo;
1383 tree field = TREE_OPERAND (ref, 1);
/* A user-written (non-artificial) field directly names the subobject
   type, so its own binfo answers the query.  */
1385 if (!DECL_ARTIFICIAL (field))
1387 tree type = TREE_TYPE (field);
1388 if (TREE_CODE (type) == RECORD_TYPE)
1389 return TYPE_BINFO (type);
/* Artificial field: this is a base-class subobject; look it up in the
   enclosing type's binfo hierarchy instead.  */
1394 par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
1395 binfo = TYPE_BINFO (par_type);
1397 || BINFO_N_BASE_BINFOS (binfo) == 0)
1400 base_binfo = get_first_base_binfo_with_virtuals (binfo);
1401 if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
1405 d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
1407 /* Get descendant binfo. */
1410 return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
/* Otherwise strip this COMPONENT_REF and continue with the inner ref.  */
1413 ref = TREE_OPERAND (ref, 0);
1415 else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
1416 return TYPE_BINFO (TREE_TYPE (ref));
1417 else if (known_binfo
1418 && (TREE_CODE (ref) == SSA_NAME
1419 || TREE_CODE (ref) == MEM_REF))
1426 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1427 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1428 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
1431 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
1436 v = BINFO_VIRTUALS (known_binfo);
/* Each vtable slot occupies several entries when the target uses
   function descriptors; step by that amount while walking to TOKEN.  */
1440 i += (TARGET_VTABLE_USES_DESCRIPTORS
1441 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1445 fndecl = TREE_VALUE (v);
1446 /* When cgraph node is missing and function is not public, we cannot
1447 devirtualize. This can happen in WHOPR when the actual method
1448 ends up in other partition, because we found devirtualization
1449 possibility too late. */
1450 if (static_object_in_other_unit_p (fndecl))
1452 return build_fold_addr_expr (fndecl);
1456 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1457 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1458 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1459 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1460 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
1463 gimple_fold_obj_type_ref (tree ref, tree known_type)
1465 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1466 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
/* Strip a leading ADDR_EXPR so the binfo lookup sees the object itself.  */
1469 if (TREE_CODE (obj) == ADDR_EXPR)
1470 obj = TREE_OPERAND (obj, 0);
1472 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
1475 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1476 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1482 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1483 The statement may be replaced by another statement, e.g., if the call
1484 simplifies to a constant value. Return true if any changes were made.
1485 It is assumed that the operands have been previously folded. */
1488 fold_gimple_call (gimple_stmt_iterator *gsi)
1490 gimple stmt = gsi_stmt (*gsi);
1492 tree callee = gimple_call_fndecl (stmt);
1494 /* Check for builtins that CCP can handle using information not
1495 available in the generic fold routines. */
1496 if (callee && DECL_BUILT_IN (callee))
1498 tree result = gimple_fold_builtin (stmt);
/* Prefer the cheap in-place replacement; fall back to regimplifying
   the folded tree when it is not a valid GIMPLE RHS.  */
1502 if (!update_call_from_tree (gsi, result))
1503 gimplify_and_update_call_from_tree (gsi, result);
1509 /* ??? Should perhaps do this in fold proper. However, doing it
1510 there requires that we create a new CALL_EXPR, and that requires
1511 copying EH region info to the new node. Easier to just do it
1512 here where we can just smash the call operand. */
1513 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
1514 callee = gimple_call_fn (stmt);
1515 if (TREE_CODE (callee) == OBJ_TYPE_REF
1516 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
/* Devirtualize: replace the OBJ_TYPE_REF callee with a direct
   function address when the dynamic type can be proven.  */
1520 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
1523 gimple_call_set_fn (stmt, t);
1532 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1533 distinguishes both cases. */
/* NOTE(review): several lines (case labels, null checks) are elided;
   the switch dispatches on the statement kind and folds each form.  */
1536 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1538 bool changed = false;
1539 gimple stmt = gsi_stmt (*gsi);
1542 /* Fold the main computation performed by the statement. */
1543 switch (gimple_code (stmt))
1547 unsigned old_num_ops = gimple_num_ops (stmt);
1548 tree new_rhs = fold_gimple_assign (gsi);
1549 tree lhs = gimple_assign_lhs (stmt);
/* Insert an explicit conversion when folding changed the RHS type.  */
1551 && !useless_type_conversion_p (TREE_TYPE (lhs),
1552 TREE_TYPE (new_rhs)))
1553 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place folding must not grow the statement's operand count.  */
1556 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1558 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1565 changed |= fold_gimple_cond (stmt);
1569 /* Fold *& in call arguments. */
1570 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1571 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1573 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1576 gimple_call_set_arg (stmt, i, tmp);
1580 /* The entire statement may be replaced in this case. */
1582 changed |= fold_gimple_call (gsi);
1586 /* Fold *& in asm operands. */
1587 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1589 tree link = gimple_asm_output_op (stmt, i);
1590 tree op = TREE_VALUE (link);
1591 if (REFERENCE_CLASS_P (op)
1592 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1594 TREE_VALUE (link) = op;
1598 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1600 tree link = gimple_asm_input_op (stmt, i);
1601 tree op = TREE_VALUE (link);
1602 if (REFERENCE_CLASS_P (op)
1603 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1605 TREE_VALUE (link) = op;
1612 if (gimple_debug_bind_p (stmt))
1614 tree val = gimple_debug_bind_get_value (stmt);
1616 && REFERENCE_CLASS_P (val))
1618 tree tem = maybe_fold_reference (val, false);
1621 gimple_debug_bind_set_value (stmt, tem);
/* Re-fetch: a whole-statement replacement above may have invalidated
   the old STMT pointer.  */
1631 stmt = gsi_stmt (*gsi);
1633 /* Fold *& on the lhs. */
1634 if (gimple_has_lhs (stmt))
1636 tree lhs = gimple_get_lhs (stmt);
1637 if (lhs && REFERENCE_CLASS_P (lhs))
1639 tree new_lhs = maybe_fold_reference (lhs, true);
1642 gimple_set_lhs (stmt, new_lhs);
1651 /* Fold the statement pointed to by GSI. In some cases, this function may
1652 replace the whole statement with a new one. Returns true iff folding
1654 The statement pointed to by GSI should be in valid gimple form but may
1655 be in unfolded state as resulting from for example constant propagation
1656 which can produce *&x = 0. */
/* Thin wrapper: INPLACE == false permits whole-statement replacement.  */
1659 fold_stmt (gimple_stmt_iterator *gsi)
1661 return fold_stmt_1 (gsi, false);
1664 /* Perform the minimal folding on statement STMT. Only operations like
1665 *&x created by constant propagation are handled. The statement cannot
1666 be replaced with a new one. Return true if the statement was
1667 changed, false otherwise.
1668 The statement STMT should be in valid gimple form but may
1669 be in unfolded state as resulting from for example constant propagation
1670 which can produce *&x = 0. */
1673 fold_stmt_inplace (gimple stmt)
1675 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1676 bool changed = fold_stmt_1 (&gsi, true);
/* INPLACE folding must never have replaced the statement itself.  */
1677 gcc_assert (gsi_stmt (gsi) == stmt);
1681 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1682 if EXPR is null or we don't know how.
1683 If non-null, the result always has boolean type. */
/* NOTE(review): the branch selecting on INVERT is elided; the first
   group below handles the inverted case (note the swapped constants),
   the second the plain case.  */
1686 canonicalize_bool (tree expr, bool invert)
1692 if (integer_nonzerop (expr))
1693 return boolean_false_node;
1694 else if (integer_zerop (expr))
1695 return boolean_true_node;
1696 else if (TREE_CODE (expr) == SSA_NAME)
1697 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1698 build_int_cst (TREE_TYPE (expr), 0))
1699 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1700 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1702 TREE_OPERAND (expr, 0),
1703 TREE_OPERAND (expr, 1));
/* Non-inverted case: already boolean-typed expressions pass through
   in canonical form.  */
1709 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1711 if (integer_nonzerop (expr))
1712 return boolean_true_node;
1713 else if (integer_zerop (expr))
1714 return boolean_false_node;
1715 else if (TREE_CODE (expr) == SSA_NAME)
1716 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1717 build_int_cst (TREE_TYPE (expr), 0));
1718 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1719 return fold_build2 (TREE_CODE (expr),
1721 TREE_OPERAND (expr, 0),
1722 TREE_OPERAND (expr, 1));
1728 /* Check to see if a boolean expression EXPR is logically equivalent to the
1729 comparison (OP1 CODE OP2). Check for various identities involving
1733 same_bool_comparison_p (const_tree expr, enum tree_code code,
1734 const_tree op1, const_tree op2)
1738 /* The obvious case. */
1739 if (TREE_CODE (expr) == code
1740 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1741 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1744 /* Check for comparing (name, name != 0) and the case where expr
1745 is an SSA_NAME with a definition matching the comparison. */
1746 if (TREE_CODE (expr) == SSA_NAME
1747 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean NAME equals (NAME != 0) and the negation of (NAME == 0).  */
1749 if (operand_equal_p (expr, op1, 0))
1750 return ((code == NE_EXPR && integer_zerop (op2))
1751 || (code == EQ_EXPR && integer_nonzerop (op2)));
1752 s = SSA_NAME_DEF_STMT (expr);
1753 if (is_gimple_assign (s)
1754 && gimple_assign_rhs_code (s) == code
1755 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1756 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1760 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1761 of name is a comparison, recurse. */
1762 if (TREE_CODE (op1) == SSA_NAME
1763 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1765 s = SSA_NAME_DEF_STMT (op1);
1766 if (is_gimple_assign (s)
1767 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1769 enum tree_code c = gimple_assign_rhs_code (s);
/* (def != 0) / (def == 1) keeps the defining comparison's sense ...  */
1770 if ((c == NE_EXPR && integer_zerop (op2))
1771 || (c == EQ_EXPR && integer_nonzerop (op2)))
1772 return same_bool_comparison_p (expr, c,
1773 gimple_assign_rhs1 (s),
1774 gimple_assign_rhs2 (s));
/* ... while (def == 0) / (def != 1) inverts it.  */
1775 if ((c == EQ_EXPR && integer_zerop (op2))
1776 || (c == NE_EXPR && integer_nonzerop (op2)))
1777 return same_bool_comparison_p (expr,
1778 invert_tree_comparison (c, false),
1779 gimple_assign_rhs1 (s),
1780 gimple_assign_rhs2 (s));
1786 /* Check to see if two boolean expressions OP1 and OP2 are logically
1790 same_bool_result_p (const_tree op1, const_tree op2)
1792 /* Simple cases first. */
1793 if (operand_equal_p (op1, op2, 0))
1796 /* Check the cases where at least one of the operands is a comparison.
1797 These are a bit smarter than operand_equal_p in that they apply some
1798 identifies on SSA_NAMEs. */
/* Symmetric check: try each operand as the comparison side.  */
1799 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1800 && same_bool_comparison_p (op1, TREE_CODE (op2),
1801 TREE_OPERAND (op2, 0),
1802 TREE_OPERAND (op2, 1)))
1804 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1805 && same_bool_comparison_p (op2, TREE_CODE (op1),
1806 TREE_OPERAND (op1, 0),
1807 TREE_OPERAND (op1, 1)))
1814 /* Forward declarations for some mutually recursive functions. */
/* and_* and or_* below form two mirrored recursion groups over
   (comparison, SSA definition) pairs; declarations only.  */
1817 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1818 enum tree_code code2, tree op2a, tree op2b);
1820 and_var_with_comparison (tree var, bool invert,
1821 enum tree_code code2, tree op2a, tree op2b);
1823 and_var_with_comparison_1 (gimple stmt,
1824 enum tree_code code2, tree op2a, tree op2b);
1826 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1827 enum tree_code code2, tree op2a, tree op2b);
1829 or_var_with_comparison (tree var, bool invert,
1830 enum tree_code code2, tree op2a, tree op2b);
1832 or_var_with_comparison_1 (gimple stmt,
1833 enum tree_code code2, tree op2a, tree op2b);
1835 /* Helper function for and_comparisons_1: try to simplify the AND of the
1836 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1837 If INVERT is true, invert the value of the VAR before doing the AND.
1838 Return NULL_EXPR if we can't simplify this to a single expression. */
1841 and_var_with_comparison (tree var, bool invert,
1842 enum tree_code code2, tree op2a, tree op2b)
1845 gimple stmt = SSA_NAME_DEF_STMT (var);
1847 /* We can only deal with variables whose definitions are assignments. */
1848 if (!is_gimple_assign (stmt))
1851 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1852 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1853 Then we only have to consider the simpler non-inverted cases. */
1855 t = or_var_with_comparison_1 (stmt,
1856 invert_tree_comparison (code2, false),
1859 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the inversion deferred above.  */
1860 return canonicalize_bool (t, invert);
1863 /* Try to simplify the AND of the ssa variable defined by the assignment
1864 STMT with the comparison specified by (OP2A CODE2 OP2B).
1865 Return NULL_EXPR if we can't simplify this to a single expression. */
1868 and_var_with_comparison_1 (gimple stmt,
1869 enum tree_code code2, tree op2a, tree op2b)
1871 tree var = gimple_assign_lhs (stmt);
1872 tree true_test_var = NULL_TREE;
1873 tree false_test_var = NULL_TREE;
1874 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1876 /* Check for identities like (var AND (var == 0)) => false. */
1877 if (TREE_CODE (op2a) == SSA_NAME
1878 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
1880 if ((code2 == NE_EXPR && integer_zerop (op2b))
1881 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1883 true_test_var = op2a;
1884 if (var == true_test_var)
/* ... while (op2a == 0) / (op2a != 1) tests it for falsehood.  */
1887 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1888 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1890 false_test_var = op2a;
1891 if (var == false_test_var)
1892 return boolean_false_node;
1896 /* If the definition is a comparison, recurse on it. */
1897 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1899 tree t = and_comparisons_1 (innercode,
1900 gimple_assign_rhs1 (stmt),
1901 gimple_assign_rhs2 (stmt),
1909 /* If the definition is an AND or OR expression, we may be able to
1910 simplify by reassociating. */
1911 if (innercode == TRUTH_AND_EXPR
1912 || innercode == TRUTH_OR_EXPR
1913 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1914 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1916 tree inner1 = gimple_assign_rhs1 (stmt);
1917 tree inner2 = gimple_assign_rhs2 (stmt);
1920 tree partial = NULL_TREE;
1921 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1923 /* Check for boolean identities that don't require recursive examination
1925 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1926 inner1 AND (inner1 OR inner2) => inner1
1927 !inner1 AND (inner1 AND inner2) => false
1928 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1929 Likewise for similar cases involving inner2. */
1930 if (inner1 == true_test_var)
1931 return (is_and ? var : inner1);
1932 else if (inner2 == true_test_var)
1933 return (is_and ? var : inner2);
1934 else if (inner1 == false_test_var)
1936 ? boolean_false_node
1937 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1938 else if (inner2 == false_test_var)
1940 ? boolean_false_node
1941 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1943 /* Next, redistribute/reassociate the AND across the inner tests.
1944 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1945 if (TREE_CODE (inner1) == SSA_NAME
1946 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1947 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1948 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1949 gimple_assign_rhs1 (s),
1950 gimple_assign_rhs2 (s),
1951 code2, op2a, op2b)))
1953 /* Handle the AND case, where we are reassociating:
1954 (inner1 AND inner2) AND (op2a code2 op2b)
1956 If the partial result t is a constant, we win. Otherwise
1957 continue on to try reassociating with the other inner test. */
1960 if (integer_onep (t))
1962 else if (integer_zerop (t))
1963 return boolean_false_node;
1966 /* Handle the OR case, where we are redistributing:
1967 (inner1 OR inner2) AND (op2a code2 op2b)
1968 => (t OR (inner2 AND (op2a code2 op2b))) */
1971 if (integer_onep (t))
1972 return boolean_true_node;
1974 /* Save partial result for later. */
1979 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1980 if (TREE_CODE (inner2) == SSA_NAME
1981 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1982 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1983 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1984 gimple_assign_rhs1 (s),
1985 gimple_assign_rhs2 (s),
1986 code2, op2a, op2b)))
1988 /* Handle the AND case, where we are reassociating:
1989 (inner1 AND inner2) AND (op2a code2 op2b)
1990 => (inner1 AND t) */
1993 if (integer_onep (t))
1995 else if (integer_zerop (t))
1996 return boolean_false_node;
1999 /* Handle the OR case. where we are redistributing:
2000 (inner1 OR inner2) AND (op2a code2 op2b)
2001 => (t OR (inner1 AND (op2a code2 op2b)))
2002 => (t OR partial) */
2005 if (integer_onep (t))
2006 return boolean_true_node;
2009 /* We already got a simplification for the other
2010 operand to the redistributed OR expression. The
2011 interesting case is when at least one is false.
2012 Or, if both are the same, we can apply the identity
2014 if (integer_zerop (partial))
2016 else if (integer_zerop (t))
2018 else if (same_bool_result_p (t, partial))
2027 /* Try to simplify the AND of two comparisons defined by
2028 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2029 If this can be done without constructing an intermediate value,
2030 return the resulting tree; otherwise NULL_TREE is returned.
2031 This function is deliberately asymmetric as it recurses on SSA_DEFs
2032 in the first comparison but not the second. */
2035 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2036 enum tree_code code2, tree op2a, tree op2b)
2038 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2039 if (operand_equal_p (op1a, op2a, 0)
2040 && operand_equal_p (op1b, op2b, 0))
2042 tree t = combine_comparisons (UNKNOWN_LOCATION,
2043 TRUTH_ANDIF_EXPR, code1, code2,
2044 boolean_type_node, op1a, op1b);
2049 /* Likewise the swapped case of the above. */
2050 if (operand_equal_p (op1a, op2b, 0)
2051 && operand_equal_p (op1b, op2a, 0))
2053 tree t = combine_comparisons (UNKNOWN_LOCATION,
2054 TRUTH_ANDIF_EXPR, code1,
2055 swap_tree_comparison (code2),
2056 boolean_type_node, op1a, op1b);
2061 /* If both comparisons are of the same value against constants, we might
2062 be able to merge them. */
2063 if (operand_equal_p (op1a, op2a, 0)
2064 && TREE_CODE (op1b) == INTEGER_CST
2065 && TREE_CODE (op2b) == INTEGER_CST)
2067 int cmp = tree_int_cst_compare (op1b, op2b);
2069 /* If we have (op1a == op1b), we should either be able to
2070 return that or FALSE, depending on whether the constant op1b
2071 also satisfies the other comparison against op2b. */
2072 if (code1 == EQ_EXPR)
/* VAL records whether the constant op1b satisfies (x CODE2 op2b).  */
2078 case EQ_EXPR: val = (cmp == 0); break;
2079 case NE_EXPR: val = (cmp != 0); break;
2080 case LT_EXPR: val = (cmp < 0); break;
2081 case GT_EXPR: val = (cmp > 0); break;
2082 case LE_EXPR: val = (cmp <= 0); break;
2083 case GE_EXPR: val = (cmp >= 0); break;
2084 default: done = false;
2089 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2091 return boolean_false_node;
2094 /* Likewise if the second comparison is an == comparison. */
2095 else if (code2 == EQ_EXPR)
/* Comparison senses flip here because CMP compares op1b to op2b.  */
2101 case EQ_EXPR: val = (cmp == 0); break;
2102 case NE_EXPR: val = (cmp != 0); break;
2103 case LT_EXPR: val = (cmp > 0); break;
2104 case GT_EXPR: val = (cmp < 0); break;
2105 case LE_EXPR: val = (cmp >= 0); break;
2106 case GE_EXPR: val = (cmp <= 0); break;
2107 default: done = false;
2112 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2114 return boolean_false_node;
2118 /* Same business with inequality tests. */
2119 else if (code1 == NE_EXPR)
2124 case EQ_EXPR: val = (cmp != 0); break;
2125 case NE_EXPR: val = (cmp == 0); break;
2126 case LT_EXPR: val = (cmp >= 0); break;
2127 case GT_EXPR: val = (cmp <= 0); break;
2128 case LE_EXPR: val = (cmp > 0); break;
2129 case GE_EXPR: val = (cmp < 0); break;
2134 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2136 else if (code2 == NE_EXPR)
2141 case EQ_EXPR: val = (cmp == 0); break;
2142 case NE_EXPR: val = (cmp != 0); break;
2143 case LT_EXPR: val = (cmp <= 0); break;
2144 case GT_EXPR: val = (cmp >= 0); break;
2145 case LE_EXPR: val = (cmp < 0); break;
2146 case GE_EXPR: val = (cmp > 0); break;
2151 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2154 /* Chose the more restrictive of two < or <= comparisons. */
2155 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2156 && (code2 == LT_EXPR || code2 == LE_EXPR))
2158 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2159 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2161 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2164 /* Likewise chose the more restrictive of two > or >= comparisons. */
2165 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2166 && (code2 == GT_EXPR || code2 == GE_EXPR))
2168 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2169 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2171 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2174 /* Check for singleton ranges. */
/* (x <= c AND x >= c) collapses to (x == c); guard on cmp == 0 is in
   the elided line above.  */
2176 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2177 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2178 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2180 /* Check for disjoint ranges. */
2182 && (code1 == LT_EXPR || code1 == LE_EXPR)
2183 && (code2 == GT_EXPR || code2 == GE_EXPR))
2184 return boolean_false_node;
2186 && (code1 == GT_EXPR || code1 == GE_EXPR)
2187 && (code2 == LT_EXPR || code2 == LE_EXPR)
2188 return boolean_false_node;
2191 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2192 NAME's definition is a truth value. See if there are any simplifications
2193 that can be done against the NAME's definition. */
2194 if (TREE_CODE (op1a) == SSA_NAME
2195 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2196 && (integer_zerop (op1b) || integer_onep (op1b)))
2198 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2199 || (code1 == NE_EXPR && integer_onep (op1b)));
2200 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2201 switch (gimple_code (stmt))
2204 /* Try to simplify by copy-propagating the definition. */
2205 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2208 /* If every argument to the PHI produces the same result when
2209 ANDed with the second comparison, we win.
2210 Do not do this unless the type is bool since we need a bool
2211 result here anyway. */
2212 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2214 tree result = NULL_TREE;
2216 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2218 tree arg = gimple_phi_arg_def (stmt, i);
2220 /* If this PHI has itself as an argument, ignore it.
2221 If all the other args produce the same result,
2223 if (arg == gimple_phi_result (stmt))
2225 else if (TREE_CODE (arg) == INTEGER_CST)
2227 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2230 result = boolean_false_node;
2231 else if (!integer_zerop (result))
2235 result = fold_build2 (code2, boolean_type_node,
2237 else if (!same_bool_comparison_p (result,
2241 else if (TREE_CODE (arg) == SSA_NAME)
2243 tree temp = and_var_with_comparison (arg, invert,
2249 else if (!same_bool_result_p (result, temp))
2265 /* Try to simplify the AND of two comparisons, specified by
2266 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2267 If this can be simplified to a single expression (without requiring
2268 introducing more SSA variables to hold intermediate values),
2269 return the resulting tree. Otherwise return NULL_TREE.
2270 If the result expression is non-null, it has boolean type. */
2273 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2274 enum tree_code code2, tree op2a, tree op2b)
/* and_comparisons_1 is asymmetric (recurses only on the first operand),
   so try both orderings before giving up.  */
2276 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2280 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2283 /* Helper function for or_comparisons_1: try to simplify the OR of the
2284 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2285 If INVERT is true, invert the value of VAR before doing the OR.
2286 Return NULL_EXPR if we can't simplify this to a single expression. */
2289 or_var_with_comparison (tree var, bool invert,
2290 enum tree_code code2, tree op2a, tree op2b)
2293 gimple stmt = SSA_NAME_DEF_STMT (var);
2295 /* We can only deal with variables whose definitions are assignments. */
2296 if (!is_gimple_assign (stmt))
2299 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2300 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2301 Then we only have to consider the simpler non-inverted cases. */
2303 t = and_var_with_comparison_1 (stmt,
2304 invert_tree_comparison (code2, false),
2307 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the inversion deferred above.  */
2308 return canonicalize_bool (t, invert);
2311 /* Try to simplify the OR of the ssa variable defined by the assignment
2312 STMT with the comparison specified by (OP2A CODE2 OP2B).
2313 Return NULL_EXPR if we can't simplify this to a single expression. */
/* Dual of and_var_with_comparison_1 with AND/OR and true/false swapped.  */
2316 or_var_with_comparison_1 (gimple stmt,
2317 enum tree_code code2, tree op2a, tree op2b)
2319 tree var = gimple_assign_lhs (stmt);
2320 tree true_test_var = NULL_TREE;
2321 tree false_test_var = NULL_TREE;
2322 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2324 /* Check for identities like (var OR (var != 0)) => true . */
2325 if (TREE_CODE (op2a) == SSA_NAME
2326 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2328 if ((code2 == NE_EXPR && integer_zerop (op2b))
2329 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2331 true_test_var = op2a;
2332 if (var == true_test_var)
2335 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2336 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2338 false_test_var = op2a;
2339 if (var == false_test_var)
2340 return boolean_true_node;
2344 /* If the definition is a comparison, recurse on it. */
2345 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2347 tree t = or_comparisons_1 (innercode,
2348 gimple_assign_rhs1 (stmt),
2349 gimple_assign_rhs2 (stmt),
2357 /* If the definition is an AND or OR expression, we may be able to
2358 simplify by reassociating. */
2359 if (innercode == TRUTH_AND_EXPR
2360 || innercode == TRUTH_OR_EXPR
2361 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2362 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2364 tree inner1 = gimple_assign_rhs1 (stmt);
2365 tree inner2 = gimple_assign_rhs2 (stmt);
2368 tree partial = NULL_TREE;
2369 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2371 /* Check for boolean identities that don't require recursive examination
2373 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2374 inner1 OR (inner1 AND inner2) => inner1
2375 !inner1 OR (inner1 OR inner2) => true
2376 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2378 if (inner1 == true_test_var)
2379 return (is_or ? var : inner1);
2380 else if (inner2 == true_test_var)
2381 return (is_or ? var : inner2);
2382 else if (inner1 == false_test_var)
2385 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2386 else if (inner2 == false_test_var)
2389 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2391 /* Next, redistribute/reassociate the OR across the inner tests.
2392 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2393 if (TREE_CODE (inner1) == SSA_NAME
2394 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2395 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2396 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2397 gimple_assign_rhs1 (s),
2398 gimple_assign_rhs2 (s),
2399 code2, op2a, op2b)))
2401 /* Handle the OR case, where we are reassociating:
2402 (inner1 OR inner2) OR (op2a code2 op2b)
2404 If the partial result t is a constant, we win. Otherwise
2405 continue on to try reassociating with the other inner test. */
2406 if (innercode == TRUTH_OR_EXPR)
2408 if (integer_onep (t))
2409 return boolean_true_node;
2410 else if (integer_zerop (t))
2414 /* Handle the AND case, where we are redistributing:
2415 (inner1 AND inner2) OR (op2a code2 op2b)
2416 => (t AND (inner2 OR (op2a code op2b))) */
2419 if (integer_zerop (t))
2420 return boolean_false_node;
2422 /* Save partial result for later. */
2427 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2428 if (TREE_CODE (inner2) == SSA_NAME
2429 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2430 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2431 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2432 gimple_assign_rhs1 (s),
2433 gimple_assign_rhs2 (s),
2434 code2, op2a, op2b)))
2436 /* Handle the OR case, where we are reassociating:
2437 (inner1 OR inner2) OR (op2a code2 op2b)
2439 if (innercode == TRUTH_OR_EXPR)
2441 if (integer_zerop (t))
2443 else if (integer_onep (t))
2444 return boolean_true_node;
2447 /* Handle the AND case, where we are redistributing:
2448 (inner1 AND inner2) OR (op2a code2 op2b)
2449 => (t AND (inner1 OR (op2a code2 op2b)))
2450 => (t AND partial) */
2453 if (integer_zerop (t))
2454 return boolean_false_node;
2457 /* We already got a simplification for the other
2458 operand to the redistributed AND expression. The
2459 interesting case is when at least one is true.
2460 Or, if both are the same, we can apply the identity
2461 (x AND x) == true. */
2462 if (integer_onep (partial))
2464 else if (integer_onep (t))
2466 else if (same_bool_result_p (t, partial))
2467 return boolean_true_node;
2475 /* Try to simplify the OR of two comparisons defined by
2476 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2477 If this can be done without constructing an intermediate value,
2478 return the resulting tree; otherwise NULL_TREE is returned.
2479 This function is deliberately asymmetric as it recurses on SSA_DEFs
2480 in the first comparison but not the second. */
2483 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2484 enum tree_code code2, tree op2a, tree op2b)
2486 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2487 if (operand_equal_p (op1a, op2a, 0)
2488 && operand_equal_p (op1b, op2b, 0))
2490 tree t = combine_comparisons (UNKNOWN_LOCATION,
2491 TRUTH_ORIF_EXPR, code1, code2,
2492 boolean_type_node, op1a, op1b);
2497 /* Likewise the swapped case of the above. */
2498 if (operand_equal_p (op1a, op2b, 0)
2499 && operand_equal_p (op1b, op2a, 0))
2501 tree t = combine_comparisons (UNKNOWN_LOCATION,
2502 TRUTH_ORIF_EXPR, code1,
2503 swap_tree_comparison (code2),
2504 boolean_type_node, op1a, op1b);
2509 /* If both comparisons are of the same value against constants, we might
2510 be able to merge them. */
2511 if (operand_equal_p (op1a, op2a, 0)
2512 && TREE_CODE (op1b) == INTEGER_CST
2513 && TREE_CODE (op2b) == INTEGER_CST)
2515 int cmp = tree_int_cst_compare (op1b, op2b);
2517 /* If we have (op1a != op1b), we should either be able to
2518 return that or TRUE, depending on whether the constant op1b
2519 also satisfies the other comparison against op2b. */
2520 if (code1 == NE_EXPR)
2526 case EQ_EXPR: val = (cmp == 0); break;
2527 case NE_EXPR: val = (cmp != 0); break;
2528 case LT_EXPR: val = (cmp < 0); break;
2529 case GT_EXPR: val = (cmp > 0); break;
2530 case LE_EXPR: val = (cmp <= 0); break;
2531 case GE_EXPR: val = (cmp >= 0); break;
2532 default: done = false;
2537 return boolean_true_node;
2539 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2542 /* Likewise if the second comparison is a != comparison. */
2543 else if (code2 == NE_EXPR)
2549 case EQ_EXPR: val = (cmp == 0); break;
2550 case NE_EXPR: val = (cmp != 0); break;
2551 case LT_EXPR: val = (cmp > 0); break;
2552 case GT_EXPR: val = (cmp < 0); break;
2553 case LE_EXPR: val = (cmp >= 0); break;
2554 case GE_EXPR: val = (cmp <= 0); break;
2555 default: done = false;
2560 return boolean_true_node;
2562 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2566 /* See if an equality test is redundant with the other comparison. */
2567 else if (code1 == EQ_EXPR)
2572 case EQ_EXPR: val = (cmp == 0); break;
2573 case NE_EXPR: val = (cmp != 0); break;
2574 case LT_EXPR: val = (cmp < 0); break;
2575 case GT_EXPR: val = (cmp > 0); break;
2576 case LE_EXPR: val = (cmp <= 0); break;
2577 case GE_EXPR: val = (cmp >= 0); break;
2582 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2584 else if (code2 == EQ_EXPR)
2589 case EQ_EXPR: val = (cmp == 0); break;
2590 case NE_EXPR: val = (cmp != 0); break;
2591 case LT_EXPR: val = (cmp > 0); break;
2592 case GT_EXPR: val = (cmp < 0); break;
2593 case LE_EXPR: val = (cmp >= 0); break;
2594 case GE_EXPR: val = (cmp <= 0); break;
2599 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2602 /* Chose the less restrictive of two < or <= comparisons. */
2603 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2604 && (code2 == LT_EXPR || code2 == LE_EXPR))
2606 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2607 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2609 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2612 /* Likewise chose the less restrictive of two > or >= comparisons. */
2613 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2614 && (code2 == GT_EXPR || code2 == GE_EXPR))
2616 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2617 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2619 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2622 /* Check for singleton ranges. */
2624 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2625 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2626 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2628 /* Check for less/greater pairs that don't restrict the range at all. */
2630 && (code1 == LT_EXPR || code1 == LE_EXPR)
2631 && (code2 == GT_EXPR || code2 == GE_EXPR))
2632 return boolean_true_node;
2634 && (code1 == GT_EXPR || code1 == GE_EXPR)
2635 && (code2 == LT_EXPR || code2 == LE_EXPR))
2636 return boolean_true_node;
2639 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2640 NAME's definition is a truth value. See if there are any simplifications
2641 that can be done against the NAME's definition. */
2642 if (TREE_CODE (op1a) == SSA_NAME
2643 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2644 && (integer_zerop (op1b) || integer_onep (op1b)))
2646 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2647 || (code1 == NE_EXPR && integer_onep (op1b)));
2648 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2649 switch (gimple_code (stmt))
2652 /* Try to simplify by copy-propagating the definition. */
2653 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2656 /* If every argument to the PHI produces the same result when
2657 ORed with the second comparison, we win.
2658 Do not do this unless the type is bool since we need a bool
2659 result here anyway. */
2660 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2662 tree result = NULL_TREE;
2664 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2666 tree arg = gimple_phi_arg_def (stmt, i);
2668 /* If this PHI has itself as an argument, ignore it.
2669 If all the other args produce the same result,
2671 if (arg == gimple_phi_result (stmt))
2673 else if (TREE_CODE (arg) == INTEGER_CST)
2675 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2678 result = boolean_true_node;
2679 else if (!integer_onep (result))
2683 result = fold_build2 (code2, boolean_type_node,
2685 else if (!same_bool_comparison_p (result,
2689 else if (TREE_CODE (arg) == SSA_NAME)
2691 tree temp = or_var_with_comparison (arg, invert,
2697 else if (!same_bool_result_p (result, temp))
2713 /* Try to simplify the OR of two comparisons, specified by
2714 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2715 If this can be simplified to a single expression (without requiring
2716 introducing more SSA variables to hold intermediate values),
2717 return the resulting tree. Otherwise return NULL_TREE.
2718 If the result expression is non-null, it has boolean type. */
2721 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2722 enum tree_code code2, tree op2a, tree op2b)
2724 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2728 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);